diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index e8eed400d961fc..235bc78599400e 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -119,7 +119,7 @@ Python/dynload_*.c @ericsnowcurrently Lib/test/test_module/ @ericsnowcurrently Doc/c-api/module.rst @ericsnowcurrently **/*importlib/resources/* @jaraco @warsaw @FFY00 -**/importlib/metadata/* @jaraco @warsaw +**/*importlib/metadata/* @jaraco @warsaw # Dates and times **/*datetime* @pganssle @abalkin diff --git a/.gitignore b/.gitignore index 2d380a441d2394..8872e9d5508ff1 100644 --- a/.gitignore +++ b/.gitignore @@ -69,7 +69,7 @@ Lib/test/data/* /_bootstrap_python /Makefile /Makefile.pre -iOSTestbed.* +/iOSTestbed.* iOS/Frameworks/ iOS/Resources/Info.plist iOS/testbed/build diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 69d85238985150..663a11897d98e2 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.2.0 + rev: v0.3.4 hooks: - id: ruff name: Run Ruff on Lib/test/ @@ -14,6 +14,8 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.5.0 hooks: + - id: check-case-conflict + - id: check-merge-conflict - id: check-toml exclude: ^Lib/test/test_tomllib/ - id: check-yaml diff --git a/Android/README.md b/Android/README.md new file mode 100644 index 00000000000000..5ed186e06e3951 --- /dev/null +++ b/Android/README.md @@ -0,0 +1,64 @@ +# Python for Android + +These instructions are only needed if you're planning to compile Python for +Android yourself. Most users should *not* need to do this. If you're looking to +use Python on Android, one of the following tools will provide a much more +approachable user experience: + +* [Briefcase](https://briefcase.readthedocs.io), from the BeeWare project +* [Buildozer](https://buildozer.readthedocs.io), from the Kivy project +* [Chaquopy](https://chaquo.com/chaquopy/) + + +## Prerequisites + +Export the `ANDROID_HOME` environment variable to point at your Android SDK. If +you don't already have the SDK, here's how to install it: + +* Download the "Command line tools" from . +* Create a directory `android-sdk/cmdline-tools`, and unzip the command line + tools package into it. +* Rename `android-sdk/cmdline-tools/cmdline-tools` to + `android-sdk/cmdline-tools/latest`. +* `export ANDROID_HOME=/path/to/android-sdk` + + +## Building + +Building for Android requires doing a cross-build where you have a "build" +Python to help produce an Android build of CPython. This procedure has been +tested on Linux and macOS. + +The easiest way to do a build is to use the `android.py` script. You can either +have it perform the entire build process from start to finish in one step, or +you can do it in discrete steps that mirror running `configure` and `make` for +each of the two builds of Python you end up producing. + +The discrete steps for building via `android.py` are: + +```sh +./android.py configure-build +./android.py make-build +./android.py configure-host HOST +./android.py make-host HOST +``` + +To see the possible values of HOST, run `./android.py configure-host --help`. + +Or to do it all in a single command, run: + +```sh +./android.py build HOST +``` + +In the end you should have a build Python in `cross-build/build`, and an Android +build in `cross-build/HOST`. + +You can use `--` as a separator for any of the `configure`-related commands – +including `build` itself – to pass arguments to the underlying `configure` +call. 
For example, if you want a pydebug build that also caches the results from +`configure`, you can do: + +```sh +./android.py build HOST -- -C --with-pydebug +``` diff --git a/Android/android-env.sh b/Android/android-env.sh new file mode 100644 index 00000000000000..3ce3e035cfb8fe --- /dev/null +++ b/Android/android-env.sh @@ -0,0 +1,87 @@ +# This script must be sourced with the following variables already set: +: ${ANDROID_HOME:?} # Path to Android SDK +: ${HOST:?} # GNU target triplet + +# You may also override the following: +: ${api_level:=21} # Minimum Android API level the build will run on +: ${PREFIX:-} # Path in which to find required libraries + + +# Print all messages on stderr so they're visible when running within build-wheel. +log() { + echo "$1" >&2 +} + +fail() { + log "$1" + exit 1 +} + +# When moving to a new version of the NDK, carefully review the following: +# +# * https://developer.android.com/ndk/downloads/revision_history +# +# * https://android.googlesource.com/platform/ndk/+/ndk-rXX-release/docs/BuildSystemMaintainers.md +# where XX is the NDK version. Do a diff against the version you're upgrading from, e.g.: +# https://android.googlesource.com/platform/ndk/+/ndk-r25-release..ndk-r26-release/docs/BuildSystemMaintainers.md +ndk_version=26.2.11394342 + +ndk=$ANDROID_HOME/ndk/$ndk_version +if ! [ -e $ndk ]; then + log "Installing NDK: this may take several minutes" + yes | $ANDROID_HOME/cmdline-tools/latest/bin/sdkmanager "ndk;$ndk_version" +fi + +if [ $HOST = "arm-linux-androideabi" ]; then + clang_triplet=armv7a-linux-androideabi +else + clang_triplet=$HOST +fi + +# These variables are based on BuildSystemMaintainers.md above, and +# $ndk/build/cmake/android.toolchain.cmake. +toolchain=$(echo $ndk/toolchains/llvm/prebuilt/*) +export AR="$toolchain/bin/llvm-ar" +export AS="$toolchain/bin/llvm-as" +export CC="$toolchain/bin/${clang_triplet}${api_level}-clang" +export CXX="${CC}++" +export LD="$toolchain/bin/ld" +export NM="$toolchain/bin/llvm-nm" +export RANLIB="$toolchain/bin/llvm-ranlib" +export READELF="$toolchain/bin/llvm-readelf" +export STRIP="$toolchain/bin/llvm-strip" + +# The quotes make sure the wildcard in the `toolchain` assignment has been expanded. +for path in "$AR" "$AS" "$CC" "$CXX" "$LD" "$NM" "$RANLIB" "$READELF" "$STRIP"; do + if ! [ -e "$path" ]; then + fail "$path does not exist" + fi +done + +export CFLAGS="" +export LDFLAGS="-Wl,--build-id=sha1 -Wl,--no-rosegment" + +# Many packages get away with omitting -lm on Linux, but Android is stricter. +LDFLAGS="$LDFLAGS -lm" + +# -mstackrealign is included where necessary in the clang launcher scripts which are +# pointed to by $CC, so we don't need to include it here. 
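+# For example, with HOST=aarch64-linux-android and the default api_level of 21,
+# $CC points at $toolchain/bin/aarch64-linux-android21-clang.
+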
+if [ $HOST = "arm-linux-androideabi" ]; then + CFLAGS="$CFLAGS -march=armv7-a -mthumb" +fi + +if [ -n "${PREFIX:-}" ]; then + abs_prefix=$(realpath $PREFIX) + CFLAGS="$CFLAGS -I$abs_prefix/include" + LDFLAGS="$LDFLAGS -L$abs_prefix/lib" + + export PKG_CONFIG="pkg-config --define-prefix" + export PKG_CONFIG_LIBDIR="$abs_prefix/lib/pkgconfig" +fi + +# Use the same variable name as conda-build +if [ $(uname) = "Darwin" ]; then + export CPU_COUNT=$(sysctl -n hw.ncpu) +else + export CPU_COUNT=$(nproc) +fi diff --git a/Android/android.py b/Android/android.py new file mode 100755 index 00000000000000..5c57e53c415d2b --- /dev/null +++ b/Android/android.py @@ -0,0 +1,202 @@ +#!/usr/bin/env python3 + +import argparse +import os +import re +import shutil +import subprocess +import sys +import sysconfig +from os.path import relpath +from pathlib import Path + +SCRIPT_NAME = Path(__file__).name +CHECKOUT = Path(__file__).resolve().parent.parent +CROSS_BUILD_DIR = CHECKOUT / "cross-build" + + +def delete_if_exists(path): + if path.exists(): + print(f"Deleting {path} ...") + shutil.rmtree(path) + + +def subdir(name, *, clean=None): + path = CROSS_BUILD_DIR / name + if clean: + delete_if_exists(path) + if not path.exists(): + if clean is None: + sys.exit( + f"{path} does not exist. Create it by running the appropriate " + f"`configure` subcommand of {SCRIPT_NAME}.") + else: + path.mkdir(parents=True) + return path + + +def run(command, *, host=None, **kwargs): + env = os.environ.copy() + if host: + env_script = CHECKOUT / "Android/android-env.sh" + env_output = subprocess.run( + f"set -eu; " + f"HOST={host}; " + f"PREFIX={subdir(host)}/prefix; " + f". {env_script}; " + f"export", + check=True, shell=True, text=True, stdout=subprocess.PIPE + ).stdout + + for line in env_output.splitlines(): + # We don't require every line to match, as there may be some other + # output from installing the NDK. 
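+            # A matching line typically looks like one of these (values illustrative):
+            #   declare -x CC="/path/to/clang"
+            #   export PKG_CONFIG='pkg-config --define-prefix'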
+ if match := re.search( + "^(declare -x |export )?(\\w+)=['\"]?(.*?)['\"]?$", line + ): + key, value = match[2], match[3] + if env.get(key) != value: + print(line) + env[key] = value + + if env == os.environ: + raise ValueError(f"Found no variables in {env_script.name} output:\n" + + env_output) + + print(">", " ".join(map(str, command))) + try: + subprocess.run(command, check=True, env=env, **kwargs) + except subprocess.CalledProcessError as e: + sys.exit(e) + + +def build_python_path(): + """The path to the build Python binary.""" + build_dir = subdir("build") + binary = build_dir / "python" + if not binary.is_file(): + binary = binary.with_suffix(".exe") + if not binary.is_file(): + raise FileNotFoundError("Unable to find `python(.exe)` in " + f"{build_dir}") + + return binary + + +def configure_build_python(context): + os.chdir(subdir("build", clean=context.clean)) + + command = [relpath(CHECKOUT / "configure")] + if context.args: + command.extend(context.args) + run(command) + + +def make_build_python(context): + os.chdir(subdir("build")) + run(["make", "-j", str(os.cpu_count())]) + + +def unpack_deps(host): + deps_url = "https://github.com/beeware/cpython-android-source-deps/releases/download" + for name_ver in ["bzip2-1.0.8-1", "libffi-3.4.4-2", "openssl-3.0.13-1", + "sqlite-3.45.1-0", "xz-5.4.6-0"]: + filename = f"{name_ver}-{host}.tar.gz" + run(["wget", f"{deps_url}/{name_ver}/{filename}"]) + run(["tar", "-xf", filename]) + os.remove(filename) + + +def configure_host_python(context): + host_dir = subdir(context.host, clean=context.clean) + + prefix_dir = host_dir / "prefix" + if not prefix_dir.exists(): + prefix_dir.mkdir() + os.chdir(prefix_dir) + unpack_deps(context.host) + + build_dir = host_dir / "build" + build_dir.mkdir(exist_ok=True) + os.chdir(build_dir) + + command = [ + # Basic cross-compiling configuration + relpath(CHECKOUT / "configure"), + f"--host={context.host}", + f"--build={sysconfig.get_config_var('BUILD_GNU_TYPE')}", + f"--with-build-python={build_python_path()}", + "--without-ensurepip", + + # Android always uses a shared libpython. + "--enable-shared", + "--without-static-libpython", + + # Dependent libraries. The others are found using pkg-config: see + # android-env.sh. 
+ f"--with-openssl={prefix_dir}", + ] + + if context.args: + command.extend(context.args) + run(command, host=context.host) + + +def make_host_python(context): + host_dir = subdir(context.host) + os.chdir(host_dir / "build") + run(["make", "-j", str(os.cpu_count())], host=context.host) + run(["make", "install", f"prefix={host_dir}/prefix"], host=context.host) + + +def build_all(context): + steps = [configure_build_python, make_build_python, configure_host_python, + make_host_python] + for step in steps: + step(context) + + +def clean_all(context): + delete_if_exists(CROSS_BUILD_DIR) + + +def main(): + parser = argparse.ArgumentParser() + subcommands = parser.add_subparsers(dest="subcommand") + build = subcommands.add_parser("build", help="Build everything") + configure_build = subcommands.add_parser("configure-build", + help="Run `configure` for the " + "build Python") + make_build = subcommands.add_parser("make-build", + help="Run `make` for the build Python") + configure_host = subcommands.add_parser("configure-host", + help="Run `configure` for Android") + make_host = subcommands.add_parser("make-host", + help="Run `make` for Android") + clean = subcommands.add_parser("clean", help="Delete files and directories " + "created by this script") + for subcommand in build, configure_build, configure_host: + subcommand.add_argument( + "--clean", action="store_true", default=False, dest="clean", + help="Delete any relevant directories before building") + for subcommand in build, configure_host, make_host: + subcommand.add_argument( + "host", metavar="HOST", + choices=["aarch64-linux-android", "x86_64-linux-android"], + help="Host triplet: choices=[%(choices)s]") + for subcommand in build, configure_build, configure_host: + subcommand.add_argument("args", nargs="*", + help="Extra arguments to pass to `configure`") + + context = parser.parse_args() + dispatch = {"configure-build": configure_build_python, + "make-build": make_build_python, + "configure-host": configure_host_python, + "make-host": make_host_python, + "build": build_all, + "clean": clean_all} + dispatch[context.subcommand](context) + + +if __name__ == "__main__": + main() diff --git a/Doc/Makefile b/Doc/Makefile index 38fd60f2ae01d1..dd068c520ad60c 100644 --- a/Doc/Makefile +++ b/Doc/Makefile @@ -163,6 +163,7 @@ venv: echo "venv already exists."; \ echo "To recreate it, remove it first with \`make clean-venv'."; \ else \ + echo "Creating venv in $(VENVDIR)"; \ $(PYTHON) -m venv $(VENVDIR); \ $(VENVDIR)/bin/python3 -m pip install --upgrade pip; \ $(VENVDIR)/bin/python3 -m pip install -r $(REQUIREMENTS); \ diff --git a/Doc/c-api/bytes.rst b/Doc/c-api/bytes.rst index 4790d3b2da4375..bca78a9c369385 100644 --- a/Doc/c-api/bytes.rst +++ b/Doc/c-api/bytes.rst @@ -191,10 +191,10 @@ called with a non-bytes parameter. .. c:function:: int _PyBytes_Resize(PyObject **bytes, Py_ssize_t newsize) - A way to resize a bytes object even though it is "immutable". Only use this - to build up a brand new bytes object; don't use this if the bytes may already - be known in other parts of the code. It is an error to call this function if - the refcount on the input bytes object is not one. Pass the address of an + Resize a bytes object. *newsize* will be the new length of the bytes object. + You can think of it as creating a new bytes object and destroying the old + one, only more efficiently. + Pass the address of an existing bytes object as an lvalue (it may be written into), and the new size desired. 
On success, *\*bytes* holds the resized bytes object and ``0`` is returned; the address in *\*bytes* may differ from its input value. If the diff --git a/Doc/c-api/code.rst b/Doc/c-api/code.rst index f6fdd7574323c7..968c472219c643 100644 --- a/Doc/c-api/code.rst +++ b/Doc/c-api/code.rst @@ -34,10 +34,16 @@ bound into a function. Return the number of free variables in a code object. -.. c:function:: int PyCode_GetFirstFree(PyCodeObject *co) +.. c:function:: int PyUnstable_Code_GetFirstFree(PyCodeObject *co) Return the position of the first free variable in a code object. + .. versionchanged:: 3.13 + + Renamed from ``PyCode_GetFirstFree`` as part of :ref:`unstable-c-api`. + The old name is deprecated, but will remain available until the + signature changes again. + .. c:function:: PyCodeObject* PyUnstable_Code_New(int argcount, int kwonlyargcount, int nlocals, int stacksize, int flags, PyObject *code, PyObject *consts, PyObject *names, PyObject *varnames, PyObject *freevars, PyObject *cellvars, PyObject *filename, PyObject *name, PyObject *qualname, int firstlineno, PyObject *linetable, PyObject *exceptiontable) Return a new code object. If you need a dummy code object to create a frame, diff --git a/Doc/c-api/hash.rst b/Doc/c-api/hash.rst index 1cf094cfcdca24..ddf0b3e15dbdbe 100644 --- a/Doc/c-api/hash.rst +++ b/Doc/c-api/hash.rst @@ -82,3 +82,14 @@ See also the :c:member:`PyTypeObject.tp_hash` member and :ref:`numeric-hash`. The function cannot fail: it cannot return ``-1``. .. versionadded:: 3.13 + +.. c:function:: Py_hash_t PyObject_GenericHash(PyObject *obj) + + Generic hashing function that is meant to be put into a type + object's ``tp_hash`` slot. + Its result only depends on the object's identity. + + .. impl-detail:: + In CPython, it is equivalent to :c:func:`Py_HashPointer`. + + .. versionadded:: 3.13 diff --git a/Doc/c-api/object.rst b/Doc/c-api/object.rst index 12476412799a4f..ba454db9117504 100644 --- a/Doc/c-api/object.rst +++ b/Doc/c-api/object.rst @@ -6,6 +6,55 @@ Object Protocol =============== +.. c:function:: PyObject* Py_GetConstant(unsigned int constant_id) + + Get a :term:`strong reference` to a constant. + + Set an exception and return ``NULL`` if *constant_id* is invalid. + + *constant_id* must be one of these constant identifiers: + + .. c:namespace:: NULL + + ======================================== ===== ========================= + Constant Identifier Value Returned object + ======================================== ===== ========================= + .. c:macro:: Py_CONSTANT_NONE ``0`` :py:data:`None` + .. c:macro:: Py_CONSTANT_FALSE ``1`` :py:data:`False` + .. c:macro:: Py_CONSTANT_TRUE ``2`` :py:data:`True` + .. c:macro:: Py_CONSTANT_ELLIPSIS ``3`` :py:data:`Ellipsis` + .. c:macro:: Py_CONSTANT_NOT_IMPLEMENTED ``4`` :py:data:`NotImplemented` + .. c:macro:: Py_CONSTANT_ZERO ``5`` ``0`` + .. c:macro:: Py_CONSTANT_ONE ``6`` ``1`` + .. c:macro:: Py_CONSTANT_EMPTY_STR ``7`` ``''`` + .. c:macro:: Py_CONSTANT_EMPTY_BYTES ``8`` ``b''`` + .. c:macro:: Py_CONSTANT_EMPTY_TUPLE ``9`` ``()`` + ======================================== ===== ========================= + + Numeric values are only given for projects which cannot use the constant + identifiers. + + + .. versionadded:: 3.13 + + .. impl-detail:: + + In CPython, all of these constants are :term:`immortal`. + + +.. c:function:: PyObject* Py_GetConstantBorrowed(unsigned int constant_id) + + Similar to :c:func:`Py_GetConstant`, but return a :term:`borrowed + reference`. 
+ + This function is primarily intended for backwards compatibility: + using :c:func:`Py_GetConstant` is recommended for new code. + + The reference is borrowed from the interpreter, and is valid until the + interpreter finalization. + .. versionadded:: 3.13 + + .. c:var:: PyObject* Py_NotImplemented The ``NotImplemented`` singleton, used to signal that an operation is diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst index 8a26f237652d12..e66ab01878cac0 100644 --- a/Doc/c-api/typeobj.rst +++ b/Doc/c-api/typeobj.rst @@ -883,6 +883,10 @@ and :c:data:`PyType_Type` effectively act as defaults.) :c:member:`~PyTypeObject.tp_richcompare` and :c:member:`~PyTypeObject.tp_hash`, when the subtype's :c:member:`~PyTypeObject.tp_richcompare` and :c:member:`~PyTypeObject.tp_hash` are both ``NULL``. + **Default:** + + :c:data:`PyBaseObject_Type` uses :c:func:`PyObject_GenericHash`. + .. c:member:: ternaryfunc PyTypeObject.tp_call diff --git a/Doc/data/stable_abi.dat b/Doc/data/stable_abi.dat index 9d0ad3d036dac3..2763bea5137cc7 100644 --- a/Doc/data/stable_abi.dat +++ b/Doc/data/stable_abi.dat @@ -679,6 +679,7 @@ function,PyType_GenericNew,3.2,, function,PyType_GetFlags,3.2,, function,PyType_GetFullyQualifiedName,3.13,, function,PyType_GetModule,3.10,, +function,PyType_GetModuleByDef,3.13,, function,PyType_GetModuleName,3.13,, function,PyType_GetModuleState,3.10,, function,PyType_GetName,3.11,, @@ -838,6 +839,8 @@ function,Py_GenericAlias,3.9,, var,Py_GenericAliasType,3.9,, function,Py_GetBuildInfo,3.2,, function,Py_GetCompiler,3.2,, +function,Py_GetConstant,3.13,, +function,Py_GetConstantBorrowed,3.13,, function,Py_GetCopyright,3.2,, function,Py_GetExecPrefix,3.2,, function,Py_GetPath,3.2,, diff --git a/Doc/glossary.rst b/Doc/glossary.rst index 72fb09ef6207c9..ee8b26665d6921 100644 --- a/Doc/glossary.rst +++ b/Doc/glossary.rst @@ -727,18 +727,6 @@ Glossary thread removes *key* from *mapping* after the test, but before the lookup. This issue can be solved with locks or by using the EAFP approach. - locale encoding - On Unix, it is the encoding of the LC_CTYPE locale. It can be set with - :func:`locale.setlocale(locale.LC_CTYPE, new_locale) `. - - On Windows, it is the ANSI code page (ex: ``"cp1252"``). - - On Android and VxWorks, Python uses ``"utf-8"`` as the locale encoding. - - ``locale.getencoding()`` can be used to get the locale encoding. - - See also the :term:`filesystem encoding and error handler`. - list A built-in Python :term:`sequence`. Despite its name it is more akin to an array in other languages than to a linked list since access to @@ -758,6 +746,18 @@ Glossary :term:`finder`. See :pep:`302` for details and :class:`importlib.abc.Loader` for an :term:`abstract base class`. + locale encoding + On Unix, it is the encoding of the LC_CTYPE locale. It can be set with + :func:`locale.setlocale(locale.LC_CTYPE, new_locale) `. + + On Windows, it is the ANSI code page (ex: ``"cp1252"``). + + On Android and VxWorks, Python uses ``"utf-8"`` as the locale encoding. + + :func:`locale.getencoding` can be used to get the locale encoding. + + See also the :term:`filesystem encoding and error handler`. + magic method .. index:: pair: magic; method diff --git a/Doc/howto/logging-cookbook.rst b/Doc/howto/logging-cookbook.rst index ad3e34d0b33bd2..d8ebeabcd522b1 100644 --- a/Doc/howto/logging-cookbook.rst +++ b/Doc/howto/logging-cookbook.rst @@ -1846,8 +1846,11 @@ the use of a :class:`Filter` does not provide the desired result. .. 
_zeromq-handlers: -Subclassing QueueHandler - a ZeroMQ example -------------------------------------------- +Subclassing QueueHandler and QueueListener- a ZeroMQ example +------------------------------------------------------------ + +Subclass ``QueueHandler`` +^^^^^^^^^^^^^^^^^^^^^^^^^ You can use a :class:`QueueHandler` subclass to send messages to other kinds of queues, for example a ZeroMQ 'publish' socket. In the example below,the @@ -1885,8 +1888,8 @@ data needed by the handler to create the socket:: self.queue.close() -Subclassing QueueListener - a ZeroMQ example --------------------------------------------- +Subclass ``QueueListener`` +^^^^^^^^^^^^^^^^^^^^^^^^^^ You can also subclass :class:`QueueListener` to get messages from other kinds of queues, for example a ZeroMQ 'subscribe' socket. Here's an example:: @@ -1903,25 +1906,134 @@ of queues, for example a ZeroMQ 'subscribe' socket. Here's an example:: msg = self.queue.recv_json() return logging.makeLogRecord(msg) +.. _pynng-handlers: -.. seealso:: +Subclassing QueueHandler and QueueListener- a ``pynng`` example +--------------------------------------------------------------- - Module :mod:`logging` - API reference for the logging module. +In a similar way to the above section, we can implement a listener and handler +using `pynng `_, which is a Python binding to +`NNG `_, billed as a spiritual successor to ZeroMQ. +The following snippets illustrate -- you can test them in an environment which has +``pynng`` installed. Juat for variety, we present the listener first. - Module :mod:`logging.config` - Configuration API for the logging module. - Module :mod:`logging.handlers` - Useful handlers included with the logging module. +Subclass ``QueueListener`` +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. code-block:: python + + import json + import logging + import logging.handlers + + import pynng - :ref:`A basic logging tutorial ` + DEFAULT_ADDR = "tcp://localhost:13232" - :ref:`A more advanced logging tutorial ` + interrupted = False + class NNGSocketListener(logging.handlers.QueueListener): + + def __init__(self, uri, /, *handlers, **kwargs): + # Have a timeout for interruptability, and open a + # subscriber socket + socket = pynng.Sub0(listen=uri, recv_timeout=500) + # The b'' subscription matches all topics + topics = kwargs.pop('topics', None) or b'' + socket.subscribe(topics) + # We treat the socket as a queue + super().__init__(socket, *handlers, **kwargs) + + def dequeue(self, block): + data = None + # Keep looping while not interrupted and no data received over the + # socket + while not interrupted: + try: + data = self.queue.recv(block=block) + break + except pynng.Timeout: + pass + except pynng.Closed: # sometimes hit when you hit Ctrl-C + break + if data is None: + return None + # Get the logging event sent from a publisher + event = json.loads(data.decode('utf-8')) + return logging.makeLogRecord(event) + + def enqueue_sentinel(self): + # Not used in this implementation, as the socket isn't really a + # queue + pass + + logging.getLogger('pynng').propagate = False + listener = NNGSocketListener(DEFAULT_ADDR, logging.StreamHandler(), topics=b'') + listener.start() + print('Press Ctrl-C to stop.') + try: + while True: + pass + except KeyboardInterrupt: + interrupted = True + finally: + listener.stop() + + +Subclass ``QueueHandler`` +^^^^^^^^^^^^^^^^^^^^^^^^^ .. currentmodule:: logging +.. 
code-block:: python + + import json + import logging + import logging.handlers + import time + import random + + import pynng + + DEFAULT_ADDR = "tcp://localhost:13232" + + class NNGSocketHandler(logging.handlers.QueueHandler): + + def __init__(self, uri): + socket = pynng.Pub0(dial=uri, send_timeout=500) + super().__init__(socket) + + def enqueue(self, record): + # Send the record as UTF-8 encoded JSON + d = dict(record.__dict__) + data = json.dumps(d) + self.queue.send(data.encode('utf-8')) + + def close(self): + self.queue.close() + + logging.getLogger('pynng').propagate = False + handler = NNGSocketHandler(DEFAULT_ADDR) + logging.basicConfig(level=logging.DEBUG, + handlers=[logging.StreamHandler(), handler], + format='%(levelname)-8s %(name)10s %(message)s') + levels = (logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR, + logging.CRITICAL) + logger_names = ('myapp', 'myapp.lib1', 'myapp.lib2') + msgno = 1 + while True: + # Just randomly select some loggers and levels and log away + level = random.choice(levels) + logger = logging.getLogger(random.choice(logger_names)) + logger.log(level, 'Message no. %5d' % msgno) + msgno += 1 + delay = random.random() * 2 + 0.5 + time.sleep(delay) + +You can run the above two snippets in separate command shells. + + An example dictionary-based configuration ----------------------------------------- @@ -3418,7 +3530,7 @@ The worker thread is implemented using Qt's ``QThread`` class rather than the :mod:`threading` module, as there are circumstances where one has to use ``QThread``, which offers better integration with other ``Qt`` components. -The code should work with recent releases of either ``PySide6``, ``PyQt6``, +The code should work with recent releases of any of ``PySide6``, ``PyQt6``, ``PySide2`` or ``PyQt5``. You should be able to adapt the approach to earlier versions of Qt. Please refer to the comments in the code snippet for more detailed information. diff --git a/Doc/includes/wasm-ios-notavail.rst b/Doc/includes/wasm-ios-notavail.rst new file mode 100644 index 00000000000000..c820665f5e403c --- /dev/null +++ b/Doc/includes/wasm-ios-notavail.rst @@ -0,0 +1,8 @@ +.. include for modules that don't work on WASM or iOS + +.. availability:: not WASI, not iOS. + + This module does not work or is not available on WebAssembly platforms, or + on iOS. See :ref:`wasm-availability` for more information on WASM + availability; see :ref:`iOS-availability` for more information on iOS + availability. diff --git a/Doc/includes/wasm-notavail.rst b/Doc/includes/wasm-notavail.rst index e680e1f9b43807..c1b79d2a4a0508 100644 --- a/Doc/includes/wasm-notavail.rst +++ b/Doc/includes/wasm-notavail.rst @@ -1,7 +1,6 @@ .. include for modules that don't work on WASM -.. availability:: not Emscripten, not WASI. +.. availability:: not WASI. - This module does not work or is not available on WebAssembly platforms - ``wasm32-emscripten`` and ``wasm32-wasi``. See + This module does not work or is not available on WebAssembly. See :ref:`wasm-availability` for more information. 
diff --git a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst index 36976470b5a468..9f7d6456e623a2 100644 --- a/Doc/library/ctypes.rst +++ b/Doc/library/ctypes.rst @@ -1334,8 +1334,9 @@ Here are some examples:: 'libbz2.so.1.0' >>> -On macOS, :func:`~ctypes.util.find_library` tries several predefined naming schemes and paths -to locate the library, and returns a full pathname if successful:: +On macOS and Android, :func:`~ctypes.util.find_library` uses the system's +standard naming schemes and paths to locate the library, and returns a full +pathname if successful:: >>> from ctypes.util import find_library >>> find_library("c") diff --git a/Doc/library/curses.rst b/Doc/library/curses.rst index 9b8a98f05f7cbb..550872ce2ca59e 100644 --- a/Doc/library/curses.rst +++ b/Doc/library/curses.rst @@ -21,6 +21,8 @@ for Windows, DOS, and possibly other systems as well. This extension module is designed to match the API of ncurses, an open-source curses library hosted on Linux and the BSD variants of Unix. +.. include:: ../includes/wasm-ios-notavail.rst + .. note:: Whenever the documentation mentions a *character* it can be specified diff --git a/Doc/library/dataclasses.rst b/Doc/library/dataclasses.rst index c612c138fc6ea8..61b2263339da71 100644 --- a/Doc/library/dataclasses.rst +++ b/Doc/library/dataclasses.rst @@ -1,5 +1,5 @@ -:mod:`dataclasses` --- Data Classes -=================================== +:mod:`!dataclasses` --- Data Classes +==================================== .. module:: dataclasses :synopsis: Generate special methods on user-defined classes. @@ -31,7 +31,7 @@ using :pep:`526` type annotations. For example, this code:: def total_cost(self) -> float: return self.unit_price * self.quantity_on_hand -will add, among other things, a :meth:`~object.__init__` that looks like:: +will add, among other things, a :meth:`!__init__` that looks like:: def __init__(self, name: str, unit_price: float, quantity_on_hand: int = 0): self.name = name @@ -49,26 +49,26 @@ Module contents .. decorator:: dataclass(*, init=True, repr=True, eq=True, order=False, unsafe_hash=False, frozen=False, match_args=True, kw_only=False, slots=False, weakref_slot=False) This function is a :term:`decorator` that is used to add generated - :term:`special method`\s to classes, as described below. + :term:`special methods ` to classes, as described below. - The :func:`dataclass` decorator examines the class to find + The ``@dataclass`` decorator examines the class to find ``field``\s. A ``field`` is defined as a class variable that has a :term:`type annotation `. With two - exceptions described below, nothing in :func:`dataclass` + exceptions described below, nothing in ``@dataclass`` examines the type specified in the variable annotation. The order of the fields in all of the generated methods is the order in which they appear in the class definition. - The :func:`dataclass` decorator will add various "dunder" methods to + The ``@dataclass`` decorator will add various "dunder" methods to the class, described below. If any of the added methods already exist in the class, the behavior depends on the parameter, as documented below. The decorator returns the same class that it is called on; no new class is created. - If :func:`dataclass` is used just as a simple decorator with no parameters, + If ``@dataclass`` is used just as a simple decorator with no parameters, it acts as if it has the default values documented in this - signature. That is, these three uses of :func:`dataclass` are + signature. 
That is, these three uses of ``@dataclass`` are equivalent:: @dataclass @@ -84,12 +84,12 @@ Module contents class C: ... - The parameters to :func:`dataclass` are: + The parameters to ``@dataclass`` are: - ``init``: If true (the default), a :meth:`~object.__init__` method will be generated. - If the class already defines :meth:`~object.__init__`, this parameter is + If the class already defines :meth:`!__init__`, this parameter is ignored. - ``repr``: If true (the default), a :meth:`~object.__repr__` method will be @@ -99,7 +99,7 @@ Module contents are not included. For example: ``InventoryItem(name='widget', unit_price=3.0, quantity_on_hand=10)``. - If the class already defines :meth:`~object.__repr__`, this parameter is + If the class already defines :meth:`!__repr__`, this parameter is ignored. - ``eq``: If true (the default), an :meth:`~object.__eq__` method will be @@ -107,7 +107,7 @@ Module contents of its fields, in order. Both instances in the comparison must be of the identical type. - If the class already defines :meth:`~object.__eq__`, this parameter is + If the class already defines :meth:`!__eq__`, this parameter is ignored. - ``order``: If true (the default is ``False``), :meth:`~object.__lt__`, @@ -117,43 +117,43 @@ Module contents identical type. If ``order`` is true and ``eq`` is false, a :exc:`ValueError` is raised. - If the class already defines any of :meth:`~object.__lt__`, - :meth:`~object.__le__`, :meth:`~object.__gt__`, or :meth:`~object.__ge__`, then + If the class already defines any of :meth:`!__lt__`, + :meth:`!__le__`, :meth:`!__gt__`, or :meth:`!__ge__`, then :exc:`TypeError` is raised. - ``unsafe_hash``: If ``False`` (the default), a :meth:`~object.__hash__` method is generated according to how ``eq`` and ``frozen`` are set. - :meth:`~object.__hash__` is used by built-in :meth:`hash()`, and when objects are + :meth:`!__hash__` is used by built-in :meth:`hash()`, and when objects are added to hashed collections such as dictionaries and sets. Having a - :meth:`~object.__hash__` implies that instances of the class are immutable. + :meth:`!__hash__` implies that instances of the class are immutable. Mutability is a complicated property that depends on the programmer's - intent, the existence and behavior of :meth:`~object.__eq__`, and the values of - the ``eq`` and ``frozen`` flags in the :func:`dataclass` decorator. + intent, the existence and behavior of :meth:`!__eq__`, and the values of + the ``eq`` and ``frozen`` flags in the ``@dataclass`` decorator. - By default, :func:`dataclass` will not implicitly add a :meth:`~object.__hash__` + By default, ``@dataclass`` will not implicitly add a :meth:`~object.__hash__` method unless it is safe to do so. Neither will it add or change an - existing explicitly defined :meth:`~object.__hash__` method. Setting the class + existing explicitly defined :meth:`!__hash__` method. Setting the class attribute ``__hash__ = None`` has a specific meaning to Python, as - described in the :meth:`~object.__hash__` documentation. + described in the :meth:`!__hash__` documentation. - If :meth:`~object.__hash__` is not explicitly defined, or if it is set to ``None``, - then :func:`dataclass` *may* add an implicit :meth:`~object.__hash__` method. - Although not recommended, you can force :func:`dataclass` to create a - :meth:`~object.__hash__` method with ``unsafe_hash=True``. 
This might be the case + If :meth:`!__hash__` is not explicitly defined, or if it is set to ``None``, + then ``@dataclass`` *may* add an implicit :meth:`!__hash__` method. + Although not recommended, you can force ``@dataclass`` to create a + :meth:`!__hash__` method with ``unsafe_hash=True``. This might be the case if your class is logically immutable but can still be mutated. This is a specialized use case and should be considered carefully. - Here are the rules governing implicit creation of a :meth:`~object.__hash__` - method. Note that you cannot both have an explicit :meth:`~object.__hash__` + Here are the rules governing implicit creation of a :meth:`!__hash__` + method. Note that you cannot both have an explicit :meth:`!__hash__` method in your dataclass and set ``unsafe_hash=True``; this will result in a :exc:`TypeError`. - If ``eq`` and ``frozen`` are both true, by default :func:`dataclass` will - generate a :meth:`~object.__hash__` method for you. If ``eq`` is true and - ``frozen`` is false, :meth:`~object.__hash__` will be set to ``None``, marking it + If ``eq`` and ``frozen`` are both true, by default ``@dataclass`` will + generate a :meth:`!__hash__` method for you. If ``eq`` is true and + ``frozen`` is false, :meth:`!__hash__` will be set to ``None``, marking it unhashable (which it is, since it is mutable). If ``eq`` is false, - :meth:`~object.__hash__` will be left untouched meaning the :meth:`~object.__hash__` + :meth:`!__hash__` will be left untouched meaning the :meth:`!__hash__` method of the superclass will be used (if the superclass is :class:`object`, this means it will fall back to id-based hashing). @@ -165,7 +165,7 @@ Module contents - ``match_args``: If true (the default is ``True``), the ``__match_args__`` tuple will be created from the list of parameters to the generated :meth:`~object.__init__` method (even if - :meth:`~object.__init__` is not generated, see above). If false, or if + :meth:`!__init__` is not generated, see above). If false, or if ``__match_args__`` is already defined in the class, then ``__match_args__`` will not be generated. @@ -175,7 +175,7 @@ Module contents fields will be marked as keyword-only. If a field is marked as keyword-only, then the only effect is that the :meth:`~object.__init__` parameter generated from a keyword-only field must be specified - with a keyword when :meth:`~object.__init__` is called. There is no + with a keyword when :meth:`!__init__` is called. There is no effect on any other aspect of dataclasses. See the :term:`parameter` glossary entry for details. Also see the :const:`KW_ONLY` section. @@ -184,7 +184,7 @@ Module contents - ``slots``: If true (the default is ``False``), :attr:`~object.__slots__` attribute will be generated and new class will be returned instead of the original one. - If :attr:`~object.__slots__` is already defined in the class, then :exc:`TypeError` + If :attr:`!__slots__` is already defined in the class, then :exc:`TypeError` is raised. .. versionadded:: 3.10 @@ -229,7 +229,7 @@ Module contents required. There are, however, some dataclass features that require additional per-field information. To satisfy this need for additional information, you can replace the default field value - with a call to the provided :func:`field` function. For example:: + with a call to the provided :func:`!field` function. For example:: @dataclass class C: @@ -243,10 +243,10 @@ Module contents used because ``None`` is a valid value for some parameters with a distinct meaning. 
No code should directly use the :const:`MISSING` value. - The parameters to :func:`field` are: + The parameters to :func:`!field` are: - ``default``: If provided, this will be the default value for this - field. This is needed because the :meth:`field` call itself + field. This is needed because the :func:`!field` call itself replaces the normal position of the default value. - ``default_factory``: If provided, it must be a zero-argument @@ -293,10 +293,10 @@ Module contents .. versionadded:: 3.10 If the default value of a field is specified by a call to - :func:`field()`, then the class attribute for this field will be + :func:`!field`, then the class attribute for this field will be replaced by the specified ``default`` value. If no ``default`` is provided, then the class attribute will be deleted. The intent is - that after the :func:`dataclass` decorator runs, the class + that after the :func:`@dataclass ` decorator runs, the class attributes will all contain the default values for the fields, just as if the default value itself were specified. For example, after:: @@ -314,10 +314,10 @@ Module contents .. class:: Field - :class:`Field` objects describe each defined field. These objects + :class:`!Field` objects describe each defined field. These objects are created internally, and are returned by the :func:`fields` module-level method (see below). Users should never instantiate a - :class:`Field` object directly. Its documented attributes are: + :class:`!Field` object directly. Its documented attributes are: - ``name``: The name of the field. - ``type``: The type of the field. @@ -343,7 +343,7 @@ Module contents lists, and tuples are recursed into. Other objects are copied with :func:`copy.deepcopy`. - Example of using :func:`asdict` on nested dataclasses:: + Example of using :func:`!asdict` on nested dataclasses:: @dataclass class Point: @@ -364,7 +364,7 @@ Module contents dict((field.name, getattr(obj, field.name)) for field in fields(obj)) - :func:`asdict` raises :exc:`TypeError` if ``obj`` is not a dataclass + :func:`!asdict` raises :exc:`TypeError` if ``obj`` is not a dataclass instance. .. function:: astuple(obj, *, tuple_factory=tuple) @@ -384,7 +384,7 @@ Module contents tuple(getattr(obj, field.name) for field in dataclasses.fields(obj)) - :func:`astuple` raises :exc:`TypeError` if ``obj`` is not a dataclass + :func:`!astuple` raises :exc:`TypeError` if ``obj`` is not a dataclass instance. .. function:: make_dataclass(cls_name, fields, *, bases=(), namespace=None, init=True, repr=True, eq=True, order=False, unsafe_hash=False, frozen=False, match_args=True, kw_only=False, slots=False, weakref_slot=False, module=None) @@ -397,7 +397,7 @@ Module contents ``typing.Any`` is used for ``type``. The values of ``init``, ``repr``, ``eq``, ``order``, ``unsafe_hash``, ``frozen``, ``match_args``, ``kw_only``, ``slots``, and ``weakref_slot`` have - the same meaning as they do in :func:`dataclass`. + the same meaning as they do in :func:`@dataclass `. If ``module`` is defined, the ``__module__`` attribute of the dataclass is set to that value. @@ -405,7 +405,7 @@ Module contents This function is not strictly required, because any Python mechanism for creating a new class with ``__annotations__`` can - then apply the :func:`dataclass` function to convert that class to + then apply the ``@dataclass`` function to convert that class to a dataclass. This function is provided as a convenience. For example:: @@ -438,15 +438,15 @@ Module contents :meth:`__post_init__`, if present, is also called. 
Init-only variables without default values, if any exist, must be - specified on the call to :func:`replace` so that they can be passed to - :meth:`~object.__init__` and :meth:`__post_init__`. + specified on the call to :func:`!replace` so that they can be passed to + :meth:`!__init__` and :meth:`__post_init__`. It is an error for ``changes`` to contain any fields that are defined as having ``init=False``. A :exc:`ValueError` will be raised in this case. Be forewarned about how ``init=False`` fields work during a call to - :func:`replace`. They are not copied from the source object, but + :func:`!replace`. They are not copied from the source object, but rather are initialized in :meth:`__post_init__`, if they're initialized at all. It is expected that ``init=False`` fields will be rarely and judiciously used. If they are used, it might be wise @@ -475,11 +475,11 @@ Module contents .. data:: KW_ONLY A sentinel value used as a type annotation. Any fields after a - pseudo-field with the type of :const:`KW_ONLY` are marked as + pseudo-field with the type of :const:`!KW_ONLY` are marked as keyword-only fields. Note that a pseudo-field of type - :const:`KW_ONLY` is otherwise completely ignored. This includes the + :const:`!KW_ONLY` is otherwise completely ignored. This includes the name of such a field. By convention, a name of ``_`` is used for a - :const:`KW_ONLY` field. Keyword-only fields signify + :const:`!KW_ONLY` field. Keyword-only fields signify :meth:`~object.__init__` parameters that must be specified as keywords when the class is instantiated. @@ -495,7 +495,7 @@ Module contents p = Point(0, y=1.5, z=2.0) In a single dataclass, it is an error to specify more than one - field whose type is :const:`KW_ONLY`. + field whose type is :const:`!KW_ONLY`. .. versionadded:: 3.10 @@ -515,9 +515,9 @@ Post-init processing When defined on the class, it will be called by the generated :meth:`~object.__init__`, normally as ``self.__post_init__()``. However, if any ``InitVar`` fields are defined, they will also be - passed to :meth:`__post_init__` in the order they were defined in the - class. If no :meth:`~object.__init__` method is generated, then - :meth:`__post_init__` will not automatically be called. + passed to :meth:`!__post_init__` in the order they were defined in the + class. If no :meth:`!__init__` method is generated, then + :meth:`!__post_init__` will not automatically be called. Among other uses, this allows for initializing field values that depend on one or more other fields. For example:: @@ -531,8 +531,8 @@ Post-init processing def __post_init__(self): self.c = self.a + self.b -The :meth:`~object.__init__` method generated by :func:`dataclass` does not call base -class :meth:`~object.__init__` methods. If the base class has an :meth:`~object.__init__` method +The :meth:`~object.__init__` method generated by :func:`@dataclass ` does not call base +class :meth:`!__init__` methods. If the base class has an :meth:`!__init__` method that has to be called, it is common to call this method in a :meth:`__post_init__` method:: @@ -548,18 +548,18 @@ that has to be called, it is common to call this method in a def __post_init__(self): super().__init__(self.side, self.side) -Note, however, that in general the dataclass-generated :meth:`~object.__init__` methods +Note, however, that in general the dataclass-generated :meth:`!__init__` methods don't need to be called, since the derived dataclass will take care of initializing all fields of any base class that is a dataclass itself. 
See the section below on init-only variables for ways to pass -parameters to :meth:`__post_init__`. Also see the warning about how +parameters to :meth:`!__post_init__`. Also see the warning about how :func:`replace` handles ``init=False`` fields. Class variables --------------- -One of the few places where :func:`dataclass` actually inspects the type +One of the few places where :func:`@dataclass ` actually inspects the type of a field is to determine if a field is a class variable as defined in :pep:`526`. It does this by checking if the type of the field is ``typing.ClassVar``. If a field is a ``ClassVar``, it is excluded @@ -570,7 +570,7 @@ module-level :func:`fields` function. Init-only variables ------------------- -Another place where :func:`dataclass` inspects a type annotation is to +Another place where :func:`@dataclass ` inspects a type annotation is to determine if a field is an init-only variable. It does this by seeing if the type of a field is of type ``dataclasses.InitVar``. If a field is an ``InitVar``, it is considered a pseudo-field called an init-only @@ -602,19 +602,19 @@ Frozen instances ---------------- It is not possible to create truly immutable Python objects. However, -by passing ``frozen=True`` to the :meth:`dataclass` decorator you can +by passing ``frozen=True`` to the :func:`@dataclass ` decorator you can emulate immutability. In that case, dataclasses will add :meth:`~object.__setattr__` and :meth:`~object.__delattr__` methods to the class. These methods will raise a :exc:`FrozenInstanceError` when invoked. There is a tiny performance penalty when using ``frozen=True``: :meth:`~object.__init__` cannot use simple assignment to initialize fields, and -must use :meth:`!object.__setattr__`. +must use :meth:`!__setattr__`. Inheritance ----------- -When the dataclass is being created by the :meth:`dataclass` decorator, +When the dataclass is being created by the :func:`@dataclass ` decorator, it looks through all of the class's base classes in reverse MRO (that is, starting at :class:`object`) and, for each dataclass that it finds, adds the fields from that base class to an ordered mapping of fields. @@ -641,8 +641,8 @@ The generated :meth:`~object.__init__` method for ``C`` will look like:: def __init__(self, x: int = 15, y: int = 0, z: int = 10): -Re-ordering of keyword-only parameters in :meth:`~object.__init__` ------------------------------------------------------------------- +Re-ordering of keyword-only parameters in :meth:`!__init__` +----------------------------------------------------------- After the parameters needed for :meth:`~object.__init__` are computed, any keyword-only parameters are moved to come after all regular @@ -665,7 +665,7 @@ fields, and ``Base.x`` and ``D.z`` are regular fields:: z: int = 10 t: int = field(kw_only=True, default=0) -The generated :meth:`~object.__init__` method for ``D`` will look like:: +The generated :meth:`!__init__` method for ``D`` will look like:: def __init__(self, x: Any = 15.0, z: int = 10, *, y: int = 0, w: int = 1, t: int = 0): @@ -674,7 +674,7 @@ the list of fields: parameters derived from regular fields are followed by parameters derived from keyword-only fields. The relative ordering of keyword-only parameters is maintained in the -re-ordered :meth:`~object.__init__` parameter list. +re-ordered :meth:`!__init__` parameter list. 
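+
+For instance, given the ``Base`` and ``D`` classes above, an instance could be
+created as follows (values chosen only for illustration)::
+
+   d = D(3.0, 20, y=5, w=2, t=1)
+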
Default factory functions @@ -689,7 +689,7 @@ example, to create a new instance of a list, use:: If a field is excluded from :meth:`~object.__init__` (using ``init=False``) and the field also specifies ``default_factory``, then the default factory function will always be called from the generated -:meth:`~object.__init__` function. This happens because there is no other +:meth:`!__init__` function. This happens because there is no other way to give the field an initial value. Mutable default values @@ -738,7 +738,7 @@ for ``x`` when creating a class instance will share the same copy of ``x``. Because dataclasses just use normal Python class creation they also share this behavior. There is no general way for Data Classes to detect this condition. Instead, the -:func:`dataclass` decorator will raise a :exc:`ValueError` if it +:func:`@dataclass ` decorator will raise a :exc:`ValueError` if it detects an unhashable default parameter. The assumption is that if a value is unhashable, it is mutable. This is a partial solution, but it does protect against many common errors. @@ -764,15 +764,17 @@ Descriptor-typed fields Fields that are assigned :ref:`descriptor objects ` as their default value have the following special behaviors: -* The value for the field passed to the dataclass's ``__init__`` method is - passed to the descriptor's ``__set__`` method rather than overwriting the +* The value for the field passed to the dataclass's :meth:`~object.__init__` method is + passed to the descriptor's :meth:`~object.__set__` method rather than overwriting the descriptor object. + * Similarly, when getting or setting the field, the descriptor's - ``__get__`` or ``__set__`` method is called rather than returning or + :meth:`~object.__get__` or :meth:`!__set__` method is called rather than returning or overwriting the descriptor object. -* To determine whether a field contains a default value, ``dataclasses`` - will call the descriptor's ``__get__`` method using its class access - form (i.e. ``descriptor.__get__(obj=None, type=cls)``. If the + +* To determine whether a field contains a default value, :func:`@dataclass ` + will call the descriptor's :meth:`!__get__` method using its class access + form: ``descriptor.__get__(obj=None, type=cls)``. If the descriptor returns a value in this case, it will be used as the field's default. On the other hand, if the descriptor raises :exc:`AttributeError` in this situation, no default value will be diff --git a/Doc/library/dbm.rst b/Doc/library/dbm.rst index 227b55c4315419..54627363ba76ae 100644 --- a/Doc/library/dbm.rst +++ b/Doc/library/dbm.rst @@ -19,6 +19,7 @@ slow-but-simple implementation in module :mod:`dbm.dumb` will be used. There is a `third party interface `_ to the Oracle Berkeley DB. +.. include:: ../includes/wasm-ios-notavail.rst .. exception:: error @@ -455,4 +456,3 @@ The :mod:`!dbm.dumb` module defines the following: .. method:: dumbdbm.close() Close the database. - diff --git a/Doc/library/doctest.rst b/Doc/library/doctest.rst index 835a3a76806148..135758187894ec 100644 --- a/Doc/library/doctest.rst +++ b/Doc/library/doctest.rst @@ -123,10 +123,10 @@ And so on, eventually ending with: OverflowError: n too large ok 2 items passed all tests: - 1 tests in __main__ - 8 tests in __main__.factorial - 9 tests in 2 items. - 9 passed and 0 failed. + 1 test in __main__ + 6 tests in __main__.factorial + 7 tests in 2 items. + 7 passed. Test passed. 
$ @@ -1933,7 +1933,7 @@ such a test runner:: optionflags=flags) else: fail, total = doctest.testmod(optionflags=flags) - print("{} failures out of {} tests".format(fail, total)) + print(f"{fail} failures out of {total} tests") .. rubric:: Footnotes diff --git a/Doc/library/ensurepip.rst b/Doc/library/ensurepip.rst index de3b93f5e61073..168e45cfd6fc90 100644 --- a/Doc/library/ensurepip.rst +++ b/Doc/library/ensurepip.rst @@ -38,7 +38,7 @@ when creating a virtual environment) or after explicitly uninstalling :pep:`453`: Explicit bootstrapping of pip in Python installations The original rationale and specification for this module. -.. include:: ../includes/wasm-notavail.rst +.. include:: ../includes/wasm-ios-notavail.rst Command line interface ---------------------- diff --git a/Doc/library/fcntl.rst b/Doc/library/fcntl.rst index b93d6ac7aab956..59215f34e01cb7 100644 --- a/Doc/library/fcntl.rst +++ b/Doc/library/fcntl.rst @@ -18,7 +18,7 @@ interface to the :c:func:`fcntl` and :c:func:`ioctl` Unix routines. See the :manpage:`fcntl(2)` and :manpage:`ioctl(2)` Unix manual pages for full details. -.. availability:: Unix, not Emscripten, not WASI. +.. availability:: Unix, not WASI. All functions in this module take a file descriptor *fd* as their first argument. This can be an integer file descriptor, such as returned by diff --git a/Doc/library/grp.rst b/Doc/library/grp.rst index 274a353103b488..9cf25b7ae137a3 100644 --- a/Doc/library/grp.rst +++ b/Doc/library/grp.rst @@ -10,7 +10,7 @@ This module provides access to the Unix group database. It is available on all Unix versions. -.. availability:: Unix, not Emscripten, not WASI. +.. availability:: Unix, not WASI, not iOS. Group database entries are reported as a tuple-like object, whose attributes correspond to the members of the ``group`` structure (Attribute field below, see diff --git a/Doc/library/gzip.rst b/Doc/library/gzip.rst index 79be215a766045..044be8c1c1bf41 100644 --- a/Doc/library/gzip.rst +++ b/Doc/library/gzip.rst @@ -100,10 +100,12 @@ The module defines the following items: compression, and ``9`` is slowest and produces the most compression. ``0`` is no compression. The default is ``9``. - The *mtime* argument is an optional numeric timestamp to be written to - the last modification time field in the stream when compressing. It - should only be provided in compression mode. If omitted or ``None``, the - current time is used. See the :attr:`mtime` attribute for more details. + The optional *mtime* argument is the timestamp requested by gzip. The time + is in Unix format, i.e., seconds since 00:00:00 UTC, January 1, 1970. + If *mtime* is omitted or None, the current time is used. Use *mtime* = 0 + to generate a compressed stream that does not depend on creation time. + + See below for the :attr:`mtime` attribute that is set when decompressing. Calling a :class:`GzipFile` object's :meth:`!close` method does not close *fileobj*, since you might wish to append more material after the compressed @@ -133,15 +135,10 @@ The module defines the following items: .. attribute:: mtime - When decompressing, the value of the last modification time field in - the most recently read header may be read from this attribute, as an - integer. The initial value before reading any headers is ``None``. - - All :program:`gzip` compressed streams are required to contain this - timestamp field. Some programs, such as :program:`gunzip`\ , make use - of the timestamp. 
The format is the same as the return value of - :func:`time.time` and the :attr:`~os.stat_result.st_mtime` attribute of - the object returned by :func:`os.stat`. + When decompressing, this attribute is set to the last timestamp in the most + recently read header. It is an integer, holding the number of seconds + since the Unix epoch (00:00:00 UTC, January 1, 1970). + The initial value before reading any headers is ``None``. .. attribute:: name diff --git a/Doc/library/imaplib.rst b/Doc/library/imaplib.rst index d5c868def3b64f..ccfd0cd3dde109 100644 --- a/Doc/library/imaplib.rst +++ b/Doc/library/imaplib.rst @@ -622,7 +622,7 @@ retrieves and prints all messages:: import getpass, imaplib - M = imaplib.IMAP4() + M = imaplib.IMAP4(host='example.org') M.login(getpass.getuser(), getpass.getpass()) M.select() typ, data = M.search(None, 'ALL') diff --git a/Doc/library/importlib.rst b/Doc/library/importlib.rst index d92bb2f8e5cf83..b58ef359378e4f 100644 --- a/Doc/library/importlib.rst +++ b/Doc/library/importlib.rst @@ -1241,6 +1241,69 @@ find and load modules. and how the module's :attr:`__file__` is populated. +.. class:: AppleFrameworkLoader(name, path) + + A specialization of :class:`importlib.machinery.ExtensionFileLoader` that + is able to load extension modules in Framework format. + + For compatibility with the iOS App Store, *all* binary modules in an iOS app + must be dynamic libraries, contained in a framework with appropriate + metadata, stored in the ``Frameworks`` folder of the packaged app. There can + be only a single binary per framework, and there can be no executable binary + material outside the Frameworks folder. + + To accomodate this requirement, when running on iOS, extension module + binaries are *not* packaged as ``.so`` files on ``sys.path``, but as + individual standalone frameworks. To discover those frameworks, this loader + is be registered against the ``.fwork`` file extension, with a ``.fwork`` + file acting as a placeholder in the original location of the binary on + ``sys.path``. The ``.fwork`` file contains the path of the actual binary in + the ``Frameworks`` folder, relative to the app bundle. To allow for + resolving a framework-packaged binary back to the original location, the + framework is expected to contain a ``.origin`` file that contains the + location of the ``.fwork`` file, relative to the app bundle. + + For example, consider the case of an import ``from foo.bar import _whiz``, + where ``_whiz`` is implemented with the binary module + ``sources/foo/bar/_whiz.abi3.so``, with ``sources`` being the location + registered on ``sys.path``, relative to the application bundle. This module + *must* be distributed as + ``Frameworks/foo.bar._whiz.framework/foo.bar._whiz`` (creating the framework + name from the full import path of the module), with an ``Info.plist`` file + in the ``.framework`` directory identifying the binary as a framework. The + ``foo.bar._whiz`` module would be represented in the original location with + a ``sources/foo/bar/_whiz.abi3.fwork`` marker file, containing the path + ``Frameworks/foo.bar._whiz/foo.bar._whiz``. The framework would also contain + ``Frameworks/foo.bar._whiz.framework/foo.bar._whiz.origin``, containing the + path to the ``.fwork`` file. + + When a module is loaded with this loader, the ``__file__`` for the module + will report as the location of the ``.fwork`` file. This allows code to use + the ``__file__`` of a module as an anchor for file system traveral. 
+ However, the spec origin will reference the location of the *actual* binary + in the ``.framework`` folder. + + The Xcode project building the app is responsible for converting any ``.so`` + files from wherever they exist in the ``PYTHONPATH`` into frameworks in the + ``Frameworks`` folder (including stripping extensions from the module file, + the addition of framework metadata, and signing the resulting framework), + and creating the ``.fwork`` and ``.origin`` files. This will usually be done + with a build step in the Xcode project; see the iOS documentation for + details on how to construct this build step. + + .. versionadded:: 3.13 + + .. availability:: iOS. + + .. attribute:: name + + Name of the module the loader supports. + + .. attribute:: path + + Path to the ``.fwork`` file for the extension module. + + :mod:`importlib.util` -- Utility code for importers --------------------------------------------------- diff --git a/Doc/library/intro.rst b/Doc/library/intro.rst index 5a4c9b8b16ab3b..ffc8939d21157d 100644 --- a/Doc/library/intro.rst +++ b/Doc/library/intro.rst @@ -58,7 +58,7 @@ Notes on availability operating system. * If not separately noted, all functions that claim "Availability: Unix" are - supported on macOS, which builds on a Unix core. + supported on macOS and iOS, both of which build on a Unix core. * If an availability note contains both a minimum Kernel version and a minimum libc version, then both conditions must hold. For example a feature with note @@ -119,3 +119,44 @@ DOM APIs as well as limited networking capabilities with JavaScript's .. _wasmtime: https://wasmtime.dev/ .. _Pyodide: https://pyodide.org/ .. _PyScript: https://pyscript.net/ + +.. _iOS-availability: + +iOS +--- + +iOS is, in most respects, a POSIX operating system. File I/O, socket handling, +and threading all behave as they would on any POSIX operating system. However, +there are several major differences between iOS and other POSIX systems. + +* iOS can only use Python in "embedded" mode. There is no Python REPL, and no + ability to execute binaries that are part of the normal Python developer + experience, such as :program:`pip`. To add Python code to your iOS app, you must use + the :ref:`Python embedding API ` to add a Python interpreter to an + iOS app created with Xcode. See the :ref:`iOS usage guide ` for + more details. + +* An iOS app cannot use any form of subprocessing, background processing, or + inter-process communication. If an iOS app attempts to create a subprocess, + the process creating the subprocess will either lock up, or crash. An iOS app + has no visibility of other applications that are running, nor any ability to + communicate with other running applications, outside of the iOS-specific APIs + that exist for this purpose. + +* iOS apps have limited access to modify system resources (such as the system + clock). These resources will often be *readable*, but attempts to modify + those resources will usually fail. + +* iOS apps have a limited concept of console input and output. ``stdout`` and + ``stderr`` *exist*, and content written to ``stdout`` and ``stderr`` will be + visible in logs when running in Xcode, but this content *won't* be recorded + in the system log. If a user who has installed your app provides their app + logs as a diagnostic aid, they will not include any detail written to + ``stdout`` or ``stderr``. + + iOS apps have no concept of ``stdin`` at all. 
While iOS apps can have a + keyboard, this is a software feature, not something that is attached to + ``stdin``. + + As a result, Python libraries that involve console manipulation (such as + :mod:`curses` and :mod:`readline`) are not available on iOS. diff --git a/Doc/library/ipaddress.rst b/Doc/library/ipaddress.rst index 73f4960082617b..8f090b5eec5980 100644 --- a/Doc/library/ipaddress.rst +++ b/Doc/library/ipaddress.rst @@ -192,6 +192,18 @@ write code that handles both IP versions correctly. Address objects are ``is_private`` has value opposite to :attr:`is_global`, except for the shared address space (``100.64.0.0/10`` range) where they are both ``False``. + .. versionchanged:: 3.13 + + Fixed some false positives and false negatives. + + * ``192.0.0.0/24`` is considered private with the exception of ``192.0.0.9/32`` and + ``192.0.0.10/32`` (previously: only the ``192.0.0.0/29`` sub-range was considered private). + * ``64:ff9b:1::/48`` is considered private. + * ``2002::/16`` is considered private. + * There are exceptions within ``2001::/23`` (otherwise considered private): ``2001:1::1/128``, + ``2001:1::2/128``, ``2001:3::/32``, ``2001:4:112::/48``, ``2001:20::/28``, ``2001:30::/28``. + The exceptions are not considered private. + .. attribute:: is_global ``True`` if the address is defined as globally reachable by @@ -209,6 +221,10 @@ write code that handles both IP versions correctly. Address objects are .. versionadded:: 3.4 + .. versionchanged:: 3.13 + + Fixed some false positives and false negatives, see :attr:`is_private` for details. + .. attribute:: is_unspecified ``True`` if the address is unspecified. See :RFC:`5735` (for IPv4) diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst index 0b87de4c61e6aa..afc148c78e97bd 100644 --- a/Doc/library/multiprocessing.rst +++ b/Doc/library/multiprocessing.rst @@ -8,7 +8,7 @@ -------------- -.. include:: ../includes/wasm-notavail.rst +.. include:: ../includes/wasm-ios-notavail.rst Introduction ------------ diff --git a/Doc/library/os.path.rst b/Doc/library/os.path.rst index 3ee2b7db1e511b..dcc877da0b3122 100644 --- a/Doc/library/os.path.rst +++ b/Doc/library/os.path.rst @@ -4,7 +4,7 @@ .. module:: os.path :synopsis: Operations on pathnames. -**Source code:** :source:`Lib/posixpath.py` (for POSIX) and +**Source code:** :source:`Lib/genericpath.py`, :source:`Lib/posixpath.py` (for POSIX) and :source:`Lib/ntpath.py` (for Windows). .. index:: single: path; operations @@ -85,8 +85,6 @@ the :mod:`glob` module.) if *paths* is empty. Unlike :func:`commonprefix`, this returns a valid path. - .. availability:: Unix, Windows. - .. versionadded:: 3.5 .. versionchanged:: 3.6 @@ -324,10 +322,11 @@ the :mod:`glob` module.) Dev Drives. See `the Windows documentation `_ for information on enabling and creating Dev Drives. - .. availability:: Windows. - .. versionadded:: 3.12 + .. versionchanged:: 3.13 + The function is now available on all platforms, and will always return ``False`` on those that have no support for Dev Drives. + .. function:: isreserved(path) @@ -442,8 +441,6 @@ the :mod:`glob` module.) *start* defaults to :data:`os.curdir`. - .. availability:: Unix, Windows. - .. versionchanged:: 3.6 Accepts a :term:`path-like object`. @@ -454,8 +451,6 @@ the :mod:`glob` module.) This is determined by the device number and i-node number and raises an exception if an :func:`os.stat` call on either pathname fails. - .. availability:: Unix, Windows. - .. versionchanged:: 3.2 Added Windows support.
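Returning to the :mod:`ipaddress` ``versionchanged`` entry above: the revised ``is_private`` rules are easiest to see with a few concrete addresses. A short illustrative check, assuming the Python 3.13 behaviour documented in that note:

```python
from ipaddress import ip_address

# 192.0.0.0/24 is treated as private, except the two documented /32 exceptions.
print(ip_address("192.0.0.1").is_private)     # True under the 3.13 rules
print(ip_address("192.0.0.9").is_private)     # False: documented exception
print(ip_address("192.0.0.10").is_private)    # False: documented exception

# Ranges that are newly considered private.
print(ip_address("64:ff9b:1::1").is_private)  # True (64:ff9b:1::/48)
print(ip_address("2002::1").is_private)       # True (2002::/16)
```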
@@ -470,8 +465,6 @@ the :mod:`glob` module.) Return ``True`` if the file descriptors *fp1* and *fp2* refer to the same file. - .. availability:: Unix, Windows. - .. versionchanged:: 3.2 Added Windows support. @@ -486,8 +479,6 @@ the :mod:`glob` module.) :func:`os.lstat`, or :func:`os.stat`. This function implements the underlying comparison used by :func:`samefile` and :func:`sameopenfile`. - .. availability:: Unix, Windows. - .. versionchanged:: 3.4 Added Windows support. diff --git a/Doc/library/os.rst b/Doc/library/os.rst index 09d8228f986e47..e2bd481fa30b0d 100644 --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -34,12 +34,12 @@ Notes on the availability of these functions: * On VxWorks, os.popen, os.fork, os.execv and os.spawn*p* are not supported. -* On WebAssembly platforms ``wasm32-emscripten`` and ``wasm32-wasi``, large - parts of the :mod:`os` module are not available or behave differently. API - related to processes (e.g. :func:`~os.fork`, :func:`~os.execve`), signals - (e.g. :func:`~os.kill`, :func:`~os.wait`), and resources - (e.g. :func:`~os.nice`) are not available. Others like :func:`~os.getuid` - and :func:`~os.getpid` are emulated or stubs. +* On WebAssembly platforms, and on iOS, large parts of the :mod:`os` module are + not available or behave differently. API related to processes (e.g. + :func:`~os.fork`, :func:`~os.execve`) and resources (e.g. :func:`~os.nice`) + are not available. Others like :func:`~os.getuid` and :func:`~os.getpid` are + emulated or stubs. WebAssembly platforms also lack support for signals (e.g. + :func:`~os.kill`, :func:`~os.wait`). .. note:: @@ -178,7 +178,7 @@ process and user. Return the filename corresponding to the controlling terminal of the process. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. data:: environ @@ -355,7 +355,7 @@ process and user. Return the effective group id of the current process. This corresponds to the "set id" bit on the file being executed in the current process. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. function:: geteuid() @@ -364,7 +364,7 @@ process and user. Return the current process's effective user id. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. function:: getgid() @@ -375,8 +375,8 @@ process and user. .. availability:: Unix. - The function is a stub on Emscripten and WASI, see - :ref:`wasm-availability` for more information. + The function is a stub on WASI, see :ref:`wasm-availability` for more + information. .. function:: getgrouplist(user, group, /) @@ -386,7 +386,7 @@ process and user. field from the password record for *user*, because that group ID will otherwise be potentially omitted. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. versionadded:: 3.3 @@ -395,7 +395,7 @@ process and user. Return list of supplemental group ids associated with the current process. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. note:: @@ -423,7 +423,7 @@ process and user. falls back to ``pwd.getpwuid(os.getuid())[0]`` to get the login name of the current real user id. - .. availability:: Unix, Windows, not Emscripten, not WASI. + .. availability:: Unix, Windows, not WASI. .. function:: getpgid(pid) @@ -431,7 +431,7 @@ process and user. Return the process group id of the process with process id *pid*. If *pid* is 0, the process group id of the current process is returned. - .. 
availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. function:: getpgrp() @@ -439,7 +439,7 @@ process and user. Return the id of the current process group. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. function:: getpid() @@ -448,8 +448,8 @@ process and user. Return the current process id. - The function is a stub on Emscripten and WASI, see - :ref:`wasm-availability` for more information. + The function is a stub on WASI, see :ref:`wasm-availability` for more + information. .. function:: getppid() @@ -459,7 +459,7 @@ process and user. the id returned is the one of the init process (1), on Windows it is still the same id, which may be already reused by another process. - .. availability:: Unix, Windows, not Emscripten, not WASI. + .. availability:: Unix, Windows, not WASI. .. versionchanged:: 3.2 Added support for Windows. @@ -477,7 +477,7 @@ process and user. (respectively) the calling process, the process group of the calling process, or the real user ID of the calling process. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. versionadded:: 3.3 @@ -488,7 +488,7 @@ process and user. Parameters for the :func:`getpriority` and :func:`setpriority` functions. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. versionadded:: 3.3 @@ -509,7 +509,7 @@ process and user. Return a tuple (ruid, euid, suid) denoting the current process's real, effective, and saved user ids. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. versionadded:: 3.2 @@ -519,7 +519,7 @@ process and user. Return a tuple (rgid, egid, sgid) denoting the current process's real, effective, and saved group ids. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. versionadded:: 3.2 @@ -532,8 +532,8 @@ process and user. .. availability:: Unix. - The function is a stub on Emscripten and WASI, see - :ref:`wasm-availability` for more information. + The function is a stub on WASI, see :ref:`wasm-availability` for more + information. .. function:: initgroups(username, gid, /) @@ -542,7 +542,7 @@ process and user. the groups of which the specified username is a member, plus the specified group id. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. versionadded:: 3.2 @@ -576,21 +576,21 @@ process and user. Set the current process's effective group id. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. function:: seteuid(euid, /) Set the current process's effective user id. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. function:: setgid(gid, /) Set the current process' group id. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. function:: setgroups(groups, /) @@ -599,7 +599,7 @@ process and user. *groups*. *groups* must be a sequence, and each element must be an integer identifying a group. This operation is typically available only to the superuser. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. note:: On macOS, the length of *groups* may not exceed the system-defined maximum number of effective group ids, typically 16. @@ -649,7 +649,7 @@ process and user. Call the system call :c:func:`!setpgrp` or ``setpgrp(0, 0)`` depending on which version is implemented (if any). 
See the Unix manual for the semantics. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. function:: setpgid(pid, pgrp, /) @@ -658,7 +658,7 @@ process and user. process with id *pid* to the process group with id *pgrp*. See the Unix manual for the semantics. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. function:: setpriority(which, who, priority) @@ -675,7 +675,7 @@ process and user. *priority* is a value in the range -20 to 19. The default priority is 0; lower priorities cause more favorable scheduling. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. versionadded:: 3.3 @@ -684,14 +684,14 @@ process and user. Set the current process's real and effective group ids. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. function:: setresgid(rgid, egid, sgid, /) Set the current process's real, effective, and saved group ids. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. versionadded:: 3.2 @@ -700,7 +700,7 @@ process and user. Set the current process's real, effective, and saved user ids. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. versionadded:: 3.2 @@ -709,21 +709,21 @@ process and user. Set the current process's real and effective user ids. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. function:: getsid(pid, /) Call the system call :c:func:`!getsid`. See the Unix manual for the semantics. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. function:: setsid() Call the system call :c:func:`!setsid`. See the Unix manual for the semantics. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. function:: setuid(uid, /) @@ -732,7 +732,7 @@ process and user. Set the current process's user id. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. placed in this section since it relates to errno.... a little weak @@ -755,8 +755,8 @@ process and user. Set the current numeric umask and return the previous umask. - The function is a stub on Emscripten and WASI, see - :ref:`wasm-availability` for more information. + The function is a stub on WASI, see :ref:`wasm-availability` for more + information. .. function:: uname() @@ -784,6 +784,11 @@ process and user. :func:`socket.gethostname` or even ``socket.gethostbyaddr(socket.gethostname())``. + On macOS, iOS and Android, this returns the *kernel* name and version (i.e., + ``'Darwin'`` on macOS and iOS; ``'Linux'`` on Android). :func:`platform.uname()` + can be used to get the user-facing operating system name and version on iOS and + Android. + .. availability:: Unix. .. versionchanged:: 3.3 @@ -1003,8 +1008,8 @@ as internal buffering of data. .. availability:: Unix, Windows. - The function is limited on Emscripten and WASI, see - :ref:`wasm-availability` for more information. + The function is limited on WASI, see :ref:`wasm-availability` for more + information. .. versionchanged:: 3.13 Added support on Windows. @@ -1021,8 +1026,8 @@ as internal buffering of data. .. availability:: Unix. - The function is limited on Emscripten and WASI, see - :ref:`wasm-availability` for more information. + The function is limited on WASI, see :ref:`wasm-availability` for more + information. .. 
function:: fdatasync(fd) @@ -1112,8 +1117,8 @@ as internal buffering of data. .. availability:: Unix, Windows. - The function is limited on Emscripten and WASI, see - :ref:`wasm-availability` for more information. + The function is limited on WASI, see :ref:`wasm-availability` for more + information. On Windows, this function is limited to pipes. @@ -1131,7 +1136,7 @@ as internal buffering of data. Calls the C standard library function :c:func:`grantpt`. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. versionadded:: 3.13 @@ -1175,7 +1180,7 @@ as internal buffering of data. Make the calling process a session leader; make the tty the controlling tty, the stdin, the stdout, and the stderr of the calling process; close fd. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. versionadded:: 3.11 @@ -1359,7 +1364,7 @@ or `the MSDN `_ on Windo descriptors are :ref:`non-inheritable `. For a (slightly) more portable approach, use the :mod:`pty` module. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. versionchanged:: 3.4 The new file descriptors are now non-inheritable. @@ -1385,7 +1390,7 @@ or `the MSDN `_ on Windo Return a pair of file descriptors ``(r, w)`` usable for reading and writing, respectively. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. versionadded:: 3.3 @@ -1395,7 +1400,7 @@ or `the MSDN `_ on Windo Ensures that enough disk space is allocated for the file specified by *fd* starting from *offset* and continuing for *len* bytes. - .. availability:: Unix, not Emscripten. + .. availability:: Unix. .. versionadded:: 3.3 @@ -1455,7 +1460,7 @@ or `the MSDN `_ on Windo If the value :data:`O_CLOEXEC` is available on the system, it is added to *oflag*. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. versionadded:: 3.13 @@ -1527,7 +1532,7 @@ or `the MSDN `_ on Windo it is available; otherwise, the C standard library function :c:func:`ptsname`, which is not guaranteed to be thread-safe, is called. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. versionadded:: 3.13 @@ -1654,7 +1659,7 @@ or `the MSDN `_ on Windo Cross-platform applications should not use *headers*, *trailers* and *flags* arguments. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. note:: @@ -1674,7 +1679,7 @@ or `the MSDN `_ on Windo Parameters to the :func:`sendfile` function, if the implementation supports them. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. versionadded:: 3.3 @@ -1683,7 +1688,7 @@ or `the MSDN `_ on Windo Parameter to the :func:`sendfile` function, if the implementation supports it. The data won't be cached in the virtual memory and will be freed afterwards. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. versionadded:: 3.11 @@ -1697,8 +1702,8 @@ or `the MSDN `_ on Windo .. availability:: Unix, Windows. - The function is limited on Emscripten and WASI, see - :ref:`wasm-availability` for more information. + The function is limited on WASI, see :ref:`wasm-availability` for more + information. On Windows, this function is limited to pipes. @@ -1792,7 +1797,7 @@ or `the MSDN `_ on Windo Calls the C standard library function :c:func:`unlockpt`. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. 
versionadded:: 3.13 @@ -1893,8 +1898,7 @@ Using the :mod:`subprocess` module, all file descriptors except standard streams are closed, and inheritable handles are only inherited if the *close_fds* parameter is ``False``. -On WebAssembly platforms ``wasm32-emscripten`` and ``wasm32-wasi``, the file -descriptor cannot be modified. +On WebAssembly platforms, the file descriptor cannot be modified. .. function:: get_inheritable(fd, /) @@ -2080,7 +2084,7 @@ features: .. audit-event:: os.chflags path,flags os.chflags - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. versionchanged:: 3.3 Added the *follow_symlinks* parameter. @@ -2126,8 +2130,8 @@ features: constants or a corresponding integer value). All other bits are ignored. The default value of *follow_symlinks* is ``False`` on Windows. - The function is limited on Emscripten and WASI, see - :ref:`wasm-availability` for more information. + The function is limited on WASI, see :ref:`wasm-availability` for more + information. .. audit-event:: os.chmod path,mode,dir_fd os.chmod @@ -2159,8 +2163,8 @@ features: .. availability:: Unix. - The function is limited on Emscripten and WASI, see - :ref:`wasm-availability` for more information. + The function is limited on WASI, see :ref:`wasm-availability` for more + information. .. versionadded:: 3.3 Added support for specifying *path* as an open file descriptor, @@ -2174,7 +2178,7 @@ features: Change the root directory of the current process to *path*. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. versionchanged:: 3.6 Accepts a :term:`path-like object`. @@ -2214,7 +2218,7 @@ features: .. audit-event:: os.chflags path,flags os.lchflags - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. versionchanged:: 3.6 Accepts a :term:`path-like object`. @@ -2264,7 +2268,7 @@ features: .. audit-event:: os.link src,dst,src_dir_fd,dst_dir_fd os.link - .. availability:: Unix, Windows, not Emscripten. + .. availability:: Unix, Windows. .. versionchanged:: 3.2 Added Windows support. @@ -2500,7 +2504,7 @@ features: FIFO for reading, and the client opens it for writing. Note that :func:`mkfifo` doesn't open the FIFO --- it just creates the rendezvous point. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. versionchanged:: 3.3 Added the *dir_fd* parameter. @@ -2522,7 +2526,7 @@ features: This function can also support :ref:`paths relative to directory descriptors `. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. versionchanged:: 3.3 Added the *dir_fd* parameter. @@ -3444,8 +3448,8 @@ features: .. availability:: Unix, Windows. - The function is limited on Emscripten and WASI, see - :ref:`wasm-availability` for more information. + The function is limited on WASI, see :ref:`wasm-availability` for more + information. .. versionchanged:: 3.2 Added support for Windows 6.0 (Vista) symbolic links. @@ -4271,7 +4275,7 @@ to be ignored. .. audit-event:: os.exec path,args,env os.execl - .. availability:: Unix, Windows, not Emscripten, not WASI. + .. availability:: Unix, Windows, not WASI, not iOS. .. versionchanged:: 3.3 Added support for specifying *path* as an open file descriptor @@ -4314,49 +4318,49 @@ written in Python, such as a mail server's external command delivery program. Exit code that means the command was used incorrectly, such as when the wrong number of arguments are given. - .. 
availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. data:: EX_DATAERR Exit code that means the input data was incorrect. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. data:: EX_NOINPUT Exit code that means an input file did not exist or was not readable. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. data:: EX_NOUSER Exit code that means a specified user did not exist. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. data:: EX_NOHOST Exit code that means a specified host did not exist. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. data:: EX_UNAVAILABLE Exit code that means that a required service is unavailable. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. data:: EX_SOFTWARE Exit code that means an internal software error was detected. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. data:: EX_OSERR @@ -4364,7 +4368,7 @@ written in Python, such as a mail server's external command delivery program. Exit code that means an operating system error was detected, such as the inability to fork or create a pipe. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. data:: EX_OSFILE @@ -4372,21 +4376,21 @@ written in Python, such as a mail server's external command delivery program. Exit code that means some system file did not exist, could not be opened, or had some other kind of error. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. data:: EX_CANTCREAT Exit code that means a user specified output file could not be created. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. data:: EX_IOERR Exit code that means that an error occurred while doing I/O on some file. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. data:: EX_TEMPFAIL @@ -4395,7 +4399,7 @@ written in Python, such as a mail server's external command delivery program. that may not really be an error, such as a network connection that couldn't be made during a retryable operation. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. data:: EX_PROTOCOL @@ -4403,7 +4407,7 @@ written in Python, such as a mail server's external command delivery program. Exit code that means that a protocol exchange was illegal, invalid, or not understood. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. data:: EX_NOPERM @@ -4411,21 +4415,21 @@ written in Python, such as a mail server's external command delivery program. Exit code that means that there were insufficient permissions to perform the operation (but not intended for file system problems). - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. data:: EX_CONFIG Exit code that means that some kind of configuration error occurred. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. data:: EX_NOTFOUND Exit code that means something like "an entry was not found". - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. function:: fork() @@ -4474,7 +4478,7 @@ written in Python, such as a mail server's external command delivery program. 
for technical details of why we're surfacing this longstanding platform compatibility problem to developers. - .. availability:: POSIX, not Emscripten, not WASI. + .. availability:: POSIX, not WASI, not iOS. .. function:: forkpty() @@ -4501,7 +4505,7 @@ written in Python, such as a mail server's external command delivery program. threads, this now raises a :exc:`DeprecationWarning`. See the longer explanation on :func:`os.fork`. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI, not iOS. .. function:: kill(pid, sig, /) @@ -4525,7 +4529,7 @@ written in Python, such as a mail server's external command delivery program. .. audit-event:: os.kill pid,sig os.kill - .. availability:: Unix, Windows, not Emscripten, not WASI. + .. availability:: Unix, Windows, not WASI, not iOS. .. versionchanged:: 3.2 Added Windows support. @@ -4541,14 +4545,14 @@ written in Python, such as a mail server's external command delivery program. .. audit-event:: os.killpg pgid,sig os.killpg - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI, not iOS. .. function:: nice(increment, /) Add *increment* to the process's "niceness". Return the new niceness. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. .. function:: pidfd_open(pid, flags=0) @@ -4578,7 +4582,7 @@ written in Python, such as a mail server's external command delivery program. Lock program segments into memory. The value of *op* (defined in ````) determines which segments are locked. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI, not iOS. .. function:: popen(cmd, mode='r', buffering=-1) @@ -4610,7 +4614,7 @@ written in Python, such as a mail server's external command delivery program. documentation for more powerful ways to manage and communicate with subprocesses. - .. availability:: not Emscripten, not WASI. + .. availability:: not WASI, not iOS. .. note:: The :ref:`Python UTF-8 Mode ` affects encodings used @@ -4718,7 +4722,7 @@ written in Python, such as a mail server's external command delivery program. ``os.POSIX_SPAWN_CLOSEFROM`` is available on platforms where :c:func:`!posix_spawn_file_actions_addclosefrom_np` exists. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI, not iOS. .. function:: posix_spawnp(path, argv, env, *, file_actions=None, \ setpgroup=None, resetids=False, setsid=False, setsigmask=(), \ @@ -4734,7 +4738,7 @@ written in Python, such as a mail server's external command delivery program. .. versionadded:: 3.8 - .. availability:: POSIX, not Emscripten, not WASI. + .. availability:: POSIX, not WASI, not iOS. See :func:`posix_spawn` documentation. @@ -4767,7 +4771,7 @@ written in Python, such as a mail server's external command delivery program. There is no way to unregister a function. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI, not iOS. .. versionadded:: 3.7 @@ -4836,7 +4840,7 @@ written in Python, such as a mail server's external command delivery program. .. audit-event:: os.spawn mode,path,args,env os.spawnl - .. availability:: Unix, Windows, not Emscripten, not WASI. + .. availability:: Unix, Windows, not WASI, not iOS. :func:`spawnlp`, :func:`spawnlpe`, :func:`spawnvp` and :func:`spawnvpe` are not available on Windows. :func:`spawnle` and @@ -4960,7 +4964,7 @@ written in Python, such as a mail server's external command delivery program. .. audit-event:: os.system command os.system - .. 
availability:: Unix, Windows, not Emscripten, not WASI. + .. availability:: Unix, Windows, not WASI, not iOS. .. function:: times() @@ -5004,7 +5008,7 @@ written in Python, such as a mail server's external command delivery program. :func:`waitstatus_to_exitcode` can be used to convert the exit status into an exit code. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI, not iOS. .. seealso:: @@ -5038,7 +5042,7 @@ written in Python, such as a mail server's external command delivery program. Otherwise, if there are no matching children that could be waited for, :exc:`ChildProcessError` is raised. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI, not iOS. .. versionadded:: 3.3 @@ -5079,7 +5083,7 @@ written in Python, such as a mail server's external command delivery program. :func:`waitstatus_to_exitcode` can be used to convert the exit status into an exit code. - .. availability:: Unix, Windows, not Emscripten, not WASI. + .. availability:: Unix, Windows, not WASI, not iOS. .. versionchanged:: 3.5 If the system call is interrupted and the signal handler does not raise an @@ -5099,7 +5103,7 @@ written in Python, such as a mail server's external command delivery program. :func:`waitstatus_to_exitcode` can be used to convert the exit status into an exitcode. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI, not iOS. .. function:: wait4(pid, options) @@ -5113,7 +5117,7 @@ written in Python, such as a mail server's external command delivery program. :func:`waitstatus_to_exitcode` can be used to convert the exit status into an exitcode. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI, not iOS. .. data:: P_PID @@ -5130,7 +5134,7 @@ written in Python, such as a mail server's external command delivery program. * :data:`!P_PIDFD` - wait for the child identified by the file descriptor *id* (a process file descriptor created with :func:`pidfd_open`). - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI, not iOS. .. note:: :data:`!P_PIDFD` is only available on Linux >= 5.4. @@ -5145,7 +5149,7 @@ written in Python, such as a mail server's external command delivery program. :func:`waitid` causes child processes to be reported if they have been continued from a job control stop since they were last reported. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI, not iOS. .. data:: WEXITED @@ -5156,7 +5160,7 @@ written in Python, such as a mail server's external command delivery program. The other ``wait*`` functions always report children that have terminated, so this option is not available for them. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI, not iOS. .. versionadded:: 3.3 @@ -5168,7 +5172,7 @@ written in Python, such as a mail server's external command delivery program. This option is not available for the other ``wait*`` functions. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI, not iOS. .. versionadded:: 3.3 @@ -5181,7 +5185,7 @@ written in Python, such as a mail server's external command delivery program. This option is not available for :func:`waitid`. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI, not iOS. .. data:: WNOHANG @@ -5190,7 +5194,7 @@ written in Python, such as a mail server's external command delivery program. 
:func:`waitid` to return right away if no child process status is available immediately. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI, not iOS. .. data:: WNOWAIT @@ -5200,7 +5204,7 @@ written in Python, such as a mail server's external command delivery program. This option is not available for the other ``wait*`` functions. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI, not iOS. .. data:: CLD_EXITED @@ -5213,7 +5217,7 @@ written in Python, such as a mail server's external command delivery program. These are the possible values for :attr:`!si_code` in the result returned by :func:`waitid`. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI, not iOS. .. versionadded:: 3.3 @@ -5248,7 +5252,7 @@ written in Python, such as a mail server's external command delivery program. :func:`WIFEXITED`, :func:`WEXITSTATUS`, :func:`WIFSIGNALED`, :func:`WTERMSIG`, :func:`WIFSTOPPED`, :func:`WSTOPSIG` functions. - .. availability:: Unix, Windows, not Emscripten, not WASI. + .. availability:: Unix, Windows, not WASI, not iOS. .. versionadded:: 3.9 @@ -5264,7 +5268,7 @@ used to determine the disposition of a process. This function should be employed only if :func:`WIFSIGNALED` is true. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI, not iOS. .. function:: WIFCONTINUED(status) @@ -5275,7 +5279,7 @@ used to determine the disposition of a process. See :data:`WCONTINUED` option. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI, not iOS. .. function:: WIFSTOPPED(status) @@ -5287,14 +5291,14 @@ used to determine the disposition of a process. done using :data:`WUNTRACED` option or when the process is being traced (see :manpage:`ptrace(2)`). - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI, not iOS. .. function:: WIFSIGNALED(status) Return ``True`` if the process was terminated by a signal, otherwise return ``False``. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI, not iOS. .. function:: WIFEXITED(status) @@ -5303,7 +5307,7 @@ used to determine the disposition of a process. by calling ``exit()`` or ``_exit()``, or by returning from ``main()``; otherwise return ``False``. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI, not iOS. .. function:: WEXITSTATUS(status) @@ -5312,7 +5316,7 @@ used to determine the disposition of a process. This function should be employed only if :func:`WIFEXITED` is true. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI, not iOS. .. function:: WSTOPSIG(status) @@ -5321,7 +5325,7 @@ used to determine the disposition of a process. This function should be employed only if :func:`WIFSTOPPED` is true. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI, not iOS. .. function:: WTERMSIG(status) @@ -5330,7 +5334,7 @@ used to determine the disposition of a process. This function should be employed only if :func:`WIFSIGNALED` is true. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI, not iOS. Interface to the scheduler diff --git a/Doc/library/pathlib.rst b/Doc/library/pathlib.rst index 3ff2631d73c0b2..4fba3622b073a7 100644 --- a/Doc/library/pathlib.rst +++ b/Doc/library/pathlib.rst @@ -1682,6 +1682,10 @@ The patterns accepted and results generated by :meth:`Path.glob` and 5. 
The values returned from pathlib's ``path.glob()`` and ``path.rglob()`` include the *path* as a prefix, unlike the results of ``glob.glob(root_dir=path)``. +6. The values returned from pathlib's ``path.glob()`` and ``path.rglob()`` + may include *path* itself, for example when globbing "``**``", whereas the + results of ``glob.glob(root_dir=path)`` never include an empty string that + would correspond to *path*. Comparison to the :mod:`os` and :mod:`os.path` modules diff --git a/Doc/library/platform.rst b/Doc/library/platform.rst index 4bc3956449b930..069dab791dcbe5 100644 --- a/Doc/library/platform.rst +++ b/Doc/library/platform.rst @@ -148,6 +148,9 @@ Cross Platform Returns the system/OS name, such as ``'Linux'``, ``'Darwin'``, ``'Java'``, ``'Windows'``. An empty string is returned if the value cannot be determined. + On iOS and Android, this returns the user-facing OS name (i.e., ``'iOS'``, + ``'iPadOS'`` or ``'Android'``). To obtain the kernel name (``'Darwin'`` or + ``'Linux'``), use :func:`os.uname()`. .. function:: system_alias(system, release, version) @@ -161,6 +164,8 @@ Cross Platform Returns the system's release version, e.g. ``'#3 on degas'``. An empty string is returned if the value cannot be determined. + On iOS and Android, this is the user-facing OS version. To obtain the + Darwin or Linux kernel version, use :func:`os.uname()`. .. function:: uname() @@ -238,7 +243,6 @@ Windows Platform macOS Platform -------------- - .. function:: mac_ver(release='', versioninfo=('','',''), machine='') Get macOS version information and return it as tuple ``(release, versioninfo, @@ -248,6 +252,24 @@ macOS Platform Entries which cannot be determined are set to ``''``. All tuple entries are strings. +iOS Platform +------------ + +.. function:: ios_ver(system='', release='', model='', is_simulator=False) + + Get iOS version information and return it as a + :func:`~collections.namedtuple` with the following attributes: + + * ``system`` is the OS name; either ``'iOS'`` or ``'iPadOS'``. + * ``release`` is the iOS version number as a string (e.g., ``'17.2'``). + * ``model`` is the device model identifier; this will be a string like + ``'iPhone13,2'`` for a physical device, or ``'iPhone'`` on a simulator. + * ``is_simulator`` is a boolean describing if the app is running on a + simulator or a physical device. + + Entries which cannot be determined are set to the defaults given as + parameters. + Unix Platforms -------------- @@ -301,3 +323,39 @@ Linux Platforms return ids .. versionadded:: 3.10 + + +Android Platform +---------------- + +.. function:: android_ver(release="", api_level=0, manufacturer="", \ + model="", device="", is_emulator=False) + + Get Android device information. Returns a :func:`~collections.namedtuple` + with the following attributes. Values which cannot be determined are set to + the defaults given as parameters. + + * ``release`` - Android version, as a string (e.g. ``"14"``). + + * ``api_level`` - API level of the running device, as an integer (e.g. ``34`` + for Android 14). To get the API level which Python was built against, see + :func:`sys.getandroidapilevel`. + + * ``manufacturer`` - `Manufacturer name + `__. + + * ``model`` - `Model name + `__ – + typically the marketing name or model number. + + * ``device`` - `Device name + `__ – + typically the model number or a codename. + + * ``is_emulator`` - ``True`` if the device is an emulator; ``False`` if it's + a physical device. + + Google maintains a `list of known model and device names + `__. + + ..
versionadded:: 3.13 diff --git a/Doc/library/pprint.rst b/Doc/library/pprint.rst index 32e1351b7ffeeb..eebd270a096ba5 100644 --- a/Doc/library/pprint.rst +++ b/Doc/library/pprint.rst @@ -44,6 +44,17 @@ Functions *args* and *kwargs* will be passed to :func:`~pprint.pprint` as formatting parameters. + >>> import pprint + >>> stuff = ['spam', 'eggs', 'lumberjack', 'knights', 'ni'] + >>> stuff.insert(0, stuff) + >>> pprint.pp(stuff) + [, + 'spam', + 'eggs', + 'lumberjack', + 'knights', + 'ni'] + .. versionadded:: 3.8 @@ -61,16 +72,8 @@ Functions :class:`PrettyPrinter` constructor and their meanings are as described in its documentation below. - >>> import pprint - >>> stuff = ['spam', 'eggs', 'lumberjack', 'knights', 'ni'] - >>> stuff.insert(0, stuff) - >>> pprint.pprint(stuff) - [, - 'spam', - 'eggs', - 'lumberjack', - 'knights', - 'ni'] + Note that *sort_dicts* is ``True`` by default and you might want to use + :func:`~pprint.pp` instead where it is ``False`` by default. .. function:: pformat(object, indent=1, width=80, depth=None, *, \ compact=False, sort_dicts=True, underscore_numbers=False) @@ -261,7 +264,7 @@ are converted to strings. The default implementation uses the internals of the Example ------- -To demonstrate several uses of the :func:`~pprint.pprint` function and its parameters, +To demonstrate several uses of the :func:`~pprint.pp` function and its parameters, let's fetch information about a project from `PyPI `_:: >>> import json @@ -270,9 +273,9 @@ let's fetch information about a project from `PyPI `_:: >>> with urlopen('https://pypi.org/pypi/sampleproject/json') as resp: ... project_info = json.load(resp)['info'] -In its basic form, :func:`~pprint.pprint` shows the whole object:: +In its basic form, :func:`~pprint.pp` shows the whole object:: - >>> pprint.pprint(project_info) + >>> pprint.pp(project_info) {'author': 'The Python Packaging Authority', 'author_email': 'pypa-dev@googlegroups.com', 'bugtrack_url': None, @@ -329,7 +332,7 @@ In its basic form, :func:`~pprint.pprint` shows the whole object:: The result can be limited to a certain *depth* (ellipsis is used for deeper contents):: - >>> pprint.pprint(project_info, depth=1) + >>> pprint.pp(project_info, depth=1) {'author': 'The Python Packaging Authority', 'author_email': 'pypa-dev@googlegroups.com', 'bugtrack_url': None, @@ -375,7 +378,7 @@ contents):: Additionally, maximum character *width* can be suggested. If a long object cannot be split, the specified width will be exceeded:: - >>> pprint.pprint(project_info, depth=1, width=60) + >>> pprint.pp(project_info, depth=1, width=60) {'author': 'The Python Packaging Authority', 'author_email': 'pypa-dev@googlegroups.com', 'bugtrack_url': None, diff --git a/Doc/library/pwd.rst b/Doc/library/pwd.rst index dbe68cd14ec4d4..a6c6d79b60b20a 100644 --- a/Doc/library/pwd.rst +++ b/Doc/library/pwd.rst @@ -10,7 +10,7 @@ This module provides access to the Unix user account and password database. It is available on all Unix versions. -.. availability:: Unix, not Emscripten, not WASI. +.. availability:: Unix, not WASI, not iOS. 
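A brief aside on the :mod:`pprint` note above (``pp`` defaults to ``sort_dicts=False``, while ``pprint`` defaults to ``True``): a tiny comparison, with the expected output shown as comments.

```python
import pprint

data = {"b": 1, "a": 2}

# pprint.pprint() sorts dictionary keys by default (sort_dicts=True).
pprint.pprint(data)  # {'a': 2, 'b': 1}

# pprint.pp() keeps insertion order by default (sort_dicts=False).
pprint.pp(data)      # {'b': 1, 'a': 2}
```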
Password database entries are reported as a tuple-like object, whose attributes correspond to the members of the ``passwd`` structure (Attribute field below, diff --git a/Doc/library/pydoc.rst b/Doc/library/pydoc.rst index 03e0915bf6d135..df969b2fc7c04c 100644 --- a/Doc/library/pydoc.rst +++ b/Doc/library/pydoc.rst @@ -16,19 +16,19 @@ -------------- -The :mod:`pydoc` module automatically generates documentation from Python +The :mod:`!pydoc` module automatically generates documentation from Python modules. The documentation can be presented as pages of text on the console, served to a web browser, or saved to HTML files. For modules, classes, functions and methods, the displayed documentation is -derived from the docstring (i.e. the :attr:`__doc__` attribute) of the object, +derived from the docstring (i.e. the :attr:`!__doc__` attribute) of the object, and recursively of its documentable members. If there is no docstring, -:mod:`pydoc` tries to obtain a description from the block of comment lines just +:mod:`!pydoc` tries to obtain a description from the block of comment lines just above the definition of the class, function or method in the source file, or at the top of the module (see :func:`inspect.getcomments`). The built-in function :func:`help` invokes the online help system in the -interactive interpreter, which uses :mod:`pydoc` to generate its documentation +interactive interpreter, which uses :mod:`!pydoc` to generate its documentation as text on the console. The same text documentation can also be viewed from outside the Python interpreter by running :program:`pydoc` as a script at the operating system's command prompt. For example, running :: @@ -46,7 +46,7 @@ produced for that file. .. note:: - In order to find objects and their documentation, :mod:`pydoc` imports the + In order to find objects and their documentation, :mod:`!pydoc` imports the module(s) to be documented. Therefore, any code on module level will be executed on that occasion. Use an ``if __name__ == '__main__':`` guard to only execute code when a file is invoked as a script and not just imported. @@ -90,7 +90,7 @@ Python interpreter and typed ``import spam``. Module docs for core modules are assumed to reside in ``https://docs.python.org/X.Y/library/`` where ``X`` and ``Y`` are the major and minor version numbers of the Python interpreter. This can -be overridden by setting the :envvar:`PYTHONDOCS` environment variable +be overridden by setting the :envvar:`!PYTHONDOCS` environment variable to a different URL or to a local directory containing the Library Reference Manual pages. @@ -101,7 +101,7 @@ Reference Manual pages. The ``-g`` command line option was removed. .. versionchanged:: 3.4 - :mod:`pydoc` now uses :func:`inspect.signature` rather than + :mod:`!pydoc` now uses :func:`inspect.signature` rather than :func:`inspect.getfullargspec` to extract signature information from callables. diff --git a/Doc/library/readline.rst b/Doc/library/readline.rst index 54c6d9f3b32b1a..8f8718ec51c41b 100644 --- a/Doc/library/readline.rst +++ b/Doc/library/readline.rst @@ -24,6 +24,8 @@ in the GNU Readline manual for information about the format and allowable constructs of that file, and the capabilities of the Readline library in general. +.. include:: ../includes/wasm-ios-notavail.rst + .. 
note:: The underlying Readline library API may be implemented by diff --git a/Doc/library/resource.rst b/Doc/library/resource.rst index 389a63f089d850..4fea8d5cb718c1 100644 --- a/Doc/library/resource.rst +++ b/Doc/library/resource.rst @@ -13,7 +13,7 @@ This module provides basic mechanisms for measuring and controlling system resources utilized by a program. -.. availability:: Unix, not Emscripten, not WASI. +.. availability:: Unix, not WASI. Symbolic constants are used to specify particular system resources and to request usage information about either the current process or its children. diff --git a/Doc/library/signal.rst b/Doc/library/signal.rst index 85a073aad233ac..05ef45c123b02e 100644 --- a/Doc/library/signal.rst +++ b/Doc/library/signal.rst @@ -26,9 +26,9 @@ explicitly reset (Python emulates the BSD style interface regardless of the underlying implementation), with the exception of the handler for :const:`SIGCHLD`, which follows the underlying implementation. -On WebAssembly platforms ``wasm32-emscripten`` and ``wasm32-wasi``, signals -are emulated and therefore behave differently. Several functions and signals -are not available on these platforms. +On WebAssembly platforms, signals are emulated and therefore behave +differently. Several functions and signals are not available on these +platforms. Execution of Python signal handlers ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst index 3a931e25de91e5..76af783c6292f9 100644 --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst @@ -1213,7 +1213,7 @@ The :mod:`socket` module also offers various network-related services: buffer. Raises :exc:`OverflowError` if *length* is outside the permissible range of values. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. Most Unix platforms. @@ -1236,7 +1236,7 @@ The :mod:`socket` module also offers various network-related services: amount of ancillary data that can be received, since additional data may be able to fit into the padding area. - .. availability:: Unix, not Emscripten, not WASI. + .. availability:: Unix, not WASI. most Unix platforms. @@ -1276,7 +1276,7 @@ The :mod:`socket` module also offers various network-related services: (index int, name string) tuples. :exc:`OSError` if the system call fails. - .. availability:: Unix, Windows, not Emscripten, not WASI. + .. availability:: Unix, Windows, not WASI. .. versionadded:: 3.3 @@ -1303,7 +1303,7 @@ The :mod:`socket` module also offers various network-related services: interface name. :exc:`OSError` if no interface with the given name exists. - .. availability:: Unix, Windows, not Emscripten, not WASI. + .. availability:: Unix, Windows, not WASI. .. versionadded:: 3.3 @@ -1320,7 +1320,7 @@ The :mod:`socket` module also offers various network-related services: interface index number. :exc:`OSError` if no interface with the given index exists. - .. availability:: Unix, Windows, not Emscripten, not WASI. + .. availability:: Unix, Windows, not WASI. .. versionadded:: 3.3 @@ -1337,7 +1337,7 @@ The :mod:`socket` module also offers various network-related services: The *fds* parameter is a sequence of file descriptors. Consult :meth:`~socket.sendmsg` for the documentation of these parameters. - .. availability:: Unix, Windows, not Emscripten, not WASI. + .. availability:: Unix, Windows, not WASI. Unix platforms supporting :meth:`~socket.sendmsg` and :const:`SCM_RIGHTS` mechanism. 
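Since the entries above cover :func:`socket.send_fds` and the :const:`SCM_RIGHTS` mechanism, here is a short Unix-only sketch of handing an open file descriptor across a socket pair, written against the documented ``send_fds``/``recv_fds`` signatures (available since Python 3.9).

```python
import os
import socket
import tempfile

# A socketpair stands in for any AF_UNIX connection between two processes.
parent, child = socket.socketpair(socket.AF_UNIX, socket.SOCK_STREAM)

with tempfile.TemporaryFile() as tmp:
    tmp.write(b"hello from the sender")
    tmp.flush()

    # Send the descriptor (plus a small message) using SCM_RIGHTS.
    socket.send_fds(parent, [b"fd attached"], [tmp.fileno()])

    # The kernel duplicates the descriptor into the receiving end.
    msg, fds, flags, addr = socket.recv_fds(child, 1024, maxfds=1)
    with os.fdopen(fds[0], "rb") as received:
        received.seek(0)  # the file offset is shared with the sender's descriptor
        print(msg, received.read())

parent.close()
child.close()
```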
@@ -1351,7 +1351,7 @@ The :mod:`socket` module also offers various network-related services: Return ``(msg, list(fds), flags, addr)``. Consult :meth:`~socket.recvmsg` for the documentation of these parameters. - .. availability:: Unix, Windows, not Emscripten, not WASI. + .. availability:: Unix, Windows, not WASI. Unix platforms supporting :meth:`~socket.sendmsg` and :const:`SCM_RIGHTS` mechanism. diff --git a/Doc/library/statistics.rst b/Doc/library/statistics.rst index 1785c6bcc212b7..197c123f8356d8 100644 --- a/Doc/library/statistics.rst +++ b/Doc/library/statistics.rst @@ -80,7 +80,7 @@ or sample. :func:`median` Median (middle value) of data. :func:`median_low` Low median of data. :func:`median_high` High median of data. -:func:`median_grouped` Median, or 50th percentile, of grouped data. +:func:`median_grouped` Median (50th percentile) of grouped data. :func:`mode` Single mode (most common value) of discrete or nominal data. :func:`multimode` List of modes (most common values) of discrete or nominal data. :func:`quantiles` Divide data into intervals with equal probability. @@ -261,11 +261,12 @@ However, for reading convenience, most of the examples show sorted sequences. Added support for *weights*. -.. function:: kde(data, h, kernel='normal') +.. function:: kde(data, h, kernel='normal', *, cumulative=False) `Kernel Density Estimation (KDE) `_: - Create a continuous probability density function from discrete samples. + Create a continuous probability density function or cumulative + distribution function from discrete samples. The basic idea is to smooth the data using `a kernel function `_. @@ -280,11 +281,13 @@ However, for reading convenience, most of the examples show sorted sequences. as much as the more influential bandwidth smoothing parameter. Kernels that give some weight to every sample point include - *normal* or *gauss*, *logistic*, and *sigmoid*. + *normal* (*gauss*), *logistic*, and *sigmoid*. Kernels that only give weight to sample points within the bandwidth - include *rectangular* or *uniform*, *triangular*, *parabolic* or - *epanechnikov*, *quartic* or *biweight*, *triweight*, and *cosine*. + include *rectangular* (*uniform*), *triangular*, *parabolic* + (*epanechnikov*), *quartic* (*biweight*), *triweight*, and *cosine*. + + If *cumulative* is true, will return a cumulative distribution function. A :exc:`StatisticsError` will be raised if the *data* sequence is empty. @@ -378,55 +381,56 @@ However, for reading convenience, most of the examples show sorted sequences. be an actual data point rather than interpolated. -.. function:: median_grouped(data, interval=1) +.. function:: median_grouped(data, interval=1.0) - Return the median of grouped continuous data, calculated as the 50th - percentile, using interpolation. If *data* is empty, :exc:`StatisticsError` - is raised. *data* can be a sequence or iterable. + Estimates the median for numeric data that has been `grouped or binned + `_ around the midpoints + of consecutive, fixed-width intervals. - .. doctest:: + The *data* can be any iterable of numeric data with each value being + exactly the midpoint of a bin. At least one value must be present. - >>> median_grouped([52, 52, 53, 54]) - 52.5 + The *interval* is the width of each bin. - In the following example, the data are rounded, so that each value represents - the midpoint of data classes, e.g. 1 is the midpoint of the class 0.5--1.5, 2 - is the midpoint of 1.5--2.5, 3 is the midpoint of 2.5--3.5, etc. 
With the data - given, the middle value falls somewhere in the class 3.5--4.5, and - interpolation is used to estimate it: + For example, demographic information may have been summarized into + consecutive ten-year age groups with each group being represented + by the 5-year midpoints of the intervals: .. doctest:: - >>> median_grouped([1, 2, 2, 3, 4, 4, 4, 4, 4, 5]) - 3.7 - - Optional argument *interval* represents the class interval, and defaults - to 1. Changing the class interval naturally will change the interpolation: + >>> from collections import Counter + >>> demographics = Counter({ + ... 25: 172, # 20 to 30 years old + ... 35: 484, # 30 to 40 years old + ... 45: 387, # 40 to 50 years old + ... 55: 22, # 50 to 60 years old + ... 65: 6, # 60 to 70 years old + ... }) + ... + + The 50th percentile (median) is the 536th person out of the 1071 + member cohort. That person is in the 30 to 40 year old age group. + + The regular :func:`median` function would assume that everyone in the + tricenarian age group was exactly 35 years old. A more tenable + assumption is that the 484 members of that age group are evenly + distributed between 30 and 40. For that, we use + :func:`median_grouped`: .. doctest:: - >>> median_grouped([1, 3, 3, 5, 7], interval=1) - 3.25 - >>> median_grouped([1, 3, 3, 5, 7], interval=2) - 3.5 - - This function does not check whether the data points are at least - *interval* apart. + >>> data = list(demographics.elements()) + >>> median(data) + 35 + >>> round(median_grouped(data, interval=10), 1) + 37.5 - .. impl-detail:: + The caller is responsible for making sure the data points are separated + by exact multiples of *interval*. This is essential for getting a + correct result. The function does not check this precondition. - Under some circumstances, :func:`median_grouped` may coerce data points to - floats. This behaviour is likely to change in the future. - - .. seealso:: - - * "Statistics for the Behavioral Sciences", Frederick J Gravetter and - Larry B Wallnau (8th Edition). - - * The `SSMEDIAN - `_ - function in the Gnome Gnumeric spreadsheet, including `this discussion - `_. + Inputs may be any numeric type that can be coerced to a float during + the interpolation step. .. function:: mode(data) @@ -997,8 +1001,8 @@ of applications in statistics. .. versionadded:: 3.8 -:class:`NormalDist` Examples and Recipes -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Examples and Recipes +-------------------- Classic probability problems @@ -1033,7 +1037,7 @@ Find the `quartiles `_ and `deciles Monte Carlo inputs for simulations ********************************** -To estimate the distribution for a model than isn't easy to solve +To estimate the distribution for a model that isn't easy to solve analytically, :class:`NormalDist` can generate input samples for a `Monte Carlo simulation `_: @@ -1144,6 +1148,64 @@ The final prediction goes to the largest posterior. This is known as the 'female' +Sampling from kernel density estimation +*************************************** + +The :func:`kde()` function creates a continuous probability density +function from discrete samples. Some applications need a way to make +random selections from that distribution. + +The technique is to pick a sample from a bandwidth scaled kernel +function and recenter the result around a randomly chosen point from +the input data. This can be done with any kernel that has a known or +accurately approximated inverse cumulative distribution function. + +.. 
testcode:: + + from random import choice, random, seed + from math import sqrt, log, pi, tan, asin + from statistics import NormalDist + + kernel_invcdfs = { + 'normal': NormalDist().inv_cdf, + 'logistic': lambda p: log(p / (1 - p)), + 'sigmoid': lambda p: log(tan(p * pi/2)), + 'rectangular': lambda p: 2*p - 1, + 'triangular': lambda p: sqrt(2*p) - 1 if p < 0.5 else 1 - sqrt(2 - 2*p), + 'cosine': lambda p: 2*asin(2*p - 1)/pi, + } + + def kde_random(data, h, kernel='normal'): + 'Return a function that samples from kde() smoothed data.' + kernel_invcdf = kernel_invcdfs[kernel] + def rand(): + return h * kernel_invcdf(random()) + choice(data) + return rand + +For example: + +.. doctest:: + + >>> discrete_samples = [-2.1, -1.3, -0.4, 1.9, 5.1, 6.2] + >>> rand = kde_random(discrete_samples, h=1.5) + >>> seed(8675309) + >>> selections = [rand() for i in range(10)] + >>> [round(x, 1) for x in selections] + [4.7, 7.4, 1.2, 7.8, 6.9, -1.3, 5.8, 0.2, -1.4, 5.7] + +.. testcode:: + :hide: + + from statistics import kde + from math import isclose + + # Verify that cdf / invcdf will round trip + xarr = [i/100 for i in range(-100, 101)] + for kernel, invcdf in kernel_invcdfs.items(): + cdf = kde([0.0], h=1.0, kernel=kernel, cumulative=True) + for x in xarr: + assert isclose(invcdf(cdf(x)), x, abs_tol=1E-9) + .. # This modelines must appear within the last ten lines of the file. kate: indent-width 3; remove-trailing-space on; replace-tabs on; encoding utf-8; diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst index 1dcfea58a8e89f..8f6751cb11af2a 100644 --- a/Doc/library/subprocess.rst +++ b/Doc/library/subprocess.rst @@ -25,7 +25,7 @@ modules and functions can be found in the following sections. :pep:`324` -- PEP proposing the subprocess module -.. include:: ../includes/wasm-notavail.rst +.. include:: ../includes/wasm-ios-notavail.rst Using the :mod:`subprocess` Module ---------------------------------- diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst index 087a3454c33272..19d6856efe5d09 100644 --- a/Doc/library/sys.rst +++ b/Doc/library/sys.rst @@ -753,7 +753,9 @@ always available. .. function:: getandroidapilevel() - Return the build time API version of Android as an integer. + Return the build-time API level of Android as an integer. This represents the + minimum version of Android this build of Python can run on. For runtime + version information, see :func:`platform.android_ver`. .. availability:: Android. diff --git a/Doc/library/syslog.rst b/Doc/library/syslog.rst index 7b27fc7e85b62d..30bf3f09a24d42 100644 --- a/Doc/library/syslog.rst +++ b/Doc/library/syslog.rst @@ -11,7 +11,7 @@ This module provides an interface to the Unix ``syslog`` library routines. Refer to the Unix manual pages for a detailed description of the ``syslog`` facility. -.. availability:: Unix, not Emscripten, not WASI. +.. availability:: Unix, not WASI, not iOS. This module wraps the system ``syslog`` family of routines. A pure Python library that can speak to a syslog server is available in the diff --git a/Doc/library/venv.rst b/Doc/library/venv.rst index a4273f97b7a8db..ecb01b352e8cbc 100644 --- a/Doc/library/venv.rst +++ b/Doc/library/venv.rst @@ -56,7 +56,7 @@ See :pep:`405` for more background on Python virtual environments. `Python Packaging User Guide: Creating and using virtual environments `__ -.. include:: ../includes/wasm-notavail.rst +.. 
include:: ../includes/wasm-ios-notavail.rst Creating virtual environments ----------------------------- diff --git a/Doc/library/webbrowser.rst b/Doc/library/webbrowser.rst index ce24f2a0dd0289..3775d9f9245428 100644 --- a/Doc/library/webbrowser.rst +++ b/Doc/library/webbrowser.rst @@ -33,6 +33,13 @@ allow the remote browser to maintain its own windows on the display. If remote browsers are not available on Unix, the controlling process will launch a new browser and wait. +On iOS, the :envvar:`BROWSER` environment variable, as well as any arguments +controlling autoraise, browser preference, and new tab/window creation will be +ignored. Web pages will *always* be opened in the user's preferred browser, in +a new tab, with the browser being brought to the foreground. The use of the +:mod:`webbrowser` module on iOS requires the :mod:`ctypes` module. If +:mod:`ctypes` isn't available, calls to :func:`.open` will fail. + The script :program:`webbrowser` can be used as a command-line interface for the module. It accepts a URL as the argument. It accepts the following optional parameters: @@ -150,6 +157,8 @@ for the controller classes, all defined in this module. +------------------------+-----------------------------------------+-------+ | ``'chromium-browser'`` | ``Chromium('chromium-browser')`` | | +------------------------+-----------------------------------------+-------+ +| ``'iosbrowser'`` | ``IOSBrowser`` | \(4) | ++------------------------+-----------------------------------------+-------+ Notes: @@ -164,7 +173,10 @@ Notes: Only on Windows platforms. (3) - Only on macOS platform. + Only on macOS. + +(4) + Only on iOS. .. versionadded:: 3.2 A new :class:`!MacOSXOSAScript` class has been added @@ -179,6 +191,9 @@ Notes: Removed browsers include Grail, Mosaic, Netscape, Galeon, Skipstone, Iceape, and Firefox versions 35 and below. +.. versionchanged:: 3.13 + Support for iOS has been added. + Here are some simple examples:: url = 'https://docs.python.org/' diff --git a/Doc/library/zipimport.rst b/Doc/library/zipimport.rst index 47c81f0e63603d..7a8c837307e60a 100644 --- a/Doc/library/zipimport.rst +++ b/Doc/library/zipimport.rst @@ -30,6 +30,9 @@ Any files may be present in the ZIP archive, but importers are only invoked for corresponding :file:`.pyc` file, meaning that if a ZIP archive doesn't contain :file:`.pyc` files, importing may be rather slow. +.. versionchanged:: 3.13 + ZIP64 is supported + .. versionchanged:: 3.8 Previously, ZIP archives with an archive comment were not supported. diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst index 75b656f385d34b..bc835b8e30cb29 100644 --- a/Doc/reference/datamodel.rst +++ b/Doc/reference/datamodel.rst @@ -299,14 +299,17 @@ Sequences These represent finite ordered sets indexed by non-negative numbers. The built-in function :func:`len` returns the number of items of a sequence. When the length of a sequence is *n*, the index set contains the numbers 0, 1, -..., *n*-1. Item *i* of sequence *a* is selected by ``a[i]``. +..., *n*-1. Item *i* of sequence *a* is selected by ``a[i]``. Some sequences, +including built-in sequences, interpret negative subscripts by adding the +sequence length. For example, ``a[-2]`` equals ``a[n-2]``, the second to last +item of sequence a with length ``n``. .. index:: single: slicing Sequences also support slicing: ``a[i:j]`` selects all items with index *k* such that *i* ``<=`` *k* ``<`` *j*. When used as an expression, a slice is a -sequence of the same type. 
This implies that the index set is renumbered so -that it starts at 0. +sequence of the same type. The comment above about negative indexes also applies +to negative slice positions. Some sequences also support "extended slicing" with a third "step" parameter: ``a[i:j:k]`` selects all items of *a* with index *x* where ``x = i + n*k``, *n* diff --git a/Doc/reference/lexical_analysis.rst b/Doc/reference/lexical_analysis.rst index f0b3d0a7458cbe..41ea89fd234122 100644 --- a/Doc/reference/lexical_analysis.rst +++ b/Doc/reference/lexical_analysis.rst @@ -96,10 +96,9 @@ which is recognized also by GNU Emacs, and :: which is recognized by Bram Moolenaar's VIM. -If no encoding declaration is found, the default encoding is UTF-8. In -addition, if the first bytes of the file are the UTF-8 byte-order mark -(``b'\xef\xbb\xbf'``), the declared file encoding is UTF-8 (this is supported, -among others, by Microsoft's :program:`notepad`). +If no encoding declaration is found, the default encoding is UTF-8. If the +implicit or explicit encoding of a file is UTF-8, an initial UTF-8 byte-order +mark (b'\xef\xbb\xbf') is ignored rather than being a syntax error. If an encoding is declared, the encoding name must be recognized by Python (see :ref:`standard-encodings`). The diff --git a/Doc/reference/simple_stmts.rst b/Doc/reference/simple_stmts.rst index 810232e09dc5fa..a253482156d3b4 100644 --- a/Doc/reference/simple_stmts.rst +++ b/Doc/reference/simple_stmts.rst @@ -1006,25 +1006,29 @@ The :keyword:`!nonlocal` statement .. productionlist:: python-grammar nonlocal_stmt: "nonlocal" `identifier` ("," `identifier`)* -The :keyword:`nonlocal` statement causes the listed identifiers to refer to -previously bound variables in the nearest enclosing scope excluding globals. -This is important because the default behavior for binding is to search the -local namespace first. The statement allows encapsulated code to rebind -variables outside of the local scope besides the global (module) scope. - -Names listed in a :keyword:`nonlocal` statement, unlike those listed in a -:keyword:`global` statement, must refer to pre-existing bindings in an -enclosing scope (the scope in which a new binding should be created cannot -be determined unambiguously). - -Names listed in a :keyword:`nonlocal` statement must not collide with -pre-existing bindings in the local scope. +When the definition of a function or class is nested (enclosed) within +the definitions of other functions, its nonlocal scopes are the local +scopes of the enclosing functions. The :keyword:`nonlocal` statement +causes the listed identifiers to refer to names previously bound in +nonlocal scopes. It allows encapsulated code to rebind such nonlocal +identifiers. If a name is bound in more than one nonlocal scope, the +nearest binding is used. If a name is not bound in any nonlocal scope, +or if there is no nonlocal scope, a :exc:`SyntaxError` is raised. + +The nonlocal statement applies to the entire scope of a function or +class body. A :exc:`SyntaxError` is raised if a variable is used or +assigned to prior to its nonlocal declaration in the scope. .. seealso:: :pep:`3104` - Access to Names in Outer Scopes The specification for the :keyword:`nonlocal` statement. +**Programmer's note:** :keyword:`nonlocal` is a directive to the parser +and applies only to code parsed along with it. See the note for the +:keyword:`global` statement. + + .. 
_type: The :keyword:`!type` statement diff --git a/Doc/tools/.nitignore b/Doc/tools/.nitignore index 5fbc24c6ee65a5..6f38f36d3311c0 100644 --- a/Doc/tools/.nitignore +++ b/Doc/tools/.nitignore @@ -44,7 +44,6 @@ Doc/library/pickletools.rst Doc/library/platform.rst Doc/library/plistlib.rst Doc/library/profile.rst -Doc/library/pydoc.rst Doc/library/pyexpat.rst Doc/library/readline.rst Doc/library/resource.rst diff --git a/Doc/tools/extensions/pyspecific.py b/Doc/tools/extensions/pyspecific.py index cd441836f62bde..c31d67d2868144 100644 --- a/Doc/tools/extensions/pyspecific.py +++ b/Doc/tools/extensions/pyspecific.py @@ -133,7 +133,7 @@ class Availability(SphinxDirective): known_platforms = frozenset({ "AIX", "Android", "BSD", "DragonFlyBSD", "Emscripten", "FreeBSD", "GNU/kFreeBSD", "Linux", "NetBSD", "OpenBSD", "POSIX", "Solaris", - "Unix", "VxWorks", "WASI", "Windows", "macOS", + "Unix", "VxWorks", "WASI", "Windows", "macOS", "iOS", # libc "BSD libc", "glibc", "musl", # POSIX platforms with pthreads @@ -164,7 +164,7 @@ def parse_platforms(self): Example:: - .. availability:: Windows, Linux >= 4.2, not Emscripten, not WASI + .. availability:: Windows, Linux >= 4.2, not WASI Arguments like "Linux >= 3.17 with glibc >= 2.27" are currently not parsed into separate tokens. diff --git a/Doc/tools/templates/indexcontent.html b/Doc/tools/templates/indexcontent.html index 5b3c174f9d1729..6f854e86ab8ef1 100644 --- a/Doc/tools/templates/indexcontent.html +++ b/Doc/tools/templates/indexcontent.html @@ -58,11 +58,11 @@

{{ docstitle|e }}

- + - + {% endblock %} diff --git a/Doc/using/configure.rst b/Doc/using/configure.rst index 3db309539d2368..eef0c5022d37af 100644 --- a/Doc/using/configure.rst +++ b/Doc/using/configure.rst @@ -881,7 +881,7 @@ Security Options macOS Options ------------- -See ``Mac/README.rst``. +See :source:`Mac/README.rst`. .. option:: --enable-universalsdk .. option:: --enable-universalsdk=SDKDIR @@ -916,6 +916,20 @@ See ``Mac/README.rst``. Specify the name for the python framework on macOS only valid when :option:`--enable-framework` is set (default: ``Python``). +iOS Options +----------- + +See :source:`iOS/README.rst`. + +.. option:: --enable-framework=INSTALLDIR + + Create a Python.framework. Unlike macOS, the *INSTALLDIR* argument + specifying the installation path is mandatory. + +.. option:: --with-framework-name=FRAMEWORK + + Specify the name for the framework (default: ``Python``). + Cross Compiling Options ----------------------- diff --git a/Doc/using/index.rst b/Doc/using/index.rst index e1a3111f36a44f..f55a12f1ab8a0d 100644 --- a/Doc/using/index.rst +++ b/Doc/using/index.rst @@ -18,4 +18,5 @@ interpreter and things that make working with Python easier. configure.rst windows.rst mac.rst + ios.rst editors.rst diff --git a/Doc/using/ios.rst b/Doc/using/ios.rst new file mode 100644 index 00000000000000..da8f42048c0faf --- /dev/null +++ b/Doc/using/ios.rst @@ -0,0 +1,314 @@ +.. _using-ios: + +=================== +Using Python on iOS +=================== + +:Authors: + Russell Keith-Magee (2024-03) + +Python on iOS is unlike Python on desktop platforms. On a desktop platform, +Python is generally installed as a system resource that can be used by any user +of that computer. Users then interact with Python by running a :program:`python` +executable and entering commands at an interactive prompt, or by running a +Python script. + +On iOS, there is no concept of installing as a system resource. The only unit +of software distribution is an "app". There is also no console where you could +run a :program:`python` executable, or interact with a Python REPL. + +As a result, the only way you can use Python on iOS is in embedded mode - that +is, by writing a native iOS application, and embedding a Python interpreter +using ``libPython``, and invoking Python code using the :ref:`Python embedding +API `. The full Python interpreter, the standard library, and all +your Python code is then packaged as a standalone bundle that can be +distributed via the iOS App Store. + +If you're looking to experiment for the first time with writing an iOS app in +Python, projects such as `BeeWare `__ and `Kivy +`__ will provide a much more approachable user experience. +These projects manage the complexities associated with getting an iOS project +running, so you only need to deal with the Python code itself. + +Python at runtime on iOS +======================== + +Platform identification +----------------------- + +When executing on iOS, ``sys.platform`` will report as ``ios``. This value will +be returned on an iPhone or iPad, regardless of whether the app is running on +the simulator or a physical device. + +Information about the specific runtime environment, including the iOS version, +device model, and whether the device is a simulator, can be obtained using +:func:`platform.ios_ver()`. :func:`platform.system()` will report ``iOS`` or +``iPadOS``, depending on the device. + +:func:`os.uname()` reports kernel-level details; it will report a name of +``Darwin``. 
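As a rough illustration of the runtime checks described above (the attribute names on the :func:`platform.ios_ver` result used here, ``release``, ``model`` and ``is_simulator``, are assumptions for the sketch rather than something this section documents):

```python
import platform
import sys

if sys.platform == "ios":  # reported on both iPhone and iPad, device or simulator
    ver = platform.ios_ver()  # assumed fields: system, release, model, is_simulator
    where = "simulator" if ver.is_simulator else "device"
    print(f"{platform.system()} {ver.release} on a {ver.model} ({where})")
else:
    print(f"not iOS: sys.platform is {sys.platform!r}")
```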
+ +Standard library availability +----------------------------- + +The Python standard library has some notable omissions and restrictions on +iOS. See the :ref:`API availability guide for iOS ` for +details. + +Binary extension modules +------------------------ + +One notable difference about iOS as a platform is that App Store distribution +imposes hard requirements on the packaging of an application. One of these +requirements governs how binary extension modules are distributed. + +The iOS App Store requires that *all* binary modules in an iOS app must be +dynamic libraries, contained in a framework with appropriate metadata, stored +in the ``Frameworks`` folder of the packaged app. There can be only a single +binary per framework, and there can be no executable binary material outside +the ``Frameworks`` folder. + +This conflicts with the usual Python approach for distributing binaries, which +allows a binary extension module to be loaded from any location on +``sys.path``. To ensure compliance with App Store policies, an iOS project must +post-process any Python packages, converting ``.so`` binary modules into +individual standalone frameworks with appropriate metadata and signing. For +details on how to perform this post-processing, see the guide for :ref:`adding +Python to your project `. + +To help Python discover binaries in their new location, the original ``.so`` +file on ``sys.path`` is replaced with a ``.fwork`` file. This file is a text +file containing the location of the framework binary, relative to the app +bundle. To allow the framework to resolve back to the original location, the +framework must contain a ``.origin`` file that contains the location of the +``.fwork`` file, relative to the app bundle. + +For example, consider the case of an import ``from foo.bar import _whiz``, +where ``_whiz`` is implemented with the binary module +``sources/foo/bar/_whiz.abi3.so``, with ``sources`` being the location +registered on ``sys.path``, relative to the application bundle. This module +*must* be distributed as ``Frameworks/foo.bar._whiz.framework/foo.bar._whiz`` +(creating the framework name from the full import path of the module), with an +``Info.plist`` file in the ``.framework`` directory identifying the binary as a +framework. The ``foo.bar._whiz`` module would be represented in the original +location with a ``sources/foo/bar/_whiz.abi3.fwork`` marker file, containing +the path ``Frameworks/foo.bar._whiz/foo.bar._whiz``. The framework would also +contain ``Frameworks/foo.bar._whiz.framework/foo.bar._whiz.origin``, containing +the path to the ``.fwork`` file. + +When running on iOS, the Python interpreter will install an +:class:`~importlib.machinery.AppleFrameworkLoader` that is able to read and +import ``.fwork`` files. Once imported, the ``__file__`` attribute of the +binary module will report as the location of the ``.fwork`` file. However, the +:class:`~importlib.machinery.ModuleSpec` for the loaded module will report the +``origin`` as the location of the binary in the framework folder. + +Compiler stub binaries +---------------------- + +Xcode doesn't expose explicit compilers for iOS; instead, it uses an ``xcrun`` +script that resolves to a full compiler path (e.g., ``xcrun --sdk iphoneos +clang`` to get the ``clang`` for an iPhone device). 
However, using this script
+poses two problems:
+
+* The output of ``xcrun`` includes paths that are machine specific, resulting
+  in a sysconfig module that cannot be shared between users; and
+
+* It results in ``CC``/``CPP``/``LD``/``AR`` definitions that include spaces.
+  There is a lot of C ecosystem tooling that assumes that you can split a
+  command line at the first space to get the path to the compiler executable;
+  this isn't the case when using ``xcrun``.
+
+To avoid these problems, Python provides stubs for these tools. These stubs are
+shell script wrappers around the underlying ``xcrun`` tools, distributed in a
+``bin`` folder alongside the compiled iOS framework. These scripts
+are relocatable, and will always resolve to the appropriate local system paths.
+By including these scripts in the bin folder that accompanies a framework, the
+contents of the ``sysconfig`` module become useful for end-users to compile
+their own modules. When compiling third-party Python modules for iOS, you
+should ensure these stub binaries are on your path.
+
+Installing Python on iOS
+========================
+
+Tools for building iOS apps
+---------------------------
+
+Building for iOS requires the use of Apple's Xcode tooling. It is strongly
+recommended that you use the most recent stable release of Xcode. This will
+require the use of the most (or second-most) recently released macOS version,
+as Apple does not maintain Xcode for older macOS versions. The Xcode Command
+Line Tools are not sufficient for iOS development; you need a *full* Xcode
+install.
+
+If you want to run your code on the iOS simulator, you'll also need to install
+an iOS Simulator Platform. You should be prompted to select an iOS Simulator
+Platform when you first run Xcode. Alternatively, you can add an iOS Simulator
+Platform by selecting from the Platforms tab of the Xcode Settings panel.
+
+.. _adding-ios:
+
+Adding Python to an iOS project
+-------------------------------
+
+Python can be added to any iOS project, using either Swift or Objective C. The
+following examples will use Objective C; if you are using Swift, you may find a
+library like `PythonKit `__ to be
+helpful.
+
+To add Python to an iOS Xcode project:
+
+1. Build or obtain a Python ``XCFramework``. See the instructions in
+   :source:`iOS/README.rst` (in the CPython source distribution) for details on
+   how to build a Python ``XCFramework``. At a minimum, you will need a build
+   that supports ``arm64-apple-ios``, plus one of either
+   ``arm64-apple-ios-simulator`` or ``x86_64-apple-ios-simulator``.
+
+2. Drag the ``XCframework`` into your iOS project. In the following
+   instructions, we'll assume you've dropped the ``XCframework`` into the root
+   of your project; however, you can use any other location that you want by
+   adjusting paths as needed.
+
+3. Drag the ``iOS/Resources/dylib-Info-template.plist`` file into your project,
+   and ensure it is associated with the app target.
+
+4. Add your application code as a folder in your Xcode project. In the
+   following instructions, we'll assume that your user code is in a folder
+   named ``app`` in the root of your project; you can use any other location by
+   adjusting paths as needed. Ensure that this folder is associated with your
+   app target.
+
+5. Select the app target by selecting the root node of your Xcode project, then
+   the target name in the sidebar that appears.
+
+6. In the "General" settings, under "Frameworks, Libraries and Embedded
+   Content", add ``Python.xcframework``, with "Embed & Sign" selected.
+
+7. In the "Build Settings" tab, modify the following:
+
+   - Build Options
+
+     * User Script Sandboxing: No
+     * Enable Testability: Yes
+
+   - Search Paths
+
+     * Framework Search Paths: ``$(PROJECT_DIR)``
+     * Header Search Paths: ``"$(BUILT_PRODUCTS_DIR)/Python.framework/Headers"``
+
+   - Apple Clang - Warnings - All languages
+
+     * Quoted Include In Framework Header: No
+
+8. Add a build step that copies the Python standard library into your app. In
+   the "Build Phases" tab, add a new "Run Script" build step *before* the
+   "Embed Frameworks" step, but *after* the "Copy Bundle Resources" step. Name
+   the step "Install Target Specific Python Standard Library", disable the
+   "Based on dependency analysis" checkbox, and set the script content to:
+
+   .. code-block:: bash
+
+      set -e
+
+      mkdir -p "$CODESIGNING_FOLDER_PATH/python/lib"
+      if [ "$EFFECTIVE_PLATFORM_NAME" = "-iphonesimulator" ]; then
+          echo "Installing Python modules for iOS Simulator"
+          rsync -au --delete "$PROJECT_DIR/Python.xcframework/ios-arm64_x86_64-simulator/lib/" "$CODESIGNING_FOLDER_PATH/python/lib/"
+      else
+          echo "Installing Python modules for iOS Device"
+          rsync -au --delete "$PROJECT_DIR/Python.xcframework/ios-arm64/lib/" "$CODESIGNING_FOLDER_PATH/python/lib/"
+      fi
+
+   Note that the name of the simulator "slice" in the XCframework may be
+   different, depending on the CPU architectures your ``XCFramework`` supports.
+
+9. Add a second build step that processes the binary extension modules in the
+   standard library into "Framework" format. Add a "Run Script" build step
+   *directly after* the one you added in step 8, named "Prepare Python Binary
+   Modules". It should also have "Based on dependency analysis" unchecked, with
+   the following script content:
+
+   .. code-block:: bash
+
+      set -e
+
+      install_dylib () {
+          INSTALL_BASE=$1
+          FULL_EXT=$2
+
+          # The name of the extension file
+          EXT=$(basename "$FULL_EXT")
+          # The location of the extension file, relative to the bundle
+          RELATIVE_EXT=${FULL_EXT#$CODESIGNING_FOLDER_PATH/}
+          # The path to the extension file, relative to the install base
+          PYTHON_EXT=${RELATIVE_EXT/$INSTALL_BASE/}
+          # The full dotted name of the extension module, constructed from the file path.
+          FULL_MODULE_NAME=$(echo $PYTHON_EXT | cut -d "." -f 1 | tr "/" ".");
+          # A bundle identifier; not actually used, but required by Xcode framework packaging
+          FRAMEWORK_BUNDLE_ID=$(echo $PRODUCT_BUNDLE_IDENTIFIER.$FULL_MODULE_NAME | tr "_" "-")
+          # The name of the framework folder.
+          FRAMEWORK_FOLDER="Frameworks/$FULL_MODULE_NAME.framework"
+
+          # If the framework folder doesn't exist, create it.
+          if [ ! -d "$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER" ]; then
+              echo "Creating framework for $RELATIVE_EXT"
+              mkdir -p "$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER"
+              cp "$CODESIGNING_FOLDER_PATH/dylib-Info-template.plist" "$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist"
+              plutil -replace CFBundleExecutable -string "$FULL_MODULE_NAME" "$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist"
+              plutil -replace CFBundleIdentifier -string "$FRAMEWORK_BUNDLE_ID" "$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist"
+          fi
+
+          echo "Installing binary for $FRAMEWORK_FOLDER/$FULL_MODULE_NAME"
+          mv "$FULL_EXT" "$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/$FULL_MODULE_NAME"
+          # Create a placeholder .fwork file where the .so was
+          echo "$FRAMEWORK_FOLDER/$FULL_MODULE_NAME" > ${FULL_EXT%.so}.fwork
+          # Create a back reference to the .so file location in the framework
+          echo "${RELATIVE_EXT%.so}.fwork" > "$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/$FULL_MODULE_NAME.origin"
+      }
+
+      PYTHON_VER=$(ls -1 "$CODESIGNING_FOLDER_PATH/python/lib")
+      echo "Install Python $PYTHON_VER standard library extension modules..."
+      find "$CODESIGNING_FOLDER_PATH/python/lib/$PYTHON_VER/lib-dynload" -name "*.so" | while read FULL_EXT; do
+          install_dylib python/lib/$PYTHON_VER/lib-dynload/ "$FULL_EXT"
+      done
+
+      # Clean up dylib template
+      rm -f "$CODESIGNING_FOLDER_PATH/dylib-Info-template.plist"
+
+      echo "Signing frameworks as $EXPANDED_CODE_SIGN_IDENTITY_NAME ($EXPANDED_CODE_SIGN_IDENTITY)..."
+      find "$CODESIGNING_FOLDER_PATH/Frameworks" -name "*.framework" -exec /usr/bin/codesign --force --sign "$EXPANDED_CODE_SIGN_IDENTITY" ${OTHER_CODE_SIGN_FLAGS:-} -o runtime --timestamp=none --preserve-metadata=identifier,entitlements,flags --generate-entitlement-der "{}" \;
+
+10. Add Objective C code to initialize and use a Python interpreter in embedded
+    mode. You should ensure that:
+
+    * :c:member:`UTF-8 mode ` is *enabled*;
+    * :c:member:`Buffered stdio ` is *disabled*;
+    * :c:member:`Writing bytecode ` is *disabled*;
+    * :c:member:`Signal handlers ` are *enabled*;
+    * ``PYTHONHOME`` for the interpreter is configured to point at the
+      ``python`` subfolder of your app's bundle; and
+    * The ``PYTHONPATH`` for the interpreter includes:
+
+      - the ``python/lib/python3.X`` subfolder of your app's bundle,
+      - the ``python/lib/python3.X/lib-dynload`` subfolder of your app's bundle, and
+      - the ``app`` subfolder of your app's bundle
+
+    Your app's bundle location can be determined using ``[[NSBundle mainBundle]
+    resourcePath]``.
+
+Steps 8, 9 and 10 of these instructions assume that you have a single folder of
+pure Python application code, named ``app``. If you have third-party binary
+modules in your app, some additional steps will be required:
+
+* You need to ensure that any folders containing third-party binaries are
+  either associated with the app target, or copied in as part of step 8. Step 8
+  should also purge any binaries that are not appropriate for the platform a
+  specific build is targeting (i.e., delete any device binaries if you're
+  building an app targeting the simulator).
+
+* Any folders that contain third-party binaries must be processed into
+  framework form by step 9. The invocation of ``install_dylib`` that processes
+  the ``lib-dynload`` folder can be copied and adapted for this purpose.
+
+* If you're using a separate folder for third-party packages, ensure that folder
+  is included as part of the ``PYTHONPATH`` configuration in step 10.
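The ``.fwork``/``.origin`` bookkeeping that the build script above automates is plain path arithmetic, so it can be sanity-checked in isolation. The helper below is only an illustrative sketch of that mapping (it is not a CPython API or part of the build step), using the ``foo.bar._whiz`` example from the binary extension modules section:

```python
from pathlib import PurePosixPath

def framework_layout(ext_path: str, sys_path_root: str = "sources"):
    """Illustrative sketch: derive the framework layout described above."""
    rel = PurePosixPath(ext_path).relative_to(sys_path_root)
    # Dotted module name from the directory parts plus the file name's stem.
    module = ".".join([*rel.parent.parts, rel.name.split(".")[0]])
    framework = f"Frameworks/{module}.framework"
    return {
        "binary": f"{framework}/{module}",            # where the .so is moved to
        "fwork": ext_path[:-len(".so")] + ".fwork",   # marker left on sys.path
        "origin": f"{framework}/{module}.origin",     # back-reference to the .fwork
    }

layout = framework_layout("sources/foo/bar/_whiz.abi3.so")
# layout["binary"] == "Frameworks/foo.bar._whiz.framework/foo.bar._whiz"
# layout["fwork"]  == "sources/foo/bar/_whiz.abi3.fwork"
# layout["origin"] == "Frameworks/foo.bar._whiz.framework/foo.bar._whiz.origin"
```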
diff --git a/Doc/using/mac.rst b/Doc/using/mac.rst
index e99993238895f9..8f3372b8e017f5 100644
--- a/Doc/using/mac.rst
+++ b/Doc/using/mac.rst
@@ -152,26 +152,41 @@ Tk toolkit (https://www.tcl.tk).
 An Aqua-native version of Tk is bundled with macOS by Apple, and the latest
 version can be downloaded and installed from https://www.activestate.com;
 it can also be built from source.
 
-*wxPython* is another popular cross-platform GUI toolkit that runs natively on
-macOS. Packages and documentation are available from https://www.wxpython.org.
+A number of alternative macOS GUI toolkits are available:
 
-*PyQt* is another popular cross-platform GUI toolkit that runs natively on
-macOS. More information can be found at
-https://riverbankcomputing.com/software/pyqt/intro.
+* `PySide `__: Official Python bindings to the
+  `Qt GUI toolkit `__.
 
-*PySide* is another cross-platform Qt-based toolkit. More information at
-https://www.qt.io/qt-for-python.
+* `PyQt `__: Alternative
+  Python bindings to Qt.
+* `Kivy `__: A cross-platform GUI toolkit that supports
+  desktop and mobile platforms.
+
+* `Toga `__: Part of the `BeeWare Project
+  `__; supports desktop, mobile, web and console apps.
+
+* `wxPython `__: A cross-platform toolkit that
+  supports desktop operating systems.
 
 .. _distributing-python-applications-on-the-mac:
 
 Distributing Python Applications
 ================================
 
-The standard tool for deploying standalone Python applications on the Mac is
-:program:`py2app`. More information on installing and using :program:`py2app`
-can be found at https://pypi.org/project/py2app/.
+A range of tools exist for converting your Python code into a standalone
+distributable application:
+
+* `py2app `__: Supports creating macOS ``.app``
+  bundles from a Python project.
+* `Briefcase `__: Part of the `BeeWare Project
+  `__; a cross-platform packaging tool that supports
+  creation of ``.app`` bundles on macOS, as well as managing signing and
+  notarization.
+
+* `PyInstaller `__: A cross-platform packaging tool that creates
+  a single file or folder as a distributable artifact.
 
 Other Resources
 ===============
@@ -184,4 +199,3 @@ https://www.python.org/community/sigs/current/pythonmac-sig/
 
 Another useful resource is the MacPython wiki:
 https://wiki.python.org/moin/MacPython
-
diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst
index 0553cc97c5c75a..5a5c506d83d735 100644
--- a/Doc/whatsnew/3.13.rst
+++ b/Doc/whatsnew/3.13.rst
@@ -111,6 +111,14 @@ Improved Error Messages
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^
     TypeError: split() got an unexpected keyword argument 'max_split'. Did you mean 'maxsplit'?
 
+Incremental Garbage Collection
+------------------------------
+
+* The cycle garbage collector is now incremental.
+  This means that maximum pause times are reduced
+  by an order of magnitude or more for larger heaps.
+
+
 Other Language Changes
 ======================
 
@@ -276,7 +284,7 @@ asyncio
   (Contributed by Pierre Ossman in :gh:`113538`.)
 
 base64
----
+------
 
 * Add :func:`base64.z85encode` and :func:`base64.z85decode` functions which
   allow encoding and decoding z85 data.
@@ -350,6 +358,28 @@ fractions
   sign handling, minimum width and grouping.
   (Contributed by Mark Dickinson in :gh:`111320`.)
 
+gc
+--
+
+* The cyclic garbage collector is now incremental, which changes the meanings
+  of the results of :meth:`gc.get_threshold` and :meth:`gc.set_threshold` as
+  well as :meth:`gc.get_count` and :meth:`gc.get_stats`.
+* :meth:`gc.get_threshold` returns a three-tuple for backwards compatibility.
+  The first value is the threshold for young collections, as before; the second
+  value determines the rate at which the old collection is scanned (the default
+  is 10, and higher values mean that the old collection is scanned more slowly).
+  The third value is meaningless and is always zero.
+* :meth:`gc.set_threshold` ignores any items after the second.
+* :meth:`gc.get_count` and :meth:`gc.get_stats`.
+  These functions return the same format of results as before.
+  The only difference is that instead of the results referring to
+  the young, aging and old generations, the results refer to the
+  young generation and the aging and collecting spaces of the old generation.
+
+In summary, code that attempted to manipulate the behavior of the cycle GC may
+not work exactly as intended, but it is very unlikely to be harmful.
+All other code will work just fine.
+
 glob
 ----
@@ -371,6 +401,8 @@ ipaddress
 * Add the :attr:`ipaddress.IPv4Address.ipv6_mapped` property, which returns the
   IPv4-mapped IPv6 address. (Contributed by Charles Machalow in :gh:`109466`.)
+* Fix ``is_global`` and ``is_private`` behavior in ``IPv4Address``, ``IPv6Address``, ``IPv4Network``
+  and ``IPv6Network``.
 
 itertools
 ---------
@@ -668,6 +700,12 @@ xml.etree.ElementTree
   :func:`~xml.etree.ElementTree.iterparse` for explicit cleaning up.
   (Contributed by Serhiy Storchaka in :gh:`69893`.)
 
+zipimport
+---------
+
+* Gains support for ZIP64 format files. Everybody loves huge code, right?
+  (Contributed by Tim Hatch in :gh:`94146`.)
+
 Optimizations
 =============
 
@@ -1471,6 +1509,10 @@ Changes in the Python API
   than directories only. Users may add a trailing slash to match only
   directories.
 
+* :c:func:`!PyCode_GetFirstFree` is now an unstable API and has been renamed
+  to :c:func:`PyUnstable_Code_GetFirstFree`.
+  (Contributed by Bogdan Romanyuk in :gh:`115781`)
+
 Build Changes
 =============
 
@@ -1493,7 +1535,7 @@ Build Changes
 * The ``errno``, ``fcntl``, ``grp``, ``md5``, ``pwd``, ``resource``,
   ``termios``, ``winsound``,
   ``_ctypes_test``, ``_multiprocessing.posixshmem``, ``_scproxy``, ``_stat``,
-  ``_statistics``, ``_testimportmultiple`` and ``_uuid``
+  ``_statistics``, ``_testconsole``, ``_testimportmultiple`` and ``_uuid``
   C extensions are now built with the :ref:`limited C API `.
   (Contributed by Victor Stinner in :gh:`85283`.)
 
@@ -1666,6 +1708,10 @@ New Features
 * Add :c:func:`Py_HashPointer` function to hash a pointer.
   (Contributed by Victor Stinner in :gh:`111545`.)
 
+* Add :c:func:`PyObject_GenericHash` function that implements the default
+  hashing function of a Python object.
+  (Contributed by Serhiy Storchaka in :gh:`113024`.)
+
 * Add PyTime C API:
 
   * :c:type:`PyTime_t` type.
@@ -1697,6 +1743,14 @@ New Features
   more information.
   (Contributed by Victor Stinner in :gh:`111696`.)
 
+* Add :c:func:`Py_GetConstant` and :c:func:`Py_GetConstantBorrowed` functions
+  to get constants. For example, ``Py_GetConstant(Py_CONSTANT_ZERO)`` returns a
+  :term:`strong reference` to the constant zero.
+  (Contributed by Victor Stinner in :gh:`115754`.)
+
+* Add :c:func:`PyType_GetModuleByDef` to the limited C API.
+  (Contributed by Victor Stinner in :gh:`116936`.)
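For code that tunes the collector, the reinterpreted thresholds described in the ``gc`` section above can be explored with a short sketch (the values printed depend on the build and are not documented defaults):

```python
import gc

# Under the incremental collector the three reported values now mean:
#   [0] allocation threshold for young collections (unchanged meaning)
#   [1] scanning rate of the old generation (higher means scanned more slowly)
#   [2] kept only for backwards compatibility; reported as zero
print(gc.get_threshold())

# Only the first two values are significant; anything after them is ignored.
gc.set_threshold(2000, 10)
print(gc.get_threshold())
```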
+ Porting to Python 3.13 ---------------------- diff --git a/Grammar/python.gram b/Grammar/python.gram index 797c195a0a91ba..696649392ae45d 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -778,6 +778,7 @@ bitwise_and[expr_ty]: shift_expr[expr_ty]: | a=shift_expr '<<' b=sum { _PyAST_BinOp(a, LShift, b, EXTRA) } | a=shift_expr '>>' b=sum { _PyAST_BinOp(a, RShift, b, EXTRA) } + | invalid_arithmetic | sum # Arithmetic operators @@ -794,6 +795,7 @@ term[expr_ty]: | a=term '//' b=factor { _PyAST_BinOp(a, FloorDiv, b, EXTRA) } | a=term '%' b=factor { _PyAST_BinOp(a, Mod, b, EXTRA) } | a=term '@' b=factor { CHECK_VERSION(expr_ty, 5, "The '@' operator is", _PyAST_BinOp(a, MatMult, b, EXTRA)) } + | invalid_factor | factor factor[expr_ty] (memo): @@ -1415,3 +1417,8 @@ invalid_replacement_field: invalid_conversion_character: | '!' &(':' | '}') { RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: missing conversion character") } | '!' !NAME { RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: invalid conversion character") } + +invalid_arithmetic: + | sum ('+'|'-'|'*'|'/'|'%'|'//'|'@') a='not' b=inversion { RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "'not' after an operator must be parenthesized") } +invalid_factor: + | ('+' | '-' | '~') a='not' b=factor { RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "'not' after an operator must be parenthesized") } diff --git a/Include/Python.h b/Include/Python.h index 01fc45137a17bb..ca38a98d8c4eca 100644 --- a/Include/Python.h +++ b/Include/Python.h @@ -63,6 +63,7 @@ #include "bytearrayobject.h" #include "bytesobject.h" #include "unicodeobject.h" +#include "pyerrors.h" #include "longobject.h" #include "cpython/longintrepr.h" #include "boolobject.h" @@ -99,7 +100,6 @@ #include "cpython/picklebufobject.h" #include "cpython/pytime.h" #include "codecs.h" -#include "pyerrors.h" #include "pythread.h" #include "cpython/context.h" #include "modsupport.h" diff --git a/Include/boolobject.h b/Include/boolobject.h index 19aef5b1b87c6a..3037e61bbf6d0c 100644 --- a/Include/boolobject.h +++ b/Include/boolobject.h @@ -18,8 +18,13 @@ PyAPI_DATA(PyLongObject) _Py_FalseStruct; PyAPI_DATA(PyLongObject) _Py_TrueStruct; /* Use these macros */ -#define Py_False _PyObject_CAST(&_Py_FalseStruct) -#define Py_True _PyObject_CAST(&_Py_TrueStruct) +#if defined(Py_LIMITED_API) && Py_LIMITED_API+0 >= 0x030D0000 +# define Py_False Py_GetConstantBorrowed(Py_CONSTANT_FALSE) +# define Py_True Py_GetConstantBorrowed(Py_CONSTANT_TRUE) +#else +# define Py_False _PyObject_CAST(&_Py_FalseStruct) +# define Py_True _PyObject_CAST(&_Py_TrueStruct) +#endif // Test if an object is the True singleton, the same as "x is True" in Python. 
PyAPI_FUNC(int) Py_IsTrue(PyObject *x); diff --git a/Include/cpython/code.h b/Include/cpython/code.h index 4e7e212fd099db..d5dac1765638f9 100644 --- a/Include/cpython/code.h +++ b/Include/cpython/code.h @@ -226,11 +226,15 @@ static inline Py_ssize_t PyCode_GetNumFree(PyCodeObject *op) { return op->co_nfreevars; } -static inline int PyCode_GetFirstFree(PyCodeObject *op) { +static inline int PyUnstable_Code_GetFirstFree(PyCodeObject *op) { assert(PyCode_Check(op)); return op->co_nlocalsplus - op->co_nfreevars; } +Py_DEPRECATED(3.13) static inline int PyCode_GetFirstFree(PyCodeObject *op) { + return PyUnstable_Code_GetFirstFree(op); +} + #define _PyCode_CODE(CO) _Py_RVALUE((_Py_CODEUNIT *)(CO)->co_code_adaptive) #define _PyCode_NBYTES(CO) (Py_SIZE(CO) * (Py_ssize_t)sizeof(_Py_CODEUNIT)) diff --git a/Include/cpython/interpreteridobject.h b/Include/cpython/interpreteridobject.h deleted file mode 100644 index 4ab9ad5d315f80..00000000000000 --- a/Include/cpython/interpreteridobject.h +++ /dev/null @@ -1,11 +0,0 @@ -#ifndef Py_CPYTHON_INTERPRETERIDOBJECT_H -# error "this header file must not be included directly" -#endif - -/* Interpreter ID Object */ - -PyAPI_DATA(PyTypeObject) PyInterpreterID_Type; - -PyAPI_FUNC(PyObject *) PyInterpreterID_New(int64_t); -PyAPI_FUNC(PyObject *) PyInterpreterState_GetIDObject(PyInterpreterState *); -PyAPI_FUNC(PyInterpreterState *) PyInterpreterID_LookUp(PyObject *); diff --git a/Include/cpython/object.h b/Include/cpython/object.h index 7512bb70c760fd..b64db1ba9a6dd2 100644 --- a/Include/cpython/object.h +++ b/Include/cpython/object.h @@ -275,7 +275,6 @@ typedef struct _heaptypeobject { PyAPI_FUNC(const char *) _PyType_Name(PyTypeObject *); PyAPI_FUNC(PyObject *) _PyType_Lookup(PyTypeObject *, PyObject *); -PyAPI_FUNC(PyObject *) PyType_GetModuleByDef(PyTypeObject *, PyModuleDef *); PyAPI_FUNC(PyObject *) PyType_GetDict(PyTypeObject *); PyAPI_FUNC(int) PyObject_Print(PyObject *, FILE *, int); diff --git a/Include/cpython/optimizer.h b/Include/cpython/optimizer.h index df83e6d16a429d..bc960c583782c5 100644 --- a/Include/cpython/optimizer.h +++ b/Include/cpython/optimizer.h @@ -30,16 +30,63 @@ typedef struct { PyCodeObject *code; // Weak (NULL if no corresponding ENTER_EXECUTOR). 
} _PyVMData; +#define UOP_FORMAT_TARGET 0 +#define UOP_FORMAT_EXIT 1 +#define UOP_FORMAT_JUMP 2 +#define UOP_FORMAT_UNUSED 3 + +/* Depending on the format, + * the 32 bits between the oparg and operand are: + * UOP_FORMAT_TARGET: + * uint32_t target; + * UOP_FORMAT_EXIT + * uint16_t exit_index; + * uint16_t error_target; + * UOP_FORMAT_JUMP + * uint16_t jump_target; + * uint16_t error_target; + */ typedef struct { - uint16_t opcode; + uint16_t opcode:14; + uint16_t format:2; uint16_t oparg; union { uint32_t target; - uint32_t exit_index; + struct { + union { + uint16_t exit_index; + uint16_t jump_target; + }; + uint16_t error_target; + }; }; uint64_t operand; // A cache entry } _PyUOpInstruction; +static inline uint32_t uop_get_target(const _PyUOpInstruction *inst) +{ + assert(inst->format == UOP_FORMAT_TARGET); + return inst->target; +} + +static inline uint16_t uop_get_exit_index(const _PyUOpInstruction *inst) +{ + assert(inst->format == UOP_FORMAT_EXIT); + return inst->exit_index; +} + +static inline uint16_t uop_get_jump_target(const _PyUOpInstruction *inst) +{ + assert(inst->format == UOP_FORMAT_JUMP); + return inst->jump_target; +} + +static inline uint16_t uop_get_error_target(const _PyUOpInstruction *inst) +{ + assert(inst->format != UOP_FORMAT_TARGET); + return inst->error_target; +} + typedef struct _exit_data { uint32_t target; int16_t temperature; diff --git a/Include/cpython/pyatomic_std.h b/Include/cpython/pyatomic_std.h index f3970a45df24f9..ef34bb0b77dfe5 100644 --- a/Include/cpython/pyatomic_std.h +++ b/Include/cpython/pyatomic_std.h @@ -916,6 +916,7 @@ _Py_atomic_load_uint32_acquire(const uint32_t *obj) { _Py_USING_STD; return atomic_load_explicit((const _Atomic(uint32_t)*)obj, + memory_order_acquire); } static inline Py_ssize_t @@ -923,6 +924,7 @@ _Py_atomic_load_ssize_acquire(const Py_ssize_t *obj) { _Py_USING_STD; return atomic_load_explicit((const _Atomic(Py_ssize_t)*)obj, + memory_order_acquire); } diff --git a/Include/cpython/pyhash.h b/Include/cpython/pyhash.h index b476c3f357de92..2f8e12c1423aa1 100644 --- a/Include/cpython/pyhash.h +++ b/Include/cpython/pyhash.h @@ -43,3 +43,4 @@ typedef struct { PyAPI_FUNC(PyHash_FuncDef*) PyHash_GetFuncDef(void); PyAPI_FUNC(Py_hash_t) Py_HashPointer(const void *ptr); +PyAPI_FUNC(Py_hash_t) PyObject_GenericHash(PyObject *); diff --git a/Include/cpython/pystate.h b/Include/cpython/pystate.h index 38d0897ea13161..7fb6b176392173 100644 --- a/Include/cpython/pystate.h +++ b/Include/cpython/pystate.h @@ -209,8 +209,14 @@ struct _ts { # define Py_C_RECURSION_LIMIT 500 #elif defined(__s390x__) # define Py_C_RECURSION_LIMIT 800 +#elif defined(_WIN32) && defined(_M_ARM64) +# define Py_C_RECURSION_LIMIT 1000 #elif defined(_WIN32) # define Py_C_RECURSION_LIMIT 3000 +#elif defined(__ANDROID__) + // On an ARM64 emulator, API level 34 was OK with 10000, but API level 21 + // crashed in test_compiler_recursion_limit. +# define Py_C_RECURSION_LIMIT 3000 #elif defined(_Py_ADDRESS_SANITIZER) # define Py_C_RECURSION_LIMIT 4000 #else diff --git a/Include/cpython/pystats.h b/Include/cpython/pystats.h index 887fbbedf88502..2fb7723f583cc7 100644 --- a/Include/cpython/pystats.h +++ b/Include/cpython/pystats.h @@ -19,6 +19,8 @@ // Define _PY_INTERPRETER macro to increment interpreter_increfs and // interpreter_decrefs. Otherwise, increment increfs and decrefs. 
+#include "pycore_uop_ids.h" + #ifndef Py_CPYTHON_PYSTATS_H # error "this header file must not be included directly" #endif @@ -116,7 +118,7 @@ typedef struct _optimization_stats { uint64_t recursive_call; uint64_t low_confidence; uint64_t executors_invalidated; - UOpStats opcode[512]; + UOpStats opcode[MAX_UOP_ID+1]; uint64_t unsupported_opcode[256]; uint64_t trace_length_hist[_Py_UOP_HIST_SIZE]; uint64_t trace_run_length_hist[_Py_UOP_HIST_SIZE]; @@ -124,6 +126,9 @@ typedef struct _optimization_stats { uint64_t optimizer_attempts; uint64_t optimizer_successes; uint64_t optimizer_failure_reason_no_memory; + uint64_t remove_globals_builtins_changed; + uint64_t remove_globals_incorrect_keys; + uint64_t error_in_opcode[MAX_UOP_ID+1]; } OptimizationStats; typedef struct _rare_event_stats { diff --git a/Include/internal/pycore_code.h b/Include/internal/pycore_code.h index 8eabd49a18afa9..e004783ee48198 100644 --- a/Include/internal/pycore_code.h +++ b/Include/internal/pycore_code.h @@ -308,6 +308,7 @@ extern int _PyStaticCode_Init(PyCodeObject *co); #define OPT_STAT_INC(name) do { if (_Py_stats) _Py_stats->optimization_stats.name++; } while (0) #define UOP_STAT_INC(opname, name) do { if (_Py_stats) { assert(opname < 512); _Py_stats->optimization_stats.opcode[opname].name++; } } while (0) #define OPT_UNSUPPORTED_OPCODE(opname) do { if (_Py_stats) _Py_stats->optimization_stats.unsupported_opcode[opname]++; } while (0) +#define OPT_ERROR_IN_OPCODE(opname) do { if (_Py_stats) _Py_stats->optimization_stats.error_in_opcode[opname]++; } while (0) #define OPT_HIST(length, name) \ do { \ if (_Py_stats) { \ @@ -334,6 +335,7 @@ PyAPI_FUNC(PyObject*) _Py_GetSpecializationStats(void); #define OPT_STAT_INC(name) ((void)0) #define UOP_STAT_INC(opname, name) ((void)0) #define OPT_UNSUPPORTED_OPCODE(opname) ((void)0) +#define OPT_ERROR_IN_OPCODE(opname) ((void)0) #define OPT_HIST(length, name) ((void)0) #define RARE_EVENT_STAT_INC(name) ((void)0) #endif // !Py_STATS diff --git a/Include/internal/pycore_compile.h b/Include/internal/pycore_compile.h index 0f446a00b4df22..f54f4f7f37acee 100644 --- a/Include/internal/pycore_compile.h +++ b/Include/internal/pycore_compile.h @@ -66,6 +66,7 @@ int _PyCompile_InstructionSequence_UseLabel(_PyCompile_InstructionSequence *seq, int _PyCompile_InstructionSequence_Addop(_PyCompile_InstructionSequence *seq, int opcode, int oparg, _PyCompilerSrcLocation loc); +int _PyCompile_InstructionSequence_ApplyLabelMap(_PyCompile_InstructionSequence *seq); typedef struct { PyObject *u_name; diff --git a/Include/internal/pycore_critical_section.h b/Include/internal/pycore_critical_section.h index 9163b5cf0f2e8a..23b85c2f9e9bb2 100644 --- a/Include/internal/pycore_critical_section.h +++ b/Include/internal/pycore_critical_section.h @@ -87,10 +87,13 @@ extern "C" { #define _Py_CRITICAL_SECTION_MASK 0x3 #ifdef Py_GIL_DISABLED -# define Py_BEGIN_CRITICAL_SECTION(op) \ +# define Py_BEGIN_CRITICAL_SECTION_MUT(mutex) \ { \ _PyCriticalSection _cs; \ - _PyCriticalSection_Begin(&_cs, &_PyObject_CAST(op)->ob_mutex) + _PyCriticalSection_Begin(&_cs, mutex) + +# define Py_BEGIN_CRITICAL_SECTION(op) \ + Py_BEGIN_CRITICAL_SECTION_MUT(&_PyObject_CAST(op)->ob_mutex) # define Py_END_CRITICAL_SECTION() \ _PyCriticalSection_End(&_cs); \ @@ -138,6 +141,7 @@ extern "C" { #else /* !Py_GIL_DISABLED */ // The critical section APIs are no-ops with the GIL. 
+# define Py_BEGIN_CRITICAL_SECTION_MUT(mut) # define Py_BEGIN_CRITICAL_SECTION(op) # define Py_END_CRITICAL_SECTION() # define Py_XBEGIN_CRITICAL_SECTION(op) diff --git a/Include/internal/pycore_frame.h b/Include/internal/pycore_frame.h index 0f9e7333cf1e1c..74d9e4cac72c0e 100644 --- a/Include/internal/pycore_frame.h +++ b/Include/internal/pycore_frame.h @@ -55,7 +55,7 @@ enum _frameowner { }; typedef struct _PyInterpreterFrame { - PyObject *f_executable; /* Strong reference */ + PyObject *f_executable; /* Strong reference (code object or None) */ struct _PyInterpreterFrame *previous; PyObject *f_funcobj; /* Strong reference. Only valid if not on C stack */ PyObject *f_globals; /* Borrowed reference. Only valid if not on C stack */ diff --git a/Include/internal/pycore_function.h b/Include/internal/pycore_function.h index dad6a89af77dec..24fbb3ddbee602 100644 --- a/Include/internal/pycore_function.h +++ b/Include/internal/pycore_function.h @@ -17,20 +17,27 @@ extern PyObject* _PyFunction_Vectorcall( #define FUNC_MAX_WATCHERS 8 #define FUNC_VERSION_CACHE_SIZE (1<<12) /* Must be a power of 2 */ + +struct _func_version_cache_item { + PyFunctionObject *func; + PyObject *code; +}; + struct _py_func_state { uint32_t next_version; - // Borrowed references to function objects whose + // Borrowed references to function and code objects whose // func_version % FUNC_VERSION_CACHE_SIZE // once was equal to the index in the table. - // They are cleared when the function is deallocated. - PyFunctionObject *func_version_cache[FUNC_VERSION_CACHE_SIZE]; + // They are cleared when the function or code object is deallocated. + struct _func_version_cache_item func_version_cache[FUNC_VERSION_CACHE_SIZE]; }; extern PyFunctionObject* _PyFunction_FromConstructor(PyFrameConstructor *constr); extern uint32_t _PyFunction_GetVersionForCurrentState(PyFunctionObject *func); PyAPI_FUNC(void) _PyFunction_SetVersion(PyFunctionObject *func, uint32_t version); -PyFunctionObject *_PyFunction_LookupByVersion(uint32_t version); +void _PyFunction_ClearCodeByVersion(uint32_t version); +PyFunctionObject *_PyFunction_LookupByVersion(uint32_t version, PyObject **p_code); extern PyObject *_Py_set_function_type_params( PyThreadState* unused, PyObject *func, PyObject *type_params); diff --git a/Include/internal/pycore_gc.h b/Include/internal/pycore_gc.h index 4a7191a562cc10..c4482c4ffcfa60 100644 --- a/Include/internal/pycore_gc.h +++ b/Include/internal/pycore_gc.h @@ -109,11 +109,26 @@ static inline void _PyObject_GC_SET_SHARED_INLINE(PyObject *op) { /* Bit flags for _gc_prev */ /* Bit 0 is set when tp_finalize is called */ -#define _PyGC_PREV_MASK_FINALIZED (1) +#define _PyGC_PREV_MASK_FINALIZED 1 /* Bit 1 is set when the object is in generation which is GCed currently. */ -#define _PyGC_PREV_MASK_COLLECTING (2) -/* The (N-2) most significant bits contain the real address. */ -#define _PyGC_PREV_SHIFT (2) +#define _PyGC_PREV_MASK_COLLECTING 2 + +/* Bit 0 in _gc_next is the old space bit. + * It is set as follows: + * Young: gcstate->visited_space + * old[0]: 0 + * old[1]: 1 + * permanent: 0 + * + * During a collection all objects handled should have the bit set to + * gcstate->visited_space, as objects are moved from the young gen + * and the increment into old[gcstate->visited_space]. + * When object are moved from the pending space, old[gcstate->visited_space^1] + * into the increment, the old space bit is flipped. 
+*/ +#define _PyGC_NEXT_MASK_OLD_SPACE_1 1 + +#define _PyGC_PREV_SHIFT 2 #define _PyGC_PREV_MASK (((uintptr_t) -1) << _PyGC_PREV_SHIFT) /* set for debugging information */ @@ -139,11 +154,13 @@ typedef enum { // Lowest bit of _gc_next is used for flags only in GC. // But it is always 0 for normal code. static inline PyGC_Head* _PyGCHead_NEXT(PyGC_Head *gc) { - uintptr_t next = gc->_gc_next; + uintptr_t next = gc->_gc_next & _PyGC_PREV_MASK; return (PyGC_Head*)next; } static inline void _PyGCHead_SET_NEXT(PyGC_Head *gc, PyGC_Head *next) { - gc->_gc_next = (uintptr_t)next; + uintptr_t unext = (uintptr_t)next; + assert((unext & ~_PyGC_PREV_MASK) == 0); + gc->_gc_next = (gc->_gc_next & ~_PyGC_PREV_MASK) | unext; } // Lowest two bits of _gc_prev is used for _PyGC_PREV_MASK_* flags. @@ -151,6 +168,7 @@ static inline PyGC_Head* _PyGCHead_PREV(PyGC_Head *gc) { uintptr_t prev = (gc->_gc_prev & _PyGC_PREV_MASK); return (PyGC_Head*)prev; } + static inline void _PyGCHead_SET_PREV(PyGC_Head *gc, PyGC_Head *prev) { uintptr_t uprev = (uintptr_t)prev; assert((uprev & ~_PyGC_PREV_MASK) == 0); @@ -236,6 +254,13 @@ struct gc_generation { generations */ }; +struct gc_collection_stats { + /* number of collected objects */ + Py_ssize_t collected; + /* total number of uncollectable objects (put into gc.garbage) */ + Py_ssize_t uncollectable; +}; + /* Running stats per generation */ struct gc_generation_stats { /* total number of collections */ @@ -257,8 +282,8 @@ struct _gc_runtime_state { int enabled; int debug; /* linked lists of container objects */ - struct gc_generation generations[NUM_GENERATIONS]; - PyGC_Head *generation0; + struct gc_generation young; + struct gc_generation old[2]; /* a permanent generation which won't be collected */ struct gc_generation permanent_generation; struct gc_generation_stats generation_stats[NUM_GENERATIONS]; @@ -268,6 +293,13 @@ struct _gc_runtime_state { PyObject *garbage; /* a list of callbacks to be invoked when collection is performed */ PyObject *callbacks; + + Py_ssize_t heap_size; + Py_ssize_t work_to_do; + /* Which of the old spaces is the visited space */ + int visited_space; + +#ifdef Py_GIL_DISABLED /* This is the number of objects that survived the last full collection. It approximates the number of long lived objects tracked by the GC. @@ -279,6 +311,7 @@ struct _gc_runtime_state { collections, and are awaiting to undergo a full collection for the first time. */ Py_ssize_t long_lived_pending; +#endif }; #ifdef Py_GIL_DISABLED @@ -291,9 +324,8 @@ struct _gc_thread_state { extern void _PyGC_InitState(struct _gc_runtime_state *); -extern Py_ssize_t _PyGC_Collect(PyThreadState *tstate, int generation, - _PyGC_Reason reason); -extern Py_ssize_t _PyGC_CollectNoFail(PyThreadState *tstate); +extern Py_ssize_t _PyGC_Collect(PyThreadState *tstate, int generation, _PyGC_Reason reason); +extern void _PyGC_CollectNoFail(PyThreadState *tstate); /* Freeze objects tracked by the GC and ignore them in future collections. 
*/ extern void _PyGC_Freeze(PyInterpreterState *interp); @@ -302,7 +334,7 @@ extern void _PyGC_Unfreeze(PyInterpreterState *interp); /* Number of frozen objects */ extern Py_ssize_t _PyGC_GetFreezeCount(PyInterpreterState *interp); -extern PyObject *_PyGC_GetObjects(PyInterpreterState *interp, Py_ssize_t generation); +extern PyObject *_PyGC_GetObjects(PyInterpreterState *interp, int generation); extern PyObject *_PyGC_GetReferrers(PyInterpreterState *interp, PyObject *objs); // Functions to clear types free lists diff --git a/Include/internal/pycore_global_objects_fini_generated.h b/Include/internal/pycore_global_objects_fini_generated.h index d2287687181450..9aa34f5927dea8 100644 --- a/Include/internal/pycore_global_objects_fini_generated.h +++ b/Include/internal/pycore_global_objects_fini_generated.h @@ -724,6 +724,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__slotnames__)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__slots__)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__spec__)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__static_attributes__)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__str__)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__sub__)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(__subclasscheck__)); diff --git a/Include/internal/pycore_global_strings.h b/Include/internal/pycore_global_strings.h index fb9ec44d3f52aa..9a0d42f6f12a1e 100644 --- a/Include/internal/pycore_global_strings.h +++ b/Include/internal/pycore_global_strings.h @@ -213,6 +213,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(__slotnames__) STRUCT_FOR_ID(__slots__) STRUCT_FOR_ID(__spec__) + STRUCT_FOR_ID(__static_attributes__) STRUCT_FOR_ID(__str__) STRUCT_FOR_ID(__sub__) STRUCT_FOR_ID(__subclasscheck__) diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 942f47340b3966..b8d0fdcce11ba8 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -295,9 +295,11 @@ _PyInterpreterState_SetFinalizing(PyInterpreterState *interp, PyThreadState *tst } -// Export for the _xxinterpchannels module. -PyAPI_FUNC(PyInterpreterState *) _PyInterpreterState_LookUpID(int64_t); +// Exports for the _testinternalcapi module. +PyAPI_FUNC(int64_t) _PyInterpreterState_ObjectToID(PyObject *); +PyAPI_FUNC(PyInterpreterState *) _PyInterpreterState_LookUpID(int64_t); +PyAPI_FUNC(PyInterpreterState *) _PyInterpreterState_LookUpIDObject(PyObject *); PyAPI_FUNC(int) _PyInterpreterState_IDInitref(PyInterpreterState *); PyAPI_FUNC(int) _PyInterpreterState_IDIncref(PyInterpreterState *); PyAPI_FUNC(void) _PyInterpreterState_IDDecref(PyInterpreterState *); diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h index 9809f5f2e0271a..0b17ddf0c973ef 100644 --- a/Include/internal/pycore_object.h +++ b/Include/internal/pycore_object.h @@ -125,19 +125,8 @@ static inline void _Py_RefcntAdd(PyObject* op, Py_ssize_t n) } #define _Py_RefcntAdd(op, n) _Py_RefcntAdd(_PyObject_CAST(op), n) -static inline void _Py_SetImmortal(PyObject *op) -{ - if (op) { -#ifdef Py_GIL_DISABLED - op->ob_tid = _Py_UNOWNED_TID; - op->ob_ref_local = _Py_IMMORTAL_REFCNT_LOCAL; - op->ob_ref_shared = 0; -#else - op->ob_refcnt = _Py_IMMORTAL_REFCNT; -#endif - } -} -#define _Py_SetImmortal(op) _Py_SetImmortal(_PyObject_CAST(op)) +extern void _Py_SetImmortal(PyObject *op); +extern void _Py_SetImmortalUntracked(PyObject *op); // Makes an immortal object mortal again with the specified refcnt. 
Should only // be used during runtime finalization. @@ -325,11 +314,12 @@ static inline void _PyObject_GC_TRACK( filename, lineno, __func__); PyInterpreterState *interp = _PyInterpreterState_GET(); - PyGC_Head *generation0 = interp->gc.generation0; + PyGC_Head *generation0 = &interp->gc.young.head; PyGC_Head *last = (PyGC_Head*)(generation0->_gc_prev); _PyGCHead_SET_NEXT(last, gc); _PyGCHead_SET_PREV(gc, last); - _PyGCHead_SET_NEXT(gc, generation0); + /* Young objects will be moved into the visited space during GC, so set the bit here */ + gc->_gc_next = ((uintptr_t)generation0) | interp->gc.visited_space; generation0->_gc_prev = (uintptr_t)gc; #endif } @@ -726,6 +716,8 @@ PyAPI_DATA(PyTypeObject) _PyNotImplemented_Type; // Export for the stable ABI. PyAPI_DATA(int) _Py_SwappedOp[]; +extern void _Py_GetConstant_Init(void); + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_opcode_metadata.h b/Include/internal/pycore_opcode_metadata.h index f754de3706c812..de525f72d3523e 100644 --- a/Include/internal/pycore_opcode_metadata.h +++ b/Include/internal/pycore_opcode_metadata.h @@ -921,6 +921,7 @@ enum InstructionFormat { #define HAS_PURE_FLAG (2048) #define HAS_PASSTHROUGH_FLAG (4096) #define HAS_OPARG_AND_1_FLAG (8192) +#define HAS_ERROR_NO_POP_FLAG (16384) #define OPCODE_HAS_ARG(OP) (_PyOpcode_opcode_metadata[OP].flags & (HAS_ARG_FLAG)) #define OPCODE_HAS_CONST(OP) (_PyOpcode_opcode_metadata[OP].flags & (HAS_CONST_FLAG)) #define OPCODE_HAS_NAME(OP) (_PyOpcode_opcode_metadata[OP].flags & (HAS_NAME_FLAG)) @@ -935,6 +936,7 @@ enum InstructionFormat { #define OPCODE_HAS_PURE(OP) (_PyOpcode_opcode_metadata[OP].flags & (HAS_PURE_FLAG)) #define OPCODE_HAS_PASSTHROUGH(OP) (_PyOpcode_opcode_metadata[OP].flags & (HAS_PASSTHROUGH_FLAG)) #define OPCODE_HAS_OPARG_AND_1(OP) (_PyOpcode_opcode_metadata[OP].flags & (HAS_OPARG_AND_1_FLAG)) +#define OPCODE_HAS_ERROR_NO_POP(OP) (_PyOpcode_opcode_metadata[OP].flags & (HAS_ERROR_NO_POP_FLAG)) #define OPARG_FULL 0 #define OPARG_CACHE_1 1 @@ -954,17 +956,17 @@ struct opcode_metadata { extern const struct opcode_metadata _PyOpcode_opcode_metadata[268]; #ifdef NEED_OPCODE_METADATA const struct opcode_metadata _PyOpcode_opcode_metadata[268] = { - [BEFORE_ASYNC_WITH] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [BEFORE_WITH] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [BEFORE_ASYNC_WITH] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, + [BEFORE_WITH] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [BINARY_OP] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [BINARY_OP_ADD_FLOAT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, - [BINARY_OP_ADD_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG }, - [BINARY_OP_ADD_UNICODE] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG }, + [BINARY_OP_ADD_FLOAT] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG }, + [BINARY_OP_ADD_INT] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, + [BINARY_OP_ADD_UNICODE] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, [BINARY_OP_INPLACE_ADD_UNICODE] = { true, INSTR_FMT_IXC, HAS_LOCAL_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [BINARY_OP_MULTIPLY_FLOAT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, - [BINARY_OP_MULTIPLY_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG }, - [BINARY_OP_SUBTRACT_FLOAT] 
= { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, - [BINARY_OP_SUBTRACT_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG }, + [BINARY_OP_MULTIPLY_FLOAT] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG }, + [BINARY_OP_MULTIPLY_INT] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, + [BINARY_OP_SUBTRACT_FLOAT] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG }, + [BINARY_OP_SUBTRACT_INT] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, [BINARY_SLICE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [BINARY_SUBSCR] = { true, INSTR_FMT_IXC, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [BINARY_SUBSCR_DICT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, @@ -975,25 +977,25 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[268] = { [BUILD_CONST_KEY_MAP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [BUILD_LIST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG }, [BUILD_MAP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [BUILD_SET] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [BUILD_SET] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [BUILD_SLICE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG }, [BUILD_STRING] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG }, [BUILD_TUPLE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG }, [CACHE] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG }, - [CALL] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [CALL_ALLOC_AND_ENTER_INIT] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [CALL] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, + [CALL_ALLOC_AND_ENTER_INIT] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [CALL_BOUND_METHOD_EXACT_ARGS] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, [CALL_BUILTIN_CLASS] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG }, [CALL_BUILTIN_FAST] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [CALL_BUILTIN_FAST_WITH_KEYWORDS] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [CALL_BUILTIN_O] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [CALL_FUNCTION_EX] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [CALL_FUNCTION_EX] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [CALL_INTRINSIC_1] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [CALL_INTRINSIC_2] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [CALL_ISINSTANCE] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [CALL_KW] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [CALL_LEN] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [CALL_LIST_APPEND] = { true, INSTR_FMT_IBC00, 
HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG }, + [CALL_ISINSTANCE] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, + [CALL_KW] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, + [CALL_LEN] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, + [CALL_LIST_APPEND] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG }, [CALL_METHOD_DESCRIPTOR_FAST] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [CALL_METHOD_DESCRIPTOR_NOARGS] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, @@ -1005,11 +1007,11 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[268] = { [CALL_TYPE_1] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, [CHECK_EG_MATCH] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [CHECK_EXC_MATCH] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [CLEANUP_THROW] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [CLEANUP_THROW] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [COMPARE_OP] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [COMPARE_OP_FLOAT] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ESCAPES_FLAG }, + [COMPARE_OP_FLOAT] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_EXIT_FLAG | HAS_ESCAPES_FLAG }, [COMPARE_OP_INT] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, - [COMPARE_OP_STR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, + [COMPARE_OP_STR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_EXIT_FLAG }, [CONTAINS_OP] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [CONTAINS_OP_DICT] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [CONTAINS_OP_SET] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, @@ -1017,40 +1019,40 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[268] = { [COPY] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_PURE_FLAG }, [COPY_FREE_VARS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, [DELETE_ATTR] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [DELETE_DEREF] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [DELETE_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_ERROR_FLAG }, - [DELETE_GLOBAL] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [DELETE_NAME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [DELETE_DEREF] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, + [DELETE_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [DELETE_GLOBAL] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, + [DELETE_NAME] = { true, 
INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [DELETE_SUBSCR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [DICT_MERGE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [DICT_UPDATE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [END_ASYNC_FOR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [END_ASYNC_FOR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [END_FOR] = { true, INSTR_FMT_IX, HAS_PURE_FLAG }, [END_SEND] = { true, INSTR_FMT_IX, HAS_PURE_FLAG }, [ENTER_EXECUTOR] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG }, - [EXIT_INIT_CHECK] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [EXIT_INIT_CHECK] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [EXTENDED_ARG] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, [FORMAT_SIMPLE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [FORMAT_WITH_SPEC] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [FOR_ITER] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [FOR_ITER] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [FOR_ITER_GEN] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, [FOR_ITER_LIST] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_DEOPT_FLAG }, [FOR_ITER_RANGE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG }, [FOR_ITER_TUPLE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_DEOPT_FLAG }, [GET_AITER] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [GET_ANEXT] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [GET_ANEXT] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [GET_AWAITABLE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [GET_ITER] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [GET_LEN] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [GET_YIELD_FROM_ITER] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [GET_YIELD_FROM_ITER] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [IMPORT_FROM] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [IMPORT_NAME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [INSTRUMENTED_CALL] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [INSTRUMENTED_CALL_FUNCTION_EX] = { true, INSTR_FMT_IX, 0 }, [INSTRUMENTED_CALL_KW] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [INSTRUMENTED_END_FOR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [INSTRUMENTED_END_SEND] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [INSTRUMENTED_FOR_ITER] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [INSTRUMENTED_END_FOR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, + [INSTRUMENTED_END_SEND] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, + [INSTRUMENTED_FOR_ITER] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [INSTRUMENTED_INSTRUCTION] = { true, 
INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [INSTRUMENTED_JUMP_BACKWARD] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG }, [INSTRUMENTED_JUMP_FORWARD] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, @@ -1059,10 +1061,10 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[268] = { [INSTRUMENTED_POP_JUMP_IF_NONE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG }, [INSTRUMENTED_POP_JUMP_IF_NOT_NONE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG }, [INSTRUMENTED_POP_JUMP_IF_TRUE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG }, - [INSTRUMENTED_RESUME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [INSTRUMENTED_RETURN_CONST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_CONST_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [INSTRUMENTED_RETURN_VALUE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [INSTRUMENTED_YIELD_VALUE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [INSTRUMENTED_RESUME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, + [INSTRUMENTED_RETURN_CONST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_CONST_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, + [INSTRUMENTED_RETURN_VALUE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, + [INSTRUMENTED_YIELD_VALUE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [INTERPRETER_EXIT] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG }, [IS_OP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, [JUMP_BACKWARD] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, @@ -1076,10 +1078,10 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[268] = { [LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, [LOAD_ATTR_INSTANCE_VALUE] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, [LOAD_ATTR_METHOD_LAZY_DICT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, - [LOAD_ATTR_METHOD_NO_DICT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, + [LOAD_ATTR_METHOD_NO_DICT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_EXIT_FLAG }, [LOAD_ATTR_METHOD_WITH_VALUES] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, [LOAD_ATTR_MODULE] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, - [LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, + [LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_EXIT_FLAG }, [LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, [LOAD_ATTR_PROPERTY] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, [LOAD_ATTR_SLOT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, @@ -1089,20 +1091,20 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[268] = { [LOAD_DEREF] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [LOAD_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_PURE_FLAG }, [LOAD_FAST_AND_CLEAR] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, - [LOAD_FAST_CHECK] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | 
HAS_LOCAL_FLAG | HAS_ERROR_FLAG }, + [LOAD_FAST_CHECK] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [LOAD_FAST_LOAD_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, - [LOAD_FROM_DICT_OR_DEREF] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [LOAD_FROM_DICT_OR_GLOBALS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [LOAD_FROM_DICT_OR_DEREF] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, + [LOAD_FROM_DICT_OR_GLOBALS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [LOAD_GLOBAL] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [LOAD_GLOBAL_BUILTIN] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, [LOAD_GLOBAL_MODULE] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, [LOAD_LOCALS] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [LOAD_NAME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [LOAD_NAME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [LOAD_SUPER_ATTR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [LOAD_SUPER_ATTR_ATTR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [LOAD_SUPER_ATTR_METHOD] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [MAKE_CELL] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [MAKE_FUNCTION] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [MAKE_CELL] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, + [MAKE_FUNCTION] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [MAP_ADD] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [MATCH_CLASS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [MATCH_KEYS] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, @@ -1117,15 +1119,15 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[268] = { [POP_TOP] = { true, INSTR_FMT_IX, HAS_PURE_FLAG }, [PUSH_EXC_INFO] = { true, INSTR_FMT_IX, 0 }, [PUSH_NULL] = { true, INSTR_FMT_IX, HAS_PURE_FLAG }, - [RAISE_VARARGS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [RERAISE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [RAISE_VARARGS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, + [RERAISE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [RESERVED] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG }, [RESUME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [RESUME_CHECK] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, [RETURN_CONST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_CONST_FLAG | HAS_ESCAPES_FLAG }, - [RETURN_GENERATOR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [RETURN_GENERATOR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [RETURN_VALUE] 
= { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG }, - [SEND] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [SEND] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [SEND_GEN] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, [SETUP_ANNOTATIONS] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [SET_ADD] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, @@ -1133,7 +1135,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[268] = { [SET_UPDATE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [STORE_ATTR] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [STORE_ATTR_INSTANCE_VALUE] = { true, INSTR_FMT_IXC000, HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, - [STORE_ATTR_SLOT] = { true, INSTR_FMT_IXC000, HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ESCAPES_FLAG }, + [STORE_ATTR_SLOT] = { true, INSTR_FMT_IXC000, HAS_EXIT_FLAG | HAS_ESCAPES_FLAG }, [STORE_ATTR_WITH_HINT] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, [STORE_DEREF] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ESCAPES_FLAG }, [STORE_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, @@ -1147,12 +1149,12 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[268] = { [STORE_SUBSCR_LIST_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, [SWAP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_PURE_FLAG }, [TO_BOOL] = { true, INSTR_FMT_IXC00, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [TO_BOOL_ALWAYS_TRUE] = { true, INSTR_FMT_IXC00, HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, - [TO_BOOL_BOOL] = { true, INSTR_FMT_IXC00, HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, - [TO_BOOL_INT] = { true, INSTR_FMT_IXC00, HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, - [TO_BOOL_LIST] = { true, INSTR_FMT_IXC00, HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, - [TO_BOOL_NONE] = { true, INSTR_FMT_IXC00, HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, - [TO_BOOL_STR] = { true, INSTR_FMT_IXC00, HAS_DEOPT_FLAG | HAS_EXIT_FLAG }, + [TO_BOOL_ALWAYS_TRUE] = { true, INSTR_FMT_IXC00, HAS_EXIT_FLAG }, + [TO_BOOL_BOOL] = { true, INSTR_FMT_IXC00, HAS_EXIT_FLAG }, + [TO_BOOL_INT] = { true, INSTR_FMT_IXC00, HAS_EXIT_FLAG }, + [TO_BOOL_LIST] = { true, INSTR_FMT_IXC00, HAS_EXIT_FLAG }, + [TO_BOOL_NONE] = { true, INSTR_FMT_IXC00, HAS_EXIT_FLAG }, + [TO_BOOL_STR] = { true, INSTR_FMT_IXC00, HAS_EXIT_FLAG }, [UNARY_INVERT] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [UNARY_NEGATIVE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [UNARY_NOT] = { true, INSTR_FMT_IX, HAS_PURE_FLAG }, @@ -1188,8 +1190,6 @@ extern const struct opcode_macro_expansion _PyOpcode_macro_expansion[256]; #ifdef NEED_OPCODE_METADATA const struct opcode_macro_expansion _PyOpcode_macro_expansion[256] = { - [BEFORE_ASYNC_WITH] = { .nuops = 1, .uops = { { _BEFORE_ASYNC_WITH, 0, 0 } } }, - [BEFORE_WITH] = { .nuops = 1, .uops = { { _BEFORE_WITH, 0, 0 } } }, [BINARY_OP] = { .nuops = 1, .uops = { { _BINARY_OP, 0, 0 } } }, [BINARY_OP_ADD_FLOAT] = { .nuops = 2, .uops = { { _GUARD_BOTH_FLOAT, 0, 0 }, { _BINARY_OP_ADD_FLOAT, 0, 0 } } }, [BINARY_OP_ADD_INT] = { .nuops = 2, .uops = { { _GUARD_BOTH_INT, 0, 0 }, { _BINARY_OP_ADD_INT, 0, 0 } } }, @@ -1207,7 +1207,6 @@ _PyOpcode_macro_expansion[256] = { [BUILD_CONST_KEY_MAP] = { .nuops = 1, .uops = { { _BUILD_CONST_KEY_MAP, 0, 0 } } }, [BUILD_LIST] = { .nuops = 1, .uops = { { _BUILD_LIST, 
0, 0 } } }, [BUILD_MAP] = { .nuops = 1, .uops = { { _BUILD_MAP, 0, 0 } } }, - [BUILD_SET] = { .nuops = 1, .uops = { { _BUILD_SET, 0, 0 } } }, [BUILD_SLICE] = { .nuops = 1, .uops = { { _BUILD_SLICE, 0, 0 } } }, [BUILD_STRING] = { .nuops = 1, .uops = { { _BUILD_STRING, 0, 0 } } }, [BUILD_TUPLE] = { .nuops = 1, .uops = { { _BUILD_TUPLE, 0, 0 } } }, @@ -1291,7 +1290,6 @@ _PyOpcode_macro_expansion[256] = { [LOAD_GLOBAL_BUILTIN] = { .nuops = 3, .uops = { { _GUARD_GLOBALS_VERSION, 1, 1 }, { _GUARD_BUILTINS_VERSION, 1, 2 }, { _LOAD_GLOBAL_BUILTINS, 1, 3 } } }, [LOAD_GLOBAL_MODULE] = { .nuops = 2, .uops = { { _GUARD_GLOBALS_VERSION, 1, 1 }, { _LOAD_GLOBAL_MODULE, 1, 3 } } }, [LOAD_LOCALS] = { .nuops = 1, .uops = { { _LOAD_LOCALS, 0, 0 } } }, - [LOAD_NAME] = { .nuops = 1, .uops = { { _LOAD_NAME, 0, 0 } } }, [LOAD_SUPER_ATTR_ATTR] = { .nuops = 1, .uops = { { _LOAD_SUPER_ATTR_ATTR, 0, 0 } } }, [LOAD_SUPER_ATTR_METHOD] = { .nuops = 1, .uops = { { _LOAD_SUPER_ATTR_METHOD, 0, 0 } } }, [MAKE_CELL] = { .nuops = 1, .uops = { { _MAKE_CELL, 0, 0 } } }, diff --git a/Include/internal/pycore_optimizer.h b/Include/internal/pycore_optimizer.h index fcead4d8714870..44cafe61b75596 100644 --- a/Include/internal/pycore_optimizer.h +++ b/Include/internal/pycore_optimizer.h @@ -12,7 +12,7 @@ extern "C" { #include // This is the length of the trace we project initially. -#define UOP_MAX_TRACE_LENGTH 512 +#define UOP_MAX_TRACE_LENGTH 800 #define TRACE_STACK_SIZE 5 diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h index 6f9e6a332a7830..35e266acd3ab60 100644 --- a/Include/internal/pycore_pystate.h +++ b/Include/internal/pycore_pystate.h @@ -83,6 +83,9 @@ PyAPI_FUNC(void) _PyInterpreterState_SetNotRunningMain(PyInterpreterState *); PyAPI_FUNC(int) _PyInterpreterState_IsRunningMain(PyInterpreterState *); PyAPI_FUNC(int) _PyInterpreterState_FailIfRunningMain(PyInterpreterState *); +extern int _PyThreadState_IsRunningMain(PyThreadState *); +extern void _PyInterpreterState_ReinitRunningMain(PyThreadState *); + static inline const PyConfig * _Py_GetMainConfig(void) @@ -215,7 +218,8 @@ extern PyThreadState * _PyThreadState_New( PyInterpreterState *interp, int whence); extern void _PyThreadState_Bind(PyThreadState *tstate); -extern void _PyThreadState_DeleteExcept(PyThreadState *tstate); +extern PyThreadState * _PyThreadState_RemoveExcept(PyThreadState *tstate); +extern void _PyThreadState_DeleteList(PyThreadState *list); extern void _PyThreadState_ClearMimallocHeaps(PyThreadState *tstate); // Export for '_testinternalcapi' shared extension diff --git a/Include/internal/pycore_runtime_init.h b/Include/internal/pycore_runtime_init.h index cc47b9a82e2879..88d888943d28b1 100644 --- a/Include/internal/pycore_runtime_init.h +++ b/Include/internal/pycore_runtime_init.h @@ -168,12 +168,12 @@ extern PyTypeObject _PyExc_MemoryError; }, \ .gc = { \ .enabled = 1, \ - .generations = { \ - /* .head is set in _PyGC_InitState(). 
*/ \ - { .threshold = 700, }, \ - { .threshold = 10, }, \ + .young = { .threshold = 2000, }, \ + .old = { \ { .threshold = 10, }, \ + { .threshold = 0, }, \ }, \ + .work_to_do = -5000, \ }, \ .qsbr = { \ .wr_seq = QSBR_INITIAL, \ diff --git a/Include/internal/pycore_runtime_init_generated.h b/Include/internal/pycore_runtime_init_generated.h index 658bf8030f661d..d75f0f88656128 100644 --- a/Include/internal/pycore_runtime_init_generated.h +++ b/Include/internal/pycore_runtime_init_generated.h @@ -722,6 +722,7 @@ extern "C" { INIT_ID(__slotnames__), \ INIT_ID(__slots__), \ INIT_ID(__spec__), \ + INIT_ID(__static_attributes__), \ INIT_ID(__str__), \ INIT_ID(__sub__), \ INIT_ID(__subclasscheck__), \ diff --git a/Include/internal/pycore_unicodeobject_generated.h b/Include/internal/pycore_unicodeobject_generated.h index d72353d56eae60..7f67e67f571eae 100644 --- a/Include/internal/pycore_unicodeobject_generated.h +++ b/Include/internal/pycore_unicodeobject_generated.h @@ -480,6 +480,9 @@ _PyUnicode_InitStaticStrings(PyInterpreterState *interp) { string = &_Py_ID(__spec__); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); + string = &_Py_ID(__static_attributes__); + assert(_PyUnicode_CheckConsistency(string, 1)); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(__str__); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); diff --git a/Include/internal/pycore_uop_ids.h b/Include/internal/pycore_uop_ids.h index b569b80c5f110a..bcb10ab723ecba 100644 --- a/Include/internal/pycore_uop_ids.h +++ b/Include/internal/pycore_uop_ids.h @@ -87,45 +87,47 @@ extern "C" { #define _DELETE_GLOBAL DELETE_GLOBAL #define _DELETE_NAME DELETE_NAME #define _DELETE_SUBSCR DELETE_SUBSCR +#define _DEOPT 341 #define _DICT_MERGE DICT_MERGE #define _DICT_UPDATE DICT_UPDATE #define _END_SEND END_SEND +#define _ERROR_POP_N 342 #define _EXIT_INIT_CHECK EXIT_INIT_CHECK -#define _FATAL_ERROR 341 +#define _FATAL_ERROR 343 #define _FORMAT_SIMPLE FORMAT_SIMPLE #define _FORMAT_WITH_SPEC FORMAT_WITH_SPEC -#define _FOR_ITER 342 +#define _FOR_ITER 344 #define _FOR_ITER_GEN FOR_ITER_GEN -#define _FOR_ITER_TIER_TWO 343 +#define _FOR_ITER_TIER_TWO 345 #define _GET_AITER GET_AITER #define _GET_ANEXT GET_ANEXT #define _GET_AWAITABLE GET_AWAITABLE #define _GET_ITER GET_ITER #define _GET_LEN GET_LEN #define _GET_YIELD_FROM_ITER GET_YIELD_FROM_ITER -#define _GUARD_BOTH_FLOAT 344 -#define _GUARD_BOTH_INT 345 -#define _GUARD_BOTH_UNICODE 346 -#define _GUARD_BUILTINS_VERSION 347 -#define _GUARD_DORV_VALUES 348 -#define _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT 349 -#define _GUARD_GLOBALS_VERSION 350 -#define _GUARD_IS_FALSE_POP 351 -#define _GUARD_IS_NONE_POP 352 -#define _GUARD_IS_NOT_NONE_POP 353 -#define _GUARD_IS_TRUE_POP 354 -#define _GUARD_KEYS_VERSION 355 -#define _GUARD_NOT_EXHAUSTED_LIST 356 -#define _GUARD_NOT_EXHAUSTED_RANGE 357 -#define _GUARD_NOT_EXHAUSTED_TUPLE 358 -#define _GUARD_TYPE_VERSION 359 -#define _INIT_CALL_BOUND_METHOD_EXACT_ARGS 360 -#define _INIT_CALL_PY_EXACT_ARGS 361 -#define _INIT_CALL_PY_EXACT_ARGS_0 362 -#define _INIT_CALL_PY_EXACT_ARGS_1 363 -#define _INIT_CALL_PY_EXACT_ARGS_2 364 -#define _INIT_CALL_PY_EXACT_ARGS_3 365 -#define _INIT_CALL_PY_EXACT_ARGS_4 366 +#define _GUARD_BOTH_FLOAT 346 +#define _GUARD_BOTH_INT 347 +#define _GUARD_BOTH_UNICODE 348 +#define _GUARD_BUILTINS_VERSION 349 +#define _GUARD_DORV_VALUES 350 +#define _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT 351 +#define _GUARD_GLOBALS_VERSION 352 +#define 
_GUARD_IS_FALSE_POP 353 +#define _GUARD_IS_NONE_POP 354 +#define _GUARD_IS_NOT_NONE_POP 355 +#define _GUARD_IS_TRUE_POP 356 +#define _GUARD_KEYS_VERSION 357 +#define _GUARD_NOT_EXHAUSTED_LIST 358 +#define _GUARD_NOT_EXHAUSTED_RANGE 359 +#define _GUARD_NOT_EXHAUSTED_TUPLE 360 +#define _GUARD_TYPE_VERSION 361 +#define _INIT_CALL_BOUND_METHOD_EXACT_ARGS 362 +#define _INIT_CALL_PY_EXACT_ARGS 363 +#define _INIT_CALL_PY_EXACT_ARGS_0 364 +#define _INIT_CALL_PY_EXACT_ARGS_1 365 +#define _INIT_CALL_PY_EXACT_ARGS_2 366 +#define _INIT_CALL_PY_EXACT_ARGS_3 367 +#define _INIT_CALL_PY_EXACT_ARGS_4 368 #define _INSTRUMENTED_CALL INSTRUMENTED_CALL #define _INSTRUMENTED_CALL_FUNCTION_EX INSTRUMENTED_CALL_FUNCTION_EX #define _INSTRUMENTED_CALL_KW INSTRUMENTED_CALL_KW @@ -142,65 +144,65 @@ extern "C" { #define _INSTRUMENTED_RETURN_CONST INSTRUMENTED_RETURN_CONST #define _INSTRUMENTED_RETURN_VALUE INSTRUMENTED_RETURN_VALUE #define _INSTRUMENTED_YIELD_VALUE INSTRUMENTED_YIELD_VALUE -#define _INTERNAL_INCREMENT_OPT_COUNTER 367 -#define _IS_NONE 368 +#define _INTERNAL_INCREMENT_OPT_COUNTER 369 +#define _IS_NONE 370 #define _IS_OP IS_OP -#define _ITER_CHECK_LIST 369 -#define _ITER_CHECK_RANGE 370 -#define _ITER_CHECK_TUPLE 371 -#define _ITER_JUMP_LIST 372 -#define _ITER_JUMP_RANGE 373 -#define _ITER_JUMP_TUPLE 374 -#define _ITER_NEXT_LIST 375 -#define _ITER_NEXT_RANGE 376 -#define _ITER_NEXT_TUPLE 377 -#define _JUMP_TO_TOP 378 +#define _ITER_CHECK_LIST 371 +#define _ITER_CHECK_RANGE 372 +#define _ITER_CHECK_TUPLE 373 +#define _ITER_JUMP_LIST 374 +#define _ITER_JUMP_RANGE 375 +#define _ITER_JUMP_TUPLE 376 +#define _ITER_NEXT_LIST 377 +#define _ITER_NEXT_RANGE 378 +#define _ITER_NEXT_TUPLE 379 +#define _JUMP_TO_TOP 380 #define _LIST_APPEND LIST_APPEND #define _LIST_EXTEND LIST_EXTEND #define _LOAD_ASSERTION_ERROR LOAD_ASSERTION_ERROR -#define _LOAD_ATTR 379 -#define _LOAD_ATTR_CLASS 380 -#define _LOAD_ATTR_CLASS_0 381 -#define _LOAD_ATTR_CLASS_1 382 +#define _LOAD_ATTR 381 +#define _LOAD_ATTR_CLASS 382 +#define _LOAD_ATTR_CLASS_0 383 +#define _LOAD_ATTR_CLASS_1 384 #define _LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN -#define _LOAD_ATTR_INSTANCE_VALUE 383 -#define _LOAD_ATTR_INSTANCE_VALUE_0 384 -#define _LOAD_ATTR_INSTANCE_VALUE_1 385 -#define _LOAD_ATTR_METHOD_LAZY_DICT 386 -#define _LOAD_ATTR_METHOD_NO_DICT 387 -#define _LOAD_ATTR_METHOD_WITH_VALUES 388 -#define _LOAD_ATTR_MODULE 389 -#define _LOAD_ATTR_NONDESCRIPTOR_NO_DICT 390 -#define _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 391 +#define _LOAD_ATTR_INSTANCE_VALUE 385 +#define _LOAD_ATTR_INSTANCE_VALUE_0 386 +#define _LOAD_ATTR_INSTANCE_VALUE_1 387 +#define _LOAD_ATTR_METHOD_LAZY_DICT 388 +#define _LOAD_ATTR_METHOD_NO_DICT 389 +#define _LOAD_ATTR_METHOD_WITH_VALUES 390 +#define _LOAD_ATTR_MODULE 391 +#define _LOAD_ATTR_NONDESCRIPTOR_NO_DICT 392 +#define _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 393 #define _LOAD_ATTR_PROPERTY LOAD_ATTR_PROPERTY -#define _LOAD_ATTR_SLOT 392 -#define _LOAD_ATTR_SLOT_0 393 -#define _LOAD_ATTR_SLOT_1 394 -#define _LOAD_ATTR_WITH_HINT 395 +#define _LOAD_ATTR_SLOT 394 +#define _LOAD_ATTR_SLOT_0 395 +#define _LOAD_ATTR_SLOT_1 396 +#define _LOAD_ATTR_WITH_HINT 397 #define _LOAD_BUILD_CLASS LOAD_BUILD_CLASS #define _LOAD_CONST LOAD_CONST -#define _LOAD_CONST_INLINE 396 -#define _LOAD_CONST_INLINE_BORROW 397 -#define _LOAD_CONST_INLINE_BORROW_WITH_NULL 398 -#define _LOAD_CONST_INLINE_WITH_NULL 399 +#define _LOAD_CONST_INLINE 398 +#define _LOAD_CONST_INLINE_BORROW 399 +#define _LOAD_CONST_INLINE_BORROW_WITH_NULL 400 
+#define _LOAD_CONST_INLINE_WITH_NULL 401 #define _LOAD_DEREF LOAD_DEREF -#define _LOAD_FAST 400 -#define _LOAD_FAST_0 401 -#define _LOAD_FAST_1 402 -#define _LOAD_FAST_2 403 -#define _LOAD_FAST_3 404 -#define _LOAD_FAST_4 405 -#define _LOAD_FAST_5 406 -#define _LOAD_FAST_6 407 -#define _LOAD_FAST_7 408 +#define _LOAD_FAST 402 +#define _LOAD_FAST_0 403 +#define _LOAD_FAST_1 404 +#define _LOAD_FAST_2 405 +#define _LOAD_FAST_3 406 +#define _LOAD_FAST_4 407 +#define _LOAD_FAST_5 408 +#define _LOAD_FAST_6 409 +#define _LOAD_FAST_7 410 #define _LOAD_FAST_AND_CLEAR LOAD_FAST_AND_CLEAR #define _LOAD_FAST_CHECK LOAD_FAST_CHECK #define _LOAD_FAST_LOAD_FAST LOAD_FAST_LOAD_FAST #define _LOAD_FROM_DICT_OR_DEREF LOAD_FROM_DICT_OR_DEREF #define _LOAD_FROM_DICT_OR_GLOBALS LOAD_FROM_DICT_OR_GLOBALS -#define _LOAD_GLOBAL 409 -#define _LOAD_GLOBAL_BUILTINS 410 -#define _LOAD_GLOBAL_MODULE 411 +#define _LOAD_GLOBAL 411 +#define _LOAD_GLOBAL_BUILTINS 412 +#define _LOAD_GLOBAL_MODULE 413 #define _LOAD_LOCALS LOAD_LOCALS #define _LOAD_NAME LOAD_NAME #define _LOAD_SUPER_ATTR_ATTR LOAD_SUPER_ATTR_ATTR @@ -214,48 +216,49 @@ extern "C" { #define _MATCH_SEQUENCE MATCH_SEQUENCE #define _NOP NOP #define _POP_EXCEPT POP_EXCEPT -#define _POP_FRAME 412 -#define _POP_JUMP_IF_FALSE 413 -#define _POP_JUMP_IF_TRUE 414 +#define _POP_FRAME 414 +#define _POP_JUMP_IF_FALSE 415 +#define _POP_JUMP_IF_TRUE 416 #define _POP_TOP POP_TOP -#define _POP_TOP_LOAD_CONST_INLINE_BORROW 415 +#define _POP_TOP_LOAD_CONST_INLINE_BORROW 417 #define _PUSH_EXC_INFO PUSH_EXC_INFO -#define _PUSH_FRAME 416 +#define _PUSH_FRAME 418 #define _PUSH_NULL PUSH_NULL -#define _REPLACE_WITH_TRUE 417 +#define _REPLACE_WITH_TRUE 419 #define _RESUME_CHECK RESUME_CHECK -#define _SAVE_RETURN_OFFSET 418 -#define _SEND 419 +#define _SAVE_RETURN_OFFSET 420 +#define _SEND 421 #define _SEND_GEN SEND_GEN #define _SETUP_ANNOTATIONS SETUP_ANNOTATIONS #define _SET_ADD SET_ADD #define _SET_FUNCTION_ATTRIBUTE SET_FUNCTION_ATTRIBUTE #define _SET_UPDATE SET_UPDATE -#define _START_EXECUTOR 420 -#define _STORE_ATTR 421 -#define _STORE_ATTR_INSTANCE_VALUE 422 -#define _STORE_ATTR_SLOT 423 +#define _SIDE_EXIT 422 +#define _START_EXECUTOR 423 +#define _STORE_ATTR 424 +#define _STORE_ATTR_INSTANCE_VALUE 425 +#define _STORE_ATTR_SLOT 426 #define _STORE_ATTR_WITH_HINT STORE_ATTR_WITH_HINT #define _STORE_DEREF STORE_DEREF -#define _STORE_FAST 424 -#define _STORE_FAST_0 425 -#define _STORE_FAST_1 426 -#define _STORE_FAST_2 427 -#define _STORE_FAST_3 428 -#define _STORE_FAST_4 429 -#define _STORE_FAST_5 430 -#define _STORE_FAST_6 431 -#define _STORE_FAST_7 432 +#define _STORE_FAST 427 +#define _STORE_FAST_0 428 +#define _STORE_FAST_1 429 +#define _STORE_FAST_2 430 +#define _STORE_FAST_3 431 +#define _STORE_FAST_4 432 +#define _STORE_FAST_5 433 +#define _STORE_FAST_6 434 +#define _STORE_FAST_7 435 #define _STORE_FAST_LOAD_FAST STORE_FAST_LOAD_FAST #define _STORE_FAST_STORE_FAST STORE_FAST_STORE_FAST #define _STORE_GLOBAL STORE_GLOBAL #define _STORE_NAME STORE_NAME #define _STORE_SLICE STORE_SLICE -#define _STORE_SUBSCR 433 +#define _STORE_SUBSCR 436 #define _STORE_SUBSCR_DICT STORE_SUBSCR_DICT #define _STORE_SUBSCR_LIST_INT STORE_SUBSCR_LIST_INT #define _SWAP SWAP -#define _TO_BOOL 434 +#define _TO_BOOL 437 #define _TO_BOOL_BOOL TO_BOOL_BOOL #define _TO_BOOL_INT TO_BOOL_INT #define _TO_BOOL_LIST TO_BOOL_LIST @@ -265,12 +268,12 @@ extern "C" { #define _UNARY_NEGATIVE UNARY_NEGATIVE #define _UNARY_NOT UNARY_NOT #define _UNPACK_EX UNPACK_EX -#define _UNPACK_SEQUENCE 435 +#define 
_UNPACK_SEQUENCE 438 #define _UNPACK_SEQUENCE_LIST UNPACK_SEQUENCE_LIST #define _UNPACK_SEQUENCE_TUPLE UNPACK_SEQUENCE_TUPLE #define _UNPACK_SEQUENCE_TWO_TUPLE UNPACK_SEQUENCE_TWO_TUPLE #define _WITH_EXCEPT_START WITH_EXCEPT_START -#define MAX_UOP_ID 435 +#define MAX_UOP_ID 438 #ifdef __cplusplus } diff --git a/Include/internal/pycore_uop_metadata.h b/Include/internal/pycore_uop_metadata.h index 507bd27c01c553..51206cd4ca2fdf 100644 --- a/Include/internal/pycore_uop_metadata.h +++ b/Include/internal/pycore_uop_metadata.h @@ -15,11 +15,13 @@ extern const uint16_t _PyUop_Flags[MAX_UOP_ID+1]; extern const uint8_t _PyUop_Replication[MAX_UOP_ID+1]; extern const char * const _PyOpcode_uop_name[MAX_UOP_ID+1]; +extern int _PyUop_num_popped(int opcode, int oparg); + #ifdef NEED_OPCODE_METADATA const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_NOP] = HAS_PURE_FLAG, [_RESUME_CHECK] = HAS_DEOPT_FLAG, - [_LOAD_FAST_CHECK] = HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_ERROR_FLAG, + [_LOAD_FAST_CHECK] = HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_LOAD_FAST_0] = HAS_LOCAL_FLAG | HAS_PURE_FLAG, [_LOAD_FAST_1] = HAS_LOCAL_FLAG | HAS_PURE_FLAG, [_LOAD_FAST_2] = HAS_LOCAL_FLAG | HAS_PURE_FLAG, @@ -49,22 +51,22 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_UNARY_NEGATIVE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_UNARY_NOT] = HAS_PURE_FLAG, [_TO_BOOL] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_TO_BOOL_BOOL] = HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_PASSTHROUGH_FLAG, - [_TO_BOOL_INT] = HAS_DEOPT_FLAG | HAS_EXIT_FLAG, - [_TO_BOOL_LIST] = HAS_DEOPT_FLAG | HAS_EXIT_FLAG, - [_TO_BOOL_NONE] = HAS_DEOPT_FLAG | HAS_EXIT_FLAG, - [_TO_BOOL_STR] = HAS_DEOPT_FLAG | HAS_EXIT_FLAG, + [_TO_BOOL_BOOL] = HAS_EXIT_FLAG | HAS_PASSTHROUGH_FLAG, + [_TO_BOOL_INT] = HAS_EXIT_FLAG, + [_TO_BOOL_LIST] = HAS_EXIT_FLAG, + [_TO_BOOL_NONE] = HAS_EXIT_FLAG, + [_TO_BOOL_STR] = HAS_EXIT_FLAG, [_REPLACE_WITH_TRUE] = 0, [_UNARY_INVERT] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_GUARD_BOTH_INT] = HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_PASSTHROUGH_FLAG, + [_GUARD_BOTH_INT] = HAS_EXIT_FLAG | HAS_PASSTHROUGH_FLAG, [_BINARY_OP_MULTIPLY_INT] = HAS_ERROR_FLAG | HAS_PURE_FLAG, [_BINARY_OP_ADD_INT] = HAS_ERROR_FLAG | HAS_PURE_FLAG, [_BINARY_OP_SUBTRACT_INT] = HAS_ERROR_FLAG | HAS_PURE_FLAG, - [_GUARD_BOTH_FLOAT] = HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_PASSTHROUGH_FLAG, + [_GUARD_BOTH_FLOAT] = HAS_EXIT_FLAG | HAS_PASSTHROUGH_FLAG, [_BINARY_OP_MULTIPLY_FLOAT] = HAS_PURE_FLAG, [_BINARY_OP_ADD_FLOAT] = HAS_PURE_FLAG, [_BINARY_OP_SUBTRACT_FLOAT] = HAS_PURE_FLAG, - [_GUARD_BOTH_UNICODE] = HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_PASSTHROUGH_FLAG, + [_GUARD_BOTH_UNICODE] = HAS_EXIT_FLAG | HAS_PASSTHROUGH_FLAG, [_BINARY_OP_ADD_UNICODE] = HAS_ERROR_FLAG | HAS_PURE_FLAG, [_BINARY_SUBSCR] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_BINARY_SLICE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, @@ -83,13 +85,13 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_CALL_INTRINSIC_2] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_POP_FRAME] = HAS_ESCAPES_FLAG, [_GET_AITER] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_GET_ANEXT] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_GET_ANEXT] = HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, [_GET_AWAITABLE] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_POP_EXCEPT] = HAS_ESCAPES_FLAG, [_LOAD_ASSERTION_ERROR] = 0, [_LOAD_BUILD_CLASS] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_STORE_NAME] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_DELETE_NAME] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | 
HAS_ESCAPES_FLAG, + [_DELETE_NAME] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, [_UNPACK_SEQUENCE] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_UNPACK_SEQUENCE_TWO_TUPLE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, [_UNPACK_SEQUENCE_TUPLE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, @@ -98,19 +100,18 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_STORE_ATTR] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_DELETE_ATTR] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_STORE_GLOBAL] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_DELETE_GLOBAL] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_DELETE_GLOBAL] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, [_LOAD_LOCALS] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_LOAD_FROM_DICT_OR_GLOBALS] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_LOAD_NAME] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_LOAD_FROM_DICT_OR_GLOBALS] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, [_LOAD_GLOBAL] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_GUARD_GLOBALS_VERSION] = HAS_DEOPT_FLAG, [_GUARD_BUILTINS_VERSION] = HAS_DEOPT_FLAG, [_LOAD_GLOBAL_MODULE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, [_LOAD_GLOBAL_BUILTINS] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, - [_DELETE_FAST] = HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_ERROR_FLAG, - [_MAKE_CELL] = HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_DELETE_DEREF] = HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_LOAD_FROM_DICT_OR_DEREF] = HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_DELETE_FAST] = HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_MAKE_CELL] = HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, + [_DELETE_DEREF] = HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, + [_LOAD_FROM_DICT_OR_DEREF] = HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, [_LOAD_DEREF] = HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_STORE_DEREF] = HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ESCAPES_FLAG, [_COPY_FREE_VARS] = HAS_ARG_FLAG, @@ -119,7 +120,6 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_BUILD_LIST] = HAS_ARG_FLAG | HAS_ERROR_FLAG, [_LIST_EXTEND] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_SET_UPDATE] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_BUILD_SET] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_BUILD_MAP] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_SETUP_ANNOTATIONS] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_BUILD_CONST_KEY_MAP] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, @@ -129,7 +129,7 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_LOAD_SUPER_ATTR_ATTR] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_LOAD_SUPER_ATTR_METHOD] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_LOAD_ATTR] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_GUARD_TYPE_VERSION] = HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_PASSTHROUGH_FLAG, + [_GUARD_TYPE_VERSION] = HAS_EXIT_FLAG | HAS_PASSTHROUGH_FLAG, [_CHECK_MANAGED_OBJECT_HAS_VALUES] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, [_LOAD_ATTR_INSTANCE_VALUE_0] = 
HAS_DEOPT_FLAG, [_LOAD_ATTR_INSTANCE_VALUE_1] = HAS_DEOPT_FLAG, @@ -165,8 +165,8 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_MATCH_SEQUENCE] = 0, [_MATCH_KEYS] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_GET_ITER] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_GET_YIELD_FROM_ITER] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_FOR_ITER_TIER_TWO] = HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_GET_YIELD_FROM_ITER] = HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, + [_FOR_ITER_TIER_TWO] = HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, [_ITER_CHECK_LIST] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, [_GUARD_NOT_EXHAUSTED_LIST] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, [_ITER_NEXT_LIST] = 0, @@ -176,8 +176,6 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_ITER_CHECK_RANGE] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, [_GUARD_NOT_EXHAUSTED_RANGE] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, [_ITER_NEXT_RANGE] = HAS_ERROR_FLAG, - [_BEFORE_ASYNC_WITH] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_BEFORE_WITH] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_WITH_EXCEPT_START] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_PUSH_EXC_INFO] = 0, [_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, @@ -204,18 +202,18 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_CALL_TYPE_1] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, [_CALL_STR_1] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_CALL_TUPLE_1] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_EXIT_INIT_CHECK] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_EXIT_INIT_CHECK] = HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, [_CALL_BUILTIN_CLASS] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG, [_CALL_BUILTIN_O] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_CALL_BUILTIN_FAST] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_CALL_BUILTIN_FAST_WITH_KEYWORDS] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_CALL_LEN] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_CALL_ISINSTANCE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_CALL_LEN] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, + [_CALL_ISINSTANCE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, [_CALL_METHOD_DESCRIPTOR_O] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_CALL_METHOD_DESCRIPTOR_NOARGS] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_CALL_METHOD_DESCRIPTOR_FAST] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_MAKE_FUNCTION] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_MAKE_FUNCTION] = HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG, [_SET_FUNCTION_ATTRIBUTE] = HAS_ARG_FLAG | HAS_ESCAPES_FLAG, [_BUILD_SLICE] = HAS_ARG_FLAG | HAS_ERROR_FLAG, [_CONVERT_VALUE] = HAS_ARG_FLAG | HAS_ERROR_FLAG, @@ -224,14 +222,14 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_COPY] = HAS_ARG_FLAG | HAS_PURE_FLAG, [_BINARY_OP] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_SWAP] = HAS_ARG_FLAG | HAS_PURE_FLAG, - [_GUARD_IS_TRUE_POP] = HAS_DEOPT_FLAG | HAS_EXIT_FLAG, - [_GUARD_IS_FALSE_POP] = HAS_DEOPT_FLAG | HAS_EXIT_FLAG, - [_GUARD_IS_NONE_POP] = HAS_DEOPT_FLAG | HAS_EXIT_FLAG, - [_GUARD_IS_NOT_NONE_POP] = HAS_DEOPT_FLAG | 
HAS_EXIT_FLAG, + [_GUARD_IS_TRUE_POP] = HAS_EXIT_FLAG, + [_GUARD_IS_FALSE_POP] = HAS_EXIT_FLAG, + [_GUARD_IS_NONE_POP] = HAS_EXIT_FLAG, + [_GUARD_IS_NOT_NONE_POP] = HAS_EXIT_FLAG, [_JUMP_TO_TOP] = HAS_EVAL_BREAK_FLAG, [_SET_IP] = 0, [_SAVE_RETURN_OFFSET] = HAS_ARG_FLAG, - [_EXIT_TRACE] = HAS_DEOPT_FLAG | HAS_EXIT_FLAG, + [_EXIT_TRACE] = HAS_EXIT_FLAG, [_CHECK_VALIDITY] = HAS_DEOPT_FLAG, [_LOAD_CONST_INLINE] = HAS_PURE_FLAG, [_LOAD_CONST_INLINE_BORROW] = HAS_PURE_FLAG, @@ -240,10 +238,13 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_LOAD_CONST_INLINE_BORROW_WITH_NULL] = HAS_PURE_FLAG, [_CHECK_FUNCTION] = HAS_DEOPT_FLAG, [_INTERNAL_INCREMENT_OPT_COUNTER] = 0, - [_COLD_EXIT] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_COLD_EXIT] = HAS_ARG_FLAG | HAS_ESCAPES_FLAG, [_START_EXECUTOR] = 0, [_FATAL_ERROR] = HAS_ESCAPES_FLAG, [_CHECK_VALIDITY_AND_SET_IP] = HAS_DEOPT_FLAG, + [_DEOPT] = 0, + [_SIDE_EXIT] = 0, + [_ERROR_POP_N] = HAS_ARG_FLAG, }; const uint8_t _PyUop_Replication[MAX_UOP_ID+1] = { @@ -253,8 +254,6 @@ const uint8_t _PyUop_Replication[MAX_UOP_ID+1] = { }; const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = { - [_BEFORE_ASYNC_WITH] = "_BEFORE_ASYNC_WITH", - [_BEFORE_WITH] = "_BEFORE_WITH", [_BINARY_OP] = "_BINARY_OP", [_BINARY_OP_ADD_FLOAT] = "_BINARY_OP_ADD_FLOAT", [_BINARY_OP_ADD_INT] = "_BINARY_OP_ADD_INT", @@ -272,7 +271,6 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = { [_BUILD_CONST_KEY_MAP] = "_BUILD_CONST_KEY_MAP", [_BUILD_LIST] = "_BUILD_LIST", [_BUILD_MAP] = "_BUILD_MAP", - [_BUILD_SET] = "_BUILD_SET", [_BUILD_SLICE] = "_BUILD_SLICE", [_BUILD_STRING] = "_BUILD_STRING", [_BUILD_TUPLE] = "_BUILD_TUPLE", @@ -323,9 +321,11 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = { [_DELETE_GLOBAL] = "_DELETE_GLOBAL", [_DELETE_NAME] = "_DELETE_NAME", [_DELETE_SUBSCR] = "_DELETE_SUBSCR", + [_DEOPT] = "_DEOPT", [_DICT_MERGE] = "_DICT_MERGE", [_DICT_UPDATE] = "_DICT_UPDATE", [_END_SEND] = "_END_SEND", + [_ERROR_POP_N] = "_ERROR_POP_N", [_EXIT_INIT_CHECK] = "_EXIT_INIT_CHECK", [_EXIT_TRACE] = "_EXIT_TRACE", [_FATAL_ERROR] = "_FATAL_ERROR", @@ -416,7 +416,6 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = { [_LOAD_GLOBAL_BUILTINS] = "_LOAD_GLOBAL_BUILTINS", [_LOAD_GLOBAL_MODULE] = "_LOAD_GLOBAL_MODULE", [_LOAD_LOCALS] = "_LOAD_LOCALS", - [_LOAD_NAME] = "_LOAD_NAME", [_LOAD_SUPER_ATTR_ATTR] = "_LOAD_SUPER_ATTR_ATTR", [_LOAD_SUPER_ATTR_METHOD] = "_LOAD_SUPER_ATTR_METHOD", [_MAKE_CELL] = "_MAKE_CELL", @@ -442,6 +441,7 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = { [_SET_FUNCTION_ATTRIBUTE] = "_SET_FUNCTION_ATTRIBUTE", [_SET_IP] = "_SET_IP", [_SET_UPDATE] = "_SET_UPDATE", + [_SIDE_EXIT] = "_SIDE_EXIT", [_START_EXECUTOR] = "_START_EXECUTOR", [_STORE_ATTR] = "_STORE_ATTR", [_STORE_ATTR_INSTANCE_VALUE] = "_STORE_ATTR_INSTANCE_VALUE", @@ -481,6 +481,466 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = { [_UNPACK_SEQUENCE_TWO_TUPLE] = "_UNPACK_SEQUENCE_TWO_TUPLE", [_WITH_EXCEPT_START] = "_WITH_EXCEPT_START", }; +int _PyUop_num_popped(int opcode, int oparg) +{ + switch(opcode) { + case _NOP: + return 0; + case _RESUME_CHECK: + return 0; + case _LOAD_FAST_CHECK: + return 0; + case _LOAD_FAST_0: + return 0; + case _LOAD_FAST_1: + return 0; + case _LOAD_FAST_2: + return 0; + case _LOAD_FAST_3: + return 0; + case _LOAD_FAST_4: + return 0; + case _LOAD_FAST_5: + return 0; + case _LOAD_FAST_6: + return 0; + case _LOAD_FAST_7: + return 0; + case _LOAD_FAST: + return 0; + case _LOAD_FAST_AND_CLEAR: + return 0; + case _LOAD_FAST_LOAD_FAST: + return 0; + case 
_LOAD_CONST: + return 0; + case _STORE_FAST_0: + return 1; + case _STORE_FAST_1: + return 1; + case _STORE_FAST_2: + return 1; + case _STORE_FAST_3: + return 1; + case _STORE_FAST_4: + return 1; + case _STORE_FAST_5: + return 1; + case _STORE_FAST_6: + return 1; + case _STORE_FAST_7: + return 1; + case _STORE_FAST: + return 1; + case _STORE_FAST_LOAD_FAST: + return 1; + case _STORE_FAST_STORE_FAST: + return 2; + case _POP_TOP: + return 1; + case _PUSH_NULL: + return 0; + case _END_SEND: + return 2; + case _UNARY_NEGATIVE: + return 1; + case _UNARY_NOT: + return 1; + case _TO_BOOL: + return 1; + case _TO_BOOL_BOOL: + return 1; + case _TO_BOOL_INT: + return 1; + case _TO_BOOL_LIST: + return 1; + case _TO_BOOL_NONE: + return 1; + case _TO_BOOL_STR: + return 1; + case _REPLACE_WITH_TRUE: + return 1; + case _UNARY_INVERT: + return 1; + case _GUARD_BOTH_INT: + return 2; + case _BINARY_OP_MULTIPLY_INT: + return 2; + case _BINARY_OP_ADD_INT: + return 2; + case _BINARY_OP_SUBTRACT_INT: + return 2; + case _GUARD_BOTH_FLOAT: + return 2; + case _BINARY_OP_MULTIPLY_FLOAT: + return 2; + case _BINARY_OP_ADD_FLOAT: + return 2; + case _BINARY_OP_SUBTRACT_FLOAT: + return 2; + case _GUARD_BOTH_UNICODE: + return 2; + case _BINARY_OP_ADD_UNICODE: + return 2; + case _BINARY_SUBSCR: + return 2; + case _BINARY_SLICE: + return 3; + case _STORE_SLICE: + return 4; + case _BINARY_SUBSCR_LIST_INT: + return 2; + case _BINARY_SUBSCR_STR_INT: + return 2; + case _BINARY_SUBSCR_TUPLE_INT: + return 2; + case _BINARY_SUBSCR_DICT: + return 2; + case _LIST_APPEND: + return 2 + (oparg-1); + case _SET_ADD: + return 2 + (oparg-1); + case _STORE_SUBSCR: + return 3; + case _STORE_SUBSCR_LIST_INT: + return 3; + case _STORE_SUBSCR_DICT: + return 3; + case _DELETE_SUBSCR: + return 2; + case _CALL_INTRINSIC_1: + return 1; + case _CALL_INTRINSIC_2: + return 2; + case _POP_FRAME: + return 1; + case _GET_AITER: + return 1; + case _GET_ANEXT: + return 1; + case _GET_AWAITABLE: + return 1; + case _POP_EXCEPT: + return 1; + case _LOAD_ASSERTION_ERROR: + return 0; + case _LOAD_BUILD_CLASS: + return 0; + case _STORE_NAME: + return 1; + case _DELETE_NAME: + return 0; + case _UNPACK_SEQUENCE: + return 1; + case _UNPACK_SEQUENCE_TWO_TUPLE: + return 1; + case _UNPACK_SEQUENCE_TUPLE: + return 1; + case _UNPACK_SEQUENCE_LIST: + return 1; + case _UNPACK_EX: + return 1; + case _STORE_ATTR: + return 2; + case _DELETE_ATTR: + return 1; + case _STORE_GLOBAL: + return 1; + case _DELETE_GLOBAL: + return 0; + case _LOAD_LOCALS: + return 0; + case _LOAD_FROM_DICT_OR_GLOBALS: + return 1; + case _LOAD_GLOBAL: + return 0; + case _GUARD_GLOBALS_VERSION: + return 0; + case _GUARD_BUILTINS_VERSION: + return 0; + case _LOAD_GLOBAL_MODULE: + return 0; + case _LOAD_GLOBAL_BUILTINS: + return 0; + case _DELETE_FAST: + return 0; + case _MAKE_CELL: + return 0; + case _DELETE_DEREF: + return 0; + case _LOAD_FROM_DICT_OR_DEREF: + return 1; + case _LOAD_DEREF: + return 0; + case _STORE_DEREF: + return 1; + case _COPY_FREE_VARS: + return 0; + case _BUILD_STRING: + return oparg; + case _BUILD_TUPLE: + return oparg; + case _BUILD_LIST: + return oparg; + case _LIST_EXTEND: + return 2 + (oparg-1); + case _SET_UPDATE: + return 2 + (oparg-1); + case _BUILD_MAP: + return oparg*2; + case _SETUP_ANNOTATIONS: + return 0; + case _BUILD_CONST_KEY_MAP: + return 1 + oparg; + case _DICT_UPDATE: + return 2 + (oparg - 1); + case _DICT_MERGE: + return 5 + (oparg - 1); + case _MAP_ADD: + return 3 + (oparg - 1); + case _LOAD_SUPER_ATTR_ATTR: + return 3; + case _LOAD_SUPER_ATTR_METHOD: + return 
3; + case _LOAD_ATTR: + return 1; + case _GUARD_TYPE_VERSION: + return 1; + case _CHECK_MANAGED_OBJECT_HAS_VALUES: + return 1; + case _LOAD_ATTR_INSTANCE_VALUE_0: + return 1; + case _LOAD_ATTR_INSTANCE_VALUE_1: + return 1; + case _LOAD_ATTR_INSTANCE_VALUE: + return 1; + case _CHECK_ATTR_MODULE: + return 1; + case _LOAD_ATTR_MODULE: + return 1; + case _CHECK_ATTR_WITH_HINT: + return 1; + case _LOAD_ATTR_WITH_HINT: + return 1; + case _LOAD_ATTR_SLOT_0: + return 1; + case _LOAD_ATTR_SLOT_1: + return 1; + case _LOAD_ATTR_SLOT: + return 1; + case _CHECK_ATTR_CLASS: + return 1; + case _LOAD_ATTR_CLASS_0: + return 1; + case _LOAD_ATTR_CLASS_1: + return 1; + case _LOAD_ATTR_CLASS: + return 1; + case _GUARD_DORV_VALUES: + return 1; + case _STORE_ATTR_INSTANCE_VALUE: + return 2; + case _STORE_ATTR_SLOT: + return 2; + case _COMPARE_OP: + return 2; + case _COMPARE_OP_FLOAT: + return 2; + case _COMPARE_OP_INT: + return 2; + case _COMPARE_OP_STR: + return 2; + case _IS_OP: + return 2; + case _CONTAINS_OP: + return 2; + case _CONTAINS_OP_SET: + return 2; + case _CONTAINS_OP_DICT: + return 2; + case _CHECK_EG_MATCH: + return 2; + case _CHECK_EXC_MATCH: + return 2; + case _IS_NONE: + return 1; + case _GET_LEN: + return 1; + case _MATCH_CLASS: + return 3; + case _MATCH_MAPPING: + return 1; + case _MATCH_SEQUENCE: + return 1; + case _MATCH_KEYS: + return 2; + case _GET_ITER: + return 1; + case _GET_YIELD_FROM_ITER: + return 1; + case _FOR_ITER_TIER_TWO: + return 1; + case _ITER_CHECK_LIST: + return 1; + case _GUARD_NOT_EXHAUSTED_LIST: + return 1; + case _ITER_NEXT_LIST: + return 1; + case _ITER_CHECK_TUPLE: + return 1; + case _GUARD_NOT_EXHAUSTED_TUPLE: + return 1; + case _ITER_NEXT_TUPLE: + return 1; + case _ITER_CHECK_RANGE: + return 1; + case _GUARD_NOT_EXHAUSTED_RANGE: + return 1; + case _ITER_NEXT_RANGE: + return 1; + case _WITH_EXCEPT_START: + return 4; + case _PUSH_EXC_INFO: + return 1; + case _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT: + return 1; + case _GUARD_KEYS_VERSION: + return 1; + case _LOAD_ATTR_METHOD_WITH_VALUES: + return 1; + case _LOAD_ATTR_METHOD_NO_DICT: + return 1; + case _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES: + return 1; + case _LOAD_ATTR_NONDESCRIPTOR_NO_DICT: + return 1; + case _CHECK_ATTR_METHOD_LAZY_DICT: + return 1; + case _LOAD_ATTR_METHOD_LAZY_DICT: + return 1; + case _CHECK_PERIODIC: + return 0; + case _CHECK_CALL_BOUND_METHOD_EXACT_ARGS: + return 2 + oparg; + case _INIT_CALL_BOUND_METHOD_EXACT_ARGS: + return 2 + oparg; + case _CHECK_PEP_523: + return 0; + case _CHECK_FUNCTION_EXACT_ARGS: + return 2 + oparg; + case _CHECK_STACK_SPACE: + return 2 + oparg; + case _INIT_CALL_PY_EXACT_ARGS_0: + return 2 + oparg; + case _INIT_CALL_PY_EXACT_ARGS_1: + return 2 + oparg; + case _INIT_CALL_PY_EXACT_ARGS_2: + return 2 + oparg; + case _INIT_CALL_PY_EXACT_ARGS_3: + return 2 + oparg; + case _INIT_CALL_PY_EXACT_ARGS_4: + return 2 + oparg; + case _INIT_CALL_PY_EXACT_ARGS: + return 2 + oparg; + case _PUSH_FRAME: + return 1; + case _CALL_TYPE_1: + return 3; + case _CALL_STR_1: + return 3; + case _CALL_TUPLE_1: + return 3; + case _EXIT_INIT_CHECK: + return 1; + case _CALL_BUILTIN_CLASS: + return 2 + oparg; + case _CALL_BUILTIN_O: + return 2 + oparg; + case _CALL_BUILTIN_FAST: + return 2 + oparg; + case _CALL_BUILTIN_FAST_WITH_KEYWORDS: + return 2 + oparg; + case _CALL_LEN: + return 2 + oparg; + case _CALL_ISINSTANCE: + return 2 + oparg; + case _CALL_METHOD_DESCRIPTOR_O: + return 2 + oparg; + case _CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS: + return 2 + oparg; + case _CALL_METHOD_DESCRIPTOR_NOARGS: + 
return 2 + oparg; + case _CALL_METHOD_DESCRIPTOR_FAST: + return 2 + oparg; + case _MAKE_FUNCTION: + return 1; + case _SET_FUNCTION_ATTRIBUTE: + return 2; + case _BUILD_SLICE: + return 2 + ((oparg == 3) ? 1 : 0); + case _CONVERT_VALUE: + return 1; + case _FORMAT_SIMPLE: + return 1; + case _FORMAT_WITH_SPEC: + return 2; + case _COPY: + return 1 + (oparg-1); + case _BINARY_OP: + return 2; + case _SWAP: + return 2 + (oparg-2); + case _GUARD_IS_TRUE_POP: + return 1; + case _GUARD_IS_FALSE_POP: + return 1; + case _GUARD_IS_NONE_POP: + return 1; + case _GUARD_IS_NOT_NONE_POP: + return 1; + case _JUMP_TO_TOP: + return 0; + case _SET_IP: + return 0; + case _SAVE_RETURN_OFFSET: + return 0; + case _EXIT_TRACE: + return 0; + case _CHECK_VALIDITY: + return 0; + case _LOAD_CONST_INLINE: + return 0; + case _LOAD_CONST_INLINE_BORROW: + return 0; + case _POP_TOP_LOAD_CONST_INLINE_BORROW: + return 1; + case _LOAD_CONST_INLINE_WITH_NULL: + return 0; + case _LOAD_CONST_INLINE_BORROW_WITH_NULL: + return 0; + case _CHECK_FUNCTION: + return 0; + case _INTERNAL_INCREMENT_OPT_COUNTER: + return 1; + case _COLD_EXIT: + return 0; + case _START_EXECUTOR: + return 0; + case _FATAL_ERROR: + return 0; + case _CHECK_VALIDITY_AND_SET_IP: + return 0; + case _DEOPT: + return 0; + case _SIDE_EXIT: + return 0; + case _ERROR_POP_N: + return oparg; + default: + return -1; + } +} + #endif // NEED_OPCODE_METADATA diff --git a/Include/internal/pycore_warnings.h b/Include/internal/pycore_warnings.h index 9785d7cc467de2..114796df42b2b6 100644 --- a/Include/internal/pycore_warnings.h +++ b/Include/internal/pycore_warnings.h @@ -14,6 +14,7 @@ struct _warnings_runtime_state { PyObject *filters; /* List */ PyObject *once_registry; /* Dict */ PyObject *default_action; /* String */ + struct _PyMutex mutex; long filters_version; }; diff --git a/Include/interpreteridobject.h b/Include/interpreteridobject.h deleted file mode 100644 index 8432632f339e92..00000000000000 --- a/Include/interpreteridobject.h +++ /dev/null @@ -1,17 +0,0 @@ -#ifndef Py_INTERPRETERIDOBJECT_H -#define Py_INTERPRETERIDOBJECT_H - -#ifdef __cplusplus -extern "C" { -#endif - -#ifndef Py_LIMITED_API -# define Py_CPYTHON_INTERPRETERIDOBJECT_H -# include "cpython/interpreteridobject.h" -# undef Py_CPYTHON_INTERPRETERIDOBJECT_H -#endif - -#ifdef __cplusplus -} -#endif -#endif /* !Py_INTERPRETERIDOBJECT_H */ diff --git a/Include/longobject.h b/Include/longobject.h index 51005efff636fa..19104cd9d1bef9 100644 --- a/Include/longobject.h +++ b/Include/longobject.h @@ -40,7 +40,24 @@ PyAPI_FUNC(PyObject *) PyLong_GetInfo(void); #if !defined(SIZEOF_PID_T) || SIZEOF_PID_T == SIZEOF_INT #define _Py_PARSE_PID "i" #define PyLong_FromPid PyLong_FromLong -#define PyLong_AsPid PyLong_AsLong +# if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x030d0000 +# define PyLong_AsPid PyLong_AsInt +# elif SIZEOF_INT == SIZEOF_LONG +# define PyLong_AsPid PyLong_AsLong +# else +static inline int +PyLong_AsPid(PyObject *obj) +{ + int overflow; + long result = PyLong_AsLongAndOverflow(obj, &overflow); + if (overflow || result > INT_MAX || result < INT_MIN) { + PyErr_SetString(PyExc_OverflowError, + "Python int too large to convert to C int"); + return -1; + } + return (int)result; +} +# endif #elif SIZEOF_PID_T == SIZEOF_LONG #define _Py_PARSE_PID "l" #define PyLong_FromPid PyLong_FromLong diff --git a/Include/object.h b/Include/object.h index b0c0dba06ca139..96790844a7b9f0 100644 --- a/Include/object.h +++ b/Include/object.h @@ -1068,12 +1068,34 @@ static inline PyObject* _Py_XNewRef(PyObject *obj) 
#endif +#define Py_CONSTANT_NONE 0 +#define Py_CONSTANT_FALSE 1 +#define Py_CONSTANT_TRUE 2 +#define Py_CONSTANT_ELLIPSIS 3 +#define Py_CONSTANT_NOT_IMPLEMENTED 4 +#define Py_CONSTANT_ZERO 5 +#define Py_CONSTANT_ONE 6 +#define Py_CONSTANT_EMPTY_STR 7 +#define Py_CONSTANT_EMPTY_BYTES 8 +#define Py_CONSTANT_EMPTY_TUPLE 9 + +#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x030d0000 +PyAPI_FUNC(PyObject*) Py_GetConstant(unsigned int constant_id); +PyAPI_FUNC(PyObject*) Py_GetConstantBorrowed(unsigned int constant_id); +#endif + + /* _Py_NoneStruct is an object of undefined type which can be used in contexts where NULL (nil) is not suitable (since NULL often means 'error'). */ PyAPI_DATA(PyObject) _Py_NoneStruct; /* Don't use this directly */ -#define Py_None (&_Py_NoneStruct) + +#if defined(Py_LIMITED_API) && Py_LIMITED_API+0 >= 0x030D0000 +# define Py_None Py_GetConstantBorrowed(Py_CONSTANT_NONE) +#else +# define Py_None (&_Py_NoneStruct) +#endif // Test if an object is the None singleton, the same as "x is None" in Python. PyAPI_FUNC(int) Py_IsNone(PyObject *x); @@ -1087,7 +1109,12 @@ Py_NotImplemented is a singleton used to signal that an operation is not implemented for a given type combination. */ PyAPI_DATA(PyObject) _Py_NotImplementedStruct; /* Don't use this directly */ -#define Py_NotImplemented (&_Py_NotImplementedStruct) + +#if defined(Py_LIMITED_API) && Py_LIMITED_API+0 >= 0x030D0000 +# define Py_NotImplemented Py_GetConstantBorrowed(Py_CONSTANT_NOT_IMPLEMENTED) +#else +# define Py_NotImplemented (&_Py_NotImplementedStruct) +#endif /* Macro for returning Py_NotImplemented from a function */ #define Py_RETURN_NOTIMPLEMENTED return Py_NotImplemented @@ -1220,6 +1247,10 @@ static inline int PyType_CheckExact(PyObject *op) { # define PyType_CheckExact(op) PyType_CheckExact(_PyObject_CAST(op)) #endif +#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x030d0000 +PyAPI_FUNC(PyObject *) PyType_GetModuleByDef(PyTypeObject *, PyModuleDef *); +#endif + #ifdef __cplusplus } #endif diff --git a/Include/sliceobject.h b/Include/sliceobject.h index c13863f27c2e63..35e2ea254ca80a 100644 --- a/Include/sliceobject.h +++ b/Include/sliceobject.h @@ -8,7 +8,11 @@ extern "C" { PyAPI_DATA(PyObject) _Py_EllipsisObject; /* Don't use this directly */ -#define Py_Ellipsis (&_Py_EllipsisObject) +#if defined(Py_LIMITED_API) && Py_LIMITED_API+0 >= 0x030D0000 +# define Py_Ellipsis Py_GetConstantBorrowed(Py_CONSTANT_ELLIPSIS) +#else +# define Py_Ellipsis (&_Py_EllipsisObject) +#endif /* Slice object interface */ diff --git a/Include/weakrefobject.h b/Include/weakrefobject.h index 727ba6934bbacb..a6e71eb178b124 100644 --- a/Include/weakrefobject.h +++ b/Include/weakrefobject.h @@ -28,7 +28,10 @@ PyAPI_FUNC(PyObject *) PyWeakref_NewRef(PyObject *ob, PyAPI_FUNC(PyObject *) PyWeakref_NewProxy(PyObject *ob, PyObject *callback); Py_DEPRECATED(3.13) PyAPI_FUNC(PyObject *) PyWeakref_GetObject(PyObject *ref); + +#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x030D0000 PyAPI_FUNC(int) PyWeakref_GetRef(PyObject *ref, PyObject **pobj); +#endif #ifndef Py_LIMITED_API diff --git a/Lib/_ios_support.py b/Lib/_ios_support.py new file mode 100644 index 00000000000000..db3fe23e45bca0 --- /dev/null +++ b/Lib/_ios_support.py @@ -0,0 +1,71 @@ +import sys +try: + from ctypes import cdll, c_void_p, c_char_p, util +except ImportError: + # ctypes is an optional module. If it's not present, we're limited in what + # we can tell about the system, but we don't want to prevent the module + # from working. 
+ print("ctypes isn't available; iOS system calls will not be available") + objc = None +else: + # ctypes is available. Load the ObjC library, and wrap the objc_getClass, + # sel_registerName methods + lib = util.find_library("objc") + if lib is None: + # Failed to load the objc library + raise RuntimeError("ObjC runtime library couldn't be loaded") + + objc = cdll.LoadLibrary(lib) + objc.objc_getClass.restype = c_void_p + objc.objc_getClass.argtypes = [c_char_p] + objc.sel_registerName.restype = c_void_p + objc.sel_registerName.argtypes = [c_char_p] + + +def get_platform_ios(): + # Determine if this is a simulator using the multiarch value + is_simulator = sys.implementation._multiarch.endswith("simulator") + + # We can't use ctypes; abort + if not objc: + return None + + # Most of the methods return ObjC objects + objc.objc_msgSend.restype = c_void_p + # All the methods used have no arguments. + objc.objc_msgSend.argtypes = [c_void_p, c_void_p] + + # Equivalent of: + # device = [UIDevice currentDevice] + UIDevice = objc.objc_getClass(b"UIDevice") + SEL_currentDevice = objc.sel_registerName(b"currentDevice") + device = objc.objc_msgSend(UIDevice, SEL_currentDevice) + + # Equivalent of: + # device_systemVersion = [device systemVersion] + SEL_systemVersion = objc.sel_registerName(b"systemVersion") + device_systemVersion = objc.objc_msgSend(device, SEL_systemVersion) + + # Equivalent of: + # device_systemName = [device systemName] + SEL_systemName = objc.sel_registerName(b"systemName") + device_systemName = objc.objc_msgSend(device, SEL_systemName) + + # Equivalent of: + # device_model = [device model] + SEL_model = objc.sel_registerName(b"model") + device_model = objc.objc_msgSend(device, SEL_model) + + # UTF8String returns a const char*; + SEL_UTF8String = objc.sel_registerName(b"UTF8String") + objc.objc_msgSend.restype = c_char_p + + # Equivalent of: + # system = [device_systemName UTF8String] + # release = [device_systemVersion UTF8String] + # model = [device_model UTF8String] + system = objc.objc_msgSend(device_systemName, SEL_UTF8String).decode() + release = objc.objc_msgSend(device_systemVersion, SEL_UTF8String).decode() + model = objc.objc_msgSend(device_model, SEL_UTF8String).decode() + + return system, release, model, is_simulator diff --git a/Lib/asyncio/windows_events.py b/Lib/asyncio/windows_events.py index b62ea75fee3858..bf99bc271c7acd 100644 --- a/Lib/asyncio/windows_events.py +++ b/Lib/asyncio/windows_events.py @@ -8,6 +8,7 @@ import _overlapped import _winapi import errno +from functools import partial import math import msvcrt import socket @@ -324,13 +325,13 @@ def _run_forever_cleanup(self): if self._self_reading_future is not None: ov = self._self_reading_future._ov self._self_reading_future.cancel() - # self_reading_future was just cancelled so if it hasn't been - # finished yet, it never will be (it's possible that it has - # already finished and its callback is waiting in the queue, - # where it could still happen if the event loop is restarted). - # Unregister it otherwise IocpProactor.close will wait for it - # forever - if ov is not None: + # self_reading_future always uses IOCP, so even though it's + # been cancelled, we need to make sure that the IOCP message + # is received so that the kernel is not holding on to the + # memory, possibly causing memory corruption later. Only + # unregister it if IO is complete in all respects. Otherwise + # we need another _poll() later to complete the IO. 
+ if ov is not None and not ov.pending: self._proactor._unregister(ov) self._self_reading_future = None @@ -467,6 +468,18 @@ def finish_socket_func(trans, key, ov): else: raise + @classmethod + def _finish_recvfrom(cls, trans, key, ov, *, empty_result): + try: + return cls.finish_socket_func(trans, key, ov) + except OSError as exc: + # WSARecvFrom will report ERROR_PORT_UNREACHABLE when the same + # socket is used to send to an address that is not listening. + if exc.winerror == _overlapped.ERROR_PORT_UNREACHABLE: + return empty_result, None + else: + raise + def recv(self, conn, nbytes, flags=0): self._register_with_iocp(conn) ov = _overlapped.Overlapped(NULL) @@ -501,7 +514,8 @@ def recvfrom(self, conn, nbytes, flags=0): except BrokenPipeError: return self._result((b'', None)) - return self._register(ov, conn, self.finish_socket_func) + return self._register(ov, conn, partial(self._finish_recvfrom, + empty_result=b'')) def recvfrom_into(self, conn, buf, flags=0): self._register_with_iocp(conn) @@ -511,17 +525,8 @@ def recvfrom_into(self, conn, buf, flags=0): except BrokenPipeError: return self._result((0, None)) - def finish_recv(trans, key, ov): - try: - return ov.getresult() - except OSError as exc: - if exc.winerror in (_overlapped.ERROR_NETNAME_DELETED, - _overlapped.ERROR_OPERATION_ABORTED): - raise ConnectionResetError(*exc.args) - else: - raise - - return self._register(ov, conn, finish_recv) + return self._register(ov, conn, partial(self._finish_recvfrom, + empty_result=0)) def sendto(self, conn, buf, flags=0, addr=None): self._register_with_iocp(conn) diff --git a/Lib/configparser.py b/Lib/configparser.py index 241f10aee93ec4..8f182eec306b8b 100644 --- a/Lib/configparser.py +++ b/Lib/configparser.py @@ -961,102 +961,104 @@ def _read(self, fp, fpname): lineno = 0 indent_level = 0 e = None # None, or an exception - for lineno, line in enumerate(fp, start=1): - comment_start = sys.maxsize - # strip inline comments - inline_prefixes = {p: -1 for p in self._inline_comment_prefixes} - while comment_start == sys.maxsize and inline_prefixes: - next_prefixes = {} - for prefix, index in inline_prefixes.items(): - index = line.find(prefix, index+1) - if index == -1: - continue - next_prefixes[prefix] = index - if index == 0 or (index > 0 and line[index-1].isspace()): - comment_start = min(comment_start, index) - inline_prefixes = next_prefixes - # strip full line comments - for prefix in self._comment_prefixes: - if line.strip().startswith(prefix): - comment_start = 0 - break - if comment_start == sys.maxsize: - comment_start = None - value = line[:comment_start].strip() - if not value: - if self._empty_lines_in_values: - # add empty line to the value, but only if there was no - # comment on the line - if (comment_start is None and - cursect is not None and - optname and - cursect[optname] is not None): - cursect[optname].append('') # newlines added at join - else: - # empty line marks end of value - indent_level = sys.maxsize - continue - # continuation line? - first_nonspace = self.NONSPACECRE.search(line) - cur_indent_level = first_nonspace.start() if first_nonspace else 0 - if (cursect is not None and optname and - cur_indent_level > indent_level): - if cursect[optname] is None: - raise MultilineContinuationError(fpname, lineno, line) - cursect[optname].append(value) - # a section header or option header? - else: - indent_level = cur_indent_level - # is it a section header? 
- mo = self.SECTCRE.match(value) - if mo: - sectname = mo.group('header') - if sectname in self._sections: - if self._strict and sectname in elements_added: - raise DuplicateSectionError(sectname, fpname, - lineno) - cursect = self._sections[sectname] - elements_added.add(sectname) - elif sectname == self.default_section: - cursect = self._defaults + try: + for lineno, line in enumerate(fp, start=1): + comment_start = sys.maxsize + # strip inline comments + inline_prefixes = {p: -1 for p in self._inline_comment_prefixes} + while comment_start == sys.maxsize and inline_prefixes: + next_prefixes = {} + for prefix, index in inline_prefixes.items(): + index = line.find(prefix, index+1) + if index == -1: + continue + next_prefixes[prefix] = index + if index == 0 or (index > 0 and line[index-1].isspace()): + comment_start = min(comment_start, index) + inline_prefixes = next_prefixes + # strip full line comments + for prefix in self._comment_prefixes: + if line.strip().startswith(prefix): + comment_start = 0 + break + if comment_start == sys.maxsize: + comment_start = None + value = line[:comment_start].strip() + if not value: + if self._empty_lines_in_values: + # add empty line to the value, but only if there was no + # comment on the line + if (comment_start is None and + cursect is not None and + optname and + cursect[optname] is not None): + cursect[optname].append('') # newlines added at join else: - cursect = self._dict() - self._sections[sectname] = cursect - self._proxies[sectname] = SectionProxy(self, sectname) - elements_added.add(sectname) - # So sections can't start with a continuation line - optname = None - # no section header in the file? - elif cursect is None: - raise MissingSectionHeaderError(fpname, lineno, line) - # an option line? + # empty line marks end of value + indent_level = sys.maxsize + continue + # continuation line? + first_nonspace = self.NONSPACECRE.search(line) + cur_indent_level = first_nonspace.start() if first_nonspace else 0 + if (cursect is not None and optname and + cur_indent_level > indent_level): + if cursect[optname] is None: + raise MultilineContinuationError(fpname, lineno, line) + cursect[optname].append(value) + # a section header or option header? else: - mo = self._optcre.match(value) + indent_level = cur_indent_level + # is it a section header? + mo = self.SECTCRE.match(value) if mo: - optname, vi, optval = mo.group('option', 'vi', 'value') - if not optname: - e = self._handle_error(e, fpname, lineno, line) - optname = self.optionxform(optname.rstrip()) - if (self._strict and - (sectname, optname) in elements_added): - raise DuplicateOptionError(sectname, optname, - fpname, lineno) - elements_added.add((sectname, optname)) - # This check is fine because the OPTCRE cannot - # match if it would set optval to None - if optval is not None: - optval = optval.strip() - cursect[optname] = [optval] + sectname = mo.group('header') + if sectname in self._sections: + if self._strict and sectname in elements_added: + raise DuplicateSectionError(sectname, fpname, + lineno) + cursect = self._sections[sectname] + elements_added.add(sectname) + elif sectname == self.default_section: + cursect = self._defaults else: - # valueless option handling - cursect[optname] = None + cursect = self._dict() + self._sections[sectname] = cursect + self._proxies[sectname] = SectionProxy(self, sectname) + elements_added.add(sectname) + # So sections can't start with a continuation line + optname = None + # no section header in the file? 
+ elif cursect is None: + raise MissingSectionHeaderError(fpname, lineno, line) + # an option line? else: - # a non-fatal parsing error occurred. set up the - # exception but keep going. the exception will be - # raised at the end of the file and will contain a - # list of all bogus lines - e = self._handle_error(e, fpname, lineno, line) - self._join_multiline_values() + mo = self._optcre.match(value) + if mo: + optname, vi, optval = mo.group('option', 'vi', 'value') + if not optname: + e = self._handle_error(e, fpname, lineno, line) + optname = self.optionxform(optname.rstrip()) + if (self._strict and + (sectname, optname) in elements_added): + raise DuplicateOptionError(sectname, optname, + fpname, lineno) + elements_added.add((sectname, optname)) + # This check is fine because the OPTCRE cannot + # match if it would set optval to None + if optval is not None: + optval = optval.strip() + cursect[optname] = [optval] + else: + # valueless option handling + cursect[optname] = None + else: + # a non-fatal parsing error occurred. set up the + # exception but keep going. the exception will be + # raised at the end of the file and will contain a + # list of all bogus lines + e = self._handle_error(e, fpname, lineno, line) + finally: + self._join_multiline_values() # if any parsing errors occurred, raise an exception if e: raise e diff --git a/Lib/ctypes/__init__.py b/Lib/ctypes/__init__.py index f63e31a3fb0107..36b2af7f2a0d66 100644 --- a/Lib/ctypes/__init__.py +++ b/Lib/ctypes/__init__.py @@ -348,6 +348,17 @@ def __init__(self, name, mode=DEFAULT_MODE, handle=None, winmode=None): if name: name = _os.fspath(name) + + # If the filename that has been provided is an iOS/tvOS/watchOS + # .fwork file, dereference the location to the true origin of the + # binary. + if name.endswith(".fwork"): + with open(name) as f: + name = _os.path.join( + _os.path.dirname(_sys.executable), + f.read().strip() + ) + self._name = name flags = self._func_flags_ if use_errno: diff --git a/Lib/ctypes/util.py b/Lib/ctypes/util.py index c550883e7c7d4b..117bf06cb01013 100644 --- a/Lib/ctypes/util.py +++ b/Lib/ctypes/util.py @@ -67,7 +67,7 @@ def find_library(name): return fname return None -elif os.name == "posix" and sys.platform == "darwin": +elif os.name == "posix" and sys.platform in {"darwin", "ios", "tvos", "watchos"}: from ctypes.macholib.dyld import dyld_find as _dyld_find def find_library(name): possible = ['lib%s.dylib' % name, @@ -89,6 +89,15 @@ def find_library(name): from ctypes._aix import find_library +elif sys.platform == "android": + def find_library(name): + directory = "/system/lib" + if "64" in os.uname().machine: + directory += "64" + + fname = f"{directory}/lib{name}.so" + return fname if os.path.isfile(fname) else None + elif os.name == "posix": # Andreas Degert's find functions, using gcc, /sbin/ldconfig, objdump import re, tempfile diff --git a/Lib/dataclasses.py b/Lib/dataclasses.py index 45ce5a98b51ae0..3acd03cd865234 100644 --- a/Lib/dataclasses.py +++ b/Lib/dataclasses.py @@ -426,32 +426,95 @@ def _tuple_str(obj_name, fields): return f'({",".join([f"{obj_name}.{f.name}" for f in fields])},)' -def _create_fn(name, args, body, *, globals=None, locals=None, - return_type=MISSING): - # Note that we may mutate locals. Callers beware! - # The only callers are internal to this module, so no - # worries about external callers. 
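As an aside (not part of the patch): the Android branch added to `ctypes.util.find_library()` above simply probes `/system/lib` or `/system/lib64` for `lib<name>.so`. A hedged sketch of what a lookup might resolve to, using the well-known Android system library `log` purely as an example:

```python
import ctypes.util

# On a 64-bit Android device this would typically resolve to
# "/system/lib64/liblog.so"; on other platforms the call falls through to the
# platform's usual search mechanism and may return None.
path = ctypes.util.find_library("log")
print(path)
```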
- if locals is None: - locals = {} - return_annotation = '' - if return_type is not MISSING: - locals['__dataclass_return_type__'] = return_type - return_annotation = '->__dataclass_return_type__' - args = ','.join(args) - body = '\n'.join(f' {b}' for b in body) - - # Compute the text of the entire function. - txt = f' def {name}({args}){return_annotation}:\n{body}' - - # Free variables in exec are resolved in the global namespace. - # The global namespace we have is user-provided, so we can't modify it for - # our purposes. So we put the things we need into locals and introduce a - # scope to allow the function we're creating to close over them. - local_vars = ', '.join(locals.keys()) - txt = f"def __create_fn__({local_vars}):\n{txt}\n return {name}" - ns = {} - exec(txt, globals, ns) - return ns['__create_fn__'](**locals) +class _FuncBuilder: + def __init__(self, globals): + self.names = [] + self.src = [] + self.globals = globals + self.locals = {} + self.overwrite_errors = {} + self.unconditional_adds = {} + + def add_fn(self, name, args, body, *, locals=None, return_type=MISSING, + overwrite_error=False, unconditional_add=False, decorator=None): + if locals is not None: + self.locals.update(locals) + + # Keep track if this method is allowed to be overwritten if it already + # exists in the class. The error is method-specific, so keep it with + # the name. We'll use this when we generate all of the functions in + # the add_fns_to_class call. overwrite_error is either True, in which + # case we'll raise an error, or it's a string, in which case we'll + # raise an error and append this string. + if overwrite_error: + self.overwrite_errors[name] = overwrite_error + + # Should this function always overwrite anything that's already in the + # class? The default is to not overwrite a function that already + # exists. + if unconditional_add: + self.unconditional_adds[name] = True + + self.names.append(name) + + if return_type is not MISSING: + self.locals[f'__dataclass_{name}_return_type__'] = return_type + return_annotation = f'->__dataclass_{name}_return_type__' + else: + return_annotation = '' + args = ','.join(args) + body = '\n'.join(body) + + # Compute the text of the entire function, add it to the text we're generating. + self.src.append(f'{f' {decorator}\n' if decorator else ''} def {name}({args}){return_annotation}:\n{body}') + + def add_fns_to_class(self, cls): + # The source to all of the functions we're generating. + fns_src = '\n'.join(self.src) + + # The locals they use. + local_vars = ','.join(self.locals.keys()) + + # The names of all of the functions, used for the return value of the + # outer function. Need to handle the 0-tuple specially. + if len(self.names) == 0: + return_names = '()' + else: + return_names =f'({",".join(self.names)},)' + + # txt is the entire function we're going to execute, including the + # bodies of the functions we're defining. Here's a greatly simplified + # version: + # def __create_fn__(): + # def __init__(self, x, y): + # self.x = x + # self.y = y + # @recursive_repr + # def __repr__(self): + # return f"cls(x={self.x!r},y={self.y!r})" + # return __init__,__repr__ + + txt = f"def __create_fn__({local_vars}):\n{fns_src}\n return {return_names}" + ns = {} + exec(txt, self.globals, ns) + fns = ns['__create_fn__'](**self.locals) + + # Now that we've generated the functions, assign them into cls. 
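Illustrative sketch (not from the patch): a minimal, standalone version of the `exec()`-based pattern `_FuncBuilder` builds on, compiling several methods in a single `exec()` call, closing over shared values via an outer `__create_fn__`, and handing the functions back for assignment onto the class. The `Point` and `DEFAULT` names are invented and the real generated source differs:

```python
src = (
    "def __create_fn__(DEFAULT):\n"
    " def __init__(self, x=DEFAULT):\n"
    "  self.x = x\n"
    " def __repr__(self):\n"
    "  return f'Point(x={self.x!r})'\n"
    " return (__init__, __repr__)\n"
)
ns = {}
exec(src, {}, ns)
init_fn, repr_fn = ns["__create_fn__"](DEFAULT=0)

class Point:
    pass

# Assign the generated methods onto the class, as add_fns_to_class does.
Point.__init__ = init_fn
Point.__repr__ = repr_fn
print(Point())  # Point(x=0)
```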
+ for name, fn in zip(self.names, fns): + fn.__qualname__ = f"{cls.__qualname__}.{fn.__name__}" + if self.unconditional_adds.get(name, False): + setattr(cls, name, fn) + else: + already_exists = _set_new_attribute(cls, name, fn) + + # See if it's an error to overwrite this particular function. + if already_exists and (msg_extra := self.overwrite_errors.get(name)): + error_msg = (f'Cannot overwrite attribute {fn.__name__} ' + f'in class {cls.__name__}') + if not msg_extra is True: + error_msg = f'{error_msg} {msg_extra}' + + raise TypeError(error_msg) def _field_assign(frozen, name, value, self_name): @@ -462,8 +525,8 @@ def _field_assign(frozen, name, value, self_name): # self_name is what "self" is called in this function: don't # hard-code "self", since that might be a field name. if frozen: - return f'__dataclass_builtins_object__.__setattr__({self_name},{name!r},{value})' - return f'{self_name}.{name}={value}' + return f' __dataclass_builtins_object__.__setattr__({self_name},{name!r},{value})' + return f' {self_name}.{name}={value}' def _field_init(f, frozen, globals, self_name, slots): @@ -546,7 +609,7 @@ def _init_param(f): def _init_fn(fields, std_fields, kw_only_fields, frozen, has_post_init, - self_name, globals, slots): + self_name, func_builder, slots): # fields contains both real fields and InitVar pseudo-fields. # Make sure we don't have fields without defaults following fields @@ -565,11 +628,11 @@ def _init_fn(fields, std_fields, kw_only_fields, frozen, has_post_init, raise TypeError(f'non-default argument {f.name!r} ' f'follows default argument {seen_default.name!r}') - locals = {f'__dataclass_type_{f.name}__': f.type for f in fields} - locals.update({ - '__dataclass_HAS_DEFAULT_FACTORY__': _HAS_DEFAULT_FACTORY, - '__dataclass_builtins_object__': object, - }) + locals = {**{f'__dataclass_type_{f.name}__': f.type for f in fields}, + **{'__dataclass_HAS_DEFAULT_FACTORY__': _HAS_DEFAULT_FACTORY, + '__dataclass_builtins_object__': object, + } + } body_lines = [] for f in fields: @@ -583,11 +646,11 @@ def _init_fn(fields, std_fields, kw_only_fields, frozen, has_post_init, if has_post_init: params_str = ','.join(f.name for f in fields if f._field_type is _FIELD_INITVAR) - body_lines.append(f'{self_name}.{_POST_INIT_NAME}({params_str})') + body_lines.append(f' {self_name}.{_POST_INIT_NAME}({params_str})') # If no body lines, use 'pass'. if not body_lines: - body_lines = ['pass'] + body_lines = [' pass'] _init_params = [_init_param(f) for f in std_fields] if kw_only_fields: @@ -596,68 +659,34 @@ def _init_fn(fields, std_fields, kw_only_fields, frozen, has_post_init, # (instead of just concatenting the lists together). 
_init_params += ['*'] _init_params += [_init_param(f) for f in kw_only_fields] - return _create_fn('__init__', - [self_name] + _init_params, - body_lines, - locals=locals, - globals=globals, - return_type=None) - - -def _repr_fn(fields, globals): - fn = _create_fn('__repr__', - ('self',), - ['return f"{self.__class__.__qualname__}(' + - ', '.join([f"{f.name}={{self.{f.name}!r}}" - for f in fields]) + - ')"'], - globals=globals) - return recursive_repr()(fn) - - -def _frozen_get_del_attr(cls, fields, globals): + func_builder.add_fn('__init__', + [self_name] + _init_params, + body_lines, + locals=locals, + return_type=None) + + +def _frozen_get_del_attr(cls, fields, func_builder): locals = {'cls': cls, 'FrozenInstanceError': FrozenInstanceError} condition = 'type(self) is cls' if fields: condition += ' or name in {' + ', '.join(repr(f.name) for f in fields) + '}' - return (_create_fn('__setattr__', - ('self', 'name', 'value'), - (f'if {condition}:', - ' raise FrozenInstanceError(f"cannot assign to field {name!r}")', - f'super(cls, self).__setattr__(name, value)'), - locals=locals, - globals=globals), - _create_fn('__delattr__', - ('self', 'name'), - (f'if {condition}:', - ' raise FrozenInstanceError(f"cannot delete field {name!r}")', - f'super(cls, self).__delattr__(name)'), - locals=locals, - globals=globals), - ) - - -def _cmp_fn(name, op, self_tuple, other_tuple, globals): - # Create a comparison function. If the fields in the object are - # named 'x' and 'y', then self_tuple is the string - # '(self.x,self.y)' and other_tuple is the string - # '(other.x,other.y)'. - - return _create_fn(name, - ('self', 'other'), - [ 'if other.__class__ is self.__class__:', - f' return {self_tuple}{op}{other_tuple}', - 'return NotImplemented'], - globals=globals) - -def _hash_fn(fields, globals): - self_tuple = _tuple_str('self', fields) - return _create_fn('__hash__', - ('self',), - [f'return hash({self_tuple})'], - globals=globals) + func_builder.add_fn('__setattr__', + ('self', 'name', 'value'), + (f' if {condition}:', + ' raise FrozenInstanceError(f"cannot assign to field {name!r}")', + f' super(cls, self).__setattr__(name, value)'), + locals=locals, + overwrite_error=True) + func_builder.add_fn('__delattr__', + ('self', 'name'), + (f' if {condition}:', + ' raise FrozenInstanceError(f"cannot delete field {name!r}")', + f' super(cls, self).__delattr__(name)'), + locals=locals, + overwrite_error=True) def _is_classvar(a_type, typing): @@ -834,19 +863,11 @@ def _get_field(cls, a_name, a_type, default_kw_only): return f -def _set_qualname(cls, value): - # Ensure that the functions returned from _create_fn uses the proper - # __qualname__ (the class they belong to). - if isinstance(value, FunctionType): - value.__qualname__ = f"{cls.__qualname__}.{value.__name__}" - return value - def _set_new_attribute(cls, name, value): # Never overwrites an existing attribute. Returns True if the # attribute already exists. if name in cls.__dict__: return True - _set_qualname(cls, value) setattr(cls, name, value) return False @@ -856,14 +877,22 @@ def _set_new_attribute(cls, name, value): # take. The common case is to do nothing, so instead of providing a # function that is a no-op, use None to signify that. 
-def _hash_set_none(cls, fields, globals): - return None +def _hash_set_none(cls, fields, func_builder): + # It's sort of a hack that I'm setting this here, instead of at + # func_builder.add_fns_to_class time, but since this is an exceptional case + # (it's not setting an attribute to a function, but to a scalar value), + # just do it directly here. I might come to regret this. + cls.__hash__ = None -def _hash_add(cls, fields, globals): +def _hash_add(cls, fields, func_builder): flds = [f for f in fields if (f.compare if f.hash is None else f.hash)] - return _set_qualname(cls, _hash_fn(flds, globals)) + self_tuple = _tuple_str('self', flds) + func_builder.add_fn('__hash__', + ('self',), + [f' return hash({self_tuple})'], + unconditional_add=True) -def _hash_exception(cls, fields, globals): +def _hash_exception(cls, fields, func_builder): # Raise an exception. raise TypeError(f'Cannot overwrite attribute __hash__ ' f'in class {cls.__name__}') @@ -1041,24 +1070,26 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen, (std_init_fields, kw_only_init_fields) = _fields_in_init_order(all_init_fields) + func_builder = _FuncBuilder(globals) + if init: # Does this class have a post-init function? has_post_init = hasattr(cls, _POST_INIT_NAME) - _set_new_attribute(cls, '__init__', - _init_fn(all_init_fields, - std_init_fields, - kw_only_init_fields, - frozen, - has_post_init, - # The name to use for the "self" - # param in __init__. Use "self" - # if possible. - '__dataclass_self__' if 'self' in fields - else 'self', - globals, - slots, - )) + _init_fn(all_init_fields, + std_init_fields, + kw_only_init_fields, + frozen, + has_post_init, + # The name to use for the "self" + # param in __init__. Use "self" + # if possible. + '__dataclass_self__' if 'self' in fields + else 'self', + func_builder, + slots, + ) + _set_new_attribute(cls, '__replace__', _replace) # Get the fields as a list, and include only real fields. This is @@ -1067,7 +1098,13 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen, if repr: flds = [f for f in field_list if f.repr] - _set_new_attribute(cls, '__repr__', _repr_fn(flds, globals)) + func_builder.add_fn('__repr__', + ('self',), + [' return f"{self.__class__.__qualname__}(' + + ', '.join([f"{f.name}={{self.{f.name}!r}}" + for f in flds]) + ')"'], + locals={'__dataclasses_recursive_repr': recursive_repr}, + decorator="@__dataclasses_recursive_repr()") if eq: # Create __eq__ method. There's no need for a __ne__ method, @@ -1075,14 +1112,13 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen, cmp_fields = (field for field in field_list if field.compare) terms = [f'self.{field.name}==other.{field.name}' for field in cmp_fields] field_comparisons = ' and '.join(terms) or 'True' - body = [f'if other.__class__ is self.__class__:', - f' return {field_comparisons}', - f'return NotImplemented'] - func = _create_fn('__eq__', - ('self', 'other'), - body, - globals=globals) - _set_new_attribute(cls, '__eq__', func) + func_builder.add_fn('__eq__', + ('self', 'other'), + [ ' if self is other:', + ' return True', + ' if other.__class__ is self.__class__:', + f' return {field_comparisons}', + ' return NotImplemented']) if order: # Create and set the ordering methods. 
@@ -1094,18 +1130,19 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen, ('__gt__', '>'), ('__ge__', '>='), ]: - if _set_new_attribute(cls, name, - _cmp_fn(name, op, self_tuple, other_tuple, - globals=globals)): - raise TypeError(f'Cannot overwrite attribute {name} ' - f'in class {cls.__name__}. Consider using ' - 'functools.total_ordering') + # Create a comparison function. If the fields in the object are + # named 'x' and 'y', then self_tuple is the string + # '(self.x,self.y)' and other_tuple is the string + # '(other.x,other.y)'. + func_builder.add_fn(name, + ('self', 'other'), + [ ' if other.__class__ is self.__class__:', + f' return {self_tuple}{op}{other_tuple}', + ' return NotImplemented'], + overwrite_error='Consider using functools.total_ordering') if frozen: - for fn in _frozen_get_del_attr(cls, field_list, globals): - if _set_new_attribute(cls, fn.__name__, fn): - raise TypeError(f'Cannot overwrite attribute {fn.__name__} ' - f'in class {cls.__name__}') + _frozen_get_del_attr(cls, field_list, func_builder) # Decide if/how we're going to create a hash function. hash_action = _hash_action[bool(unsafe_hash), @@ -1113,9 +1150,12 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen, bool(frozen), has_explicit_hash] if hash_action: - # No need to call _set_new_attribute here, since by the time - # we're here the overwriting is unconditional. - cls.__hash__ = hash_action(cls, field_list, globals) + cls.__hash__ = hash_action(cls, field_list, func_builder) + + # Generate the methods and add them to the class. This needs to be done + # before the __doc__ logic below, since inspect will look at the __init__ + # signature. + func_builder.add_fns_to_class(cls) if not getattr(cls, '__doc__'): # Create a class doc-string. @@ -1128,7 +1168,7 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen, cls.__doc__ = (cls.__name__ + text_sig) if match_args: - # I could probably compute this once + # I could probably compute this once. 
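For illustration (not part of the patch): the assignment just below this note sets `__match_args__` from the init fields, which is what allows positional patterns against dataclass instances. A small sketch of the user-visible effect:

```python
from dataclasses import dataclass

@dataclass
class Point:
    x: int
    y: int

print(Point.__match_args__)  # ('x', 'y')

match Point(1, 2):
    case Point(1, y):  # positional capture relies on __match_args__
        assert y == 2
```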
_set_new_attribute(cls, '__match_args__', tuple(f.name for f in std_init_fields)) @@ -1159,8 +1199,10 @@ def _dataclass_setstate(self, state): def _get_slots(cls): match cls.__dict__.get('__slots__'): + # A class which does not define __slots__ at all is equivalent + # to a class defining __slots__ = ('__dict__', '__weakref__') case None: - return + yield from ('__dict__', '__weakref__') case str(slot): yield slot # Slots may be any iterable, but we cannot handle an iterator diff --git a/Lib/dis.py b/Lib/dis.py index d146bcbb5097ef..111d624fc259c5 100644 --- a/Lib/dis.py +++ b/Lib/dis.py @@ -17,6 +17,8 @@ _specialized_opmap, ) +from _opcode import get_executor + __all__ = ["code_info", "dis", "disassemble", "distb", "disco", "findlinestarts", "findlabels", "show_code", "get_instructions", "Instruction", "Bytecode"] + _opcodes_all @@ -205,7 +207,27 @@ def _deoptop(op): return _all_opmap[deoptmap[name]] if name in deoptmap else op def _get_code_array(co, adaptive): - return co._co_code_adaptive if adaptive else co.co_code + if adaptive: + code = co._co_code_adaptive + res = [] + found = False + for i in range(0, len(code), 2): + op, arg = code[i], code[i+1] + if op == ENTER_EXECUTOR: + try: + ex = get_executor(co, i) + except ValueError: + ex = None + + if ex: + op, arg = ex.get_opcode(), ex.get_oparg() + found = True + + res.append(op.to_bytes()) + res.append(arg.to_bytes()) + return code if not found else b''.join(res) + else: + return co.co_code def code_info(x): """Formatted details of methods, functions, or code.""" @@ -514,8 +536,6 @@ def offset_from_jump_arg(self, op, arg, offset): argval = offset + 2 + signed_arg*2 caches = _get_cache_size(_all_opname[deop]) argval += 2 * caches - if deop == ENTER_EXECUTOR: - argval += 2 return argval return None @@ -680,8 +700,7 @@ def _parse_exception_table(code): def _is_backward_jump(op): return opname[op] in ('JUMP_BACKWARD', - 'JUMP_BACKWARD_NO_INTERRUPT', - 'ENTER_EXECUTOR') + 'JUMP_BACKWARD_NO_INTERRUPT') def _get_instructions_bytes(code, linestarts=None, line_offset=0, co_positions=None, original_code=None, arg_resolver=None): diff --git a/Lib/doctest.py b/Lib/doctest.py index 6049423b5147a5..7a9f4e40d814d6 100644 --- a/Lib/doctest.py +++ b/Lib/doctest.py @@ -1191,9 +1191,9 @@ class DocTestRunner: 2 tests in _TestClass 2 tests in _TestClass.__init__ 2 tests in _TestClass.get - 1 tests in _TestClass.square + 1 test in _TestClass.square 7 tests in 4 items. - 7 passed and 0 failed. + 7 passed. Test passed. 
TestResults(failed=0, attempted=7) @@ -1568,49 +1568,59 @@ def summarize(self, verbose=None): """ if verbose is None: verbose = self._verbose - notests = [] - passed = [] - failed = [] + + notests, passed, failed = [], [], [] total_tries = total_failures = total_skips = 0 - for item in self._stats.items(): - name, (failures, tries, skips) = item + + for name, (failures, tries, skips) in self._stats.items(): assert failures <= tries total_tries += tries total_failures += failures total_skips += skips + if tries == 0: notests.append(name) elif failures == 0: passed.append((name, tries)) else: - failed.append(item) + failed.append((name, (failures, tries, skips))) + if verbose: if notests: - print(f"{len(notests)} items had no tests:") + print(f"{_n_items(notests)} had no tests:") notests.sort() for name in notests: print(f" {name}") + if passed: - print(f"{len(passed)} items passed all tests:") - passed.sort() - for name, count in passed: - print(f" {count:3d} tests in {name}") + print(f"{_n_items(passed)} passed all tests:") + for name, count in sorted(passed): + s = "" if count == 1 else "s" + print(f" {count:3d} test{s} in {name}") + if failed: print(self.DIVIDER) - print(f"{len(failed)} items had failures:") - failed.sort() - for name, (failures, tries, skips) in failed: + print(f"{_n_items(failed)} had failures:") + for name, (failures, tries, skips) in sorted(failed): print(f" {failures:3d} of {tries:3d} in {name}") + if verbose: - print(f"{total_tries} tests in {len(self._stats)} items.") - print(f"{total_tries - total_failures} passed and {total_failures} failed.") + s = "" if total_tries == 1 else "s" + print(f"{total_tries} test{s} in {_n_items(self._stats)}.") + + and_f = f" and {total_failures} failed" if total_failures else "" + print(f"{total_tries - total_failures} passed{and_f}.") + if total_failures: - msg = f"***Test Failed*** {total_failures} failures" + s = "" if total_failures == 1 else "s" + msg = f"***Test Failed*** {total_failures} failure{s}" if total_skips: - msg = f"{msg} and {total_skips} skipped tests" + s = "" if total_skips == 1 else "s" + msg = f"{msg} and {total_skips} skipped test{s}" print(f"{msg}.") elif verbose: print("Test passed.") + return TestResults(total_failures, total_tries, skipped=total_skips) #///////////////////////////////////////////////////////////////// @@ -1627,6 +1637,15 @@ def merge(self, other): d[name] = (failures, tries, skips) +def _n_items(items: list) -> str: + """ + Helper to pluralise the number of items in a list. 
+ """ + n = len(items) + s = "" if n == 1 else "s" + return f"{n} item{s}" + + class OutputChecker: """ A class used to check the whether the actual output from a doctest diff --git a/Lib/enum.py b/Lib/enum.py index 5c5e711f9b078f..2a135e1b1f1826 100644 --- a/Lib/enum.py +++ b/Lib/enum.py @@ -2018,7 +2018,8 @@ def _test_simple_enum(checked_enum, simple_enum): + list(simple_enum._member_map_.keys()) ) for key in set(checked_keys + simple_keys): - if key in ('__module__', '_member_map_', '_value2member_map_', '__doc__'): + if key in ('__module__', '_member_map_', '_value2member_map_', '__doc__', + '__static_attributes__'): # keys known to be different, or very long continue elif key in member_names: diff --git a/Lib/genericpath.py b/Lib/genericpath.py index 1bd5b3897c3af9..ba7b0a13c7f81d 100644 --- a/Lib/genericpath.py +++ b/Lib/genericpath.py @@ -7,8 +7,8 @@ import stat __all__ = ['commonprefix', 'exists', 'getatime', 'getctime', 'getmtime', - 'getsize', 'isdir', 'isfile', 'islink', 'samefile', 'sameopenfile', - 'samestat'] + 'getsize', 'isdevdrive', 'isdir', 'isfile', 'isjunction', 'islink', + 'lexists', 'samefile', 'sameopenfile', 'samestat'] # Does a path exist? @@ -22,6 +22,15 @@ def exists(path): return True +# Being true for dangling symbolic links is also useful. +def lexists(path): + """Test whether a path exists. Returns True for broken symbolic links""" + try: + os.lstat(path) + except (OSError, ValueError): + return False + return True + # This follows symbolic links, so both islink() and isdir() can be true # for the same path on systems that support symlinks def isfile(path): @@ -57,6 +66,21 @@ def islink(path): return stat.S_ISLNK(st.st_mode) +# Is a path a junction? +def isjunction(path): + """Test whether a path is a junction + Junctions are not supported on the current platform""" + os.fspath(path) + return False + + +def isdevdrive(path): + """Determines whether the specified path is on a Windows Dev Drive. + Dev Drives are not supported on the current platform""" + os.fspath(path) + return False + + def getsize(filename): """Return the size of a file, reported by os.stat().""" return os.stat(filename).st_size diff --git a/Lib/glob.py b/Lib/glob.py index 473502c67336f9..d59641195a1c41 100644 --- a/Lib/glob.py +++ b/Lib/glob.py @@ -104,8 +104,8 @@ def _iglob(pathname, root_dir, dir_fd, recursive, dironly, def _glob1(dirname, pattern, dir_fd, dironly, include_hidden=False): names = _listdir(dirname, dir_fd, dironly) - if include_hidden or not _ishidden(pattern): - names = (x for x in names if include_hidden or not _ishidden(x)) + if not (include_hidden or _ishidden(pattern)): + names = (x for x in names if not _ishidden(x)) return fnmatch.filter(names, pattern) def _glob0(dirname, basename, dir_fd, dironly, include_hidden=False): diff --git a/Lib/gzip.py b/Lib/gzip.py index fda93e0261e028..1d6faaa82c6a68 100644 --- a/Lib/gzip.py +++ b/Lib/gzip.py @@ -178,9 +178,10 @@ def __init__(self, filename=None, mode=None, and 9 is slowest and produces the most compression. 0 is no compression at all. The default is 9. - The mtime argument is an optional numeric timestamp to be written - to the last modification time field in the stream when compressing. - If omitted or None, the current time is used. + The optional mtime argument is the timestamp requested by gzip. The time + is in Unix format, i.e., seconds since 00:00:00 UTC, January 1, 1970. + If mtime is omitted or None, the current time is used. Use mtime = 0 + to generate a compressed stream that does not depend on creation time. 
""" diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py index b26be8583d0f81..0a11dc9efc252c 100644 --- a/Lib/importlib/_bootstrap_external.py +++ b/Lib/importlib/_bootstrap_external.py @@ -52,7 +52,7 @@ # Bootstrap-related code ###################################################### _CASE_INSENSITIVE_PLATFORMS_STR_KEY = 'win', -_CASE_INSENSITIVE_PLATFORMS_BYTES_KEY = 'cygwin', 'darwin' +_CASE_INSENSITIVE_PLATFORMS_BYTES_KEY = 'cygwin', 'darwin', 'ios', 'tvos', 'watchos' _CASE_INSENSITIVE_PLATFORMS = (_CASE_INSENSITIVE_PLATFORMS_BYTES_KEY + _CASE_INSENSITIVE_PLATFORMS_STR_KEY) @@ -81,6 +81,11 @@ def _pack_uint32(x): return (int(x) & 0xFFFFFFFF).to_bytes(4, 'little') +def _unpack_uint64(data): + """Convert 8 bytes in little-endian to an integer.""" + assert len(data) == 8 + return int.from_bytes(data, 'little') + def _unpack_uint32(data): """Convert 4 bytes in little-endian to an integer.""" assert len(data) == 4 @@ -1714,6 +1719,46 @@ def __repr__(self): return f'FileFinder({self.path!r})' +class AppleFrameworkLoader(ExtensionFileLoader): + """A loader for modules that have been packaged as frameworks for + compatibility with Apple's iOS App Store policies. + """ + def create_module(self, spec): + # If the ModuleSpec has been created by the FileFinder, it will have + # been created with an origin pointing to the .fwork file. We need to + # redirect this to the location in the Frameworks folder, using the + # content of the .fwork file. + if spec.origin.endswith(".fwork"): + with _io.FileIO(spec.origin, 'r') as file: + framework_binary = file.read().decode().strip() + bundle_path = _path_split(sys.executable)[0] + spec.origin = _path_join(bundle_path, framework_binary) + + # If the loader is created based on the spec for a loaded module, the + # path will be pointing at the Framework location. If this occurs, + # get the original .fwork location to use as the module's __file__. + if self.path.endswith(".fwork"): + path = self.path + else: + with _io.FileIO(self.path + ".origin", 'r') as file: + origin = file.read().decode().strip() + bundle_path = _path_split(sys.executable)[0] + path = _path_join(bundle_path, origin) + + module = _bootstrap._call_with_frames_removed(_imp.create_dynamic, spec) + + _bootstrap._verbose_message( + "Apple framework extension module {!r} loaded from {!r} (path {!r})", + spec.name, + spec.origin, + path, + ) + + # Ensure that the __file__ points at the .fwork location + module.__file__ = path + + return module + # Import setup ############################################################### def _fix_up_module(ns, name, pathname, cpathname=None): @@ -1746,10 +1791,17 @@ def _get_supported_file_loaders(): Each item is a tuple (loader, suffixes). 
""" - extensions = ExtensionFileLoader, _imp.extension_suffixes() + if sys.platform in {"ios", "tvos", "watchos"}: + extension_loaders = [(AppleFrameworkLoader, [ + suffix.replace(".so", ".fwork") + for suffix in _imp.extension_suffixes() + ])] + else: + extension_loaders = [] + extension_loaders.append((ExtensionFileLoader, _imp.extension_suffixes())) source = SourceFileLoader, SOURCE_SUFFIXES bytecode = SourcelessFileLoader, BYTECODE_SUFFIXES - return [extensions, source, bytecode] + return extension_loaders + [source, bytecode] def _set_bootstrap_module(_bootstrap_module): diff --git a/Lib/importlib/abc.py b/Lib/importlib/abc.py index b56fa94eb9c135..37fef357fe2c0c 100644 --- a/Lib/importlib/abc.py +++ b/Lib/importlib/abc.py @@ -180,7 +180,11 @@ def get_code(self, fullname): else: return self.source_to_code(source, path) -_register(ExecutionLoader, machinery.ExtensionFileLoader) +_register( + ExecutionLoader, + machinery.ExtensionFileLoader, + machinery.AppleFrameworkLoader, +) class FileLoader(_bootstrap_external.FileLoader, ResourceLoader, ExecutionLoader): diff --git a/Lib/importlib/machinery.py b/Lib/importlib/machinery.py index d9a19a13f7b275..fbd30b159fb752 100644 --- a/Lib/importlib/machinery.py +++ b/Lib/importlib/machinery.py @@ -12,6 +12,7 @@ from ._bootstrap_external import SourceFileLoader from ._bootstrap_external import SourcelessFileLoader from ._bootstrap_external import ExtensionFileLoader +from ._bootstrap_external import AppleFrameworkLoader from ._bootstrap_external import NamespaceLoader diff --git a/Lib/importlib/metadata/__init__.py b/Lib/importlib/metadata/__init__.py index 41c2a4a6088b5d..245f905737cb15 100644 --- a/Lib/importlib/metadata/__init__.py +++ b/Lib/importlib/metadata/__init__.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os import re import abc @@ -16,7 +18,7 @@ import posixpath import collections -from . import _adapters, _meta +from . import _meta from ._collections import FreezableDefaultDict, Pair from ._functools import method_cache, pass_none from ._itertools import always_iterable, unique_everseen @@ -26,7 +28,7 @@ from importlib import import_module from importlib.abc import MetaPathFinder from itertools import starmap -from typing import Iterable, List, Mapping, Optional, Set, Union, cast +from typing import Any, Iterable, List, Mapping, Match, Optional, Set, cast __all__ = [ 'Distribution', @@ -163,17 +165,17 @@ class EntryPoint: value: str group: str - dist: Optional['Distribution'] = None + dist: Optional[Distribution] = None def __init__(self, name: str, value: str, group: str) -> None: vars(self).update(name=name, value=value, group=group) - def load(self): + def load(self) -> Any: """Load the entry point from its definition. If only a module is indicated by the value, return that module. Otherwise, return the named object. """ - match = self.pattern.match(self.value) + match = cast(Match, self.pattern.match(self.value)) module = import_module(match.group('module')) attrs = filter(None, (match.group('attr') or '').split('.')) return functools.reduce(getattr, attrs, module) @@ -268,7 +270,7 @@ def __repr__(self): """ return '%s(%r)' % (self.__class__.__name__, tuple(self)) - def select(self, **params): + def select(self, **params) -> EntryPoints: """ Select entry points from self that match the given parameters (typically group and/or name). 
@@ -304,19 +306,17 @@ def _from_text(text): class PackagePath(pathlib.PurePosixPath): """A reference to a path in a package""" - hash: Optional["FileHash"] + hash: Optional[FileHash] size: int - dist: "Distribution" + dist: Distribution def read_text(self, encoding: str = 'utf-8') -> str: # type: ignore[override] - with self.locate().open(encoding=encoding) as stream: - return stream.read() + return self.locate().read_text(encoding=encoding) def read_binary(self) -> bytes: - with self.locate().open('rb') as stream: - return stream.read() + return self.locate().read_bytes() - def locate(self) -> pathlib.Path: + def locate(self) -> SimplePath: """Return a path-like object for this path""" return self.dist.locate_file(self) @@ -330,6 +330,7 @@ def __repr__(self) -> str: class DeprecatedNonAbstract: + # Required until Python 3.14 def __new__(cls, *args, **kwargs): all_names = { name for subclass in inspect.getmro(cls) for name in vars(subclass) @@ -349,25 +350,48 @@ def __new__(cls, *args, **kwargs): class Distribution(DeprecatedNonAbstract): - """A Python distribution package.""" + """ + An abstract Python distribution package. + + Custom providers may derive from this class and define + the abstract methods to provide a concrete implementation + for their environment. Some providers may opt to override + the default implementation of some properties to bypass + the file-reading mechanism. + """ @abc.abstractmethod def read_text(self, filename) -> Optional[str]: """Attempt to load metadata file given by the name. + Python distribution metadata is organized by blobs of text + typically represented as "files" in the metadata directory + (e.g. package-1.0.dist-info). These files include things + like: + + - METADATA: The distribution metadata including fields + like Name and Version and Description. + - entry_points.txt: A series of entry points as defined in + `the entry points spec `_. + - RECORD: A record of files according to + `this recording spec `_. + + A package may provide any set of files, including those + not listed here or none at all. + :param filename: The name of the file in the distribution info. :return: The text if found, otherwise None. """ @abc.abstractmethod - def locate_file(self, path: Union[str, os.PathLike[str]]) -> pathlib.Path: + def locate_file(self, path: str | os.PathLike[str]) -> SimplePath: """ - Given a path to a file in this distribution, return a path + Given a path to a file in this distribution, return a SimplePath to it. """ @classmethod - def from_name(cls, name: str) -> "Distribution": + def from_name(cls, name: str) -> Distribution: """Return the Distribution for the given package name. :param name: The name of the distribution package to search for. @@ -385,16 +409,18 @@ def from_name(cls, name: str) -> "Distribution": raise PackageNotFoundError(name) @classmethod - def discover(cls, **kwargs) -> Iterable["Distribution"]: + def discover( + cls, *, context: Optional[DistributionFinder.Context] = None, **kwargs + ) -> Iterable[Distribution]: """Return an iterable of Distribution objects for all packages. Pass a ``context`` or pass keyword arguments for constructing a context. :context: A ``DistributionFinder.Context`` object. - :return: Iterable of Distribution objects for all packages. + :return: Iterable of Distribution objects for packages matching + the context. 
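Illustrative sketch (not part of the patch): the smallest useful shape of a custom provider as described in the `Distribution` docstring above, deriving from `Distribution` and implementing `read_text()` and `locate_file()`. The in-memory metadata and class name are invented:

```python
import pathlib
from importlib.metadata import Distribution

class InMemoryDistribution(Distribution):
    _files = {"METADATA": "Metadata-Version: 2.1\nName: demo\nVersion: 1.0\n"}

    def read_text(self, filename):
        return self._files.get(filename)

    def locate_file(self, path):
        return pathlib.Path(path)

dist = InMemoryDistribution()
print(dist.metadata["Name"], dist.version)  # demo 1.0
```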
""" - context = kwargs.pop('context', None) if context and kwargs: raise ValueError("cannot accept context and kwargs") context = context or DistributionFinder.Context(**kwargs) @@ -403,8 +429,8 @@ def discover(cls, **kwargs) -> Iterable["Distribution"]: ) @staticmethod - def at(path: Union[str, os.PathLike[str]]) -> "Distribution": - """Return a Distribution for the indicated metadata path + def at(path: str | os.PathLike[str]) -> Distribution: + """Return a Distribution for the indicated metadata path. :param path: a string or path-like object :return: a concrete Distribution instance for the path @@ -413,7 +439,7 @@ def at(path: Union[str, os.PathLike[str]]) -> "Distribution": @staticmethod def _discover_resolvers(): - """Search the meta_path for resolvers.""" + """Search the meta_path for resolvers (MetadataPathFinders).""" declared = ( getattr(finder, 'find_distributions', None) for finder in sys.meta_path ) @@ -424,8 +450,15 @@ def metadata(self) -> _meta.PackageMetadata: """Return the parsed metadata for this Distribution. The returned object will have keys that name the various bits of - metadata. See PEP 566 for details. + metadata per the + `Core metadata specifications `_. + + Custom providers may provide the METADATA file or override this + property. """ + # deferred for performance (python/cpython#109829) + from . import _adapters + opt_text = ( self.read_text('METADATA') or self.read_text('PKG-INFO') @@ -454,6 +487,12 @@ def version(self) -> str: @property def entry_points(self) -> EntryPoints: + """ + Return EntryPoints for this distribution. + + Custom providers may provide the ``entry_points.txt`` file + or override this property. + """ return EntryPoints._from_text_for(self.read_text('entry_points.txt'), self) @property @@ -466,6 +505,10 @@ def files(self) -> Optional[List[PackagePath]]: (i.e. RECORD for dist-info, or installed-files.txt or SOURCES.txt for egg-info) is missing. Result may be empty if the metadata exists but is empty. + + Custom providers are recommended to provide a "RECORD" file (in + ``read_text``) or override this property to allow for callers to be + able to resolve filenames provided by the package. """ def make_file(name, hash=None, size_str=None): @@ -497,7 +540,7 @@ def skip_missing_files(package_paths): def _read_files_distinfo(self): """ - Read the lines of RECORD + Read the lines of RECORD. """ text = self.read_text('RECORD') return text and text.splitlines() @@ -611,6 +654,9 @@ def _load_json(self, filename): class DistributionFinder(MetaPathFinder): """ A MetaPathFinder capable of discovering installed distributions. + + Custom providers should implement this interface in order to + supply metadata. """ class Context: @@ -623,6 +669,17 @@ class Context: Each DistributionFinder may expect any parameters and should attempt to honor the canonical parameters defined below when appropriate. + + This mechanism gives a custom provider a means to + solicit additional details from the caller beyond + "name" and "path" when searching distributions. + For example, imagine a provider that exposes suites + of packages in either a "public" or "private" ``realm``. + A caller may wish to query only for distributions in + a particular realm and could call + ``distributions(realm="private")`` to signal to the + custom provider to only include distributions from that + realm. 
""" name = None @@ -658,11 +715,18 @@ def find_distributions(self, context=Context()) -> Iterable[Distribution]: class FastPath: """ - Micro-optimized class for searching a path for - children. + Micro-optimized class for searching a root for children. + + Root is a path on the file system that may contain metadata + directories either as natural directories or within a zip file. >>> FastPath('').children() ['...'] + + FastPath objects are cached and recycled for any given root. + + >>> FastPath('foobar') is FastPath('foobar') + True """ @functools.lru_cache() # type: ignore @@ -704,7 +768,19 @@ def lookup(self, mtime): class Lookup: + """ + A micro-optimized class for searching a (fast) path for metadata. + """ + def __init__(self, path: FastPath): + """ + Calculate all of the children representing metadata. + + From the children in the path, calculate early all of the + children that appear to represent metadata (infos) or legacy + metadata (eggs). + """ + base = os.path.basename(path.root).lower() base_is_egg = base.endswith(".egg") self.infos = FreezableDefaultDict(list) @@ -725,7 +801,10 @@ def __init__(self, path: FastPath): self.infos.freeze() self.eggs.freeze() - def search(self, prepared): + def search(self, prepared: Prepared): + """ + Yield all infos and eggs matching the Prepared query. + """ infos = ( self.infos[prepared.normalized] if prepared @@ -741,13 +820,28 @@ def search(self, prepared): class Prepared: """ - A prepared search for metadata on a possibly-named package. + A prepared search query for metadata on a possibly-named package. + + Pre-calculates the normalization to prevent repeated operations. + + >>> none = Prepared(None) + >>> none.normalized + >>> none.legacy_normalized + >>> bool(none) + False + >>> sample = Prepared('Sample__Pkg-name.foo') + >>> sample.normalized + 'sample_pkg_name_foo' + >>> sample.legacy_normalized + 'sample__pkg_name.foo' + >>> bool(sample) + True """ normalized = None legacy_normalized = None - def __init__(self, name): + def __init__(self, name: Optional[str]): self.name = name if name is None: return @@ -777,7 +871,7 @@ class MetadataPathFinder(DistributionFinder): @classmethod def find_distributions( cls, context=DistributionFinder.Context() - ) -> Iterable["PathDistribution"]: + ) -> Iterable[PathDistribution]: """ Find distributions. @@ -810,7 +904,7 @@ def __init__(self, path: SimplePath) -> None: """ self._path = path - def read_text(self, filename: Union[str, os.PathLike[str]]) -> Optional[str]: + def read_text(self, filename: str | os.PathLike[str]) -> Optional[str]: with suppress( FileNotFoundError, IsADirectoryError, @@ -824,7 +918,7 @@ def read_text(self, filename: Union[str, os.PathLike[str]]) -> Optional[str]: read_text.__doc__ = Distribution.read_text.__doc__ - def locate_file(self, path: Union[str, os.PathLike[str]]) -> pathlib.Path: + def locate_file(self, path: str | os.PathLike[str]) -> SimplePath: return self._path.parent / path @property diff --git a/Lib/importlib/metadata/_meta.py b/Lib/importlib/metadata/_meta.py index f670016de7fef2..1927d0f624d82f 100644 --- a/Lib/importlib/metadata/_meta.py +++ b/Lib/importlib/metadata/_meta.py @@ -1,3 +1,6 @@ +from __future__ import annotations + +import os from typing import Protocol from typing import Any, Dict, Iterator, List, Optional, TypeVar, Union, overload @@ -6,30 +9,27 @@ class PackageMetadata(Protocol): - def __len__(self) -> int: - ... # pragma: no cover + def __len__(self) -> int: ... # pragma: no cover - def __contains__(self, item: str) -> bool: - ... 
# pragma: no cover + def __contains__(self, item: str) -> bool: ... # pragma: no cover - def __getitem__(self, key: str) -> str: - ... # pragma: no cover + def __getitem__(self, key: str) -> str: ... # pragma: no cover - def __iter__(self) -> Iterator[str]: - ... # pragma: no cover + def __iter__(self) -> Iterator[str]: ... # pragma: no cover @overload - def get(self, name: str, failobj: None = None) -> Optional[str]: - ... # pragma: no cover + def get( + self, name: str, failobj: None = None + ) -> Optional[str]: ... # pragma: no cover @overload - def get(self, name: str, failobj: _T) -> Union[str, _T]: - ... # pragma: no cover + def get(self, name: str, failobj: _T) -> Union[str, _T]: ... # pragma: no cover # overload per python/importlib_metadata#435 @overload - def get_all(self, name: str, failobj: None = None) -> Optional[List[Any]]: - ... # pragma: no cover + def get_all( + self, name: str, failobj: None = None + ) -> Optional[List[Any]]: ... # pragma: no cover @overload def get_all(self, name: str, failobj: _T) -> Union[List[Any], _T]: @@ -44,20 +44,24 @@ def json(self) -> Dict[str, Union[str, List[str]]]: """ -class SimplePath(Protocol[_T]): +class SimplePath(Protocol): """ - A minimal subset of pathlib.Path required by PathDistribution. + A minimal subset of pathlib.Path required by Distribution. """ - def joinpath(self, other: Union[str, _T]) -> _T: - ... # pragma: no cover + def joinpath( + self, other: Union[str, os.PathLike[str]] + ) -> SimplePath: ... # pragma: no cover - def __truediv__(self, other: Union[str, _T]) -> _T: - ... # pragma: no cover + def __truediv__( + self, other: Union[str, os.PathLike[str]] + ) -> SimplePath: ... # pragma: no cover @property - def parent(self) -> _T: - ... # pragma: no cover + def parent(self) -> SimplePath: ... # pragma: no cover + + def read_text(self, encoding=None) -> str: ... # pragma: no cover + + def read_bytes(self) -> bytes: ... # pragma: no cover - def read_text(self) -> str: - ... # pragma: no cover + def exists(self) -> bool: ... # pragma: no cover diff --git a/Lib/importlib/resources/_common.py b/Lib/importlib/resources/_common.py index a3902535342612..e18082fb3d26a0 100644 --- a/Lib/importlib/resources/_common.py +++ b/Lib/importlib/resources/_common.py @@ -12,8 +12,6 @@ from typing import Union, Optional, cast from .abc import ResourceReader, Traversable -from ._adapters import wrap_spec - Package = Union[types.ModuleType, str] Anchor = Package @@ -109,6 +107,9 @@ def from_package(package: types.ModuleType): Return a Traversable object for the given package. """ + # deferred for performance (python/cpython#109829) + from ._adapters import wrap_spec + spec = wrap_spec(package) reader = spec.loader.get_resource_reader(spec.name) return reader.files() diff --git a/Lib/importlib/util.py b/Lib/importlib/util.py index da9bd080a8dd5a..f1bb4b1fb41576 100644 --- a/Lib/importlib/util.py +++ b/Lib/importlib/util.py @@ -178,12 +178,11 @@ def __getattribute__(self, attr): # Only the first thread to get the lock should trigger the load # and reset the module's class. The rest can now getattr(). if object.__getattribute__(self, '__class__') is _LazyModule: - # The first thread comes here multiple times as it descends the - # call stack. The first time, it sets is_loading and triggers - # exec_module(), which will access module.__dict__, module.__name__, - # and/or module.__spec__, reentering this method. These accesses - # need to be allowed to proceed without triggering the load again. 
- if loader_state['is_loading'] and attr.startswith('__') and attr.endswith('__'): + # Reentrant calls from the same thread must be allowed to proceed without + # triggering the load again. + # exec_module() and self-referential imports are the primary ways this can + # happen, but in any case we must return something to avoid deadlock. + if loader_state['is_loading']: return object.__getattribute__(self, attr) loader_state['is_loading'] = True diff --git a/Lib/inspect.py b/Lib/inspect.py index 8a2b2c96e993b5..422c09a92ad141 100644 --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -954,6 +954,10 @@ def getsourcefile(object): elif any(filename.endswith(s) for s in importlib.machinery.EXTENSION_SUFFIXES): return None + elif filename.endswith(".fwork"): + # Apple mobile framework markers are another type of non-source file + return None + # return a filename found in the linecache even if it doesn't exist on disk if filename in linecache.cache: return filename @@ -984,6 +988,7 @@ def getmodule(object, _filename=None): return object if hasattr(object, '__module__'): return sys.modules.get(object.__module__) + # Try the filename to modulename cache if _filename is not None and _filename in modulesbyfile: return sys.modules.get(modulesbyfile[_filename]) @@ -1119,7 +1124,7 @@ def findsource(object): # Allow filenames in form of "" to pass through. # `doctest` monkeypatches `linecache` module to enable # inspection, so let `linecache.getlines` to be called. - if not (file.startswith('<') and file.endswith('>')): + if (not (file.startswith('<') and file.endswith('>'))) or file.endswith('.fwork'): raise OSError('source code not available') module = getmodule(object, file) @@ -1152,15 +1157,8 @@ def findsource(object): if not hasattr(object, 'co_firstlineno'): raise OSError('could not find function definition') lnum = object.co_firstlineno - 1 - pat = re.compile(r'^(\s*def\s)|(\s*async\s+def\s)|(.*(? 0: - try: - line = lines[lnum] - except IndexError: - raise OSError('lineno is out of bounds') - if pat.match(line): - break - lnum = lnum - 1 + if lnum >= len(lines): + raise OSError('lineno is out of bounds') return lines, lnum raise OSError('could not find code object') diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py index 7d6edcf2478a82..22cdfc93d8ad32 100644 --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -1086,7 +1086,11 @@ def is_private(self): """ return any(self.network_address in priv_network and self.broadcast_address in priv_network - for priv_network in self._constants._private_networks) + for priv_network in self._constants._private_networks) and all( + self.network_address not in network and + self.broadcast_address not in network + for network in self._constants._private_networks_exceptions + ) @property def is_global(self): @@ -1347,7 +1351,10 @@ def is_private(self): ``is_private`` has value opposite to :attr:`is_global`, except for the ``100.64.0.0/10`` IPv4 range where they are both ``False``. 
""" - return any(self in net for net in self._constants._private_networks) + return ( + any(self in net for net in self._constants._private_networks) + and all(self not in net for net in self._constants._private_networks_exceptions) + ) @property @functools.lru_cache() @@ -1578,13 +1585,15 @@ class _IPv4Constants: _public_network = IPv4Network('100.64.0.0/10') + # Not globally reachable address blocks listed on + # https://www.iana.org/assignments/iana-ipv4-special-registry/iana-ipv4-special-registry.xhtml _private_networks = [ IPv4Network('0.0.0.0/8'), IPv4Network('10.0.0.0/8'), IPv4Network('127.0.0.0/8'), IPv4Network('169.254.0.0/16'), IPv4Network('172.16.0.0/12'), - IPv4Network('192.0.0.0/29'), + IPv4Network('192.0.0.0/24'), IPv4Network('192.0.0.170/31'), IPv4Network('192.0.2.0/24'), IPv4Network('192.168.0.0/16'), @@ -1595,6 +1604,11 @@ class _IPv4Constants: IPv4Network('255.255.255.255/32'), ] + _private_networks_exceptions = [ + IPv4Network('192.0.0.9/32'), + IPv4Network('192.0.0.10/32'), + ] + _reserved_network = IPv4Network('240.0.0.0/4') _unspecified_address = IPv4Address('0.0.0.0') @@ -2086,7 +2100,10 @@ def is_private(self): ipv4_mapped = self.ipv4_mapped if ipv4_mapped is not None: return ipv4_mapped.is_private - return any(self in net for net in self._constants._private_networks) + return ( + any(self in net for net in self._constants._private_networks) + and all(self not in net for net in self._constants._private_networks_exceptions) + ) @property def is_global(self): @@ -2342,19 +2359,31 @@ class _IPv6Constants: _multicast_network = IPv6Network('ff00::/8') + # Not globally reachable address blocks listed on + # https://www.iana.org/assignments/iana-ipv6-special-registry/iana-ipv6-special-registry.xhtml _private_networks = [ IPv6Network('::1/128'), IPv6Network('::/128'), IPv6Network('::ffff:0:0/96'), + IPv6Network('64:ff9b:1::/48'), IPv6Network('100::/64'), IPv6Network('2001::/23'), - IPv6Network('2001:2::/48'), IPv6Network('2001:db8::/32'), - IPv6Network('2001:10::/28'), + # IANA says N/A, let's consider it not globally reachable to be safe + IPv6Network('2002::/16'), IPv6Network('fc00::/7'), IPv6Network('fe80::/10'), ] + _private_networks_exceptions = [ + IPv6Network('2001:1::1/128'), + IPv6Network('2001:1::2/128'), + IPv6Network('2001:3::/32'), + IPv6Network('2001:4:112::/48'), + IPv6Network('2001:20::/28'), + IPv6Network('2001:30::/28'), + ] + _reserved_networks = [ IPv6Network('::/8'), IPv6Network('100::/8'), IPv6Network('200::/7'), IPv6Network('400::/6'), diff --git a/Lib/logging/__init__.py b/Lib/logging/__init__.py index fcec9e76b98661..927e3e653f065a 100644 --- a/Lib/logging/__init__.py +++ b/Lib/logging/__init__.py @@ -2013,7 +2013,7 @@ def basicConfig(**kwargs): that this argument is incompatible with 'filename' - if both are present, 'stream' is ignored. handlers If specified, this should be an iterable of already created - handlers, which will be added to the root handler. Any handler + handlers, which will be added to the root logger. Any handler in the list which does not have a formatter assigned will be assigned the formatter created in this function. force If this keyword is specified as true, any existing handlers diff --git a/Lib/mimetypes.py b/Lib/mimetypes.py index 51b99701c9d727..dad3813e39dbae 100644 --- a/Lib/mimetypes.py +++ b/Lib/mimetypes.py @@ -120,7 +120,13 @@ def guess_type(self, url, strict=True): but non-standard types. 
""" url = os.fspath(url) - scheme, url = urllib.parse._splittype(url) + p = urllib.parse.urlparse(url) + if p.scheme and len(p.scheme) > 1: + scheme = p.scheme + url = p.path + else: + scheme = None + url = os.path.splitdrive(url)[1] if scheme == 'data': # syntax of data URLs: # dataurl := "data:" [ mediatype ] [ ";base64" ] "," data @@ -555,6 +561,7 @@ def _default_mime_types(): '.pl' : 'text/plain', '.srt' : 'text/plain', '.rtx' : 'text/richtext', + '.rtf' : 'text/rtf', '.tsv' : 'text/tab-separated-values', '.vtt' : 'text/vtt', '.py' : 'text/x-python', diff --git a/Lib/modulefinder.py b/Lib/modulefinder.py index a0a020f9eeb9b4..ac478ee7f51722 100644 --- a/Lib/modulefinder.py +++ b/Lib/modulefinder.py @@ -72,7 +72,12 @@ def _find_module(name, path=None): if isinstance(spec.loader, importlib.machinery.SourceFileLoader): kind = _PY_SOURCE - elif isinstance(spec.loader, importlib.machinery.ExtensionFileLoader): + elif isinstance( + spec.loader, ( + importlib.machinery.ExtensionFileLoader, + importlib.machinery.AppleFrameworkLoader, + ) + ): kind = _C_EXTENSION elif isinstance(spec.loader, importlib.machinery.SourcelessFileLoader): diff --git a/Lib/ntpath.py b/Lib/ntpath.py index e7cbfe17ecb3c8..f1c48ecd1e5e2a 100644 --- a/Lib/ntpath.py +++ b/Lib/ntpath.py @@ -29,7 +29,8 @@ "ismount","isreserved","expanduser","expandvars","normpath", "abspath","curdir","pardir","sep","pathsep","defpath","altsep", "extsep","devnull","realpath","supports_unicode_filenames","relpath", - "samefile", "sameopenfile", "samestat", "commonpath", "isjunction"] + "samefile", "sameopenfile", "samestat", "commonpath", "isjunction", + "isdevdrive"] def _get_bothseps(path): if isinstance(path, bytes): @@ -280,21 +281,9 @@ def isjunction(path): return False return bool(st.st_reparse_tag == stat.IO_REPARSE_TAG_MOUNT_POINT) else: - def isjunction(path): - """Test whether a path is a junction""" - os.fspath(path) - return False - - -# Being true for dangling symbolic links is also useful. + # Use genericpath.isjunction as imported above + pass -def lexists(path): - """Test whether a path exists. Returns True for broken symbolic links""" - try: - st = os.lstat(path) - except (OSError, ValueError): - return False - return True # Is a path a mount point? 
# Any drive letter root (eg c:\) @@ -916,15 +905,12 @@ def commonpath(paths): try: from nt import _path_isdevdrive -except ImportError: - def isdevdrive(path): - """Determines whether the specified path is on a Windows Dev Drive.""" - # Never a Dev Drive - return False -else: def isdevdrive(path): """Determines whether the specified path is on a Windows Dev Drive.""" try: return _path_isdevdrive(abspath(path)) except OSError: return False +except ImportError: + # Use genericpath.isdevdrive as imported above + pass diff --git a/Lib/pdb.py b/Lib/pdb.py index 88ea900e63f42b..f80171d172b23e 100755 --- a/Lib/pdb.py +++ b/Lib/pdb.py @@ -87,6 +87,7 @@ import linecache from contextlib import contextmanager +from rlcompleter import Completer from typing import Union @@ -573,20 +574,14 @@ def displayhook(self, obj): self.message(repr(obj)) @contextmanager - def _disable_tab_completion(self): - if self.use_rawinput and self.completekey == 'tab': - try: - import readline - except ImportError: - yield - return - try: - readline.parse_and_bind('tab: self-insert') - yield - finally: - readline.parse_and_bind('tab: complete') - else: + def _disable_command_completion(self): + completenames = self.completenames + try: + self.completenames = self.completedefault yield + finally: + self.completenames = completenames + return def default(self, line): if line[:1] == '!': line = line[1:].strip() @@ -595,7 +590,7 @@ def default(self, line): try: if (code := codeop.compile_command(line + '\n', '', 'single')) is None: # Multi-line mode - with self._disable_tab_completion(): + with self._disable_command_completion(): buffer = line continue_prompt = "... " while (code := codeop.compile_command(buffer, '', 'single')) is None: @@ -771,7 +766,10 @@ def completenames(self, text, line, begidx, endidx): if commands: return commands else: - return self._complete_expression(text, line, begidx, endidx) + expressions = self._complete_expression(text, line, begidx, endidx) + if expressions: + return expressions + return self.completedefault(text, line, begidx, endidx) def _complete_location(self, text, line, begidx, endidx): # Complete a file/module/function location for break/tbreak/clear. @@ -828,6 +826,21 @@ def _complete_expression(self, text, line, begidx, endidx): # Complete a simple name. 
return [n for n in ns.keys() if n.startswith(text)] + def completedefault(self, text, line, begidx, endidx): + if text.startswith("$"): + # Complete convenience variables + conv_vars = self.curframe.f_globals.get('__pdb_convenience_variables', {}) + return [f"${name}" for name in conv_vars if name.startswith(text[1:])] + + # Use rlcompleter to do the completion + state = 0 + matches = [] + completer = Completer(self.curframe.f_globals | self.curframe_locals) + while (match := completer.complete(text, state)) is not None: + matches.append(match) + state += 1 + return matches + # Command definitions, called by cmdloop() # The argument is the remaining string on the command line # Return true to exit from the command loop @@ -2237,15 +2250,19 @@ def main(): import argparse parser = argparse.ArgumentParser(prog="pdb", + usage="%(prog)s [-h] [-c command] (-m module | pyfile) [args ...]", description=_usage, formatter_class=argparse.RawDescriptionHelpFormatter, allow_abbrev=False) - parser.add_argument('-c', '--command', action='append', default=[], metavar='command') - group = parser.add_mutually_exclusive_group(required=True) - group.add_argument('-m', metavar='module') - group.add_argument('pyfile', nargs='?') - parser.add_argument('args', nargs="*") + # We need to manually get the script from args, because the first positional + # argument could be either the script we need to debug, or the argument + # to the -m module + parser.add_argument('-c', '--command', action='append', default=[], metavar='command', dest='commands', + help='pdb commands to execute as if given in a .pdbrc file') + parser.add_argument('-m', metavar='module', dest='module') + parser.add_argument('args', nargs='*', + help="when -m is not specified, the first arg is the script to debug") if len(sys.argv) == 1: # If no arguments were given (python -m pdb), print the whole help message. @@ -2255,11 +2272,13 @@ def main(): opts = parser.parse_args() - if opts.m: - file = opts.m + if opts.module: + file = opts.module target = _ModuleTarget(file) else: - file = opts.pyfile + if not opts.args: + parser.error("no module or script to run") + file = opts.args.pop(0) target = _ScriptTarget(file) target.check() @@ -2271,7 +2290,7 @@ def main(): # changed by the user from the command line. There is a "restart" command # which allows explicit specification of command line arguments. pdb = Pdb() - pdb.rcLines.extend(opts.command) + pdb.rcLines.extend(opts.commands) while True: try: pdb._run(target) diff --git a/Lib/platform.py b/Lib/platform.py index 2756f298f9676f..dbcb636df64981 100755 --- a/Lib/platform.py +++ b/Lib/platform.py @@ -496,6 +496,30 @@ def mac_ver(release='', versioninfo=('', '', ''), machine=''): # If that also doesn't work return the default values return release, versioninfo, machine + +# A namedtuple for iOS version information. +IOSVersionInfo = collections.namedtuple( + "IOSVersionInfo", + ["system", "release", "model", "is_simulator"] +) + + +def ios_ver(system="", release="", model="", is_simulator=False): + """Get iOS version information, and return it as a namedtuple: + (system, release, model, is_simulator). + + If values can't be determined, they are set to values provided as + parameters. 
+ """ + if sys.platform == "ios": + import _ios_support + result = _ios_support.get_platform_ios() + if result is not None: + return IOSVersionInfo(*result) + + return IOSVersionInfo(system, release, model, is_simulator) + + def _java_getprop(name, default): """This private helper is deprecated in 3.13 and will be removed in 3.15""" from java.lang import System @@ -542,6 +566,47 @@ def java_ver(release='', vendor='', vminfo=('', '', ''), osinfo=('', '', '')): return release, vendor, vminfo, osinfo + +AndroidVer = collections.namedtuple( + "AndroidVer", "release api_level manufacturer model device is_emulator") + +def android_ver(release="", api_level=0, manufacturer="", model="", device="", + is_emulator=False): + if sys.platform == "android": + try: + from ctypes import CDLL, c_char_p, create_string_buffer + except ImportError: + pass + else: + # An NDK developer confirmed that this is an officially-supported + # API (https://stackoverflow.com/a/28416743). Use `getattr` to avoid + # private name mangling. + system_property_get = getattr(CDLL("libc.so"), "__system_property_get") + system_property_get.argtypes = (c_char_p, c_char_p) + + def getprop(name, default): + # https://android.googlesource.com/platform/bionic/+/refs/tags/android-5.0.0_r1/libc/include/sys/system_properties.h#39 + PROP_VALUE_MAX = 92 + buffer = create_string_buffer(PROP_VALUE_MAX) + length = system_property_get(name.encode("UTF-8"), buffer) + if length == 0: + # This API doesn’t distinguish between an empty property and + # a missing one. + return default + else: + return buffer.value.decode("UTF-8", "backslashreplace") + + release = getprop("ro.build.version.release", release) + api_level = int(getprop("ro.build.version.sdk", api_level)) + manufacturer = getprop("ro.product.manufacturer", manufacturer) + model = getprop("ro.product.model", model) + device = getprop("ro.product.device", device) + is_emulator = getprop("ro.kernel.qemu", "0") == "1" + + return AndroidVer( + release, api_level, manufacturer, model, device, is_emulator) + + ### System name aliasing def system_alias(system, release, version): @@ -613,7 +678,7 @@ def _platform(*args): if cleaned == platform: break platform = cleaned - while platform[-1] == '-': + while platform and platform[-1] == '-': platform = platform[:-1] return platform @@ -654,7 +719,7 @@ def _syscmd_file(target, default=''): default in case the command should fail. """ - if sys.platform in ('dos', 'win32', 'win16'): + if sys.platform in {'dos', 'win32', 'win16', 'ios', 'tvos', 'watchos'}: # XXX Others too ? return default @@ -818,6 +883,14 @@ def get_OpenVMS(): csid, cpu_number = vms_lib.getsyi('SYI$_CPU', 0) return 'Alpha' if cpu_number >= 128 else 'VAX' + # On the iOS simulator, os.uname returns the architecture as uname.machine. + # On device it returns the model name for some reason; but there's only one + # CPU architecture for iOS devices, so we know the right answer. + def get_ios(): + if sys.implementation._multiarch.endswith("simulator"): + return os.uname().machine + return 'arm64' + def from_subprocess(): """ Fall back to `uname -p` @@ -972,6 +1045,15 @@ def uname(): system = 'Windows' release = 'Vista' + # On Android, return the name and version of the OS rather than the kernel. 
+ if sys.platform == 'android': + system = 'Android' + release = android_ver().release + + # Normalize responses on iOS + if sys.platform == 'ios': + system, release, _, _ = ios_ver() + vals = system, node, release, version, machine # Replace 'unknown' values with the more portable '' _uname_cache = uname_result(*map(_unknown_as_blank, vals)) @@ -1251,11 +1333,14 @@ def platform(aliased=False, terse=False): system, release, version = system_alias(system, release, version) if system == 'Darwin': - # macOS (darwin kernel) - macos_release = mac_ver()[0] - if macos_release: - system = 'macOS' - release = macos_release + # macOS and iOS both report as a "Darwin" kernel + if sys.platform == "ios": + system, release, _, _ = ios_ver() + else: + macos_release = mac_ver()[0] + if macos_release: + system = 'macOS' + release = macos_release if system == 'Windows': # MS platforms diff --git a/Lib/posixpath.py b/Lib/posixpath.py index 33943b4403636a..4fc02be69bd6e1 100644 --- a/Lib/posixpath.py +++ b/Lib/posixpath.py @@ -35,7 +35,7 @@ "samefile","sameopenfile","samestat", "curdir","pardir","sep","pathsep","defpath","altsep","extsep", "devnull","realpath","supports_unicode_filenames","relpath", - "commonpath", "isjunction"] + "commonpath", "isjunction","isdevdrive"] def _get_sep(path): @@ -187,26 +187,6 @@ def dirname(p): return head -# Is a path a junction? - -def isjunction(path): - """Test whether a path is a junction - Junctions are not a part of posix semantics""" - os.fspath(path) - return False - - -# Being true for dangling symbolic links is also useful. - -def lexists(path): - """Test whether a path exists. Returns True for broken symbolic links""" - try: - os.lstat(path) - except (OSError, ValueError): - return False - return True - - # Is a path a mount point? # (Does this work for all UNIXes? Is it even guaranteed to work by Posix?) diff --git a/Lib/pydoc.py b/Lib/pydoc.py index 08fd7aba7c9472..d9cf03fb4ffd2a 100755 --- a/Lib/pydoc.py +++ b/Lib/pydoc.py @@ -313,7 +313,8 @@ def visiblename(name, all=None, obj=None): if name in {'__author__', '__builtins__', '__cached__', '__credits__', '__date__', '__doc__', '__file__', '__spec__', '__loader__', '__module__', '__name__', '__package__', - '__path__', '__qualname__', '__slots__', '__version__'}: + '__path__', '__qualname__', '__slots__', '__version__', + '__static_attributes__'}: return 0 # Private names are hidden, but special names are displayed. 
if name.startswith('__') and name.endswith('__'): return 1 diff --git a/Lib/site.py b/Lib/site.py index 2aee63e24ca52b..162bbec4f8f41b 100644 --- a/Lib/site.py +++ b/Lib/site.py @@ -280,8 +280,8 @@ def _getuserbase(): if env_base: return env_base - # Emscripten, VxWorks, and WASI have no home directories - if sys.platform in {"emscripten", "vxworks", "wasi"}: + # Emscripten, iOS, tvOS, VxWorks, WASI, and watchOS have no home directories + if sys.platform in {"emscripten", "ios", "tvos", "vxworks", "wasi", "watchos"}: return None def joinuser(*args): diff --git a/Lib/statistics.py b/Lib/statistics.py index 5d636258fd442b..58fb31def8896e 100644 --- a/Lib/statistics.py +++ b/Lib/statistics.py @@ -138,7 +138,7 @@ from itertools import count, groupby, repeat from bisect import bisect_left, bisect_right from math import hypot, sqrt, fabs, exp, erf, tau, log, fsum, sumprod -from math import isfinite, isinf, pi, cos, cosh +from math import isfinite, isinf, pi, cos, sin, cosh, atan from functools import reduce from operator import itemgetter from collections import Counter, namedtuple, defaultdict @@ -803,9 +803,9 @@ def multimode(data): return [value for value, count in counts.items() if count == maxcount] -def kde(data, h, kernel='normal'): - """Kernel Density Estimation: Create a continuous probability - density function from discrete samples. +def kde(data, h, kernel='normal', *, cumulative=False): + """Kernel Density Estimation: Create a continuous probability density + function or cumulative distribution function from discrete samples. The basic idea is to smooth the data using a kernel function to help draw inferences about a population from a sample. @@ -820,20 +820,22 @@ def kde(data, h, kernel='normal'): Kernels that give some weight to every sample point: - normal or gauss + normal (gauss) logistic sigmoid Kernels that only give weight to sample points within the bandwidth: - rectangular or uniform + rectangular (uniform) triangular - parabolic or epanechnikov - quartic or biweight + parabolic (epanechnikov) + quartic (biweight) triweight cosine + If *cumulative* is true, will return a cumulative distribution function. + A StatisticsError will be raised if the data sequence is empty. 
Example @@ -847,7 +849,8 @@ def kde(data, h, kernel='normal'): Compute the area under the curve: - >>> sum(f_hat(x) for x in range(-20, 20)) + >>> area = sum(f_hat(x) for x in range(-20, 20)) + >>> round(area, 4) 1.0 Plot the estimated probability density function at @@ -876,6 +879,13 @@ def kde(data, h, kernel='normal'): 9: 0.009 x 10: 0.002 x + Estimate P(4.5 < X <= 7.5), the probability that a new sample value + will be between 4.5 and 7.5: + + >>> cdf = kde(sample, h=1.5, cumulative=True) + >>> round(cdf(7.5) - cdf(4.5), 2) + 0.22 + References ---------- @@ -888,6 +898,9 @@ def kde(data, h, kernel='normal'): Interactive graphical demonstration and exploration: https://demonstrations.wolfram.com/KernelDensityEstimation/ + Kernel estimation of cumulative distribution function of a random variable with bounded support + https://www.econstor.eu/bitstream/10419/207829/1/10.21307_stattrans-2016-037.pdf + """ n = len(data) @@ -903,45 +916,56 @@ def kde(data, h, kernel='normal'): match kernel: case 'normal' | 'gauss': - c = 1 / sqrt(2 * pi) - K = lambda t: c * exp(-1/2 * t * t) + sqrt2pi = sqrt(2 * pi) + sqrt2 = sqrt(2) + K = lambda t: exp(-1/2 * t * t) / sqrt2pi + I = lambda t: 1/2 * (1.0 + erf(t / sqrt2)) support = None case 'logistic': # 1.0 / (exp(t) + 2.0 + exp(-t)) K = lambda t: 1/2 / (1.0 + cosh(t)) + I = lambda t: 1.0 - 1.0 / (exp(t) + 1.0) support = None case 'sigmoid': # (2/pi) / (exp(t) + exp(-t)) - c = 1 / pi - K = lambda t: c / cosh(t) + c1 = 1 / pi + c2 = 2 / pi + K = lambda t: c1 / cosh(t) + I = lambda t: c2 * atan(exp(t)) support = None case 'rectangular' | 'uniform': K = lambda t: 1/2 + I = lambda t: 1/2 * t + 1/2 support = 1.0 case 'triangular': K = lambda t: 1.0 - abs(t) + I = lambda t: t*t * (1/2 if t < 0.0 else -1/2) + t + 1/2 support = 1.0 case 'parabolic' | 'epanechnikov': K = lambda t: 3/4 * (1.0 - t * t) + I = lambda t: -1/4 * t**3 + 3/4 * t + 1/2 support = 1.0 case 'quartic' | 'biweight': K = lambda t: 15/16 * (1.0 - t * t) ** 2 + I = lambda t: 3/16 * t**5 - 5/8 * t**3 + 15/16 * t + 1/2 support = 1.0 case 'triweight': K = lambda t: 35/32 * (1.0 - t * t) ** 3 + I = lambda t: 35/32 * (-1/7*t**7 + 3/5*t**5 - t**3 + t) + 1/2 support = 1.0 case 'cosine': c1 = pi / 4 c2 = pi / 2 K = lambda t: c1 * cos(c2 * t) + I = lambda t: 1/2 * sin(c2 * t) + 1/2 support = 1.0 case _: @@ -952,6 +976,9 @@ def kde(data, h, kernel='normal'): def pdf(x): return sum(K((x - x_i) / h) for x_i in data) / (n * h) + def cdf(x): + return sum(I((x - x_i) / h) for x_i in data) / n + else: sample = sorted(data) @@ -963,9 +990,19 @@ def pdf(x): supported = sample[i : j] return sum(K((x - x_i) / h) for x_i in supported) / (n * h) - pdf.__doc__ = f'PDF estimate with {h=!r} and {kernel=!r}' + def cdf(x): + i = bisect_left(sample, x - bandwidth) + j = bisect_right(sample, x + bandwidth) + supported = sample[i : j] + return sum((I((x - x_i) / h) for x_i in supported), i) / n - return pdf + if cumulative: + cdf.__doc__ = f'CDF estimate with {h=!r} and {kernel=!r}' + return cdf + + else: + pdf.__doc__ = f'PDF estimate with {h=!r} and {kernel=!r}' + return pdf # Notes on methods for computing quantiles diff --git a/Lib/subprocess.py b/Lib/subprocess.py index 1437bf8148282c..d7c7b45127104f 100644 --- a/Lib/subprocess.py +++ b/Lib/subprocess.py @@ -839,6 +839,9 @@ def __init__(self, args, bufsize=-1, executable=None, if not isinstance(bufsize, int): raise TypeError("bufsize must be an integer") + if stdout is STDOUT: + raise ValueError("STDOUT can only be used for stderr") + if pipesize is None: pipesize = -1 # 
Restore default if not isinstance(pipesize, int): @@ -1586,6 +1589,8 @@ def _wait(self, timeout): """Internal implementation of wait() on Windows.""" if timeout is None: timeout_millis = _winapi.INFINITE + elif timeout <= 0: + timeout_millis = 0 else: timeout_millis = int(timeout * 1000) if self.returncode is None: diff --git a/Lib/sysconfig/__init__.py b/Lib/sysconfig/__init__.py index 07ab27c7fb0c35..70bdecf2138fd9 100644 --- a/Lib/sysconfig/__init__.py +++ b/Lib/sysconfig/__init__.py @@ -21,6 +21,7 @@ # Keys for get_config_var() that are never converted to Python integers. _ALWAYS_STR = { + 'IPHONEOS_DEPLOYMENT_TARGET', 'MACOSX_DEPLOYMENT_TARGET', } @@ -57,6 +58,7 @@ 'scripts': '{base}/Scripts', 'data': '{base}', }, + # Downstream distributors can overwrite the default install scheme. # This is done to support downstream modifications where distributors change # the installation layout (eg. different site-packages directory). @@ -114,8 +116,8 @@ def _getuserbase(): if env_base: return env_base - # Emscripten, VxWorks, and WASI have no home directories - if sys.platform in {"emscripten", "vxworks", "wasi"}: + # Emscripten, iOS, tvOS, VxWorks, WASI, and watchOS have no home directories + if sys.platform in {"emscripten", "ios", "tvos", "vxworks", "wasi", "watchos"}: return None def joinuser(*args): @@ -290,6 +292,7 @@ def _get_preferred_schemes(): 'home': 'posix_home', 'user': 'osx_framework_user', } + return { 'prefix': 'posix_prefix', 'home': 'posix_home', @@ -623,10 +626,15 @@ def get_platform(): if m: release = m.group() elif osname[:6] == "darwin": - import _osx_support - osname, release, machine = _osx_support.get_platform_osx( - get_config_vars(), - osname, release, machine) + if sys.platform == "ios": + release = get_config_vars().get("IPHONEOS_DEPLOYMENT_TARGET", "12.0") + osname = sys.platform + machine = sys.implementation._multiarch + else: + import _osx_support + osname, release, machine = _osx_support.get_platform_osx( + get_config_vars(), + osname, release, machine) return f"{osname}-{release}-{machine}" diff --git a/Lib/test/archivetestdata/zipdir_backslash.zip b/Lib/test/archivetestdata/zipdir_backslash.zip new file mode 100644 index 00000000000000..979126ef5e37eb Binary files /dev/null and b/Lib/test/archivetestdata/zipdir_backslash.zip differ diff --git a/Lib/test/libregrtest/cmdline.py b/Lib/test/libregrtest/cmdline.py index 876b1bcd2ca406..3e7428c4ad3797 100644 --- a/Lib/test/libregrtest/cmdline.py +++ b/Lib/test/libregrtest/cmdline.py @@ -173,6 +173,7 @@ def __init__(self, **kwargs) -> None: self.fail_rerun = False self.tempdir = None self._add_python_opts = True + self.xmlpath = None super().__init__(**kwargs) @@ -506,17 +507,28 @@ def _parse_args(args, **kwargs): ns.randomize = True if ns.verbose: ns.header = True + # When -jN option is used, a worker process does not use --verbose3 # and so -R 3:3 -jN --verbose3 just works as expected: there is no false # alarm about memory leak. if ns.huntrleaks and ns.verbose3 and ns.use_mp is None: - ns.verbose3 = False # run_single_test() replaces sys.stdout with io.StringIO if verbose3 # is true. In this case, huntrleaks sees an write into StringIO as # a memory leak, whereas it is not (gh-71290). + ns.verbose3 = False print("WARNING: Disable --verbose3 because it's incompatible with " "--huntrleaks without -jN option", file=sys.stderr) + + if ns.huntrleaks and ns.xmlpath: + # The XML data is written into a file outside runtest_refleak(), so + # it looks like a leak but it's not. 
Simply disable XML output when + # hunting for reference leaks (gh-83434). + ns.xmlpath = None + print("WARNING: Disable --junit-xml because it's incompatible " + "with --huntrleaks", + file=sys.stderr) + if ns.forever: # --forever implies --failfast ns.failfast = True diff --git a/Lib/test/pythoninfo.py b/Lib/test/pythoninfo.py index 814358746d6d8a..0cfd033bb637a7 100644 --- a/Lib/test/pythoninfo.py +++ b/Lib/test/pythoninfo.py @@ -179,6 +179,9 @@ def collect_platform(info_add): info_add(f'platform.freedesktop_os_release[{key}]', os_release[key]) + if sys.platform == 'android': + call_func(info_add, 'platform.android_ver', platform, 'android_ver') + def collect_locale(info_add): import locale @@ -287,6 +290,7 @@ def format_groups(groups): "HOMEDRIVE", "HOMEPATH", "IDLESTARTUP", + "IPHONEOS_DEPLOYMENT_TARGET", "LANG", "LDFLAGS", "LDSHARED", @@ -520,6 +524,7 @@ def collect_sysconfig(info_add): 'Py_GIL_DISABLED', 'SHELL', 'SOABI', + 'TEST_MODULES', 'abs_builddir', 'abs_srcdir', 'prefix', diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index ce693e51aab31c..92e3174407f133 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -1715,7 +1715,10 @@ def run_in_subinterp(code): module is enabled. """ _check_tracemalloc() - import _testcapi + try: + import _testcapi + except ImportError: + raise unittest.SkipTest("requires _testcapi") return _testcapi.run_in_subinterp(code) @@ -1725,7 +1728,10 @@ def run_in_subinterp_with_config(code, *, own_gil=None, **config): module is enabled. """ _check_tracemalloc() - import _testinternalcapi + try: + import _testinternalcapi + except ImportError: + raise unittest.SkipTest("requires _testinternalcapi") if own_gil is not None: assert 'gil' not in config, (own_gil, config) config['gil'] = 2 if own_gil else 1 @@ -1801,18 +1807,18 @@ def missing_compiler_executable(cmd_names=[]): return cmd[0] -_is_android_emulator = None +_old_android_emulator = None def setswitchinterval(interval): # Setting a very low gil interval on the Android emulator causes python # to hang (issue #26939). 
- minimum_interval = 1e-5 + minimum_interval = 1e-4 # 100 us if is_android and interval < minimum_interval: - global _is_android_emulator - if _is_android_emulator is None: - import subprocess - _is_android_emulator = (subprocess.check_output( - ['getprop', 'ro.kernel.qemu']).strip() == b'1') - if _is_android_emulator: + global _old_android_emulator + if _old_android_emulator is None: + import platform + av = platform.android_ver() + _old_android_emulator = av.is_emulator and av.api_level < 24 + if _old_android_emulator: interval = minimum_interval return sys.setswitchinterval(interval) @@ -1887,12 +1893,18 @@ def restore(self): def with_pymalloc(): - import _testcapi + try: + import _testcapi + except ImportError: + raise unittest.SkipTest("requires _testcapi") return _testcapi.WITH_PYMALLOC and not Py_GIL_DISABLED def with_mimalloc(): - import _testcapi + try: + import _testcapi + except ImportError: + raise unittest.SkipTest("requires _testcapi") return _testcapi.WITH_MIMALLOC @@ -2251,16 +2263,25 @@ def _findwheel(pkgname): # and returns the path to the venv directory and the path to the python executable @contextlib.contextmanager def setup_venv_with_pip_setuptools_wheel(venv_dir): + import shlex import subprocess from .os_helper import temp_cwd + def run_command(cmd): + if verbose: + print() + print('Run:', ' '.join(map(shlex.quote, cmd))) + subprocess.run(cmd, check=True) + else: + subprocess.run(cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + check=True) + with temp_cwd() as temp_dir: # Create virtual environment to get setuptools cmd = [sys.executable, '-X', 'dev', '-m', 'venv', venv_dir] - if verbose: - print() - print('Run:', ' '.join(cmd)) - subprocess.run(cmd, check=True) + run_command(cmd) venv = os.path.join(temp_dir, venv_dir) @@ -2275,10 +2296,7 @@ def setup_venv_with_pip_setuptools_wheel(venv_dir): '-m', 'pip', 'install', _findwheel('setuptools'), _findwheel('wheel')] - if verbose: - print() - print('Run:', ' '.join(cmd)) - subprocess.run(cmd, check=True) + run_command(cmd) yield python diff --git a/Lib/test/support/bytecode_helper.py b/Lib/test/support/bytecode_helper.py index a4845065a5322e..7a0e884ccc122a 100644 --- a/Lib/test/support/bytecode_helper.py +++ b/Lib/test/support/bytecode_helper.py @@ -3,7 +3,10 @@ import unittest import dis import io -from _testinternalcapi import compiler_codegen, optimize_cfg, assemble_code_object +try: + import _testinternalcapi +except ImportError: + _testinternalcapi = None _UNSPECIFIED = object() @@ -133,23 +136,26 @@ def complete_insts_info(self, insts): return res +@unittest.skipIf(_testinternalcapi is None, "requires _testinternalcapi") class CodegenTestCase(CompilationStepTestCase): def generate_code(self, ast): - insts, _ = compiler_codegen(ast, "my_file.py", 0) + insts, _ = _testinternalcapi.compiler_codegen(ast, "my_file.py", 0) return insts +@unittest.skipIf(_testinternalcapi is None, "requires _testinternalcapi") class CfgOptimizationTestCase(CompilationStepTestCase): def get_optimized(self, insts, consts, nlocals=0): insts = self.normalize_insts(insts) insts = self.complete_insts_info(insts) - insts = optimize_cfg(insts, consts, nlocals) + insts = _testinternalcapi.optimize_cfg(insts, consts, nlocals) return insts, consts +@unittest.skipIf(_testinternalcapi is None, "requires _testinternalcapi") class AssemblerTestCase(CompilationStepTestCase): def get_code_object(self, filename, insts, metadata): - co = assemble_code_object(filename, insts, metadata) + co = _testinternalcapi.assemble_code_object(filename, 
insts, metadata) return co diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py index 7cecf319e3638f..3929e4e00d59c2 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -2916,6 +2916,47 @@ def test_FunctionDef(self): self.assertEqual(node.name, 'foo') self.assertEqual(node.decorator_list, []) + def test_custom_subclass(self): + class NoInit(ast.AST): + pass + + obj = NoInit() + self.assertIsInstance(obj, NoInit) + self.assertEqual(obj.__dict__, {}) + + class Fields(ast.AST): + _fields = ('a',) + + with self.assertWarnsRegex(DeprecationWarning, + r"Fields provides _fields but not _field_types."): + obj = Fields() + with self.assertRaises(AttributeError): + obj.a + obj = Fields(a=1) + self.assertEqual(obj.a, 1) + + class FieldsAndTypes(ast.AST): + _fields = ('a',) + _field_types = {'a': int | None} + a: int | None = None + + obj = FieldsAndTypes() + self.assertIs(obj.a, None) + obj = FieldsAndTypes(a=1) + self.assertEqual(obj.a, 1) + + class FieldsAndTypesNoDefault(ast.AST): + _fields = ('a',) + _field_types = {'a': int} + + with self.assertWarnsRegex(DeprecationWarning, + r"FieldsAndTypesNoDefault\.__init__ missing 1 required positional argument: 'a'\."): + obj = FieldsAndTypesNoDefault() + with self.assertRaises(AttributeError): + obj.a + obj = FieldsAndTypesNoDefault(a=1) + self.assertEqual(obj.a, 1) + @support.cpython_only class ModuleStateTests(unittest.TestCase): diff --git a/Lib/test/test_asyncio/test_base_events.py b/Lib/test/test_asyncio/test_base_events.py index 4cd872d3a5b2d8..c14a0bb180d79b 100644 --- a/Lib/test/test_asyncio/test_base_events.py +++ b/Lib/test/test_asyncio/test_base_events.py @@ -3,6 +3,7 @@ import concurrent.futures import errno import math +import platform import socket import sys import threading @@ -1430,6 +1431,10 @@ def test_create_connection_no_inet_pton(self, m_socket): self._test_create_connection_ip_addr(m_socket, False) @patch_socket + @unittest.skipIf( + support.is_android and platform.android_ver().api_level < 23, + "Issue gh-71123: this fails on Android before API level 23" + ) def test_create_connection_service_name(self, m_socket): m_socket.getaddrinfo = socket.getaddrinfo sock = m_socket.socket.return_value diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index 5b9c871e1d1b5a..88c85a36b5d448 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -1125,12 +1125,16 @@ def test_create_server_ssl_match_failed(self): # incorrect server_hostname f_c = self.loop.create_connection(MyProto, host, port, ssl=sslcontext_client) + + # Allow for flexible libssl error messages. 
+ regex = re.compile(r"""( + IP address mismatch, certificate is not valid for '127.0.0.1' # OpenSSL + | + CERTIFICATE_VERIFY_FAILED # AWS-LC + )""", re.X) with mock.patch.object(self.loop, 'call_exception_handler'): with test_utils.disable_logger(): - with self.assertRaisesRegex( - ssl.CertificateError, - "IP address mismatch, certificate is not valid for " - "'127.0.0.1'"): + with self.assertRaisesRegex(ssl.CertificateError, regex): self.loop.run_until_complete(f_c) # close connection @@ -1374,6 +1378,80 @@ def test_create_datagram_endpoint_sock(self): tr.close() self.loop.run_until_complete(pr.done) + def test_datagram_send_to_non_listening_address(self): + # see: + # https://github.com/python/cpython/issues/91227 + # https://github.com/python/cpython/issues/88906 + # https://bugs.python.org/issue47071 + # https://bugs.python.org/issue44743 + # The Proactor event loop would fail to receive datagram messages after + # sending a message to an address that wasn't listening. + loop = self.loop + + class Protocol(asyncio.DatagramProtocol): + + _received_datagram = None + + def datagram_received(self, data, addr): + self._received_datagram.set_result(data) + + async def wait_for_datagram_received(self): + self._received_datagram = loop.create_future() + result = await asyncio.wait_for(self._received_datagram, 10) + self._received_datagram = None + return result + + def create_socket(): + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.setblocking(False) + sock.bind(('127.0.0.1', 0)) + return sock + + socket_1 = create_socket() + transport_1, protocol_1 = loop.run_until_complete( + loop.create_datagram_endpoint(Protocol, sock=socket_1) + ) + addr_1 = socket_1.getsockname() + + socket_2 = create_socket() + transport_2, protocol_2 = loop.run_until_complete( + loop.create_datagram_endpoint(Protocol, sock=socket_2) + ) + addr_2 = socket_2.getsockname() + + # creating and immediately closing this to try to get an address that + # is not listening + socket_3 = create_socket() + transport_3, protocol_3 = loop.run_until_complete( + loop.create_datagram_endpoint(Protocol, sock=socket_3) + ) + addr_3 = socket_3.getsockname() + transport_3.abort() + + transport_1.sendto(b'a', addr=addr_2) + self.assertEqual(loop.run_until_complete( + protocol_2.wait_for_datagram_received() + ), b'a') + + transport_2.sendto(b'b', addr=addr_1) + self.assertEqual(loop.run_until_complete( + protocol_1.wait_for_datagram_received() + ), b'b') + + # this should send to an address that isn't listening + transport_1.sendto(b'c', addr=addr_3) + loop.run_until_complete(asyncio.sleep(0)) + + # transport 1 should still be able to receive messages after sending to + # an address that wasn't listening + transport_2.sendto(b'd', addr=addr_1) + self.assertEqual(loop.run_until_complete( + protocol_1.wait_for_datagram_received() + ), b'd') + + transport_1.close() + transport_2.close() + def test_internal_fds(self): loop = self.create_event_loop() if not isinstance(loop, selector_events.BaseSelectorEventLoop): diff --git a/Lib/test/test_asyncio/test_sock_lowlevel.py b/Lib/test/test_asyncio/test_sock_lowlevel.py index 075113cbe8e4a6..acef24a703ba38 100644 --- a/Lib/test/test_asyncio/test_sock_lowlevel.py +++ b/Lib/test/test_asyncio/test_sock_lowlevel.py @@ -555,12 +555,93 @@ class SelectEventLoopTests(BaseSockTestsMixin, def create_event_loop(self): return asyncio.SelectorEventLoop() + class ProactorEventLoopTests(BaseSockTestsMixin, test_utils.TestCase): def create_event_loop(self): return asyncio.ProactorEventLoop() + + 
async def _basetest_datagram_send_to_non_listening_address(self, + recvfrom): + # see: + # https://github.com/python/cpython/issues/91227 + # https://github.com/python/cpython/issues/88906 + # https://bugs.python.org/issue47071 + # https://bugs.python.org/issue44743 + # The Proactor event loop would fail to receive datagram messages + # after sending a message to an address that wasn't listening. + + def create_socket(): + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.setblocking(False) + sock.bind(('127.0.0.1', 0)) + return sock + + socket_1 = create_socket() + addr_1 = socket_1.getsockname() + + socket_2 = create_socket() + addr_2 = socket_2.getsockname() + + # creating and immediately closing this to try to get an address + # that is not listening + socket_3 = create_socket() + addr_3 = socket_3.getsockname() + socket_3.shutdown(socket.SHUT_RDWR) + socket_3.close() + + socket_1_recv_task = self.loop.create_task(recvfrom(socket_1)) + socket_2_recv_task = self.loop.create_task(recvfrom(socket_2)) + await asyncio.sleep(0) + + await self.loop.sock_sendto(socket_1, b'a', addr_2) + self.assertEqual(await socket_2_recv_task, b'a') + + await self.loop.sock_sendto(socket_2, b'b', addr_1) + self.assertEqual(await socket_1_recv_task, b'b') + socket_1_recv_task = self.loop.create_task(recvfrom(socket_1)) + await asyncio.sleep(0) + + # this should send to an address that isn't listening + await self.loop.sock_sendto(socket_1, b'c', addr_3) + self.assertEqual(await socket_1_recv_task, b'') + socket_1_recv_task = self.loop.create_task(recvfrom(socket_1)) + await asyncio.sleep(0) + + # socket 1 should still be able to receive messages after sending + # to an address that wasn't listening + socket_2.sendto(b'd', addr_1) + self.assertEqual(await socket_1_recv_task, b'd') + + socket_1.shutdown(socket.SHUT_RDWR) + socket_1.close() + socket_2.shutdown(socket.SHUT_RDWR) + socket_2.close() + + + def test_datagram_send_to_non_listening_address_recvfrom(self): + async def recvfrom(socket): + data, _ = await self.loop.sock_recvfrom(socket, 4096) + return data + + self.loop.run_until_complete( + self._basetest_datagram_send_to_non_listening_address( + recvfrom)) + + + def test_datagram_send_to_non_listening_address_recvfrom_into(self): + async def recvfrom_into(socket): + buf = bytearray(4096) + length, _ = await self.loop.sock_recvfrom_into(socket, buf, + 4096) + return buf[:length] + + self.loop.run_until_complete( + self._basetest_datagram_send_to_non_listening_address( + recvfrom_into)) + else: import selectors diff --git a/Lib/test/test_asyncio/test_windows_events.py b/Lib/test/test_asyncio/test_windows_events.py index 6e6c90a247b291..0c128c599ba011 100644 --- a/Lib/test/test_asyncio/test_windows_events.py +++ b/Lib/test/test_asyncio/test_windows_events.py @@ -36,7 +36,23 @@ def data_received(self, data): self.trans.close() -class ProactorLoopCtrlC(test_utils.TestCase): +class WindowsEventsTestCase(test_utils.TestCase): + def _unraisablehook(self, unraisable): + # Storing unraisable.object can resurrect an object which is being + # finalized. Storing unraisable.exc_value creates a reference cycle. 
+ self._unraisable = unraisable + print(unraisable) + + def setUp(self): + self._prev_unraisablehook = sys.unraisablehook + self._unraisable = None + sys.unraisablehook = self._unraisablehook + + def tearDown(self): + sys.unraisablehook = self._prev_unraisablehook + self.assertIsNone(self._unraisable) + +class ProactorLoopCtrlC(WindowsEventsTestCase): def test_ctrl_c(self): @@ -58,7 +74,7 @@ def SIGINT_after_delay(): thread.join() -class ProactorMultithreading(test_utils.TestCase): +class ProactorMultithreading(WindowsEventsTestCase): def test_run_from_nonmain_thread(self): finished = False @@ -79,7 +95,7 @@ def func(): self.assertTrue(finished) -class ProactorTests(test_utils.TestCase): +class ProactorTests(WindowsEventsTestCase): def setUp(self): super().setUp() @@ -283,8 +299,32 @@ async def probe(): return "done" - -class WinPolicyTests(test_utils.TestCase): + def test_loop_restart(self): + # We're fishing for the "RuntimeError: <_overlapped.Overlapped object at XXX> + # still has pending operation at deallocation, the process may crash" error + stop = threading.Event() + def threadMain(): + while not stop.is_set(): + self.loop.call_soon_threadsafe(lambda: None) + time.sleep(0.01) + thr = threading.Thread(target=threadMain) + + # In 10 60-second runs of this test prior to the fix: + # time in seconds until failure: (none), 15.0, 6.4, (none), 7.6, 8.3, 1.7, 22.2, 23.5, 8.3 + # 10 seconds had a 50% failure rate but longer would be more costly + end_time = time.time() + 10 # Run for 10 seconds + self.loop.call_soon(thr.start) + while not self._unraisable: # Stop if we got an unraisable exc + self.loop.stop() + self.loop.run_forever() + if time.time() >= end_time: + break + + stop.set() + thr.join() + + +class WinPolicyTests(WindowsEventsTestCase): def test_selector_win_policy(self): async def main(): diff --git a/Lib/test/test_bytes.py b/Lib/test/test_bytes.py index 71bb1e75c6affd..9e1985bb3a7639 100644 --- a/Lib/test/test_bytes.py +++ b/Lib/test/test_bytes.py @@ -991,13 +991,13 @@ def test_translate(self): self.assertEqual(c, b'hllo') def test_sq_item(self): - _testcapi = import_helper.import_module('_testcapi') + _testlimitedcapi = import_helper.import_module('_testlimitedcapi') obj = self.type2test((42,)) with self.assertRaises(IndexError): - _testcapi.sequence_getitem(obj, -2) + _testlimitedcapi.sequence_getitem(obj, -2) with self.assertRaises(IndexError): - _testcapi.sequence_getitem(obj, 1) - self.assertEqual(_testcapi.sequence_getitem(obj, 0), 42) + _testlimitedcapi.sequence_getitem(obj, 1) + self.assertEqual(_testlimitedcapi.sequence_getitem(obj, 0), 42) class BytesTest(BaseBytesTest, unittest.TestCase): @@ -1256,7 +1256,7 @@ class SubBytes(bytes): class ByteArrayTest(BaseBytesTest, unittest.TestCase): type2test = bytearray - _testcapi = import_helper.import_module('_testcapi') + _testlimitedcapi = import_helper.import_module('_testlimitedcapi') def test_getitem_error(self): b = bytearray(b'python') @@ -1354,7 +1354,7 @@ def setitem_as_mapping(b, i, val): b[i] = val def setitem_as_sequence(b, i, val): - self._testcapi.sequence_setitem(b, i, val) + self._testlimitedcapi.sequence_setitem(b, i, val) def do_tests(setitem): b = bytearray([1, 2, 3]) @@ -1401,7 +1401,7 @@ def del_as_mapping(b, i): del b[i] def del_as_sequence(b, i): - self._testcapi.sequence_delitem(b, i) + self._testlimitedcapi.sequence_delitem(b, i) def do_tests(delete): b = bytearray(range(10)) @@ -1810,7 +1810,7 @@ def __index__(self): with self.subTest("tp_as_sequence"): b = bytearray(b'Now you see me...') with 
self.assertRaises(IndexError): - self._testcapi.sequence_setitem(b, 0, Boom()) + self._testlimitedcapi.sequence_setitem(b, 0, Boom()) class AssortedBytesTest(unittest.TestCase): diff --git a/Lib/test/test_capi/test_abstract.py b/Lib/test/test_capi/test_abstract.py index 97ed939928c360..bc39036e90bf8b 100644 --- a/Lib/test/test_capi/test_abstract.py +++ b/Lib/test/test_capi/test_abstract.py @@ -4,6 +4,7 @@ from test.support import import_helper _testcapi = import_helper.import_module('_testcapi') +_testlimitedcapi = import_helper.import_module('_testlimitedcapi') from _testcapi import PY_SSIZE_T_MIN, PY_SSIZE_T_MAX NULL = None @@ -74,7 +75,7 @@ def assertTypedEqual(self, actual, expected): def test_object_str(self): # Test PyObject_Str() - object_str = _testcapi.object_str + object_str = _testlimitedcapi.object_str self.assertTypedEqual(object_str(''), '') self.assertTypedEqual(object_str('abc'), 'abc') self.assertTypedEqual(object_str('\U0001f40d'), '\U0001f40d') @@ -87,7 +88,7 @@ def test_object_str(self): def test_object_repr(self): # Test PyObject_Repr() - object_repr = _testcapi.object_repr + object_repr = _testlimitedcapi.object_repr self.assertTypedEqual(object_repr(''), "''") self.assertTypedEqual(object_repr('abc'), "'abc'") self.assertTypedEqual(object_repr('\U0001f40d'), "'\U0001f40d'") @@ -100,7 +101,7 @@ def test_object_repr(self): def test_object_ascii(self): # Test PyObject_ASCII() - object_ascii = _testcapi.object_ascii + object_ascii = _testlimitedcapi.object_ascii self.assertTypedEqual(object_ascii(''), "''") self.assertTypedEqual(object_ascii('abc'), "'abc'") self.assertTypedEqual(object_ascii('\U0001f40d'), r"'\U0001f40d'") @@ -113,7 +114,7 @@ def test_object_ascii(self): def test_object_bytes(self): # Test PyObject_Bytes() - object_bytes = _testcapi.object_bytes + object_bytes = _testlimitedcapi.object_bytes self.assertTypedEqual(object_bytes(b''), b'') self.assertTypedEqual(object_bytes(b'abc'), b'abc') self.assertTypedEqual(object_bytes(BytesSubclass(b'abc')), b'abc') @@ -132,7 +133,7 @@ def test_object_bytes(self): self.assertTypedEqual(object_bytes(NULL), b'') def test_object_getattr(self): - xgetattr = _testcapi.object_getattr + xgetattr = _testlimitedcapi.object_getattr obj = TestObject() obj.a = 11 setattr(obj, '\U0001f40d', 22) @@ -146,7 +147,7 @@ def test_object_getattr(self): # CRASHES xgetattr(NULL, 'a') def test_object_getattrstring(self): - getattrstring = _testcapi.object_getattrstring + getattrstring = _testlimitedcapi.object_getattrstring obj = TestObject() obj.a = 11 setattr(obj, '\U0001f40d', 22) @@ -188,7 +189,7 @@ def test_object_getoptionalattrstring(self): # CRASHES getoptionalattrstring(NULL, b'a') def test_object_hasattr(self): - xhasattr = _testcapi.object_hasattr + xhasattr = _testlimitedcapi.object_hasattr obj = TestObject() obj.a = 1 setattr(obj, '\U0001f40d', 2) @@ -212,7 +213,7 @@ def test_object_hasattr(self): # CRASHES xhasattr(NULL, 'a') def test_object_hasattrstring(self): - hasattrstring = _testcapi.object_hasattrstring + hasattrstring = _testlimitedcapi.object_hasattrstring obj = TestObject() obj.a = 1 setattr(obj, '\U0001f40d', 2) @@ -264,7 +265,7 @@ def test_object_hasattrstringwitherror(self): # CRASHES hasattrstring(NULL, b'a') def test_object_setattr(self): - xsetattr = _testcapi.object_setattr + xsetattr = _testlimitedcapi.object_setattr obj = TestObject() xsetattr(obj, 'a', 5) self.assertEqual(obj.a, 5) @@ -284,7 +285,7 @@ def test_object_setattr(self): # CRASHES xsetattr(NULL, 'a', 5) def test_object_setattrstring(self): - 
setattrstring = _testcapi.object_setattrstring + setattrstring = _testlimitedcapi.object_setattrstring obj = TestObject() setattrstring(obj, b'a', 5) self.assertEqual(obj.a, 5) @@ -305,7 +306,7 @@ def test_object_setattrstring(self): # CRASHES setattrstring(NULL, b'a', 5) def test_object_delattr(self): - xdelattr = _testcapi.object_delattr + xdelattr = _testlimitedcapi.object_delattr obj = TestObject() obj.a = 1 setattr(obj, '\U0001f40d', 2) @@ -322,7 +323,7 @@ def test_object_delattr(self): # CRASHES xdelattr(NULL, 'a') def test_object_delattrstring(self): - delattrstring = _testcapi.object_delattrstring + delattrstring = _testlimitedcapi.object_delattrstring obj = TestObject() obj.a = 1 setattr(obj, '\U0001f40d', 2) @@ -340,7 +341,7 @@ def test_object_delattrstring(self): def test_mapping_check(self): - check = _testcapi.mapping_check + check = _testlimitedcapi.mapping_check self.assertTrue(check({1: 2})) self.assertTrue(check([1, 2])) self.assertTrue(check((1, 2))) @@ -351,7 +352,7 @@ def test_mapping_check(self): self.assertFalse(check(NULL)) def test_mapping_size(self): - for size in _testcapi.mapping_size, _testcapi.mapping_length: + for size in _testlimitedcapi.mapping_size, _testlimitedcapi.mapping_length: self.assertEqual(size({1: 2}), 1) self.assertEqual(size([1, 2]), 2) self.assertEqual(size((1, 2)), 2) @@ -363,7 +364,7 @@ def test_mapping_size(self): self.assertRaises(SystemError, size, NULL) def test_object_getitem(self): - getitem = _testcapi.object_getitem + getitem = _testlimitedcapi.object_getitem dct = {'a': 1, '\U0001f40d': 2} self.assertEqual(getitem(dct, 'a'), 1) self.assertRaises(KeyError, getitem, dct, 'b') @@ -383,7 +384,7 @@ def test_object_getitem(self): self.assertRaises(SystemError, getitem, NULL, 'a') def test_mapping_getitemstring(self): - getitemstring = _testcapi.mapping_getitemstring + getitemstring = _testlimitedcapi.mapping_getitemstring dct = {'a': 1, '\U0001f40d': 2} self.assertEqual(getitemstring(dct, b'a'), 1) self.assertRaises(KeyError, getitemstring, dct, b'b') @@ -437,7 +438,7 @@ def test_mapping_getoptionalitemstring(self): # CRASHES getitemstring(NULL, b'a') def test_mapping_haskey(self): - haskey = _testcapi.mapping_haskey + haskey = _testlimitedcapi.mapping_haskey dct = {'a': 1, '\U0001f40d': 2} self.assertTrue(haskey(dct, 'a')) self.assertFalse(haskey(dct, 'b')) @@ -486,7 +487,7 @@ def test_mapping_haskey(self): 'null argument to internal routine') def test_mapping_haskeystring(self): - haskeystring = _testcapi.mapping_haskeystring + haskeystring = _testlimitedcapi.mapping_haskeystring dct = {'a': 1, '\U0001f40d': 2} self.assertTrue(haskeystring(dct, b'a')) self.assertFalse(haskeystring(dct, b'b')) @@ -527,7 +528,7 @@ def test_mapping_haskeystring(self): "null argument to internal routine") def test_mapping_haskeywitherror(self): - haskey = _testcapi.mapping_haskeywitherror + haskey = _testlimitedcapi.mapping_haskeywitherror dct = {'a': 1, '\U0001f40d': 2} self.assertTrue(haskey(dct, 'a')) self.assertFalse(haskey(dct, 'b')) @@ -548,7 +549,7 @@ def test_mapping_haskeywitherror(self): # CRASHES haskey(NULL, 'a')) def test_mapping_haskeystringwitherror(self): - haskeystring = _testcapi.mapping_haskeystringwitherror + haskeystring = _testlimitedcapi.mapping_haskeystringwitherror dct = {'a': 1, '\U0001f40d': 2} self.assertTrue(haskeystring(dct, b'a')) self.assertFalse(haskeystring(dct, b'b')) @@ -565,7 +566,7 @@ def test_mapping_haskeystringwitherror(self): # CRASHES haskeystring(NULL, b'a') def test_object_setitem(self): - setitem = 
_testcapi.object_setitem + setitem = _testlimitedcapi.object_setitem dct = {} setitem(dct, 'a', 5) self.assertEqual(dct, {'a': 5}) @@ -591,7 +592,7 @@ def test_object_setitem(self): self.assertRaises(SystemError, setitem, NULL, 'a', 5) def test_mapping_setitemstring(self): - setitemstring = _testcapi.mapping_setitemstring + setitemstring = _testlimitedcapi.mapping_setitemstring dct = {} setitemstring(dct, b'a', 5) self.assertEqual(dct, {'a': 5}) @@ -611,7 +612,7 @@ def test_mapping_setitemstring(self): self.assertRaises(SystemError, setitemstring, NULL, b'a', 5) def test_object_delitem(self): - for delitem in _testcapi.object_delitem, _testcapi.mapping_delitem: + for delitem in _testlimitedcapi.object_delitem, _testlimitedcapi.mapping_delitem: dct = {'a': 1, 'c': 2, '\U0001f40d': 3} delitem(dct, 'a') self.assertEqual(dct, {'c': 2, '\U0001f40d': 3}) @@ -637,7 +638,7 @@ def test_object_delitem(self): self.assertRaises(SystemError, delitem, NULL, 'a') def test_mapping_delitemstring(self): - delitemstring = _testcapi.mapping_delitemstring + delitemstring = _testlimitedcapi.mapping_delitemstring dct = {'a': 1, 'c': 2, '\U0001f40d': 3} delitemstring(dct, b'a') self.assertEqual(dct, {'c': 2, '\U0001f40d': 3}) @@ -677,23 +678,23 @@ def items(self): for mapping in [{}, OrderedDict(), Mapping1(), Mapping2(), dict_obj, OrderedDict(dict_obj), Mapping1(dict_obj), Mapping2(dict_obj)]: - self.assertListEqual(_testcapi.mapping_keys(mapping), + self.assertListEqual(_testlimitedcapi.mapping_keys(mapping), list(mapping.keys())) - self.assertListEqual(_testcapi.mapping_values(mapping), + self.assertListEqual(_testlimitedcapi.mapping_values(mapping), list(mapping.values())) - self.assertListEqual(_testcapi.mapping_items(mapping), + self.assertListEqual(_testlimitedcapi.mapping_items(mapping), list(mapping.items())) def test_mapping_keys_valuesitems_bad_arg(self): - self.assertRaises(AttributeError, _testcapi.mapping_keys, object()) - self.assertRaises(AttributeError, _testcapi.mapping_values, object()) - self.assertRaises(AttributeError, _testcapi.mapping_items, object()) - self.assertRaises(AttributeError, _testcapi.mapping_keys, []) - self.assertRaises(AttributeError, _testcapi.mapping_values, []) - self.assertRaises(AttributeError, _testcapi.mapping_items, []) - self.assertRaises(SystemError, _testcapi.mapping_keys, NULL) - self.assertRaises(SystemError, _testcapi.mapping_values, NULL) - self.assertRaises(SystemError, _testcapi.mapping_items, NULL) + self.assertRaises(AttributeError, _testlimitedcapi.mapping_keys, object()) + self.assertRaises(AttributeError, _testlimitedcapi.mapping_values, object()) + self.assertRaises(AttributeError, _testlimitedcapi.mapping_items, object()) + self.assertRaises(AttributeError, _testlimitedcapi.mapping_keys, []) + self.assertRaises(AttributeError, _testlimitedcapi.mapping_values, []) + self.assertRaises(AttributeError, _testlimitedcapi.mapping_items, []) + self.assertRaises(SystemError, _testlimitedcapi.mapping_keys, NULL) + self.assertRaises(SystemError, _testlimitedcapi.mapping_values, NULL) + self.assertRaises(SystemError, _testlimitedcapi.mapping_items, NULL) class BadMapping: def keys(self): @@ -703,12 +704,12 @@ def values(self): def items(self): return None bad_mapping = BadMapping() - self.assertRaises(TypeError, _testcapi.mapping_keys, bad_mapping) - self.assertRaises(TypeError, _testcapi.mapping_values, bad_mapping) - self.assertRaises(TypeError, _testcapi.mapping_items, bad_mapping) + self.assertRaises(TypeError, _testlimitedcapi.mapping_keys, bad_mapping) + 
self.assertRaises(TypeError, _testlimitedcapi.mapping_values, bad_mapping) + self.assertRaises(TypeError, _testlimitedcapi.mapping_items, bad_mapping) def test_sequence_check(self): - check = _testcapi.sequence_check + check = _testlimitedcapi.sequence_check self.assertFalse(check({1: 2})) self.assertTrue(check([1, 2])) self.assertTrue(check((1, 2))) @@ -719,7 +720,7 @@ def test_sequence_check(self): # CRASHES check(NULL) def test_sequence_size(self): - for size in _testcapi.sequence_size, _testcapi.sequence_length: + for size in _testlimitedcapi.sequence_size, _testlimitedcapi.sequence_length: self.assertEqual(size([1, 2]), 2) self.assertEqual(size((1, 2)), 2) self.assertEqual(size('abc'), 3) @@ -731,7 +732,7 @@ def test_sequence_size(self): self.assertRaises(SystemError, size, NULL) def test_sequence_getitem(self): - getitem = _testcapi.sequence_getitem + getitem = _testlimitedcapi.sequence_getitem lst = ['a', 'b', 'c'] self.assertEqual(getitem(lst, 1), 'b') self.assertEqual(getitem(lst, -1), 'c') @@ -744,7 +745,7 @@ def test_sequence_getitem(self): self.assertRaises(SystemError, getitem, NULL, 1) def test_sequence_concat(self): - concat = _testcapi.sequence_concat + concat = _testlimitedcapi.sequence_concat self.assertEqual(concat(['a', 'b'], [1, 2]), ['a', 'b', 1, 2]) self.assertEqual(concat(('a', 'b'), (1, 2)), ('a', 'b', 1, 2)) @@ -757,7 +758,7 @@ def test_sequence_concat(self): self.assertRaises(SystemError, concat, NULL, []) def test_sequence_repeat(self): - repeat = _testcapi.sequence_repeat + repeat = _testlimitedcapi.sequence_repeat self.assertEqual(repeat(['a', 'b'], 2), ['a', 'b', 'a', 'b']) self.assertEqual(repeat(('a', 'b'), 2), ('a', 'b', 'a', 'b')) self.assertEqual(repeat(['a', 'b'], 0), []) @@ -771,7 +772,7 @@ def test_sequence_repeat(self): self.assertRaises(SystemError, repeat, NULL, 2) def test_sequence_inplaceconcat(self): - inplaceconcat = _testcapi.sequence_inplaceconcat + inplaceconcat = _testlimitedcapi.sequence_inplaceconcat lst = ['a', 'b'] res = inplaceconcat(lst, [1, 2]) self.assertEqual(res, ['a', 'b', 1, 2]) @@ -790,7 +791,7 @@ def test_sequence_inplaceconcat(self): self.assertRaises(SystemError, inplaceconcat, NULL, []) def test_sequence_inplacerepeat(self): - inplacerepeat = _testcapi.sequence_inplacerepeat + inplacerepeat = _testlimitedcapi.sequence_inplacerepeat lst = ['a', 'b'] res = inplacerepeat(lst, 2) self.assertEqual(res, ['a', 'b', 'a', 'b']) @@ -807,7 +808,7 @@ def test_sequence_inplacerepeat(self): self.assertRaises(SystemError, inplacerepeat, NULL, 2) def test_sequence_setitem(self): - setitem = _testcapi.sequence_setitem + setitem = _testlimitedcapi.sequence_setitem lst = ['a', 'b', 'c'] setitem(lst, 1, 'x') self.assertEqual(lst, ['a', 'x', 'c']) @@ -825,7 +826,7 @@ def test_sequence_setitem(self): self.assertRaises(SystemError, setitem, NULL, 1, 'x') def test_sequence_delitem(self): - delitem = _testcapi.sequence_delitem + delitem = _testlimitedcapi.sequence_delitem lst = ['a', 'b', 'c'] delitem(lst, 1) self.assertEqual(lst, ['a', 'c']) @@ -840,7 +841,7 @@ def test_sequence_delitem(self): self.assertRaises(SystemError, delitem, NULL, 1) def test_sequence_setslice(self): - setslice = _testcapi.sequence_setslice + setslice = _testlimitedcapi.sequence_setslice # Correct case: for start in [*range(-6, 7), PY_SSIZE_T_MIN, PY_SSIZE_T_MAX]: @@ -882,7 +883,7 @@ def __setitem__(self, index, value): self.assertRaises(SystemError, setslice, NULL, 1, 3, 'xy') def test_sequence_delslice(self): - delslice = _testcapi.sequence_delslice + delslice = 
_testlimitedcapi.sequence_delslice # Correct case: for start in [*range(-6, 7), PY_SSIZE_T_MIN, PY_SSIZE_T_MAX]: @@ -920,7 +921,7 @@ def __delitem__(self, index): self.assertEqual(mapping, {1: 'a', 2: 'b', 3: 'c'}) def test_sequence_count(self): - count = _testcapi.sequence_count + count = _testlimitedcapi.sequence_count lst = ['a', 'b', 'a'] self.assertEqual(count(lst, 'a'), 2) @@ -935,7 +936,7 @@ def test_sequence_count(self): self.assertRaises(SystemError, count, NULL, 'a') def test_sequence_contains(self): - contains = _testcapi.sequence_contains + contains = _testlimitedcapi.sequence_contains lst = ['a', 'b', 'a'] self.assertEqual(contains(lst, 'a'), 1) @@ -954,7 +955,7 @@ def test_sequence_contains(self): # CRASHES contains(NULL, 'a') def test_sequence_index(self): - index = _testcapi.sequence_index + index = _testlimitedcapi.sequence_index lst = ['a', 'b', 'a'] self.assertEqual(index(lst, 'a'), 0) @@ -974,7 +975,7 @@ def test_sequence_index(self): self.assertRaises(SystemError, index, NULL, 'a') def test_sequence_list(self): - xlist = _testcapi.sequence_list + xlist = _testlimitedcapi.sequence_list self.assertEqual(xlist(['a', 'b', 'c']), ['a', 'b', 'c']) self.assertEqual(xlist(('a', 'b', 'c')), ['a', 'b', 'c']) self.assertEqual(xlist(iter(['a', 'b', 'c'])), ['a', 'b', 'c']) @@ -984,7 +985,7 @@ def test_sequence_list(self): self.assertRaises(SystemError, xlist, NULL) def test_sequence_tuple(self): - xtuple = _testcapi.sequence_tuple + xtuple = _testlimitedcapi.sequence_tuple self.assertEqual(xtuple(['a', 'b', 'c']), ('a', 'b', 'c')) self.assertEqual(xtuple(('a', 'b', 'c')), ('a', 'b', 'c')) self.assertEqual(xtuple(iter(['a', 'b', 'c'])), ('a', 'b', 'c')) @@ -994,12 +995,18 @@ def test_sequence_tuple(self): self.assertRaises(SystemError, xtuple, NULL) def test_number_check(self): - number_check = _testcapi.number_check + number_check = _testlimitedcapi.number_check self.assertTrue(number_check(1 + 1j)) self.assertTrue(number_check(1)) self.assertTrue(number_check(0.5)) self.assertFalse(number_check("1 + 1j")) + def test_object_generichash(self): + # Test PyObject_GenericHash() + generichash = _testcapi.object_generichash + for obj in object(), 1, 'string', []: + self.assertEqual(generichash(obj), object.__hash__(obj)) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_capi/test_bytes.py b/Lib/test/test_capi/test_bytes.py index a2ba7708f8fd26..f14d5545c829e5 100644 --- a/Lib/test/test_capi/test_bytes.py +++ b/Lib/test/test_capi/test_bytes.py @@ -2,6 +2,7 @@ from test.support import import_helper _testlimitedcapi = import_helper.import_module('_testlimitedcapi') +_testcapi = import_helper.import_module('_testcapi') from _testcapi import PY_SSIZE_T_MIN, PY_SSIZE_T_MAX NULL = None @@ -217,6 +218,35 @@ def test_decodeescape(self): # CRASHES decodeescape(b'abc', NULL, -1) # CRASHES decodeescape(NULL, NULL, 1) + def test_resize(self): + """Test _PyBytes_Resize()""" + resize = _testcapi.bytes_resize + + for new in True, False: + self.assertEqual(resize(b'abc', 0, new), b'') + self.assertEqual(resize(b'abc', 1, new), b'a') + self.assertEqual(resize(b'abc', 2, new), b'ab') + self.assertEqual(resize(b'abc', 3, new), b'abc') + b = resize(b'abc', 4, new) + self.assertEqual(len(b), 4) + self.assertEqual(b[:3], b'abc') + + self.assertEqual(resize(b'a', 0, new), b'') + self.assertEqual(resize(b'a', 1, new), b'a') + b = resize(b'a', 2, new) + self.assertEqual(len(b), 2) + self.assertEqual(b[:1], b'a') + + self.assertEqual(resize(b'', 0, new), b'') + 
self.assertEqual(len(resize(b'', 1, new)), 1) + self.assertEqual(len(resize(b'', 2, new)), 2) + + self.assertRaises(SystemError, resize, b'abc', -1, False) + self.assertRaises(SystemError, resize, bytearray(b'abc'), 3, False) + + # CRASHES resize(NULL, 0, False) + # CRASHES resize(NULL, 3, False) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_capi/test_codecs.py b/Lib/test/test_capi/test_codecs.py index 682c56979c6dfa..bd521a509d07ec 100644 --- a/Lib/test/test_capi/test_codecs.py +++ b/Lib/test/test_capi/test_codecs.py @@ -2,7 +2,7 @@ import sys from test.support import import_helper -_testcapi = import_helper.import_module('_testcapi') +_testlimitedcapi = import_helper.import_module('_testlimitedcapi') NULL = None @@ -27,7 +27,7 @@ class CAPITest(unittest.TestCase): def test_fromencodedobject(self): """Test PyUnicode_FromEncodedObject()""" - fromencodedobject = _testcapi.unicode_fromencodedobject + fromencodedobject = _testlimitedcapi.unicode_fromencodedobject self.assertEqual(fromencodedobject(b'abc', NULL), 'abc') self.assertEqual(fromencodedobject(b'abc', 'ascii'), 'abc') @@ -52,7 +52,7 @@ def test_fromencodedobject(self): def test_decode(self): """Test PyUnicode_Decode()""" - decode = _testcapi.unicode_decode + decode = _testlimitedcapi.unicode_decode self.assertEqual(decode(b'[\xe2\x82\xac]', 'utf-8'), '[\u20ac]') self.assertEqual(decode(b'[\xa4]', 'iso8859-15'), '[\u20ac]') @@ -70,7 +70,7 @@ def test_decode(self): def test_asencodedstring(self): """Test PyUnicode_AsEncodedString()""" - asencodedstring = _testcapi.unicode_asencodedstring + asencodedstring = _testlimitedcapi.unicode_asencodedstring self.assertEqual(asencodedstring('abc', NULL), b'abc') self.assertEqual(asencodedstring('abc', 'ascii'), b'abc') @@ -93,7 +93,7 @@ def test_asencodedstring(self): def test_decodeutf8(self): """Test PyUnicode_DecodeUTF8()""" - decodeutf8 = _testcapi.unicode_decodeutf8 + decodeutf8 = _testlimitedcapi.unicode_decodeutf8 for s in ['abc', '\xa1\xa2', '\u4f60\u597d', 'a\U0001f600']: b = s.encode('utf-8') @@ -113,7 +113,7 @@ def test_decodeutf8(self): def test_decodeutf8stateful(self): """Test PyUnicode_DecodeUTF8Stateful()""" - decodeutf8stateful = _testcapi.unicode_decodeutf8stateful + decodeutf8stateful = _testlimitedcapi.unicode_decodeutf8stateful for s in ['abc', '\xa1\xa2', '\u4f60\u597d', 'a\U0001f600']: b = s.encode('utf-8') @@ -136,7 +136,7 @@ def test_decodeutf8stateful(self): def test_asutf8string(self): """Test PyUnicode_AsUTF8String()""" - asutf8string = _testcapi.unicode_asutf8string + asutf8string = _testlimitedcapi.unicode_asutf8string for s in ['abc', '\xa1\xa2', '\u4f60\u597d', 'a\U0001f600']: self.assertEqual(asutf8string(s), s.encode('utf-8')) @@ -148,7 +148,7 @@ def test_asutf8string(self): def test_decodeutf16(self): """Test PyUnicode_DecodeUTF16()""" - decodeutf16 = _testcapi.unicode_decodeutf16 + decodeutf16 = _testlimitedcapi.unicode_decodeutf16 naturalbyteorder = -1 if sys.byteorder == 'little' else 1 for s in ['abc', '\xa1\xa2', '\u4f60\u597d', 'a\U0001f600']: @@ -192,7 +192,7 @@ def test_decodeutf16(self): def test_decodeutf16stateful(self): """Test PyUnicode_DecodeUTF16Stateful()""" - decodeutf16stateful = _testcapi.unicode_decodeutf16stateful + decodeutf16stateful = _testlimitedcapi.unicode_decodeutf16stateful naturalbyteorder = -1 if sys.byteorder == 'little' else 1 for s in ['abc', '\xa1\xa2', '\u4f60\u597d', 'a\U0001f600']: @@ -238,7 +238,7 @@ def test_decodeutf16stateful(self): def test_asutf16string(self): """Test 
PyUnicode_AsUTF16String()""" - asutf16string = _testcapi.unicode_asutf16string + asutf16string = _testlimitedcapi.unicode_asutf16string for s in ['abc', '\xa1\xa2', '\u4f60\u597d', 'a\U0001f600']: self.assertEqual(asutf16string(s), s.encode('utf-16')) @@ -250,7 +250,7 @@ def test_asutf16string(self): def test_decodeutf32(self): """Test PyUnicode_DecodeUTF8()""" - decodeutf32 = _testcapi.unicode_decodeutf32 + decodeutf32 = _testlimitedcapi.unicode_decodeutf32 naturalbyteorder = -1 if sys.byteorder == 'little' else 1 for s in ['abc', '\xa1\xa2', '\u4f60\u597d', 'a\U0001f600']: @@ -290,7 +290,7 @@ def test_decodeutf32(self): def test_decodeutf32stateful(self): """Test PyUnicode_DecodeUTF32Stateful()""" - decodeutf32stateful = _testcapi.unicode_decodeutf32stateful + decodeutf32stateful = _testlimitedcapi.unicode_decodeutf32stateful naturalbyteorder = -1 if sys.byteorder == 'little' else 1 for s in ['abc', '\xa1\xa2', '\u4f60\u597d', 'a\U0001f600']: @@ -342,7 +342,7 @@ def test_decodeutf32stateful(self): def test_asutf32string(self): """Test PyUnicode_AsUTF32String()""" - asutf32string = _testcapi.unicode_asutf32string + asutf32string = _testlimitedcapi.unicode_asutf32string for s in ['abc', '\xa1\xa2', '\u4f60\u597d', 'a\U0001f600']: self.assertEqual(asutf32string(s), s.encode('utf-32')) @@ -354,7 +354,7 @@ def test_asutf32string(self): def test_decodelatin1(self): """Test PyUnicode_DecodeLatin1()""" - decodelatin1 = _testcapi.unicode_decodelatin1 + decodelatin1 = _testlimitedcapi.unicode_decodelatin1 self.assertEqual(decodelatin1(b'abc'), 'abc') self.assertEqual(decodelatin1(b'abc', 'strict'), 'abc') @@ -365,7 +365,7 @@ def test_decodelatin1(self): def test_aslatin1string(self): """Test PyUnicode_AsLatin1String()""" - aslatin1string = _testcapi.unicode_aslatin1string + aslatin1string = _testlimitedcapi.unicode_aslatin1string self.assertEqual(aslatin1string('abc'), b'abc') self.assertEqual(aslatin1string('\xa1\xa2'), b'\xa1\xa2') @@ -377,7 +377,7 @@ def test_aslatin1string(self): def test_decodeascii(self): """Test PyUnicode_DecodeASCII()""" - decodeascii = _testcapi.unicode_decodeascii + decodeascii = _testlimitedcapi.unicode_decodeascii self.assertEqual(decodeascii(b'abc'), 'abc') self.assertEqual(decodeascii(b'abc', 'strict'), 'abc') @@ -392,7 +392,7 @@ def test_decodeascii(self): def test_asasciistring(self): """Test PyUnicode_AsASCIIString()""" - asasciistring = _testcapi.unicode_asasciistring + asasciistring = _testlimitedcapi.unicode_asasciistring self.assertEqual(asasciistring('abc'), b'abc') @@ -403,7 +403,7 @@ def test_asasciistring(self): def test_decodecharmap(self): """Test PyUnicode_DecodeCharmap()""" - decodecharmap = _testcapi.unicode_decodecharmap + decodecharmap = _testlimitedcapi.unicode_decodecharmap self.assertEqual(decodecharmap(b'\3\0\7', {0: 'a', 3: 'b', 7: 'c'}), 'bac') self.assertEqual(decodecharmap(b'\1\0\2', ['a', 'b', 'c']), 'bac') @@ -426,7 +426,7 @@ def test_decodecharmap(self): def test_ascharmapstring(self): """Test PyUnicode_AsCharmapString()""" - ascharmapstring = _testcapi.unicode_ascharmapstring + ascharmapstring = _testlimitedcapi.unicode_ascharmapstring self.assertEqual(ascharmapstring('abc', {97: 3, 98: 0, 99: 7}), b'\3\0\7') self.assertEqual(ascharmapstring('\xa1\xa2\xa3', {0xa1: 3, 0xa2: 0, 0xa3: 7}), b'\3\0\7') @@ -443,7 +443,7 @@ def test_ascharmapstring(self): def test_decodeunicodeescape(self): """Test PyUnicode_DecodeUnicodeEscape()""" - decodeunicodeescape = _testcapi.unicode_decodeunicodeescape + decodeunicodeescape = 
_testlimitedcapi.unicode_decodeunicodeescape self.assertEqual(decodeunicodeescape(b'abc'), 'abc') self.assertEqual(decodeunicodeescape(br'\t\n\r\x0b\x0c\x00\\'), '\t\n\r\v\f\0\\') @@ -467,7 +467,7 @@ def test_decodeunicodeescape(self): def test_asunicodeescapestring(self): """Test PyUnicode_AsUnicodeEscapeString()""" - asunicodeescapestring = _testcapi.unicode_asunicodeescapestring + asunicodeescapestring = _testlimitedcapi.unicode_asunicodeescapestring self.assertEqual(asunicodeescapestring('abc'), b'abc') self.assertEqual(asunicodeescapestring('\t\n\r\v\f\0\\'), br'\t\n\r\x0b\x0c\x00\\') @@ -481,7 +481,7 @@ def test_asunicodeescapestring(self): def test_decoderawunicodeescape(self): """Test PyUnicode_DecodeRawUnicodeEscape()""" - decoderawunicodeescape = _testcapi.unicode_decoderawunicodeescape + decoderawunicodeescape = _testlimitedcapi.unicode_decoderawunicodeescape self.assertEqual(decoderawunicodeescape(b'abc'), 'abc') self.assertEqual(decoderawunicodeescape(b'\t\n\r\v\f\0\\'), '\t\n\r\v\f\0\\') @@ -503,7 +503,7 @@ def test_decoderawunicodeescape(self): def test_asrawunicodeescapestring(self): """Test PyUnicode_AsRawUnicodeEscapeString()""" - asrawunicodeescapestring = _testcapi.unicode_asrawunicodeescapestring + asrawunicodeescapestring = _testlimitedcapi.unicode_asrawunicodeescapestring self.assertEqual(asrawunicodeescapestring('abc'), b'abc') self.assertEqual(asrawunicodeescapestring('\t\n\r\v\f\0\\'), b'\t\n\r\v\f\0\\') diff --git a/Lib/test/test_capi/test_complex.py b/Lib/test/test_capi/test_complex.py index a5b59558e7f851..328ea12f97462c 100644 --- a/Lib/test/test_capi/test_complex.py +++ b/Lib/test/test_capi/test_complex.py @@ -10,6 +10,7 @@ _testcapi = import_helper.import_module('_testcapi') +_testlimitedcapi = import_helper.import_module('_testlimitedcapi') NULL = None INF = float("inf") @@ -25,7 +26,7 @@ def __complex__(self): class CAPIComplexTest(unittest.TestCase): def test_check(self): # Test PyComplex_Check() - check = _testcapi.complex_check + check = _testlimitedcapi.complex_check self.assertTrue(check(1+2j)) self.assertTrue(check(ComplexSubclass(1+2j))) @@ -38,7 +39,7 @@ def test_check(self): def test_checkexact(self): # PyComplex_CheckExact() - checkexact = _testcapi.complex_checkexact + checkexact = _testlimitedcapi.complex_checkexact self.assertTrue(checkexact(1+2j)) self.assertFalse(checkexact(ComplexSubclass(1+2j))) @@ -57,13 +58,13 @@ def test_fromccomplex(self): def test_fromdoubles(self): # Test PyComplex_FromDoubles() - fromdoubles = _testcapi.complex_fromdoubles + fromdoubles = _testlimitedcapi.complex_fromdoubles self.assertEqual(fromdoubles(1.0, 2.0), 1.0+2.0j) def test_realasdouble(self): # Test PyComplex_RealAsDouble() - realasdouble = _testcapi.complex_realasdouble + realasdouble = _testlimitedcapi.complex_realasdouble self.assertEqual(realasdouble(1+2j), 1.0) self.assertEqual(realasdouble(-1+0j), -1.0) @@ -98,7 +99,7 @@ def test_realasdouble(self): def test_imagasdouble(self): # Test PyComplex_ImagAsDouble() - imagasdouble = _testcapi.complex_imagasdouble + imagasdouble = _testlimitedcapi.complex_imagasdouble self.assertEqual(imagasdouble(1+2j), 2.0) self.assertEqual(imagasdouble(1-1j), -1.0) diff --git a/Lib/test/test_capi/test_dict.py b/Lib/test/test_capi/test_dict.py index cca6145bc90c04..bcc978d224a583 100644 --- a/Lib/test/test_capi/test_dict.py +++ b/Lib/test/test_capi/test_dict.py @@ -3,6 +3,7 @@ from types import MappingProxyType from test import support import _testcapi +import _testlimitedcapi NULL = None @@ -25,7 +26,7 @@ def gen(): class 
CAPITest(unittest.TestCase): def test_dict_check(self): - check = _testcapi.dict_check + check = _testlimitedcapi.dict_check self.assertTrue(check({1: 2})) self.assertTrue(check(OrderedDict({1: 2}))) self.assertFalse(check(UserDict({1: 2}))) @@ -34,7 +35,7 @@ def test_dict_check(self): # CRASHES check(NULL) def test_dict_checkexact(self): - check = _testcapi.dict_checkexact + check = _testlimitedcapi.dict_checkexact self.assertTrue(check({1: 2})) self.assertFalse(check(OrderedDict({1: 2}))) self.assertFalse(check(UserDict({1: 2}))) @@ -43,7 +44,7 @@ def test_dict_checkexact(self): # CRASHES check(NULL) def test_dict_new(self): - dict_new = _testcapi.dict_new + dict_new = _testlimitedcapi.dict_new dct = dict_new() self.assertEqual(dct, {}) self.assertIs(type(dct), dict) @@ -51,7 +52,7 @@ def test_dict_new(self): self.assertIsNot(dct2, dct) def test_dictproxy_new(self): - dictproxy_new = _testcapi.dictproxy_new + dictproxy_new = _testlimitedcapi.dictproxy_new for dct in {1: 2}, OrderedDict({1: 2}), UserDict({1: 2}): proxy = dictproxy_new(dct) self.assertIs(type(proxy), MappingProxyType) @@ -67,7 +68,7 @@ def test_dictproxy_new(self): # CRASHES dictproxy_new(NULL) def test_dict_copy(self): - copy = _testcapi.dict_copy + copy = _testlimitedcapi.dict_copy for dct in {1: 2}, OrderedDict({1: 2}): dct_copy = copy(dct) self.assertIs(type(dct_copy), dict) @@ -79,7 +80,7 @@ def test_dict_copy(self): self.assertRaises(SystemError, copy, NULL) def test_dict_clear(self): - clear = _testcapi.dict_clear + clear = _testlimitedcapi.dict_clear dct = {1: 2} clear(dct) self.assertEqual(dct, {}) @@ -98,7 +99,7 @@ def test_dict_clear(self): # CRASHES? clear(NULL) def test_dict_size(self): - size = _testcapi.dict_size + size = _testlimitedcapi.dict_size self.assertEqual(size({1: 2}), 1) self.assertEqual(size(OrderedDict({1: 2})), 1) @@ -109,7 +110,7 @@ def test_dict_size(self): self.assertRaises(SystemError, size, NULL) def test_dict_getitem(self): - getitem = _testcapi.dict_getitem + getitem = _testlimitedcapi.dict_getitem dct = {'a': 1, '\U0001f40d': 2} self.assertEqual(getitem(dct, 'a'), 1) self.assertIs(getitem(dct, 'b'), KeyError) @@ -131,7 +132,7 @@ def test_dict_getitem(self): # CRASHES getitem(NULL, 'a') def test_dict_getitemstring(self): - getitemstring = _testcapi.dict_getitemstring + getitemstring = _testlimitedcapi.dict_getitemstring dct = {'a': 1, '\U0001f40d': 2} self.assertEqual(getitemstring(dct, b'a'), 1) self.assertIs(getitemstring(dct, b'b'), KeyError) @@ -188,7 +189,7 @@ def test_dict_getitemstringref(self): # CRASHES getitemstring(NULL, b'a') def test_dict_getitemwitherror(self): - getitem = _testcapi.dict_getitemwitherror + getitem = _testlimitedcapi.dict_getitemwitherror dct = {'a': 1, '\U0001f40d': 2} self.assertEqual(getitem(dct, 'a'), 1) self.assertIs(getitem(dct, 'b'), KeyError) @@ -206,7 +207,7 @@ def test_dict_getitemwitherror(self): # CRASHES getitem(NULL, 'a') def test_dict_contains(self): - contains = _testcapi.dict_contains + contains = _testlimitedcapi.dict_contains dct = {'a': 1, '\U0001f40d': 2} self.assertTrue(contains(dct, 'a')) self.assertFalse(contains(dct, 'b')) @@ -238,7 +239,7 @@ def test_dict_contains_string(self): # CRASHES contains(NULL, b'a') def test_dict_setitem(self): - setitem = _testcapi.dict_setitem + setitem = _testlimitedcapi.dict_setitem dct = {} setitem(dct, 'a', 5) self.assertEqual(dct, {'a': 5}) @@ -258,7 +259,7 @@ def test_dict_setitem(self): # CRASHES setitem(NULL, 'a', 5) def test_dict_setitemstring(self): - setitemstring = _testcapi.dict_setitemstring 
+ setitemstring = _testlimitedcapi.dict_setitemstring dct = {} setitemstring(dct, b'a', 5) self.assertEqual(dct, {'a': 5}) @@ -277,7 +278,7 @@ def test_dict_setitemstring(self): # CRASHES setitemstring(NULL, b'a', 5) def test_dict_delitem(self): - delitem = _testcapi.dict_delitem + delitem = _testlimitedcapi.dict_delitem dct = {'a': 1, 'c': 2, '\U0001f40d': 3} delitem(dct, 'a') self.assertEqual(dct, {'c': 2, '\U0001f40d': 3}) @@ -298,7 +299,7 @@ def test_dict_delitem(self): # CRASHES delitem(NULL, 'a') def test_dict_delitemstring(self): - delitemstring = _testcapi.dict_delitemstring + delitemstring = _testlimitedcapi.dict_delitemstring dct = {'a': 1, 'c': 2, '\U0001f40d': 3} delitemstring(dct, b'a') self.assertEqual(dct, {'c': 2, '\U0001f40d': 3}) @@ -371,21 +372,21 @@ def items(self): return None dict_obj = {'foo': 1, 'bar': 2, 'spam': 3} for mapping in [dict_obj, DictSubclass(dict_obj), BadMapping(dict_obj)]: - self.assertListEqual(_testcapi.dict_keys(mapping), + self.assertListEqual(_testlimitedcapi.dict_keys(mapping), list(dict_obj.keys())) - self.assertListEqual(_testcapi.dict_values(mapping), + self.assertListEqual(_testlimitedcapi.dict_values(mapping), list(dict_obj.values())) - self.assertListEqual(_testcapi.dict_items(mapping), + self.assertListEqual(_testlimitedcapi.dict_items(mapping), list(dict_obj.items())) def test_dict_keys_valuesitems_bad_arg(self): for mapping in UserDict(), [], object(): - self.assertRaises(SystemError, _testcapi.dict_keys, mapping) - self.assertRaises(SystemError, _testcapi.dict_values, mapping) - self.assertRaises(SystemError, _testcapi.dict_items, mapping) + self.assertRaises(SystemError, _testlimitedcapi.dict_keys, mapping) + self.assertRaises(SystemError, _testlimitedcapi.dict_values, mapping) + self.assertRaises(SystemError, _testlimitedcapi.dict_items, mapping) def test_dict_next(self): - dict_next = _testcapi.dict_next + dict_next = _testlimitedcapi.dict_next self.assertIsNone(dict_next({}, 0)) dct = {'a': 1, 'b': 2, 'c': 3} pos = 0 @@ -402,7 +403,7 @@ def test_dict_next(self): # CRASHES dict_next(NULL, 0) def test_dict_update(self): - update = _testcapi.dict_update + update = _testlimitedcapi.dict_update for cls1 in dict, DictSubclass: for cls2 in dict, DictSubclass, UserDict: dct = cls1({'a': 1, 'b': 2}) @@ -417,7 +418,7 @@ def test_dict_update(self): self.assertRaises(SystemError, update, NULL, {}) def test_dict_merge(self): - merge = _testcapi.dict_merge + merge = _testlimitedcapi.dict_merge for cls1 in dict, DictSubclass: for cls2 in dict, DictSubclass, UserDict: dct = cls1({'a': 1, 'b': 2}) @@ -435,7 +436,7 @@ def test_dict_merge(self): self.assertRaises(SystemError, merge, NULL, {}, 0) def test_dict_mergefromseq2(self): - mergefromseq2 = _testcapi.dict_mergefromseq2 + mergefromseq2 = _testlimitedcapi.dict_mergefromseq2 for cls1 in dict, DictSubclass: for cls2 in list, iter: dct = cls1({'a': 1, 'b': 2}) diff --git a/Lib/test/test_capi/test_float.py b/Lib/test/test_capi/test_float.py index cb94d562645916..92c987794142c9 100644 --- a/Lib/test/test_capi/test_float.py +++ b/Lib/test/test_capi/test_float.py @@ -9,6 +9,7 @@ from test.support import import_helper _testcapi = import_helper.import_module('_testcapi') +_testlimitedcapi = import_helper.import_module('_testlimitedcapi') NULL = None @@ -29,7 +30,7 @@ class CAPIFloatTest(unittest.TestCase): def test_check(self): # Test PyFloat_Check() - check = _testcapi.float_check + check = _testlimitedcapi.float_check self.assertTrue(check(4.25)) self.assertTrue(check(FloatSubclass(4.25))) @@ -41,7 
+42,7 @@ def test_check(self): def test_checkexact(self): # Test PyFloat_CheckExact() - checkexact = _testcapi.float_checkexact + checkexact = _testlimitedcapi.float_checkexact self.assertTrue(checkexact(4.25)) self.assertFalse(checkexact(FloatSubclass(4.25))) @@ -53,7 +54,7 @@ def test_checkexact(self): def test_fromstring(self): # Test PyFloat_FromString() - fromstring = _testcapi.float_fromstring + fromstring = _testlimitedcapi.float_fromstring self.assertEqual(fromstring("4.25"), 4.25) self.assertEqual(fromstring(b"4.25"), 4.25) @@ -72,13 +73,13 @@ def test_fromstring(self): def test_fromdouble(self): # Test PyFloat_FromDouble() - fromdouble = _testcapi.float_fromdouble + fromdouble = _testlimitedcapi.float_fromdouble self.assertEqual(fromdouble(4.25), 4.25) def test_asdouble(self): # Test PyFloat_AsDouble() - asdouble = _testcapi.float_asdouble + asdouble = _testlimitedcapi.float_asdouble class BadFloat3: def __float__(self): @@ -109,19 +110,19 @@ def __float__(self): def test_getinfo(self): # Test PyFloat_GetInfo() - getinfo = _testcapi.float_getinfo + getinfo = _testlimitedcapi.float_getinfo self.assertEqual(getinfo(), sys.float_info) def test_getmax(self): # Test PyFloat_GetMax() - getmax = _testcapi.float_getmax + getmax = _testlimitedcapi.float_getmax self.assertEqual(getmax(), sys.float_info.max) def test_getmin(self): # Test PyFloat_GetMax() - getmin = _testcapi.float_getmin + getmin = _testlimitedcapi.float_getmin self.assertEqual(getmin(), sys.float_info.min) diff --git a/Lib/test/test_capi/test_list.py b/Lib/test/test_capi/test_list.py index dceb4fce3c077b..0896a971f5c727 100644 --- a/Lib/test/test_capi/test_list.py +++ b/Lib/test/test_capi/test_list.py @@ -4,6 +4,7 @@ from test.support import import_helper from collections import UserList _testcapi = import_helper.import_module('_testcapi') +_testlimitedcapi = import_helper.import_module('_testlimitedcapi') NULL = None PY_SSIZE_T_MIN = _testcapi.PY_SSIZE_T_MIN @@ -25,7 +26,7 @@ def __del__(self): class CAPITest(unittest.TestCase): def test_check(self): # Test PyList_Check() - check = _testcapi.list_check + check = _testlimitedcapi.list_check self.assertTrue(check([1, 2])) self.assertTrue(check([])) self.assertTrue(check(ListSubclass([1, 2]))) @@ -39,7 +40,7 @@ def test_check(self): def test_list_check_exact(self): # Test PyList_CheckExact() - check = _testcapi.list_check_exact + check = _testlimitedcapi.list_check_exact self.assertTrue(check([1])) self.assertTrue(check([])) self.assertFalse(check(ListSubclass([1]))) @@ -51,7 +52,7 @@ def test_list_check_exact(self): def test_list_new(self): # Test PyList_New() - list_new = _testcapi.list_new + list_new = _testlimitedcapi.list_new lst = list_new(0) self.assertEqual(lst, []) self.assertIs(type(lst), list) @@ -62,7 +63,7 @@ def test_list_new(self): def test_list_size(self): # Test PyList_Size() - size = _testcapi.list_size + size = _testlimitedcapi.list_size self.assertEqual(size([1, 2]), 2) self.assertEqual(size(ListSubclass([1, 2])), 2) self.assertRaises(SystemError, size, UserList()) @@ -98,11 +99,11 @@ def check_list_get_item(self, getitem, exctype): def test_list_getitem(self): # Test PyList_GetItem() - self.check_list_get_item(_testcapi.list_getitem, SystemError) + self.check_list_get_item(_testlimitedcapi.list_getitem, SystemError) def test_list_get_item_ref(self): # Test PyList_GetItemRef() - self.check_list_get_item(_testcapi.list_get_item_ref, TypeError) + self.check_list_get_item(_testlimitedcapi.list_get_item_ref, TypeError) def test_list_get_item(self): # Test 
PyList_GET_ITEM() @@ -119,7 +120,7 @@ def test_list_get_item(self): def test_list_setitem(self): # Test PyList_SetItem() - setitem = _testcapi.list_setitem + setitem = _testlimitedcapi.list_setitem lst = [1, 2, 3] setitem(lst, 0, 10) self.assertEqual(lst, [10, 2, 3]) @@ -151,7 +152,7 @@ def test_list_set_item(self): def test_list_insert(self): # Test PyList_Insert() - insert = _testcapi.list_insert + insert = _testlimitedcapi.list_insert lst = [1, 2, 3] insert(lst, 0, 23) self.assertEqual(lst, [23, 1, 2, 3]) @@ -173,7 +174,7 @@ def test_list_insert(self): def test_list_append(self): # Test PyList_Append() - append = _testcapi.list_append + append = _testlimitedcapi.list_append lst = [1, 2, 3] append(lst, 10) self.assertEqual(lst, [1, 2, 3, 10]) @@ -186,7 +187,7 @@ def test_list_append(self): def test_list_getslice(self): # Test PyList_GetSlice() - getslice = _testcapi.list_getslice + getslice = _testlimitedcapi.list_getslice lst = [1, 2, 3] # empty @@ -210,7 +211,7 @@ def test_list_getslice(self): def test_list_setslice(self): # Test PyList_SetSlice() - list_setslice = _testcapi.list_setslice + list_setslice = _testlimitedcapi.list_setslice def set_slice(lst, low, high, value): lst = lst.copy() self.assertEqual(list_setslice(lst, low, high, value), 0) @@ -265,7 +266,7 @@ def set_slice(lst, low, high, value): def test_list_sort(self): # Test PyList_Sort() - sort = _testcapi.list_sort + sort = _testlimitedcapi.list_sort lst = [4, 6, 7, 3, 1, 5, 9, 2, 0, 8] sort(lst) self.assertEqual(lst, list(range(10))) @@ -281,7 +282,7 @@ def test_list_sort(self): def test_list_reverse(self): # Test PyList_Reverse() - reverse = _testcapi.list_reverse + reverse = _testlimitedcapi.list_reverse def list_reverse(lst): self.assertEqual(reverse(lst), 0) return lst @@ -295,7 +296,7 @@ def list_reverse(lst): def test_list_astuple(self): # Test PyList_AsTuple() - astuple = _testcapi.list_astuple + astuple = _testlimitedcapi.list_astuple self.assertEqual(astuple([]), ()) self.assertEqual(astuple([2, 5, 10]), (2, 5, 10)) diff --git a/Lib/test/test_capi/test_long.py b/Lib/test/test_capi/test_long.py index 9f5ee507a8eb85..d2140154d811b4 100644 --- a/Lib/test/test_capi/test_long.py +++ b/Lib/test/test_capi/test_long.py @@ -4,8 +4,9 @@ from test.support import import_helper -# Skip this test if the _testcapi module isn't available. +# Skip this test if the _testcapi and _testlimitedcapi modules isn't available. 
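Across the files touched above, the same guard appears at module level: the test helpers are imported through `test.support.import_helper`, so a build without the extension skips the whole file instead of failing with `ImportError`. A minimal sketch of that pattern (the helper names are taken from the hunks above; the test body is only an example):

```python
import unittest
from test.support import import_helper

# import_module() raises unittest.SkipTest when the C extension cannot be
# imported, so the entire test module is skipped rather than erroring out.
_testcapi = import_helper.import_module('_testcapi')
_testlimitedcapi = import_helper.import_module('_testlimitedcapi')

class ExampleTest(unittest.TestCase):
    def test_check(self):
        # Helpers that exercise limited-API functions now live in _testlimitedcapi.
        self.assertTrue(_testlimitedcapi.pylong_check(1))

if __name__ == "__main__":
    unittest.main()
```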
_testcapi = import_helper.import_module('_testcapi') +_testlimitedcapi = import_helper.import_module('_testlimitedcapi') NULL = None @@ -56,7 +57,7 @@ def test_compact_known(self): def test_long_check(self): # Test PyLong_Check() - check = _testcapi.pylong_check + check = _testlimitedcapi.pylong_check self.assertTrue(check(1)) self.assertTrue(check(123456789012345678901234567890)) self.assertTrue(check(-1)) @@ -68,7 +69,7 @@ def test_long_check(self): def test_long_checkexact(self): # Test PyLong_CheckExact() - check = _testcapi.pylong_checkexact + check = _testlimitedcapi.pylong_checkexact self.assertTrue(check(1)) self.assertTrue(check(123456789012345678901234567890)) self.assertTrue(check(-1)) @@ -80,7 +81,7 @@ def test_long_checkexact(self): def test_long_fromdouble(self): # Test PyLong_FromDouble() - fromdouble = _testcapi.pylong_fromdouble + fromdouble = _testlimitedcapi.pylong_fromdouble float_max = sys.float_info.max for value in (5.0, 5.1, 5.9, -5.1, -5.9, 0.0, -0.0, float_max, -float_max): with self.subTest(value=value): @@ -91,7 +92,7 @@ def test_long_fromdouble(self): def test_long_fromvoidptr(self): # Test PyLong_FromVoidPtr() - fromvoidptr = _testcapi.pylong_fromvoidptr + fromvoidptr = _testlimitedcapi.pylong_fromvoidptr obj = object() x = fromvoidptr(obj) y = fromvoidptr(NULL) @@ -103,7 +104,7 @@ def test_long_fromvoidptr(self): def test_long_fromstring(self): # Test PyLong_FromString() - fromstring = _testcapi.pylong_fromstring + fromstring = _testlimitedcapi.pylong_fromstring self.assertEqual(fromstring(b'123', 10), (123, 3)) self.assertEqual(fromstring(b'cafe', 16), (0xcafe, 4)) self.assertEqual(fromstring(b'xyz', 36), (44027, 3)) @@ -163,7 +164,7 @@ def test_long_fromunicodeobject(self): def test_long_asint(self): # Test PyLong_AsInt() - PyLong_AsInt = _testcapi.PyLong_AsInt + PyLong_AsInt = _testlimitedcapi.PyLong_AsInt from _testcapi import INT_MIN, INT_MAX # round trip (object -> int -> object) @@ -186,7 +187,7 @@ def test_long_asint(self): def test_long_aslong(self): # Test PyLong_AsLong() and PyLong_FromLong() - aslong = _testcapi.pylong_aslong + aslong = _testlimitedcapi.pylong_aslong from _testcapi import LONG_MIN, LONG_MAX # round trip (object -> long -> object) for value in (LONG_MIN, LONG_MAX, -1, 0, 1, 1234): @@ -206,7 +207,7 @@ def test_long_aslong(self): def test_long_aslongandoverflow(self): # Test PyLong_AsLongAndOverflow() - aslongandoverflow = _testcapi.pylong_aslongandoverflow + aslongandoverflow = _testlimitedcapi.pylong_aslongandoverflow from _testcapi import LONG_MIN, LONG_MAX # round trip (object -> long -> object) for value in (LONG_MIN, LONG_MAX, -1, 0, 1, 1234): @@ -224,7 +225,7 @@ def test_long_aslongandoverflow(self): def test_long_asunsignedlong(self): # Test PyLong_AsUnsignedLong() and PyLong_FromUnsignedLong() - asunsignedlong = _testcapi.pylong_asunsignedlong + asunsignedlong = _testlimitedcapi.pylong_asunsignedlong from _testcapi import ULONG_MAX # round trip (object -> unsigned long -> object) for value in (ULONG_MAX, 0, 1, 1234): @@ -244,7 +245,7 @@ def test_long_asunsignedlong(self): def test_long_asunsignedlongmask(self): # Test PyLong_AsUnsignedLongMask() - asunsignedlongmask = _testcapi.pylong_asunsignedlongmask + asunsignedlongmask = _testlimitedcapi.pylong_asunsignedlongmask from _testcapi import ULONG_MAX # round trip (object -> unsigned long -> object) for value in (ULONG_MAX, 0, 1, 1234): @@ -264,7 +265,7 @@ def test_long_asunsignedlongmask(self): def test_long_aslonglong(self): # Test PyLong_AsLongLong() and 
PyLong_FromLongLong() - aslonglong = _testcapi.pylong_aslonglong + aslonglong = _testlimitedcapi.pylong_aslonglong from _testcapi import LLONG_MIN, LLONG_MAX # round trip (object -> long long -> object) for value in (LLONG_MIN, LLONG_MAX, -1, 0, 1, 1234): @@ -284,7 +285,7 @@ def test_long_aslonglong(self): def test_long_aslonglongandoverflow(self): # Test PyLong_AsLongLongAndOverflow() - aslonglongandoverflow = _testcapi.pylong_aslonglongandoverflow + aslonglongandoverflow = _testlimitedcapi.pylong_aslonglongandoverflow from _testcapi import LLONG_MIN, LLONG_MAX # round trip (object -> long long -> object) for value in (LLONG_MIN, LLONG_MAX, -1, 0, 1, 1234): @@ -302,7 +303,7 @@ def test_long_aslonglongandoverflow(self): def test_long_asunsignedlonglong(self): # Test PyLong_AsUnsignedLongLong() and PyLong_FromUnsignedLongLong() - asunsignedlonglong = _testcapi.pylong_asunsignedlonglong + asunsignedlonglong = _testlimitedcapi.pylong_asunsignedlonglong from _testcapi import ULLONG_MAX # round trip (object -> unsigned long long -> object) for value in (ULLONG_MAX, 0, 1, 1234): @@ -322,7 +323,7 @@ def test_long_asunsignedlonglong(self): def test_long_asunsignedlonglongmask(self): # Test PyLong_AsUnsignedLongLongMask() - asunsignedlonglongmask = _testcapi.pylong_asunsignedlonglongmask + asunsignedlonglongmask = _testlimitedcapi.pylong_asunsignedlonglongmask from _testcapi import ULLONG_MAX # round trip (object -> unsigned long long -> object) for value in (ULLONG_MAX, 0, 1, 1234): @@ -342,7 +343,7 @@ def test_long_asunsignedlonglongmask(self): def test_long_as_ssize_t(self): # Test PyLong_AsSsize_t() and PyLong_FromSsize_t() - as_ssize_t = _testcapi.pylong_as_ssize_t + as_ssize_t = _testlimitedcapi.pylong_as_ssize_t from _testcapi import PY_SSIZE_T_MIN, PY_SSIZE_T_MAX # round trip (object -> Py_ssize_t -> object) for value in (PY_SSIZE_T_MIN, PY_SSIZE_T_MAX, -1, 0, 1, 1234): @@ -362,7 +363,7 @@ def test_long_as_ssize_t(self): def test_long_as_size_t(self): # Test PyLong_AsSize_t() and PyLong_FromSize_t() - as_size_t = _testcapi.pylong_as_size_t + as_size_t = _testlimitedcapi.pylong_as_size_t from _testcapi import SIZE_MAX # round trip (object -> size_t -> object) for value in (SIZE_MAX, 0, 1, 1234): @@ -382,7 +383,7 @@ def test_long_as_size_t(self): def test_long_asdouble(self): # Test PyLong_AsDouble() - asdouble = _testcapi.pylong_asdouble + asdouble = _testlimitedcapi.pylong_asdouble MAX = int(sys.float_info.max) for value in (-MAX, MAX, -1, 0, 1, 1234): with self.subTest(value=value): @@ -402,8 +403,8 @@ def test_long_asdouble(self): def test_long_asvoidptr(self): # Test PyLong_AsVoidPtr() - fromvoidptr = _testcapi.pylong_fromvoidptr - asvoidptr = _testcapi.pylong_asvoidptr + fromvoidptr = _testlimitedcapi.pylong_fromvoidptr + asvoidptr = _testlimitedcapi.pylong_asvoidptr obj = object() x = fromvoidptr(obj) y = fromvoidptr(NULL) @@ -424,6 +425,34 @@ def test_long_asvoidptr(self): self.assertRaises(OverflowError, asvoidptr, -2**1000) # CRASHES asvoidptr(NULL) + def _test_long_aspid(self, aspid): + # Test PyLong_AsPid() + from _testcapi import SIZEOF_PID_T + bits = 8 * SIZEOF_PID_T + PID_T_MIN = -2**(bits-1) + PID_T_MAX = 2**(bits-1) - 1 + # round trip (object -> long -> object) + for value in (PID_T_MIN, PID_T_MAX, -1, 0, 1, 1234): + with self.subTest(value=value): + self.assertEqual(aspid(value), value) + + self.assertEqual(aspid(IntSubclass(42)), 42) + self.assertEqual(aspid(Index(42)), 42) + self.assertEqual(aspid(MyIndexAndInt()), 10) + + self.assertRaises(OverflowError, aspid, PID_T_MIN 
- 1) + self.assertRaises(OverflowError, aspid, PID_T_MAX + 1) + self.assertRaises(TypeError, aspid, 1.0) + self.assertRaises(TypeError, aspid, b'2') + self.assertRaises(TypeError, aspid, '3') + self.assertRaises(SystemError, aspid, NULL) + + def test_long_aspid(self): + self._test_long_aspid(_testcapi.pylong_aspid) + + def test_long_aspid_limited(self): + self._test_long_aspid(_testlimitedcapi.pylong_aspid) + def test_long_asnativebytes(self): import math from _testcapi import ( diff --git a/Lib/test/test_capi/test_misc.py b/Lib/test/test_capi/test_misc.py index eb0bc13911701a..55a1ab6d6d9359 100644 --- a/Lib/test/test_capi/test_misc.py +++ b/Lib/test/test_capi/test_misc.py @@ -1170,7 +1170,6 @@ class MyType: self.assertEqual(get_type_fullyqualname(MyType), 'my_qualname') - @requires_limited_api class TestHeapTypeRelative(unittest.TestCase): """Test API for extending opaque types (PEP 697)""" @@ -1326,6 +1325,125 @@ def test_pyobject_getitemdata_error(self): _testcapi.pyobject_getitemdata(0) + def test_function_get_closure(self): + from types import CellType + + def regular_function(): ... + def unused_one_level(arg1): + def inner(arg2, arg3): ... + return inner + def unused_two_levels(arg1, arg2): + def decorator(arg3, arg4): + def inner(arg5, arg6): ... + return inner + return decorator + def with_one_level(arg1): + def inner(arg2, arg3): + return arg1 + arg2 + arg3 + return inner + def with_two_levels(arg1, arg2): + def decorator(arg3, arg4): + def inner(arg5, arg6): + return arg1 + arg2 + arg3 + arg4 + arg5 + arg6 + return inner + return decorator + + # Functions without closures: + self.assertIsNone(_testcapi.function_get_closure(regular_function)) + self.assertIsNone(regular_function.__closure__) + + func = unused_one_level(1) + closure = _testcapi.function_get_closure(func) + self.assertIsNone(closure) + self.assertIsNone(func.__closure__) + + func = unused_two_levels(1, 2)(3, 4) + closure = _testcapi.function_get_closure(func) + self.assertIsNone(closure) + self.assertIsNone(func.__closure__) + + # Functions with closures: + func = with_one_level(5) + closure = _testcapi.function_get_closure(func) + self.assertEqual(closure, func.__closure__) + self.assertIsInstance(closure, tuple) + self.assertEqual(len(closure), 1) + self.assertEqual(len(closure), len(func.__code__.co_freevars)) + self.assertTrue(all(isinstance(cell, CellType) for cell in closure)) + self.assertTrue(closure[0].cell_contents, 5) + + func = with_two_levels(1, 2)(3, 4) + closure = _testcapi.function_get_closure(func) + self.assertEqual(closure, func.__closure__) + self.assertIsInstance(closure, tuple) + self.assertEqual(len(closure), 4) + self.assertEqual(len(closure), len(func.__code__.co_freevars)) + self.assertTrue(all(isinstance(cell, CellType) for cell in closure)) + self.assertEqual([cell.cell_contents for cell in closure], + [1, 2, 3, 4]) + + def test_function_get_closure_error(self): + with self.assertRaises(SystemError): + _testcapi.function_get_closure(1) + with self.assertRaises(SystemError): + _testcapi.function_get_closure(None) + + def test_function_set_closure(self): + from types import CellType + + def function_without_closure(): ... 
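The closure tests above compare the C-level accessor against the pure-Python view of a closure. For reference, a small sketch of the invariants they rely on, using only the language itself: a function with no free variables has `__closure__` set to `None`, and otherwise there is exactly one cell per name in `co_freevars`, in the same order.

```python
from types import CellType

def make_adder(a, b):
    def add(c):
        return a + b + c
    return add

def plain():
    return 42

f = make_adder(1, 2)
assert plain.__closure__ is None                       # no free variables
assert f.__code__.co_freevars == ('a', 'b')            # one name per cell
assert all(isinstance(cell, CellType) for cell in f.__closure__)
assert [cell.cell_contents for cell in f.__closure__] == [1, 2]
```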
+ def function_with_closure(arg): + def inner(): + return arg + return inner + + func = function_without_closure + _testcapi.function_set_closure(func, (CellType(1), CellType(1))) + closure = _testcapi.function_get_closure(func) + self.assertEqual([c.cell_contents for c in closure], [1, 1]) + self.assertEqual([c.cell_contents for c in func.__closure__], [1, 1]) + + func = function_with_closure(1) + _testcapi.function_set_closure(func, + (CellType(1), CellType(2), CellType(3))) + closure = _testcapi.function_get_closure(func) + self.assertEqual([c.cell_contents for c in closure], [1, 2, 3]) + self.assertEqual([c.cell_contents for c in func.__closure__], [1, 2, 3]) + + def test_function_set_closure_none(self): + def function_without_closure(): ... + def function_with_closure(arg): + def inner(): + return arg + return inner + + _testcapi.function_set_closure(function_without_closure, None) + self.assertIsNone( + _testcapi.function_get_closure(function_without_closure)) + self.assertIsNone(function_without_closure.__closure__) + + _testcapi.function_set_closure(function_with_closure, None) + self.assertIsNone( + _testcapi.function_get_closure(function_with_closure)) + self.assertIsNone(function_with_closure.__closure__) + + def test_function_set_closure_errors(self): + def function_without_closure(): ... + + with self.assertRaises(SystemError): + _testcapi.function_set_closure(None, ()) # not a function + + with self.assertRaises(SystemError): + _testcapi.function_set_closure(function_without_closure, 1) + self.assertIsNone(function_without_closure.__closure__) # no change + + # NOTE: this works, but goes against the docs: + _testcapi.function_set_closure(function_without_closure, (1, 2)) + self.assertEqual( + _testcapi.function_get_closure(function_without_closure), (1, 2)) + self.assertEqual(function_without_closure.__closure__, (1, 2)) + + class TestPendingCalls(unittest.TestCase): # See the comment in ceval.c (at the "handle_eval_breaker" label) @@ -2056,6 +2174,13 @@ def test_module_state_shared_in_global(self): self.addCleanup(os.close, r) self.addCleanup(os.close, w) + # Apple extensions must be distributed as frameworks. This requires + # a specialist loader. 
+ if support.is_apple_mobile: + loader = "AppleFrameworkLoader" + else: + loader = "ExtensionFileLoader" + script = textwrap.dedent(f""" import importlib.machinery import importlib.util @@ -2063,7 +2188,7 @@ def test_module_state_shared_in_global(self): fullname = '_test_module_state_shared' origin = importlib.util.find_spec('_testmultiphase').origin - loader = importlib.machinery.ExtensionFileLoader(fullname, origin) + loader = importlib.machinery.{loader}(fullname, origin) spec = importlib.util.spec_from_loader(fullname, loader) module = importlib.util.module_from_spec(spec) attr_id = str(id(module.Error)).encode() @@ -2082,132 +2207,264 @@ def test_module_state_shared_in_global(self): @requires_subinterpreters class InterpreterIDTests(unittest.TestCase): - InterpreterID = _testcapi.get_interpreterid_type() - - def new_interpreter(self): - def ensure_destroyed(interpid): + def add_interp_cleanup(self, interpid): + def ensure_destroyed(): try: _interpreters.destroy(interpid) except _interpreters.InterpreterNotFoundError: pass + self.addCleanup(ensure_destroyed) + + def new_interpreter(self): id = _interpreters.create() - self.addCleanup(lambda: ensure_destroyed(id)) + self.add_interp_cleanup(id) return id - def test_with_int(self): - id = self.InterpreterID(10, force=True) + def test_conversion_int(self): + convert = _testinternalcapi.normalize_interp_id + interpid = convert(10) + self.assertEqual(interpid, 10) - self.assertEqual(int(id), 10) - - def test_coerce_id(self): - class Int(str): + def test_conversion_coerced(self): + convert = _testinternalcapi.normalize_interp_id + class MyInt(str): def __index__(self): return 10 + interpid = convert(MyInt()) + self.assertEqual(interpid, 10) + + def test_conversion_from_interpreter(self): + convert = _testinternalcapi.normalize_interp_id + interpid = self.new_interpreter() + converted = convert(interpid) + self.assertEqual(converted, interpid) - id = self.InterpreterID(Int(), force=True) - self.assertEqual(int(id), 10) + def test_conversion_bad(self): + convert = _testinternalcapi.normalize_interp_id - def test_bad_id(self): for badid in [ object(), 10.0, '10', b'10', ]: - with self.subTest(badid): + with self.subTest(f'bad: {badid!r}'): with self.assertRaises(TypeError): - self.InterpreterID(badid) + convert(badid) badid = -1 - with self.subTest(badid): + with self.subTest(f'bad: {badid!r}'): with self.assertRaises(ValueError): - self.InterpreterID(badid) + convert(badid) badid = 2**64 - with self.subTest(badid): + with self.subTest(f'bad: {badid!r}'): with self.assertRaises(OverflowError): - self.InterpreterID(badid) + convert(badid) - def test_exists(self): - id = self.new_interpreter() - with self.assertRaises(_interpreters.InterpreterNotFoundError): - self.InterpreterID(int(id) + 1) # unforced + def test_lookup_exists(self): + interpid = self.new_interpreter() + self.assertTrue( + _testinternalcapi.interpreter_exists(interpid)) - def test_does_not_exist(self): - id = self.new_interpreter() - with self.assertRaises(_interpreters.InterpreterNotFoundError): - self.InterpreterID(int(id) + 1) # unforced + def test_lookup_does_not_exist(self): + interpid = _testinternalcapi.unused_interpreter_id() + self.assertFalse( + _testinternalcapi.interpreter_exists(interpid)) - def test_destroyed(self): - id = _interpreters.create() - _interpreters.destroy(id) - with self.assertRaises(_interpreters.InterpreterNotFoundError): - self.InterpreterID(id) # unforced - - def test_str(self): - id = self.InterpreterID(10, force=True) - self.assertEqual(str(id), 
'10') - - def test_repr(self): - id = self.InterpreterID(10, force=True) - self.assertEqual(repr(id), 'InterpreterID(10)') - - def test_equality(self): - id1 = self.new_interpreter() - id2 = self.InterpreterID(id1) - id3 = self.InterpreterID( - self.new_interpreter()) - - self.assertTrue(id2 == id2) # identity - self.assertTrue(id2 == id1) # int-equivalent - self.assertTrue(id1 == id2) # reversed - self.assertTrue(id2 == int(id2)) - self.assertTrue(id2 == float(int(id2))) - self.assertTrue(float(int(id2)) == id2) - self.assertFalse(id2 == float(int(id2)) + 0.1) - self.assertFalse(id2 == str(int(id2))) - self.assertFalse(id2 == 2**1000) - self.assertFalse(id2 == float('inf')) - self.assertFalse(id2 == 'spam') - self.assertFalse(id2 == id3) - - self.assertFalse(id2 != id2) - self.assertFalse(id2 != id1) - self.assertFalse(id1 != id2) - self.assertTrue(id2 != id3) - - def test_linked_lifecycle(self): - id1 = _interpreters.create() - _testcapi.unlink_interpreter_refcount(id1) + def test_lookup_destroyed(self): + interpid = _interpreters.create() + _interpreters.destroy(interpid) + self.assertFalse( + _testinternalcapi.interpreter_exists(interpid)) + + def test_linked_lifecycle_does_not_exist(self): + exists = _testinternalcapi.interpreter_exists + is_linked = _testinternalcapi.interpreter_refcount_linked + link = _testinternalcapi.link_interpreter_refcount + unlink = _testinternalcapi.unlink_interpreter_refcount + get_refcount = _testinternalcapi.get_interpreter_refcount + incref = _testinternalcapi.interpreter_incref + decref = _testinternalcapi.interpreter_decref + + with self.subTest('never existed'): + interpid = _testinternalcapi.unused_interpreter_id() + self.assertFalse( + exists(interpid)) + with self.assertRaises(_interpreters.InterpreterNotFoundError): + is_linked(interpid) + with self.assertRaises(_interpreters.InterpreterNotFoundError): + link(interpid) + with self.assertRaises(_interpreters.InterpreterNotFoundError): + unlink(interpid) + with self.assertRaises(_interpreters.InterpreterNotFoundError): + get_refcount(interpid) + with self.assertRaises(_interpreters.InterpreterNotFoundError): + incref(interpid) + with self.assertRaises(_interpreters.InterpreterNotFoundError): + decref(interpid) + + with self.subTest('destroyed'): + interpid = _interpreters.create() + _interpreters.destroy(interpid) + self.assertFalse( + exists(interpid)) + with self.assertRaises(_interpreters.InterpreterNotFoundError): + is_linked(interpid) + with self.assertRaises(_interpreters.InterpreterNotFoundError): + link(interpid) + with self.assertRaises(_interpreters.InterpreterNotFoundError): + unlink(interpid) + with self.assertRaises(_interpreters.InterpreterNotFoundError): + get_refcount(interpid) + with self.assertRaises(_interpreters.InterpreterNotFoundError): + incref(interpid) + with self.assertRaises(_interpreters.InterpreterNotFoundError): + decref(interpid) + + def test_linked_lifecycle_initial(self): + is_linked = _testinternalcapi.interpreter_refcount_linked + get_refcount = _testinternalcapi.get_interpreter_refcount + + # A new interpreter will start out not linked, with a refcount of 0. 
+ interpid = _testinternalcapi.new_interpreter() + self.add_interp_cleanup(interpid) + linked = is_linked(interpid) + refcount = get_refcount(interpid) + + self.assertFalse(linked) + self.assertEqual(refcount, 0) + + def test_linked_lifecycle_never_linked(self): + exists = _testinternalcapi.interpreter_exists + is_linked = _testinternalcapi.interpreter_refcount_linked + get_refcount = _testinternalcapi.get_interpreter_refcount + incref = _testinternalcapi.interpreter_incref + decref = _testinternalcapi.interpreter_decref + + interpid = _testinternalcapi.new_interpreter() + self.add_interp_cleanup(interpid) + + # Incref will not automatically link it. + incref(interpid) + self.assertFalse( + is_linked(interpid)) + self.assertEqual( + 1, get_refcount(interpid)) + + # It isn't linked so it isn't destroyed. + decref(interpid) + self.assertTrue( + exists(interpid)) + self.assertFalse( + is_linked(interpid)) self.assertEqual( - _testinternalcapi.get_interpreter_refcount(id1), - 0) + 0, get_refcount(interpid)) + + def test_linked_lifecycle_link_unlink(self): + exists = _testinternalcapi.interpreter_exists + is_linked = _testinternalcapi.interpreter_refcount_linked + link = _testinternalcapi.link_interpreter_refcount + unlink = _testinternalcapi.unlink_interpreter_refcount + + interpid = _testinternalcapi.new_interpreter() + self.add_interp_cleanup(interpid) + + # Linking at refcount 0 does not destroy the interpreter. + link(interpid) + self.assertTrue( + exists(interpid)) + self.assertTrue( + is_linked(interpid)) + + # Unlinking at refcount 0 does not destroy the interpreter. + unlink(interpid) + self.assertTrue( + exists(interpid)) + self.assertFalse( + is_linked(interpid)) + + def test_linked_lifecycle_link_incref_decref(self): + exists = _testinternalcapi.interpreter_exists + is_linked = _testinternalcapi.interpreter_refcount_linked + link = _testinternalcapi.link_interpreter_refcount + get_refcount = _testinternalcapi.get_interpreter_refcount + incref = _testinternalcapi.interpreter_incref + decref = _testinternalcapi.interpreter_decref + + interpid = _testinternalcapi.new_interpreter() + self.add_interp_cleanup(interpid) + + # Linking it will not change the refcount. + link(interpid) + self.assertTrue( + is_linked(interpid)) + self.assertEqual( + 0, get_refcount(interpid)) - id2 = self.InterpreterID(id1) + # Decref with a refcount of 0 is not allowed. + incref(interpid) self.assertEqual( - _testinternalcapi.get_interpreter_refcount(id1), - 1) + 1, get_refcount(interpid)) + + # When linked, decref back to 0 destroys the interpreter. + decref(interpid) + self.assertFalse( + exists(interpid)) + + def test_linked_lifecycle_incref_link(self): + is_linked = _testinternalcapi.interpreter_refcount_linked + link = _testinternalcapi.link_interpreter_refcount + get_refcount = _testinternalcapi.get_interpreter_refcount + incref = _testinternalcapi.interpreter_incref + + interpid = _testinternalcapi.new_interpreter() + self.add_interp_cleanup(interpid) - # The interpreter isn't linked to ID objects, so it isn't destroyed. - del id2 + incref(interpid) self.assertEqual( - _testinternalcapi.get_interpreter_refcount(id1), - 0) + 1, get_refcount(interpid)) - _testcapi.link_interpreter_refcount(id1) + # Linking it will not reset the refcount. 
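Taken together, the lifecycle tests above pin down the refcount-linking rules. A condensed walk-through of those rules follows, written against the same `_testinternalcapi` hooks the tests use; these are private helpers available only in a CPython test build, so this is a sketch of the asserted behaviour, not public API.

```python
import _testinternalcapi as tic   # private test hooks, as used by the tests above

interpid = tic.new_interpreter()
assert not tic.interpreter_refcount_linked(interpid)   # starts unlinked
assert tic.get_interpreter_refcount(interpid) == 0     # and at refcount 0

tic.interpreter_incref(interpid)            # refcount 1, still not linked
tic.interpreter_decref(interpid)            # back to 0; unlinked, so it survives
assert tic.interpreter_exists(interpid)

tic.link_interpreter_refcount(interpid)     # linking does not change the refcount
tic.interpreter_incref(interpid)
tic.interpreter_decref(interpid)            # linked and back to 0: destroyed
assert not tic.interpreter_exists(interpid)
```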
+ link(interpid) + self.assertTrue( + is_linked(interpid)) self.assertEqual( - _testinternalcapi.get_interpreter_refcount(id1), - 0) + 1, get_refcount(interpid)) - id3 = self.InterpreterID(id1) + def test_linked_lifecycle_link_incref_unlink_decref(self): + exists = _testinternalcapi.interpreter_exists + is_linked = _testinternalcapi.interpreter_refcount_linked + link = _testinternalcapi.link_interpreter_refcount + unlink = _testinternalcapi.unlink_interpreter_refcount + get_refcount = _testinternalcapi.get_interpreter_refcount + incref = _testinternalcapi.interpreter_incref + decref = _testinternalcapi.interpreter_decref + + interpid = _testinternalcapi.new_interpreter() + self.add_interp_cleanup(interpid) + + link(interpid) + self.assertTrue( + is_linked(interpid)) + + incref(interpid) + self.assertEqual( + 1, get_refcount(interpid)) + + # Unlinking it will not change the refcount. + unlink(interpid) + self.assertFalse( + is_linked(interpid)) self.assertEqual( - _testinternalcapi.get_interpreter_refcount(id1), - 1) + 1, get_refcount(interpid)) - # The interpreter is linked now so is destroyed. - del id3 - with self.assertRaises(_interpreters.InterpreterNotFoundError): - _testinternalcapi.get_interpreter_refcount(id1) + # Unlinked: decref back to 0 does not destroys the interpreter. + decref(interpid) + self.assertTrue( + exists(interpid)) + self.assertEqual( + 0, get_refcount(interpid)) class BuiltinStaticTypesTests(unittest.TestCase): @@ -2338,7 +2595,7 @@ class Test_testcapi(unittest.TestCase): # Suppress warning from PyUnicode_FromUnicode(). @warnings_helper.ignore_warnings(category=DeprecationWarning) def test_widechar(self): - _testcapi.test_widechar() + _testlimitedcapi.test_widechar() def test_version_api_data(self): self.assertEqual(_testcapi.Py_Version, sys.hexversion) @@ -2371,7 +2628,12 @@ class Test_ModuleStateAccess(unittest.TestCase): def setUp(self): fullname = '_testmultiphase_meth_state_access' # XXX origin = importlib.util.find_spec('_testmultiphase').origin - loader = importlib.machinery.ExtensionFileLoader(fullname, origin) + # Apple extensions must be distributed as frameworks. This requires + # a specialist loader. 
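Both hunks that load a test extension by hand now pick the loader the same way: Apple mobile platforms require extension modules packaged as frameworks, so they need the dedicated loader, while every other platform keeps `ExtensionFileLoader`. A self-contained sketch of that selection, using the same stdlib names as the diff (the `_testmultiphase` usage is just an example):

```python
import importlib.machinery
import importlib.util
from test import support

def load_extension(fullname, origin):
    # On Apple mobile platforms, extension modules ship inside .framework
    # bundles, which need AppleFrameworkLoader; elsewhere the ordinary
    # ExtensionFileLoader applies.
    if support.is_apple_mobile:
        loader = importlib.machinery.AppleFrameworkLoader(fullname, origin)
    else:
        loader = importlib.machinery.ExtensionFileLoader(fullname, origin)
    spec = importlib.util.spec_from_loader(fullname, loader)
    module = importlib.util.module_from_spec(spec)
    loader.exec_module(module)
    return module

# Example, mirroring the tests:
# origin = importlib.util.find_spec('_testmultiphase').origin
# mod = load_extension('_testmultiphase', origin)
```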
+ if support.is_apple_mobile: + loader = importlib.machinery.AppleFrameworkLoader(fullname, origin) + else: + loader = importlib.machinery.ExtensionFileLoader(fullname, origin) spec = importlib.util.spec_from_loader(fullname, loader) module = importlib.util.module_from_spec(spec) loader.exec_module(module) diff --git a/Lib/test/test_capi/test_object.py b/Lib/test/test_capi/test_object.py new file mode 100644 index 00000000000000..c80e9b653789ad --- /dev/null +++ b/Lib/test/test_capi/test_object.py @@ -0,0 +1,54 @@ +import enum +import unittest +from test.support import import_helper + +_testlimitedcapi = import_helper.import_module('_testlimitedcapi') + + +class Constant(enum.IntEnum): + Py_CONSTANT_NONE = 0 + Py_CONSTANT_FALSE = 1 + Py_CONSTANT_TRUE = 2 + Py_CONSTANT_ELLIPSIS = 3 + Py_CONSTANT_NOT_IMPLEMENTED = 4 + Py_CONSTANT_ZERO = 5 + Py_CONSTANT_ONE = 6 + Py_CONSTANT_EMPTY_STR = 7 + Py_CONSTANT_EMPTY_BYTES = 8 + Py_CONSTANT_EMPTY_TUPLE = 9 + + INVALID_CONSTANT = Py_CONSTANT_EMPTY_TUPLE + 1 + + +class CAPITest(unittest.TestCase): + def check_get_constant(self, get_constant): + self.assertIs(get_constant(Constant.Py_CONSTANT_NONE), None) + self.assertIs(get_constant(Constant.Py_CONSTANT_FALSE), False) + self.assertIs(get_constant(Constant.Py_CONSTANT_TRUE), True) + self.assertIs(get_constant(Constant.Py_CONSTANT_ELLIPSIS), Ellipsis) + self.assertIs(get_constant(Constant.Py_CONSTANT_NOT_IMPLEMENTED), NotImplemented) + + for constant_id, constant_type, value in ( + (Constant.Py_CONSTANT_ZERO, int, 0), + (Constant.Py_CONSTANT_ONE, int, 1), + (Constant.Py_CONSTANT_EMPTY_STR, str, ""), + (Constant.Py_CONSTANT_EMPTY_BYTES, bytes, b""), + (Constant.Py_CONSTANT_EMPTY_TUPLE, tuple, ()), + ): + with self.subTest(constant_id=constant_id): + obj = get_constant(constant_id) + self.assertEqual(type(obj), constant_type, obj) + self.assertEqual(obj, value) + + with self.assertRaises(SystemError): + get_constant(Constant.INVALID_CONSTANT) + + def test_get_constant(self): + self.check_get_constant(_testlimitedcapi.get_constant) + + def test_get_constant_borrowed(self): + self.check_get_constant(_testlimitedcapi.get_constant_borrowed) + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_capi/test_opt.py b/Lib/test/test_capi/test_opt.py index b0859a382de523..b59f4b74a8593e 100644 --- a/Lib/test/test_capi/test_opt.py +++ b/Lib/test/test_capi/test_opt.py @@ -1,11 +1,11 @@ import contextlib -import opcode import sys import textwrap import unittest import gc import os +import _opcode import _testinternalcapi from test.support import script_helper, requires_specialization @@ -115,13 +115,11 @@ def testfunc(x): def get_first_executor(func): code = func.__code__ co_code = code.co_code - JUMP_BACKWARD = opcode.opmap["JUMP_BACKWARD"] for i in range(0, len(co_code), 2): - if co_code[i] == JUMP_BACKWARD: - try: - return _testinternalcapi.get_executor(code, i) - except ValueError: - pass + try: + return _opcode.get_executor(code, i) + except ValueError: + pass return None @@ -760,17 +758,16 @@ def test_promote_globals_to_constants(self): result = script_helper.run_python_until_end('-c', textwrap.dedent(""" import _testinternalcapi import opcode + import _opcode def get_first_executor(func): code = func.__code__ co_code = code.co_code - JUMP_BACKWARD = opcode.opmap["JUMP_BACKWARD"] for i in range(0, len(co_code), 2): - if co_code[i] == JUMP_BACKWARD: - try: - return _testinternalcapi.get_executor(code, i) - except ValueError: - pass + try: + return _opcode.get_executor(code, i) + except 
ValueError: + pass return None def get_opnames(ex): @@ -955,6 +952,32 @@ def testfunc(n): _, ex = self._run_with_optimizer(testfunc, 16) self.assertIsNone(ex) + def test_many_nested(self): + # overflow the trace_stack + def dummy_a(x): + return x + def dummy_b(x): + return dummy_a(x) + def dummy_c(x): + return dummy_b(x) + def dummy_d(x): + return dummy_c(x) + def dummy_e(x): + return dummy_d(x) + def dummy_f(x): + return dummy_e(x) + def dummy_g(x): + return dummy_f(x) + def dummy_h(x): + return dummy_g(x) + def testfunc(n): + a = 0 + for _ in range(n): + a += dummy_h(n) + return a + + self._run_with_optimizer(testfunc, 32) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_capi/test_set.py b/Lib/test/test_capi/test_set.py index 5235f81543e0b6..499a5148d782ab 100644 --- a/Lib/test/test_capi/test_set.py +++ b/Lib/test/test_capi/test_set.py @@ -2,8 +2,10 @@ from test.support import import_helper -# Skip this test if the _testcapi or _testinternalcapi modules aren't available. +# Skip this test if the _testcapi, _testlimitedcapi or _testinternalcapi +# modules aren't available. _testcapi = import_helper.import_module('_testcapi') +_testlimitedcapi = import_helper.import_module('_testlimitedcapi') _testinternalcapi = import_helper.import_module('_testinternalcapi') class set_subclass(set): @@ -23,7 +25,7 @@ def assertImmutable(self, action, *args): class TestSetCAPI(BaseSetTests, unittest.TestCase): def test_set_check(self): - check = _testcapi.set_check + check = _testlimitedcapi.set_check self.assertTrue(check(set())) self.assertTrue(check({1, 2})) self.assertFalse(check(frozenset())) @@ -33,7 +35,7 @@ def test_set_check(self): # CRASHES: check(NULL) def test_set_check_exact(self): - check = _testcapi.set_checkexact + check = _testlimitedcapi.set_checkexact self.assertTrue(check(set())) self.assertTrue(check({1, 2})) self.assertFalse(check(frozenset())) @@ -43,7 +45,7 @@ def test_set_check_exact(self): # CRASHES: check(NULL) def test_frozenset_check(self): - check = _testcapi.frozenset_check + check = _testlimitedcapi.frozenset_check self.assertFalse(check(set())) self.assertTrue(check(frozenset())) self.assertTrue(check(frozenset({1, 2}))) @@ -53,7 +55,7 @@ def test_frozenset_check(self): # CRASHES: check(NULL) def test_frozenset_check_exact(self): - check = _testcapi.frozenset_checkexact + check = _testlimitedcapi.frozenset_checkexact self.assertFalse(check(set())) self.assertTrue(check(frozenset())) self.assertTrue(check(frozenset({1, 2}))) @@ -63,7 +65,7 @@ def test_frozenset_check_exact(self): # CRASHES: check(NULL) def test_anyset_check(self): - check = _testcapi.anyset_check + check = _testlimitedcapi.anyset_check self.assertTrue(check(set())) self.assertTrue(check({1, 2})) self.assertTrue(check(frozenset())) @@ -74,7 +76,7 @@ def test_anyset_check(self): # CRASHES: check(NULL) def test_anyset_check_exact(self): - check = _testcapi.anyset_checkexact + check = _testlimitedcapi.anyset_checkexact self.assertTrue(check(set())) self.assertTrue(check({1, 2})) self.assertTrue(check(frozenset())) @@ -85,7 +87,7 @@ def test_anyset_check_exact(self): # CRASHES: check(NULL) def test_set_new(self): - set_new = _testcapi.set_new + set_new = _testlimitedcapi.set_new self.assertEqual(set_new().__class__, set) self.assertEqual(set_new(), set()) self.assertEqual(set_new((1, 1, 2)), {1, 2}) @@ -98,7 +100,7 @@ def test_set_new(self): set_new((1, {})) def test_frozenset_new(self): - frozenset_new = _testcapi.frozenset_new + frozenset_new = _testlimitedcapi.frozenset_new 
self.assertEqual(frozenset_new().__class__, frozenset) self.assertEqual(frozenset_new(), frozenset()) self.assertEqual(frozenset_new((1, 1, 2)), frozenset({1, 2})) @@ -111,7 +113,7 @@ def test_frozenset_new(self): frozenset_new((1, {})) def test_set_size(self): - get_size = _testcapi.set_size + get_size = _testlimitedcapi.set_size self.assertEqual(get_size(set()), 0) self.assertEqual(get_size(frozenset()), 0) self.assertEqual(get_size({1, 1, 2}), 2) @@ -134,7 +136,7 @@ def test_set_get_size(self): # CRASHES: get_size(object()) def test_set_contains(self): - contains = _testcapi.set_contains + contains = _testlimitedcapi.set_contains for cls in (set, frozenset, set_subclass, frozenset_subclass): with self.subTest(cls=cls): instance = cls((1, 2)) @@ -147,7 +149,7 @@ def test_set_contains(self): # CRASHES: contains(NULL, NULL) def test_add(self): - add = _testcapi.set_add + add = _testlimitedcapi.set_add for cls in (set, set_subclass): with self.subTest(cls=cls): instance = cls((1, 2)) @@ -165,7 +167,7 @@ def test_add(self): # CRASHES: add(NULL, NULL) def test_discard(self): - discard = _testcapi.set_discard + discard = _testlimitedcapi.set_discard for cls in (set, set_subclass): with self.subTest(cls=cls): instance = cls((1, 2)) @@ -187,7 +189,7 @@ def test_discard(self): # CRASHES: discard(NULL, NULL) def test_pop(self): - pop = _testcapi.set_pop + pop = _testlimitedcapi.set_pop orig = (1, 2) for cls in (set, set_subclass): with self.subTest(cls=cls): @@ -204,7 +206,7 @@ def test_pop(self): # CRASHES: pop(NULL) def test_clear(self): - clear = _testcapi.set_clear + clear = _testlimitedcapi.set_clear for cls in (set, set_subclass): with self.subTest(cls=cls): instance = cls((1, 2)) diff --git a/Lib/test/test_capi/test_unicode.py b/Lib/test/test_capi/test_unicode.py index 91c425e483f0ff..a64c75c415c3fe 100644 --- a/Lib/test/test_capi/test_unicode.py +++ b/Lib/test/test_capi/test_unicode.py @@ -8,6 +8,10 @@ from _testcapi import PY_SSIZE_T_MIN, PY_SSIZE_T_MAX except ImportError: _testcapi = None +try: + import _testlimitedcapi +except ImportError: + _testlimitedcapi = None try: import _testinternalcapi except ImportError: @@ -84,10 +88,10 @@ def test_fill(self): # TODO: Test PyUnicode_Fill() with non-modifiable unicode. @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_writechar(self): """Test PyUnicode_WriteChar()""" - from _testcapi import unicode_writechar as writechar + from _testlimitedcapi import unicode_writechar as writechar strings = [ # one string for every kind @@ -115,10 +119,10 @@ def test_writechar(self): # unicode. @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_resize(self): """Test PyUnicode_Resize()""" - from _testcapi import unicode_resize as resize + from _testlimitedcapi import unicode_resize as resize strings = [ # all strings have exactly 3 characters @@ -141,10 +145,10 @@ def test_resize(self): # and with NULL as the address. 
@support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_append(self): """Test PyUnicode_Append()""" - from _testcapi import unicode_append as append + from _testlimitedcapi import unicode_append as append strings = [ 'abc', '\xa1\xa2\xa3', '\u4f60\u597d\u4e16', @@ -169,10 +173,10 @@ def test_append(self): # TODO: Check reference counts. @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_appendanddel(self): """Test PyUnicode_AppendAndDel()""" - from _testcapi import unicode_appendanddel as appendanddel + from _testlimitedcapi import unicode_appendanddel as appendanddel strings = [ 'abc', '\xa1\xa2\xa3', '\u4f60\u597d\u4e16', @@ -196,10 +200,10 @@ def test_appendanddel(self): # TODO: Check reference counts. @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_fromstringandsize(self): """Test PyUnicode_FromStringAndSize()""" - from _testcapi import unicode_fromstringandsize as fromstringandsize + from _testlimitedcapi import unicode_fromstringandsize as fromstringandsize self.assertEqual(fromstringandsize(b'abc'), 'abc') self.assertEqual(fromstringandsize(b'abc', 2), 'ab') @@ -221,10 +225,10 @@ def test_fromstringandsize(self): self.assertRaises(SystemError, fromstringandsize, NULL, PY_SSIZE_T_MAX) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_fromstring(self): """Test PyUnicode_FromString()""" - from _testcapi import unicode_fromstring as fromstring + from _testlimitedcapi import unicode_fromstring as fromstring self.assertEqual(fromstring(b'abc'), 'abc') self.assertEqual(fromstring(b'\xc2\xa1\xc2\xa2'), '\xa1\xa2') @@ -273,10 +277,10 @@ def test_fromkindanddata(self): # CRASHES fromkindanddata(4, b'\xff\xff\xff\xff') @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_substring(self): """Test PyUnicode_Substring()""" - from _testcapi import unicode_substring as substring + from _testlimitedcapi import unicode_substring as substring strings = [ 'ab', 'ab\xa1\xa2', @@ -297,10 +301,10 @@ def test_substring(self): # CRASHES substring(NULL, 0, 0) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_getlength(self): """Test PyUnicode_GetLength()""" - from _testcapi import unicode_getlength as getlength + from _testlimitedcapi import unicode_getlength as getlength for s in ['abc', '\xa1\xa2', '\u4f60\u597d', 'a\U0001f600', 'a\ud800b\udfffc', '\ud834\udd1e']: @@ -311,10 +315,10 @@ def test_getlength(self): # CRASHES getlength(NULL) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_readchar(self): """Test PyUnicode_ReadChar()""" - from _testcapi import unicode_readchar as readchar + from _testlimitedcapi import unicode_readchar as readchar for s in ['abc', '\xa1\xa2', '\u4f60\u597d', 'a\U0001f600', 'a\ud800b\udfffc', '\ud834\udd1e']: @@ -330,10 +334,10 @@ def test_readchar(self): 
# CRASHES readchar(NULL, 0) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_fromobject(self): """Test PyUnicode_FromObject()""" - from _testcapi import unicode_fromobject as fromobject + from _testlimitedcapi import unicode_fromobject as fromobject for s in ['abc', '\xa1\xa2', '\u4f60\u597d', 'a\U0001f600', 'a\ud800b\udfffc', '\ud834\udd1e']: @@ -352,7 +356,7 @@ def test_from_format(self): """Test PyUnicode_FromFormat()""" # Length modifiers "j" and "t" are not tested here because ctypes does # not expose types for intmax_t and ptrdiff_t. - # _testcapi.test_string_from_format() has a wider coverage of all + # _testlimitedcapi.test_string_from_format() has a wider coverage of all # formats. import_helper.import_module('ctypes') from ctypes import ( @@ -609,40 +613,6 @@ def check_format(expected, format, *args): check_format('xyz', b'%V', None, b'xyz') - # test %T - check_format('type: str', - b'type: %T', py_object("abc")) - check_format(f'type: st', - b'type: %.2T', py_object("abc")) - check_format(f'type: str', - b'type: %10T', py_object("abc")) - - class LocalType: - pass - obj = LocalType() - fullname = f'{__name__}.{LocalType.__qualname__}' - check_format(f'type: {fullname}', - b'type: %T', py_object(obj)) - fullname_alt = f'{__name__}:{LocalType.__qualname__}' - check_format(f'type: {fullname_alt}', - b'type: %T#', py_object(obj)) - - # test %N - check_format('type: str', - b'type: %N', py_object(str)) - check_format(f'type: st', - b'type: %.2N', py_object(str)) - check_format(f'type: str', - b'type: %10N', py_object(str)) - - check_format(f'type: {fullname}', - b'type: %N', py_object(type(obj))) - check_format(f'type: {fullname_alt}', - b'type: %N#', py_object(type(obj))) - with self.assertRaisesRegex(TypeError, "%N argument must be a type"): - check_format('type: str', - b'type: %N', py_object("abc")) - # test %ls check_format('abc', b'%ls', c_wchar_p('abc')) check_format('\u4eba\u6c11', b'%ls', c_wchar_p('\u4eba\u6c11')) @@ -741,10 +711,10 @@ class LocalType: PyUnicode_FromFormat, b'%+i', c_int(10)) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_interninplace(self): """Test PyUnicode_InternInPlace()""" - from _testcapi import unicode_interninplace as interninplace + from _testlimitedcapi import unicode_interninplace as interninplace s = b'abc'.decode() r = interninplace(s) @@ -754,10 +724,10 @@ def test_interninplace(self): # CRASHES interninplace(NULL) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_internfromstring(self): """Test PyUnicode_InternFromString()""" - from _testcapi import unicode_internfromstring as internfromstring + from _testlimitedcapi import unicode_internfromstring as internfromstring self.assertEqual(internfromstring(b'abc'), 'abc') self.assertEqual(internfromstring(b'\xf0\x9f\x98\x80'), '\U0001f600') @@ -768,10 +738,10 @@ def test_internfromstring(self): # CRASHES internfromstring(NULL) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_fromwidechar(self): """Test PyUnicode_FromWideChar()""" - from _testcapi import unicode_fromwidechar as fromwidechar + from _testlimitedcapi 
import unicode_fromwidechar as fromwidechar from _testcapi import SIZEOF_WCHAR_T if SIZEOF_WCHAR_T == 2: @@ -803,11 +773,11 @@ def test_fromwidechar(self): self.assertRaises(SystemError, fromwidechar, NULL, PY_SSIZE_T_MIN) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_aswidechar(self): """Test PyUnicode_AsWideChar()""" - from _testcapi import unicode_aswidechar - from _testcapi import unicode_aswidechar_null + from _testlimitedcapi import unicode_aswidechar + from _testlimitedcapi import unicode_aswidechar_null from _testcapi import SIZEOF_WCHAR_T wchar, size = unicode_aswidechar('abcdef', 2) @@ -851,11 +821,11 @@ def test_aswidechar(self): self.assertRaises(SystemError, unicode_aswidechar_null, NULL, 10) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_aswidecharstring(self): """Test PyUnicode_AsWideCharString()""" - from _testcapi import unicode_aswidecharstring - from _testcapi import unicode_aswidecharstring_null + from _testlimitedcapi import unicode_aswidecharstring + from _testlimitedcapi import unicode_aswidecharstring_null from _testcapi import SIZEOF_WCHAR_T wchar, size = unicode_aswidecharstring('abc') @@ -927,10 +897,10 @@ def test_asucs4copy(self): # CRASHES asucs4copy(NULL) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_fromordinal(self): """Test PyUnicode_FromOrdinal()""" - from _testcapi import unicode_fromordinal as fromordinal + from _testlimitedcapi import unicode_fromordinal as fromordinal self.assertEqual(fromordinal(0x61), 'a') self.assertEqual(fromordinal(0x20ac), '\u20ac') @@ -956,11 +926,11 @@ def test_asutf8(self): # CRASHES unicode_asutf8(NULL, 0) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_asutf8andsize(self): """Test PyUnicode_AsUTF8AndSize()""" - from _testcapi import unicode_asutf8andsize - from _testcapi import unicode_asutf8andsize_null + from _testlimitedcapi import unicode_asutf8andsize + from _testlimitedcapi import unicode_asutf8andsize_null self.assertEqual(unicode_asutf8andsize('abc', 4), (b'abc\0', 3)) self.assertEqual(unicode_asutf8andsize('абв', 7), (b'\xd0\xb0\xd0\xb1\xd0\xb2\0', 6)) @@ -979,10 +949,10 @@ def test_asutf8andsize(self): # CRASHES unicode_asutf8andsize_null(NULL, 0) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_getdefaultencoding(self): """Test PyUnicode_GetDefaultEncoding()""" - from _testcapi import unicode_getdefaultencoding as getdefaultencoding + from _testlimitedcapi import unicode_getdefaultencoding as getdefaultencoding self.assertEqual(getdefaultencoding(), b'utf-8') @@ -1007,10 +977,10 @@ def test_transform_decimal_and_space(self): # CRASHES transform_decimal(NULL) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_concat(self): """Test PyUnicode_Concat()""" - from _testcapi import unicode_concat as concat + from _testlimitedcapi import unicode_concat as concat self.assertEqual(concat('abc', 
'def'), 'abcdef') self.assertEqual(concat('abc', 'где'), 'abcгде') @@ -1028,10 +998,10 @@ def test_concat(self): # CRASHES concat('abc', NULL) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_split(self): """Test PyUnicode_Split()""" - from _testcapi import unicode_split as split + from _testlimitedcapi import unicode_split as split self.assertEqual(split('a|b|c|d', '|'), ['a', 'b', 'c', 'd']) self.assertEqual(split('a|b|c|d', '|', 2), ['a', 'b', 'c|d']) @@ -1056,10 +1026,10 @@ def test_split(self): # CRASHES split(NULL, '|') @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_rsplit(self): """Test PyUnicode_RSplit()""" - from _testcapi import unicode_rsplit as rsplit + from _testlimitedcapi import unicode_rsplit as rsplit self.assertEqual(rsplit('a|b|c|d', '|'), ['a', 'b', 'c', 'd']) self.assertEqual(rsplit('a|b|c|d', '|', 2), ['a|b', 'c', 'd']) @@ -1085,10 +1055,10 @@ def test_rsplit(self): # CRASHES rsplit(NULL, '|') @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_partition(self): """Test PyUnicode_Partition()""" - from _testcapi import unicode_partition as partition + from _testlimitedcapi import unicode_partition as partition self.assertEqual(partition('a|b|c', '|'), ('a', '|', 'b|c')) self.assertEqual(partition('a||b||c', '||'), ('a', '||', 'b||c')) @@ -1105,10 +1075,10 @@ def test_partition(self): # CRASHES partition('a|b|c', NULL) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_rpartition(self): """Test PyUnicode_RPartition()""" - from _testcapi import unicode_rpartition as rpartition + from _testlimitedcapi import unicode_rpartition as rpartition self.assertEqual(rpartition('a|b|c', '|'), ('a|b', '|', 'c')) self.assertEqual(rpartition('a||b||c', '||'), ('a||b', '||', 'c')) @@ -1125,10 +1095,10 @@ def test_rpartition(self): # CRASHES rpartition('a|b|c', NULL) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_splitlines(self): """Test PyUnicode_SplitLines()""" - from _testcapi import unicode_splitlines as splitlines + from _testlimitedcapi import unicode_splitlines as splitlines self.assertEqual(splitlines('a\nb\rc\r\nd'), ['a', 'b', 'c', 'd']) self.assertEqual(splitlines('a\nb\rc\r\nd', True), @@ -1143,10 +1113,10 @@ def test_splitlines(self): # CRASHES splitlines(NULL) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_translate(self): """Test PyUnicode_Translate()""" - from _testcapi import unicode_translate as translate + from _testlimitedcapi import unicode_translate as translate self.assertEqual(translate('abcd', {ord('a'): 'A', ord('b'): ord('B'), ord('c'): '<>'}), 'AB<>d') self.assertEqual(translate('абвг', {ord('а'): 'А', ord('б'): ord('Б'), ord('в'): '<>'}), 'АБ<>г') @@ -1168,10 +1138,10 @@ def test_translate(self): # CRASHES translate(NULL, []) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + 
@unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_join(self): """Test PyUnicode_Join()""" - from _testcapi import unicode_join as join + from _testlimitedcapi import unicode_join as join self.assertEqual(join('|', ['a', 'b', 'c']), 'a|b|c') self.assertEqual(join('|', ['a', '', 'c']), 'a||c') self.assertEqual(join('', ['a', 'b', 'c']), 'abc') @@ -1186,10 +1156,10 @@ def test_join(self): self.assertRaises(SystemError, join, '|', NULL) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_count(self): """Test PyUnicode_Count()""" - from _testcapi import unicode_count + from _testlimitedcapi import unicode_count for str in "\xa1", "\u8000\u8080", "\ud800\udc02", "\U0001f100\U0001f1f1": for i, ch in enumerate(str): @@ -1217,10 +1187,10 @@ def test_count(self): # CRASHES unicode_count(str, NULL, 0, len(str)) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_tailmatch(self): """Test PyUnicode_Tailmatch()""" - from _testcapi import unicode_tailmatch as tailmatch + from _testlimitedcapi import unicode_tailmatch as tailmatch str = 'ababahalamaha' self.assertEqual(tailmatch(str, 'aba', 0, len(str), -1), 1) @@ -1252,10 +1222,10 @@ def test_tailmatch(self): # CRASHES tailmatch(str, NULL, 0, len(str), -1) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_find(self): """Test PyUnicode_Find()""" - from _testcapi import unicode_find as find + from _testlimitedcapi import unicode_find as find for str in "\xa1", "\u8000\u8080", "\ud800\udc02", "\U0001f100\U0001f1f1": for i, ch in enumerate(str): @@ -1293,10 +1263,10 @@ def test_find(self): # CRASHES find(str, NULL, 0, len(str), 1) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_findchar(self): """Test PyUnicode_FindChar()""" - from _testcapi import unicode_findchar + from _testlimitedcapi import unicode_findchar for str in "\xa1", "\u8000\u8080", "\ud800\udc02", "\U0001f100\U0001f1f1": for i, ch in enumerate(str): @@ -1329,10 +1299,10 @@ def test_findchar(self): # CRASHES unicode_findchar(NULL, ord('!'), 0, len(str), 1), 1) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_replace(self): """Test PyUnicode_Replace()""" - from _testcapi import unicode_replace as replace + from _testlimitedcapi import unicode_replace as replace str = 'abracadabra' self.assertEqual(replace(str, 'a', '='), '=br=c=d=br=') @@ -1360,10 +1330,10 @@ def test_replace(self): # CRASHES replace(NULL, 'a', '=') @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_compare(self): """Test PyUnicode_Compare()""" - from _testcapi import unicode_compare as compare + from _testlimitedcapi import unicode_compare as compare self.assertEqual(compare('abc', 'abc'), 0) self.assertEqual(compare('abc', 'def'), -1) @@ -1382,10 +1352,10 @@ def test_compare(self): # CRASHES compare('abc', NULL) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need 
_testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_comparewithasciistring(self): """Test PyUnicode_CompareWithASCIIString()""" - from _testcapi import unicode_comparewithasciistring as comparewithasciistring + from _testlimitedcapi import unicode_comparewithasciistring as comparewithasciistring self.assertEqual(comparewithasciistring('abc', b'abc'), 0) self.assertEqual(comparewithasciistring('abc', b'def'), -1) @@ -1399,11 +1369,11 @@ def test_comparewithasciistring(self): # CRASHES comparewithasciistring(NULL, b'abc') @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_equaltoutf8(self): # Test PyUnicode_EqualToUTF8() - from _testcapi import unicode_equaltoutf8 as equaltoutf8 - from _testcapi import unicode_asutf8andsize as asutf8andsize + from _testlimitedcapi import unicode_equaltoutf8 as equaltoutf8 + from _testlimitedcapi import unicode_asutf8andsize as asutf8andsize strings = [ 'abc', '\xa1\xa2\xa3', '\u4f60\u597d\u4e16', @@ -1445,11 +1415,11 @@ def test_equaltoutf8(self): '\ud801'.encode("utf8", "surrogatepass")), 0) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_equaltoutf8andsize(self): # Test PyUnicode_EqualToUTF8AndSize() - from _testcapi import unicode_equaltoutf8andsize as equaltoutf8andsize - from _testcapi import unicode_asutf8andsize as asutf8andsize + from _testlimitedcapi import unicode_equaltoutf8andsize as equaltoutf8andsize + from _testlimitedcapi import unicode_asutf8andsize as asutf8andsize strings = [ 'abc', '\xa1\xa2\xa3', '\u4f60\u597d\u4e16', @@ -1514,10 +1484,10 @@ def check_not_equal_encoding(text, encoding): # CRASHES equaltoutf8andsize('abc', NULL) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_richcompare(self): """Test PyUnicode_RichCompare()""" - from _testcapi import unicode_richcompare as richcompare + from _testlimitedcapi import unicode_richcompare as richcompare LT, LE, EQ, NE, GT, GE = range(6) strings = ('abc', 'абв', '\U0001f600', 'abc\0') @@ -1542,10 +1512,10 @@ def test_richcompare(self): # CRASHES richcompare('abc', NULL, op) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_format(self): """Test PyUnicode_Format()""" - from _testcapi import unicode_format as format + from _testlimitedcapi import unicode_format as format self.assertEqual(format('x=%d!', 42), 'x=42!') self.assertEqual(format('x=%d!', (42,)), 'x=42!') @@ -1555,10 +1525,10 @@ def test_format(self): self.assertRaises(SystemError, format, NULL, 42) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_contains(self): """Test PyUnicode_Contains()""" - from _testcapi import unicode_contains as contains + from _testlimitedcapi import unicode_contains as contains self.assertEqual(contains('abcd', ''), 1) self.assertEqual(contains('abcd', 'b'), 1) @@ -1577,10 +1547,10 @@ def test_contains(self): # CRASHES contains('abcd', NULL) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + 
@unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_isidentifier(self): """Test PyUnicode_IsIdentifier()""" - from _testcapi import unicode_isidentifier as isidentifier + from _testlimitedcapi import unicode_isidentifier as isidentifier self.assertEqual(isidentifier("a"), 1) self.assertEqual(isidentifier("b0"), 1) diff --git a/Lib/test/test_cext/__init__.py b/Lib/test/test_cext/__init__.py index 302ea3d3a82300..e4472b3355759c 100644 --- a/Lib/test/test_cext/__init__.py +++ b/Lib/test/test_cext/__init__.py @@ -1,10 +1,13 @@ # gh-116869: Build a basic C test extension to check that the Python C API # does not emit C compiler warnings. +# +# The Python C API must be compatible with building +# with the -Werror=declaration-after-statement compiler flag. import os.path +import shlex import shutil import subprocess -import sysconfig import unittest from test import support @@ -13,35 +16,45 @@ SETUP = os.path.join(os.path.dirname(__file__), 'setup.py') -# gh-110119: pip does not currently support 't' in the ABI flag use by -# --disable-gil builds. Once it does, we can remove this skip. -@unittest.skipIf(support.Py_GIL_DISABLED, - 'test does not work with --disable-gil') +# With MSVC on a debug build, the linker fails with: cannot open file +# 'python311.lib', it should look 'python311_d.lib'. +@unittest.skipIf(support.MS_WINDOWS and support.Py_DEBUG, + 'test fails on Windows debug build') +# Building and running an extension in clang sanitizing mode is not +# straightforward +@support.skip_if_sanitizer('test does not work with analyzing builds', + address=True, memory=True, ub=True, thread=True) +# the test uses venv+pip: skip if it's not available +@support.requires_venv_with_pip() @support.requires_subprocess() @support.requires_resource('cpu') class TestExt(unittest.TestCase): - def test_build_c99(self): - self.check_build('c99', '_test_c99_ext') + # Default build with no options + def test_build(self): + self.check_build('_test_cext') def test_build_c11(self): - self.check_build('c11', '_test_c11_ext') - - # With MSVC, the linker fails with: cannot open file 'python311.lib' - # https://github.com/python/cpython/pull/32175#issuecomment-1111175897 - @unittest.skipIf(support.MS_WINDOWS, 'test fails on Windows') - # Building and running an extension in clang sanitizing mode is not - # straightforward - @unittest.skipIf( - '-fsanitize' in (sysconfig.get_config_var('PY_CFLAGS') or ''), - 'test does not work with analyzing builds') - # the test uses venv+pip: skip if it's not available - @support.requires_venv_with_pip() - def check_build(self, clang_std, extension_name): + self.check_build('_test_c11_cext', std='c11') + + @unittest.skipIf(support.MS_WINDOWS, "MSVC doesn't support /std:c99") + def test_build_c99(self): + self.check_build('_test_c99_cext', std='c99') + + @unittest.skipIf(support.Py_GIL_DISABLED, 'incompatible with Free Threading') + def test_build_limited(self): + self.check_build('_test_limited_cext', limited=True) + + @unittest.skipIf(support.Py_GIL_DISABLED, 'broken for now with Free Threading') + def test_build_limited_c11(self): + self.check_build('_test_limited_c11_cext', limited=True, std='c11') + + def check_build(self, extension_name, std=None, limited=False): venv_dir = 'env' with support.setup_venv_with_pip_setuptools_wheel(venv_dir) as python_exe: - self._check_build(clang_std, extension_name, python_exe) + self._check_build(extension_name, python_exe, + std=std, limited=limited) - def _check_build(self, clang_std, extension_name, 
python_exe): + def _check_build(self, extension_name, python_exe, std, limited): pkg_dir = 'pkg' os.mkdir(pkg_dir) shutil.copy(SETUP, os.path.join(pkg_dir, os.path.basename(SETUP))) @@ -49,10 +62,13 @@ def _check_build(self, clang_std, extension_name, python_exe): def run_cmd(operation, cmd): env = os.environ.copy() - env['CPYTHON_TEST_STD'] = clang_std + if std: + env['CPYTHON_TEST_STD'] = std + if limited: + env['CPYTHON_TEST_LIMITED'] = '1' env['CPYTHON_TEST_EXT_NAME'] = extension_name if support.verbose: - print('Run:', ' '.join(cmd)) + print('Run:', ' '.join(map(shlex.quote, cmd))) subprocess.run(cmd, check=True, env=env) else: proc = subprocess.run(cmd, @@ -61,6 +77,7 @@ def run_cmd(operation, cmd): stderr=subprocess.STDOUT, text=True) if proc.returncode: + print('Run:', ' '.join(map(shlex.quote, cmd))) print(proc.stdout, end='') self.fail( f"{operation} failed with exit code {proc.returncode}") diff --git a/Lib/test/test_cext/extension.c b/Lib/test/test_cext/extension.c index cfecad39f8af14..662abd4005e8d2 100644 --- a/Lib/test/test_cext/extension.c +++ b/Lib/test/test_cext/extension.c @@ -6,12 +6,8 @@ #include "Python.h" -#if defined (__STDC_VERSION__) && __STDC_VERSION__ > 201710L -# define NAME _test_c2x_ext -#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 201112L -# define NAME _test_c11_ext -#else -# define NAME _test_c99_ext +#ifndef MODULE_NAME +# error "MODULE_NAME macro must be defined" #endif #define _STR(NAME) #NAME @@ -25,11 +21,11 @@ PyDoc_STRVAR(_testcext_add_doc, static PyObject * _testcext_add(PyObject *Py_UNUSED(module), PyObject *args) { - long i, j; + long i, j, res; if (!PyArg_ParseTuple(args, "ll:foo", &i, &j)) { return NULL; } - long res = i + j; + res = i + j; return PyLong_FromLong(res); } @@ -43,9 +39,11 @@ static PyMethodDef _testcext_methods[] = { static int _testcext_exec(PyObject *module) { +#ifdef __STDC_VERSION__ if (PyModule_AddIntMacro(module, __STDC_VERSION__) < 0) { return -1; } +#endif return 0; } @@ -59,7 +57,7 @@ PyDoc_STRVAR(_testcext_doc, "C test extension."); static struct PyModuleDef _testcext_module = { PyModuleDef_HEAD_INIT, // m_base - STR(NAME), // m_name + STR(MODULE_NAME), // m_name _testcext_doc, // m_doc 0, // m_size _testcext_methods, // m_methods @@ -74,7 +72,7 @@ static struct PyModuleDef _testcext_module = { #define FUNC_NAME(NAME) _FUNC_NAME(NAME) PyMODINIT_FUNC -FUNC_NAME(NAME)(void) +FUNC_NAME(MODULE_NAME)(void) { return PyModuleDef_Init(&_testcext_module); } diff --git a/Lib/test/test_cext/setup.py b/Lib/test/test_cext/setup.py index dd57a5fb8e1e1b..ccad3fa62ad086 100644 --- a/Lib/test/test_cext/setup.py +++ b/Lib/test/test_cext/setup.py @@ -1,6 +1,7 @@ # gh-91321: Build a basic C test extension to check that the Python C API is # compatible with C and does not emit C compiler warnings. import os +import platform import shlex import sys import sysconfig @@ -17,30 +18,82 @@ # extension using the Python C API does not emit C compiler warnings. '-Werror', ] + if not support.Py_GIL_DISABLED: + CFLAGS.append( + # gh-116869: The Python C API must be compatible with building + # with the -Werror=declaration-after-statement compiler flag. 
+ '-Werror=declaration-after-statement', + ) else: # Don't pass any compiler flag to MSVC CFLAGS = [] def main(): - std = os.environ["CPYTHON_TEST_STD"] - name = os.environ["CPYTHON_TEST_EXT_NAME"] - cflags = [*CFLAGS, f'-std={std}'] + std = os.environ.get("CPYTHON_TEST_STD", "") + module_name = os.environ["CPYTHON_TEST_EXT_NAME"] + limited = bool(os.environ.get("CPYTHON_TEST_LIMITED", "")) - # Remove existing -std options to only test ours + cflags = list(CFLAGS) + cflags.append(f'-DMODULE_NAME={module_name}') + + # Add -std=STD or /std:STD (MSVC) compiler flag + if std: + if support.MS_WINDOWS: + cflags.append(f'/std:{std}') + else: + cflags.append(f'-std={std}') + + # Remove existing -std or /std options from CC command line. + # Python adds -std=c11 option. cmd = (sysconfig.get_config_var('CC') or '') if cmd is not None: + if support.MS_WINDOWS: + std_prefix = '/std' + else: + std_prefix = '-std' cmd = shlex.split(cmd) - cmd = [arg for arg in cmd if not arg.startswith('-std=')] + cmd = [arg for arg in cmd if not arg.startswith(std_prefix)] cmd = shlex.join(cmd) # CC env var overrides sysconfig CC variable in setuptools os.environ['CC'] = cmd + # Define Py_LIMITED_API macro + if limited: + version = sys.hexversion + cflags.append(f'-DPy_LIMITED_API={version:#x}') + + # On Windows, add PCbuild\amd64\ to include and library directories + include_dirs = [] + library_dirs = [] + if support.MS_WINDOWS: + srcdir = sysconfig.get_config_var('srcdir') + machine = platform.uname().machine + pcbuild = os.path.join(srcdir, 'PCbuild', machine) + if os.path.exists(pcbuild): + # pyconfig.h is generated in PCbuild\amd64\ + include_dirs.append(pcbuild) + # python313.lib is generated in PCbuild\amd64\ + library_dirs.append(pcbuild) + print(f"Add PCbuild directory: {pcbuild}") + + # Display information to help debugging + for env_name in ('CC', 'CFLAGS'): + if env_name in os.environ: + print(f"{env_name} env var: {os.environ[env_name]!r}") + else: + print(f"{env_name} env var: ") + print(f"extra_compile_args: {cflags!r}") + ext = Extension( - name, + module_name, sources=[SOURCE], - extra_compile_args=cflags) - setup(name='internal' + name, version='0.0', ext_modules=[ext]) + extra_compile_args=cflags, + include_dirs=include_dirs, + library_dirs=library_dirs) + setup(name=f'internal_{module_name}', + version='0.0', + ext_modules=[ext]) if __name__ == "__main__": diff --git a/Lib/test/test_class.py b/Lib/test/test_class.py index d59271435e9eb0..ad89a22c625dfd 100644 --- a/Lib/test/test_class.py +++ b/Lib/test/test_class.py @@ -448,15 +448,15 @@ def __delattr__(self, *args): def testHasAttrString(self): import sys from test.support import import_helper - _testcapi = import_helper.import_module('_testcapi') + _testlimitedcapi = import_helper.import_module('_testlimitedcapi') class A: def __init__(self): self.attr = 1 a = A() - self.assertEqual(_testcapi.object_hasattrstring(a, b"attr"), 1) - self.assertEqual(_testcapi.object_hasattrstring(a, b"noattr"), 0) + self.assertEqual(_testlimitedcapi.object_hasattrstring(a, b"attr"), 1) + self.assertEqual(_testlimitedcapi.object_hasattrstring(a, b"noattr"), 0) self.assertIsNone(sys.exception()) def testDel(self): diff --git a/Lib/test/test_clinic.py b/Lib/test/test_clinic.py index a60f087ef2816e..52cb4d6e187855 100644 --- a/Lib/test/test_clinic.py +++ b/Lib/test/test_clinic.py @@ -2657,6 +2657,7 @@ def test_cli_converters(self): float() int() long() + object() Py_ssize_t() size_t() unsigned_int() diff --git a/Lib/test/test_codecs.py b/Lib/test/test_codecs.py index 
fe3776d6dd9337..e05b95c2d60bad 100644 --- a/Lib/test/test_codecs.py +++ b/Lib/test/test_codecs.py @@ -13,9 +13,9 @@ from test.support import os_helper try: - import _testcapi + import _testlimitedcapi except ImportError: - _testcapi = None + _testlimitedcapi = None try: import _testinternalcapi except ImportError: @@ -2224,14 +2224,14 @@ def test_basics(self): "encoding=%r" % encoding) @support.cpython_only - @unittest.skipIf(_testcapi is None, 'need _testcapi module') + @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module') def test_basics_capi(self): s = "abc123" # all codecs should be able to encode these for encoding in all_unicode_encodings: if encoding not in broken_unicode_with_stateful: # check incremental decoder/encoder (fetched via the C API) try: - cencoder = _testcapi.codec_incrementalencoder(encoding) + cencoder = _testlimitedcapi.codec_incrementalencoder(encoding) except LookupError: # no IncrementalEncoder pass else: @@ -2240,7 +2240,7 @@ def test_basics_capi(self): for c in s: encodedresult += cencoder.encode(c) encodedresult += cencoder.encode("", True) - cdecoder = _testcapi.codec_incrementaldecoder(encoding) + cdecoder = _testlimitedcapi.codec_incrementaldecoder(encoding) decodedresult = "" for c in encodedresult: decodedresult += cdecoder.decode(bytes([c])) @@ -2251,12 +2251,12 @@ def test_basics_capi(self): if encoding not in ("idna", "mbcs"): # check incremental decoder/encoder with errors argument try: - cencoder = _testcapi.codec_incrementalencoder(encoding, "ignore") + cencoder = _testlimitedcapi.codec_incrementalencoder(encoding, "ignore") except LookupError: # no IncrementalEncoder pass else: encodedresult = b"".join(cencoder.encode(c) for c in s) - cdecoder = _testcapi.codec_incrementaldecoder(encoding, "ignore") + cdecoder = _testlimitedcapi.codec_incrementaldecoder(encoding, "ignore") decodedresult = "".join(cdecoder.decode(bytes([c])) for c in encodedresult) self.assertEqual(decodedresult, s, diff --git a/Lib/test/test_compile.py b/Lib/test/test_compile.py index d3e69bfedccd07..9d5f721806a884 100644 --- a/Lib/test/test_compile.py +++ b/Lib/test/test_compile.py @@ -1960,6 +1960,64 @@ def test_load_super_attr(self): ) +class TestExpectedAttributes(unittest.TestCase): + + def test_basic(self): + class C: + def f(self): + self.a = self.b = 42 + + self.assertIsInstance(C.__static_attributes__, tuple) + self.assertEqual(sorted(C.__static_attributes__), ['a', 'b']) + + def test_nested_function(self): + class C: + def f(self): + self.x = 1 + self.y = 2 + self.x = 3 # check deduplication + + def g(self, obj): + self.y = 4 + self.z = 5 + + def h(self, a): + self.u = 6 + self.v = 7 + + obj.self = 8 + + self.assertEqual(sorted(C.__static_attributes__), ['u', 'v', 'x', 'y', 'z']) + + def test_nested_class(self): + class C: + def f(self): + self.x = 42 + self.y = 42 + + class D: + def g(self): + self.y = 42 + self.z = 42 + + self.assertEqual(sorted(C.__static_attributes__), ['x', 'y']) + self.assertEqual(sorted(C.D.__static_attributes__), ['y', 'z']) + + def test_subclass(self): + class C: + def f(self): + self.x = 42 + self.y = 42 + + class D(C): + def g(self): + self.y = 42 + self.z = 42 + + self.assertEqual(sorted(C.__static_attributes__), ['x', 'y']) + self.assertEqual(sorted(D.__static_attributes__), ['y', 'z']) + + class TestExpressionStackSize(unittest.TestCase): # These tests check that the computed stack size for a code object # stays within reasonable bounds (see issue #21523 for an example diff --git 
a/Lib/test/test_concurrent_futures/test_thread_pool.py b/Lib/test/test_concurrent_futures/test_thread_pool.py index 5926a632aa4bec..16043fd1235614 100644 --- a/Lib/test/test_concurrent_futures/test_thread_pool.py +++ b/Lib/test/test_concurrent_futures/test_thread_pool.py @@ -49,6 +49,7 @@ def test_idle_thread_reuse(self): self.assertEqual(len(executor._threads), 1) executor.shutdown(wait=True) + @support.requires_fork() @unittest.skipUnless(hasattr(os, 'register_at_fork'), 'need os.register_at_fork') @support.requires_resource('cpu') def test_hang_global_shutdown_lock(self): diff --git a/Lib/test/test_configparser.py b/Lib/test/test_configparser.py index 5d58e34740adaf..6340e378c4f21a 100644 --- a/Lib/test/test_configparser.py +++ b/Lib/test/test_configparser.py @@ -646,6 +646,21 @@ def test_weird_errors(self): "'opt' in section 'Bar' already exists") self.assertEqual(e.args, ("Bar", "opt", "", None)) + def test_get_after_duplicate_option_error(self): + cf = self.newconfig() + ini = textwrap.dedent("""\ + [Foo] + x{equals}1 + y{equals}2 + y{equals}3 + """.format(equals=self.delimiters[0])) + if self.strict: + with self.assertRaises(configparser.DuplicateOptionError): + cf.read_string(ini) + else: + cf.read_string(ini) + self.assertEqual(cf.get('Foo', 'x'), '1') + def test_write(self): config_string = ( "[Long Line]\n" diff --git a/Lib/test/test_cppext/__init__.py b/Lib/test/test_cppext/__init__.py index c6039bd17b0662..00a2840d49c779 100644 --- a/Lib/test/test_cppext/__init__.py +++ b/Lib/test/test_cppext/__init__.py @@ -1,10 +1,10 @@ # gh-91321: Build a basic C++ test extension to check that the Python C API is # compatible with C++ and does not emit C++ compiler warnings. import os.path +import shlex import shutil -import unittest import subprocess -import sysconfig +import unittest from test import support @@ -12,36 +12,41 @@ SETUP = os.path.join(os.path.dirname(__file__), 'setup.py') -# gh-110119: pip does not currently support 't' in the ABI flag use by -# --disable-gil builds. Once it does, we can remove this skip. -@unittest.skipIf(support.Py_GIL_DISABLED, - 'test does not work with --disable-gil') +# With MSVC on a debug build, the linker fails with: cannot open file +# 'python311.lib', it should look 'python311_d.lib'. +@unittest.skipIf(support.MS_WINDOWS and support.Py_DEBUG, + 'test fails on Windows debug build') +# Building and running an extension in clang sanitizing mode is not +# straightforward +@support.skip_if_sanitizer('test does not work with analyzing builds', + address=True, memory=True, ub=True, thread=True) +# the test uses venv+pip: skip if it's not available +@support.requires_venv_with_pip() @support.requires_subprocess() +@support.requires_resource('cpu') class TestCPPExt(unittest.TestCase): - @support.requires_resource('cpu') - def test_build_cpp11(self): - self.check_build(False, '_testcpp11ext') + def test_build(self): + self.check_build('_testcppext') - @support.requires_resource('cpu') def test_build_cpp03(self): - self.check_build(True, '_testcpp03ext') + self.check_build('_testcpp03ext', std='c++03') + + @unittest.skipIf(support.MS_WINDOWS, "MSVC doesn't support /std:c++11") + def test_build_cpp11(self): + self.check_build('_testcpp11ext', std='c++11') + + # Only test C++14 on MSVC. + # On s390x RHEL7, GCC 4.8.5 doesn't support C++14. 
+ @unittest.skipIf(not support.MS_WINDOWS, "need Windows") + def test_build_cpp14(self): + self.check_build('_testcpp14ext', std='c++14') - # With MSVC, the linker fails with: cannot open file 'python311.lib' - # https://github.com/python/cpython/pull/32175#issuecomment-1111175897 - @unittest.skipIf(support.MS_WINDOWS, 'test fails on Windows') - # Building and running an extension in clang sanitizing mode is not - # straightforward - @unittest.skipIf( - '-fsanitize' in (sysconfig.get_config_var('PY_CFLAGS') or ''), - 'test does not work with analyzing builds') - # the test uses venv+pip: skip if it's not available - @support.requires_venv_with_pip() - def check_build(self, std_cpp03, extension_name): + def check_build(self, extension_name, std=None): venv_dir = 'env' with support.setup_venv_with_pip_setuptools_wheel(venv_dir) as python_exe: - self._check_build(std_cpp03, extension_name, python_exe) + self._check_build(extension_name, python_exe, std=std) - def _check_build(self, std_cpp03, extension_name, python_exe): + def _check_build(self, extension_name, python_exe, std): pkg_dir = 'pkg' os.mkdir(pkg_dir) shutil.copy(SETUP, os.path.join(pkg_dir, os.path.basename(SETUP))) @@ -49,10 +54,11 @@ def _check_build(self, std_cpp03, extension_name, python_exe): def run_cmd(operation, cmd): env = os.environ.copy() - env['CPYTHON_TEST_CPP_STD'] = 'c++03' if std_cpp03 else 'c++11' + if std: + env['CPYTHON_TEST_CPP_STD'] = std env['CPYTHON_TEST_EXT_NAME'] = extension_name if support.verbose: - print('Run:', ' '.join(cmd)) + print('Run:', ' '.join(map(shlex.quote, cmd))) subprocess.run(cmd, check=True, env=env) else: proc = subprocess.run(cmd, @@ -61,6 +67,7 @@ def run_cmd(operation, cmd): stderr=subprocess.STDOUT, text=True) if proc.returncode: + print('Run:', ' '.join(map(shlex.quote, cmd))) print(proc.stdout, end='') self.fail( f"{operation} failed with exit code {proc.returncode}") diff --git a/Lib/test/test_cppext/extension.cpp b/Lib/test/test_cppext/extension.cpp index 90669b10cb2c6d..a569c2251d1ad7 100644 --- a/Lib/test/test_cppext/extension.cpp +++ b/Lib/test/test_cppext/extension.cpp @@ -8,10 +8,8 @@ #include "Python.h" -#if __cplusplus >= 201103 -# define NAME _testcpp11ext -#else -# define NAME _testcpp03ext +#ifndef MODULE_NAME +# error "MODULE_NAME macro must be defined" #endif #define _STR(NAME) #NAME @@ -160,7 +158,7 @@ PyType_Slot VirtualPyObject_Slots[] = { }; PyType_Spec VirtualPyObject_Spec = { - /* .name */ STR(NAME) ".VirtualPyObject", + /* .name */ STR(MODULE_NAME) ".VirtualPyObject", /* .basicsize */ sizeof(VirtualPyObject), /* .itemsize */ 0, /* .flags */ Py_TPFLAGS_DEFAULT, @@ -240,7 +238,7 @@ PyDoc_STRVAR(_testcppext_doc, "C++ test extension."); static struct PyModuleDef _testcppext_module = { PyModuleDef_HEAD_INIT, // m_base - STR(NAME), // m_name + STR(MODULE_NAME), // m_name _testcppext_doc, // m_doc 0, // m_size _testcppext_methods, // m_methods @@ -254,7 +252,7 @@ static struct PyModuleDef _testcppext_module = { #define FUNC_NAME(NAME) _FUNC_NAME(NAME) PyMODINIT_FUNC -FUNC_NAME(NAME)(void) +FUNC_NAME(MODULE_NAME)(void) { return PyModuleDef_Init(&_testcppext_module); } diff --git a/Lib/test/test_cppext/setup.py b/Lib/test/test_cppext/setup.py index c7ba1efb4dd05a..80b3e0d5212f7b 100644 --- a/Lib/test/test_cppext/setup.py +++ b/Lib/test/test_cppext/setup.py @@ -1,8 +1,8 @@ # gh-91321: Build a basic C++ test extension to check that the Python C API is # compatible with C++ and does not emit C++ compiler warnings. 
import os +import platform import shlex -import sys import sysconfig from test import support @@ -25,28 +25,66 @@ def main(): cppflags = list(CPPFLAGS) - std = os.environ["CPYTHON_TEST_CPP_STD"] - name = os.environ["CPYTHON_TEST_EXT_NAME"] + std = os.environ.get("CPYTHON_TEST_CPP_STD", "") + module_name = os.environ["CPYTHON_TEST_EXT_NAME"] - cppflags = [*CPPFLAGS, f'-std={std}'] + cppflags = list(CPPFLAGS) + cppflags.append(f'-DMODULE_NAME={module_name}') + + # Add -std=STD or /std:STD (MSVC) compiler flag + if std: + if support.MS_WINDOWS: + cppflags.append(f'/std:{std}') + else: + cppflags.append(f'-std={std}') # gh-105776: When "gcc -std=11" is used as the C++ compiler, -std=c11 # option emits a C++ compiler warning. Remove "-std11" option from the # CC command. cmd = (sysconfig.get_config_var('CC') or '') if cmd is not None: + if support.MS_WINDOWS: + std_prefix = '/std' + else: + std_prefix = '-std' cmd = shlex.split(cmd) - cmd = [arg for arg in cmd if not arg.startswith('-std=')] + cmd = [arg for arg in cmd if not arg.startswith(std_prefix)] cmd = shlex.join(cmd) # CC env var overrides sysconfig CC variable in setuptools os.environ['CC'] = cmd - cpp_ext = Extension( - name, + # On Windows, add PCbuild\amd64\ to include and library directories + include_dirs = [] + library_dirs = [] + if support.MS_WINDOWS: + srcdir = sysconfig.get_config_var('srcdir') + machine = platform.uname().machine + pcbuild = os.path.join(srcdir, 'PCbuild', machine) + if os.path.exists(pcbuild): + # pyconfig.h is generated in PCbuild\amd64\ + include_dirs.append(pcbuild) + # python313.lib is generated in PCbuild\amd64\ + library_dirs.append(pcbuild) + print(f"Add PCbuild directory: {pcbuild}") + + # Display information to help debugging + for env_name in ('CC', 'CFLAGS', 'CPPFLAGS'): + if env_name in os.environ: + print(f"{env_name} env var: {os.environ[env_name]!r}") + else: + print(f"{env_name} env var: ") + print(f"extra_compile_args: {cppflags!r}") + + ext = Extension( + module_name, sources=[SOURCE], language='c++', - extra_compile_args=cppflags) - setup(name='internal' + name, version='0.0', ext_modules=[cpp_ext]) + extra_compile_args=cppflags, + include_dirs=include_dirs, + library_dirs=library_dirs) + setup(name=f'internal_{module_name}', + version='0.0', + ext_modules=[ext]) if __name__ == "__main__": diff --git a/Lib/test/test_ctypes/test_arrays.py b/Lib/test/test_ctypes/test_arrays.py index 774316e227ff73..3568cf97f40b50 100644 --- a/Lib/test/test_ctypes/test_arrays.py +++ b/Lib/test/test_ctypes/test_arrays.py @@ -37,6 +37,22 @@ def test_type_flags(self): self.assertTrue(cls.__flags__ & Py_TPFLAGS_IMMUTABLETYPE) self.assertFalse(cls.__flags__ & Py_TPFLAGS_DISALLOW_INSTANTIATION) + def test_metaclass_details(self): + # Abstract classes (whose metaclass __init__ was not called) can't be + # instantiated directly + NewArray = PyCArrayType.__new__(PyCArrayType, 'NewArray', (Array,), {}) + for cls in Array, NewArray: + with self.subTest(cls=cls): + with self.assertRaisesRegex(TypeError, "abstract class"): + obj = cls() + + # Cannot call the metaclass __init__ more than once + class T(Array): + _type_ = c_int + _length_ = 13 + with self.assertRaisesRegex(SystemError, "already initialized"): + PyCArrayType.__init__(T, 'ptr', (), {}) + def test_simple(self): # create classes holding simple numeric types, and check # various properties. 
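[Editor's note, not part of the patch] The new `test_metaclass_details` cases above assert two behaviours of the ctypes metaclasses: an abstract base whose metaclass `__init__` never ran cannot be instantiated, and calling the metaclass `__init__` a second time now raises `SystemError`. A minimal sketch of those assertions outside the test suite, assuming an interpreter built with these changes (the `Point` class is purely illustrative):

```python
import ctypes

class Point(ctypes.Structure):
    # The metaclass (PyCStructType) initialised this class exactly once
    # when the class body was executed.
    _fields_ = [("x", ctypes.c_int), ("y", ctypes.c_int)]

# ctypes.Structure itself is abstract: instantiating it is rejected.
try:
    ctypes.Structure()
except TypeError as exc:
    print("abstract class:", exc)

# Re-running the metaclass __init__ on an already-initialised class is
# rejected instead of silently rebuilding its internal state.
try:
    type(ctypes.Structure).__init__(Point, "Point", (), {})
except SystemError as exc:
    print("already initialized:", exc)
```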
diff --git a/Lib/test/test_ctypes/test_callbacks.py b/Lib/test/test_ctypes/test_callbacks.py index 64f92ffdca6a3f..8038169afe4304 100644 --- a/Lib/test/test_ctypes/test_callbacks.py +++ b/Lib/test/test_ctypes/test_callbacks.py @@ -106,7 +106,7 @@ def test_pyobject(self): def test_unsupported_restype_1(self): # Only "fundamental" result types are supported for callback - # functions, the type must have a non-NULL stgdict->setfunc. + # functions, the type must have a non-NULL stginfo->setfunc. # POINTER(c_double), for example, is not supported. prototype = self.functype.__func__(POINTER(c_double)) diff --git a/Lib/test/test_ctypes/test_find.py b/Lib/test/test_ctypes/test_find.py index 7732ff37308848..85b28617d2d754 100644 --- a/Lib/test/test_ctypes/test_find.py +++ b/Lib/test/test_ctypes/test_find.py @@ -129,5 +129,28 @@ def test_gh114257(self): self.assertIsNone(find_library("libc")) +@unittest.skipUnless(sys.platform == 'android', 'Test only valid for Android') +class FindLibraryAndroid(unittest.TestCase): + def test_find(self): + for name in [ + "c", "m", # POSIX + "z", # Non-POSIX, but present on Linux + "log", # Not present on Linux + ]: + with self.subTest(name=name): + path = find_library(name) + self.assertIsInstance(path, str) + self.assertEqual( + os.path.dirname(path), + "/system/lib64" if "64" in os.uname().machine + else "/system/lib") + self.assertEqual(os.path.basename(path), f"lib{name}.so") + self.assertTrue(os.path.isfile(path), path) + + for name in ["libc", "nonexistent"]: + with self.subTest(name=name): + self.assertIsNone(find_library(name)) + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_ctypes/test_funcptr.py b/Lib/test/test_ctypes/test_funcptr.py index 0eed39484fb39e..03cfddea6ea61a 100644 --- a/Lib/test/test_ctypes/test_funcptr.py +++ b/Lib/test/test_ctypes/test_funcptr.py @@ -29,6 +29,12 @@ def test_type_flags(self): self.assertTrue(_CFuncPtr.__flags__ & Py_TPFLAGS_IMMUTABLETYPE) self.assertFalse(_CFuncPtr.__flags__ & Py_TPFLAGS_DISALLOW_INSTANTIATION) + def test_metaclass_details(self): + # Cannot call the metaclass __init__ more than once + CdeclCallback = CFUNCTYPE(c_int, c_int, c_int) + with self.assertRaisesRegex(SystemError, "already initialized"): + PyCFuncPtrType.__init__(CdeclCallback, 'ptr', (), {}) + def test_basic(self): X = WINFUNCTYPE(c_int, c_int, c_int) diff --git a/Lib/test/test_ctypes/test_pointers.py b/Lib/test/test_ctypes/test_pointers.py index 8cf2114c282cab..3a5f3660dbbe23 100644 --- a/Lib/test/test_ctypes/test_pointers.py +++ b/Lib/test/test_ctypes/test_pointers.py @@ -33,6 +33,11 @@ def test_type_flags(self): self.assertTrue(_Pointer.__flags__ & Py_TPFLAGS_IMMUTABLETYPE) self.assertFalse(_Pointer.__flags__ & Py_TPFLAGS_DISALLOW_INSTANTIATION) + def test_metaclass_details(self): + # Cannot call the metaclass __init__ more than once + with self.assertRaisesRegex(SystemError, "already initialized"): + PyCPointerType.__init__(POINTER(c_byte), 'ptr', (), {}) + def test_pointer_crash(self): class A(POINTER(c_ulong)): diff --git a/Lib/test/test_ctypes/test_simplesubclasses.py b/Lib/test/test_ctypes/test_simplesubclasses.py index c96798e67f23f7..4e4bef3690f66a 100644 --- a/Lib/test/test_ctypes/test_simplesubclasses.py +++ b/Lib/test/test_ctypes/test_simplesubclasses.py @@ -26,6 +26,29 @@ def test_type_flags(self): self.assertTrue(_SimpleCData.__flags__ & Py_TPFLAGS_IMMUTABLETYPE) self.assertFalse(_SimpleCData.__flags__ & Py_TPFLAGS_DISALLOW_INSTANTIATION) + def test_metaclass_details(self): + # Abstract classes (whose 
metaclass __init__ was not called) can't be + # instantiated directly + NewT = PyCSimpleType.__new__(PyCSimpleType, 'NewT', (_SimpleCData,), {}) + for cls in _SimpleCData, NewT: + with self.subTest(cls=cls): + with self.assertRaisesRegex(TypeError, "abstract class"): + obj = cls() + + # Cannot call the metaclass __init__ more than once + class T(_SimpleCData): + _type_ = "i" + with self.assertRaisesRegex(SystemError, "already initialized"): + PyCSimpleType.__init__(T, 'ptr', (), {}) + + def test_swapped_type_creation(self): + cls = PyCSimpleType.__new__(PyCSimpleType, '', (), {'_type_': 'i'}) + with self.assertRaises(TypeError): + PyCSimpleType.__init__(cls) + PyCSimpleType.__init__(cls, '', (), {'_type_': 'i'}) + self.assertEqual(cls.__ctype_le__.__dict__.get('_type_'), 'i') + self.assertEqual(cls.__ctype_be__.__dict__.get('_type_'), 'i') + def test_compare(self): self.assertEqual(MyInt(3), MyInt(3)) self.assertNotEqual(MyInt(42), MyInt(43)) diff --git a/Lib/test/test_ctypes/test_struct_fields.py b/Lib/test/test_ctypes/test_struct_fields.py index f474a02fa8db06..7adab794809def 100644 --- a/Lib/test/test_ctypes/test_struct_fields.py +++ b/Lib/test/test_ctypes/test_struct_fields.py @@ -69,7 +69,7 @@ def test_cfield_inheritance_hierarchy(self): def test_gh99275(self): class BrokenStructure(Structure): def __init_subclass__(cls, **kwargs): - cls._fields_ = [] # This line will fail, `stgdict` is not ready + cls._fields_ = [] # This line will fail, `stginfo` is not ready with self.assertRaisesRegex(TypeError, 'ctypes state is not initialized'): diff --git a/Lib/test/test_ctypes/test_structures.py b/Lib/test/test_ctypes/test_structures.py index 98bc4bdcac9306..8d83ce4f281b16 100644 --- a/Lib/test/test_ctypes/test_structures.py +++ b/Lib/test/test_ctypes/test_structures.py @@ -85,6 +85,23 @@ def test_type_flags(self): self.assertTrue(Structure.__flags__ & Py_TPFLAGS_IMMUTABLETYPE) self.assertFalse(Structure.__flags__ & Py_TPFLAGS_DISALLOW_INSTANTIATION) + def test_metaclass_details(self): + # Abstract classes (whose metaclass __init__ was not called) can't be + # instantiated directly + NewStructure = PyCStructType.__new__(PyCStructType, 'NewStructure', + (Structure,), {}) + for cls in Structure, NewStructure: + with self.subTest(cls=cls): + with self.assertRaisesRegex(TypeError, "abstract class"): + obj = cls() + + # Cannot call the metaclass __init__ more than once + class T(Structure): + _fields_ = [("x", c_char), + ("y", c_char)] + with self.assertRaisesRegex(SystemError, "already initialized"): + PyCStructType.__init__(T, 'ptr', (), {}) + def test_simple_structs(self): for code, tp in self.formats.items(): class X(Structure): @@ -507,8 +524,8 @@ def _test_issue18060(self, Vector): @unittest.skipUnless(sys.byteorder == 'little', "can't test on this platform") def test_issue18060_a(self): # This test case calls - # PyCStructUnionType_update_stgdict() for each - # _fields_ assignment, and PyCStgDict_clone() + # PyCStructUnionType_update_stginfo() for each + # _fields_ assignment, and PyCStgInfo_clone() # for the Mid and Vector class definitions. class Base(Structure): _fields_ = [('y', c_double), @@ -523,7 +540,7 @@ class Vector(Mid): pass @unittest.skipUnless(sys.byteorder == 'little', "can't test on this platform") def test_issue18060_b(self): # This test case calls - # PyCStructUnionType_update_stgdict() for each + # PyCStructUnionType_update_stginfo() for each # _fields_ assignment. 
class Base(Structure): _fields_ = [('y', c_double), @@ -538,7 +555,7 @@ class Vector(Mid): @unittest.skipUnless(sys.byteorder == 'little', "can't test on this platform") def test_issue18060_c(self): # This test case calls - # PyCStructUnionType_update_stgdict() for each + # PyCStructUnionType_update_stginfo() for each # _fields_ assignment. class Base(Structure): _fields_ = [('y', c_double)] diff --git a/Lib/test/test_ctypes/test_unions.py b/Lib/test/test_ctypes/test_unions.py index cf5344bdf19165..e2dff0f22a9213 100644 --- a/Lib/test/test_ctypes/test_unions.py +++ b/Lib/test/test_ctypes/test_unions.py @@ -1,5 +1,5 @@ import unittest -from ctypes import Union +from ctypes import Union, c_char from ._support import (_CData, UnionType, Py_TPFLAGS_DISALLOW_INSTANTIATION, Py_TPFLAGS_IMMUTABLETYPE) @@ -16,3 +16,20 @@ def test_type_flags(self): with self.subTest(cls=Union): self.assertTrue(Union.__flags__ & Py_TPFLAGS_IMMUTABLETYPE) self.assertFalse(Union.__flags__ & Py_TPFLAGS_DISALLOW_INSTANTIATION) + + def test_metaclass_details(self): + # Abstract classes (whose metaclass __init__ was not called) can't be + # instantiated directly + NewUnion = UnionType.__new__(UnionType, 'NewUnion', + (Union,), {}) + for cls in Union, NewUnion: + with self.subTest(cls=cls): + with self.assertRaisesRegex(TypeError, "abstract class"): + obj = cls() + + # Cannot call the metaclass __init__ more than once + class T(Union): + _fields_ = [("x", c_char), + ("y", c_char)] + with self.assertRaisesRegex(SystemError, "already initialized"): + UnionType.__init__(T, 'ptr', (), {}) diff --git a/Lib/test/test_dataclasses/__init__.py b/Lib/test/test_dataclasses/__init__.py index ede74b0dd15ccf..832e5672c77d0d 100644 --- a/Lib/test/test_dataclasses/__init__.py +++ b/Lib/test/test_dataclasses/__init__.py @@ -2471,6 +2471,15 @@ def __repr__(self): class TestEq(unittest.TestCase): + def test_recursive_eq(self): + # Test a class with recursive child + @dataclass + class C: + recursive: object = ... + c = C() + c.recursive = c + self.assertEqual(c, c) + def test_no_eq(self): # Test a class with no __eq__ and eq=False. @dataclass(eq=False) @@ -3498,6 +3507,17 @@ class A(Base): self.assertIs(a.__weakref__, a_ref) + def test_dataclass_derived_weakref_slot(self): + class A: + pass + + @dataclass(slots=True, weakref_slot=True) + class B(A): + pass + + B() + + class TestDescriptors(unittest.TestCase): def test_set_name(self): # See bpo-33141. diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py index 5404d8d3b99d5d..097ca38e0b1ed8 100644 --- a/Lib/test/test_descr.py +++ b/Lib/test/test_descr.py @@ -5080,7 +5080,8 @@ def test_iter_keys(self): keys = list(it) keys.sort() self.assertEqual(keys, ['__dict__', '__doc__', '__module__', - '__weakref__', 'meth']) + '__static_attributes__', '__weakref__', + 'meth']) @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(), 'trace function introduces __local__') @@ -5089,7 +5090,7 @@ def test_iter_values(self): it = self.C.__dict__.values() self.assertNotIsInstance(it, list) values = list(it) - self.assertEqual(len(values), 5) + self.assertEqual(len(values), 6) @unittest.skipIf(hasattr(sys, 'gettrace') and sys.gettrace(), 'trace function introduces __local__') @@ -5100,7 +5101,8 @@ def test_iter_items(self): keys = [item[0] for item in it] keys.sort() self.assertEqual(keys, ['__dict__', '__doc__', '__module__', - '__weakref__', 'meth']) + '__static_attributes__', '__weakref__', + 'meth']) def test_dict_type_with_metaclass(self): # Testing type of __dict__ when metaclass set... 
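(Editor's aside: the `test_descr` expectations above change because class bodies now also carry a `__static_attributes__` entry. A short sketch of the behaviour being accounted for, assuming a CPython build that includes this change:)

```python
# Sketch only: on builds with this change, attribute names assigned via
# ``self.<name>`` anywhere in the class body are recorded on the class.
class C:
    def meth(self):
        self.x = 1
        self.y = 2

print('__static_attributes__' in C.__dict__)   # True on such builds
print(C.__static_attributes__)                 # a tuple of names, e.g. ('x', 'y')
```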
diff --git a/Lib/test/test_dis.py b/Lib/test/test_dis.py index a93cb509b651c5..747a73829fa705 100644 --- a/Lib/test/test_dis.py +++ b/Lib/test/test_dis.py @@ -1201,19 +1201,10 @@ def test_call_specialize(self): @cpython_only @requires_specialization def test_loop_quicken(self): - import _testinternalcapi # Loop can trigger a quicken where the loop is located - self.code_quicken(loop_test, 1) + self.code_quicken(loop_test, 4) got = self.get_disassembly(loop_test, adaptive=True) expected = dis_loop_test_quickened_code - if _testinternalcapi.get_optimizer(): - # We *may* see ENTER_EXECUTOR in the disassembly. This is a - # temporary hack to keep the test working until dis is able to - # handle the instruction correctly (GH-112383): - got = got.replace( - "ENTER_EXECUTOR 16", - "JUMP_BACKWARD 16 (to L1)", - ) self.do_disassembly_compare(got, expected) @cpython_only diff --git a/Lib/test/test_doctest/test_doctest.py b/Lib/test/test_doctest/test_doctest.py index 43be200b983227..3e883c56f6c766 100644 --- a/Lib/test/test_doctest/test_doctest.py +++ b/Lib/test/test_doctest/test_doctest.py @@ -2628,9 +2628,9 @@ def test_testfile(): r""" ... NameError: name 'favorite_color' is not defined ********************************************************************** - 1 items had failures: + 1 item had failures: 1 of 2 in test_doctest.txt - ***Test Failed*** 1 failures. + ***Test Failed*** 1 failure. TestResults(failed=1, attempted=2) >>> doctest.master = None # Reset master. @@ -2657,9 +2657,9 @@ def test_testfile(): r""" Got: 'red' ********************************************************************** - 1 items had failures: + 1 item had failures: 1 of 2 in test_doctest.txt - ***Test Failed*** 1 failures. + ***Test Failed*** 1 failure. TestResults(failed=1, attempted=2) >>> doctest.master = None # Reset master. @@ -2689,10 +2689,10 @@ def test_testfile(): r""" b ok - 1 items passed all tests: + 1 item passed all tests: 2 tests in test_doctest.txt - 2 tests in 1 items. - 2 passed and 0 failed. + 2 tests in 1 item. + 2 passed. Test passed. TestResults(failed=0, attempted=2) >>> doctest.master = None # Reset master. @@ -2749,7 +2749,7 @@ def test_testfile(): r""" ********************************************************************** ... ********************************************************************** - 1 items had failures: + 1 item had failures: 2 of 2 in test_doctest4.txt ***Test Failed*** 2 failures. TestResults(failed=2, attempted=2) @@ -2772,10 +2772,10 @@ def test_testfile(): r""" Expecting: 'b\u0105r' ok - 1 items passed all tests: + 1 item passed all tests: 2 tests in test_doctest4.txt - 2 tests in 1 items. - 2 passed and 0 failed. + 2 tests in 1 item. + 2 passed. Test passed. TestResults(failed=0, attempted=2) >>> doctest.master = None # Reset master. @@ -2997,10 +2997,10 @@ def test_CLI(): r""" Expecting: 'a' ok - 1 items passed all tests: + 1 item passed all tests: 2 tests in myfile.doc - 2 tests in 1 items. - 2 passed and 0 failed. + 2 tests in 1 item. + 2 passed. Test passed. Now we'll write a couple files, one with three tests, the other a python module @@ -3074,7 +3074,7 @@ def test_CLI(): r""" Got: 'ajkml' ********************************************************************** - 1 items had failures: + 1 item had failures: 2 of 3 in myfile.doc ***Test Failed*** 2 failures. 
@@ -3101,9 +3101,9 @@ def test_CLI(): r""" Got: 'abcdef' ********************************************************************** - 1 items had failures: + 1 item had failures: 1 of 2 in myfile.doc - ***Test Failed*** 1 failures. + ***Test Failed*** 1 failure. The fifth test uses verbose with the two options, so we should get verbose success output for the tests in both files: @@ -3126,10 +3126,10 @@ def test_CLI(): r""" Expecting: 'a...l' ok - 1 items passed all tests: + 1 item passed all tests: 3 tests in myfile.doc - 3 tests in 1 items. - 3 passed and 0 failed. + 3 tests in 1 item. + 3 passed. Test passed. Trying: 1 + 1 @@ -3141,12 +3141,12 @@ def test_CLI(): r""" Expecting: 'abc def' ok - 1 items had no tests: + 1 item had no tests: myfile2 - 1 items passed all tests: + 1 item passed all tests: 2 tests in myfile2.test_func 2 tests in 2 items. - 2 passed and 0 failed. + 2 passed. Test passed. We should also check some typical error cases. diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py index 1a6d8afe6ed6fe..3ba4929dd1b133 100644 --- a/Lib/test/test_functools.py +++ b/Lib/test/test_functools.py @@ -334,8 +334,10 @@ def test_recursive_pickle(self): f.__setstate__((f, (), {}, {})) try: for proto in range(pickle.HIGHEST_PROTOCOL + 1): - with self.assertRaises(RecursionError): - pickle.dumps(f, proto) + # gh-117008: Small limit since pickle uses C stack memory + with support.infinite_recursion(100): + with self.assertRaises(RecursionError): + pickle.dumps(f, proto) finally: f.__setstate__((capture, (), {}, {})) diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py index f1a7afac0bcd19..fa8e50fccb2c7b 100644 --- a/Lib/test/test_gc.py +++ b/Lib/test/test_gc.py @@ -384,19 +384,11 @@ def test_collect_generations(self): # each call to collect(N) x = [] gc.collect(0) - # x is now in gen 1 + # x is now in the old gen a, b, c = gc.get_count() - gc.collect(1) - # x is now in gen 2 - d, e, f = gc.get_count() - gc.collect(2) - # x is now in gen 3 - g, h, i = gc.get_count() - # We don't check a, d, g since their exact values depends on + # We don't check a since its exact values depends on # internal implementation details of the interpreter. 
self.assertEqual((b, c), (1, 0)) - self.assertEqual((e, f), (0, 1)) - self.assertEqual((h, i), (0, 0)) def test_trashcan(self): class Ouch: @@ -831,42 +823,10 @@ def test_get_objects_generations(self): self.assertTrue( any(l is element for element in gc.get_objects(generation=0)) ) - self.assertFalse( - any(l is element for element in gc.get_objects(generation=1)) - ) - self.assertFalse( - any(l is element for element in gc.get_objects(generation=2)) - ) - gc.collect(generation=0) - self.assertFalse( - any(l is element for element in gc.get_objects(generation=0)) - ) - self.assertTrue( - any(l is element for element in gc.get_objects(generation=1)) - ) - self.assertFalse( - any(l is element for element in gc.get_objects(generation=2)) - ) - gc.collect(generation=1) - self.assertFalse( - any(l is element for element in gc.get_objects(generation=0)) - ) - self.assertFalse( - any(l is element for element in gc.get_objects(generation=1)) - ) - self.assertTrue( - any(l is element for element in gc.get_objects(generation=2)) - ) - gc.collect(generation=2) + gc.collect() self.assertFalse( any(l is element for element in gc.get_objects(generation=0)) ) - self.assertFalse( - any(l is element for element in gc.get_objects(generation=1)) - ) - self.assertTrue( - any(l is element for element in gc.get_objects(generation=2)) - ) del l gc.collect() @@ -1077,6 +1037,71 @@ class Z: gc.enable() +class IncrementalGCTests(unittest.TestCase): + + def setUp(self): + # Reenable GC as it is disabled module-wide + gc.enable() + + def tearDown(self): + gc.disable() + + @unittest.skipIf(Py_GIL_DISABLED, "Free threading does not support incremental GC") + # Use small increments to emulate longer running process in a shorter time + @gc_threshold(200, 10) + def test_incremental_gc_handles_fast_cycle_creation(self): + + class LinkedList: + + #Use slots to reduce number of implicit objects + __slots__ = "next", "prev", "surprise" + + def __init__(self, next=None, prev=None): + self.next = next + if next is not None: + next.prev = self + self.prev = prev + if prev is not None: + prev.next = self + + def make_ll(depth): + head = LinkedList() + for i in range(depth): + head = LinkedList(head, head.prev) + return head + + head = make_ll(1000) + count = 1000 + + # There will be some objects we aren't counting, + # e.g. the gc stats dicts. This test checks + # that the counts don't grow, so we try to + # correct for the uncounted objects + # This is just an estimate. + CORRECTION = 20 + + enabled = gc.isenabled() + gc.enable() + olds = [] + for i in range(20_000): + newhead = make_ll(20) + count += 20 + newhead.surprise = head + olds.append(newhead) + if len(olds) == 20: + stats = gc.get_stats() + young = stats[0] + incremental = stats[1] + old = stats[2] + collected = young['collected'] + incremental['collected'] + old['collected'] + count += CORRECTION + live = count - collected + self.assertLess(live, 25000) + del olds[:] + if not enabled: + gc.disable() + + class GCCallbackTests(unittest.TestCase): def setUp(self): # Save gc state and disable it. 
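(Editor's aside: the new `IncrementalGCTests` case relies on the per-generation counters that `gc.get_stats()` reports. A standalone sketch of that bookkeeping, illustrative only and not part of the patch:)

```python
# Sum the 'collected' counters across all generations, as the incremental-GC
# test does when estimating how many cycle objects remain live.
import gc

gc.collect()
total_collected = sum(gen["collected"] for gen in gc.get_stats())
print(f"objects reclaimed by the cycle collector so far: {total_collected}")
```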
@@ -1198,6 +1223,7 @@ def test_collect_garbage(self): self.assertEqual(len(gc.garbage), 0) + @requires_subprocess() @unittest.skipIf(BUILD_WITH_NDEBUG, 'built with -NDEBUG') def test_refcount_errors(self): diff --git a/Lib/test/test_glob.py b/Lib/test/test_glob.py index 2de997501039ad..6719bdbb0cc9b1 100644 --- a/Lib/test/test_glob.py +++ b/Lib/test/test_glob.py @@ -41,6 +41,11 @@ def setUp(self): os.symlink(self.norm('broken'), self.norm('sym1')) os.symlink('broken', self.norm('sym2')) os.symlink(os.path.join('a', 'bcd'), self.norm('sym3')) + self.open_dirfd() + + def open_dirfd(self): + if self.dir_fd is not None: + os.close(self.dir_fd) if {os.open, os.stat} <= os.supports_dir_fd and os.scandir in os.supports_fd: self.dir_fd = os.open(self.tempdir, os.O_RDONLY | os.O_DIRECTORY) else: @@ -350,6 +355,10 @@ def test_glob_non_directory(self): def test_glob_named_pipe(self): path = os.path.join(self.tempdir, 'mypipe') os.mkfifo(path) + + # gh-117127: Reopen self.dir_fd to pick up directory changes + self.open_dirfd() + self.assertEqual(self.rglob('mypipe'), [path]) self.assertEqual(self.rglob('mypipe*'), [path]) self.assertEqual(self.rglob('mypipe', ''), []) diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py index b97474acca370f..79bf7dbdbb81a0 100644 --- a/Lib/test/test_imaplib.py +++ b/Lib/test/test_imaplib.py @@ -8,6 +8,7 @@ import time import calendar import threading +import re import socket from test.support import verbose, run_with_tz, run_with_locale, cpython_only, requires_resource @@ -558,9 +559,13 @@ def test_ssl_raises(self): self.assertEqual(ssl_context.check_hostname, True) ssl_context.load_verify_locations(CAFILE) - with self.assertRaisesRegex(ssl.CertificateError, - "IP address mismatch, certificate is not valid for " - "'127.0.0.1'"): + # Allow for flexible libssl error messages. + regex = re.compile(r"""( + IP address mismatch, certificate is not valid for '127.0.0.1' # OpenSSL + | + CERTIFICATE_VERIFY_FAILED # AWS-LC + )""", re.X) + with self.assertRaisesRegex(ssl.CertificateError, regex): _, server = self._setup(SimpleIMAPHandler) client = self.imap_class(*server.server_address, ssl_context=ssl_context) @@ -954,10 +959,13 @@ def test_ssl_verified(self): ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) ssl_context.load_verify_locations(CAFILE) - with self.assertRaisesRegex( - ssl.CertificateError, - "IP address mismatch, certificate is not valid for " - "'127.0.0.1'"): + # Allow for flexible libssl error messages. 
+ regex = re.compile(r"""( + IP address mismatch, certificate is not valid for '127.0.0.1' # OpenSSL + | + CERTIFICATE_VERIFY_FAILED # AWS-LC + )""", re.X) + with self.assertRaisesRegex(ssl.CertificateError, regex): with self.reaped_server(SimpleIMAPHandler) as server: client = self.imap_class(*server.server_address, ssl_context=ssl_context) diff --git a/Lib/test/test_import/__init__.py b/Lib/test/test_import/__init__.py index 7b0126226c4aba..4deed7f3ba2522 100644 --- a/Lib/test/test_import/__init__.py +++ b/Lib/test/test_import/__init__.py @@ -5,7 +5,11 @@ import importlib.util from importlib._bootstrap_external import _get_sourcefile from importlib.machinery import ( - BuiltinImporter, ExtensionFileLoader, FrozenImporter, SourceFileLoader, + AppleFrameworkLoader, + BuiltinImporter, + ExtensionFileLoader, + FrozenImporter, + SourceFileLoader, ) import marshal import os @@ -25,7 +29,7 @@ from test.support import os_helper from test.support import ( - STDLIB_DIR, swap_attr, swap_item, cpython_only, is_emscripten, + STDLIB_DIR, swap_attr, swap_item, cpython_only, is_apple_mobile, is_emscripten, is_wasi, run_in_subinterp, run_in_subinterp_with_config, Py_TRACE_REFS) from test.support.import_helper import ( forget, make_legacy_pyc, unlink, unload, ready_to_import, @@ -66,6 +70,7 @@ def _require_loader(module, loader, skip): MODULE_KINDS = { BuiltinImporter: 'built-in', ExtensionFileLoader: 'extension', + AppleFrameworkLoader: 'framework extension', FrozenImporter: 'frozen', SourceFileLoader: 'pure Python', } @@ -91,7 +96,12 @@ def require_builtin(module, *, skip=False): assert module.__spec__.origin == 'built-in', module.__spec__ def require_extension(module, *, skip=False): - _require_loader(module, ExtensionFileLoader, skip) + # Apple extensions must be distributed as frameworks. This requires + # a specialist loader. + if is_apple_mobile: + _require_loader(module, AppleFrameworkLoader, skip) + else: + _require_loader(module, ExtensionFileLoader, skip) def require_frozen(module, *, skip=True): module = _require_loader(module, FrozenImporter, skip) @@ -134,7 +144,8 @@ def restore__testsinglephase(*, _orig=_testsinglephase): # it to its nominal state. sys.modules.pop('_testsinglephase', None) _orig._clear_globals() - _testinternalcapi.clear_extension('_testsinglephase', _orig.__file__) + origin = _orig.__spec__.origin + _testinternalcapi.clear_extension('_testsinglephase', origin) import _testsinglephase @@ -360,7 +371,7 @@ def test_from_import_missing_attr_has_name_and_so_path(self): self.assertEqual(cm.exception.path, _testcapi.__file__) self.assertRegex( str(cm.exception), - r"cannot import name 'i_dont_exist' from '_testcapi' \(.*\.(so|pyd)\)" + r"cannot import name 'i_dont_exist' from '_testcapi' \(.*\.(so|fwork|pyd)\)" ) else: self.assertEqual( @@ -1689,6 +1700,14 @@ def pipe(self): os.set_blocking(r, False) return (r, w) + def create_extension_loader(self, modname, filename): + # Apple extensions must be distributed as frameworks. This requires + # a specialist loader. + if is_apple_mobile: + return AppleFrameworkLoader(modname, filename) + else: + return ExtensionFileLoader(modname, filename) + def import_script(self, name, fd, filename=None, check_override=None): override_text = '' if check_override is not None: @@ -1697,12 +1716,19 @@ def import_script(self, name, fd, filename=None, check_override=None): _imp._override_multi_interp_extensions_check({check_override}) ''' if filename: + # Apple extensions must be distributed as frameworks. This requires + # a specialist loader. 
+ if is_apple_mobile: + loader = "AppleFrameworkLoader" + else: + loader = "ExtensionFileLoader" + return textwrap.dedent(f''' from importlib.util import spec_from_loader, module_from_spec - from importlib.machinery import ExtensionFileLoader + from importlib.machinery import {loader} import os, sys {override_text} - loader = ExtensionFileLoader({name!r}, {filename!r}) + loader = {loader}({name!r}, {filename!r}) spec = spec_from_loader({name!r}, loader) try: module = module_from_spec(spec) @@ -1883,7 +1909,7 @@ def test_multi_init_extension_compat(self): def test_multi_init_extension_non_isolated_compat(self): modname = '_test_non_isolated' filename = _testmultiphase.__file__ - loader = ExtensionFileLoader(modname, filename) + loader = self.create_extension_loader(modname, filename) spec = importlib.util.spec_from_loader(modname, loader) module = importlib.util.module_from_spec(spec) loader.exec_module(module) @@ -1901,7 +1927,7 @@ def test_multi_init_extension_non_isolated_compat(self): def test_multi_init_extension_per_interpreter_gil_compat(self): modname = '_test_shared_gil_only' filename = _testmultiphase.__file__ - loader = ExtensionFileLoader(modname, filename) + loader = self.create_extension_loader(modname, filename) spec = importlib.util.spec_from_loader(modname, loader) module = importlib.util.module_from_spec(spec) loader.exec_module(module) @@ -2034,10 +2060,25 @@ class SinglephaseInitTests(unittest.TestCase): @classmethod def setUpClass(cls): spec = importlib.util.find_spec(cls.NAME) - from importlib.machinery import ExtensionFileLoader - cls.FILE = spec.origin cls.LOADER = type(spec.loader) - assert cls.LOADER is ExtensionFileLoader + + # Apple extensions must be distributed as frameworks. This requires + # a specialist loader, and we need to differentiate between the + # spec.origin and the original file location. + if is_apple_mobile: + assert cls.LOADER is AppleFrameworkLoader + + cls.ORIGIN = spec.origin + with open(spec.origin + ".origin", "r") as f: + cls.FILE = os.path.join( + os.path.dirname(sys.executable), + f.read().strip() + ) + else: + assert cls.LOADER is ExtensionFileLoader + + cls.ORIGIN = spec.origin + cls.FILE = spec.origin # Start fresh. cls.clean_up() @@ -2053,14 +2094,15 @@ def tearDown(self): @classmethod def clean_up(cls): name = cls.NAME - filename = cls.FILE if name in sys.modules: if hasattr(sys.modules[name], '_clear_globals'): - assert sys.modules[name].__file__ == filename + assert sys.modules[name].__file__ == cls.FILE, \ + f"{sys.modules[name].__file__} != {cls.FILE}" + sys.modules[name]._clear_globals() del sys.modules[name] # Clear all internally cached data for the extension. - _testinternalcapi.clear_extension(name, filename) + _testinternalcapi.clear_extension(name, cls.ORIGIN) ######################### # helpers @@ -2068,7 +2110,7 @@ def clean_up(cls): def add_module_cleanup(self, name): def clean_up(): # Clear all internally cached data for the extension. 
- _testinternalcapi.clear_extension(name, self.FILE) + _testinternalcapi.clear_extension(name, self.ORIGIN) self.addCleanup(clean_up) def _load_dynamic(self, name, path): @@ -2091,7 +2133,7 @@ def load(self, name): except AttributeError: already_loaded = self.already_loaded = {} assert name not in already_loaded - mod = self._load_dynamic(name, self.FILE) + mod = self._load_dynamic(name, self.ORIGIN) self.assertNotIn(mod, already_loaded.values()) already_loaded[name] = mod return types.SimpleNamespace( @@ -2103,7 +2145,7 @@ def load(self, name): def re_load(self, name, mod): assert sys.modules[name] is mod assert mod.__dict__ == mod.__dict__ - reloaded = self._load_dynamic(name, self.FILE) + reloaded = self._load_dynamic(name, self.ORIGIN) return types.SimpleNamespace( name=name, module=reloaded, @@ -2129,7 +2171,7 @@ def clean_up(): name = {self.NAME!r} if name in sys.modules: sys.modules.pop(name)._clear_globals() - _testinternalcapi.clear_extension(name, {self.FILE!r}) + _testinternalcapi.clear_extension(name, {self.ORIGIN!r}) ''')) _interpreters.destroy(interpid) self.addCleanup(clean_up) @@ -2146,7 +2188,7 @@ def import_in_subinterp(self, interpid=None, *, postcleanup = f''' {import_} mod._clear_globals() - _testinternalcapi.clear_extension(name, {self.FILE!r}) + _testinternalcapi.clear_extension(name, {self.ORIGIN!r}) ''' try: @@ -2184,7 +2226,7 @@ def check_common(self, loaded): # mod.__name__ might not match, but the spec will. self.assertEqual(mod.__spec__.name, loaded.name) self.assertEqual(mod.__file__, self.FILE) - self.assertEqual(mod.__spec__.origin, self.FILE) + self.assertEqual(mod.__spec__.origin, self.ORIGIN) if not isolated: self.assertTrue(issubclass(mod.error, Exception)) self.assertEqual(mod.int_const, 1969) @@ -2578,7 +2620,7 @@ def test_basic_multiple_interpreters_deleted_no_reset(self): # First, load in the main interpreter but then completely clear it. loaded_main = self.load(self.NAME) loaded_main.module._clear_globals() - _testinternalcapi.clear_extension(self.NAME, self.FILE) + _testinternalcapi.clear_extension(self.NAME, self.ORIGIN) # At this point: # * alive in 0 interpreters diff --git a/Lib/test/test_importlib/extension/test_finder.py b/Lib/test/test_importlib/extension/test_finder.py index 3de120958fd27d..cdc8884d668a66 100644 --- a/Lib/test/test_importlib/extension/test_finder.py +++ b/Lib/test/test_importlib/extension/test_finder.py @@ -1,3 +1,4 @@ +from test.support import is_apple_mobile from test.test_importlib import abc, util machinery = util.import_importlib('importlib.machinery') @@ -19,9 +20,27 @@ def setUp(self): ) def find_spec(self, fullname): - importer = self.machinery.FileFinder(util.EXTENSIONS.path, - (self.machinery.ExtensionFileLoader, - self.machinery.EXTENSION_SUFFIXES)) + if is_apple_mobile: + # Apple mobile platforms require a specialist loader that uses + # .fwork files as placeholders for the true `.so` files. 
+ loaders = [ + ( + self.machinery.AppleFrameworkLoader, + [ + ext.replace(".so", ".fwork") + for ext in self.machinery.EXTENSION_SUFFIXES + ] + ) + ] + else: + loaders = [ + ( + self.machinery.ExtensionFileLoader, + self.machinery.EXTENSION_SUFFIXES + ) + ] + + importer = self.machinery.FileFinder(util.EXTENSIONS.path, *loaders) return importer.find_spec(fullname) diff --git a/Lib/test/test_importlib/extension/test_loader.py b/Lib/test/test_importlib/extension/test_loader.py index f4879e75847d8d..7607f0e0857595 100644 --- a/Lib/test/test_importlib/extension/test_loader.py +++ b/Lib/test/test_importlib/extension/test_loader.py @@ -1,3 +1,4 @@ +from test.support import is_apple_mobile from test.test_importlib import abc, util machinery = util.import_importlib('importlib.machinery') @@ -23,8 +24,15 @@ def setUp(self): raise unittest.SkipTest( f"{util.EXTENSIONS.name} is a builtin module" ) - self.loader = self.machinery.ExtensionFileLoader(util.EXTENSIONS.name, - util.EXTENSIONS.file_path) + + # Apple extensions must be distributed as frameworks. This requires + # a specialist loader. + if is_apple_mobile: + self.LoaderClass = self.machinery.AppleFrameworkLoader + else: + self.LoaderClass = self.machinery.ExtensionFileLoader + + self.loader = self.LoaderClass(util.EXTENSIONS.name, util.EXTENSIONS.file_path) def load_module(self, fullname): with warnings.catch_warnings(): @@ -32,13 +40,11 @@ def load_module(self, fullname): return self.loader.load_module(fullname) def test_equality(self): - other = self.machinery.ExtensionFileLoader(util.EXTENSIONS.name, - util.EXTENSIONS.file_path) + other = self.LoaderClass(util.EXTENSIONS.name, util.EXTENSIONS.file_path) self.assertEqual(self.loader, other) def test_inequality(self): - other = self.machinery.ExtensionFileLoader('_' + util.EXTENSIONS.name, - util.EXTENSIONS.file_path) + other = self.LoaderClass('_' + util.EXTENSIONS.name, util.EXTENSIONS.file_path) self.assertNotEqual(self.loader, other) def test_load_module_API(self): @@ -58,8 +64,7 @@ def test_module(self): ('__package__', '')]: self.assertEqual(getattr(module, attr), value) self.assertIn(util.EXTENSIONS.name, sys.modules) - self.assertIsInstance(module.__loader__, - self.machinery.ExtensionFileLoader) + self.assertIsInstance(module.__loader__, self.LoaderClass) # No extension module as __init__ available for testing. test_package = None @@ -86,7 +91,7 @@ def test_is_package(self): self.assertFalse(self.loader.is_package(util.EXTENSIONS.name)) for suffix in self.machinery.EXTENSION_SUFFIXES: path = os.path.join('some', 'path', 'pkg', '__init__' + suffix) - loader = self.machinery.ExtensionFileLoader('pkg', path) + loader = self.LoaderClass('pkg', path) self.assertTrue(loader.is_package('pkg')) @@ -101,6 +106,14 @@ class SinglePhaseExtensionModuleTests(abc.LoaderTests): def setUp(self): if not self.machinery.EXTENSION_SUFFIXES or not util.EXTENSIONS: raise unittest.SkipTest("Requires dynamic loading support.") + + # Apple extensions must be distributed as frameworks. This requires + # a specialist loader. 
+ if is_apple_mobile: + self.LoaderClass = self.machinery.AppleFrameworkLoader + else: + self.LoaderClass = self.machinery.ExtensionFileLoader + self.name = '_testsinglephase' if self.name in sys.builtin_module_names: raise unittest.SkipTest( @@ -109,8 +122,8 @@ def setUp(self): finder = self.machinery.FileFinder(None) self.spec = importlib.util.find_spec(self.name) assert self.spec - self.loader = self.machinery.ExtensionFileLoader( - self.name, self.spec.origin) + + self.loader = self.LoaderClass(self.name, self.spec.origin) def load_module(self): with warnings.catch_warnings(): @@ -120,7 +133,7 @@ def load_module(self): def load_module_by_name(self, fullname): # Load a module from the test extension by name. origin = self.spec.origin - loader = self.machinery.ExtensionFileLoader(fullname, origin) + loader = self.LoaderClass(fullname, origin) spec = importlib.util.spec_from_loader(fullname, loader) module = importlib.util.module_from_spec(spec) loader.exec_module(module) @@ -137,8 +150,7 @@ def test_module(self): with self.assertRaises(AttributeError): module.__path__ self.assertIs(module, sys.modules[self.name]) - self.assertIsInstance(module.__loader__, - self.machinery.ExtensionFileLoader) + self.assertIsInstance(module.__loader__, self.LoaderClass) # No extension module as __init__ available for testing. test_package = None @@ -182,6 +194,14 @@ class MultiPhaseExtensionModuleTests(abc.LoaderTests): def setUp(self): if not self.machinery.EXTENSION_SUFFIXES or not util.EXTENSIONS: raise unittest.SkipTest("Requires dynamic loading support.") + + # Apple extensions must be distributed as frameworks. This requires + # a specialist loader. + if is_apple_mobile: + self.LoaderClass = self.machinery.AppleFrameworkLoader + else: + self.LoaderClass = self.machinery.ExtensionFileLoader + self.name = '_testmultiphase' if self.name in sys.builtin_module_names: raise unittest.SkipTest( @@ -190,8 +210,7 @@ def setUp(self): finder = self.machinery.FileFinder(None) self.spec = importlib.util.find_spec(self.name) assert self.spec - self.loader = self.machinery.ExtensionFileLoader( - self.name, self.spec.origin) + self.loader = self.LoaderClass(self.name, self.spec.origin) def load_module(self): # Load the module from the test extension. @@ -202,7 +221,7 @@ def load_module(self): def load_module_by_name(self, fullname): # Load a module from the test extension by name. origin = self.spec.origin - loader = self.machinery.ExtensionFileLoader(fullname, origin) + loader = self.LoaderClass(fullname, origin) spec = importlib.util.spec_from_loader(fullname, loader) module = importlib.util.module_from_spec(spec) loader.exec_module(module) @@ -228,8 +247,7 @@ def test_module(self): with self.assertRaises(AttributeError): module.__path__ self.assertIs(module, sys.modules[self.name]) - self.assertIsInstance(module.__loader__, - self.machinery.ExtensionFileLoader) + self.assertIsInstance(module.__loader__, self.LoaderClass) def test_functionality(self): # Test basic functionality of stuff defined in an extension module. 
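(Editor's aside: several of the importlib tests above repeat the same platform check when constructing a loader. A hedged sketch of that selection pattern; `extension_loader` is a hypothetical helper name, and the import of `AppleFrameworkLoader` assumes a build that ships it:)

```python
# Illustrative only: pick the extension loader the way these tests do.
from importlib.machinery import ExtensionFileLoader
from test.support import is_apple_mobile

try:
    from importlib.machinery import AppleFrameworkLoader
except ImportError:          # older builds without the iOS framework loader
    AppleFrameworkLoader = None

def extension_loader(name, path):
    # Apple mobile platforms distribute extensions as frameworks, so they
    # need the specialist loader; everywhere else the file loader is used.
    if is_apple_mobile and AppleFrameworkLoader is not None:
        return AppleFrameworkLoader(name, path)
    return ExtensionFileLoader(name, path)
```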
diff --git a/Lib/test/test_importlib/data/__init__.py b/Lib/test/test_importlib/metadata/__init__.py similarity index 100% rename from Lib/test/test_importlib/data/__init__.py rename to Lib/test/test_importlib/metadata/__init__.py diff --git a/Lib/test/test_importlib/_context.py b/Lib/test/test_importlib/metadata/_context.py similarity index 100% rename from Lib/test/test_importlib/_context.py rename to Lib/test/test_importlib/metadata/_context.py diff --git a/Lib/test/test_importlib/_path.py b/Lib/test/test_importlib/metadata/_path.py similarity index 88% rename from Lib/test/test_importlib/_path.py rename to Lib/test/test_importlib/metadata/_path.py index 25c799fa44cd55..b3cfb9cd549d6c 100644 --- a/Lib/test/test_importlib/_path.py +++ b/Lib/test/test_importlib/metadata/_path.py @@ -17,20 +17,15 @@ class Symlink(str): @runtime_checkable class TreeMaker(Protocol): - def __truediv__(self, *args, **kwargs): - ... # pragma: no cover + def __truediv__(self, *args, **kwargs): ... # pragma: no cover - def mkdir(self, **kwargs): - ... # pragma: no cover + def mkdir(self, **kwargs): ... # pragma: no cover - def write_text(self, content, **kwargs): - ... # pragma: no cover + def write_text(self, content, **kwargs): ... # pragma: no cover - def write_bytes(self, content): - ... # pragma: no cover + def write_bytes(self, content): ... # pragma: no cover - def symlink_to(self, target): - ... # pragma: no cover + def symlink_to(self, target): ... # pragma: no cover def _ensure_tree_maker(obj: Union[str, TreeMaker]) -> TreeMaker: diff --git a/Lib/test/test_importlib/metadata/data/__init__.py b/Lib/test/test_importlib/metadata/data/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/Lib/test/test_importlib/data/example-21.12-py3-none-any.whl b/Lib/test/test_importlib/metadata/data/example-21.12-py3-none-any.whl similarity index 100% rename from Lib/test/test_importlib/data/example-21.12-py3-none-any.whl rename to Lib/test/test_importlib/metadata/data/example-21.12-py3-none-any.whl diff --git a/Lib/test/test_importlib/data/example-21.12-py3.6.egg b/Lib/test/test_importlib/metadata/data/example-21.12-py3.6.egg similarity index 100% rename from Lib/test/test_importlib/data/example-21.12-py3.6.egg rename to Lib/test/test_importlib/metadata/data/example-21.12-py3.6.egg diff --git a/Lib/test/test_importlib/data/example2-1.0.0-py3-none-any.whl b/Lib/test/test_importlib/metadata/data/example2-1.0.0-py3-none-any.whl similarity index 100% rename from Lib/test/test_importlib/data/example2-1.0.0-py3-none-any.whl rename to Lib/test/test_importlib/metadata/data/example2-1.0.0-py3-none-any.whl diff --git a/Lib/test/test_importlib/metadata/data/sources/example/example/__init__.py b/Lib/test/test_importlib/metadata/data/sources/example/example/__init__.py new file mode 100644 index 00000000000000..ba73b743394169 --- /dev/null +++ b/Lib/test/test_importlib/metadata/data/sources/example/example/__init__.py @@ -0,0 +1,2 @@ +def main(): + return 'example' diff --git a/Lib/test/test_importlib/metadata/data/sources/example/setup.py b/Lib/test/test_importlib/metadata/data/sources/example/setup.py new file mode 100644 index 00000000000000..479488a0348186 --- /dev/null +++ b/Lib/test/test_importlib/metadata/data/sources/example/setup.py @@ -0,0 +1,11 @@ +from setuptools import setup + +setup( + name='example', + version='21.12', + license='Apache Software License', + packages=['example'], + entry_points={ + 'console_scripts': ['example = example:main', 'Example=example:main'], + }, +) diff --git 
a/Lib/test/test_importlib/metadata/data/sources/example2/example2/__init__.py b/Lib/test/test_importlib/metadata/data/sources/example2/example2/__init__.py new file mode 100644 index 00000000000000..de645c2e8bc75b --- /dev/null +++ b/Lib/test/test_importlib/metadata/data/sources/example2/example2/__init__.py @@ -0,0 +1,2 @@ +def main(): + return "example" diff --git a/Lib/test/test_importlib/metadata/data/sources/example2/pyproject.toml b/Lib/test/test_importlib/metadata/data/sources/example2/pyproject.toml new file mode 100644 index 00000000000000..011f4751fb9e32 --- /dev/null +++ b/Lib/test/test_importlib/metadata/data/sources/example2/pyproject.toml @@ -0,0 +1,10 @@ +[build-system] +build-backend = 'trampolim' +requires = ['trampolim'] + +[project] +name = 'example2' +version = '1.0.0' + +[project.scripts] +example = 'example2:main' diff --git a/Lib/test/test_importlib/fixtures.py b/Lib/test/test_importlib/metadata/fixtures.py similarity index 94% rename from Lib/test/test_importlib/fixtures.py rename to Lib/test/test_importlib/metadata/fixtures.py index 8c973356b5660d..7ff94c9afe88e1 100644 --- a/Lib/test/test_importlib/fixtures.py +++ b/Lib/test/test_importlib/metadata/fixtures.py @@ -9,7 +9,8 @@ import functools import contextlib -from test.support.os_helper import FS_NONASCII +from test.support import import_helper +from test.support import os_helper from test.support import requires_zlib from . import _path @@ -85,6 +86,7 @@ def add_sys_path(dir): def setUp(self): super().setUp() self.fixtures.enter_context(self.add_sys_path(self.site_dir)) + self.fixtures.enter_context(import_helper.isolated_modules()) class SiteBuilder(SiteDir): @@ -141,15 +143,13 @@ class DistInfoPkgEditable(DistInfoPkg): some_hash = '524127ce937f7cb65665130c695abd18ca386f60bb29687efb976faa1596fdcc' files: FilesSpec = { 'distinfo_pkg-1.0.0.dist-info': { - 'direct_url.json': json.dumps( - { - "archive_info": { - "hash": f"sha256={some_hash}", - "hashes": {"sha256": f"{some_hash}"}, - }, - "url": "file:///path/to/distinfo_pkg-1.0.0.editable-py3-none-any.whl", - } - ) + 'direct_url.json': json.dumps({ + "archive_info": { + "hash": f"sha256={some_hash}", + "hashes": {"sha256": f"{some_hash}"}, + }, + "url": "file:///path/to/distinfo_pkg-1.0.0.editable-py3-none-any.whl", + }) }, } @@ -338,7 +338,9 @@ def record_names(file_defs): class FileBuilder: def unicode_filename(self): - return FS_NONASCII or self.skip("File system does not support non-ascii.") + return os_helper.FS_NONASCII or self.skip( + "File system does not support non-ascii." 
+ ) def DALS(str): @@ -348,7 +350,7 @@ def DALS(str): @requires_zlib() class ZipFixtures: - root = 'test.test_importlib.data' + root = 'test.test_importlib.metadata.data' def _fixture_on_path(self, filename): pkg_file = resources.files(self.root).joinpath(filename) diff --git a/Lib/test/test_importlib/stubs.py b/Lib/test/test_importlib/metadata/stubs.py similarity index 100% rename from Lib/test/test_importlib/stubs.py rename to Lib/test/test_importlib/metadata/stubs.py diff --git a/Lib/test/test_importlib/test_metadata_api.py b/Lib/test/test_importlib/metadata/test_api.py similarity index 100% rename from Lib/test/test_importlib/test_metadata_api.py rename to Lib/test/test_importlib/metadata/test_api.py diff --git a/Lib/test/test_importlib/test_main.py b/Lib/test/test_importlib/metadata/test_main.py similarity index 96% rename from Lib/test/test_importlib/test_main.py rename to Lib/test/test_importlib/metadata/test_main.py index 0a769b89841234..c4accaeb9ba9ed 100644 --- a/Lib/test/test_importlib/test_main.py +++ b/Lib/test/test_importlib/metadata/test_main.py @@ -2,6 +2,7 @@ import pickle import unittest import warnings +import importlib import importlib.metadata import contextlib from test.support import os_helper @@ -308,12 +309,10 @@ def test_sortable(self): """ EntryPoint objects are sortable, but result is undefined. """ - sorted( - [ - EntryPoint(name='b', value='val', group='group'), - EntryPoint(name='a', value='val', group='group'), - ] - ) + sorted([ + EntryPoint(name='b', value='val', group='group'), + EntryPoint(name='a', value='val', group='group'), + ]) class FileSystem( @@ -380,18 +379,16 @@ def test_packages_distributions_all_module_types(self): 'all_distributions-1.0.0.dist-info': metadata, } for i, suffix in enumerate(suffixes): - files.update( - { - f'importable-name {i}{suffix}': '', - f'in_namespace_{i}': { - f'mod{suffix}': '', - }, - f'in_package_{i}': { - '__init__.py': '', - f'mod{suffix}': '', - }, - } - ) + files.update({ + f'importable-name {i}{suffix}': '', + f'in_namespace_{i}': { + f'mod{suffix}': '', + }, + f'in_package_{i}': { + '__init__.py': '', + f'mod{suffix}': '', + }, + }) metadata.update(RECORD=fixtures.build_record(files)) fixtures.build_files(files, prefix=self.site_dir) diff --git a/Lib/test/test_importlib/test_zip.py b/Lib/test/test_importlib/metadata/test_zip.py similarity index 100% rename from Lib/test/test_importlib/test_zip.py rename to Lib/test/test_importlib/metadata/test_zip.py diff --git a/Lib/test/test_importlib/test_lazy.py b/Lib/test/test_importlib/test_lazy.py index 38ab21907b58d9..4d2cc4eb62b67c 100644 --- a/Lib/test/test_importlib/test_lazy.py +++ b/Lib/test/test_importlib/test_lazy.py @@ -178,6 +178,24 @@ def access_module(): # Or multiple load attempts self.assertEqual(loader.load_count, 1) + def test_lazy_self_referential_modules(self): + # Directory modules with submodules that reference the parent can attempt to access + # the parent module during a load. Verify that this common pattern works with lazy loading. + # json is a good example in the stdlib. 
+ json_modules = [name for name in sys.modules if name.startswith('json')] + with test_util.uncache(*json_modules): + # Standard lazy loading, unwrapped + spec = util.find_spec('json') + loader = util.LazyLoader(spec.loader) + spec.loader = loader + module = util.module_from_spec(spec) + sys.modules['json'] = module + loader.exec_module(module) + + # Trigger load with attribute lookup, ensure expected behavior + test_load = module.loads('{}') + self.assertEqual(test_load, {}) + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_importlib/test_util.py b/Lib/test/test_importlib/test_util.py index a09286806e5152..a6a76e589761e0 100644 --- a/Lib/test/test_importlib/test_util.py +++ b/Lib/test/test_importlib/test_util.py @@ -707,13 +707,20 @@ def test_single_phase_init_module(self): @unittest.skipIf(_testmultiphase is None, "test requires _testmultiphase module") def test_incomplete_multi_phase_init_module(self): + # Apple extensions must be distributed as frameworks. This requires + # a specialist loader. + if support.is_apple_mobile: + loader = "AppleFrameworkLoader" + else: + loader = "ExtensionFileLoader" + prescript = textwrap.dedent(f''' from importlib.util import spec_from_loader, module_from_spec - from importlib.machinery import ExtensionFileLoader + from importlib.machinery import {loader} name = '_test_shared_gil_only' filename = {_testmultiphase.__file__!r} - loader = ExtensionFileLoader(name, filename) + loader = {loader}(name, filename) spec = spec_from_loader(name, loader) ''') diff --git a/Lib/test/test_importlib/util.py b/Lib/test/test_importlib/util.py index a900cc1dddf425..89272484009c56 100644 --- a/Lib/test/test_importlib/util.py +++ b/Lib/test/test_importlib/util.py @@ -8,6 +8,7 @@ import os.path from test import support from test.support import import_helper +from test.support import is_apple_mobile from test.support import os_helper import unittest import sys @@ -43,6 +44,11 @@ def _extension_details(): global EXTENSIONS for path in sys.path: for ext in machinery.EXTENSION_SUFFIXES: + # Apple mobile platforms mechanically load .so files, + # but the findable files are labelled .fwork + if is_apple_mobile: + ext = ext.replace(".so", ".fwork") + filename = EXTENSIONS.name + ext file_path = os.path.join(path, filename) if os.path.exists(file_path): diff --git a/Lib/test/test_inspect/inspect_fodder2.py b/Lib/test/test_inspect/inspect_fodder2.py index 8639cf2e72cd7a..bb9d3e88cfbee1 100644 --- a/Lib/test/test_inspect/inspect_fodder2.py +++ b/Lib/test/test_inspect/inspect_fodder2.py @@ -310,3 +310,8 @@ def f(): class cls310: def g(): pass + +# line 314 +class ClassWithCodeObject: + import sys + code = sys._getframe(0).f_code diff --git a/Lib/test/test_inspect/test_inspect.py b/Lib/test/test_inspect/test_inspect.py index 21d9f96c8c460e..dc46c0bc8ed353 100644 --- a/Lib/test/test_inspect/test_inspect.py +++ b/Lib/test/test_inspect/test_inspect.py @@ -983,6 +983,9 @@ def test_findsource_with_out_of_bounds_lineno(self): def test_getsource_on_method(self): self.assertSourceEqual(mod2.ClassWithMethod.method, 118, 119) + def test_getsource_on_class_code_object(self): + self.assertSourceEqual(mod2.ClassWithCodeObject.code, 315, 317) + def test_nested_func(self): self.assertSourceEqual(mod2.cls135.func136, 136, 139) diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py index 5491c0575dbd3f..4ea1ef15c0661d 100644 --- a/Lib/test/test_io.py +++ b/Lib/test/test_io.py @@ -1160,7 +1160,7 @@ class APIMismatchTest(unittest.TestCase): def test_RawIOBase_io_in_pyio_match(self): 
"""Test that pyio RawIOBase class has all c RawIOBase methods""" mismatch = support.detect_api_mismatch(pyio.RawIOBase, io.RawIOBase, - ignore=('__weakref__',)) + ignore=('__weakref__', '__static_attributes__')) self.assertEqual(mismatch, set(), msg='Python RawIOBase does not have all C RawIOBase methods') def test_RawIOBase_pyio_in_io_match(self): diff --git a/Lib/test/test_ipaddress.py b/Lib/test/test_ipaddress.py index b4952acc2b61b1..f1519df673747a 100644 --- a/Lib/test/test_ipaddress.py +++ b/Lib/test/test_ipaddress.py @@ -2288,6 +2288,10 @@ def testReservedIpv4(self): self.assertEqual(True, ipaddress.ip_address( '172.31.255.255').is_private) self.assertEqual(False, ipaddress.ip_address('172.32.0.0').is_private) + self.assertFalse(ipaddress.ip_address('192.0.0.0').is_global) + self.assertTrue(ipaddress.ip_address('192.0.0.9').is_global) + self.assertTrue(ipaddress.ip_address('192.0.0.10').is_global) + self.assertFalse(ipaddress.ip_address('192.0.0.255').is_global) self.assertEqual(True, ipaddress.ip_address('169.254.100.200').is_link_local) @@ -2313,6 +2317,7 @@ def testPrivateNetworks(self): self.assertEqual(True, ipaddress.ip_network("169.254.0.0/16").is_private) self.assertEqual(True, ipaddress.ip_network("172.16.0.0/12").is_private) self.assertEqual(True, ipaddress.ip_network("192.0.0.0/29").is_private) + self.assertEqual(False, ipaddress.ip_network("192.0.0.9/32").is_private) self.assertEqual(True, ipaddress.ip_network("192.0.0.170/31").is_private) self.assertEqual(True, ipaddress.ip_network("192.0.2.0/24").is_private) self.assertEqual(True, ipaddress.ip_network("192.168.0.0/16").is_private) @@ -2329,8 +2334,8 @@ def testPrivateNetworks(self): self.assertEqual(True, ipaddress.ip_network("::/128").is_private) self.assertEqual(True, ipaddress.ip_network("::ffff:0:0/96").is_private) self.assertEqual(True, ipaddress.ip_network("100::/64").is_private) - self.assertEqual(True, ipaddress.ip_network("2001::/23").is_private) self.assertEqual(True, ipaddress.ip_network("2001:2::/48").is_private) + self.assertEqual(False, ipaddress.ip_network("2001:3::/48").is_private) self.assertEqual(True, ipaddress.ip_network("2001:db8::/32").is_private) self.assertEqual(True, ipaddress.ip_network("2001:10::/28").is_private) self.assertEqual(True, ipaddress.ip_network("fc00::/7").is_private) @@ -2409,6 +2414,20 @@ def testReservedIpv6(self): self.assertEqual(True, ipaddress.ip_address('0::0').is_unspecified) self.assertEqual(False, ipaddress.ip_address('::1').is_unspecified) + self.assertFalse(ipaddress.ip_address('64:ff9b:1::').is_global) + self.assertFalse(ipaddress.ip_address('2001::').is_global) + self.assertTrue(ipaddress.ip_address('2001:1::1').is_global) + self.assertTrue(ipaddress.ip_address('2001:1::2').is_global) + self.assertFalse(ipaddress.ip_address('2001:2::').is_global) + self.assertTrue(ipaddress.ip_address('2001:3::').is_global) + self.assertFalse(ipaddress.ip_address('2001:4::').is_global) + self.assertTrue(ipaddress.ip_address('2001:4:112::').is_global) + self.assertFalse(ipaddress.ip_address('2001:10::').is_global) + self.assertTrue(ipaddress.ip_address('2001:20::').is_global) + self.assertTrue(ipaddress.ip_address('2001:30::').is_global) + self.assertFalse(ipaddress.ip_address('2001:40::').is_global) + self.assertFalse(ipaddress.ip_address('2002::').is_global) + # some generic IETF reserved addresses self.assertEqual(True, ipaddress.ip_address('100::').is_reserved) self.assertEqual(True, ipaddress.ip_network('4000::1/128').is_reserved) diff --git a/Lib/test/test_metaclass.py 
b/Lib/test/test_metaclass.py index 36e8ab4cda3dad..70f9c5d9400bf6 100644 --- a/Lib/test/test_metaclass.py +++ b/Lib/test/test_metaclass.py @@ -167,6 +167,7 @@ d['foo'] = 4 d['foo'] = 42 d['bar'] = 123 + d['__static_attributes__'] = () >>> Use a metaclass that doesn't derive from type. @@ -182,12 +183,12 @@ ... b = 24 ... meta: C () - ns: [('__module__', 'test.test_metaclass'), ('__qualname__', 'C'), ('a', 42), ('b', 24)] + ns: [('__module__', 'test.test_metaclass'), ('__qualname__', 'C'), ('__static_attributes__', ()), ('a', 42), ('b', 24)] kw: [] >>> type(C) is dict True >>> print(sorted(C.items())) - [('__module__', 'test.test_metaclass'), ('__qualname__', 'C'), ('a', 42), ('b', 24)] + [('__module__', 'test.test_metaclass'), ('__qualname__', 'C'), ('__static_attributes__', ()), ('a', 42), ('b', 24)] >>> And again, with a __prepare__ attribute. @@ -208,8 +209,9 @@ d['a'] = 1 d['a'] = 2 d['b'] = 3 + d['__static_attributes__'] = () meta: C () - ns: [('__module__', 'test.test_metaclass'), ('__qualname__', 'C'), ('a', 2), ('b', 3)] + ns: [('__module__', 'test.test_metaclass'), ('__qualname__', 'C'), ('__static_attributes__', ()), ('a', 2), ('b', 3)] kw: [('other', 'booh')] >>> diff --git a/Lib/test/test_mimetypes.py b/Lib/test/test_mimetypes.py index 01bba0ac2eed5a..30e1c56bf0bc52 100644 --- a/Lib/test/test_mimetypes.py +++ b/Lib/test/test_mimetypes.py @@ -1,5 +1,6 @@ import io import mimetypes +import os import pathlib import sys import unittest.mock @@ -109,15 +110,40 @@ def test_filename_with_url_delimiters(self): # compared to when interpreted as filename because of the semicolon. eq = self.assertEqual gzip_expected = ('application/x-tar', 'gzip') - eq(self.db.guess_type(";1.tar.gz"), gzip_expected) - eq(self.db.guess_type("?1.tar.gz"), gzip_expected) - eq(self.db.guess_type("#1.tar.gz"), gzip_expected) - eq(self.db.guess_type("#1#.tar.gz"), gzip_expected) - eq(self.db.guess_type(";1#.tar.gz"), gzip_expected) - eq(self.db.guess_type(";&1=123;?.tar.gz"), gzip_expected) - eq(self.db.guess_type("?k1=v1&k2=v2.tar.gz"), gzip_expected) + for name in ( + ';1.tar.gz', + '?1.tar.gz', + '#1.tar.gz', + '#1#.tar.gz', + ';1#.tar.gz', + ';&1=123;?.tar.gz', + '?k1=v1&k2=v2.tar.gz', + ): + for prefix in ('', '/', '\\', + 'c:', 'c:/', 'c:\\', 'c:/d/', 'c:\\d\\', + '//share/server/', '\\\\share\\server\\'): + path = prefix + name + with self.subTest(path=path): + eq(self.db.guess_type(path), gzip_expected) + expected = (None, None) if os.name == 'nt' else gzip_expected + for prefix in ('//', '\\\\', '//share/', '\\\\share\\'): + path = prefix + name + with self.subTest(path=path): + eq(self.db.guess_type(path), expected) eq(self.db.guess_type(r" \"\`;b&b&c |.tar.gz"), gzip_expected) + def test_url(self): + result = self.db.guess_type('http://host.html') + msg = 'URL only has a host name, not a file' + self.assertSequenceEqual(result, (None, None), msg) + result = self.db.guess_type('http://example.com/host.html') + msg = 'Should be text/html' + self.assertSequenceEqual(result, ('text/html', None), msg) + result = self.db.guess_type('http://example.com/host.html#x.tar') + self.assertSequenceEqual(result, ('text/html', None)) + result = self.db.guess_type('http://example.com/host.html?q=x.tar') + self.assertSequenceEqual(result, ('text/html', None)) + def test_guess_all_types(self): # First try strict. 
Use a set here for testing the results because if # test_urllib2 is run before test_mimetypes, global state is modified @@ -187,6 +213,7 @@ def check_extensions(): self.assertEqual(mimetypes.guess_extension('message/rfc822'), '.eml') self.assertEqual(mimetypes.guess_extension('text/html'), '.html') self.assertEqual(mimetypes.guess_extension('text/plain'), '.txt') + self.assertEqual(mimetypes.guess_extension('text/rtf'), '.rtf') self.assertEqual(mimetypes.guess_extension('video/mpeg'), '.mpeg') self.assertEqual(mimetypes.guess_extension('video/quicktime'), '.mov') diff --git a/Lib/test/test_operator.py b/Lib/test/test_operator.py index 1db738d228b1b9..0d34d671563d19 100644 --- a/Lib/test/test_operator.py +++ b/Lib/test/test_operator.py @@ -1,6 +1,8 @@ import unittest import pickle import sys +from decimal import Decimal +from fractions import Fraction from test import support from test.support import import_helper @@ -508,6 +510,44 @@ def __getitem__(self, other): return 5 # so that C is a sequence self.assertEqual(operator.ixor (c, 5), "ixor") self.assertEqual(operator.iconcat (c, c), "iadd") + def test_iconcat_without_getitem(self): + operator = self.module + + msg = "'int' object can't be concatenated" + with self.assertRaisesRegex(TypeError, msg): + operator.iconcat(1, 0.5) + + def test_index(self): + operator = self.module + class X: + def __index__(self): + return 1 + + self.assertEqual(operator.index(X()), 1) + self.assertEqual(operator.index(0), 0) + self.assertEqual(operator.index(1), 1) + self.assertEqual(operator.index(2), 2) + with self.assertRaises((AttributeError, TypeError)): + operator.index(1.5) + with self.assertRaises((AttributeError, TypeError)): + operator.index(Fraction(3, 7)) + with self.assertRaises((AttributeError, TypeError)): + operator.index(Decimal(1)) + with self.assertRaises((AttributeError, TypeError)): + operator.index(None) + + def test_not_(self): + operator = self.module + class C: + def __bool__(self): + raise SyntaxError + self.assertRaises(TypeError, operator.not_) + self.assertRaises(SyntaxError, operator.not_, C()) + self.assertFalse(operator.not_(5)) + self.assertFalse(operator.not_([0])) + self.assertTrue(operator.not_(0)) + self.assertTrue(operator.not_([])) + def test_length_hint(self): operator = self.module class X(object): @@ -533,6 +573,13 @@ def __length_hint__(self): with self.assertRaises(LookupError): operator.length_hint(X(LookupError)) + class Y: pass + + msg = "'str' object cannot be interpreted as an integer" + with self.assertRaisesRegex(TypeError, msg): + operator.length_hint(X(2), "abc") + self.assertEqual(operator.length_hint(Y(), 10), 10) + def test_call(self): operator = self.module diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py index 4c157842d95523..00b415f43c49b8 100644 --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -13,6 +13,7 @@ import locale import os import pickle +import platform import select import selectors import shutil @@ -4085,9 +4086,15 @@ def test_eventfd_select(self): @unittest.skipUnless(hasattr(os, 'timerfd_create'), 'requires os.timerfd_create') @support.requires_linux_version(2, 6, 30) class TimerfdTests(unittest.TestCase): - # Tolerate a difference of 1 ms - CLOCK_RES_NS = 1_000_000 - CLOCK_RES = CLOCK_RES_NS * 1e-9 + # 1 ms accuracy is reliably achievable on every platform except Android + # emulators, where we allow 10 ms (gh-108277). 
+ if sys.platform == "android" and platform.android_ver().is_emulator: + CLOCK_RES_PLACES = 2 + else: + CLOCK_RES_PLACES = 3 + + CLOCK_RES = 10 ** -CLOCK_RES_PLACES + CLOCK_RES_NS = 10 ** (9 - CLOCK_RES_PLACES) def timerfd_create(self, *args, **kwargs): fd = os.timerfd_create(*args, **kwargs) @@ -4109,18 +4116,18 @@ def test_timerfd_initval(self): # 1st call next_expiration, interval2 = os.timerfd_settime(fd, initial=initial_expiration, interval=interval) - self.assertAlmostEqual(interval2, 0.0, places=3) - self.assertAlmostEqual(next_expiration, 0.0, places=3) + self.assertAlmostEqual(interval2, 0.0, places=self.CLOCK_RES_PLACES) + self.assertAlmostEqual(next_expiration, 0.0, places=self.CLOCK_RES_PLACES) # 2nd call next_expiration, interval2 = os.timerfd_settime(fd, initial=initial_expiration, interval=interval) - self.assertAlmostEqual(interval2, interval, places=3) - self.assertAlmostEqual(next_expiration, initial_expiration, places=3) + self.assertAlmostEqual(interval2, interval, places=self.CLOCK_RES_PLACES) + self.assertAlmostEqual(next_expiration, initial_expiration, places=self.CLOCK_RES_PLACES) # timerfd_gettime next_expiration, interval2 = os.timerfd_gettime(fd) - self.assertAlmostEqual(interval2, interval, places=3) - self.assertAlmostEqual(next_expiration, initial_expiration, places=3) + self.assertAlmostEqual(interval2, interval, places=self.CLOCK_RES_PLACES) + self.assertAlmostEqual(next_expiration, initial_expiration, places=self.CLOCK_RES_PLACES) def test_timerfd_non_blocking(self): fd = self.timerfd_create(time.CLOCK_REALTIME, flags=os.TFD_NONBLOCK) @@ -4174,8 +4181,8 @@ def test_timerfd_interval(self): # timerfd_gettime next_expiration, interval2 = os.timerfd_gettime(fd) - self.assertAlmostEqual(interval2, interval, places=3) - self.assertAlmostEqual(next_expiration, initial_expiration, places=3) + self.assertAlmostEqual(interval2, interval, places=self.CLOCK_RES_PLACES) + self.assertAlmostEqual(next_expiration, initial_expiration, places=self.CLOCK_RES_PLACES) count = 3 t = time.perf_counter() @@ -4206,8 +4213,8 @@ def test_timerfd_TFD_TIMER_ABSTIME(self): # timerfd_gettime # Note: timerfd_gettime returns relative values even if TFD_TIMER_ABSTIME is specified. 
next_expiration, interval2 = os.timerfd_gettime(fd) - self.assertAlmostEqual(interval2, interval, places=3) - self.assertAlmostEqual(next_expiration, offset, places=3) + self.assertAlmostEqual(interval2, interval, places=self.CLOCK_RES_PLACES) + self.assertAlmostEqual(next_expiration, offset, places=self.CLOCK_RES_PLACES) t = time.perf_counter() count_signaled = self.read_count_signaled(fd) @@ -5357,20 +5364,21 @@ def test_fork_warns_when_non_python_thread_exists(self): self.assertEqual(err.decode("utf-8"), "") self.assertEqual(out.decode("utf-8"), "") - def test_fork_at_exit(self): + def test_fork_at_finalization(self): code = """if 1: import atexit import os - def exit_handler(): - pid = os.fork() - if pid != 0: - print("shouldn't be printed") - - atexit.register(exit_handler) + class AtFinalization: + def __del__(self): + print("OK") + pid = os.fork() + if pid != 0: + print("shouldn't be printed") + at_finalization = AtFinalization() """ _, out, err = assert_python_ok("-c", code) - self.assertEqual(b"", out) + self.assertEqual(b"OK\n", out) self.assertIn(b"can't fork at interpreter shutdown", err) diff --git a/Lib/test/test_pathlib/test_pathlib.py b/Lib/test/test_pathlib/test_pathlib.py index 6509c08d227346..3a6f73c4fe82a4 100644 --- a/Lib/test/test_pathlib/test_pathlib.py +++ b/Lib/test/test_pathlib/test_pathlib.py @@ -1103,15 +1103,15 @@ def test_is_socket_true(self): self.assertIs(self.cls(self.base, 'mysock\x00').is_socket(), False) def test_is_char_device_true(self): - # Under Unix, /dev/null should generally be a char device. - P = self.cls('/dev/null') + # os.devnull should generally be a char device. + P = self.cls(os.devnull) if not P.exists(): - self.skipTest("/dev/null required") + self.skipTest("null device required") self.assertTrue(P.is_char_device()) self.assertFalse(P.is_block_device()) self.assertFalse(P.is_file()) - self.assertIs(self.cls('/dev/null\udfff').is_char_device(), False) - self.assertIs(self.cls('/dev/null\x00').is_char_device(), False) + self.assertIs(self.cls(f'{os.devnull}\udfff').is_char_device(), False) + self.assertIs(self.cls(f'{os.devnull}\x00').is_char_device(), False) def test_is_mount_root(self): if os.name == 'nt': diff --git a/Lib/test/test_pdb.py b/Lib/test/test_pdb.py index 69691e930562bc..eedbcec1e66dcb 100644 --- a/Lib/test/test_pdb.py +++ b/Lib/test/test_pdb.py @@ -3057,6 +3057,15 @@ def test_module_is_run_as_main(self): stdout, stderr = self.run_pdb_module(script, commands) self.assertTrue(any("SUCCESS" in l for l in stdout.splitlines()), stdout) + def test_run_module_with_args(self): + commands = """ + continue + """ + self._run_pdb(["calendar", "-m"], commands, expected_returncode=2) + + stdout, _ = self._run_pdb(["-m", "calendar", "1"], commands) + self.assertIn("December", stdout) + def test_breakpoint(self): script = """ if __name__ == '__main__': @@ -3567,6 +3576,57 @@ def test_expression_completion(self): self.assertIn(b'species', output) self.assertIn(b'$_frame', output) + def test_builtin_completion(self): + script = textwrap.dedent(""" + value = "speci" + import pdb; pdb.Pdb().set_trace() + """) + + # Complete: print(value + 'al') + input = b"pri\tval\t + 'al')\n" + + # Continue + input += b"c\n" + + output = run_pty(script, input) + + self.assertIn(b'special', output) + + def test_local_namespace(self): + script = textwrap.dedent(""" + def f(): + original = "I live Pythin" + import pdb; pdb.Pdb().set_trace() + f() + """) + + # Complete: original.replace('i', 'o') + input = b"orig\t.repl\t('i', 'o')\n" + + # Continue + input += 
b"c\n" + + output = run_pty(script, input) + + self.assertIn(b'I love Python', output) + + def test_multiline_completion(self): + script = textwrap.dedent(""" + import pdb; pdb.Pdb().set_trace() + """) + + input = b"def func():\n" + # Complete: \treturn 40 + 2 + input += b"\tret\t 40 + 2\n" + input += b"\n" + # Complete: func() + input += b"fun\t()\n" + input += b"c\n" + + output = run_pty(script, input) + + self.assertIn(b'42', output) + def load_tests(loader, tests, pattern): from test import test_pdb diff --git a/Lib/test/test_platform.py b/Lib/test/test_platform.py index 9f8aeeea257311..40d5fb338ce563 100644 --- a/Lib/test/test_platform.py +++ b/Lib/test/test_platform.py @@ -10,6 +10,14 @@ from test import support from test.support import os_helper +try: + # Some of the iOS tests need ctypes to operate. + # Confirm that the ctypes module is available + # is available. + import _ctypes +except ImportError: + _ctypes = None + FEDORA_OS_RELEASE = """\ NAME=Fedora VERSION="32 (Thirty Two)" @@ -219,6 +227,30 @@ def test_uname(self): self.assertEqual(res[-1], res.processor) self.assertEqual(len(res), 6) + if os.name == "posix": + uname = os.uname() + self.assertEqual(res.node, uname.nodename) + self.assertEqual(res.version, uname.version) + self.assertEqual(res.machine, uname.machine) + + if sys.platform == "android": + self.assertEqual(res.system, "Android") + self.assertEqual(res.release, platform.android_ver().release) + elif sys.platform == "ios": + # Platform module needs ctypes for full operation. If ctypes + # isn't available, there's no ObjC module, and dummy values are + # returned. + if _ctypes: + self.assertIn(res.system, {"iOS", "iPadOS"}) + self.assertEqual(res.release, platform.ios_ver().release) + else: + self.assertEqual(res.system, "") + self.assertEqual(res.release, "") + else: + self.assertEqual(res.system, uname.sysname) + self.assertEqual(res.release, uname.release) + + @unittest.skipUnless(sys.platform.startswith('win'), "windows only test") def test_uname_win32_without_wmi(self): def raises_oserror(*a): @@ -409,6 +441,56 @@ def test_mac_ver_with_fork(self): # parent support.wait_process(pid, exitcode=0) + def test_ios_ver(self): + result = platform.ios_ver() + + # ios_ver is only fully available on iOS where ctypes is available. + if sys.platform == "ios" and _ctypes: + system, release, model, is_simulator = result + # Result is a namedtuple + self.assertEqual(result.system, system) + self.assertEqual(result.release, release) + self.assertEqual(result.model, model) + self.assertEqual(result.is_simulator, is_simulator) + + # We can't assert specific values without reproducing the logic of + # ios_ver(), so we check that the values are broadly what we expect. + + # System is either iOS or iPadOS, depending on the test device + self.assertIn(system, {"iOS", "iPadOS"}) + + # Release is a numeric version specifier with at least 2 parts + parts = release.split(".") + self.assertGreaterEqual(len(parts), 2) + self.assertTrue(all(part.isdigit() for part in parts)) + + # If this is a simulator, we get a high level device descriptor + # with no identifying model number. 
If this is a physical device, + # we get a model descriptor like "iPhone13,1" + if is_simulator: + self.assertIn(model, {"iPhone", "iPad"}) + else: + self.assertTrue( + (model.startswith("iPhone") or model.startswith("iPad")) + and "," in model + ) + + self.assertEqual(type(is_simulator), bool) + else: + # On non-iOS platforms, calling ios_ver doesn't fail; you get + # default values + self.assertEqual(result.system, "") + self.assertEqual(result.release, "") + self.assertEqual(result.model, "") + self.assertFalse(result.is_simulator) + + # Check the fallback values can be overridden by arguments + override = platform.ios_ver("Foo", "Bar", "Whiz", True) + self.assertEqual(override.system, "Foo") + self.assertEqual(override.release, "Bar") + self.assertEqual(override.model, "Whiz") + self.assertTrue(override.is_simulator) + @unittest.skipIf(support.is_emscripten, "Does not apply to Emscripten") def test_libc_ver(self): # check that libc_ver(executable) doesn't raise an exception @@ -458,6 +540,43 @@ def test_libc_ver(self): self.assertEqual(platform.libc_ver(filename, chunksize=chunksize), ('glibc', '1.23.4')) + def test_android_ver(self): + res = platform.android_ver() + self.assertIsInstance(res, tuple) + self.assertEqual(res, (res.release, res.api_level, res.manufacturer, + res.model, res.device, res.is_emulator)) + + if sys.platform == "android": + for name in ["release", "manufacturer", "model", "device"]: + with self.subTest(name): + value = getattr(res, name) + self.assertIsInstance(value, str) + self.assertNotEqual(value, "") + + self.assertIsInstance(res.api_level, int) + self.assertGreaterEqual(res.api_level, sys.getandroidapilevel()) + + self.assertIsInstance(res.is_emulator, bool) + + # When not running on Android, it should return the default values. + else: + self.assertEqual(res.release, "") + self.assertEqual(res.api_level, 0) + self.assertEqual(res.manufacturer, "") + self.assertEqual(res.model, "") + self.assertEqual(res.device, "") + self.assertEqual(res.is_emulator, False) + + # Default values may also be overridden using parameters. + res = platform.android_ver( + "alpha", 1, "bravo", "charlie", "delta", True) + self.assertEqual(res.release, "alpha") + self.assertEqual(res.api_level, 1) + self.assertEqual(res.manufacturer, "bravo") + self.assertEqual(res.model, "charlie") + self.assertEqual(res.device, "delta") + self.assertEqual(res.is_emulator, True) + @support.cpython_only def test__comparable_version(self): from platform import _comparable_version as V diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py index 2706d5eb6d9830..1d22869046fd12 100644 --- a/Lib/test/test_posix.py +++ b/Lib/test/test_posix.py @@ -1335,12 +1335,21 @@ def test_sched_getaffinity(self): def test_sched_setaffinity(self): mask = posix.sched_getaffinity(0) self.addCleanup(posix.sched_setaffinity, 0, list(mask)) + if len(mask) > 1: # Empty masks are forbidden mask.pop() posix.sched_setaffinity(0, mask) self.assertEqual(posix.sched_getaffinity(0), mask) - self.assertRaises(OSError, posix.sched_setaffinity, 0, []) + + try: + posix.sched_setaffinity(0, []) + # gh-117061: On RHEL9, sched_setaffinity(0, []) does not fail + except OSError: + # sched_setaffinity() manual page documents EINVAL error + # when the mask is empty. 
+ pass + self.assertRaises(ValueError, posix.sched_setaffinity, 0, [-10]) self.assertRaises(ValueError, posix.sched_setaffinity, 0, map(int, "0X")) self.assertRaises(OverflowError, posix.sched_setaffinity, 0, [1<<128]) diff --git a/Lib/test/test_regrtest.py b/Lib/test/test_regrtest.py index 903ad50ba088e8..6a6b21102fcae8 100644 --- a/Lib/test/test_regrtest.py +++ b/Lib/test/test_regrtest.py @@ -464,6 +464,24 @@ def test_bisect(self): regrtest = self.create_regrtest(args) self.assertTrue(regrtest.want_bisect) + def test_verbose3_huntrleaks(self): + args = ['-R', '3:10', '--verbose3'] + with support.captured_stderr(): + regrtest = self.create_regrtest(args) + self.assertIsNotNone(regrtest.hunt_refleak) + self.assertEqual(regrtest.hunt_refleak.warmups, 3) + self.assertEqual(regrtest.hunt_refleak.runs, 10) + self.assertFalse(regrtest.output_on_failure) + + def test_xml_huntrleaks(self): + args = ['-R', '3:12', '--junit-xml', 'output.xml'] + with support.captured_stderr(): + regrtest = self.create_regrtest(args) + self.assertIsNotNone(regrtest.hunt_refleak) + self.assertEqual(regrtest.hunt_refleak.warmups, 3) + self.assertEqual(regrtest.hunt_refleak.runs, 12) + self.assertIsNone(regrtest.junit_filename) + @dataclasses.dataclass(slots=True) class Rerun: diff --git a/Lib/test/test_sax.py b/Lib/test/test_sax.py index 97e96668f85c8a..9b3014a94a081e 100644 --- a/Lib/test/test_sax.py +++ b/Lib/test/test_sax.py @@ -1215,10 +1215,10 @@ def test_expat_incremental_reset(self): self.assertEqual(result.getvalue(), start + b"text") + @unittest.skipIf(pyexpat.version_info < (2, 6, 0), + f'Expat {pyexpat.version_info} does not ' + 'support reparse deferral') def test_flush_reparse_deferral_enabled(self): - if pyexpat.version_info < (2, 6, 0): - self.skipTest(f'Expat {pyexpat.version_info} does not support reparse deferral') - result = BytesIO() xmlgen = XMLGenerator(result) parser = create_parser() @@ -1251,8 +1251,8 @@ def test_flush_reparse_deferral_disabled(self): if pyexpat.version_info >= (2, 6, 0): parser._parser.SetReparseDeferralEnabled(False) + self.assertEqual(result.getvalue(), start) # i.e. no elements started - self.assertEqual(result.getvalue(), start) # i.e. no elements started self.assertFalse(parser._parser.GetReparseDeferralEnabled()) parser.flush() diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py index a7e657f5718524..661a859b0d0601 100644 --- a/Lib/test/test_socket.py +++ b/Lib/test/test_socket.py @@ -209,7 +209,10 @@ def socket_setdefaulttimeout(timeout): HAVE_SOCKET_VSOCK = _have_socket_vsock() -HAVE_SOCKET_UDPLITE = hasattr(socket, "IPPROTO_UDPLITE") +# Older Android versions block UDPLITE with SELinux. +HAVE_SOCKET_UDPLITE = ( + hasattr(socket, "IPPROTO_UDPLITE") + and not (support.is_android and platform.android_ver().api_level < 29)) HAVE_SOCKET_BLUETOOTH = _have_socket_bluetooth() @@ -1217,8 +1220,8 @@ def testGetServBy(self): else: raise OSError # Try same call with optional protocol omitted - # Issue #26936: Android getservbyname() was broken before API 23. - if (not support.is_android) or sys.getandroidapilevel() >= 23: + # Issue gh-71123: this fails on Android before API level 23. + if not (support.is_android and platform.android_ver().api_level < 23): port2 = socket.getservbyname(service) eq(port, port2) # Try udp, but don't barf if it doesn't exist @@ -1229,8 +1232,9 @@ def testGetServBy(self): else: eq(udpport, port) # Now make sure the lookup by port returns the same service name - # Issue #26936: Android getservbyport() is broken. 
- if not support.is_android: + # Issue #26936: when the protocol is omitted, this fails on Android + # before API level 28. + if not (support.is_android and platform.android_ver().api_level < 28): eq(socket.getservbyport(port2), service) eq(socket.getservbyport(port, 'tcp'), service) if udpport is not None: @@ -1575,8 +1579,8 @@ def testGetaddrinfo(self): socket.getaddrinfo('::1', 80) # port can be a string service name such as "http", a numeric # port number or None - # Issue #26936: Android getaddrinfo() was broken before API level 23. - if (not support.is_android) or sys.getandroidapilevel() >= 23: + # Issue #26936: this fails on Android before API level 23. + if not (support.is_android and platform.android_ver().api_level < 23): socket.getaddrinfo(HOST, "http") socket.getaddrinfo(HOST, 80) socket.getaddrinfo(HOST, None) diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py index 489cb5e23ba57e..bd831ac22419af 100644 --- a/Lib/test/test_ssl.py +++ b/Lib/test/test_ssl.py @@ -551,7 +551,7 @@ def test_openssl_version(self): else: openssl_ver = f"OpenSSL {major:d}.{minor:d}.{fix:d}" self.assertTrue( - s.startswith((openssl_ver, libressl_ver)), + s.startswith((openssl_ver, libressl_ver, "AWS-LC")), (s, t, hex(n)) ) @@ -1169,24 +1169,30 @@ def test_load_cert_chain(self): with self.assertRaises(OSError) as cm: ctx.load_cert_chain(NONEXISTINGCERT) self.assertEqual(cm.exception.errno, errno.ENOENT) - with self.assertRaisesRegex(ssl.SSLError, "PEM lib"): + with self.assertRaisesRegex(ssl.SSLError, "PEM (lib|routines)"): ctx.load_cert_chain(BADCERT) - with self.assertRaisesRegex(ssl.SSLError, "PEM lib"): + with self.assertRaisesRegex(ssl.SSLError, "PEM (lib|routines)"): ctx.load_cert_chain(EMPTYCERT) # Separate key and cert ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) ctx.load_cert_chain(ONLYCERT, ONLYKEY) ctx.load_cert_chain(certfile=ONLYCERT, keyfile=ONLYKEY) ctx.load_cert_chain(certfile=BYTES_ONLYCERT, keyfile=BYTES_ONLYKEY) - with self.assertRaisesRegex(ssl.SSLError, "PEM lib"): + with self.assertRaisesRegex(ssl.SSLError, "PEM (lib|routines)"): ctx.load_cert_chain(ONLYCERT) - with self.assertRaisesRegex(ssl.SSLError, "PEM lib"): + with self.assertRaisesRegex(ssl.SSLError, "PEM (lib|routines)"): ctx.load_cert_chain(ONLYKEY) - with self.assertRaisesRegex(ssl.SSLError, "PEM lib"): + with self.assertRaisesRegex(ssl.SSLError, "PEM (lib|routines)"): ctx.load_cert_chain(certfile=ONLYKEY, keyfile=ONLYCERT) # Mismatching key and cert ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) - with self.assertRaisesRegex(ssl.SSLError, "key values mismatch"): + # Allow for flexible libssl error messages. 
+ regex = re.compile(r"""( + key values mismatch # OpenSSL + | + KEY_VALUES_MISMATCH # AWS-LC + )""", re.X) + with self.assertRaisesRegex(ssl.SSLError, regex): ctx.load_cert_chain(CAFILE_CACERT, ONLYKEY) # Password protected key and cert ctx.load_cert_chain(CERTFILE_PROTECTED, password=KEY_PASSWORD) @@ -1254,7 +1260,7 @@ def test_load_verify_locations(self): with self.assertRaises(OSError) as cm: ctx.load_verify_locations(NONEXISTINGCERT) self.assertEqual(cm.exception.errno, errno.ENOENT) - with self.assertRaisesRegex(ssl.SSLError, "PEM lib"): + with self.assertRaisesRegex(ssl.SSLError, "PEM (lib|routines)"): ctx.load_verify_locations(BADCERT) ctx.load_verify_locations(CERTFILE, CAPATH) ctx.load_verify_locations(CERTFILE, capath=BYTES_CAPATH) @@ -1662,9 +1668,10 @@ def test_lib_reason(self): with self.assertRaises(ssl.SSLError) as cm: ctx.load_dh_params(CERTFILE) self.assertEqual(cm.exception.library, 'PEM') - self.assertEqual(cm.exception.reason, 'NO_START_LINE') + regex = "(NO_START_LINE|UNSUPPORTED_PUBLIC_KEY_TYPE)" + self.assertRegex(cm.exception.reason, regex) s = str(cm.exception) - self.assertTrue(s.startswith("[PEM: NO_START_LINE] no start line"), s) + self.assertTrue("NO_START_LINE" in s, s) def test_subclass(self): # Check that the appropriate SSLError subclass is raised @@ -1844,7 +1851,13 @@ def test_connect_fail(self): s = test_wrap_socket(socket.socket(socket.AF_INET), cert_reqs=ssl.CERT_REQUIRED) self.addCleanup(s.close) - self.assertRaisesRegex(ssl.SSLError, "certificate verify failed", + # Allow for flexible libssl error messages. + regex = re.compile(r"""( + certificate verify failed # OpenSSL + | + CERTIFICATE_VERIFY_FAILED # AWS-LC + )""", re.X) + self.assertRaisesRegex(ssl.SSLError, regex, s.connect, self.server_addr) def test_connect_ex(self): @@ -1912,7 +1925,13 @@ def test_connect_with_context_fail(self): server_hostname=SIGNED_CERTFILE_HOSTNAME ) self.addCleanup(s.close) - self.assertRaisesRegex(ssl.SSLError, "certificate verify failed", + # Allow for flexible libssl error messages. + regex = re.compile(r"""( + certificate verify failed # OpenSSL + | + CERTIFICATE_VERIFY_FAILED # AWS-LC + )""", re.X) + self.assertRaisesRegex(ssl.SSLError, regex, s.connect, self.server_addr) def test_connect_capath(self): @@ -2129,14 +2148,16 @@ def test_bio_handshake(self): self.assertIsNone(sslobj.version()) self.assertIsNone(sslobj.shared_ciphers()) self.assertRaises(ValueError, sslobj.getpeercert) - if 'tls-unique' in ssl.CHANNEL_BINDING_TYPES: + # tls-unique is not defined for TLSv1.3 + # https://datatracker.ietf.org/doc/html/rfc8446#appendix-C.5 + if 'tls-unique' in ssl.CHANNEL_BINDING_TYPES and sslobj.version() != "TLSv1.3": self.assertIsNone(sslobj.get_channel_binding('tls-unique')) self.ssl_io_loop(sock, incoming, outgoing, sslobj.do_handshake) self.assertTrue(sslobj.cipher()) self.assertIsNone(sslobj.shared_ciphers()) self.assertIsNotNone(sslobj.version()) self.assertTrue(sslobj.getpeercert()) - if 'tls-unique' in ssl.CHANNEL_BINDING_TYPES: + if 'tls-unique' in ssl.CHANNEL_BINDING_TYPES and sslobj.version() != "TLSv1.3": self.assertTrue(sslobj.get_channel_binding('tls-unique')) try: self.ssl_io_loop(sock, incoming, outgoing, sslobj.unwrap) @@ -2408,16 +2429,18 @@ def run(self): self.write(msg.lower()) except OSError as e: # handles SSLError and socket errors + if isinstance(e, ConnectionError): + # OpenSSL 1.1.1 sometimes raises + # ConnectionResetError when connection is not + # shut down gracefully. 
+ if self.server.chatty and support.verbose: + print(f" Connection reset by peer: {self.addr}") + + self.close() + self.running = False + return if self.server.chatty and support.verbose: - if isinstance(e, ConnectionError): - # OpenSSL 1.1.1 sometimes raises - # ConnectionResetError when connection is not - # shut down gracefully. - print( - f" Connection reset by peer: {self.addr}" - ) - else: - handle_error("Test server failure:\n") + handle_error("Test server failure:\n") try: self.write(b"ERROR\n") except OSError: @@ -2861,11 +2884,16 @@ def test_crl_check(self): client_context.verify_flags |= ssl.VERIFY_CRL_CHECK_LEAF server = ThreadedEchoServer(context=server_context, chatty=True) + # Allow for flexible libssl error messages. + regex = re.compile(r"""( + certificate verify failed # OpenSSL + | + CERTIFICATE_VERIFY_FAILED # AWS-LC + )""", re.X) with server: with client_context.wrap_socket(socket.socket(), server_hostname=hostname) as s: - with self.assertRaisesRegex(ssl.SSLError, - "certificate verify failed"): + with self.assertRaisesRegex(ssl.SSLError, regex): s.connect((HOST, server.port)) # now load a CRL file. The CRL file is signed by the CA. @@ -2896,12 +2924,16 @@ def test_check_hostname(self): # incorrect hostname should raise an exception server = ThreadedEchoServer(context=server_context, chatty=True) + # Allow for flexible libssl error messages. + regex = re.compile(r"""( + certificate verify failed # OpenSSL + | + CERTIFICATE_VERIFY_FAILED # AWS-LC + )""", re.X) with server: with client_context.wrap_socket(socket.socket(), server_hostname="invalid") as s: - with self.assertRaisesRegex( - ssl.CertificateError, - "Hostname mismatch, certificate is not valid for 'invalid'."): + with self.assertRaisesRegex(ssl.CertificateError, regex): s.connect((HOST, server.port)) # missing server_hostname arg should cause an exception, too @@ -3136,8 +3168,8 @@ def test_wrong_cert_tls13(self): suppress_ragged_eofs=False) as s: s.connect((HOST, server.port)) with self.assertRaisesRegex( - ssl.SSLError, - 'alert unknown ca|EOF occurred' + OSError, + 'alert unknown ca|EOF occurred|TLSV1_ALERT_UNKNOWN_CA|closed by the remote host|Connection reset by peer' ): # TLS 1.3 perform client cert exchange after handshake s.write(b'data') @@ -3201,13 +3233,21 @@ def test_ssl_cert_verify_error(self): server_hostname=SIGNED_CERTFILE_HOSTNAME) as s: try: s.connect((HOST, server.port)) + self.fail("Expected connection failure") except ssl.SSLError as e: msg = 'unable to get local issuer certificate' self.assertIsInstance(e, ssl.SSLCertVerificationError) self.assertEqual(e.verify_code, 20) self.assertEqual(e.verify_message, msg) - self.assertIn(msg, repr(e)) - self.assertIn('certificate verify failed', repr(e)) + # Allow for flexible libssl error messages. 
+ regex = f"({msg}|CERTIFICATE_VERIFY_FAILED)" + self.assertRegex(repr(e), regex) + regex = re.compile(r"""( + certificate verify failed # OpenSSL + | + CERTIFICATE_VERIFY_FAILED # AWS-LC + )""", re.X) + self.assertRegex(repr(e), regex) def test_PROTOCOL_TLS(self): """Connecting to an SSLv23 server with various client options""" @@ -3739,7 +3779,7 @@ def test_no_shared_ciphers(self): server_hostname=hostname) as s: with self.assertRaises(OSError): s.connect((HOST, server.port)) - self.assertIn("no shared cipher", server.conn_errors[0]) + self.assertIn("NO_SHARED_CIPHER", server.conn_errors[0]) def test_version_basic(self): """ @@ -3827,7 +3867,7 @@ def test_min_max_version_mismatch(self): server_hostname=hostname) as s: with self.assertRaises(ssl.SSLError) as e: s.connect((HOST, server.port)) - self.assertIn("alert", str(e.exception)) + self.assertRegex("(alert|ALERT)", str(e.exception)) @requires_tls_version('SSLv3') def test_min_max_version_sslv3(self): @@ -3869,6 +3909,10 @@ def test_tls_unique_channel_binding(self): client_context, server_context, hostname = testing_context() + # tls-unique is not defined for TLSv1.3 + # https://datatracker.ietf.org/doc/html/rfc8446#appendix-C.5 + client_context.maximum_version = ssl.TLSVersion.TLSv1_2 + server = ThreadedEchoServer(context=server_context, chatty=True, connectionchatty=False) @@ -3969,7 +4013,7 @@ def test_dh_params(self): cipher = stats["cipher"][0] parts = cipher.split("-") if "ADH" not in parts and "EDH" not in parts and "DHE" not in parts: - self.fail("Non-DH cipher: " + cipher[0]) + self.fail("Non-DH key exchange: " + cipher[0]) def test_ecdh_curve(self): # server secp384r1, client auto @@ -4136,8 +4180,9 @@ def cb_raising(ssl_sock, server_name, initial_context): chatty=False, sni_name='supermessage') - self.assertEqual(cm.exception.reason, - 'SSLV3_ALERT_HANDSHAKE_FAILURE') + # Allow for flexible libssl error messages. + regex = "(SSLV3_ALERT_HANDSHAKE_FAILURE|NO_PRIVATE_VALUE)" + self.assertRegex(regex, cm.exception.reason) self.assertEqual(catch.unraisable.exc_type, ZeroDivisionError) def test_sni_callback_wrong_return_type(self): @@ -4489,8 +4534,8 @@ def msg_cb(conn, direction, version, content_type, msg_type, data): # test sometimes fails with EOF error. Test passes as long as # server aborts connection with an error. 
with self.assertRaisesRegex( - ssl.SSLError, - '(certificate required|EOF occurred)' + OSError, + 'certificate required|EOF occurred|closed by the remote host|Connection reset by peer' ): # receive CertificateRequest data = s.recv(1024) diff --git a/Lib/test/test_stable_abi_ctypes.py b/Lib/test/test_stable_abi_ctypes.py index 117c27d27b38dc..d0e4f3c71c15e0 100644 --- a/Lib/test/test_stable_abi_ctypes.py +++ b/Lib/test/test_stable_abi_ctypes.py @@ -708,6 +708,7 @@ def test_windows_feature_macros(self): "PyType_GetFlags", "PyType_GetFullyQualifiedName", "PyType_GetModule", + "PyType_GetModuleByDef", "PyType_GetModuleName", "PyType_GetModuleState", "PyType_GetName", @@ -856,6 +857,8 @@ def test_windows_feature_macros(self): "Py_GetArgcArgv", "Py_GetBuildInfo", "Py_GetCompiler", + "Py_GetConstant", + "Py_GetConstantBorrowed", "Py_GetCopyright", "Py_GetExecPrefix", "Py_GetPath", diff --git a/Lib/test/test_statistics.py b/Lib/test/test_statistics.py index 1cf41638a7f01a..204787a88a9c5f 100644 --- a/Lib/test/test_statistics.py +++ b/Lib/test/test_statistics.py @@ -2379,6 +2379,18 @@ def integrate(func, low, high, steps=10_000): area = integrate(f_hat, -20, 20) self.assertAlmostEqual(area, 1.0, places=4) + # Check CDF against an integral of the PDF + + data = [3, 5, 10, 12] + h = 2.3 + x = 10.5 + for kernel in kernels: + with self.subTest(kernel=kernel): + cdf = kde(data, h, kernel, cumulative=True) + f_hat = kde(data, h, kernel) + area = integrate(f_hat, -20, x, 100_000) + self.assertAlmostEqual(cdf(x), area, places=4) + # Check error cases with self.assertRaises(StatisticsError): @@ -2395,6 +2407,8 @@ def integrate(func, low, high, steps=10_000): kde(sample, h='str') # Wrong bandwidth type with self.assertRaises(StatisticsError): kde(sample, h=1.0, kernel='bogus') # Invalid kernel + with self.assertRaises(TypeError): + kde(sample, 1.0, 'gauss', True) # Positional cumulative argument # Test name and docstring of the generated function @@ -2403,7 +2417,7 @@ def integrate(func, low, high, steps=10_000): f_hat = kde(sample, h, kernel) self.assertEqual(f_hat.__name__, 'pdf') self.assertIn(kernel, f_hat.__doc__) - self.assertIn(str(h), f_hat.__doc__) + self.assertIn(repr(h), f_hat.__doc__) # Test closed interval for the support boundaries. # In particular, 'uniform' should non-zero at the boundaries. 
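The `statistics.kde()` checks added above exercise the new keyword-only `cumulative` flag and compare the resulting CDF estimate against a numeric integral of the PDF estimate. A minimal sketch of the behaviour under test, assuming a 3.13 interpreter and the `kde(data, h, kernel='normal', *, cumulative=False)` signature implied by those tests:

```python
# Minimal sketch (not part of the patch) of what the new test_statistics
# checks verify, assuming the statistics.kde() signature used in those tests.
from statistics import kde

data = [3, 5, 10, 12]
pdf = kde(data, h=2.3, kernel='normal')                    # density estimate
cdf = kde(data, h=2.3, kernel='normal', cumulative=True)   # cumulative estimate

# cdf(x) should approximate the integral of pdf over (-inf, x]; the new test
# compares it against a numeric integral of pdf.
print(round(cdf(10.5), 4))

# cumulative is keyword-only, so passing it positionally raises TypeError:
# kde(data, 2.3, 'normal', True)
```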
diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py index c44a778d5bbefe..9ecd8426cb5537 100644 --- a/Lib/test/test_subprocess.py +++ b/Lib/test/test_subprocess.py @@ -1607,6 +1607,22 @@ def test_class_getitems(self): self.assertIsInstance(subprocess.Popen[bytes], types.GenericAlias) self.assertIsInstance(subprocess.CompletedProcess[str], types.GenericAlias) + @unittest.skipUnless(hasattr(subprocess, '_winapi'), + 'need subprocess._winapi') + def test_wait_negative_timeout(self): + proc = subprocess.Popen(ZERO_RETURN_CMD) + with proc: + patch = mock.patch.object( + subprocess._winapi, + 'WaitForSingleObject', + return_value=subprocess._winapi.WAIT_OBJECT_0) + with patch as mock_wait: + proc.wait(-1) # negative timeout + mock_wait.assert_called_once_with(proc._handle, 0) + proc.returncode = None + + self.assertEqual(proc.wait(), 0) + class RunFuncTestCase(BaseTestCase): def run_python(self, code, **kwargs): @@ -1747,6 +1763,13 @@ def test_capture_output(self): self.assertIn(b'BDFL', cp.stdout) self.assertIn(b'FLUFL', cp.stderr) + def test_stdout_stdout(self): + # run() refuses to accept stdout=STDOUT + with self.assertRaises(ValueError, + msg=("STDOUT can only be used for stderr")): + self.run_python("print('will not be run')", + stdout=subprocess.STDOUT) + def test_stdout_with_capture_output_arg(self): # run() refuses to accept 'stdout' with 'capture_output' tf = tempfile.TemporaryFile() @@ -3382,14 +3405,15 @@ def test_preexec_at_exit(self): def dummy(): pass - def exit_handler(): - subprocess.Popen({ZERO_RETURN_CMD}, preexec_fn=dummy) - print("shouldn't be printed") - - atexit.register(exit_handler) + class AtFinalization: + def __del__(self): + print("OK") + subprocess.Popen({ZERO_RETURN_CMD}, preexec_fn=dummy) + print("shouldn't be printed") + at_finalization = AtFinalization() """ _, out, err = assert_python_ok("-c", code) - self.assertEqual(out, b'') + self.assertEqual(out.strip(), b"OK") self.assertIn(b"preexec_fn not supported at interpreter shutdown", err) @unittest.skipIf(not sysconfig.get_config_var("HAVE_VFORK"), diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index ee9b873d9023f0..d686dbf0c29149 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -1712,6 +1712,49 @@ Traceback (most recent call last): SyntaxError: only single target (not list) can be annotated +# 'not' after operators: + +>>> 3 + not 3 +Traceback (most recent call last): +SyntaxError: 'not' after an operator must be parenthesized + +>>> 3 * not 3 +Traceback (most recent call last): +SyntaxError: 'not' after an operator must be parenthesized + +>>> + not 3 +Traceback (most recent call last): +SyntaxError: 'not' after an operator must be parenthesized + +>>> - not 3 +Traceback (most recent call last): +SyntaxError: 'not' after an operator must be parenthesized + +>>> ~ not 3 +Traceback (most recent call last): +SyntaxError: 'not' after an operator must be parenthesized + +>>> 3 + - not 3 +Traceback (most recent call last): +SyntaxError: 'not' after an operator must be parenthesized + +>>> 3 + not -1 +Traceback (most recent call last): +SyntaxError: 'not' after an operator must be parenthesized + +# Check that we don't introduce misleading errors +>>> not 1 */ 2 +Traceback (most recent call last): +SyntaxError: invalid syntax + +>>> not 1 + +Traceback (most recent call last): +SyntaxError: invalid syntax + +>>> not + 1 + +Traceback (most recent call last): +SyntaxError: invalid syntax + Corner-cases that used to fail to raise the correct error: >>> def f(*, 
x=lambda __debug__:0): pass diff --git a/Lib/test/test_sysconfig.py b/Lib/test/test_sysconfig.py index c8315bbc8b727d..61c6a5a42502e7 100644 --- a/Lib/test/test_sysconfig.py +++ b/Lib/test/test_sysconfig.py @@ -8,7 +8,11 @@ from copy import copy from test.support import ( - captured_stdout, PythonSymlink, requires_subprocess, is_wasi + captured_stdout, + is_apple_mobile, + is_wasi, + PythonSymlink, + requires_subprocess, ) from test.support.import_helper import import_module from test.support.os_helper import (TESTFN, unlink, skip_unless_symlink, @@ -346,6 +350,8 @@ def test_get_platform(self): # XXX more platforms to tests here @unittest.skipIf(is_wasi, "Incompatible with WASI mapdir and OOT builds") + @unittest.skipIf(is_apple_mobile, + f"{sys.platform} doesn't distribute header files in the runtime environment") def test_get_config_h_filename(self): config_h = sysconfig.get_config_h_filename() self.assertTrue(os.path.isfile(config_h), config_h) @@ -423,6 +429,9 @@ def test_library(self): self.assertTrue(library.startswith(f'python{major}{minor}')) self.assertTrue(library.endswith('.dll')) self.assertEqual(library, ldlibrary) + elif is_apple_mobile: + framework = sysconfig.get_config_var('PYTHONFRAMEWORK') + self.assertEqual(ldlibrary, f"{framework}.framework/{framework}") else: self.assertTrue(library.startswith(f'libpython{major}.{minor}')) self.assertTrue(library.endswith('.a')) @@ -476,6 +485,8 @@ def test_platform_in_subprocess(self): self.assertEqual(my_platform, test_platform) @unittest.skipIf(is_wasi, "Incompatible with WASI mapdir and OOT builds") + @unittest.skipIf(is_apple_mobile, + f"{sys.platform} doesn't include config folder at runtime") def test_srcdir(self): # See Issues #15322, #15364. srcdir = sysconfig.get_config_var('srcdir') @@ -556,6 +567,8 @@ class MakefileTests(unittest.TestCase): @unittest.skipIf(sys.platform.startswith('win'), 'Test is not Windows compatible') @unittest.skipIf(is_wasi, "Incompatible with WASI mapdir and OOT builds") + @unittest.skipIf(is_apple_mobile, + f"{sys.platform} doesn't include config folder at runtime") def test_get_makefile_filename(self): makefile = sysconfig.get_makefile_filename() self.assertTrue(os.path.isfile(makefile), makefile) diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py index f1dc12944cb6cc..4414d2bb9cdb59 100644 --- a/Lib/test/test_threading.py +++ b/Lib/test/test_threading.py @@ -1154,21 +1154,21 @@ def import_threading(): self.assertEqual(out, b'') self.assertEqual(err, b'') - def test_start_new_thread_at_exit(self): + def test_start_new_thread_at_finalization(self): code = """if 1: - import atexit import _thread def f(): print("shouldn't be printed") - def exit_handler(): - _thread.start_new_thread(f, ()) - - atexit.register(exit_handler) + class AtFinalization: + def __del__(self): + print("OK") + _thread.start_new_thread(f, ()) + at_finalization = AtFinalization() """ _, out, err = assert_python_ok("-c", code) - self.assertEqual(out, b'') + self.assertEqual(out.strip(), b"OK") self.assertIn(b"can't create new thread at interpreter shutdown", err) class ThreadJoinOnShutdown(BaseTestCase): @@ -1297,6 +1297,30 @@ def main(): rc, out, err = assert_python_ok('-c', script) self.assertFalse(err) + def test_thread_from_thread(self): + script = """if True: + import threading + import time + + def thread2(): + time.sleep(0.05) + print("OK") + + def thread1(): + time.sleep(0.05) + t2 = threading.Thread(target=thread2) + t2.start() + + t = threading.Thread(target=thread1) + t.start() + # do not join() -- the 
interpreter waits for non-daemon threads to + # finish. + """ + rc, out, err = assert_python_ok('-c', script) + self.assertEqual(err, b"") + self.assertEqual(out.strip(), b"OK") + self.assertEqual(rc, 0) + @skip_unless_reliable_fork def test_reinit_tls_after_fork(self): # Issue #13817: fork() would deadlock in a multithreaded program with diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py index 739c15df13de21..6febb491788b42 100644 --- a/Lib/test/test_urllib2.py +++ b/Lib/test/test_urllib2.py @@ -777,7 +777,7 @@ def connect_ftp(self, user, passwd, host, port, dirs, ["foo", "bar"], "", None), ("ftp://localhost/baz.gif;type=a", "localhost", ftplib.FTP_PORT, "", "", "A", - [], "baz.gif", None), # XXX really this should guess image/gif + [], "baz.gif", "image/gif"), ]: req = Request(url) req.timeout = None diff --git a/Lib/test/test_venv.py b/Lib/test/test_venv.py index f60c662d322e38..63cc4b743862bc 100644 --- a/Lib/test/test_venv.py +++ b/Lib/test/test_venv.py @@ -531,7 +531,7 @@ def test_multiprocessing_recursion(self): rmtree(self.env_dir) self.run_with_capture(venv.create, self.env_dir) script = os.path.join(TEST_HOME_DIR, '_test_venv_multiprocessing.py') - subprocess.check_call([self.envpy(real_env_dir=True), script]) + subprocess.check_call([self.envpy(real_env_dir=True), "-I", script]) @unittest.skipIf(os.name == 'nt', 'not relevant on Windows') def test_deactivate_with_strict_bash_opts(self): diff --git a/Lib/test/test_webbrowser.py b/Lib/test/test_webbrowser.py index 8e5517c417580c..767539104f51d0 100644 --- a/Lib/test/test_webbrowser.py +++ b/Lib/test/test_webbrowser.py @@ -7,11 +7,14 @@ import webbrowser from test import support from test.support import import_helper +from test.support import is_apple_mobile from test.support import os_helper +from test.support import requires_subprocess +from test.support import threading_helper from unittest import mock -if not support.has_subprocess_support: - raise unittest.SkipTest("test webserver requires subprocess") +# The webbrowser module uses threading locks +threading_helper.requires_working_threading(module=True) URL = 'https://www.example.com' CMD_NAME = 'test' @@ -26,6 +29,7 @@ def wait(self, seconds=None): return 0 +@requires_subprocess() class CommandTestMixin: def _test(self, meth, *, args=[URL], kw={}, options, arguments): @@ -230,6 +234,73 @@ def test_open_new_tab(self): arguments=[f'openURL({URL},new-tab)']) +@unittest.skipUnless(sys.platform == "ios", "Test only applicable to iOS") +class IOSBrowserTest(unittest.TestCase): + def _obj_ref(self, *args): + # Construct a string representation of the arguments that can be used + # as a proxy for object instance references + return "|".join(str(a) for a in args) + + @unittest.skipIf(getattr(webbrowser, "objc", None) is None, + "iOS Webbrowser tests require ctypes") + def setUp(self): + # Intercept the the objc library. Wrap the calls to get the + # references to classes and selectors to return strings, and + # wrap msgSend to return stringified object references + self.orig_objc = webbrowser.objc + + webbrowser.objc = mock.Mock() + webbrowser.objc.objc_getClass = lambda cls: f"C#{cls.decode()}" + webbrowser.objc.sel_registerName = lambda sel: f"S#{sel.decode()}" + webbrowser.objc.objc_msgSend.side_effect = self._obj_ref + + def tearDown(self): + webbrowser.objc = self.orig_objc + + def _test(self, meth, **kwargs): + # The browser always gets focus, there's no concept of separate browser + # windows, and there's no API-level control over creating a new tab. 
+ # Therefore, all calls to webbrowser are effectively the same. + getattr(webbrowser, meth)(URL, **kwargs) + + # The ObjC String version of the URL is created with UTF-8 encoding + url_string_args = [ + "C#NSString", + "S#stringWithCString:encoding:", + b'https://www.example.com', + 4, + ] + # The NSURL version of the URL is created from that string + url_obj_args = [ + "C#NSURL", + "S#URLWithString:", + self._obj_ref(*url_string_args), + ] + # The openURL call is invoked on the shared application + shared_app_args = ["C#UIApplication", "S#sharedApplication"] + + # Verify that the last call is the one that opens the URL. + webbrowser.objc.objc_msgSend.assert_called_with( + self._obj_ref(*shared_app_args), + "S#openURL:options:completionHandler:", + self._obj_ref(*url_obj_args), + None, + None + ) + + def test_open(self): + self._test('open') + + def test_open_with_autoraise_false(self): + self._test('open', autoraise=False) + + def test_open_new(self): + self._test('open_new') + + def test_open_new_tab(self): + self._test('open_new_tab') + + class BrowserRegistrationTest(unittest.TestCase): def setUp(self): @@ -324,6 +395,10 @@ def test_synthesize(self): webbrowser.register(name, None, webbrowser.GenericBrowser(name)) webbrowser.get(sys.executable) + @unittest.skipIf( + is_apple_mobile, + "Apple mobile doesn't allow modifying browser with environment" + ) def test_environment(self): webbrowser = import_helper.import_fresh_module('webbrowser') try: @@ -335,6 +410,10 @@ def test_environment(self): webbrowser = import_helper.import_fresh_module('webbrowser') webbrowser.get() + @unittest.skipIf( + is_apple_mobile, + "Apple mobile doesn't allow modifying browser with environment" + ) def test_environment_preferred(self): webbrowser = import_helper.import_fresh_module('webbrowser') try: diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py index 3f01a79cc05efd..bae61f754e75f5 100644 --- a/Lib/test/test_xml_etree.py +++ b/Lib/test/test_xml_etree.py @@ -1707,11 +1707,10 @@ def test_unknown_event(self): with self.assertRaises(ValueError): ET.XMLPullParser(events=('start', 'end', 'bogus')) + @unittest.skipIf(pyexpat.version_info < (2, 6, 0), + f'Expat {pyexpat.version_info} does not ' + 'support reparse deferral') def test_flush_reparse_deferral_enabled(self): - if pyexpat.version_info < (2, 6, 0): - self.skipTest(f'Expat {pyexpat.version_info} does not ' - 'support reparse deferral') - parser = ET.XMLPullParser(events=('start', 'end')) for chunk in (""): @@ -1743,8 +1742,8 @@ def test_flush_reparse_deferral_disabled(self): self.skipTest(f'XMLParser.(Get|Set)ReparseDeferralEnabled ' 'methods not available in C') parser._parser._parser.SetReparseDeferralEnabled(False) + self.assert_event_tags(parser, []) # i.e. no elements started - self.assert_event_tags(parser, []) # i.e. 
no elements started if ET is pyET: self.assertFalse(parser._parser._parser.GetReparseDeferralEnabled()) diff --git a/Lib/test/test_zipfile/test_core.py b/Lib/test/test_zipfile/test_core.py index 368e60a37b0555..a605aa1f14fe3f 100644 --- a/Lib/test/test_zipfile/test_core.py +++ b/Lib/test/test_zipfile/test_core.py @@ -2937,6 +2937,22 @@ def test_bug_6050(self): os.mkdir(os.path.join(TESTFN2, "a")) self.test_extract_dir() + def test_extract_dir_backslash(self): + zfname = findfile("zipdir_backslash.zip", subdir="archivetestdata") + with zipfile.ZipFile(zfname) as zipf: + zipf.extractall(TESTFN2) + if os.name == 'nt': + self.assertTrue(os.path.isdir(os.path.join(TESTFN2, "a"))) + self.assertTrue(os.path.isdir(os.path.join(TESTFN2, "a", "b"))) + self.assertTrue(os.path.isfile(os.path.join(TESTFN2, "a", "b", "c"))) + self.assertTrue(os.path.isdir(os.path.join(TESTFN2, "d"))) + self.assertTrue(os.path.isdir(os.path.join(TESTFN2, "d", "e"))) + else: + self.assertTrue(os.path.isfile(os.path.join(TESTFN2, "a\\b\\c"))) + self.assertTrue(os.path.isfile(os.path.join(TESTFN2, "d\\e\\"))) + self.assertFalse(os.path.exists(os.path.join(TESTFN2, "a"))) + self.assertFalse(os.path.exists(os.path.join(TESTFN2, "d"))) + def test_write_dir(self): dirpath = os.path.join(TESTFN2, "x") os.mkdir(dirpath) diff --git a/Lib/test/test_zipimport.py b/Lib/test/test_zipimport.py index c12798d221e9b7..ae49700294330c 100644 --- a/Lib/test/test_zipimport.py +++ b/Lib/test/test_zipimport.py @@ -128,6 +128,10 @@ def makeZip(self, files, zipName=TEMP_ZIP, **kw): f.write(stuff) f.write(data) + def getZip64Files(self): + # This is the simplest way to make zipfile generate the zip64 EOCD block + return {f"f{n}.py": (NOW, test_src) for n in range(65537)} + def doTest(self, expected_ext, files, *modules, **kw): self.makeZip(files, **kw) @@ -798,6 +802,14 @@ def testLargestPossibleComment(self): files = {TESTMOD + ".py": (NOW, test_src)} self.doTest(".py", files, TESTMOD, comment=b"c" * ((1 << 16) - 1)) + def testZip64(self): + files = self.getZip64Files() + self.doTest(".py", files, "f6") + + def testZip64CruftAndComment(self): + files = self.getZip64Files() + self.doTest(".py", files, "f65536", comment=b"c" * ((1 << 16) - 1)) + @support.requires_zlib() class CompressedZipImportTestCase(UncompressedZipImportTestCase): diff --git a/Lib/typing.py b/Lib/typing.py index 533b64062834d2..581d187235dc7e 100644 --- a/Lib/typing.py +++ b/Lib/typing.py @@ -1717,7 +1717,7 @@ class _TypingEllipsis: '__abstractmethods__', '__annotations__', '__dict__', '__doc__', '__init__', '__module__', '__new__', '__slots__', '__subclasshook__', '__weakref__', '__class_getitem__', - '__match_args__', + '__match_args__', '__static_attributes__', }) # These special attributes will be not collected as protocol members. diff --git a/Lib/venv/scripts/posix/activate.fish b/Lib/venv/scripts/common/activate.fish similarity index 100% rename from Lib/venv/scripts/posix/activate.fish rename to Lib/venv/scripts/common/activate.fish diff --git a/Lib/webbrowser.py b/Lib/webbrowser.py index 77d8f9406b27f7..442c7d00279aef 100755 --- a/Lib/webbrowser.py +++ b/Lib/webbrowser.py @@ -486,6 +486,9 @@ def register_standard_browsers(): # OS X can use below Unix support (but we prefer using the OS X # specific stuff) + if sys.platform == "ios": + register("iosbrowser", None, IOSBrowser(), preferred=True) + if sys.platform == "serenityos": # SerenityOS webbrowser, simply called "Browser". 
register("Browser", None, BackgroundBrowser("Browser")) @@ -608,6 +611,70 @@ def open(self, url, new=0, autoraise=True): rc = osapipe.close() return not rc +# +# Platform support for iOS +# +if sys.platform == "ios": + from _ios_support import objc + if objc: + # If objc exists, we know ctypes is also importable. + from ctypes import c_void_p, c_char_p, c_ulong + + class IOSBrowser(BaseBrowser): + def open(self, url, new=0, autoraise=True): + sys.audit("webbrowser.open", url) + # If ctypes isn't available, we can't open a browser + if objc is None: + return False + + # All the messages in this call return object references. + objc.objc_msgSend.restype = c_void_p + + # This is the equivalent of: + # NSString url_string = + # [NSString stringWithCString:url.encode("utf-8") + # encoding:NSUTF8StringEncoding]; + NSString = objc.objc_getClass(b"NSString") + constructor = objc.sel_registerName(b"stringWithCString:encoding:") + objc.objc_msgSend.argtypes = [c_void_p, c_void_p, c_char_p, c_ulong] + url_string = objc.objc_msgSend( + NSString, + constructor, + url.encode("utf-8"), + 4, # NSUTF8StringEncoding = 4 + ) + + # Create an NSURL object representing the URL + # This is the equivalent of: + # NSURL *nsurl = [NSURL URLWithString:url]; + NSURL = objc.objc_getClass(b"NSURL") + urlWithString_ = objc.sel_registerName(b"URLWithString:") + objc.objc_msgSend.argtypes = [c_void_p, c_void_p, c_void_p] + ns_url = objc.objc_msgSend(NSURL, urlWithString_, url_string) + + # Get the shared UIApplication instance + # This code is the equivalent of: + # UIApplication shared_app = [UIApplication sharedApplication] + UIApplication = objc.objc_getClass(b"UIApplication") + sharedApplication = objc.sel_registerName(b"sharedApplication") + objc.objc_msgSend.argtypes = [c_void_p, c_void_p] + shared_app = objc.objc_msgSend(UIApplication, sharedApplication) + + # Open the URL on the shared application + # This code is the equivalent of: + # [shared_app openURL:ns_url + # options:NIL + # completionHandler:NIL]; + openURL_ = objc.sel_registerName(b"openURL:options:completionHandler:") + objc.objc_msgSend.argtypes = [ + c_void_p, c_void_p, c_void_p, c_void_p, c_void_p + ] + # Method returns void + objc.objc_msgSend.restype = None + objc.objc_msgSend(shared_app, openURL_, ns_url, None, None) + + return True + def parse_args(arg_list: list[str] | None): import argparse diff --git a/Lib/zipfile/__init__.py b/Lib/zipfile/__init__.py index cc08f602fe44e0..b330ece4b276ad 100644 --- a/Lib/zipfile/__init__.py +++ b/Lib/zipfile/__init__.py @@ -605,7 +605,15 @@ def from_file(cls, filename, arcname=None, *, strict_timestamps=True): def is_dir(self): """Return True if this archive member is a directory.""" - return self.filename.endswith('/') + if self.filename.endswith('/'): + return True + # The ZIP format specification requires to use forward slashes + # as the directory separator, but in practice some ZIP files + # created on Windows can use backward slashes. 
For compatibility + # with the extraction code which already handles this: + if os.path.altsep: + return self.filename.endswith((os.path.sep, os.path.altsep)) + return False # ZIP encryption uses the CRC32 one-byte primitive for scrambling some diff --git a/Lib/zipimport.py b/Lib/zipimport.py index 823a82ee830465..21d2dca46f569b 100644 --- a/Lib/zipimport.py +++ b/Lib/zipimport.py @@ -15,7 +15,7 @@ #from importlib import _bootstrap_external #from importlib import _bootstrap # for _verbose_message import _frozen_importlib_external as _bootstrap_external -from _frozen_importlib_external import _unpack_uint16, _unpack_uint32 +from _frozen_importlib_external import _unpack_uint16, _unpack_uint32, _unpack_uint64 import _frozen_importlib as _bootstrap # for _verbose_message import _imp # for check_hash_based_pycs import _io # for open @@ -40,8 +40,14 @@ class ZipImportError(ImportError): _module_type = type(sys) END_CENTRAL_DIR_SIZE = 22 -STRING_END_ARCHIVE = b'PK\x05\x06' +END_CENTRAL_DIR_SIZE_64 = 56 +END_CENTRAL_DIR_LOCATOR_SIZE_64 = 20 +STRING_END_ARCHIVE = b'PK\x05\x06' # standard EOCD signature +STRING_END_LOCATOR_64 = b'PK\x06\x07' # Zip64 EOCD Locator signature +STRING_END_ZIP_64 = b'PK\x06\x06' # Zip64 EOCD signature MAX_COMMENT_LEN = (1 << 16) - 1 +MAX_UINT32 = 0xffffffff +ZIP64_EXTRA_TAG = 0x1 class zipimporter(_bootstrap_external._LoaderBasics): """zipimporter(archivepath) -> zipimporter object @@ -356,49 +362,72 @@ def _read_directory(archive): # to not cause problems when some runs 'python3 /dev/fd/9 9= 0 and pos64+END_CENTRAL_DIR_SIZE_64+END_CENTRAL_DIR_LOCATOR_SIZE_64==pos): + # Zip64 at "correct" offset from standard EOCD + buffer = data[pos64:pos64 + END_CENTRAL_DIR_SIZE_64] + if len(buffer) != END_CENTRAL_DIR_SIZE_64: + raise ZipImportError( + f"corrupt Zip64 file: Expected {END_CENTRAL_DIR_SIZE_64} byte " + f"zip64 central directory, but read {len(buffer)} bytes.", + path=archive) + header_position = file_size - len(data) + pos64 + + central_directory_size = _unpack_uint64(buffer[40:48]) + central_directory_position = _unpack_uint64(buffer[48:56]) + num_entries = _unpack_uint64(buffer[24:32]) + elif pos >= 0: buffer = data[pos:pos+END_CENTRAL_DIR_SIZE] if len(buffer) != END_CENTRAL_DIR_SIZE: raise ZipImportError(f"corrupt Zip file: {archive!r}", path=archive) + header_position = file_size - len(data) + pos - header_size = _unpack_uint32(buffer[12:16]) - header_offset = _unpack_uint32(buffer[16:20]) - if header_position < header_size: + # Buffer now contains a valid EOCD, and header_position gives the + # starting position of it. + central_directory_size = _unpack_uint32(buffer[12:16]) + central_directory_position = _unpack_uint32(buffer[16:20]) + num_entries = _unpack_uint16(buffer[8:10]) + + # N.b. if someday you want to prefer the standard (non-zip64) EOCD, + # you need to adjust position by 76 for arc to be 0. + else: + raise ZipImportError(f'not a Zip file: {archive!r}', + path=archive) + + # Buffer now contains a valid EOCD, and header_position gives the + # starting position of it. + # XXX: These are cursory checks but are not as exact or strict as they + # could be. Checking the arc-adjusted value is probably good too. 
+ if header_position < central_directory_size: raise ZipImportError(f'bad central directory size: {archive!r}', path=archive) - if header_position < header_offset: + if header_position < central_directory_position: raise ZipImportError(f'bad central directory offset: {archive!r}', path=archive) - header_position -= header_size - arc_offset = header_position - header_offset + header_position -= central_directory_size + # On just-a-zipfile these values are the same and arc_offset is zero; if + # the file has some bytes prepended, `arc_offset` is the number of such + # bytes. This is used for pex as well as self-extracting .exe. + arc_offset = header_position - central_directory_position if arc_offset < 0: raise ZipImportError(f'bad central directory size or offset: {archive!r}', path=archive) @@ -415,6 +444,11 @@ def _read_directory(archive): raise EOFError('EOF read where not expected') # Start of file header if buffer[:4] != b'PK\x01\x02': + if count != num_entries: + raise ZipImportError( + f"mismatched num_entries: {count} should be {num_entries} in {archive!r}", + path=archive, + ) break # Bad: Central Dir File Header if len(buffer) != 46: raise EOFError('EOF read where not expected') @@ -430,9 +464,6 @@ def _read_directory(archive): comment_size = _unpack_uint16(buffer[32:34]) file_offset = _unpack_uint32(buffer[42:46]) header_size = name_size + extra_size + comment_size - if file_offset > header_offset: - raise ZipImportError(f'bad local header offset: {archive!r}', path=archive) - file_offset += arc_offset try: name = fp.read(name_size) @@ -444,7 +475,10 @@ def _read_directory(archive): # slower than reading the data because fseek flushes stdio's # internal buffers. See issue #8745. try: - if len(fp.read(header_size - name_size)) != header_size - name_size: + extra_data_len = header_size - name_size + extra_data = memoryview(fp.read(extra_data_len)) + + if len(extra_data) != extra_data_len: raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive) except OSError: raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive) @@ -461,6 +495,60 @@ def _read_directory(archive): name = name.replace('/', path_sep) path = _bootstrap_external._path_join(archive, name) + + # Ordering matches unpacking below. + if ( + file_size == MAX_UINT32 or + data_size == MAX_UINT32 or + file_offset == MAX_UINT32 + ): + # need to decode extra_data looking for a zip64 extra (which might not + # be present) + while extra_data: + if len(extra_data) < 4: + raise ZipImportError(f"can't read header extra: {archive!r}", path=archive) + tag = _unpack_uint16(extra_data[:2]) + size = _unpack_uint16(extra_data[2:4]) + if len(extra_data) < 4 + size: + raise ZipImportError(f"can't read header extra: {archive!r}", path=archive) + if tag == ZIP64_EXTRA_TAG: + if (len(extra_data) - 4) % 8 != 0: + raise ZipImportError(f"can't read header extra: {archive!r}", path=archive) + num_extra_values = (len(extra_data) - 4) // 8 + if num_extra_values > 3: + raise ZipImportError(f"can't read header extra: {archive!r}", path=archive) + values = struct.unpack_from(f"<{min(num_extra_values, 3)}Q", + extra_data, offset=4) + + # N.b. 
Here be dragons: the ordering of these is different than + # the header fields, and it's really easy to get it wrong since + # naturally-occuring zips that use all 3 are >4GB + if file_size == MAX_UINT32: + file_size = values.pop(0) + if data_size == MAX_UINT32: + data_size = values.pop(0) + if file_offset == MAX_UINT32: + file_offset = values.pop(0) + + break + + # For a typical zip, this bytes-slicing only happens 2-3 times, on + # small data like timestamps and filesizes. + extra_data = extra_data[4+size:] + else: + _bootstrap._verbose_message( + "zipimport: suspected zip64 but no zip64 extra for {!r}", + path, + ) + # XXX These two statements seem swapped because `central_directory_position` + # is a position within the actual file, but `file_offset` (when compared) is + # as encoded in the entry, not adjusted for this file. + # N.b. this must be after we've potentially read the zip64 extra which can + # change `file_offset`. + if file_offset > central_directory_position: + raise ZipImportError(f'bad local header offset: {archive!r}', path=archive) + file_offset += arc_offset + t = (path, compress, data_size, file_size, file_offset, time, date, crc) files[name] = t count += 1 diff --git a/Makefile.pre.in b/Makefile.pre.in index 404e7ee3e42054..5b89d6ba1acf71 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -186,12 +186,18 @@ PYTHONFRAMEWORKPREFIX= @PYTHONFRAMEWORKPREFIX@ PYTHONFRAMEWORKINSTALLDIR= @PYTHONFRAMEWORKINSTALLDIR@ PYTHONFRAMEWORKINSTALLNAMEPREFIX= @PYTHONFRAMEWORKINSTALLNAMEPREFIX@ RESSRCDIR= @RESSRCDIR@ -# Deployment target selected during configure, to be checked +# macOS deployment target selected during configure, to be checked # by distutils. The export statement is needed to ensure that the # deployment target is active during build. MACOSX_DEPLOYMENT_TARGET=@CONFIGURE_MACOSX_DEPLOYMENT_TARGET@ @EXPORT_MACOSX_DEPLOYMENT_TARGET@export MACOSX_DEPLOYMENT_TARGET +# iOS Deployment target selected during configure. Unlike macOS, the iOS +# deployment target is controlled using `-mios-version-min` arguments added to +# CFLAGS and LDFLAGS by the configure script. This variable is not used during +# the build, and is only listed here so it will be included in sysconfigdata. +IPHONEOS_DEPLOYMENT_TARGET=@IPHONEOS_DEPLOYMENT_TARGET@ + # Option to install to strip binaries STRIPFLAG=-s @@ -507,7 +513,6 @@ OBJECT_OBJS= \ Objects/floatobject.o \ Objects/frameobject.o \ Objects/funcobject.o \ - Objects/interpreteridobject.o \ Objects/iterobject.o \ Objects/listobject.o \ Objects/longobject.o \ @@ -1003,7 +1008,6 @@ PYTHON_HEADERS= \ $(srcdir)/Include/frameobject.h \ $(srcdir)/Include/genericaliasobject.h \ $(srcdir)/Include/import.h \ - $(srcdir)/Include/interpreteridobject.h \ $(srcdir)/Include/intrcheck.h \ $(srcdir)/Include/iterobject.h \ $(srcdir)/Include/listobject.h \ @@ -1077,7 +1081,6 @@ PYTHON_HEADERS= \ $(srcdir)/Include/cpython/genobject.h \ $(srcdir)/Include/cpython/import.h \ $(srcdir)/Include/cpython/initconfig.h \ - $(srcdir)/Include/cpython/interpreteridobject.h \ $(srcdir)/Include/cpython/listobject.h \ $(srcdir)/Include/cpython/longintrepr.h \ $(srcdir)/Include/cpython/longobject.h \ @@ -2041,11 +2044,23 @@ testios: cp -r $(srcdir)/iOS/testbed $(XCFOLDER) # Copy the framework from the install location to the testbed project. cp -r $(PYTHONFRAMEWORKPREFIX)/* $(XCFOLDER)/Python.xcframework/ios-arm64_x86_64-simulator + # Run the test suite for the Xcode project, targeting the iOS simulator. 
- # If the suite fails, extract and print the console output, then re-raise the failure + # If the suite fails, touch a file in the test folder as a marker if ! xcodebuild test -project $(XCFOLDER)/iOSTestbed.xcodeproj -scheme "iOSTestbed" -destination "platform=iOS Simulator,name=iPhone SE (3rd Generation)" -resultBundlePath $(XCRESULT) ; then \ - xcrun xcresulttool get --path $(XCRESULT) --id $$(xcrun xcresulttool get --path $(XCRESULT) --format json | $(PYTHON_FOR_BUILD) -c "import sys, json; result = json.load(sys.stdin); print(result['actions']['_values'][0]['actionResult']['logRef']['id']['_value'])"); \ - echo ; \ + touch $(XCFOLDER)/failed; \ + fi + + # Regardless of success or failure, extract and print the test output + xcrun xcresulttool get --path $(XCRESULT) \ + --id $$( \ + xcrun xcresulttool get --path $(XCRESULT) --format json | \ + $(PYTHON_FOR_BUILD) -c "import sys, json; result = json.load(sys.stdin); print(result['actions']['_values'][0]['actionResult']['logRef']['id']['_value'])" \ + ) \ + --format json | \ + $(PYTHON_FOR_BUILD) -c "import sys, json; result = json.load(sys.stdin); print(result['subsections']['_values'][1]['subsections']['_values'][0]['emittedOutput']['_value'])" + + @if test -e $(XCFOLDER)/failed ; then \ exit 1; \ fi @@ -2351,10 +2366,16 @@ TESTSUBDIRS= idlelib/idle_test \ test/test_import/data/unwritable \ test/test_importlib \ test/test_importlib/builtin \ - test/test_importlib/data \ test/test_importlib/extension \ test/test_importlib/frozen \ test/test_importlib/import_ \ + test/test_importlib/metadata \ + test/test_importlib/metadata/data \ + test/test_importlib/metadata/data/sources \ + test/test_importlib/metadata/data/sources/example \ + test/test_importlib/metadata/data/sources/example/example \ + test/test_importlib/metadata/data/sources/example2 \ + test/test_importlib/metadata/data/sources/example2/example2 \ test/test_importlib/namespace_pkgs \ test/test_importlib/namespace_pkgs/both_portions \ test/test_importlib/namespace_pkgs/both_portions/foo \ @@ -2774,8 +2795,8 @@ frameworkinstallmobileheaders: frameworkinstallunversionedstructure inclinstall echo "Removing old framework headers"; \ rm -rf $(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/Headers; \ fi - mv "$(DESTDIR)$(PYTHONFRAMEWORKPREFIX)/include/python$(VERSION)" "$(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/Headers" - $(LN) -fs "../$(PYTHONFRAMEWORKDIR)/Headers" "$(DESTDIR)$(PYTHONFRAMEWORKPREFIX)/include/python$(VERSION)" + mv "$(DESTDIR)$(PYTHONFRAMEWORKPREFIX)/include/python$(LDVERSION)" "$(DESTDIR)$(PYTHONFRAMEWORKINSTALLDIR)/Headers" + $(LN) -fs "../$(PYTHONFRAMEWORKDIR)/Headers" "$(DESTDIR)$(PYTHONFRAMEWORKPREFIX)/include/python$(LDVERSION)" # Build the toplevel Makefile Makefile.pre: $(srcdir)/Makefile.pre.in config.status @@ -3051,7 +3072,8 @@ MODULE__SHA2_DEPS=$(srcdir)/Modules/hashlib.h $(LIBHACL_SHA2_HEADERS) $(LIBHACL_ MODULE__SHA3_DEPS=$(srcdir)/Modules/hashlib.h $(LIBHACL_HEADERS) Modules/_hacl/Hacl_Hash_SHA3.h Modules/_hacl/Hacl_Hash_SHA3.c MODULE__SOCKET_DEPS=$(srcdir)/Modules/socketmodule.h $(srcdir)/Modules/addrinfo.h $(srcdir)/Modules/getaddrinfo.c $(srcdir)/Modules/getnameinfo.c MODULE__SSL_DEPS=$(srcdir)/Modules/_ssl.h $(srcdir)/Modules/_ssl/cert.c $(srcdir)/Modules/_ssl/debughelpers.c $(srcdir)/Modules/_ssl/misc.c $(srcdir)/Modules/_ssl_data_111.h $(srcdir)/Modules/_ssl_data_300.h $(srcdir)/Modules/socketmodule.h -MODULE__TESTCAPI_DEPS=$(srcdir)/Modules/_testcapi/testcapi_long.h $(srcdir)/Modules/_testcapi/parts.h $(srcdir)/Modules/_testcapi/util.h 
+MODULE__TESTCAPI_DEPS=$(srcdir)/Modules/_testcapi/parts.h $(srcdir)/Modules/_testcapi/util.h
+MODULE__TESTLIMITEDCAPI_DEPS=$(srcdir)/Modules/_testlimitedcapi/testcapi_long.h $(srcdir)/Modules/_testlimitedcapi/parts.h $(srcdir)/Modules/_testlimitedcapi/util.h
 MODULE__TESTINTERNALCAPI_DEPS=$(srcdir)/Modules/_testinternalcapi/parts.h
 MODULE__SQLITE3_DEPS=$(srcdir)/Modules/_sqlite/connection.h $(srcdir)/Modules/_sqlite/cursor.h $(srcdir)/Modules/_sqlite/microprotocols.h $(srcdir)/Modules/_sqlite/module.h $(srcdir)/Modules/_sqlite/prepare_protocol.h $(srcdir)/Modules/_sqlite/row.h $(srcdir)/Modules/_sqlite/util.h
diff --git a/Misc/NEWS.d/next/Build/2024-03-06-17-26-55.gh-issue-71052.vLbu9u.rst b/Misc/NEWS.d/next/Build/2024-03-06-17-26-55.gh-issue-71052.vLbu9u.rst
new file mode 100644
index 00000000000000..53776c0216f553
--- /dev/null
+++ b/Misc/NEWS.d/next/Build/2024-03-06-17-26-55.gh-issue-71052.vLbu9u.rst
@@ -0,0 +1 @@
+Add Android build script and instructions.
diff --git a/Misc/NEWS.d/next/C API/2023-12-12-19-48-31.gh-issue-113024.rXcQs7.rst b/Misc/NEWS.d/next/C API/2023-12-12-19-48-31.gh-issue-113024.rXcQs7.rst
new file mode 100644
index 00000000000000..60ed6e64c3b6b8
--- /dev/null
+++ b/Misc/NEWS.d/next/C API/2023-12-12-19-48-31.gh-issue-113024.rXcQs7.rst
@@ -0,0 +1 @@
+Add :c:func:`PyObject_GenericHash` function.
diff --git a/Misc/NEWS.d/next/C API/2024-03-14-10-33-58.gh-issue-85283.LOgmdU.rst b/Misc/NEWS.d/next/C API/2024-03-14-10-33-58.gh-issue-85283.LOgmdU.rst
index ef8a934b435a88..c8e6b1b1e6ed62 100644
--- a/Misc/NEWS.d/next/C API/2024-03-14-10-33-58.gh-issue-85283.LOgmdU.rst
+++ b/Misc/NEWS.d/next/C API/2024-03-14-10-33-58.gh-issue-85283.LOgmdU.rst
@@ -1,2 +1,3 @@
-The ``fcntl``, ``grp``, ``pwd``, ``termios`` and ``_statistics`` C extensions are now
-built with the :ref:`limited C API `. Patch by Victor Stinner.
+The ``fcntl``, ``grp``, ``pwd``, ``termios``, ``_statistics`` and
+``_testconsole`` C extensions are now built with the :ref:`limited C API
+`. Patch by Victor Stinner.
diff --git a/Misc/NEWS.d/next/C API/2024-03-15-23-55-24.gh-issue-115754.xnzc__.rst b/Misc/NEWS.d/next/C API/2024-03-15-23-55-24.gh-issue-115754.xnzc__.rst
new file mode 100644
index 00000000000000..d76c98ee54056d
--- /dev/null
+++ b/Misc/NEWS.d/next/C API/2024-03-15-23-55-24.gh-issue-115754.xnzc__.rst
@@ -0,0 +1,3 @@
+Add :c:func:`Py_GetConstant` and :c:func:`Py_GetConstantBorrowed` functions to
+get constants. For example, ``Py_GetConstant(Py_CONSTANT_ZERO)`` returns a
+:term:`strong reference` to the constant zero. Patch by Victor Stinner.
diff --git a/Misc/NEWS.d/next/C API/2024-03-15-23-57-33.gh-issue-115754.zLdv82.rst b/Misc/NEWS.d/next/C API/2024-03-15-23-57-33.gh-issue-115754.zLdv82.rst
new file mode 100644
index 00000000000000..feff0c0897eae1
--- /dev/null
+++ b/Misc/NEWS.d/next/C API/2024-03-15-23-57-33.gh-issue-115754.zLdv82.rst
@@ -0,0 +1,5 @@
+In the limited C API version 3.13, getting the ``Py_None``, ``Py_False``,
+``Py_True``, ``Py_Ellipsis`` and ``Py_NotImplemented`` singletons is now
+implemented as function calls at the stable ABI level to hide implementation
+details. Getting these constants still returns borrowed references. Patch by
+Victor Stinner.
diff --git a/Misc/NEWS.d/next/C API/2024-03-17-22-42-21.gh-issue-116936.tNrzfm.rst b/Misc/NEWS.d/next/C API/2024-03-17-22-42-21.gh-issue-116936.tNrzfm.rst
new file mode 100644
index 00000000000000..bd2abc94082a5a
--- /dev/null
+++ b/Misc/NEWS.d/next/C API/2024-03-17-22-42-21.gh-issue-116936.tNrzfm.rst
@@ -0,0 +1,2 @@
+Add :c:func:`PyType_GetModuleByDef` to the limited C API. Patch by Victor
+Stinner.
diff --git a/Misc/NEWS.d/next/C API/2024-03-19-09-49-04.gh-issue-115756.4Ls_Tl.rst b/Misc/NEWS.d/next/C API/2024-03-19-09-49-04.gh-issue-115756.4Ls_Tl.rst
new file mode 100644
index 00000000000000..6960395fe229a3
--- /dev/null
+++ b/Misc/NEWS.d/next/C API/2024-03-19-09-49-04.gh-issue-115756.4Ls_Tl.rst
@@ -0,0 +1,3 @@
+:c:func:`!PyCode_GetFirstFree` is now an unstable API and has been renamed to
+:c:func:`PyUnstable_Code_GetFirstFree`. (Contributed by Bogdan Romanyuk in
+:gh:`115781`)
diff --git a/Misc/NEWS.d/next/C API/2024-03-20-13-13-22.gh-issue-117021.0Q5jBx.rst b/Misc/NEWS.d/next/C API/2024-03-20-13-13-22.gh-issue-117021.0Q5jBx.rst
new file mode 100644
index 00000000000000..2f93e1e6da00aa
--- /dev/null
+++ b/Misc/NEWS.d/next/C API/2024-03-20-13-13-22.gh-issue-117021.0Q5jBx.rst
@@ -0,0 +1,2 @@
+Fix integer overflow in :c:func:`PyLong_AsPid` on non-Windows 64-bit
+platforms.
diff --git a/Misc/NEWS.d/next/C API/2024-03-22-19-29-24.gh-issue-87193.u7O-jY.rst b/Misc/NEWS.d/next/C API/2024-03-22-19-29-24.gh-issue-87193.u7O-jY.rst
new file mode 100644
index 00000000000000..cb921a9c7bf36e
--- /dev/null
+++ b/Misc/NEWS.d/next/C API/2024-03-22-19-29-24.gh-issue-87193.u7O-jY.rst
@@ -0,0 +1,3 @@
+:c:func:`_PyBytes_Resize` can now be called for bytes objects with reference
+count > 1, including 1-byte bytes objects. It creates a new bytes object and
+destroys the old one if it has reference count > 1.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2021-09-04-22-33-01.bpo-24612.SsTuUX.rst b/Misc/NEWS.d/next/Core and Builtins/2021-09-04-22-33-01.bpo-24612.SsTuUX.rst
new file mode 100644
index 00000000000000..d54ffc4b76db11
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2021-09-04-22-33-01.bpo-24612.SsTuUX.rst
@@ -0,0 +1,2 @@
+Improve the :exc:`SyntaxError` that happens when 'not' appears after an
+operator. Patch by Pablo Galindo.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-10-05-09-33-48.gh-issue-97901.BOLluU.rst b/Misc/NEWS.d/next/Core and Builtins/2022-10-05-09-33-48.gh-issue-97901.BOLluU.rst
new file mode 100644
index 00000000000000..4d2bd65ea1fee6
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2022-10-05-09-33-48.gh-issue-97901.BOLluU.rst
@@ -0,0 +1 @@
+Mime type ``text/rtf`` is now supported by :mod:`mimetypes`.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-10-14-00-05-17.gh-issue-109870.oKpJ3P.rst b/Misc/NEWS.d/next/Core and Builtins/2023-10-14-00-05-17.gh-issue-109870.oKpJ3P.rst
new file mode 100644
index 00000000000000..390bb1260ea843
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2023-10-14-00-05-17.gh-issue-109870.oKpJ3P.rst
@@ -0,0 +1,3 @@
+Dataclasses now calls :func:`exec` once per dataclass, instead of once
+per method being added. This can speed up dataclass creation by up to
+20%.
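The dataclasses entry above is a pure class-creation-time optimization; the behaviour of instances is unchanged. A minimal, illustrative way to observe it (not part of the patch; timings vary by machine and interpreter build, so the quoted 20% should not be expected to reproduce exactly) is to time repeated dataclass creation:

```python
# Rough micro-benchmark of dataclass *creation* cost, the path that the
# exec()-per-dataclass change above targets.
import timeit
from dataclasses import make_dataclass

def create():
    # Each call builds a fresh dataclass with several generated methods
    # (__init__, __repr__, __eq__), which is where exec() comes in.
    make_dataclass("Point", [("x", int), ("y", int), ("z", int)])

print(timeit.timeit(create, number=1000))
```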
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-01-07-04-22-51.gh-issue-108362.oB9Gcf.rst b/Misc/NEWS.d/next/Core and Builtins/2024-01-07-04-22-51.gh-issue-108362.oB9Gcf.rst
new file mode 100644
index 00000000000000..893904bcecea8a
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-01-07-04-22-51.gh-issue-108362.oB9Gcf.rst
@@ -0,0 +1,12 @@
+Implement an incremental cyclic garbage collector. By collecting the old
+generation in increments, there is no need for a full heap scan. This can
+hugely reduce maximum pause time for programs with large heaps.
+
+Reduce the number of generations from three to two. The old generation is
+split into two spaces, "visited" and "pending".
+
+Collection happens in two steps:
+* An increment is formed from the young generation and a small part of the pending space.
+* This increment is scanned and the survivors are moved to the end of the visited space.
+
+When the collecting space becomes empty, the two spaces are swapped.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-02-25-14-17-25.gh-issue-115775.CNbGbJ.rst b/Misc/NEWS.d/next/Core and Builtins/2024-02-25-14-17-25.gh-issue-115775.CNbGbJ.rst
new file mode 100644
index 00000000000000..78bef746b67d85
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-02-25-14-17-25.gh-issue-115775.CNbGbJ.rst
@@ -0,0 +1,3 @@
+The compiler now populates the new ``__static_attributes__`` field on a class
+with the names of attributes of this class which are accessed through
+``self.X`` from any function in its body.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-03-07-16-12-39.gh-issue-114099.ujdjn2.rst b/Misc/NEWS.d/next/Core and Builtins/2024-03-07-16-12-39.gh-issue-114099.ujdjn2.rst
new file mode 100644
index 00000000000000..5405a3bdc36f9e
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-03-07-16-12-39.gh-issue-114099.ujdjn2.rst
@@ -0,0 +1,2 @@
+Added a Loader that can discover extension modules in an iOS-style Frameworks
+folder.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-03-12-20-31-57.gh-issue-113964.bJppzg.rst b/Misc/NEWS.d/next/Core and Builtins/2024-03-12-20-31-57.gh-issue-113964.bJppzg.rst
new file mode 100644
index 00000000000000..ab370d4aa1baee
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-03-12-20-31-57.gh-issue-113964.bJppzg.rst
@@ -0,0 +1,2 @@
+Starting new threads and process creation through :func:`os.fork` are now
+only prevented once all non-daemon threads exit.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-03-21-09-57-57.gh-issue-117114.Qu-p55.rst b/Misc/NEWS.d/next/Core and Builtins/2024-03-21-09-57-57.gh-issue-117114.Qu-p55.rst
new file mode 100644
index 00000000000000..c9c028a8dda0e5
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-03-21-09-57-57.gh-issue-117114.Qu-p55.rst
@@ -0,0 +1 @@
+Make :func:`os.path.isdevdrive` available on all platforms. For those that do not offer Dev Drives, it will always return ``False``.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-03-21-12-10-11.gh-issue-117108._6jIrB.rst b/Misc/NEWS.d/next/Core and Builtins/2024-03-21-12-10-11.gh-issue-117108._6jIrB.rst
new file mode 100644
index 00000000000000..57ad9606b05e05
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-03-21-12-10-11.gh-issue-117108._6jIrB.rst
@@ -0,0 +1,3 @@
+The cycle GC now chooses the size of increments based on the total heap
+size, instead of the rate of object creation. This ensures that it can keep
+up with growing heaps.
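For the ``__static_attributes__`` entry above, the following is a small sketch of what the new field looks like from Python code. It is illustrative only: it assumes an interpreter that already includes the change, and the ordering of the reported names is not guaranteed.

```python
# Illustrative only: shows the kind of information the new
# __static_attributes__ field exposes.
class Point:
    def __init__(self, x, y):
        self.x = x
        self.y = y

    def move(self, dx):
        self.x += dx

# Attribute names used via self.<name> anywhere in the class body,
# collected by the compiler at class-creation time:
print(Point.__static_attributes__)   # e.g. ('x', 'y'), order unspecified
```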
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-03-25-12-51-12.gh-issue-117108.tNqDEo.rst b/Misc/NEWS.d/next/Core and Builtins/2024-03-25-12-51-12.gh-issue-117108.tNqDEo.rst
new file mode 100644
index 00000000000000..a28c83ee6efe40
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-03-25-12-51-12.gh-issue-117108.tNqDEo.rst
@@ -0,0 +1,3 @@
+Change the old space bit of objects in the young generation from 0 to
+gcstate->visited, so that any objects created during GC will have the old
+bit set correctly if they get moved into the old generation.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-03-25-17-04-54.gh-issue-99108.8bjdO6.rst b/Misc/NEWS.d/next/Core and Builtins/2024-03-25-17-04-54.gh-issue-99108.8bjdO6.rst
new file mode 100644
index 00000000000000..184273b42b7e9d
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-03-25-17-04-54.gh-issue-99108.8bjdO6.rst
@@ -0,0 +1,6 @@
+Updated the :mod:`hashlib` built-in `HACL\* project`_ C code from upstream
+that we use for many implementations when they are not present via OpenSSL
+in a given build. This also avoids the rare potential for a C symbol name
+one definition rule linking issue.
+
+.. _HACL\* project: https://github.com/hacl-star/hacl-star
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-03-26-17-22-38.gh-issue-117266.Kwh79O.rst b/Misc/NEWS.d/next/Core and Builtins/2024-03-26-17-22-38.gh-issue-117266.Kwh79O.rst
new file mode 100644
index 00000000000000..5055954676b9ab
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-03-26-17-22-38.gh-issue-117266.Kwh79O.rst
@@ -0,0 +1,2 @@
+Fix crashes for certain user-created subclasses of :class:`ast.AST`. Such
+classes are now expected to set the ``_field_types`` attribute.
diff --git a/Misc/NEWS.d/next/Documentation/2024-03-20-12-41-47.gh-issue-114099.ad_Ck9.rst b/Misc/NEWS.d/next/Documentation/2024-03-20-12-41-47.gh-issue-114099.ad_Ck9.rst
new file mode 100644
index 00000000000000..c6f403ee899162
--- /dev/null
+++ b/Misc/NEWS.d/next/Documentation/2024-03-20-12-41-47.gh-issue-114099.ad_Ck9.rst
@@ -0,0 +1 @@
+Add an iOS platform guide, and flag modules not available on iOS.
diff --git a/Misc/NEWS.d/next/Documentation/2024-03-20-15-12-37.gh-issue-115977.IMLi6K.rst b/Misc/NEWS.d/next/Documentation/2024-03-20-15-12-37.gh-issue-115977.IMLi6K.rst
new file mode 100644
index 00000000000000..5f04e93d9a862b
--- /dev/null
+++ b/Misc/NEWS.d/next/Documentation/2024-03-20-15-12-37.gh-issue-115977.IMLi6K.rst
@@ -0,0 +1 @@
+Remove compatibility references to Emscripten.
diff --git a/Misc/NEWS.d/next/Library/2019-08-27-01-03-26.gh-issue-66543._TRpYr.rst b/Misc/NEWS.d/next/Library/2019-08-27-01-03-26.gh-issue-66543._TRpYr.rst
new file mode 100644
index 00000000000000..62f7aa2490bb73
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2019-08-27-01-03-26.gh-issue-66543._TRpYr.rst
@@ -0,0 +1,4 @@
+Make :func:`mimetypes.guess_type` properly parse URLs with only a host
+name, URLs containing a fragment or query, and filenames with only a UNC
+sharepoint on Windows.
+Based on a patch by Dong-hee Na.
diff --git a/Misc/NEWS.d/next/Library/2022-06-22-14-45-32.gh-issue-89739.CqZcRL.rst b/Misc/NEWS.d/next/Library/2022-06-22-14-45-32.gh-issue-89739.CqZcRL.rst
new file mode 100644
index 00000000000000..0358c0107cb697
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2022-06-22-14-45-32.gh-issue-89739.CqZcRL.rst
@@ -0,0 +1 @@
+The :mod:`zipimport` module can now read ZIP64 files.
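The zipimport entry above only matters for archives that actually need the ZIP64 format (sizes, offsets, or member counts beyond the classic limits), but the import path is the same either way. A minimal sketch, using a deliberately tiny archive as a stand-in for a real ZIP64 file:

```python
# Build a small ZIP archive containing a module, then import it through
# zipimport by putting the archive on sys.path. allowZip64=True (the default)
# merely permits the ZIP64 format; a genuine ZIP64 archive would exceed the
# classic 4 GiB / 65535-member limits.
import sys
import zipfile

with zipfile.ZipFile("bundle.zip", "w", allowZip64=True) as zf:
    zf.writestr("hello.py", "GREETING = 'hi from a zip'\n")

sys.path.insert(0, "bundle.zip")   # zipimport handles ZIP path entries
import hello
print(hello.GREETING)
```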
diff --git a/Misc/NEWS.d/next/Library/2023-06-16-19-17-06.gh-issue-105866.0NBveV.rst b/Misc/NEWS.d/next/Library/2023-06-16-19-17-06.gh-issue-105866.0NBveV.rst
new file mode 100644
index 00000000000000..28eae1232742f7
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-06-16-19-17-06.gh-issue-105866.0NBveV.rst
@@ -0,0 +1 @@
+Fixed a ``_get_slots`` bug which caused an error when defining dataclasses with slots and a weakref_slot.
diff --git a/Misc/NEWS.d/next/Library/2023-12-11-00-51-51.gh-issue-112948.k-OKp5.rst b/Misc/NEWS.d/next/Library/2023-12-11-00-51-51.gh-issue-112948.k-OKp5.rst
new file mode 100644
index 00000000000000..0925a7caba6f07
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-12-11-00-51-51.gh-issue-112948.k-OKp5.rst
@@ -0,0 +1 @@
+Make completion of :mod:`pdb` similar to the Python REPL.
diff --git a/Misc/NEWS.d/next/Library/2023-12-28-22-52-45.gh-issue-113548.j6TJ7O.rst b/Misc/NEWS.d/next/Library/2023-12-28-22-52-45.gh-issue-113548.j6TJ7O.rst
new file mode 100644
index 00000000000000..972ddeb54822e2
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-12-28-22-52-45.gh-issue-113548.j6TJ7O.rst
@@ -0,0 +1 @@
+:mod:`pdb` now allows CLI arguments to ``pdb -m``.
diff --git a/Misc/NEWS.d/next/Library/2024-02-18-09-50-31.gh-issue-115627.HGchj0.rst b/Misc/NEWS.d/next/Library/2024-02-18-09-50-31.gh-issue-115627.HGchj0.rst
new file mode 100644
index 00000000000000..75d926ab59d557
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-02-18-09-50-31.gh-issue-115627.HGchj0.rst
@@ -0,0 +1,2 @@
+Fix the :mod:`ssl` module's error handling of a connection terminated by the peer.
+It now raises an OSError with the appropriate error code instead of an EOFError.
diff --git a/Misc/NEWS.d/next/Library/2024-03-05-19-56-29.gh-issue-71052.PMDK--.rst b/Misc/NEWS.d/next/Library/2024-03-05-19-56-29.gh-issue-71052.PMDK--.rst
new file mode 100644
index 00000000000000..ddca54c7c9ed7b
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-03-05-19-56-29.gh-issue-71052.PMDK--.rst
@@ -0,0 +1 @@
+Implement :func:`ctypes.util.find_library` on Android.
diff --git a/Misc/NEWS.d/next/Library/2024-03-07-11-10-27.gh-issue-114314.iEhAMH.rst b/Misc/NEWS.d/next/Library/2024-03-07-11-10-27.gh-issue-114314.iEhAMH.rst
new file mode 100644
index 00000000000000..c241d966f9087d
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-03-07-11-10-27.gh-issue-114314.iEhAMH.rst
@@ -0,0 +1,3 @@
+In :mod:`ctypes`, ctype data is now stored in type objects directly rather
+than in a dict subclass. This is an internal change that should not affect
+usage.
diff --git a/Misc/NEWS.d/next/Library/2024-03-12-19-32-17.gh-issue-71042.oI0Ron.rst b/Misc/NEWS.d/next/Library/2024-03-12-19-32-17.gh-issue-71042.oI0Ron.rst
new file mode 100644
index 00000000000000..3641cbb9b2fc1a
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-03-12-19-32-17.gh-issue-71042.oI0Ron.rst
@@ -0,0 +1,2 @@
+Add :func:`platform.android_ver`, which provides device and OS information
+on Android.
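A hypothetical usage sketch for the :func:`platform.android_ver` entry above. The field names used here (``release``, ``api_level``) are assumptions about the returned record, and the values are only meaningful when running on an Android build of Python:

```python
# Hedged sketch: guard on availability, since android_ver() only exists in
# interpreters that include this change; off Android the fields are expected
# to be empty/default values.
import platform

if hasattr(platform, "android_ver"):
    info = platform.android_ver()
    print(info.release, info.api_level)   # assumed field names
else:
    print("platform.android_ver() is not available in this build")
```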
diff --git a/Misc/NEWS.d/next/Library/2024-03-14-01-38-44.gh-issue-113171.VFnObz.rst b/Misc/NEWS.d/next/Library/2024-03-14-01-38-44.gh-issue-113171.VFnObz.rst new file mode 100644 index 00000000000000..f9a72473be4e2c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-03-14-01-38-44.gh-issue-113171.VFnObz.rst @@ -0,0 +1,9 @@ +Fixed various false positives and false negatives in + +* :attr:`ipaddress.IPv4Address.is_private` (see these docs for details) +* :attr:`ipaddress.IPv4Address.is_global` +* :attr:`ipaddress.IPv6Address.is_private` +* :attr:`ipaddress.IPv6Address.is_global` + +Also in the corresponding :class:`ipaddress.IPv4Network` and :class:`ipaddress.IPv6Network` +attributes. diff --git a/Misc/NEWS.d/next/Library/2024-03-14-09-38-51.gh-issue-116647.h0d_zj.rst b/Misc/NEWS.d/next/Library/2024-03-14-09-38-51.gh-issue-116647.h0d_zj.rst new file mode 100644 index 00000000000000..081f36bff91633 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-03-14-09-38-51.gh-issue-116647.h0d_zj.rst @@ -0,0 +1 @@ +Fix recursive child in dataclasses diff --git a/Misc/NEWS.d/next/Library/2024-03-18-14-36-50.gh-issue-116957.dTCs4f.rst b/Misc/NEWS.d/next/Library/2024-03-18-14-36-50.gh-issue-116957.dTCs4f.rst new file mode 100644 index 00000000000000..51fe04957e26bc --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-03-18-14-36-50.gh-issue-116957.dTCs4f.rst @@ -0,0 +1,3 @@ +configparser: Don't leave ConfigParser values in an invalid state (stored as +a list instead of a str) after an earlier read raised DuplicateSectionError +or DuplicateOptionError. diff --git a/Misc/NEWS.d/next/Library/2024-03-19-11-08-26.gh-issue-90872.ghys95.rst b/Misc/NEWS.d/next/Library/2024-03-19-11-08-26.gh-issue-90872.ghys95.rst new file mode 100644 index 00000000000000..ead68caa9fe88b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-03-19-11-08-26.gh-issue-90872.ghys95.rst @@ -0,0 +1,3 @@ +On Windows, :meth:`subprocess.Popen.wait` no longer calls +``WaitForSingleObject()`` with a negative timeout: pass ``0`` ms if the +timeout is negative. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Library/2024-03-19-14-35-57.gh-issue-114099.siNSpK.rst b/Misc/NEWS.d/next/Library/2024-03-19-14-35-57.gh-issue-114099.siNSpK.rst new file mode 100644 index 00000000000000..9b57cbb812db4a --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-03-19-14-35-57.gh-issue-114099.siNSpK.rst @@ -0,0 +1 @@ +Modify standard library to allow for iOS platform differences. diff --git a/Misc/NEWS.d/next/Library/2024-03-19-19-42-25.gh-issue-116987.ZVKUH1.rst b/Misc/NEWS.d/next/Library/2024-03-19-19-42-25.gh-issue-116987.ZVKUH1.rst new file mode 100644 index 00000000000000..f2da956f66c86b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-03-19-19-42-25.gh-issue-116987.ZVKUH1.rst @@ -0,0 +1 @@ +Fixed :func:`inspect.findsource` for class code objects. diff --git a/Misc/NEWS.d/next/Library/2024-03-20-23-07-58.gh-issue-109653.uu3lrX.rst b/Misc/NEWS.d/next/Library/2024-03-20-23-07-58.gh-issue-109653.uu3lrX.rst new file mode 100644 index 00000000000000..38d7634b54c2fe --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-03-20-23-07-58.gh-issue-109653.uu3lrX.rst @@ -0,0 +1,2 @@ +Deferred select imports in importlib.metadata and importlib.resources for a +14% speedup. 
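For the ipaddress entry above, the attributes involved can be exercised directly. The two addresses below have uncontroversial classifications; the actual fixes concern edge-case ranges (special-use and reserved blocks) not shown here:

```python
# Quick check of the attributes touched by the ipaddress change above.
import ipaddress

print(ipaddress.ip_address("192.168.1.10").is_private)  # True: RFC 1918 range
print(ipaddress.ip_address("8.8.8.8").is_global)        # True: public address
```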
diff --git a/Misc/NEWS.d/next/Library/2024-03-21-17-07-38.gh-issue-117084.w1mTpT.rst b/Misc/NEWS.d/next/Library/2024-03-21-17-07-38.gh-issue-117084.w1mTpT.rst new file mode 100644 index 00000000000000..6e7790e926b9d2 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-03-21-17-07-38.gh-issue-117084.w1mTpT.rst @@ -0,0 +1,2 @@ +Fix :mod:`zipfile` extraction for directory entries with the name containing +backslashes on Windows. diff --git a/Misc/NEWS.d/next/Library/2024-03-23-13-40-13.gh-issue-112383.XuHf3G.rst b/Misc/NEWS.d/next/Library/2024-03-23-13-40-13.gh-issue-112383.XuHf3G.rst new file mode 100644 index 00000000000000..931e615c2b86c5 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-03-23-13-40-13.gh-issue-112383.XuHf3G.rst @@ -0,0 +1 @@ +Fix :mod:`dis` module's handling of ``ENTER_EXECUTOR`` instructions. diff --git a/Misc/NEWS.d/next/Library/2024-03-23-14-26-18.gh-issue-117178.vTisTG.rst b/Misc/NEWS.d/next/Library/2024-03-23-14-26-18.gh-issue-117178.vTisTG.rst new file mode 100644 index 00000000000000..f9c53ebbfc3c96 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-03-23-14-26-18.gh-issue-117178.vTisTG.rst @@ -0,0 +1,2 @@ +Fix regression in lazy loading of self-referential modules, introduced in +gh-114781. diff --git a/Misc/NEWS.d/next/Library/2024-03-25-21-15-56.gh-issue-117225.oOaZXb.rst b/Misc/NEWS.d/next/Library/2024-03-25-21-15-56.gh-issue-117225.oOaZXb.rst new file mode 100644 index 00000000000000..b6c4850f608c2a --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-03-25-21-15-56.gh-issue-117225.oOaZXb.rst @@ -0,0 +1,2 @@ +doctest: only print "and X failed" when non-zero, don't pluralise "1 items". +Patch by Hugo van Kemenade. diff --git a/Misc/NEWS.d/next/Library/2024-03-26-11-48-39.gh-issue-98966.SayV9y.rst b/Misc/NEWS.d/next/Library/2024-03-26-11-48-39.gh-issue-98966.SayV9y.rst new file mode 100644 index 00000000000000..e819a1e9a0aba0 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-03-26-11-48-39.gh-issue-98966.SayV9y.rst @@ -0,0 +1,2 @@ +In :mod:`subprocess`, raise a more informative message when +``stdout=STDOUT``. diff --git a/Misc/NEWS.d/next/Library/2024-03-27-21-05-52.gh-issue-117310.Bt2wox.rst b/Misc/NEWS.d/next/Library/2024-03-27-21-05-52.gh-issue-117310.Bt2wox.rst new file mode 100644 index 00000000000000..429b890b8b609a --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-03-27-21-05-52.gh-issue-117310.Bt2wox.rst @@ -0,0 +1,4 @@ +Fixed an unlikely early & extra ``Py_DECREF`` triggered crash in :mod:`ssl` +when creating a new ``_ssl._SSLContext`` if CPython was built implausibly such +that the default cipher list is empty **or** the SSL library it was linked +against reports a failure from its C ``SSL_CTX_set_cipher_list()`` API. diff --git a/Misc/NEWS.d/next/Library/2024-03-28-13-54-20.gh-issue-88014.zJz31I.rst b/Misc/NEWS.d/next/Library/2024-03-28-13-54-20.gh-issue-88014.zJz31I.rst new file mode 100644 index 00000000000000..f8bb784e39fbb6 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-03-28-13-54-20.gh-issue-88014.zJz31I.rst @@ -0,0 +1,3 @@ +In documentation of :class:`gzip.GzipFile` in module gzip, explain data type +of optional constructor argument *mtime*, and recommend ``mtime = 0`` for +generating deterministic streams. 
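The gzip documentation entry above recommends ``mtime = 0`` because the gzip header otherwise embeds the current timestamp, making the output differ between runs even for identical input. A minimal sketch of the deterministic-stream behaviour:

```python
# With a fixed mtime, the same input compresses to byte-identical output
# across runs, which is what "deterministic stream" means here.
import gzip

data = b"reproducible payload"
first = gzip.compress(data, mtime=0)
second = gzip.compress(data, mtime=0)
print(first == second)   # True
```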
diff --git a/Misc/NEWS.d/next/Tests/2024-03-20-14-19-32.gh-issue-117089.WwR1Z1.rst b/Misc/NEWS.d/next/Tests/2024-03-20-14-19-32.gh-issue-117089.WwR1Z1.rst new file mode 100644 index 00000000000000..ab0baec8c96035 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2024-03-20-14-19-32.gh-issue-117089.WwR1Z1.rst @@ -0,0 +1 @@ +Consolidated tests for importlib.metadata in their own ``metadata`` package. diff --git a/Misc/NEWS.d/next/Tests/2024-03-21-11-32-29.gh-issue-116333.F-9Ram.rst b/Misc/NEWS.d/next/Tests/2024-03-21-11-32-29.gh-issue-116333.F-9Ram.rst new file mode 100644 index 00000000000000..3fdb6bb3bd7af7 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2024-03-21-11-32-29.gh-issue-116333.F-9Ram.rst @@ -0,0 +1,3 @@ +Tests of TLS related things (error codes, etc) were updated to be more +lenient about specific error message strings and behaviors as seen in the +BoringSSL and AWS-LC forks of OpenSSL. diff --git a/Misc/NEWS.d/next/Tests/2024-03-24-23-49-25.gh-issue-117187.eMLT5n.rst b/Misc/NEWS.d/next/Tests/2024-03-24-23-49-25.gh-issue-117187.eMLT5n.rst new file mode 100644 index 00000000000000..0c0b0e0f443396 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2024-03-24-23-49-25.gh-issue-117187.eMLT5n.rst @@ -0,0 +1 @@ +Fix XML tests for vanilla Expat <2.6.0. diff --git a/Misc/NEWS.d/next/Tests/2024-03-25-21-31-49.gh-issue-83434.U7Z8cY.rst b/Misc/NEWS.d/next/Tests/2024-03-25-21-31-49.gh-issue-83434.U7Z8cY.rst new file mode 100644 index 00000000000000..7b7a8fcf53bb3c --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2024-03-25-21-31-49.gh-issue-83434.U7Z8cY.rst @@ -0,0 +1,3 @@ +Disable JUnit XML output (``--junit-xml=FILE`` command line option) in +regrtest when hunting for reference leaks (``-R`` option). Patch by Victor +Stinner. diff --git a/Misc/NEWS.d/next/Windows/2024-02-24-23-03-43.gh-issue-91227.sL4zWC.rst b/Misc/NEWS.d/next/Windows/2024-02-24-23-03-43.gh-issue-91227.sL4zWC.rst new file mode 100644 index 00000000000000..8e53afdd619001 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2024-02-24-23-03-43.gh-issue-91227.sL4zWC.rst @@ -0,0 +1 @@ +Fix the asyncio ProactorEventLoop implementation so that sending a datagram to an address that is not listening does not prevent receiving any more datagrams. diff --git a/Misc/NEWS.d/next/Windows/2024-03-14-01-58-22.gh-issue-116773.H2UldY.rst b/Misc/NEWS.d/next/Windows/2024-03-14-01-58-22.gh-issue-116773.H2UldY.rst new file mode 100644 index 00000000000000..8fc3fe80041d26 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2024-03-14-01-58-22.gh-issue-116773.H2UldY.rst @@ -0,0 +1 @@ +Fix instances of ``<_overlapped.Overlapped object at 0xXXX> still has pending operation at deallocation, the process may crash``. 
diff --git a/Misc/python-config.sh.in b/Misc/python-config.sh.in index eb02223ddcd2c3..c3c0b34fc1451d 100644 --- a/Misc/python-config.sh.in +++ b/Misc/python-config.sh.in @@ -46,7 +46,7 @@ LIBM="@LIBM@" LIBC="@LIBC@" SYSLIBS="$LIBM $LIBC" ABIFLAGS="@ABIFLAGS@" -LIBS="@LIBPYTHON@ @LIBS@ $SYSLIBS" +LIBS="@MODULE_LDFLAGS@ @LIBS@ $SYSLIBS" LIBS_EMBED="-lpython${VERSION}${ABIFLAGS} @LIBS@ $SYSLIBS" BASECFLAGS="@BASECFLAGS@" LDLIBRARY="@LDLIBRARY@" diff --git a/Misc/python.pc.in b/Misc/python.pc.in index 027dba38585a89..c2c740e82b1fde 100644 --- a/Misc/python.pc.in +++ b/Misc/python.pc.in @@ -9,5 +9,5 @@ Description: Build a C extension for Python Requires: Version: @VERSION@ Libs.private: @LIBS@ -Libs: -L${libdir} @LIBPYTHON@ +Libs: -L${libdir} @MODULE_LDFLAGS@ Cflags: -I${includedir}/python@VERSION@@ABIFLAGS@ diff --git a/Misc/sbom.spdx.json b/Misc/sbom.spdx.json index 7e9aa6dd82e619..07db46b09ae5f5 100644 --- a/Misc/sbom.spdx.json +++ b/Misc/sbom.spdx.json @@ -300,11 +300,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "f77449b2b4eb99f1da0938633cc558baf9c444fb" + "checksumValue": "f8ba39b46ebdfa7d031d9c33130c6ded680a8120" }, { "algorithm": "SHA256", - "checksumValue": "0f252967debca5b35362ca53951ea16ca8bb97a19a1d24f6695f44d50010859e" + "checksumValue": "f71cf6a0e8f09354c2af2c785a1d36e0cba7613a589be01ca8a3d8478f4c8874" } ], "fileName": "Modules/_hacl/Hacl_Hash_MD5.c" @@ -314,11 +314,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "c24e6779a91c840f3d65d24abbce225b608b676e" + "checksumValue": "eaaab54cea2b0bb8ec0eedf0b373d42f1a0f8f6c" }, { "algorithm": "SHA256", - "checksumValue": "9cd062e782801013e3cacaba583e44e1b5e682e217d20208d5323354d42011f1" + "checksumValue": "9a02e2a6e163515ea0228a859d5e55c1f57b11fae5908c42f9f9814ce9bca230" } ], "fileName": "Modules/_hacl/Hacl_Hash_MD5.h" @@ -328,11 +328,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "560f6ff541b5eff480ea047b147f4212bb0db7ed" + "checksumValue": "f4f42faf8da78a230199f649c0f2a1b865799a31" }, { "algorithm": "SHA256", - "checksumValue": "0ade3ab264e912d7b4e5cdcf773db8c63e4440540d295922d74b06bcfc74c77a" + "checksumValue": "5b29bd9951646861e0e19427be5d923a5bab7a4516824ccc068f696469195eec" } ], "fileName": "Modules/_hacl/Hacl_Hash_SHA1.c" @@ -342,11 +342,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "853b77d45379146faaeac5fe899b28db386ad13c" + "checksumValue": "722b57139737ceeb88e41d3839e6f7d70578741b" }, { "algorithm": "SHA256", - "checksumValue": "b13eb14f91582703819235ea7c8f807bb93e4f1e6b695499dc1d86021dc39e72" + "checksumValue": "5640295c790d56b1b4df147d6a6c58803b1845cd7d93365bf7cc7b75ba3cacd5" } ], "fileName": "Modules/_hacl/Hacl_Hash_SHA1.h" @@ -356,11 +356,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "667120b6100c946cdaa442f1173c723339923071" + "checksumValue": "f2aa3ed6acce621c162bc3a0592780ce5aa3bc4d" }, { "algorithm": "SHA256", - "checksumValue": "b189459b863341a3a9c5c78c0208b6554a2f2ac26e0748fbd4432a91db21fae6" + "checksumValue": "30638efb75c8b185bb09c3df6977e3f3c5d21a1e696218cf7ade6bc4d5201b31" } ], "fileName": "Modules/_hacl/Hacl_Hash_SHA2.c" @@ -370,11 +370,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "81db38b0b920e63ec33c7109d1144c35cf091da0" + "checksumValue": "4903e10291d07367be3bc283935bc52926e57ba1" }, { "algorithm": "SHA256", - "checksumValue": "631c9ba19c1c2c835bb63d3f2f22b8d76fb535edfed3c254ff2a52f12af3fe61" + "checksumValue": "093d7693084af0999d2a13d207311d74b5bdfdc9c08447ed4a979e3f7505ae6b" } ], "fileName": 
"Modules/_hacl/Hacl_Hash_SHA2.h" @@ -384,11 +384,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "9c832b98a2f2a68202d2da016fb718965d7b7602" + "checksumValue": "66644fd3325c414fef7d985536bb477c849c8f9a" }, { "algorithm": "SHA256", - "checksumValue": "38d350d1184238966cfa821a59ae00343f362182b6c2fbea7f2651763d757fb7" + "checksumValue": "17c0db96d40d1849f02546d5f55428fa89b61b07748d5b5df45cec25c5f29c0f" } ], "fileName": "Modules/_hacl/Hacl_Hash_SHA3.c" @@ -398,11 +398,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "ecc766fb6f7ee85e902b593b61b41e5a728fca34" + "checksumValue": "580e9a73813281e99a98871380b3726576295a96" }, { "algorithm": "SHA256", - "checksumValue": "bae290a94366a2460f51e8468144baaade91d9048db111e10d2e2ffddc3f98cf" + "checksumValue": "d8d4d14bbc3a561a4e590d9b18b326e6a8095efb12423edbd949cf3c00953621" } ], "fileName": "Modules/_hacl/Hacl_Hash_SHA3.h" @@ -426,11 +426,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "2ea61d6a236147462045f65c20311819d74db80c" + "checksumValue": "12c0c680c93b8112b97cc575faacbb3cbbd315b1" }, { "algorithm": "SHA256", - "checksumValue": "2c22b4d49ba06d6a3053cdc66405bd5ae953a28fcfed1ab164e8f5e0f6e2fb8b" + "checksumValue": "455e94f24a0900deda7e6e36f4714e4253d32cea077f97e23f90c569a717bc48" } ], "fileName": "Modules/_hacl/include/krml/FStar_UInt128_Verified.h" @@ -440,11 +440,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "1a647d841180ac8ca667afa968c353425e81ad0d" + "checksumValue": "62b44acbbdc77b749c36c242cda027bacf7679f8" }, { "algorithm": "SHA256", - "checksumValue": "e5d1c5854833bec7ea02e227ec35bd7b49c5fb9e0f339efa0dd83e1595f722d4" + "checksumValue": "65decdb74c24049aa19430462a51219250cfc65d8c162778e42df88b3142fa42" } ], "fileName": "Modules/_hacl/include/krml/FStar_UInt_8_16_32_64.h" @@ -468,11 +468,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "903c9eb76b01f3a95c04c3bc841c2fb71dea5403" + "checksumValue": "ba64394679643c6d4ceaf6bd2616d48d12f996a7" }, { "algorithm": "SHA256", - "checksumValue": "08ec602c7f90a1540389c0cfc95769fa7fec251e7ca143ef83c0b9f7afcf89a7" + "checksumValue": "d16a59f37a1d4982626870e370889eb9d332a9ad035661b8062f549fc734d061" } ], "fileName": "Modules/_hacl/include/krml/internal/target.h" @@ -510,11 +510,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "5dd4ee3c835a0d176a6e9fecbe9752fd1474ff41" + "checksumValue": "60f02d21f045c8a4c2b6b84a8f7e023d9490c8e5" }, { "algorithm": "SHA256", - "checksumValue": "d82ef594cba44203576d67b047240316bb3c542912ebb7034afa1e07888cec56" + "checksumValue": "370d8ef9c48cb55472ece11e12eaf94c58118de3f5515b6df1c130b696597828" } ], "fileName": "Modules/_hacl/internal/Hacl_Hash_MD5.h" @@ -524,11 +524,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "515b3082eb7c30597773e1c63ec46688f6da3634" + "checksumValue": "6346c30a140e7d3010c98fe19d14fa229a54eb16" }, { "algorithm": "SHA256", - "checksumValue": "10aacf847006b8e0dfb64d5c327443f954db6718b4aec712fb3268230df6a752" + "checksumValue": "ab52c6092bdbbfc9884f841bf4824016792ffa96167577cbe0df00dd96f56a34" } ], "fileName": "Modules/_hacl/internal/Hacl_Hash_SHA1.h" @@ -538,11 +538,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "a044ec12b70ba97b67e9a312827d6270452a20ca" + "checksumValue": "0018e084339058dd454b4e49d10d236b4f896bf8" }, { "algorithm": "SHA256", - "checksumValue": "a1426b54fa7273ba5b50817c25b2b26fc85c4d1befb14092cd27dc4c99439463" + "checksumValue": "10e959a92b3288a6165a404c8fae2bbcd7fb00a9abbae2b7809fa55d6fe9068d" } ], "fileName": 
"Modules/_hacl/internal/Hacl_Hash_SHA2.h" @@ -552,11 +552,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "cfb7b520c39a73cb84c541d370455f92b998781f" + "checksumValue": "eae8a5226bf993f07584cf4c0d269022328cf3d4" }, { "algorithm": "SHA256", - "checksumValue": "fd41997f9e96b3c9a3337b1b51fab965a1e21b0c16f353d156f1a1fa00709fbf" + "checksumValue": "6853125de10d0f605e9bc3a3dbbd7254713709e9893cc3f69929ea8d3f254934" } ], "fileName": "Modules/_hacl/internal/Hacl_Hash_SHA3.h" @@ -566,11 +566,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "f5c7b3ed911af6c8d582e8b3714b0c36195dc994" + "checksumValue": "d8063060cc707a7ac70108a15934d33e7b448db6" }, { "algorithm": "SHA256", - "checksumValue": "07de72398b12957e014e97b9ac197bceef12d6d6505c2bfe8b23ee17b94ec5fa" + "checksumValue": "347dfdf856ed1e584d124d6709b51267598ea5b37c1a2e03beeb358c978beada" } ], "fileName": "Modules/_hacl/python_hacl_namespaces.h" @@ -1584,14 +1584,14 @@ "checksums": [ { "algorithm": "SHA256", - "checksumValue": "c23ac158b238c368389dc86bfc315263e5c0e57785da74144aea2cab9a3d51a2" + "checksumValue": "e31e4ca10da91c585793c0eaf1b98aee3cb43e3a58d3d8d478593e5a6bd82927" } ], - "downloadLocation": "https://github.com/hacl-star/hacl-star/archive/521af282fdf6d60227335120f18ae9309a4b8e8c.zip", + "downloadLocation": "https://github.com/hacl-star/hacl-star/archive/bb3d0dc8d9d15a5cd51094d5b69e70aa09005ff0.zip", "externalRefs": [ { "referenceCategory": "SECURITY", - "referenceLocator": "cpe:2.3:a:hacl-star:hacl-star:521af282fdf6d60227335120f18ae9309a4b8e8c:*:*:*:*:*:*:*", + "referenceLocator": "cpe:2.3:a:hacl-star:hacl-star:bb3d0dc8d9d15a5cd51094d5b69e70aa09005ff0:*:*:*:*:*:*:*", "referenceType": "cpe23Type" } ], @@ -1599,7 +1599,7 @@ "name": "hacl-star", "originator": "Organization: HACL* Developers", "primaryPackagePurpose": "SOURCE", - "versionInfo": "521af282fdf6d60227335120f18ae9309a4b8e8c" + "versionInfo": "bb3d0dc8d9d15a5cd51094d5b69e70aa09005ff0" }, { "SPDXID": "SPDXRef-PACKAGE-libb2", diff --git a/Misc/stable_abi.toml b/Misc/stable_abi.toml index c68adf8db079f9..14dda7db1c323e 100644 --- a/Misc/stable_abi.toml +++ b/Misc/stable_abi.toml @@ -2500,3 +2500,9 @@ added = '3.13' [function.PyType_GetModuleName] added = '3.13' +[function.Py_GetConstant] + added = '3.13' +[function.Py_GetConstantBorrowed] + added = '3.13' +[function.PyType_GetModuleByDef] + added = '3.13' diff --git a/Modules/Setup.stdlib.in b/Modules/Setup.stdlib.in index deada66cf1a807..ff5c05f88d0d40 100644 --- a/Modules/Setup.stdlib.in +++ b/Modules/Setup.stdlib.in @@ -162,8 +162,8 @@ @MODULE__XXTESTFUZZ_TRUE@_xxtestfuzz _xxtestfuzz/_xxtestfuzz.c _xxtestfuzz/fuzzer.c @MODULE__TESTBUFFER_TRUE@_testbuffer _testbuffer.c @MODULE__TESTINTERNALCAPI_TRUE@_testinternalcapi _testinternalcapi.c _testinternalcapi/test_lock.c _testinternalcapi/pytime.c _testinternalcapi/set.c _testinternalcapi/test_critical_sections.c -@MODULE__TESTCAPI_TRUE@_testcapi _testcapimodule.c _testcapi/vectorcall.c _testcapi/heaptype.c _testcapi/abstract.c _testcapi/unicode.c _testcapi/dict.c _testcapi/set.c _testcapi/list.c _testcapi/tuple.c _testcapi/getargs.c _testcapi/datetime.c _testcapi/docstring.c _testcapi/mem.c _testcapi/watchers.c _testcapi/long.c _testcapi/float.c _testcapi/complex.c _testcapi/numbers.c _testcapi/structmember.c _testcapi/exceptions.c _testcapi/code.c _testcapi/buffer.c _testcapi/pyatomic.c _testcapi/file.c _testcapi/codec.c _testcapi/immortal.c _testcapi/gc.c _testcapi/hash.c _testcapi/time.c -@MODULE__TESTLIMITEDCAPI_TRUE@_testlimitedcapi _testlimitedcapi.c 
_testlimitedcapi/bytearray.c _testlimitedcapi/bytes.c _testlimitedcapi/heaptype_relative.c _testlimitedcapi/pyos.c _testlimitedcapi/sys.c _testlimitedcapi/vectorcall_limited.c +@MODULE__TESTCAPI_TRUE@_testcapi _testcapimodule.c _testcapi/vectorcall.c _testcapi/heaptype.c _testcapi/abstract.c _testcapi/unicode.c _testcapi/dict.c _testcapi/set.c _testcapi/list.c _testcapi/tuple.c _testcapi/getargs.c _testcapi/datetime.c _testcapi/docstring.c _testcapi/mem.c _testcapi/watchers.c _testcapi/long.c _testcapi/float.c _testcapi/complex.c _testcapi/numbers.c _testcapi/structmember.c _testcapi/exceptions.c _testcapi/code.c _testcapi/buffer.c _testcapi/pyatomic.c _testcapi/file.c _testcapi/codec.c _testcapi/immortal.c _testcapi/gc.c _testcapi/hash.c _testcapi/time.c _testcapi/bytes.c +@MODULE__TESTLIMITEDCAPI_TRUE@_testlimitedcapi _testlimitedcapi.c _testlimitedcapi/abstract.c _testlimitedcapi/bytearray.c _testlimitedcapi/bytes.c _testlimitedcapi/complex.c _testlimitedcapi/dict.c _testlimitedcapi/float.c _testlimitedcapi/heaptype_relative.c _testlimitedcapi/list.c _testlimitedcapi/long.c _testlimitedcapi/object.c _testlimitedcapi/pyos.c _testlimitedcapi/set.c _testlimitedcapi/sys.c _testlimitedcapi/unicode.c _testlimitedcapi/vectorcall_limited.c @MODULE__TESTCLINIC_TRUE@_testclinic _testclinic.c @MODULE__TESTCLINIC_LIMITED_TRUE@_testclinic_limited _testclinic_limited.c diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c index 94245ae41afffc..af094a0fb59e27 100644 --- a/Modules/_ctypes/_ctypes.c +++ b/Modules/_ctypes/_ctypes.c @@ -2,7 +2,7 @@ ToDo: Get rid of the checker (and also the converters) field in PyCFuncPtrObject and - StgDictObject, and replace them by slot functions in StgDictObject. + StgInfo, and replace them by slot functions in StgInfo. think about a buffer-like object (memory? bytes?) @@ -36,7 +36,6 @@ PyCData_Type Simple_Type __new__(), __init__(), _as_parameter_ PyCField_Type -PyCStgDict_Type ============================================================================== @@ -82,7 +81,6 @@ bytes(cdata) */ /* - * PyCStgDict_Type * PyCStructType_Type * UnionType_Type * PyCPointerType_Type @@ -128,20 +126,7 @@ bytes(cdata) #include "pycore_long.h" // _PyLong_GetZero() -static PyTypeObject Union_Type; -static PyTypeObject Struct_Type; -static PyTypeObject Simple_Type; - -ctypes_state global_state = { - .PyCStgDict_Type = &PyCStgDict_Type, - .PyCData_Type = &PyCData_Type, - .Struct_Type = &Struct_Type, - .Union_Type = &Union_Type, - .PyCArray_Type = &PyCArray_Type, - .Simple_Type = &Simple_Type, - .PyCPointer_Type = &PyCPointer_Type, - .PyCFuncPtr_Type = &PyCFuncPtr_Type, -}; +ctypes_state global_state = {0}; PyObject *PyExc_ArgError = NULL; @@ -459,12 +444,121 @@ static PyType_Spec structparam_spec = { .slots = structparam_slots, }; +/* + CType_Type - a base metaclass. Its instances (classes) have a StgInfo. 
+ */ + +static int +CType_Type_traverse(PyObject *self, visitproc visit, void *arg) +{ + Py_VISIT(Py_TYPE(self)); + return 0; +} + +static void +_ctype_clear_stginfo(StgInfo *info) +{ + assert(info); + Py_CLEAR(info->proto); + Py_CLEAR(info->argtypes); + Py_CLEAR(info->converters); + Py_CLEAR(info->restype); + Py_CLEAR(info->checker); +} + +static int +CType_Type_clear(PyObject *self) +{ + ctypes_state *st = GLOBAL_STATE(); + if (st && st->PyCType_Type) { + StgInfo *info; + if (PyStgInfo_FromType(st, self, &info) < 0) { + return -1; + } + if (info) { + _ctype_clear_stginfo(info); + } + } + return 0; +} + +static void +CType_Type_dealloc(PyObject *self) +{ + ctypes_state *st = GLOBAL_STATE(); + + if (st && st->PyCType_Type) { + StgInfo *info; + if (PyStgInfo_FromType(st, self, &info) < 0) { + PyErr_WriteUnraisable(self); + } + if (info) { + PyMem_Free(info->ffi_type_pointer.elements); + info->ffi_type_pointer.elements = NULL; + PyMem_Free(info->format); + info->format = NULL; + PyMem_Free(info->shape); + info->shape = NULL; + _ctype_clear_stginfo(info); + } + } + + PyTypeObject *tp = Py_TYPE(self); + PyType_Type.tp_dealloc(self); + Py_DECREF(tp); +} + +static PyObject * +CType_Type_sizeof(PyObject *self) +{ + Py_ssize_t size = Py_TYPE(self)->tp_basicsize; + size += Py_TYPE(self)->tp_itemsize * Py_SIZE(self); + + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromType(st, self, &info) < 0) { + return NULL; + } + if (info) { + if (info->format) { + size += strlen(info->format) + 1; + } + if (info->ffi_type_pointer.elements) { + size += (info->length + 1) * sizeof(ffi_type *); + } + size += info->ndim * sizeof(Py_ssize_t); + } + + return PyLong_FromSsize_t(size); +} + +static PyMethodDef ctype_methods[] = { + {"__sizeof__", _PyCFunction_CAST(CType_Type_sizeof), + METH_NOARGS, PyDoc_STR("Return memory consumption of the type object.")}, + {0}, +}; + +static PyType_Slot ctype_type_slots[] = { + {Py_tp_traverse, CType_Type_traverse}, + {Py_tp_clear, CType_Type_clear}, + {Py_tp_dealloc, CType_Type_dealloc}, + {Py_tp_methods, ctype_methods}, + {0, NULL}, +}; + +static PyType_Spec pyctype_type_spec = { + .name = "_ctypes.CType_Type", + .basicsize = -(Py_ssize_t)sizeof(StgInfo), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_IMMUTABLETYPE | + Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_BASETYPE ), + .slots = ctype_type_slots, +}; /* PyCStructType_Type - a meta type/class. Creating a new class using this one as - __metaclass__ will call the constructor StructUnionType_new. It replaces the - tp_dict member with a new instance of StgDict, and initializes the C - accessible fields somehow. + __metaclass__ will call the constructor StructUnionType_new. + It initializes the C accessible fields somehow. 
*/ static PyCArgObject * @@ -472,7 +566,6 @@ StructUnionType_paramfunc(CDataObject *self) { PyCArgObject *parg; PyObject *obj; - StgDictObject *stgdict; void *ptr; if ((size_t)self->b_size > sizeof(void*)) { @@ -507,104 +600,104 @@ StructUnionType_paramfunc(CDataObject *self) return NULL; } + ctypes_state *st = GLOBAL_STATE(); + StgInfo *stginfo; + if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) { + Py_DECREF(obj); + return NULL; + } + assert(stginfo); /* Cannot be NULL for structure/union instances */ + parg->tag = 'V'; - stgdict = PyObject_stgdict((PyObject *)self); - assert(stgdict); /* Cannot be NULL for structure/union instances */ - parg->pffi_type = &stgdict->ffi_type_pointer; + parg->pffi_type = &stginfo->ffi_type_pointer; parg->value.p = ptr; parg->size = self->b_size; parg->obj = obj; return parg; } -static PyObject * -StructUnionType_new(PyTypeObject *type, PyObject *args, PyObject *kwds, int isStruct) +static int +StructUnionType_init(PyObject *self, PyObject *args, PyObject *kwds, int isStruct) { - PyTypeObject *result; PyObject *fields; - StgDictObject *dict; - /* create the new instance (which is a class, - since we are a metatype!) */ - result = (PyTypeObject *)PyType_Type.tp_new(type, args, kwds); - if (!result) - return NULL; + PyObject *attrdict = PyType_GetDict((PyTypeObject *)self); + if (!attrdict) { + return -1; + } /* keep this for bw compatibility */ - int r = PyDict_Contains(result->tp_dict, &_Py_ID(_abstract_)); + int r = PyDict_Contains(attrdict, &_Py_ID(_abstract_)); if (r > 0) { - return (PyObject *)result; + Py_DECREF(attrdict); + return 0; } if (r < 0) { - Py_DECREF(result); - return NULL; + Py_DECREF(attrdict); + return -1; } ctypes_state *st = GLOBAL_STATE(); - dict = (StgDictObject *)_PyObject_CallNoArgs((PyObject *)st->PyCStgDict_Type); - if (!dict) { - Py_DECREF(result); - return NULL; + StgInfo *info = PyStgInfo_Init(st, (PyTypeObject *)self); + if (!info) { + Py_DECREF(attrdict); + return -1; } if (!isStruct) { - dict->flags |= TYPEFLAG_HASUNION; - } - /* replace the class dict by our updated stgdict, which holds info - about storage requirements of the instances */ - if (-1 == PyDict_Update((PyObject *)dict, result->tp_dict)) { - Py_DECREF(result); - Py_DECREF((PyObject *)dict); - return NULL; + info->flags |= TYPEFLAG_HASUNION; } - Py_SETREF(result->tp_dict, (PyObject *)dict); - dict->format = _ctypes_alloc_format_string(NULL, "B"); - if (dict->format == NULL) { - Py_DECREF(result); - return NULL; + + info->format = _ctypes_alloc_format_string(NULL, "B"); + if (info->format == NULL) { + Py_DECREF(attrdict); + return -1; } - dict->paramfunc = StructUnionType_paramfunc; + info->paramfunc = StructUnionType_paramfunc; - if (PyDict_GetItemRef((PyObject *)dict, &_Py_ID(_fields_), &fields) < 0) { - Py_DECREF(result); - return NULL; + if (PyDict_GetItemRef((PyObject *)attrdict, &_Py_ID(_fields_), &fields) < 0) { + Py_DECREF(attrdict); + return -1; } + Py_CLEAR(attrdict); if (fields) { - if (PyObject_SetAttr((PyObject *)result, &_Py_ID(_fields_), fields) < 0) { - Py_DECREF(result); + if (PyObject_SetAttr(self, &_Py_ID(_fields_), fields) < 0) { Py_DECREF(fields); - return NULL; + return -1; } Py_DECREF(fields); - return (PyObject *)result; + return 0; } else { - StgDictObject *basedict = PyType_stgdict((PyObject *)result->tp_base); - - if (basedict == NULL) { - return (PyObject *)result; + StgInfo *baseinfo; + if (PyStgInfo_FromType(st, (PyObject *)((PyTypeObject *)self)->tp_base, + &baseinfo) < 0) { + return -1; } - /* copy base dict */ - if 
(-1 == PyCStgDict_clone(dict, basedict)) { - Py_DECREF(result); - return NULL; + if (baseinfo == NULL) { + return 0; } - dict->flags &= ~DICTFLAG_FINAL; /* clear the 'final' flag in the subclass dict */ - basedict->flags |= DICTFLAG_FINAL; /* set the 'final' flag in the baseclass dict */ - return (PyObject *)result; + + /* copy base info */ + if (PyCStgInfo_clone(info, baseinfo) < 0) { + return -1; + } + info->flags &= ~DICTFLAG_FINAL; /* clear the 'final' flag in the subclass info */ + baseinfo->flags |= DICTFLAG_FINAL; /* set the 'final' flag in the baseclass info */ } + return 0; } -static PyObject * -PyCStructType_new(PyTypeObject *type, PyObject *args, PyObject *kwds) +static int +PyCStructType_init(PyObject *self, PyObject *args, PyObject *kwds) { - return StructUnionType_new(type, args, kwds, 1); + return StructUnionType_init(self, args, kwds, 1); } -static PyObject * -UnionType_new(PyTypeObject *type, PyObject *args, PyObject *kwds) +static int +UnionType_init(PyObject *self, PyObject *args, PyObject *kwds) { - return StructUnionType_new(type, args, kwds, 0); + return StructUnionType_init(self, args, kwds, 0); } PyDoc_STRVAR(from_address_doc, @@ -640,8 +733,12 @@ CDataType_from_buffer(PyObject *type, PyObject *args) Py_buffer *buffer; Py_ssize_t offset = 0; - StgDictObject *dict = PyType_stgdict(type); - if (!dict) { + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromType(st, type, &info) < 0) { + return NULL; + } + if (!info) { PyErr_SetString(PyExc_TypeError, "abstract class"); return NULL; } @@ -676,11 +773,11 @@ CDataType_from_buffer(PyObject *type, PyObject *args) return NULL; } - if (dict->size > buffer->len - offset) { + if (info->size > buffer->len - offset) { PyErr_Format(PyExc_ValueError, "Buffer size too small " "(%zd instead of at least %zd bytes)", - buffer->len, dict->size + offset); + buffer->len, info->size + offset); Py_DECREF(mv); return NULL; } @@ -717,8 +814,13 @@ CDataType_from_buffer_copy(PyObject *type, PyObject *args) Py_buffer buffer; Py_ssize_t offset = 0; PyObject *result; - StgDictObject *dict = PyType_stgdict(type); - if (!dict) { + + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromType(st, type, &info) < 0) { + return NULL; + } + if (!info) { PyErr_SetString(PyExc_TypeError, "abstract class"); return NULL; } @@ -733,10 +835,10 @@ CDataType_from_buffer_copy(PyObject *type, PyObject *args) return NULL; } - if (dict->size > buffer.len - offset) { + if (info->size > buffer.len - offset) { PyErr_Format(PyExc_ValueError, "Buffer size too small (%zd instead of at least %zd bytes)", - buffer.len, dict->size + offset); + buffer.len, info->size + offset); PyBuffer_Release(&buffer); return NULL; } @@ -750,7 +852,7 @@ CDataType_from_buffer_copy(PyObject *type, PyObject *args) result = GenericPyCData_new((PyTypeObject *)type, NULL, NULL); if (result != NULL) { memcpy(((CDataObject *)result)->b_ptr, - (char *)buffer.buf + offset, dict->size); + (char *)buffer.buf + offset, info->size); } PyBuffer_Release(&buffer); return result; @@ -836,13 +938,14 @@ CDataType_from_param(PyObject *type, PyObject *value) PyCArgObject *p = (PyCArgObject *)value; PyObject *ob = p->obj; const char *ob_name; - StgDictObject *dict; - dict = PyType_stgdict(type); - + StgInfo *info; + if (PyStgInfo_FromType(st, type, &info) < 0) { + return NULL; + } /* If we got a PyCArgObject, we must check if the object packed in it - is an instance of the type's dict->proto */ - if(dict && ob) { - res = PyObject_IsInstance(ob, dict->proto); + is an instance 
of the type's info->proto */ + if(info && ob) { + res = PyObject_IsInstance(ob, info->proto); if (res == -1) return NULL; if (res) { @@ -893,18 +996,27 @@ CDataType_repeat(PyObject *self, Py_ssize_t length) static int CDataType_clear(PyTypeObject *self) { - StgDictObject *dict = PyType_stgdict((PyObject *)self); - if (dict) - Py_CLEAR(dict->proto); + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromType(st, (PyObject *)self, &info) < 0) { + return -1; + } + if (info) { + Py_CLEAR(info->proto); + } return PyType_Type.tp_clear((PyObject *)self); } static int CDataType_traverse(PyTypeObject *self, visitproc visit, void *arg) { - StgDictObject *dict = PyType_stgdict((PyObject *)self); - if (dict) { - Py_VISIT(dict->proto); + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromType(st, (PyObject *)self, &info) < 0) { + return -1; + } + if (info) { + Py_VISIT(info->proto); } Py_VISIT(Py_TYPE(self)); return PyType_Type.tp_traverse((PyObject *)self, visit, arg); @@ -919,7 +1031,7 @@ PyCStructType_setattro(PyObject *self, PyObject *key, PyObject *value) if (value && PyUnicode_Check(key) && _PyUnicode_EqualToASCIIString(key, "_fields_")) - return PyCStructUnionType_update_stgdict(self, value, 1); + return PyCStructUnionType_update_stginfo(self, value, 1); return 0; } @@ -933,7 +1045,7 @@ UnionType_setattro(PyObject *self, PyObject *key, PyObject *value) if (PyUnicode_Check(key) && _PyUnicode_EqualToASCIIString(key, "_fields_")) - return PyCStructUnionType_update_stgdict(self, value, 0); + return PyCStructUnionType_update_stginfo(self, value, 0); return 0; } @@ -943,14 +1055,14 @@ static PyType_Slot pycstruct_type_slots[] = { {Py_tp_traverse, CDataType_traverse}, {Py_tp_clear, CDataType_clear}, {Py_tp_methods, CDataType_methods}, - {Py_tp_new, PyCStructType_new}, + {Py_tp_init, PyCStructType_init}, // Sequence protocol. {Py_sq_repeat, CDataType_repeat}, {0, NULL}, }; -PyType_Spec pycstruct_type_spec = { +static PyType_Spec pycstruct_type_spec = { .name = "_ctypes.PyCStructType", .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_IMMUTABLETYPE), @@ -963,7 +1075,7 @@ static PyType_Slot union_type_slots[] = { {Py_tp_traverse, CDataType_traverse}, {Py_tp_clear, CDataType_clear}, {Py_tp_methods, CDataType_methods}, - {Py_tp_new, UnionType_new}, + {Py_tp_init, UnionType_init}, // Sequence protocol. {Py_sq_repeat, CDataType_repeat}, @@ -994,20 +1106,25 @@ size property/method, and the sequence protocol. 
*/ static int -PyCPointerType_SetProto(StgDictObject *stgdict, PyObject *proto) +PyCPointerType_SetProto(StgInfo *stginfo, PyObject *proto) { + ctypes_state *st = GLOBAL_STATE(); if (!proto || !PyType_Check(proto)) { PyErr_SetString(PyExc_TypeError, "_type_ must be a type"); return -1; } - if (!PyType_stgdict(proto)) { + StgInfo *info; + if (PyStgInfo_FromType(st, proto, &info) < 0) { + return -1; + } + if (!info) { PyErr_SetString(PyExc_TypeError, "_type_ must have storage info"); return -1; } Py_INCREF(proto); - Py_XSETREF(stgdict->proto, proto); + Py_XSETREF(stginfo->proto, proto); return 0; } @@ -1027,110 +1144,103 @@ PyCPointerType_paramfunc(CDataObject *self) return parg; } -static PyObject * -PyCPointerType_new(PyTypeObject *type, PyObject *args, PyObject *kwds) +static int +PyCPointerType_init(PyObject *self, PyObject *args, PyObject *kwds) { - PyTypeObject *result; - StgDictObject *stgdict; PyObject *proto; PyObject *typedict; - typedict = PyTuple_GetItem(args, 2); if (!typedict) { - return NULL; + return -1; } + /* - stgdict items size, align, length contain info about pointers itself, - stgdict->proto has info about the pointed to type! + stginfo items size, align, length contain info about pointers itself, + stginfo->proto has info about the pointed to type! */ ctypes_state *st = GLOBAL_STATE(); - stgdict = (StgDictObject *)_PyObject_CallNoArgs( - (PyObject *)st->PyCStgDict_Type); - if (!stgdict) { - return NULL; + StgInfo *stginfo = PyStgInfo_Init(st, (PyTypeObject *)self); + if (!stginfo) { + return -1; } - stgdict->size = sizeof(void *); - stgdict->align = _ctypes_get_fielddesc("P")->pffi_type->alignment; - stgdict->length = 1; - stgdict->ffi_type_pointer = ffi_type_pointer; - stgdict->paramfunc = PyCPointerType_paramfunc; - stgdict->flags |= TYPEFLAG_ISPOINTER; + stginfo->size = sizeof(void *); + stginfo->align = _ctypes_get_fielddesc("P")->pffi_type->alignment; + stginfo->length = 1; + stginfo->ffi_type_pointer = ffi_type_pointer; + stginfo->paramfunc = PyCPointerType_paramfunc; + stginfo->flags |= TYPEFLAG_ISPOINTER; if (PyDict_GetItemRef(typedict, &_Py_ID(_type_), &proto) < 0) { - Py_DECREF((PyObject *)stgdict); - return NULL; + return -1; } if (proto) { - StgDictObject *itemdict; const char *current_format; - if (-1 == PyCPointerType_SetProto(stgdict, proto)) { + if (-1 == PyCPointerType_SetProto(stginfo, proto)) { Py_DECREF(proto); - Py_DECREF((PyObject *)stgdict); - return NULL; + return -1; } - itemdict = PyType_stgdict(proto); - /* PyCPointerType_SetProto has verified proto has a stgdict. */ - assert(itemdict); - /* If itemdict->format is NULL, then this is a pointer to an + StgInfo *iteminfo; + if (PyStgInfo_FromType(st, proto, &iteminfo) < 0) { + Py_DECREF(proto); + return -1; + } + /* PyCPointerType_SetProto has verified proto has a stginfo. */ + assert(iteminfo); + /* If iteminfo->format is NULL, then this is a pointer to an incomplete type. We create a generic format string 'pointer to bytes' in this case. XXX Better would be to fix the format string later... */ - current_format = itemdict->format ? itemdict->format : "B"; - if (itemdict->shape != NULL) { + current_format = iteminfo->format ? 
iteminfo->format : "B"; + if (iteminfo->shape != NULL) { /* pointer to an array: the shape needs to be prefixed */ - stgdict->format = _ctypes_alloc_format_string_with_shape( - itemdict->ndim, itemdict->shape, "&", current_format); + stginfo->format = _ctypes_alloc_format_string_with_shape( + iteminfo->ndim, iteminfo->shape, "&", current_format); } else { - stgdict->format = _ctypes_alloc_format_string("&", current_format); + stginfo->format = _ctypes_alloc_format_string("&", current_format); } Py_DECREF(proto); - if (stgdict->format == NULL) { - Py_DECREF((PyObject *)stgdict); - return NULL; + if (stginfo->format == NULL) { + return -1; } } - /* create the new instance (which is a class, - since we are a metatype!) */ - result = (PyTypeObject *)PyType_Type.tp_new(type, args, kwds); - if (result == NULL) { - Py_DECREF((PyObject *)stgdict); - return NULL; - } - - /* replace the class dict by our updated spam dict */ - if (-1 == PyDict_Update((PyObject *)stgdict, result->tp_dict)) { - Py_DECREF(result); - Py_DECREF((PyObject *)stgdict); - return NULL; - } - Py_SETREF(result->tp_dict, (PyObject *)stgdict); - - return (PyObject *)result; + return 0; } static PyObject * PyCPointerType_set_type(PyTypeObject *self, PyObject *type) { - StgDictObject *dict; - - - dict = PyType_stgdict((PyObject *)self); - if (!dict) { + PyObject *attrdict = PyType_GetDict(self); + if (!attrdict) { + return NULL; + } + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromType(st, (PyObject *)self, &info) < 0) { + Py_DECREF(attrdict); + return NULL; + } + if (!info) { PyErr_SetString(PyExc_TypeError, "abstract class"); + Py_DECREF(attrdict); return NULL; } - if (-1 == PyCPointerType_SetProto(dict, type)) + if (-1 == PyCPointerType_SetProto(info, type)) { + Py_DECREF(attrdict); return NULL; + } - if (-1 == PyDict_SetItem((PyObject *)dict, &_Py_ID(_type_), type)) + if (-1 == PyDict_SetItem(attrdict, &_Py_ID(_type_), type)) { + Py_DECREF(attrdict); return NULL; + } + Py_DECREF(attrdict); Py_RETURN_NONE; } @@ -1139,15 +1249,17 @@ static PyObject *_byref(PyObject *); static PyObject * PyCPointerType_from_param(PyObject *type, PyObject *value) { - StgDictObject *typedict; - if (value == Py_None) { /* ConvParam will convert to a NULL pointer later */ return Py_NewRef(value); } - typedict = PyType_stgdict(type); - if (!typedict) { + ctypes_state *st = GLOBAL_STATE(); + StgInfo *typeinfo; + if (PyStgInfo_FromType(st, type, &typeinfo) < 0) { + return NULL; + } + if (!typeinfo) { PyErr_SetString(PyExc_TypeError, "abstract class"); return NULL; @@ -1156,7 +1268,8 @@ PyCPointerType_from_param(PyObject *type, PyObject *value) /* If we expect POINTER(), but receive a instance, accept it by calling byref(). */ - switch (PyObject_IsInstance(value, typedict->proto)) { + assert(typeinfo->proto); + switch (PyObject_IsInstance(value, typeinfo->proto)) { case 1: Py_INCREF(value); /* _byref steals a refcount */ return _byref(value); @@ -1166,14 +1279,16 @@ PyCPointerType_from_param(PyObject *type, PyObject *value) break; } - ctypes_state *st = GLOBAL_STATE(); if (PointerObject_Check(st, value) || ArrayObject_Check(st, value)) { /* Array instances are also pointers when the item types are the same. 
*/ - StgDictObject *v = PyObject_stgdict(value); + StgInfo *v; + if (PyStgInfo_FromObject(st, value, &v) < 0) { + return NULL; + } assert(v); /* Cannot be NULL for pointer or array objects */ - int ret = PyObject_IsSubclass(v->proto, typedict->proto); + int ret = PyObject_IsSubclass(v->proto, typeinfo->proto); if (ret < 0) { return NULL; } @@ -1199,7 +1314,7 @@ static PyType_Slot pycpointer_type_slots[] = { {Py_tp_traverse, CDataType_traverse}, {Py_tp_clear, CDataType_clear}, {Py_tp_methods, PyCPointerType_methods}, - {Py_tp_new, PyCPointerType_new}, + {Py_tp_init, PyCPointerType_init}, // Sequence protocol. {Py_sq_repeat, CDataType_repeat}, @@ -1219,7 +1334,7 @@ static PyType_Spec pycpointer_type_spec = { PyCArrayType_Type */ /* - PyCArrayType_new ensures that the new Array subclass created has a _length_ + PyCArrayType_init ensures that the new Array subclass created has a _length_ attribute, and a _type_ attribute. */ @@ -1403,28 +1518,18 @@ PyCArrayType_paramfunc(CDataObject *self) return p; } -static PyObject * -PyCArrayType_new(PyTypeObject *type, PyObject *args, PyObject *kwds) +static int +PyCArrayType_init(PyObject *self, PyObject *args, PyObject *kwds) { - PyTypeObject *result; - StgDictObject *stgdict; - StgDictObject *itemdict; PyObject *length_attr, *type_attr; Py_ssize_t length; Py_ssize_t itemsize, itemalign; - /* create the new instance (which is a class, - since we are a metatype!) */ - result = (PyTypeObject *)PyType_Type.tp_new(type, args, kwds); - if (result == NULL) - return NULL; - /* Initialize these variables to NULL so that we can simplify error handling by using Py_XDECREF. */ - stgdict = NULL; type_attr = NULL; - if (PyObject_GetOptionalAttr((PyObject *)result, &_Py_ID(_length_), &length_attr) < 0) { + if (PyObject_GetOptionalAttr(self, &_Py_ID(_length_), &length_attr) < 0) { goto error; } if (!length_attr) { @@ -1457,7 +1562,7 @@ PyCArrayType_new(PyTypeObject *type, PyObject *args, PyObject *kwds) goto error; } - if (PyObject_GetOptionalAttr((PyObject *)result, &_Py_ID(_type_), &type_attr) < 0) { + if (PyObject_GetOptionalAttr(self, &_Py_ID(_type_), &type_attr) < 0) { goto error; } if (!type_attr) { @@ -1467,88 +1572,83 @@ PyCArrayType_new(PyTypeObject *type, PyObject *args, PyObject *kwds) } ctypes_state *st = GLOBAL_STATE(); - stgdict = (StgDictObject *)_PyObject_CallNoArgs( - (PyObject *)st->PyCStgDict_Type); - if (!stgdict) { + StgInfo *stginfo = PyStgInfo_Init(st, (PyTypeObject*)self); + if (!stginfo) { + goto error; + } + + StgInfo *iteminfo; + if (PyStgInfo_FromType(st, type_attr, &iteminfo) < 0) { goto error; } - itemdict = PyType_stgdict(type_attr); - if (!itemdict) { + if (!iteminfo) { PyErr_SetString(PyExc_TypeError, "_type_ must have storage info"); goto error; } - assert(itemdict->format); - stgdict->format = _ctypes_alloc_format_string(NULL, itemdict->format); - if (stgdict->format == NULL) + assert(iteminfo->format); + stginfo->format = _ctypes_alloc_format_string(NULL, iteminfo->format); + if (stginfo->format == NULL) goto error; - stgdict->ndim = itemdict->ndim + 1; - stgdict->shape = PyMem_Malloc(sizeof(Py_ssize_t) * stgdict->ndim); - if (stgdict->shape == NULL) { + stginfo->ndim = iteminfo->ndim + 1; + stginfo->shape = PyMem_Malloc(sizeof(Py_ssize_t) * stginfo->ndim); + if (stginfo->shape == NULL) { PyErr_NoMemory(); goto error; } - stgdict->shape[0] = length; - if (stgdict->ndim > 1) { - memmove(&stgdict->shape[1], itemdict->shape, - sizeof(Py_ssize_t) * (stgdict->ndim - 1)); + stginfo->shape[0] = length; + if (stginfo->ndim > 1) { + 
memmove(&stginfo->shape[1], iteminfo->shape, + sizeof(Py_ssize_t) * (stginfo->ndim - 1)); } - itemsize = itemdict->size; + itemsize = iteminfo->size; if (itemsize != 0 && length > PY_SSIZE_T_MAX / itemsize) { PyErr_SetString(PyExc_OverflowError, "array too large"); goto error; } - itemalign = itemdict->align; + itemalign = iteminfo->align; - if (itemdict->flags & (TYPEFLAG_ISPOINTER | TYPEFLAG_HASPOINTER)) - stgdict->flags |= TYPEFLAG_HASPOINTER; + if (iteminfo->flags & (TYPEFLAG_ISPOINTER | TYPEFLAG_HASPOINTER)) + stginfo->flags |= TYPEFLAG_HASPOINTER; - stgdict->size = itemsize * length; - stgdict->align = itemalign; - stgdict->length = length; - stgdict->proto = type_attr; + stginfo->size = itemsize * length; + stginfo->align = itemalign; + stginfo->length = length; + stginfo->proto = type_attr; type_attr = NULL; - stgdict->paramfunc = &PyCArrayType_paramfunc; + stginfo->paramfunc = &PyCArrayType_paramfunc; /* Arrays are passed as pointers to function calls. */ - stgdict->ffi_type_pointer = ffi_type_pointer; - - /* replace the class dict by our updated spam dict */ - if (-1 == PyDict_Update((PyObject *)stgdict, result->tp_dict)) - goto error; - Py_SETREF(result->tp_dict, (PyObject *)stgdict); /* steal the reference */ - stgdict = NULL; + stginfo->ffi_type_pointer = ffi_type_pointer; /* Special case for character arrays. A permanent annoyance: char arrays are also strings! */ - if (itemdict->getfunc == _ctypes_get_fielddesc("c")->getfunc) { - if (-1 == add_getset(result, CharArray_getsets)) + if (iteminfo->getfunc == _ctypes_get_fielddesc("c")->getfunc) { + if (-1 == add_getset((PyTypeObject*)self, CharArray_getsets)) goto error; } - else if (itemdict->getfunc == _ctypes_get_fielddesc("u")->getfunc) { - if (-1 == add_getset(result, WCharArray_getsets)) + else if (iteminfo->getfunc == _ctypes_get_fielddesc("u")->getfunc) { + if (-1 == add_getset((PyTypeObject*)self, WCharArray_getsets)) goto error; } - return (PyObject *)result; + return 0; error: - Py_XDECREF((PyObject*)stgdict); Py_XDECREF(type_attr); - Py_DECREF(result); - return NULL; + return -1; } static PyType_Slot pycarray_type_slots[] = { {Py_tp_doc, PyDoc_STR("metatype for the Array Objects")}, {Py_tp_traverse, CDataType_traverse}, {Py_tp_methods, CDataType_methods}, - {Py_tp_new, PyCArrayType_new}, + {Py_tp_init, PyCArrayType_init}, {Py_tp_clear, CDataType_clear}, // Sequence protocol. @@ -1569,7 +1669,7 @@ static PyType_Spec pycarray_type_spec = { */ /* -PyCSimpleType_new ensures that the new Simple_Type subclass created has a valid +PyCSimpleType_init ensures that the new Simple_Type subclass created has a valid _type_ attribute. */ @@ -1609,19 +1709,29 @@ c_wchar_p_from_param(PyObject *type, PyObject *value) ctypes_state *st = GLOBAL_STATE(); if (ArrayObject_Check(st, value) || PointerObject_Check(st, value)) { /* c_wchar array instance or pointer(c_wchar(...)) */ - StgDictObject *dt = PyObject_stgdict(value); - StgDictObject *dict; - assert(dt); /* Cannot be NULL for pointer or array objects */ - dict = dt && dt->proto ? 
PyType_stgdict(dt->proto) : NULL; - if (dict && (dict->setfunc == _ctypes_get_fielddesc("u")->setfunc)) { + StgInfo *it; + if (PyStgInfo_FromObject(st, value, &it) < 0) { + return NULL; + } + assert(it); /* Cannot be NULL for pointer or array objects */ + StgInfo *info = NULL; + if (it && it->proto) { + if (PyStgInfo_FromType(st, it->proto, &info) < 0) { + return NULL; + } + } + if (info && (info->setfunc == _ctypes_get_fielddesc("u")->setfunc)) { return Py_NewRef(value); } } if (PyCArg_CheckExact(st, value)) { /* byref(c_char(...)) */ PyCArgObject *a = (PyCArgObject *)value; - StgDictObject *dict = PyObject_stgdict(a->obj); - if (dict && (dict->setfunc == _ctypes_get_fielddesc("u")->setfunc)) { + StgInfo *info; + if (PyStgInfo_FromObject(st, a->obj, &info) < 0) { + return NULL; + } + if (info && (info->setfunc == _ctypes_get_fielddesc("u")->setfunc)) { return Py_NewRef(value); } } @@ -1673,19 +1783,29 @@ c_char_p_from_param(PyObject *type, PyObject *value) ctypes_state *st = GLOBAL_STATE(); if (ArrayObject_Check(st, value) || PointerObject_Check(st, value)) { /* c_char array instance or pointer(c_char(...)) */ - StgDictObject *dt = PyObject_stgdict(value); - StgDictObject *dict; - assert(dt); /* Cannot be NULL for pointer or array objects */ - dict = dt && dt->proto ? PyType_stgdict(dt->proto) : NULL; - if (dict && (dict->setfunc == _ctypes_get_fielddesc("c")->setfunc)) { + StgInfo *it; + if (PyStgInfo_FromObject(st, value, &it) < 0) { + return NULL; + } + assert(it); /* Cannot be NULL for pointer or array objects */ + StgInfo *info = NULL; + if (it && it->proto) { + if (PyStgInfo_FromType(st, it->proto, &info) < 0) { + return NULL; + } + } + if (info && (info->setfunc == _ctypes_get_fielddesc("c")->setfunc)) { return Py_NewRef(value); } } if (PyCArg_CheckExact(st, value)) { /* byref(c_char(...)) */ PyCArgObject *a = (PyCArgObject *)value; - StgDictObject *dict = PyObject_stgdict(a->obj); - if (dict && (dict->setfunc == _ctypes_get_fielddesc("c")->setfunc)) { + StgInfo *info; + if (PyStgInfo_FromObject(st, a->obj, &info) < 0) { + return NULL; + } + if (info && (info->setfunc == _ctypes_get_fielddesc("c")->setfunc)) { return Py_NewRef(value); } } @@ -1707,7 +1827,6 @@ c_char_p_from_param(PyObject *type, PyObject *value) static PyObject * c_void_p_from_param(PyObject *type, PyObject *value) { - StgDictObject *stgd; PyObject *as_parameter; int res; @@ -1806,15 +1925,18 @@ c_void_p_from_param(PyObject *type, PyObject *value) return (PyObject *)parg; } /* c_char_p, c_wchar_p */ - stgd = PyObject_stgdict(value); - if (stgd + StgInfo *stgi; + if (PyStgInfo_FromObject(st, value, &stgi) < 0) { + return NULL; + } + if (stgi && CDataObject_Check(st, value) - && stgd->proto - && PyUnicode_Check(stgd->proto)) + && stgi->proto + && PyUnicode_Check(stgi->proto)) { PyCArgObject *parg; - switch (PyUnicode_AsUTF8(stgd->proto)[0]) { + switch (PyUnicode_AsUTF8(stgi->proto)[0]) { case 'z': /* c_char_p */ case 'Z': /* c_wchar_p */ parg = PyCArgObject_new(); @@ -1851,7 +1973,6 @@ static PyObject *CreateSwappedType(PyTypeObject *type, PyObject *args, PyObject PyObject *proto, struct fielddesc *fmt) { PyTypeObject *result; - StgDictObject *stgdict; PyObject *name = PyTuple_GET_ITEM(args, 0); PyObject *newname; PyObject *swapped_args; @@ -1894,29 +2015,21 @@ static PyObject *CreateSwappedType(PyTypeObject *type, PyObject *args, PyObject return NULL; ctypes_state *st = GLOBAL_STATE(); - stgdict = (StgDictObject *)_PyObject_CallNoArgs( - (PyObject *)st->PyCStgDict_Type); - if (!stgdict) { + + StgInfo *stginfo = 
PyStgInfo_Init(st, result); + if (!stginfo) { Py_DECREF(result); return NULL; } - stgdict->ffi_type_pointer = *fmt->pffi_type; - stgdict->align = fmt->pffi_type->alignment; - stgdict->length = 0; - stgdict->size = fmt->pffi_type->size; - stgdict->setfunc = fmt->setfunc_swapped; - stgdict->getfunc = fmt->getfunc_swapped; + stginfo->ffi_type_pointer = *fmt->pffi_type; + stginfo->align = fmt->pffi_type->alignment; + stginfo->length = 0; + stginfo->size = fmt->pffi_type->size; + stginfo->setfunc = fmt->setfunc_swapped; + stginfo->getfunc = fmt->getfunc_swapped; - stgdict->proto = Py_NewRef(proto); - - /* replace the class dict by our updated spam dict */ - if (-1 == PyDict_Update((PyObject *)stgdict, result->tp_dict)) { - Py_DECREF(result); - Py_DECREF((PyObject *)stgdict); - return NULL; - } - Py_SETREF(result->tp_dict, (PyObject *)stgdict); + stginfo->proto = Py_NewRef(proto); return (PyObject *)result; } @@ -1924,14 +2037,17 @@ static PyObject *CreateSwappedType(PyTypeObject *type, PyObject *args, PyObject static PyCArgObject * PyCSimpleType_paramfunc(CDataObject *self) { - StgDictObject *dict; const char *fmt; PyCArgObject *parg; struct fielddesc *fd; - dict = PyObject_stgdict((PyObject *)self); - assert(dict); /* Cannot be NULL for CDataObject instances */ - fmt = PyUnicode_AsUTF8(dict->proto); + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromObject(st, (PyObject *)self, &info) < 0) { + return NULL; + } + assert(info); /* Cannot be NULL for CDataObject instances */ + fmt = PyUnicode_AsUTF8(info->proto); assert(fmt); fd = _ctypes_get_fielddesc(fmt); @@ -1948,33 +2064,27 @@ PyCSimpleType_paramfunc(CDataObject *self) return parg; } -static PyObject * -PyCSimpleType_new(PyTypeObject *type, PyObject *args, PyObject *kwds) +static int +PyCSimpleType_init(PyObject *self, PyObject *args, PyObject *kwds) { - PyTypeObject *result; - StgDictObject *stgdict; PyObject *proto; const char *proto_str; Py_ssize_t proto_len; PyMethodDef *ml; struct fielddesc *fmt; - /* create the new instance (which is a class, - since we are a metatype!) 
*/ - result = (PyTypeObject *)PyType_Type.tp_new(type, args, kwds); - if (result == NULL) - return NULL; - - if (PyObject_GetOptionalAttr((PyObject *)result, &_Py_ID(_type_), &proto) < 0) { - return NULL; + if (PyType_Type.tp_init(self, args, kwds) < 0) { + return -1; + } + if (PyObject_GetOptionalAttr(self, &_Py_ID(_type_), &proto) < 0) { + return -1; } if (!proto) { PyErr_SetString(PyExc_AttributeError, "class must define a '_type_' attribute"); error: Py_XDECREF(proto); - Py_DECREF(result); - return NULL; + return -1; } if (PyUnicode_Check(proto)) { proto_str = PyUnicode_AsUTF8AndSize(proto, &proto_len); @@ -2006,70 +2116,60 @@ PyCSimpleType_new(PyTypeObject *type, PyObject *args, PyObject *kwds) } ctypes_state *st = GLOBAL_STATE(); - stgdict = (StgDictObject *)_PyObject_CallNoArgs( - (PyObject *)st->PyCStgDict_Type); - if (!stgdict) { + StgInfo *stginfo = PyStgInfo_Init(st, (PyTypeObject *)self); + if (!stginfo) { goto error; } - stgdict->ffi_type_pointer = *fmt->pffi_type; - stgdict->align = fmt->pffi_type->alignment; - stgdict->length = 0; - stgdict->size = fmt->pffi_type->size; - stgdict->setfunc = fmt->setfunc; - stgdict->getfunc = fmt->getfunc; + + stginfo->ffi_type_pointer = *fmt->pffi_type; + stginfo->align = fmt->pffi_type->alignment; + stginfo->length = 0; + stginfo->size = fmt->pffi_type->size; + stginfo->setfunc = fmt->setfunc; + stginfo->getfunc = fmt->getfunc; #ifdef WORDS_BIGENDIAN - stgdict->format = _ctypes_alloc_format_string_for_type(proto_str[0], 1); + stginfo->format = _ctypes_alloc_format_string_for_type(proto_str[0], 1); #else - stgdict->format = _ctypes_alloc_format_string_for_type(proto_str[0], 0); + stginfo->format = _ctypes_alloc_format_string_for_type(proto_str[0], 0); #endif - if (stgdict->format == NULL) { - Py_DECREF(result); + if (stginfo->format == NULL) { Py_DECREF(proto); - Py_DECREF((PyObject *)stgdict); - return NULL; + return -1; } - stgdict->paramfunc = PyCSimpleType_paramfunc; + stginfo->paramfunc = PyCSimpleType_paramfunc; /* - if (result->tp_base != st->Simple_Type) { - stgdict->setfunc = NULL; - stgdict->getfunc = NULL; + if (self->tp_base != st->Simple_Type) { + stginfo->setfunc = NULL; + stginfo->getfunc = NULL; } */ /* This consumes the refcount on proto which we have */ - stgdict->proto = proto; - - /* replace the class dict by our updated spam dict */ - if (-1 == PyDict_Update((PyObject *)stgdict, result->tp_dict)) { - Py_DECREF(result); - Py_DECREF((PyObject *)stgdict); - return NULL; - } - Py_SETREF(result->tp_dict, (PyObject *)stgdict); + stginfo->proto = proto; /* Install from_param class methods in ctypes base classes. Overrides the PyCSimpleType_from_param generic method. 
*/ - if (result->tp_base == st->Simple_Type) { + if (((PyTypeObject *)self)->tp_base == st->Simple_Type) { switch (*proto_str) { case 'z': /* c_char_p */ ml = &c_char_p_method; - stgdict->flags |= TYPEFLAG_ISPOINTER; + stginfo->flags |= TYPEFLAG_ISPOINTER; break; case 'Z': /* c_wchar_p */ ml = &c_wchar_p_method; - stgdict->flags |= TYPEFLAG_ISPOINTER; + stginfo->flags |= TYPEFLAG_ISPOINTER; break; case 'P': /* c_void_p */ ml = &c_void_p_method; - stgdict->flags |= TYPEFLAG_ISPOINTER; + stginfo->flags |= TYPEFLAG_ISPOINTER; break; case 's': case 'X': case 'O': ml = NULL; - stgdict->flags |= TYPEFLAG_ISPOINTER; + stginfo->flags |= TYPEFLAG_ISPOINTER; break; default: ml = NULL; @@ -2079,57 +2179,57 @@ PyCSimpleType_new(PyTypeObject *type, PyObject *args, PyObject *kwds) if (ml) { PyObject *meth; int x; - meth = PyDescr_NewClassMethod(result, ml); + meth = PyDescr_NewClassMethod((PyTypeObject*)self, ml); if (!meth) { - Py_DECREF(result); - return NULL; + return -1; } - x = PyDict_SetItemString(result->tp_dict, + x = PyDict_SetItemString(((PyTypeObject*)self)->tp_dict, ml->ml_name, meth); Py_DECREF(meth); if (x == -1) { - Py_DECREF(result); - return NULL; + return -1; } } } + PyTypeObject *type = Py_TYPE(self); if (type == st->PyCSimpleType_Type && fmt->setfunc_swapped && fmt->getfunc_swapped) { PyObject *swapped = CreateSwappedType(type, args, kwds, proto, fmt); - StgDictObject *sw_dict; if (swapped == NULL) { - Py_DECREF(result); - return NULL; + return -1; } - sw_dict = PyType_stgdict(swapped); + StgInfo *sw_info; + if (PyStgInfo_FromType(st, swapped, &sw_info) < 0) { + return -1; + } + assert(sw_info); #ifdef WORDS_BIGENDIAN - PyObject_SetAttrString((PyObject *)result, "__ctype_le__", swapped); - PyObject_SetAttrString((PyObject *)result, "__ctype_be__", (PyObject *)result); - PyObject_SetAttrString(swapped, "__ctype_be__", (PyObject *)result); + PyObject_SetAttrString(self, "__ctype_le__", swapped); + PyObject_SetAttrString(self, "__ctype_be__", self); + PyObject_SetAttrString(swapped, "__ctype_be__", self); PyObject_SetAttrString(swapped, "__ctype_le__", swapped); /* We are creating the type for the OTHER endian */ - sw_dict->format = _ctypes_alloc_format_string("<", stgdict->format+1); + sw_info->format = _ctypes_alloc_format_string("<", stginfo->format+1); #else - PyObject_SetAttrString((PyObject *)result, "__ctype_be__", swapped); - PyObject_SetAttrString((PyObject *)result, "__ctype_le__", (PyObject *)result); - PyObject_SetAttrString(swapped, "__ctype_le__", (PyObject *)result); + PyObject_SetAttrString(self, "__ctype_be__", swapped); + PyObject_SetAttrString(self, "__ctype_le__", self); + PyObject_SetAttrString(swapped, "__ctype_le__", self); PyObject_SetAttrString(swapped, "__ctype_be__", swapped); /* We are creating the type for the OTHER endian */ - sw_dict->format = _ctypes_alloc_format_string(">", stgdict->format+1); + sw_info->format = _ctypes_alloc_format_string(">", stginfo->format+1); #endif Py_DECREF(swapped); if (PyErr_Occurred()) { - Py_DECREF(result); - return NULL; + return -1; } }; - return (PyObject *)result; + return 0; } /* @@ -2139,7 +2239,6 @@ PyCSimpleType_new(PyTypeObject *type, PyObject *args, PyObject *kwds) static PyObject * PyCSimpleType_from_param(PyObject *type, PyObject *value) { - StgDictObject *dict; const char *fmt; PyCArgObject *parg; struct fielddesc *fd; @@ -2155,15 +2254,19 @@ PyCSimpleType_from_param(PyObject *type, PyObject *value) return Py_NewRef(value); } - dict = PyType_stgdict(type); - if (!dict) { + ctypes_state *st = GLOBAL_STATE(); + 
StgInfo *info; + if (PyStgInfo_FromType(st, type, &info) < 0) { + return NULL; + } + if (!info) { PyErr_SetString(PyExc_TypeError, "abstract class"); return NULL; } /* I think we can rely on this being a one-character string */ - fmt = PyUnicode_AsUTF8(dict->proto); + fmt = PyUnicode_AsUTF8(info->proto); assert(fmt); fd = _ctypes_get_fielddesc(fmt); @@ -2218,7 +2321,7 @@ static PyMethodDef PyCSimpleType_methods[] = { static PyType_Slot pycsimple_type_slots[] = { {Py_tp_doc, PyDoc_STR("metatype for the PyCSimpleType Objects")}, {Py_tp_methods, PyCSimpleType_methods}, - {Py_tp_new, PyCSimpleType_new}, + {Py_tp_init, PyCSimpleType_init}, {Py_tp_traverse, CDataType_traverse}, {Py_tp_clear, CDataType_clear}, @@ -2227,7 +2330,7 @@ static PyType_Slot pycsimple_type_slots[] = { {0, NULL}, }; -PyType_Spec pycsimple_type_spec = { +static PyType_Spec pycsimple_type_spec = { .name = "_ctypes.PyCSimpleType", .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_IMMUTABLETYPE), @@ -2292,10 +2395,13 @@ converters_from_argtypes(PyObject *ob) * not bitfields, the bitfields check is also being disabled as a * precaution. - StgDictObject *stgdict = PyType_stgdict(tp); + StgInfo *stginfo; + if (PyStgInfo_FromType(st, tp, &stginfo) < 0) { + return -1; + } - if (stgdict != NULL) { - if (stgdict->flags & TYPEFLAG_HASUNION) { + if (stginfo != NULL) { + if (stginfo->flags & TYPEFLAG_HASUNION) { Py_DECREF(converters); Py_DECREF(ob); if (!PyErr_Occurred()) { @@ -2306,7 +2412,7 @@ converters_from_argtypes(PyObject *ob) } return NULL; } - if (stgdict->flags & TYPEFLAG_HASBITFIELD) { + if (stginfo->flags & TYPEFLAG_HASBITFIELD) { Py_DECREF(converters); Py_DECREF(ob); if (!PyErr_Occurred()) { @@ -2338,19 +2444,19 @@ converters_from_argtypes(PyObject *ob) } static int -make_funcptrtype_dict(StgDictObject *stgdict) +make_funcptrtype_dict(PyObject *attrdict, StgInfo *stginfo) { PyObject *ob; PyObject *converters = NULL; - stgdict->align = _ctypes_get_fielddesc("P")->pffi_type->alignment; - stgdict->length = 1; - stgdict->size = sizeof(void *); - stgdict->setfunc = NULL; - stgdict->getfunc = NULL; - stgdict->ffi_type_pointer = ffi_type_pointer; + stginfo->align = _ctypes_get_fielddesc("P")->pffi_type->alignment; + stginfo->length = 1; + stginfo->size = sizeof(void *); + stginfo->setfunc = NULL; + stginfo->getfunc = NULL; + stginfo->ffi_type_pointer = ffi_type_pointer; - if (PyDict_GetItemRef((PyObject *)stgdict, &_Py_ID(_flags_), &ob) < 0) { + if (PyDict_GetItemRef((PyObject *)attrdict, &_Py_ID(_flags_), &ob) < 0) { return -1; } if (!ob || !PyLong_Check(ob)) { @@ -2359,11 +2465,11 @@ make_funcptrtype_dict(StgDictObject *stgdict) Py_XDECREF(ob); return -1; } - stgdict->flags = PyLong_AsUnsignedLongMask(ob) | TYPEFLAG_ISPOINTER; + stginfo->flags = PyLong_AsUnsignedLongMask(ob) | TYPEFLAG_ISPOINTER; Py_DECREF(ob); /* _argtypes_ is optional... 
*/ - if (PyDict_GetItemRef((PyObject *)stgdict, &_Py_ID(_argtypes_), &ob) < 0) { + if (PyDict_GetItemRef((PyObject *)attrdict, &_Py_ID(_argtypes_), &ob) < 0) { return -1; } if (ob) { @@ -2372,29 +2478,34 @@ make_funcptrtype_dict(StgDictObject *stgdict) Py_DECREF(ob); return -1; } - stgdict->argtypes = ob; - stgdict->converters = converters; + stginfo->argtypes = ob; + stginfo->converters = converters; } - if (PyDict_GetItemRef((PyObject *)stgdict, &_Py_ID(_restype_), &ob) < 0) { + if (PyDict_GetItemRef((PyObject *)attrdict, &_Py_ID(_restype_), &ob) < 0) { return -1; } if (ob) { - if (ob != Py_None && !PyType_stgdict(ob) && !PyCallable_Check(ob)) { + StgInfo *info; + ctypes_state *st = GLOBAL_STATE(); + if (PyStgInfo_FromType(st, ob, &info) < 0) { + return -1; + } + if (ob != Py_None && !info && !PyCallable_Check(ob)) { PyErr_SetString(PyExc_TypeError, "_restype_ must be a type, a callable, or None"); Py_DECREF(ob); return -1; } - stgdict->restype = ob; + stginfo->restype = ob; if (PyObject_GetOptionalAttr(ob, &_Py_ID(_check_retval_), - &stgdict->checker) < 0) + &stginfo->checker) < 0) { return -1; } } /* XXX later, maybe. - if (PyDict_GetItemRef((PyObject *)stgdict, &_Py _ID(_errcheck_), &ob) < 0) { + if (PyDict_GetItemRef((PyObject *)attrdict, &_Py _ID(_errcheck_), &ob) < 0) { return -1; } if (ob) { @@ -2404,7 +2515,7 @@ make_funcptrtype_dict(StgDictObject *stgdict) Py_DECREF(ob); return -1; } - stgdict->errcheck = ob; + stginfo->errcheck = ob; } */ return 0; @@ -2426,54 +2537,43 @@ PyCFuncPtrType_paramfunc(CDataObject *self) return parg; } -static PyObject * -PyCFuncPtrType_new(PyTypeObject *type, PyObject *args, PyObject *kwds) +static int +PyCFuncPtrType_init(PyObject *self, PyObject *args, PyObject *kwds) { - PyTypeObject *result; - StgDictObject *stgdict; + PyObject *attrdict = PyType_GetDict((PyTypeObject *)self); + if (!attrdict) { + return -1; + } ctypes_state *st = GLOBAL_STATE(); - stgdict = (StgDictObject *)_PyObject_CallNoArgs( - (PyObject *)st->PyCStgDict_Type); - if (!stgdict) { - return NULL; + StgInfo *stginfo = PyStgInfo_Init(st, (PyTypeObject *)self); + if (!stginfo) { + Py_DECREF(attrdict); + return -1; } - stgdict->paramfunc = PyCFuncPtrType_paramfunc; + + stginfo->paramfunc = PyCFuncPtrType_paramfunc; + /* We do NOT expose the function signature in the format string. It is impossible, generally, because the only requirement for the argtypes items is that they have a .from_param method - we do not know the types of the arguments (although, in practice, most argtypes would be a ctypes type). */ - stgdict->format = _ctypes_alloc_format_string(NULL, "X{}"); - if (stgdict->format == NULL) { - Py_DECREF((PyObject *)stgdict); - return NULL; - } - stgdict->flags |= TYPEFLAG_ISPOINTER; - - /* create the new instance (which is a class, - since we are a metatype!) 
*/ - result = (PyTypeObject *)PyType_Type.tp_new(type, args, kwds); - if (result == NULL) { - Py_DECREF((PyObject *)stgdict); - return NULL; - } - - /* replace the class dict by our updated storage dict */ - if (-1 == PyDict_Update((PyObject *)stgdict, result->tp_dict)) { - Py_DECREF(result); - Py_DECREF((PyObject *)stgdict); - return NULL; + stginfo->format = _ctypes_alloc_format_string(NULL, "X{}"); + if (stginfo->format == NULL) { + Py_DECREF(attrdict); + return -1; } - Py_SETREF(result->tp_dict, (PyObject *)stgdict); + stginfo->flags |= TYPEFLAG_ISPOINTER; - if (-1 == make_funcptrtype_dict(stgdict)) { - Py_DECREF(result); - return NULL; + if (-1 == make_funcptrtype_dict(attrdict, stginfo)) { + Py_DECREF(attrdict); + return -1; } - return (PyObject *)result; + Py_DECREF(attrdict); + return 0; } static PyType_Slot pycfuncptr_type_slots[] = { @@ -2481,7 +2581,7 @@ static PyType_Slot pycfuncptr_type_slots[] = { {Py_tp_traverse, CDataType_traverse}, {Py_tp_clear, CDataType_clear}, {Py_tp_methods, CDataType_methods}, - {Py_tp_new, PyCFuncPtrType_new}, + {Py_tp_init, PyCFuncPtrType_init}, // Sequence protocol. {Py_sq_repeat, CDataType_repeat}, @@ -2609,6 +2709,8 @@ PyCData_traverse(CDataObject *self, visitproc visit, void *arg) { Py_VISIT(self->b_objects); Py_VISIT((PyObject *)self->b_base); + PyTypeObject *type = Py_TYPE(self); + Py_VISIT(type); return 0; } @@ -2627,8 +2729,11 @@ PyCData_clear(CDataObject *self) static void PyCData_dealloc(PyObject *self) { + PyTypeObject *type = Py_TYPE(self); + PyObject_GC_UnTrack(self); PyCData_clear((CDataObject *)self); - Py_TYPE(self)->tp_free(self); + type->tp_free(self); + Py_DECREF(type); } static PyMemberDef PyCData_members[] = { @@ -2650,13 +2755,15 @@ PyCData_item_type(PyObject *type) { ctypes_state *st = GLOBAL_STATE(); if (PyCArrayTypeObject_Check(st, type)) { - StgDictObject *stg_dict; PyObject *elem_type; /* asserts used here as these are all guaranteed by construction */ - stg_dict = PyType_stgdict(type); - assert(stg_dict); - elem_type = stg_dict->proto; + StgInfo *stg_info; + if (PyStgInfo_FromType(st, type, &stg_info) < 0) { + return NULL; + } + assert(stg_info); + elem_type = stg_info->proto; assert(elem_type); return PyCData_item_type(elem_type); } @@ -2669,32 +2776,42 @@ static int PyCData_NewGetBuffer(PyObject *myself, Py_buffer *view, int flags) { CDataObject *self = (CDataObject *)myself; - StgDictObject *dict = PyObject_stgdict(myself); + + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromObject(st, myself, &info) < 0) { + return -1; + } + assert(info); + PyObject *item_type = PyCData_item_type((PyObject*)Py_TYPE(myself)); - StgDictObject *item_dict = PyType_stgdict(item_type); + if (item_type == NULL) { + return 0; + } if (view == NULL) return 0; + StgInfo *item_info; + if (PyStgInfo_FromType(st, item_type, &item_info) < 0) { + return -1; + } + assert(item_info); + view->buf = self->b_ptr; view->obj = Py_NewRef(myself); view->len = self->b_size; view->readonly = 0; /* use default format character if not set */ - view->format = dict->format ? dict->format : "B"; - view->ndim = dict->ndim; - view->shape = dict->shape; - view->itemsize = item_dict->size; + view->format = info->format ? 
info->format : "B"; + view->ndim = info->ndim; + view->shape = info->shape; + view->itemsize = item_info->size; view->strides = NULL; view->suboffsets = NULL; view->internal = NULL; return 0; } -static PyBufferProcs PyCData_as_buffer = { - PyCData_NewGetBuffer, - NULL, -}; - /* * CData objects are mutable, so they cannot be hashable! */ @@ -2710,7 +2827,14 @@ PyCData_reduce(PyObject *myself, PyObject *args) { CDataObject *self = (CDataObject *)myself; - if (PyObject_stgdict(myself)->flags & (TYPEFLAG_ISPOINTER|TYPEFLAG_HASPOINTER)) { + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromObject(st, myself, &info) < 0) { + return NULL; + } + assert(info); + + if (info->flags & (TYPEFLAG_ISPOINTER|TYPEFLAG_HASPOINTER)) { PyErr_SetString(PyExc_ValueError, "ctypes objects containing pointers cannot be pickled"); return NULL; @@ -2773,51 +2897,30 @@ static PyMethodDef PyCData_methods[] = { { NULL, NULL }, }; -PyTypeObject PyCData_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_ctypes._CData", - sizeof(CDataObject), /* tp_basicsize */ - 0, /* tp_itemsize */ - PyCData_dealloc, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - PyCData_nohash, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - &PyCData_as_buffer, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - PyDoc_STR("XXX to be provided"), /* tp_doc */ - (traverseproc)PyCData_traverse, /* tp_traverse */ - (inquiry)PyCData_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - PyCData_methods, /* tp_methods */ - PyCData_members, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - 0, /* tp_init */ - 0, /* tp_alloc */ - 0, /* tp_new */ - 0, /* tp_free */ +static PyType_Slot pycdata_slots[] = { + {Py_tp_dealloc, PyCData_dealloc}, + {Py_tp_hash, PyCData_nohash}, + {Py_tp_doc, PyDoc_STR("XXX to be provided")}, + {Py_tp_traverse, PyCData_traverse}, + {Py_tp_clear, PyCData_clear}, + {Py_tp_methods, PyCData_methods}, + {Py_tp_members, PyCData_members}, + {Py_bf_getbuffer, PyCData_NewGetBuffer}, + {0, NULL}, }; -static int PyCData_MallocBuffer(CDataObject *obj, StgDictObject *dict) +static PyType_Spec pycdata_spec = { + .name = "_ctypes._CData", + .basicsize = sizeof(CDataObject), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE | Py_TPFLAGS_DISALLOW_INSTANTIATION), + .slots = pycdata_slots, +}; + +static int +PyCData_MallocBuffer(CDataObject *obj, StgInfo *info) { - if ((size_t)dict->size <= sizeof(obj->b_value)) { + if ((size_t)info->size <= sizeof(obj->b_value)) { /* No need to call malloc, can use the default buffer */ obj->b_ptr = (char *)&obj->b_value; /* The b_needsfree flag does not mean that we actually did @@ -2831,15 +2934,15 @@ static int PyCData_MallocBuffer(CDataObject *obj, StgDictObject *dict) /* In python 2.4, and ctypes 0.9.6, the malloc call took about 33% of the creation time for c_int(). 
*/ - obj->b_ptr = (char *)PyMem_Malloc(dict->size); + obj->b_ptr = (char *)PyMem_Malloc(info->size); if (obj->b_ptr == NULL) { PyErr_NoMemory(); return -1; } obj->b_needsfree = 1; - memset(obj->b_ptr, 0, dict->size); + memset(obj->b_ptr, 0, info->size); } - obj->b_size = dict->size; + obj->b_size = info->size; return 0; } @@ -2847,23 +2950,28 @@ PyObject * PyCData_FromBaseObj(PyObject *type, PyObject *base, Py_ssize_t index, char *adr) { CDataObject *cmem; - StgDictObject *dict; assert(PyType_Check(type)); - dict = PyType_stgdict(type); - if (!dict) { + + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromType(st, type, &info) < 0) { + return NULL; + } + if (!info) { PyErr_SetString(PyExc_TypeError, "abstract class"); return NULL; } - dict->flags |= DICTFLAG_FINAL; + + info->flags |= DICTFLAG_FINAL; cmem = (CDataObject *)((PyTypeObject *)type)->tp_alloc((PyTypeObject *)type, 0); if (cmem == NULL) { return NULL; } assert(CDataObject_Check(GLOBAL_STATE(), cmem)); - cmem->b_length = dict->length; - cmem->b_size = dict->size; + cmem->b_length = info->length; + cmem->b_size = info->size; if (base) { /* use base's buffer */ assert(CDataObject_Check(GLOBAL_STATE(), base)); cmem->b_ptr = adr; @@ -2871,11 +2979,11 @@ PyCData_FromBaseObj(PyObject *type, PyObject *base, Py_ssize_t index, char *adr) cmem->b_base = (CDataObject *)Py_NewRef(base); cmem->b_index = index; } else { /* copy contents of adr */ - if (-1 == PyCData_MallocBuffer(cmem, dict)) { + if (-1 == PyCData_MallocBuffer(cmem, info)) { Py_DECREF(cmem); return NULL; } - memcpy(cmem->b_ptr, adr, dict->size); + memcpy(cmem->b_ptr, adr, info->size); cmem->b_index = index; } return (PyObject *)cmem; @@ -2888,20 +2996,25 @@ PyObject * PyCData_AtAddress(PyObject *type, void *buf) { CDataObject *pd; - StgDictObject *dict; if (PySys_Audit("ctypes.cdata", "n", (Py_ssize_t)buf) < 0) { return NULL; } assert(PyType_Check(type)); - dict = PyType_stgdict(type); - if (!dict) { + + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromType(st, type, &info) < 0) { + return NULL; + } + if (!info) { PyErr_SetString(PyExc_TypeError, "abstract class"); return NULL; } - dict->flags |= DICTFLAG_FINAL; + + info->flags |= DICTFLAG_FINAL; pd = (CDataObject *)((PyTypeObject *)type)->tp_alloc((PyTypeObject *)type, 0); if (!pd) { @@ -2909,8 +3022,8 @@ PyCData_AtAddress(PyObject *type, void *buf) } assert(CDataObject_Check(GLOBAL_STATE(), pd)); pd->b_ptr = (char *)buf; - pd->b_length = dict->length; - pd->b_size = dict->size; + pd->b_length = info->length; + pd->b_size = info->size; return (PyObject *)pd; } @@ -2934,13 +3047,16 @@ PyObject * PyCData_get(PyObject *type, GETFUNC getfunc, PyObject *src, Py_ssize_t index, Py_ssize_t size, char *adr) { - StgDictObject *dict; if (getfunc) return getfunc(adr, size); assert(type); - dict = PyType_stgdict(type); - if (dict && dict->getfunc && !_ctypes_simple_instance(type)) - return dict->getfunc(adr, size); + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromType(st, type, &info) < 0) { + return NULL; + } + if (info && info->getfunc && !_ctypes_simple_instance(type)) + return info->getfunc(adr, size); return PyCData_FromBaseObj(type, src, index, adr); } @@ -2959,9 +3075,12 @@ _PyCData_set(CDataObject *dst, PyObject *type, SETFUNC setfunc, PyObject *value, } ctypes_state *st = GLOBAL_STATE(); if (!CDataObject_Check(st, value)) { - StgDictObject *dict = PyType_stgdict(type); - if (dict && dict->setfunc) - return dict->setfunc(ptr, value, size); + StgInfo *info; + if 
(PyStgInfo_FromType(st, type, &info) < 0) { + return NULL; + } + if (info && info->setfunc) + return info->setfunc(ptr, value, size); /* If value is a tuple, we try to call the type with the tuple and use the result! @@ -3014,11 +3133,16 @@ _PyCData_set(CDataObject *dst, PyObject *type, SETFUNC setfunc, PyObject *value, if (PyCPointerTypeObject_Check(st, type) && ArrayObject_Check(st, value)) { - StgDictObject *p1, *p2; PyObject *keep; - p1 = PyObject_stgdict(value); + + StgInfo *p1, *p2; + if (PyStgInfo_FromObject(st, value, &p1) < 0) { + return NULL; + } assert(p1); /* Cannot be NULL for array instances */ - p2 = PyType_stgdict(type); + if (PyStgInfo_FromType(st, type, &p2) < 0) { + return NULL; + } assert(p2); /* Cannot be NULL for pointer types */ if (p1->proto != p2->proto) { @@ -3086,15 +3210,19 @@ static PyObject * GenericPyCData_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { CDataObject *obj; - StgDictObject *dict; - dict = PyType_stgdict((PyObject *)type); - if (!dict) { + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromType(st, (PyObject *)type, &info) < 0) { + return NULL; + } + if (!info) { PyErr_SetString(PyExc_TypeError, "abstract class"); return NULL; } - dict->flags |= DICTFLAG_FINAL; + + info->flags |= DICTFLAG_FINAL; obj = (CDataObject *)type->tp_alloc(type, 0); if (!obj) @@ -3103,9 +3231,9 @@ GenericPyCData_new(PyTypeObject *type, PyObject *args, PyObject *kwds) obj->b_base = NULL; obj->b_index = 0; obj->b_objects = NULL; - obj->b_length = dict->length; + obj->b_length = info->length; - if (-1 == PyCData_MallocBuffer(obj, dict)) { + if (-1 == PyCData_MallocBuffer(obj, info)) { Py_DECREF(obj); return NULL; } @@ -3149,7 +3277,12 @@ PyCFuncPtr_set_restype(PyCFuncPtrObject *self, PyObject *ob, void *Py_UNUSED(ign Py_XDECREF(oldchecker); return 0; } - if (ob != Py_None && !PyType_stgdict(ob) && !PyCallable_Check(ob)) { + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromType(st, ob, &info) < 0) { + return -1; + } + if (ob != Py_None && !info && !PyCallable_Check(ob)) { PyErr_SetString(PyExc_TypeError, "restype must be a type, a callable, or None"); return -1; @@ -3168,14 +3301,17 @@ PyCFuncPtr_set_restype(PyCFuncPtrObject *self, PyObject *ob, void *Py_UNUSED(ign static PyObject * PyCFuncPtr_get_restype(PyCFuncPtrObject *self, void *Py_UNUSED(ignored)) { - StgDictObject *dict; if (self->restype) { return Py_NewRef(self->restype); } - dict = PyObject_stgdict((PyObject *)self); - assert(dict); /* Cannot be NULL for PyCFuncPtrObject instances */ - if (dict->restype) { - return Py_NewRef(dict->restype); + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromObject(st, (PyObject *)self, &info) < 0) { + return NULL; + } + assert(info); /* Cannot be NULL for PyCFuncPtrObject instances */ + if (info->restype) { + return Py_NewRef(info->restype); } else { Py_RETURN_NONE; } @@ -3203,14 +3339,17 @@ PyCFuncPtr_set_argtypes(PyCFuncPtrObject *self, PyObject *ob, void *Py_UNUSED(ig static PyObject * PyCFuncPtr_get_argtypes(PyCFuncPtrObject *self, void *Py_UNUSED(ignored)) { - StgDictObject *dict; if (self->argtypes) { return Py_NewRef(self->argtypes); } - dict = PyObject_stgdict((PyObject *)self); - assert(dict); /* Cannot be NULL for PyCFuncPtrObject instances */ - if (dict->argtypes) { - return Py_NewRef(dict->argtypes); + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromObject(st, (PyObject *)self, &info) < 0) { + return NULL; + } + assert(info); /* Cannot be NULL for PyCFuncPtrObject 
instances */ + if (info->argtypes) { + return Py_NewRef(info->argtypes); } else { Py_RETURN_NONE; } @@ -3242,7 +3381,6 @@ static PPROC FindAddress(void *handle, const char *name, PyObject *type) #else char *mangled_name; int i; - StgDictObject *dict; Py_BEGIN_ALLOW_THREADS address = (PPROC)GetProcAddress(handle, name); @@ -3253,9 +3391,13 @@ static PPROC FindAddress(void *handle, const char *name, PyObject *type) return NULL; } - dict = PyType_stgdict((PyObject *)type); - /* It should not happen that dict is NULL, but better be safe */ - if (dict==NULL || dict->flags & FUNCFLAG_CDECL) + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromType(st, (PyObject *)type, &info) < 0) { + return NULL; + } + /* It should not happen that info is NULL, but better be safe */ + if (info==NULL || info->flags & FUNCFLAG_CDECL) return address; /* for stdcall, try mangled names: @@ -3282,7 +3424,6 @@ static PPROC FindAddress(void *handle, const char *name, PyObject *type) static int _check_outarg_type(PyObject *arg, Py_ssize_t index) { - StgDictObject *dict; ctypes_state *st = GLOBAL_STATE(); if (PyCPointerTypeObject_Check(st, arg)) { @@ -3291,12 +3432,15 @@ _check_outarg_type(PyObject *arg, Py_ssize_t index) if (PyCArrayTypeObject_Check(st, arg)) { return 1; } - dict = PyType_stgdict(arg); - if (dict + StgInfo *info; + if (PyStgInfo_FromType(st, arg, &info) < 0) { + return -1; + } + if (info /* simple pointer types, c_void_p, c_wchar_p, BSTR, ... */ - && PyUnicode_Check(dict->proto) + && PyUnicode_Check(info->proto) /* We only allow c_void_p, c_char_p and c_wchar_p as a simple output parameter type */ - && (strchr("PzZ", PyUnicode_AsUTF8(dict->proto)[0]))) { + && (strchr("PzZ", PyUnicode_AsUTF8(info->proto)[0]))) { return 1; } @@ -3314,18 +3458,21 @@ static int _validate_paramflags(PyTypeObject *type, PyObject *paramflags) { Py_ssize_t i, len; - StgDictObject *dict; PyObject *argtypes; - dict = PyType_stgdict((PyObject *)type); - if (!dict) { + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromType(st, (PyObject *)type, &info) < 0) { + return -1; + } + if (!info) { PyErr_SetString(PyExc_TypeError, "abstract class"); return 0; } - argtypes = dict->argtypes; + argtypes = info->argtypes; - if (paramflags == NULL || dict->argtypes == NULL) + if (paramflags == NULL || info->argtypes == NULL) return 1; if (!PyTuple_Check(paramflags)) { @@ -3335,7 +3482,7 @@ _validate_paramflags(PyTypeObject *type, PyObject *paramflags) } len = PyTuple_GET_SIZE(paramflags); - if (len != PyTuple_GET_SIZE(dict->argtypes)) { + if (len != PyTuple_GET_SIZE(info->argtypes)) { PyErr_SetString(PyExc_ValueError, "paramflags must have the same length as argtypes"); return 0; @@ -3565,7 +3712,6 @@ PyCFuncPtr_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { PyCFuncPtrObject *self; PyObject *callable; - StgDictObject *dict; CThunkObject *thunk; if (PyTuple_GET_SIZE(args) == 0) @@ -3616,9 +3762,13 @@ PyCFuncPtr_new(PyTypeObject *type, PyObject *args, PyObject *kwds) } */ - dict = PyType_stgdict((PyObject *)type); + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromType(st, (PyObject *)type, &info) < 0) { + return NULL; + } /* XXXX Fails if we do: 'PyCFuncPtr(lambda x: x)' */ - if (!dict || !dict->argtypes) { + if (!info || !info->argtypes) { PyErr_SetString(PyExc_TypeError, "cannot construct instance of this class:" " no argtypes"); @@ -3626,9 +3776,9 @@ PyCFuncPtr_new(PyTypeObject *type, PyObject *args, PyObject *kwds) } thunk = _ctypes_alloc_callback(callable, - 
dict->argtypes, - dict->restype, - dict->flags); + info->argtypes, + info->restype, + info->flags); if (!thunk) return NULL; @@ -3737,7 +3887,6 @@ _build_callargs(PyCFuncPtrObject *self, PyObject *argtypes, { PyObject *paramflags = self->paramflags; PyObject *callargs; - StgDictObject *dict; Py_ssize_t i, len; int inargs_index = 0; /* It's a little bit difficult to determine how many arguments the @@ -3826,15 +3975,18 @@ _build_callargs(PyCFuncPtrObject *self, PyObject *argtypes, break; } ob = PyTuple_GET_ITEM(argtypes, i); - dict = PyType_stgdict(ob); - if (dict == NULL) { + StgInfo *info; + if (PyStgInfo_FromType(st, ob, &info) < 0) { + goto error; + } + if (info == NULL) { /* Cannot happen: _validate_paramflags() would not accept such an object */ PyErr_Format(PyExc_RuntimeError, - "NULL stgdict unexpected"); + "NULL stginfo unexpected"); goto error; } - if (PyUnicode_Check(dict->proto)) { + if (PyUnicode_Check(info->proto)) { PyErr_Format( PyExc_TypeError, "%s 'out' parameter must be passed as default value", @@ -3846,7 +3998,7 @@ _build_callargs(PyCFuncPtrObject *self, PyObject *argtypes, } else { /* Create an instance of the pointed-to type */ - ob = _PyObject_CallNoArgs(dict->proto); + ob = _PyObject_CallNoArgs(info->proto); } /* XXX Is the following correct any longer? @@ -3966,7 +4118,6 @@ PyCFuncPtr_call(PyCFuncPtrObject *self, PyObject *inargs, PyObject *kwds) PyObject *converters; PyObject *checker; PyObject *argtypes; - StgDictObject *dict = PyObject_stgdict((PyObject *)self); PyObject *result; PyObject *callargs; PyObject *errcheck; @@ -3979,13 +4130,19 @@ PyCFuncPtr_call(PyCFuncPtrObject *self, PyObject *inargs, PyObject *kwds) int outmask; unsigned int numretvals; - assert(dict); /* Cannot be NULL for PyCFuncPtrObject instances */ - restype = self->restype ? self->restype : dict->restype; - converters = self->converters ? self->converters : dict->converters; - checker = self->checker ? self->checker : dict->checker; - argtypes = self->argtypes ? self->argtypes : dict->argtypes; -/* later, we probably want to have an errcheck field in stgdict */ - errcheck = self->errcheck /* ? self->errcheck : dict->errcheck */; + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromObject(st, (PyObject *)self, &info) < 0) { + return NULL; + } + assert(info); /* Cannot be NULL for PyCFuncPtrObject instances */ + + restype = self->restype ? self->restype : info->restype; + converters = self->converters ? self->converters : info->converters; + checker = self->checker ? self->checker : info->checker; + argtypes = self->argtypes ? self->argtypes : info->argtypes; +/* later, we probably want to have an errcheck field in stginfo */ + errcheck = self->errcheck /* ? self->errcheck : info->errcheck */; pProc = *(void **)self->b_ptr; @@ -4033,7 +4190,7 @@ PyCFuncPtr_call(PyCFuncPtrObject *self, PyObject *inargs, PyObject *kwds) int actual = Py_SAFE_DOWNCAST(PyTuple_GET_SIZE(callargs), Py_ssize_t, int); - if ((dict->flags & FUNCFLAG_CDECL) == FUNCFLAG_CDECL) { + if ((info->flags & FUNCFLAG_CDECL) == FUNCFLAG_CDECL) { /* For cdecl functions, we allow more actual arguments than the length of the argtypes tuple. 
*/ @@ -4063,7 +4220,7 @@ PyCFuncPtr_call(PyCFuncPtrObject *self, PyObject *inargs, PyObject *kwds) piunk, self->iid, #endif - dict->flags, + info->flags, converters, restype, checker); @@ -4123,8 +4280,11 @@ PyCFuncPtr_clear(PyCFuncPtrObject *self) static void PyCFuncPtr_dealloc(PyCFuncPtrObject *self) { + PyObject_GC_UnTrack(self); PyCFuncPtr_clear(self); - Py_TYPE(self)->tp_free((PyObject *)self); + PyTypeObject *type = Py_TYPE(self); + type->tp_free((PyObject *)self); + Py_DECREF(type); } static PyObject * @@ -4152,59 +4312,26 @@ PyCFuncPtr_bool(PyCFuncPtrObject *self) ); } -static PyNumberMethods PyCFuncPtr_as_number = { - 0, /* nb_add */ - 0, /* nb_subtract */ - 0, /* nb_multiply */ - 0, /* nb_remainder */ - 0, /* nb_divmod */ - 0, /* nb_power */ - 0, /* nb_negative */ - 0, /* nb_positive */ - 0, /* nb_absolute */ - (inquiry)PyCFuncPtr_bool, /* nb_bool */ +static PyType_Slot pycfuncptr_slots[] = { + {Py_tp_dealloc, PyCFuncPtr_dealloc}, + {Py_tp_repr, PyCFuncPtr_repr}, + {Py_tp_call, PyCFuncPtr_call}, + {Py_tp_doc, PyDoc_STR("Function Pointer")}, + {Py_tp_traverse, PyCFuncPtr_traverse}, + {Py_tp_clear, PyCFuncPtr_clear}, + {Py_tp_getset, PyCFuncPtr_getsets}, + {Py_tp_new, PyCFuncPtr_new}, + {Py_bf_getbuffer, PyCData_NewGetBuffer}, + {Py_nb_bool, PyCFuncPtr_bool}, + {0, NULL}, }; -PyTypeObject PyCFuncPtr_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_ctypes.CFuncPtr", - sizeof(PyCFuncPtrObject), /* tp_basicsize */ - 0, /* tp_itemsize */ - (destructor)PyCFuncPtr_dealloc, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - (reprfunc)PyCFuncPtr_repr, /* tp_repr */ - &PyCFuncPtr_as_number, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - (ternaryfunc)PyCFuncPtr_call, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - &PyCData_as_buffer, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - PyDoc_STR("Function Pointer"), /* tp_doc */ - (traverseproc)PyCFuncPtr_traverse, /* tp_traverse */ - (inquiry)PyCFuncPtr_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - 0, /* tp_methods */ - 0, /* tp_members */ - PyCFuncPtr_getsets, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - 0, /* tp_init */ - 0, /* tp_alloc */ - PyCFuncPtr_new, /* tp_new */ - 0, /* tp_free */ +static PyType_Spec pycfuncptr_spec = { + .name = "_ctypes.CFuncPtr", + .basicsize = sizeof(PyCFuncPtrObject), + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = pycfuncptr_slots, }; /*****************************************************************/ @@ -4224,11 +4351,15 @@ _init_pos_args(PyObject *self, PyTypeObject *type, PyObject *args, PyObject *kwds, Py_ssize_t index) { - StgDictObject *dict; PyObject *fields; Py_ssize_t i; - if (PyType_stgdict((PyObject *)type->tp_base)) { + ctypes_state *st = GLOBAL_STATE(); + StgInfo *baseinfo; + if (PyStgInfo_FromType(st, (PyObject *)type->tp_base, &baseinfo) < 0) { + return -1; + } + if (baseinfo) { index = _init_pos_args(self, type->tp_base, args, kwds, index); @@ -4236,8 +4367,17 @@ _init_pos_args(PyObject *self, PyTypeObject *type, return -1; } - dict = PyType_stgdict((PyObject *)type); - fields = PyDict_GetItemWithError((PyObject *)dict, &_Py_ID(_fields_)); + StgInfo *info; + if (PyStgInfo_FromType(st, (PyObject *)type, 
&info) < 0) { + return -1; + } + assert(info); + + PyObject *attrdict = PyType_GetDict(type); + assert(attrdict); + + fields = PyDict_GetItemWithError((PyObject *)attrdict, &_Py_ID(_fields_)); + Py_CLEAR(attrdict); if (fields == NULL) { if (PyErr_Occurred()) { return -1; @@ -4246,7 +4386,7 @@ _init_pos_args(PyObject *self, PyTypeObject *type, } for (i = index; - i < dict->length && i < PyTuple_GET_SIZE(args); + i < info->length && i < PyTuple_GET_SIZE(args); ++i) { PyObject *pair = PySequence_GetItem(fields, i - index); PyObject *name, *val; @@ -4279,7 +4419,7 @@ _init_pos_args(PyObject *self, PyTypeObject *type, if (res == -1) return -1; } - return dict->length; + return info->length; } static int @@ -4316,88 +4456,34 @@ Struct_init(PyObject *self, PyObject *args, PyObject *kwds) return 0; } -static PyTypeObject Struct_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_ctypes.Structure", - sizeof(CDataObject), /* tp_basicsize */ - 0, /* tp_itemsize */ - 0, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - &PyCData_as_buffer, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - PyDoc_STR("Structure base class"), /* tp_doc */ - (traverseproc)PyCData_traverse, /* tp_traverse */ - (inquiry)PyCData_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - 0, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - Struct_init, /* tp_init */ - 0, /* tp_alloc */ - GenericPyCData_new, /* tp_new */ - 0, /* tp_free */ +static PyType_Slot pycstruct_slots[] = { + {Py_tp_doc, PyDoc_STR("Structure base class")}, + {Py_tp_init, Struct_init}, + {Py_tp_new, GenericPyCData_new}, + {Py_bf_getbuffer, PyCData_NewGetBuffer}, + {0, NULL}, +}; + +static PyType_Spec pycstruct_spec = { + .name = "_ctypes.Structure", + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = pycstruct_slots, +}; + +static PyType_Slot pycunion_slots[] = { + {Py_tp_doc, PyDoc_STR("Union base class")}, + {Py_tp_init, Struct_init}, + {Py_tp_new, GenericPyCData_new}, + {Py_bf_getbuffer, PyCData_NewGetBuffer}, + {0, NULL}, }; -static PyTypeObject Union_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_ctypes.Union", - sizeof(CDataObject), /* tp_basicsize */ - 0, /* tp_itemsize */ - 0, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - &PyCData_as_buffer, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - PyDoc_STR("Union base class"), /* tp_doc */ - (traverseproc)PyCData_traverse, /* tp_traverse */ - (inquiry)PyCData_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - 0, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - Struct_init, /* tp_init */ - 0, /* tp_alloc */ - 
GenericPyCData_new, /* tp_new */ - 0, /* tp_free */ +static PyType_Spec pycunion_spec = { + .name = "_ctypes.Union", + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = pycunion_slots, }; @@ -4431,8 +4517,6 @@ Array_item(PyObject *myself, Py_ssize_t index) { CDataObject *self = (CDataObject *)myself; Py_ssize_t offset, size; - StgDictObject *stgdict; - if (index < 0 || index >= self->b_length) { PyErr_SetString(PyExc_IndexError, @@ -4440,15 +4524,19 @@ Array_item(PyObject *myself, Py_ssize_t index) return NULL; } - stgdict = PyObject_stgdict((PyObject *)self); - assert(stgdict); /* Cannot be NULL for array instances */ + ctypes_state *st = GLOBAL_STATE(); + StgInfo *stginfo; + if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) { + return NULL; + } + /* Would it be clearer if we got the item size from - stgdict->proto's stgdict? + stginfo->proto's stginfo? */ - size = stgdict->size / stgdict->length; + size = stginfo->size / stginfo->length; offset = index * size; - return PyCData_get(stgdict->proto, stgdict->getfunc, (PyObject *)self, + return PyCData_get(stginfo->proto, stginfo->getfunc, (PyObject *)self, index, size, self->b_ptr + offset); } @@ -4467,7 +4555,6 @@ Array_subscript(PyObject *myself, PyObject *item) return Array_item(myself, i); } else if (PySlice_Check(item)) { - StgDictObject *stgdict, *itemdict; PyObject *proto; PyObject *np; Py_ssize_t start, stop, step, slicelen, i; @@ -4478,14 +4565,21 @@ Array_subscript(PyObject *myself, PyObject *item) } slicelen = PySlice_AdjustIndices(self->b_length, &start, &stop, step); - stgdict = PyObject_stgdict((PyObject *)self); - assert(stgdict); /* Cannot be NULL for array object instances */ - proto = stgdict->proto; - itemdict = PyType_stgdict(proto); - assert(itemdict); /* proto is the item type of the array, a + ctypes_state *st = GLOBAL_STATE(); + StgInfo *stginfo; + if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) { + return NULL; + } + assert(stginfo); /* Cannot be NULL for array object instances */ + proto = stginfo->proto; + StgInfo *iteminfo; + if (PyStgInfo_FromType(st, proto, &iteminfo) < 0) { + return NULL; + } + assert(iteminfo); /* proto is the item type of the array, a ctypes type, so this cannot be NULL */ - if (itemdict->getfunc == _ctypes_get_fielddesc("c")->getfunc) { + if (iteminfo->getfunc == _ctypes_get_fielddesc("c")->getfunc) { char *ptr = (char *)self->b_ptr; char *dest; @@ -4509,7 +4603,7 @@ Array_subscript(PyObject *myself, PyObject *item) PyMem_Free(dest); return np; } - if (itemdict->getfunc == _ctypes_get_fielddesc("u")->getfunc) { + if (iteminfo->getfunc == _ctypes_get_fielddesc("u")->getfunc) { wchar_t *ptr = (wchar_t *)self->b_ptr; wchar_t *dest; @@ -4564,7 +4658,6 @@ Array_ass_item(PyObject *myself, Py_ssize_t index, PyObject *value) { CDataObject *self = (CDataObject *)myself; Py_ssize_t size, offset; - StgDictObject *stgdict; char *ptr; if (value == NULL) { @@ -4573,18 +4666,23 @@ Array_ass_item(PyObject *myself, Py_ssize_t index, PyObject *value) return -1; } - stgdict = PyObject_stgdict((PyObject *)self); - assert(stgdict); /* Cannot be NULL for array object instances */ - if (index < 0 || index >= stgdict->length) { + ctypes_state *st = GLOBAL_STATE(); + StgInfo *stginfo; + if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) { + return -1; + } + assert(stginfo); /* Cannot be NULL for array object instances */ + + if (index < 0 || index >= stginfo->length) { PyErr_SetString(PyExc_IndexError, "invalid index"); return -1; } - 
size = stgdict->size / stgdict->length; + size = stginfo->size / stginfo->length; offset = index * size; ptr = self->b_ptr + offset; - return PyCData_set((PyObject *)self, stgdict->proto, stgdict->setfunc, value, + return PyCData_set((PyObject *)self, stginfo->proto, stginfo->setfunc, value, index, size, ptr); } @@ -4658,26 +4756,6 @@ static PyMethodDef Array_methods[] = { { NULL, NULL } }; -static PySequenceMethods Array_as_sequence = { - Array_length, /* sq_length; */ - 0, /* sq_concat; */ - 0, /* sq_repeat; */ - Array_item, /* sq_item; */ - 0, /* sq_slice; */ - Array_ass_item, /* sq_ass_item; */ - 0, /* sq_ass_slice; */ - 0, /* sq_contains; */ - - 0, /* sq_inplace_concat; */ - 0, /* sq_inplace_repeat; */ -}; - -static PyMappingMethods Array_as_mapping = { - Array_length, - Array_subscript, - Array_ass_subscript, -}; - PyDoc_STRVAR(array_doc, "Abstract base class for arrays.\n" "\n" @@ -4688,46 +4766,26 @@ PyDoc_STRVAR(array_doc, "reads, the resulting object is not itself an Array." ); -PyTypeObject PyCArray_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_ctypes.Array", - sizeof(CDataObject), /* tp_basicsize */ - 0, /* tp_itemsize */ - 0, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - &Array_as_sequence, /* tp_as_sequence */ - &Array_as_mapping, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - &PyCData_as_buffer, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - array_doc, /* tp_doc */ - (traverseproc)PyCData_traverse, /* tp_traverse */ - (inquiry)PyCData_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - Array_methods, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - (initproc)Array_init, /* tp_init */ - 0, /* tp_alloc */ - GenericPyCData_new, /* tp_new */ - 0, /* tp_free */ +static PyType_Slot pycarray_slots[] = { + {Py_tp_doc, (char*)array_doc}, + {Py_tp_methods, Array_methods}, + {Py_tp_init, Array_init}, + {Py_tp_new, GenericPyCData_new}, + {Py_bf_getbuffer, PyCData_NewGetBuffer}, + {Py_sq_length, Array_length}, + {Py_sq_item, Array_item}, + {Py_sq_ass_item, Array_ass_item}, + {Py_mp_length, Array_length}, + {Py_mp_subscript, Array_subscript}, + {Py_mp_ass_subscript, Array_ass_subscript}, + {0, NULL}, +}; + +static PyType_Spec pycarray_spec = { + .name = "_ctypes.Array", + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = pycarray_slots, }; PyObject * @@ -4804,16 +4862,22 @@ static int Simple_set_value(CDataObject *self, PyObject *value, void *Py_UNUSED(ignored)) { PyObject *result; - StgDictObject *dict = PyObject_stgdict((PyObject *)self); if (value == NULL) { PyErr_SetString(PyExc_TypeError, "can't delete attribute"); return -1; } - assert(dict); /* Cannot be NULL for CDataObject instances */ - assert(dict->setfunc); - result = dict->setfunc(self->b_ptr, value, dict->size); + + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromObject(st, (PyObject *)self, &info) < 0) { + return -1; + } + assert(info); /* Cannot be NULL for CDataObject instances */ + assert(info->setfunc); + + result = info->setfunc(self->b_ptr, value, info->size); if (!result) return -1; @@ -4835,11 +4899,14 @@ Simple_init(CDataObject *self, 
PyObject *args, PyObject *kw) static PyObject * Simple_get_value(CDataObject *self, void *Py_UNUSED(ignored)) { - StgDictObject *dict; - dict = PyObject_stgdict((PyObject *)self); - assert(dict); /* Cannot be NULL for CDataObject instances */ - assert(dict->getfunc); - return dict->getfunc(self->b_ptr, self->b_size); + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromObject(st, (PyObject *)self, &info) < 0) { + return NULL; + } + assert(info); /* Cannot be NULL for CDataObject instances */ + assert(info->getfunc); + return info->getfunc(self->b_ptr, self->b_size); } static PyGetSetDef Simple_getsets[] = { @@ -4854,7 +4921,7 @@ Simple_from_outparm(PyObject *self, PyObject *args) if (_ctypes_simple_instance((PyObject *)Py_TYPE(self))) { return Py_NewRef(self); } - /* call stgdict->getfunc */ + /* call stginfo->getfunc */ return Simple_get_value((CDataObject *)self, NULL); } @@ -4868,19 +4935,6 @@ static int Simple_bool(CDataObject *self) return memcmp(self->b_ptr, "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0", self->b_size); } -static PyNumberMethods Simple_as_number = { - 0, /* nb_add */ - 0, /* nb_subtract */ - 0, /* nb_multiply */ - 0, /* nb_remainder */ - 0, /* nb_divmod */ - 0, /* nb_power */ - 0, /* nb_negative */ - 0, /* nb_positive */ - 0, /* nb_absolute */ - (inquiry)Simple_bool, /* nb_bool */ -}; - /* "%s(%s)" % (self.__class__.__name__, self.value) */ static PyObject * Simple_repr(CDataObject *self) @@ -4903,48 +4957,26 @@ Simple_repr(CDataObject *self) return result; } -static PyTypeObject Simple_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_ctypes._SimpleCData", - sizeof(CDataObject), /* tp_basicsize */ - 0, /* tp_itemsize */ - 0, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - (reprfunc)&Simple_repr, /* tp_repr */ - &Simple_as_number, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - &PyCData_as_buffer, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - PyDoc_STR("XXX to be provided"), /* tp_doc */ - (traverseproc)PyCData_traverse, /* tp_traverse */ - (inquiry)PyCData_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - Simple_methods, /* tp_methods */ - 0, /* tp_members */ - Simple_getsets, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - (initproc)Simple_init, /* tp_init */ - 0, /* tp_alloc */ - GenericPyCData_new, /* tp_new */ - 0, /* tp_free */ +static PyType_Slot pycsimple_slots[] = { + {Py_tp_repr, &Simple_repr}, + {Py_tp_doc, PyDoc_STR("XXX to be provided")}, + {Py_tp_methods, Simple_methods}, + {Py_tp_getset, Simple_getsets}, + {Py_tp_init, Simple_init}, + {Py_tp_new, GenericPyCData_new}, + {Py_bf_getbuffer, PyCData_NewGetBuffer}, + {Py_nb_bool, Simple_bool}, + {0, NULL}, +}; + +static PyType_Spec pycsimple_spec = { + .name = "_ctypes._SimpleCData", + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = pycsimple_slots, }; + /******************************************************************/ /* PyCPointer_Type @@ -4955,7 +4987,6 @@ Pointer_item(PyObject *myself, Py_ssize_t index) CDataObject *self = (CDataObject *)myself; Py_ssize_t size; Py_ssize_t offset; - StgDictObject *stgdict, *itemdict; PyObject *proto; if (*(void **)self->b_ptr == NULL) { @@ 
-4964,19 +4995,27 @@ Pointer_item(PyObject *myself, Py_ssize_t index) return NULL; } - stgdict = PyObject_stgdict((PyObject *)self); - assert(stgdict); /* Cannot be NULL for pointer object instances */ + ctypes_state *st = GLOBAL_STATE(); + StgInfo *stginfo; + if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) { + return NULL; + } + assert(stginfo); /* Cannot be NULL for pointer object instances */ - proto = stgdict->proto; + proto = stginfo->proto; assert(proto); - itemdict = PyType_stgdict(proto); - assert(itemdict); /* proto is the item type of the pointer, a ctypes + + StgInfo *iteminfo; + if (PyStgInfo_FromType(st, proto, &iteminfo) < 0) { + return NULL; + } + assert(iteminfo); /* proto is the item type of the pointer, a ctypes type, so this cannot be NULL */ - size = itemdict->size; - offset = index * itemdict->size; + size = iteminfo->size; + offset = index * iteminfo->size; - return PyCData_get(proto, stgdict->getfunc, (PyObject *)self, + return PyCData_get(proto, stginfo->getfunc, (PyObject *)self, index, size, (*(char **)self->b_ptr) + offset); } @@ -4986,7 +5025,6 @@ Pointer_ass_item(PyObject *myself, Py_ssize_t index, PyObject *value) CDataObject *self = (CDataObject *)myself; Py_ssize_t size; Py_ssize_t offset; - StgDictObject *stgdict, *itemdict; PyObject *proto; if (value == NULL) { @@ -5001,37 +5039,47 @@ Pointer_ass_item(PyObject *myself, Py_ssize_t index, PyObject *value) return -1; } - stgdict = PyObject_stgdict((PyObject *)self); - assert(stgdict); /* Cannot be NULL for pointer instances */ + ctypes_state *st = GLOBAL_STATE(); + StgInfo *stginfo; + if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) { + return -1; + } + assert(stginfo); /* Cannot be NULL for pointer instances */ - proto = stgdict->proto; + proto = stginfo->proto; assert(proto); - itemdict = PyType_stgdict(proto); - assert(itemdict); /* Cannot be NULL because the itemtype of a pointer + StgInfo *iteminfo; + if (PyStgInfo_FromType(st, proto, &iteminfo) < 0) { + return -1; + } + assert(iteminfo); /* Cannot be NULL because the itemtype of a pointer is always a ctypes type */ - size = itemdict->size; - offset = index * itemdict->size; + size = iteminfo->size; + offset = index * iteminfo->size; - return PyCData_set((PyObject *)self, proto, stgdict->setfunc, value, + return PyCData_set((PyObject *)self, proto, stginfo->setfunc, value, index, size, (*(char **)self->b_ptr) + offset); } static PyObject * Pointer_get_contents(CDataObject *self, void *closure) { - StgDictObject *stgdict; - if (*(void **)self->b_ptr == NULL) { PyErr_SetString(PyExc_ValueError, "NULL pointer access"); return NULL; } - stgdict = PyObject_stgdict((PyObject *)self); - assert(stgdict); /* Cannot be NULL for pointer instances */ - return PyCData_FromBaseObj(stgdict->proto, + ctypes_state *st = GLOBAL_STATE(); + StgInfo *stginfo; + if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) { + return NULL; + } + assert(stginfo); /* Cannot be NULL for pointer instances */ + + return PyCData_FromBaseObj(stginfo->proto, (PyObject *)self, 0, *(void **)self->b_ptr); } @@ -5039,7 +5087,6 @@ Pointer_get_contents(CDataObject *self, void *closure) static int Pointer_set_contents(CDataObject *self, PyObject *value, void *closure) { - StgDictObject *stgdict; CDataObject *dst; PyObject *keep; @@ -5048,18 +5095,21 @@ Pointer_set_contents(CDataObject *self, PyObject *value, void *closure) "Pointer does not support item deletion"); return -1; } - stgdict = PyObject_stgdict((PyObject *)self); - assert(stgdict); /* Cannot be NULL 
for pointer instances */ - assert(stgdict->proto); ctypes_state *st = GLOBAL_STATE(); + StgInfo *stginfo; + if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) { + return -1; + } + assert(stginfo); /* Cannot be NULL for pointer instances */ + assert(stginfo->proto); if (!CDataObject_Check(st, value)) { - int res = PyObject_IsInstance(value, stgdict->proto); + int res = PyObject_IsInstance(value, stginfo->proto); if (res == -1) return -1; if (!res) { PyErr_Format(PyExc_TypeError, "expected %s instead of %s", - ((PyTypeObject *)(stgdict->proto))->tp_name, + ((PyTypeObject *)(stginfo->proto))->tp_name, Py_TYPE(value)->tp_name); return -1; } @@ -5107,8 +5157,12 @@ Pointer_init(CDataObject *self, PyObject *args, PyObject *kw) static PyObject * Pointer_new(PyTypeObject *type, PyObject *args, PyObject *kw) { - StgDictObject *dict = PyType_stgdict((PyObject *)type); - if (!dict || !dict->proto) { + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromType(st, (PyObject *)type, &info) < 0) { + return NULL; + } + if (!info || !info->proto) { PyErr_SetString(PyExc_TypeError, "Cannot create instance: has no _type_"); return NULL; @@ -5130,7 +5184,6 @@ Pointer_subscript(PyObject *myself, PyObject *item) PySliceObject *slice = (PySliceObject *)item; Py_ssize_t start, stop, step; PyObject *np; - StgDictObject *stgdict, *itemdict; PyObject *proto; Py_ssize_t i, len; size_t cur; @@ -5184,13 +5237,20 @@ Pointer_subscript(PyObject *myself, PyObject *item) else len = (stop - start + 1) / step + 1; - stgdict = PyObject_stgdict((PyObject *)self); - assert(stgdict); /* Cannot be NULL for pointer instances */ - proto = stgdict->proto; + ctypes_state *st = GLOBAL_STATE(); + StgInfo *stginfo; + if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) { + return NULL; + } + assert(stginfo); /* Cannot be NULL for pointer instances */ + proto = stginfo->proto; assert(proto); - itemdict = PyType_stgdict(proto); - assert(itemdict); - if (itemdict->getfunc == _ctypes_get_fielddesc("c")->getfunc) { + StgInfo *iteminfo; + if (PyStgInfo_FromType(st, proto, &iteminfo) < 0) { + return NULL; + } + assert(iteminfo); + if (iteminfo->getfunc == _ctypes_get_fielddesc("c")->getfunc) { char *ptr = *(char **)self->b_ptr; char *dest; @@ -5210,7 +5270,7 @@ Pointer_subscript(PyObject *myself, PyObject *item) PyMem_Free(dest); return np; } - if (itemdict->getfunc == _ctypes_get_fielddesc("u")->getfunc) { + if (iteminfo->getfunc == _ctypes_get_fielddesc("u")->getfunc) { wchar_t *ptr = *(wchar_t **)self->b_ptr; wchar_t *dest; @@ -5248,87 +5308,32 @@ Pointer_subscript(PyObject *myself, PyObject *item) } } -static PySequenceMethods Pointer_as_sequence = { - 0, /* inquiry sq_length; */ - 0, /* binaryfunc sq_concat; */ - 0, /* intargfunc sq_repeat; */ - Pointer_item, /* intargfunc sq_item; */ - 0, /* intintargfunc sq_slice; */ - Pointer_ass_item, /* intobjargproc sq_ass_item; */ - 0, /* intintobjargproc sq_ass_slice; */ - 0, /* objobjproc sq_contains; */ - /* Added in release 2.0 */ - 0, /* binaryfunc sq_inplace_concat; */ - 0, /* intargfunc sq_inplace_repeat; */ -}; - -static PyMappingMethods Pointer_as_mapping = { - 0, - Pointer_subscript, -}; - static int Pointer_bool(CDataObject *self) { return (*(void **)self->b_ptr != NULL); } -static PyNumberMethods Pointer_as_number = { - 0, /* nb_add */ - 0, /* nb_subtract */ - 0, /* nb_multiply */ - 0, /* nb_remainder */ - 0, /* nb_divmod */ - 0, /* nb_power */ - 0, /* nb_negative */ - 0, /* nb_positive */ - 0, /* nb_absolute */ - (inquiry)Pointer_bool, /* nb_bool */ 
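
Editor's aside (illustrative sketch, not part of the patch): each static `PyTypeObject` removed in these hunks is replaced by a `PyType_Slot` array plus a `PyType_Spec`, with the old `tp_as_number` / `tp_as_sequence` / `tp_as_mapping` tables flattened into individual slot entries. A minimal sketch of that conversion, using hypothetical names (`ExampleObject`, `example_bool`, `example_item`):

```c
/* Hypothetical type: shows how protocol tables collapse into slots. */
typedef struct { PyObject_HEAD } ExampleObject;

static int
example_bool(PyObject *self)
{
    return 1;                  /* was the nb_bool entry in PyNumberMethods */
}

static PyObject *
example_item(PyObject *self, Py_ssize_t index)
{
    Py_RETURN_NONE;            /* was the sq_item entry in PySequenceMethods */
}

static PyType_Slot example_slots[] = {
    {Py_nb_bool, example_bool},
    {Py_sq_item, example_item},
    {0, NULL},
};

static PyType_Spec example_spec = {
    .name = "_example.Example",
    .basicsize = sizeof(ExampleObject),
    .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE |
              Py_TPFLAGS_IMMUTABLETYPE),
    .slots = example_slots,
};
```

A spec like this is then instantiated as a heap type at module init -- in this patch via the reworked `CREATE_TYPE` / `MOD_ADD_TYPE` macros further down, which hand a metaclass and base to `PyType_FromMetaclass()`.
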
+static PyType_Slot pycpointer_slots[] = { + {Py_tp_doc, PyDoc_STR("XXX to be provided")}, + {Py_tp_getset, Pointer_getsets}, + {Py_tp_init, Pointer_init}, + {Py_tp_new, Pointer_new}, + {Py_bf_getbuffer, PyCData_NewGetBuffer}, + {Py_nb_bool, Pointer_bool}, + {Py_mp_subscript, Pointer_subscript}, + {Py_sq_item, Pointer_item}, + {Py_sq_ass_item, Pointer_ass_item}, + {0, NULL}, }; -PyTypeObject PyCPointer_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_ctypes._Pointer", - sizeof(CDataObject), /* tp_basicsize */ - 0, /* tp_itemsize */ - 0, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - &Pointer_as_number, /* tp_as_number */ - &Pointer_as_sequence, /* tp_as_sequence */ - &Pointer_as_mapping, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - &PyCData_as_buffer, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - PyDoc_STR("XXX to be provided"), /* tp_doc */ - (traverseproc)PyCData_traverse, /* tp_traverse */ - (inquiry)PyCData_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - 0, /* tp_methods */ - 0, /* tp_members */ - Pointer_getsets, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - (initproc)Pointer_init, /* tp_init */ - 0, /* tp_alloc */ - Pointer_new, /* tp_new */ - 0, /* tp_free */ +static PyType_Spec pycpointer_spec = { + .name = "_ctypes._Pointer", + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = pycpointer_slots, }; - /******************************************************************/ /* * Module initialization. @@ -5433,7 +5438,6 @@ string_at(const char *ptr, int size) static int cast_check_pointertype(PyObject *arg) { - StgDictObject *dict; ctypes_state *st = GLOBAL_STATE(); if (PyCPointerTypeObject_Check(st, arg)) { @@ -5442,10 +5446,13 @@ cast_check_pointertype(PyObject *arg) if (PyCFuncPtrTypeObject_Check(st, arg)) { return 1; } - dict = PyType_stgdict(arg); - if (dict != NULL && dict->proto != NULL) { - if (PyUnicode_Check(dict->proto) - && (strchr("sPzUZXO", PyUnicode_AsUTF8(dict->proto)[0]))) { + StgInfo *info; + if (PyStgInfo_FromType(st, arg, &info) < 0) { + return 0; + } + if (info != NULL && info->proto != NULL) { + if (PyUnicode_Check(info->proto) + && (strchr("sPzUZXO", PyUnicode_AsUTF8(info->proto)[0]))) { /* simple pointer types, c_void_p, c_wchar_p, BSTR, ... 
*/ return 1; } @@ -5544,26 +5551,8 @@ _ctypes_add_types(PyObject *mod) if (PyType_Ready(TYPE) < 0) { \ return -1; \ } - -#define TYPE_READY_BASE(TYPE_EXPR, TP_BASE) \ - do { \ - PyTypeObject *type = (TYPE_EXPR); \ - type->tp_base = (TP_BASE); \ - TYPE_READY(type); \ - } while (0) - -#define MOD_ADD_TYPE(TYPE_EXPR, TP_TYPE, TP_BASE) \ - do { \ - PyTypeObject *type = (TYPE_EXPR); \ - Py_SET_TYPE(type, TP_TYPE); \ - type->tp_base = TP_BASE; \ - if (PyModule_AddType(mod, type) < 0) { \ - return -1; \ - } \ - } while (0) - -#define CREATE_TYPE(MOD, TP, SPEC, BASE) do { \ - PyObject *type = PyType_FromMetaclass(NULL, MOD, SPEC, \ +#define CREATE_TYPE(TP, SPEC, META, BASE) do { \ + PyObject *type = PyType_FromMetaclass(META, mod, SPEC, \ (PyObject *)BASE); \ if (type == NULL) { \ return -1; \ @@ -5571,67 +5560,82 @@ _ctypes_add_types(PyObject *mod) TP = (PyTypeObject *)type; \ } while (0) +#define MOD_ADD_TYPE(TP, SPEC, META, BASE) do { \ + CREATE_TYPE(TP, SPEC, META, BASE); \ + if (PyModule_AddType(mod, (PyTypeObject *)(TP)) < 0) { \ + return -1; \ + } \ +} while (0) + ctypes_state *st = GLOBAL_STATE(); /* Note: ob_type is the metatype (the 'type'), defaults to PyType_Type, tp_base is the base type, defaults to 'object' aka PyBaseObject_Type. */ - CREATE_TYPE(mod, st->PyCArg_Type, &carg_spec, NULL); - CREATE_TYPE(mod, st->PyCThunk_Type, &cthunk_spec, NULL); - TYPE_READY(st->PyCData_Type); - /* StgDict is derived from PyDict_Type */ - TYPE_READY_BASE(st->PyCStgDict_Type, &PyDict_Type); + CREATE_TYPE(st->PyCArg_Type, &carg_spec, NULL, NULL); + CREATE_TYPE(st->PyCThunk_Type, &cthunk_spec, NULL, NULL); + CREATE_TYPE(st->PyCData_Type, &pycdata_spec, NULL, NULL); + + // Common Metaclass + CREATE_TYPE(st->PyCType_Type, &pyctype_type_spec, + NULL, &PyType_Type); /************************************************* * * Metaclasses */ - CREATE_TYPE(mod, st->PyCStructType_Type, &pycstruct_type_spec, - &PyType_Type); - CREATE_TYPE(mod, st->UnionType_Type, &union_type_spec, &PyType_Type); - CREATE_TYPE(mod, st->PyCPointerType_Type, &pycpointer_type_spec, - &PyType_Type); - CREATE_TYPE(mod, st->PyCArrayType_Type, &pycarray_type_spec, - &PyType_Type); - CREATE_TYPE(mod, st->PyCSimpleType_Type, &pycsimple_type_spec, - &PyType_Type); - CREATE_TYPE(mod, st->PyCFuncPtrType_Type, &pycfuncptr_type_spec, - &PyType_Type); + CREATE_TYPE(st->PyCStructType_Type, &pycstruct_type_spec, + NULL, st->PyCType_Type); + CREATE_TYPE(st->UnionType_Type, &union_type_spec, + NULL, st->PyCType_Type); + CREATE_TYPE(st->PyCPointerType_Type, &pycpointer_type_spec, + NULL, st->PyCType_Type); + CREATE_TYPE(st->PyCArrayType_Type, &pycarray_type_spec, + NULL, st->PyCType_Type); + CREATE_TYPE(st->PyCSimpleType_Type, &pycsimple_type_spec, + NULL, st->PyCType_Type); + CREATE_TYPE(st->PyCFuncPtrType_Type, &pycfuncptr_type_spec, + NULL, st->PyCType_Type); /************************************************* * * Classes using a custom metaclass */ - MOD_ADD_TYPE(st->Struct_Type, st->PyCStructType_Type, st->PyCData_Type); - MOD_ADD_TYPE(st->Union_Type, st->UnionType_Type, st->PyCData_Type); - MOD_ADD_TYPE(st->PyCPointer_Type, st->PyCPointerType_Type, st->PyCData_Type); - MOD_ADD_TYPE(st->PyCArray_Type, st->PyCArrayType_Type, st->PyCData_Type); - MOD_ADD_TYPE(st->Simple_Type, st->PyCSimpleType_Type, st->PyCData_Type); - MOD_ADD_TYPE(st->PyCFuncPtr_Type, st->PyCFuncPtrType_Type, st->PyCData_Type); + MOD_ADD_TYPE(st->Struct_Type, &pycstruct_spec, + st->PyCStructType_Type, st->PyCData_Type); + MOD_ADD_TYPE(st->Union_Type, &pycunion_spec, + 
st->UnionType_Type, st->PyCData_Type); + MOD_ADD_TYPE(st->PyCPointer_Type, &pycpointer_spec, + st->PyCPointerType_Type, st->PyCData_Type); + MOD_ADD_TYPE(st->PyCArray_Type, &pycarray_spec, + st->PyCArrayType_Type, st->PyCData_Type); + MOD_ADD_TYPE(st->Simple_Type, &pycsimple_spec, + st->PyCSimpleType_Type, st->PyCData_Type); + MOD_ADD_TYPE(st->PyCFuncPtr_Type, &pycfuncptr_spec, + st->PyCFuncPtrType_Type, st->PyCData_Type); /************************************************* * * Simple classes */ - CREATE_TYPE(mod, st->PyCField_Type, &cfield_spec, NULL); + CREATE_TYPE(st->PyCField_Type, &cfield_spec, NULL, NULL); /************************************************* * * Other stuff */ - CREATE_TYPE(mod, st->DictRemover_Type, &dictremover_spec, NULL); - CREATE_TYPE(mod, st->StructParam_Type, &structparam_spec, NULL); + CREATE_TYPE(st->DictRemover_Type, &dictremover_spec, NULL, NULL); + CREATE_TYPE(st->StructParam_Type, &structparam_spec, NULL, NULL); #ifdef MS_WIN32 - CREATE_TYPE(mod, st->PyComError_Type, &comerror_spec, PyExc_Exception); + CREATE_TYPE(st->PyComError_Type, &comerror_spec, NULL, PyExc_Exception); #endif #undef TYPE_READY -#undef TYPE_READY_BASE #undef MOD_ADD_TYPE #undef CREATE_TYPE return 0; diff --git a/Modules/_ctypes/callbacks.c b/Modules/_ctypes/callbacks.c index f70479435915ff..08d068e47ee2bf 100644 --- a/Modules/_ctypes/callbacks.c +++ b/Modules/_ctypes/callbacks.c @@ -109,10 +109,14 @@ PrintError(const char *msg, ...) * slower. */ static void -TryAddRef(StgDictObject *dict, CDataObject *obj) +TryAddRef(PyObject *cnv, CDataObject *obj) { IUnknown *punk; - int r = PyDict_Contains((PyObject *)dict, &_Py_ID(_needs_com_addref_)); + PyObject *attrdict = _PyType_GetDict((PyTypeObject *)cnv); + if (!attrdict) { + return; + } + int r = PyDict_Contains(attrdict, &_Py_ID(_needs_com_addref_)); if (r <= 0) { if (r < 0) { PrintError("getting _needs_com_addref_"); @@ -154,22 +158,26 @@ static void _CallPythonObject(void *mem, ctypes_state *st = GLOBAL_STATE(); for (i = 0; i < nargs; i++) { PyObject *cnv = cnvs[i]; // borrowed ref - StgDictObject *dict; - dict = PyType_stgdict(cnv); - if (dict && dict->getfunc && !_ctypes_simple_instance(cnv)) { - PyObject *v = dict->getfunc(*pArgs, dict->size); + StgInfo *info; + if (PyStgInfo_FromType(st, cnv, &info) < 0) { + goto Done; + } + + if (info && info->getfunc && !_ctypes_simple_instance(cnv)) { + PyObject *v = info->getfunc(*pArgs, info->size); if (!v) { PrintError("create argument %zd:\n", i); goto Done; } args[i] = v; /* XXX XXX XX - We have the problem that c_byte or c_short have dict->size of + We have the problem that c_byte or c_short have info->size of 1 resp. 4, but these parameters are pushed as sizeof(int) bytes. BTW, the same problem occurs when they are pushed as parameters */ - } else if (dict) { + } + else if (info) { /* Hm, shouldn't we use PyCData_AtAddress() or something like that instead? 
*/ CDataObject *obj = (CDataObject *)_PyObject_CallNoArgs(cnv); if (!obj) { @@ -181,10 +189,10 @@ static void _CallPythonObject(void *mem, PrintError("unexpected result of create argument %zd:\n", i); goto Done; } - memcpy(obj->b_ptr, *pArgs, dict->size); + memcpy(obj->b_ptr, *pArgs, info->size); args[i] = (PyObject *)obj; #ifdef MS_WIN32 - TryAddRef(dict, obj); + TryAddRef(cnv, obj); #endif } else { PyErr_SetString(PyExc_TypeError, @@ -348,10 +356,8 @@ CThunkObject *_ctypes_alloc_callback(PyObject *callable, if (p == NULL) return NULL; -#ifdef Py_DEBUG ctypes_state *st = GLOBAL_STATE(); assert(CThunk_CheckExact(st, (PyObject *)p)); -#endif p->pcl_write = Py_ffi_closure_alloc(sizeof(ffi_closure), &p->pcl_exec); if (p->pcl_write == NULL) { @@ -372,14 +378,18 @@ CThunkObject *_ctypes_alloc_callback(PyObject *callable, p->setfunc = NULL; p->ffi_restype = &ffi_type_void; } else { - StgDictObject *dict = PyType_stgdict(restype); - if (dict == NULL || dict->setfunc == NULL) { + StgInfo *info; + if (PyStgInfo_FromType(st, restype, &info) < 0) { + goto error; + } + + if (info == NULL || info->setfunc == NULL) { PyErr_SetString(PyExc_TypeError, "invalid result type for callback function"); goto error; } - p->setfunc = dict->setfunc; - p->ffi_restype = &dict->ffi_type_pointer; + p->setfunc = info->setfunc; + p->ffi_restype = &info->ffi_type_pointer; } cc = FFI_DEFAULT_ABI; diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c index 97d1dbaae03d4f..6ebbb64d61b07a 100644 --- a/Modules/_ctypes/callproc.c +++ b/Modules/_ctypes/callproc.c @@ -664,15 +664,20 @@ struct argument { */ static int ConvParam(PyObject *obj, Py_ssize_t index, struct argument *pa) { - StgDictObject *dict; pa->keep = NULL; /* so we cannot forget it later */ + ctypes_state *st = GLOBAL_STATE(); - dict = PyObject_stgdict(obj); - if (dict) { + StgInfo *info; + int result = PyStgInfo_FromObject(st, obj, &info); + if (result < 0) { + return -1; + } + if (info) { + assert(info); PyCArgObject *carg; - assert(dict->paramfunc); - /* If it has an stgdict, it is a CDataObject */ - carg = dict->paramfunc((CDataObject *)obj); + assert(info->paramfunc); + /* If it has an stginfo, it is a CDataObject */ + carg = info->paramfunc((CDataObject *)obj); if (carg == NULL) return -1; pa->ffi_type = carg->pffi_type; @@ -681,7 +686,6 @@ static int ConvParam(PyObject *obj, Py_ssize_t index, struct argument *pa) return 0; } - ctypes_state *st = GLOBAL_STATE(); if (PyCArg_CheckExact(st, obj)) { PyCArgObject *carg = (PyCArgObject *)obj; pa->ffi_type = carg->pffi_type; @@ -778,26 +782,34 @@ int can_return_struct_as_sint64(size_t s) #endif +// returns NULL with exception set on error ffi_type *_ctypes_get_ffi_type(PyObject *obj) { - StgDictObject *dict; - if (obj == NULL) + if (obj == NULL) { return &ffi_type_sint; - dict = PyType_stgdict(obj); - if (dict == NULL) + } + + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromType(st, obj, &info) < 0) { + return NULL; + } + + if (info == NULL) { return &ffi_type_sint; + } #if defined(MS_WIN32) && !defined(_WIN32_WCE) /* This little trick works correctly with MSVC. 
It returns small structures in registers */ - if (dict->ffi_type_pointer.type == FFI_TYPE_STRUCT) { - if (can_return_struct_as_int(dict->ffi_type_pointer.size)) + if (info->ffi_type_pointer.type == FFI_TYPE_STRUCT) { + if (can_return_struct_as_int(info->ffi_type_pointer.size)) return &ffi_type_sint32; - else if (can_return_struct_as_sint64 (dict->ffi_type_pointer.size)) + else if (can_return_struct_as_sint64 (info->ffi_type_pointer.size)) return &ffi_type_sint64; } #endif - return &dict->ffi_type_pointer; + return &info->ffi_type_pointer; } @@ -983,7 +995,6 @@ static int _call_function_pointer(int flags, */ static PyObject *GetResult(PyObject *restype, void *result, PyObject *checker) { - StgDictObject *dict; PyObject *retval, *v; if (restype == NULL) @@ -993,17 +1004,22 @@ static PyObject *GetResult(PyObject *restype, void *result, PyObject *checker) Py_RETURN_NONE; } - dict = PyType_stgdict(restype); - if (dict == NULL) + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromType(st, restype, &info) < 0) { + return NULL; + } + if (info == NULL) { return PyObject_CallFunction(restype, "i", *(int *)result); + } - if (dict->getfunc && !_ctypes_simple_instance(restype)) { - retval = dict->getfunc(result, dict->size); + if (info->getfunc && !_ctypes_simple_instance(restype)) { + retval = info->getfunc(result, info->size); /* If restype is py_object (detected by comparing getfunc with O_get), we have to call Py_DECREF because O_get has already called Py_INCREF. */ - if (dict->getfunc == _ctypes_get_fielddesc("O")->getfunc) { + if (info->getfunc == _ctypes_get_fielddesc("O")->getfunc) { Py_DECREF(retval); } } else @@ -1240,6 +1256,9 @@ PyObject *_ctypes_callproc(PPROC pProc, } else { rtype = _ctypes_get_ffi_type(restype); } + if (!rtype) { + goto cleanup; + } resbuf = alloca(max(rtype->size, sizeof(ffi_arg))); @@ -1683,13 +1702,16 @@ PyDoc_STRVAR(sizeof_doc, static PyObject * sizeof_func(PyObject *self, PyObject *obj) { - StgDictObject *dict; + ctypes_state *st = GLOBAL_STATE(); - dict = PyType_stgdict(obj); - if (dict) { - return PyLong_FromSsize_t(dict->size); + StgInfo *info; + if (PyStgInfo_FromType(st, obj, &info) < 0) { + return NULL; } - ctypes_state *st = GLOBAL_STATE(); + if (info) { + return PyLong_FromSsize_t(info->size); + } + if (CDataObject_Check(st, obj)) { return PyLong_FromSsize_t(((CDataObject *)obj)->b_size); } @@ -1706,16 +1728,14 @@ PyDoc_STRVAR(alignment_doc, static PyObject * align_func(PyObject *self, PyObject *obj) { - StgDictObject *dict; - - dict = PyType_stgdict(obj); - if (dict) - return PyLong_FromSsize_t(dict->align); - - dict = PyObject_stgdict(obj); - if (dict) - return PyLong_FromSsize_t(dict->align); - + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromAny(st, obj, &info) < 0) { + return NULL; + } + if (info) { + return PyLong_FromSsize_t(info->align); + } PyErr_SetString(PyExc_TypeError, "no alignment info"); return NULL; @@ -1824,7 +1844,6 @@ static PyObject * resize(PyObject *self, PyObject *args) { CDataObject *obj; - StgDictObject *dict; Py_ssize_t size; if (!PyArg_ParseTuple(args, @@ -1832,16 +1851,21 @@ resize(PyObject *self, PyObject *args) &obj, &size)) return NULL; - dict = PyObject_stgdict((PyObject *)obj); - if (dict == NULL) { + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + int result = PyStgInfo_FromObject(st, (PyObject *)obj, &info); + if (result < 0) { + return NULL; + } + if (info == NULL) { PyErr_SetString(PyExc_TypeError, "expected ctypes instance"); return NULL; } - if (size < dict->size) { + 
if (size < info->size) { PyErr_Format(PyExc_ValueError, "minimum size is %zd", - dict->size); + info->size); return NULL; } if (obj->b_needsfree == 0) { @@ -2004,28 +2028,30 @@ create_pointer_inst(PyObject *module, PyObject *arg) static PyObject * buffer_info(PyObject *self, PyObject *arg) { - StgDictObject *dict = PyType_stgdict(arg); PyObject *shape; Py_ssize_t i; - if (dict == NULL) - dict = PyObject_stgdict(arg); - if (dict == NULL) { + ctypes_state *st = GLOBAL_STATE(); + StgInfo *info; + if (PyStgInfo_FromAny(st, arg, &info) < 0) { + return NULL; + } + if (info == NULL) { PyErr_SetString(PyExc_TypeError, "not a ctypes type or object"); return NULL; } - shape = PyTuple_New(dict->ndim); + shape = PyTuple_New(info->ndim); if (shape == NULL) return NULL; - for (i = 0; i < (int)dict->ndim; ++i) - PyTuple_SET_ITEM(shape, i, PyLong_FromSsize_t(dict->shape[i])); + for (i = 0; i < (int)info->ndim; ++i) + PyTuple_SET_ITEM(shape, i, PyLong_FromSsize_t(info->shape[i])); if (PyErr_Occurred()) { Py_DECREF(shape); return NULL; } - return Py_BuildValue("siN", dict->format, dict->ndim, shape); + return Py_BuildValue("siN", info->format, info->ndim, shape); } diff --git a/Modules/_ctypes/cfield.c b/Modules/_ctypes/cfield.c index 1d5b0b14bc39e5..16b66382bfe33f 100644 --- a/Modules/_ctypes/cfield.c +++ b/Modules/_ctypes/cfield.c @@ -54,7 +54,6 @@ PyCField_FromDesc(PyObject *desc, Py_ssize_t index, Py_ssize_t size, align; SETFUNC setfunc = NULL; GETFUNC getfunc = NULL; - StgDictObject *dict; int fieldtype; #define NO_BITFIELD 0 #define NEW_BITFIELD 1 @@ -66,21 +65,27 @@ PyCField_FromDesc(PyObject *desc, Py_ssize_t index, self = (CFieldObject *)tp->tp_alloc(tp, 0); if (self == NULL) return NULL; - dict = PyType_stgdict(desc); - if (!dict) { + + StgInfo *info; + if (PyStgInfo_FromType(st, desc, &info) < 0) { + Py_DECREF(self); + return NULL; + } + if (!info) { PyErr_SetString(PyExc_TypeError, "has no _stginfo_"); Py_DECREF(self); return NULL; } + if (bitsize /* this is a bitfield request */ && *pfield_size /* we have a bitfield open */ #ifdef MS_WIN32 /* MSVC, GCC with -mms-bitfields */ - && dict->size * 8 == *pfield_size + && info->size * 8 == *pfield_size #else /* GCC */ - && dict->size * 8 <= *pfield_size + && info->size * 8 <= *pfield_size #endif && (*pbitofs + bitsize) <= *pfield_size) { /* continue bit field */ @@ -88,8 +93,8 @@ PyCField_FromDesc(PyObject *desc, Py_ssize_t index, #ifndef MS_WIN32 } else if (bitsize /* this is a bitfield request */ && *pfield_size /* we have a bitfield open */ - && dict->size * 8 >= *pfield_size - && (*pbitofs + bitsize) <= dict->size * 8) { + && info->size * 8 >= *pfield_size + && (*pbitofs + bitsize) <= info->size * 8) { /* expand bit field */ fieldtype = EXPAND_BITFIELD; #endif @@ -97,7 +102,7 @@ PyCField_FromDesc(PyObject *desc, Py_ssize_t index, /* start new bitfield */ fieldtype = NEW_BITFIELD; *pbitofs = 0; - *pfield_size = dict->size * 8; + *pfield_size = info->size * 8; } else { /* not a bit field */ fieldtype = NO_BITFIELD; @@ -105,29 +110,37 @@ PyCField_FromDesc(PyObject *desc, Py_ssize_t index, *pfield_size = 0; } - size = dict->size; + size = info->size; proto = desc; /* Field descriptors for 'c_char * n' are be scpecial cased to return a Python string instead of an Array object instance... 
*/ if (PyCArrayTypeObject_Check(st, proto)) { - StgDictObject *adict = PyType_stgdict(proto); - StgDictObject *idict; - if (adict && adict->proto) { - idict = PyType_stgdict(adict->proto); - if (!idict) { + StgInfo *ainfo; + if (PyStgInfo_FromType(st, proto, &ainfo) < 0) { + Py_DECREF(self); + return NULL; + } + + if (ainfo && ainfo->proto) { + StgInfo *iinfo; + if (PyStgInfo_FromType(st, ainfo->proto, &iinfo) < 0) { + Py_DECREF(self); + return NULL; + } + if (!iinfo) { PyErr_SetString(PyExc_TypeError, "has no _stginfo_"); Py_DECREF(self); return NULL; } - if (idict->getfunc == _ctypes_get_fielddesc("c")->getfunc) { + if (iinfo->getfunc == _ctypes_get_fielddesc("c")->getfunc) { struct fielddesc *fd = _ctypes_get_fielddesc("s"); getfunc = fd->getfunc; setfunc = fd->setfunc; } - if (idict->getfunc == _ctypes_get_fielddesc("u")->getfunc) { + if (iinfo->getfunc == _ctypes_get_fielddesc("u")->getfunc) { struct fielddesc *fd = _ctypes_get_fielddesc("U"); getfunc = fd->getfunc; setfunc = fd->setfunc; @@ -151,9 +164,9 @@ PyCField_FromDesc(PyObject *desc, Py_ssize_t index, /* fall through */ case NO_BITFIELD: if (pack) - align = min(pack, dict->align); + align = min(pack, info->align); else - align = dict->align; + align = info->align; if (align && *poffset % align) { Py_ssize_t delta = align - (*poffset % align); *psize += delta; @@ -171,10 +184,10 @@ PyCField_FromDesc(PyObject *desc, Py_ssize_t index, break; case EXPAND_BITFIELD: - *poffset += dict->size - *pfield_size/8; - *psize += dict->size - *pfield_size/8; + *poffset += info->size - *pfield_size/8; + *psize += info->size - *pfield_size/8; - *pfield_size = dict->size * 8; + *pfield_size = info->size * 8; if (big_endian) self->size = (bitsize << 16) + *pfield_size - *pbitofs - bitsize; diff --git a/Modules/_ctypes/ctypes.h b/Modules/_ctypes/ctypes.h index 02f48a9ed55843..d7d725a4fdf669 100644 --- a/Modules/_ctypes/ctypes.h +++ b/Modules/_ctypes/ctypes.h @@ -41,7 +41,6 @@ typedef struct { PyTypeObject *PyCArg_Type; PyTypeObject *PyCField_Type; PyTypeObject *PyCThunk_Type; - PyTypeObject *PyCStgDict_Type; PyTypeObject *StructParam_Type; PyTypeObject *PyCStructType_Type; PyTypeObject *UnionType_Type; @@ -59,6 +58,7 @@ typedef struct { #ifdef MS_WIN32 PyTypeObject *PyComError_Type; #endif + PyTypeObject *PyCType_Type; } ctypes_state; extern ctypes_state global_state; @@ -144,7 +144,7 @@ typedef struct { CThunkObject *thunk; PyObject *callable; - /* These two fields will override the ones in the type's stgdict if + /* These two fields will override the ones in the type's stginfo if they are set */ PyObject *converters; PyObject *argtypes; @@ -158,17 +158,12 @@ typedef struct { PyObject *paramflags; } PyCFuncPtrObject; -extern PyTypeObject PyCStgDict_Type; -#define PyCStgDict_CheckExact(st, v) Py_IS_TYPE((v), (st)->PyCStgDict_Type) -#define PyCStgDict_Check(st, v) PyObject_TypeCheck((v), (st)->PyCStgDict_Type) - -extern int PyCStructUnionType_update_stgdict(PyObject *fields, PyObject *type, int isStruct); +extern int PyCStructUnionType_update_stginfo(PyObject *fields, PyObject *type, int isStruct); extern int PyType_stginfo(PyTypeObject *self, Py_ssize_t *psize, Py_ssize_t *palign, Py_ssize_t *plength); extern int PyObject_stginfo(PyObject *self, Py_ssize_t *psize, Py_ssize_t *palign, Py_ssize_t *plength); -extern PyTypeObject PyCData_Type; #define CDataObject_CheckExact(st, v) Py_IS_TYPE((v), (st)->PyCData_Type) #define CDataObject_Check(st, v) PyObject_TypeCheck((v), (st)->PyCData_Type) #define _CDataObject_HasExternalBuffer(v) ((v)->b_ptr != 
(char *)&(v)->b_value) @@ -188,10 +183,6 @@ PyCField_FromDesc(PyObject *desc, Py_ssize_t index, extern PyObject *PyCData_AtAddress(PyObject *type, void *buf); extern PyObject *PyCData_FromBytes(PyObject *type, char *data, Py_ssize_t length); -extern PyTypeObject PyCArray_Type; -extern PyTypeObject PyCPointer_Type; -extern PyTypeObject PyCFuncPtr_Type; - #define PyCArrayTypeObject_Check(st, v) PyObject_TypeCheck((v), (st)->PyCArrayType_Type) #define ArrayObject_Check(st, v) PyObject_TypeCheck((v), (st)->PyCArray_Type) #define PointerObject_Check(st, v) PyObject_TypeCheck((v), (st)->PyCPointer_Type) @@ -231,45 +222,22 @@ typedef struct { int anonymous; } CFieldObject; -/* A subclass of PyDictObject, used as the instance dictionary of ctypes - metatypes */ -typedef struct { - PyDictObject dict; /* first part identical to PyDictObject */ -/* The size and align fields are unneeded, they are in ffi_type as well. As - an experiment shows, it's trivial to get rid of them, the only thing to - remember is that in PyCArrayType_new the ffi_type fields must be filled in - - so far it was unneeded because libffi doesn't support arrays at all - (because they are passed as pointers to function calls anyway). But it's - too much risk to change that now, and there are other fields which doesn't - belong into this structure anyway. Maybe in ctypes 2.0... (ctypes 2000?) -*/ - Py_ssize_t size; /* number of bytes */ - Py_ssize_t align; /* alignment requirements */ - Py_ssize_t length; /* number of fields */ - ffi_type ffi_type_pointer; - PyObject *proto; /* Only for Pointer/ArrayObject */ - SETFUNC setfunc; /* Only for simple objects */ - GETFUNC getfunc; /* Only for simple objects */ - PARAMFUNC paramfunc; +/**************************************************************** + StgInfo - /* Following fields only used by PyCFuncPtrType_Type instances */ - PyObject *argtypes; /* tuple of CDataObjects */ - PyObject *converters; /* tuple([t.from_param for t in argtypes]) */ - PyObject *restype; /* CDataObject or NULL */ - PyObject *checker; - int flags; /* calling convention and such */ + Since Python 3.13, ctypes-specific type information is stored in the + corresponding type object, in a `StgInfo` struct accessed by the helpers + below. + Before that, each type's `tp_dict` was set to a dict *subclass* that included + the fields that are now in StgInfo. The mechanism was called "StgDict"; a few + references to that name might remain. - /* pep3118 fields, pointers need PyMem_Free */ - char *format; - int ndim; - Py_ssize_t *shape; -/* Py_ssize_t *strides; */ /* unused in ctypes */ -/* Py_ssize_t *suboffsets; */ /* unused in ctypes */ + Functions for accessing StgInfo are `static inline` for performance; + see later in this file. -} StgDictObject; + **************************************************************** -/**************************************************************** - StgDictObject fields + StgInfo fields setfunc and getfunc is only set for simple data types, it is copied from the corresponding fielddesc entry. These are functions to set and get the value @@ -280,11 +248,11 @@ typedef struct { object. Probably all the magic ctypes methods (like from_param) should have C - callable wrappers in the StgDictObject. For simple data type, for example, + callable wrappers in the StgInfo. 
For simple data type, for example, the fielddesc table could have entries for C codec from_param functions or other methods as well, if a subtype overrides this method in Python at construction time, or assigns to it later, tp_setattro should update the - StgDictObject function to a generic one. + StgInfo function to a generic one. Currently, PyCFuncPtr types have 'converters' and 'checker' entries in their type dict. They are only used to cache attributes from other entries, which @@ -308,13 +276,33 @@ typedef struct { *****************************************************************/ -/* May return NULL, but does not set an exception! */ -extern StgDictObject *PyType_stgdict(PyObject *obj); +typedef struct { + int initialized; + Py_ssize_t size; /* number of bytes */ + Py_ssize_t align; /* alignment requirements */ + Py_ssize_t length; /* number of fields */ + ffi_type ffi_type_pointer; + PyObject *proto; /* Only for Pointer/ArrayObject */ + SETFUNC setfunc; /* Only for simple objects */ + GETFUNC getfunc; /* Only for simple objects */ + PARAMFUNC paramfunc; + + /* Following fields only used by PyCFuncPtrType_Type instances */ + PyObject *argtypes; /* tuple of CDataObjects */ + PyObject *converters; /* tuple([t.from_param for t in argtypes]) */ + PyObject *restype; /* CDataObject or NULL */ + PyObject *checker; + int flags; /* calling convention and such */ -/* May return NULL, but does not set an exception! */ -extern StgDictObject *PyObject_stgdict(PyObject *self); + /* pep3118 fields, pointers need PyMem_Free */ + char *format; + int ndim; + Py_ssize_t *shape; +/* Py_ssize_t *strides; */ /* unused in ctypes */ +/* Py_ssize_t *suboffsets; */ /* unused in ctypes */ +} StgInfo; -extern int PyCStgDict_clone(StgDictObject *src, StgDictObject *dst); +extern int PyCStgInfo_clone(StgInfo *dst_info, StgInfo *src_info); typedef int(* PPROC)(void); @@ -416,8 +404,74 @@ void *Py_ffi_closure_alloc(size_t size, void** codeloc); #define Py_ffi_closure_alloc ffi_closure_alloc #endif -/* - Local Variables: - compile-command: "python setup.py -q build install --home ~" - End: -*/ + +/**************************************************************** + * Accessing StgInfo -- these are inlined for performance reasons. + */ + +// `PyStgInfo_From**` functions get a PyCTypeDataObject. +// These return -1 on error, 0 if "not found", 1 on OK. +// (Currently, these do not return -1 in practice. This might change +// in the future.) + +// +// Common helper: +static inline int +_stginfo_from_type(ctypes_state *state, PyTypeObject *type, StgInfo **result) +{ + *result = NULL; + if (!PyObject_IsInstance((PyObject *)type, (PyObject *)state->PyCType_Type)) { + // not a ctypes class. + return 0; + } + StgInfo *info = PyObject_GetTypeData((PyObject *)type, state->PyCType_Type); + assert(info != NULL); + if (!info->initialized) { + // StgInfo is not initialized. This happens in abstract classes. 
+ return 0; + } + *result = info; + return 1; +} +// from a type: +static inline int +PyStgInfo_FromType(ctypes_state *state, PyObject *type, StgInfo **result) +{ + return _stginfo_from_type(state, (PyTypeObject *)type, result); +} +// from an instance: +static inline int +PyStgInfo_FromObject(ctypes_state *state, PyObject *obj, StgInfo **result) +{ + return _stginfo_from_type(state, Py_TYPE(obj), result); +} +// from either a type or an instance: +static inline int +PyStgInfo_FromAny(ctypes_state *state, PyObject *obj, StgInfo **result) +{ + if (PyType_Check(obj)) { + return _stginfo_from_type(state, (PyTypeObject *)obj, result); + } + return _stginfo_from_type(state, Py_TYPE(obj), result); +} + +// Initialize StgInfo on a newly created type +static inline StgInfo * +PyStgInfo_Init(ctypes_state *state, PyTypeObject *type) +{ + if (!PyObject_IsInstance((PyObject *)type, (PyObject *)state->PyCType_Type)) { + PyErr_Format(PyExc_SystemError, + "'%s' is not a ctypes class.", + type->tp_name); + return NULL; + } + StgInfo *info = PyObject_GetTypeData((PyObject *)type, state->PyCType_Type); + if (info->initialized) { + PyErr_Format(PyExc_SystemError, + "StgInfo of '%s' is already initialized.", + type->tp_name); + return NULL; + } + info->initialized = 1; + return info; +} diff --git a/Modules/_ctypes/stgdict.c b/Modules/_ctypes/stgdict.c index 32ee414a7a0cdd..8666ded5c2b3f2 100644 --- a/Modules/_ctypes/stgdict.c +++ b/Modules/_ctypes/stgdict.c @@ -16,201 +16,62 @@ #endif #include "ctypes.h" -/******************************************************************/ -/* - StdDict - a dictionary subclass, containing additional C accessible fields - - XXX blabla more -*/ - -/* Seems we need this, otherwise we get problems when calling - * PyDict_SetItem() (ma_lookup is NULL) +/* This file relates to StgInfo -- type-specific information for ctypes. + * See ctypes.h for details. 
*/ -static int -PyCStgDict_init(StgDictObject *self, PyObject *args, PyObject *kwds) -{ - if (PyDict_Type.tp_init((PyObject *)self, args, kwds) < 0) - return -1; - self->format = NULL; - self->ndim = 0; - self->shape = NULL; - return 0; -} -static int -PyCStgDict_clear(StgDictObject *self) +int +PyCStgInfo_clone(StgInfo *dst_info, StgInfo *src_info) { - Py_CLEAR(self->proto); - Py_CLEAR(self->argtypes); - Py_CLEAR(self->converters); - Py_CLEAR(self->restype); - Py_CLEAR(self->checker); - return 0; -} + Py_ssize_t size; -static void -PyCStgDict_dealloc(StgDictObject *self) -{ - PyCStgDict_clear(self); - PyMem_Free(self->format); - PyMem_Free(self->shape); - PyMem_Free(self->ffi_type_pointer.elements); - PyDict_Type.tp_dealloc((PyObject *)self); -} + PyMem_Free(dst_info->ffi_type_pointer.elements); + PyMem_Free(dst_info->format); + dst_info->format = NULL; + PyMem_Free(dst_info->shape); + dst_info->shape = NULL; + dst_info->ffi_type_pointer.elements = NULL; -static PyObject * -PyCStgDict_sizeof(StgDictObject *self, void *unused) -{ - Py_ssize_t res; - - res = _PyDict_SizeOf((PyDictObject *)self); - res += sizeof(StgDictObject) - sizeof(PyDictObject); - if (self->format) - res += strlen(self->format) + 1; - res += self->ndim * sizeof(Py_ssize_t); - if (self->ffi_type_pointer.elements) - res += (self->length + 1) * sizeof(ffi_type *); - return PyLong_FromSsize_t(res); -} + memcpy(dst_info, src_info, sizeof(StgInfo)); -int -PyCStgDict_clone(StgDictObject *dst, StgDictObject *src) -{ - char *d, *s; - Py_ssize_t size; + Py_XINCREF(dst_info->proto); + Py_XINCREF(dst_info->argtypes); + Py_XINCREF(dst_info->converters); + Py_XINCREF(dst_info->restype); + Py_XINCREF(dst_info->checker); - PyCStgDict_clear(dst); - PyMem_Free(dst->ffi_type_pointer.elements); - PyMem_Free(dst->format); - dst->format = NULL; - PyMem_Free(dst->shape); - dst->shape = NULL; - dst->ffi_type_pointer.elements = NULL; - - d = (char *)dst; - s = (char *)src; - memcpy(d + sizeof(PyDictObject), - s + sizeof(PyDictObject), - sizeof(StgDictObject) - sizeof(PyDictObject)); - - Py_XINCREF(dst->proto); - Py_XINCREF(dst->argtypes); - Py_XINCREF(dst->converters); - Py_XINCREF(dst->restype); - Py_XINCREF(dst->checker); - - if (src->format) { - dst->format = PyMem_Malloc(strlen(src->format) + 1); - if (dst->format == NULL) { + if (src_info->format) { + dst_info->format = PyMem_Malloc(strlen(src_info->format) + 1); + if (dst_info->format == NULL) { PyErr_NoMemory(); return -1; } - strcpy(dst->format, src->format); + strcpy(dst_info->format, src_info->format); } - if (src->shape) { - dst->shape = PyMem_Malloc(sizeof(Py_ssize_t) * src->ndim); - if (dst->shape == NULL) { + if (src_info->shape) { + dst_info->shape = PyMem_Malloc(sizeof(Py_ssize_t) * src_info->ndim); + if (dst_info->shape == NULL) { PyErr_NoMemory(); return -1; } - memcpy(dst->shape, src->shape, - sizeof(Py_ssize_t) * src->ndim); + memcpy(dst_info->shape, src_info->shape, + sizeof(Py_ssize_t) * src_info->ndim); } - if (src->ffi_type_pointer.elements == NULL) + if (src_info->ffi_type_pointer.elements == NULL) return 0; - size = sizeof(ffi_type *) * (src->length + 1); - dst->ffi_type_pointer.elements = PyMem_Malloc(size); - if (dst->ffi_type_pointer.elements == NULL) { + size = sizeof(ffi_type *) * (src_info->length + 1); + dst_info->ffi_type_pointer.elements = PyMem_Malloc(size); + if (dst_info->ffi_type_pointer.elements == NULL) { PyErr_NoMemory(); return -1; } - memcpy(dst->ffi_type_pointer.elements, - src->ffi_type_pointer.elements, + memcpy(dst_info->ffi_type_pointer.elements, 
+ src_info->ffi_type_pointer.elements, size); return 0; } -static struct PyMethodDef PyCStgDict_methods[] = { - {"__sizeof__", (PyCFunction)PyCStgDict_sizeof, METH_NOARGS}, - {NULL, NULL} /* sentinel */ -}; - -PyTypeObject PyCStgDict_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "StgDict", - sizeof(StgDictObject), - 0, - (destructor)PyCStgDict_dealloc, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - 0, /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - PyCStgDict_methods, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - (initproc)PyCStgDict_init, /* tp_init */ - 0, /* tp_alloc */ - 0, /* tp_new */ - 0, /* tp_free */ -}; - -/* May return NULL, but does not set an exception! */ -StgDictObject * -PyType_stgdict(PyObject *obj) -{ - PyTypeObject *type; - - if (!PyType_Check(obj)) { - return NULL; - } - ctypes_state *st = GLOBAL_STATE(); - type = (PyTypeObject *)obj; - if (!type->tp_dict || !PyCStgDict_CheckExact(st, type->tp_dict)) { - return NULL; - } - return (StgDictObject *)type->tp_dict; -} - -/* May return NULL, but does not set an exception! */ -/* - This function should be as fast as possible, so we don't call PyType_stgdict - above but inline the code, and avoid the PyType_Check(). -*/ -StgDictObject * -PyObject_stgdict(PyObject *self) -{ - PyTypeObject *type = Py_TYPE(self); - ctypes_state *st = GLOBAL_STATE(); - if (!type->tp_dict || !PyCStgDict_CheckExact(st, type->tp_dict)) { - return NULL; - } - return (StgDictObject *)type->tp_dict; -} - /* descr is the descriptor for a field marked as anonymous. Get all the _fields_ descriptors from descr->proto, create new descriptors with offset and index adjusted, and stuff them into type. @@ -372,12 +233,11 @@ _ctypes_alloc_format_padding(const char *prefix, Py_ssize_t padding) /* Retrieve the (optional) _pack_ attribute from a type, the _fields_ attribute, - and create an StgDictObject. Used for Structure and Union subclasses. + and initialize StgInfo. Used for Structure and Union subclasses. */ int -PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct) +PyCStructUnionType_update_stginfo(PyObject *type, PyObject *fields, int isStruct) { - StgDictObject *stgdict, *basedict; Py_ssize_t len, offset, size, align, i; Py_ssize_t union_size, total_align, aligned_size; Py_ssize_t field_size = 0; @@ -456,90 +316,97 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct return -1; } - stgdict = PyType_stgdict(type); - if (!stgdict) { + ctypes_state *st = GLOBAL_STATE(); + StgInfo *stginfo; + if (PyStgInfo_FromType(st, type, &stginfo) < 0) { + return -1; + } + if (!stginfo) { PyErr_SetString(PyExc_TypeError, "ctypes state is not initialized"); return -1; } + /* If this structure/union is already marked final we cannot assign _fields_ anymore. */ - if (stgdict->flags & DICTFLAG_FINAL) {/* is final ? */ + if (stginfo->flags & DICTFLAG_FINAL) {/* is final ? 
*/ PyErr_SetString(PyExc_AttributeError, "_fields_ is final"); return -1; } - if (stgdict->format) { - PyMem_Free(stgdict->format); - stgdict->format = NULL; + if (stginfo->format) { + PyMem_Free(stginfo->format); + stginfo->format = NULL; } - if (stgdict->ffi_type_pointer.elements) - PyMem_Free(stgdict->ffi_type_pointer.elements); + if (stginfo->ffi_type_pointer.elements) + PyMem_Free(stginfo->ffi_type_pointer.elements); - basedict = PyType_stgdict((PyObject *)((PyTypeObject *)type)->tp_base); - if (basedict) { - stgdict->flags |= (basedict->flags & + StgInfo *baseinfo; + if (PyStgInfo_FromType(st, (PyObject *)((PyTypeObject *)type)->tp_base, + &baseinfo) < 0) { + return -1; + } + if (baseinfo) { + stginfo->flags |= (baseinfo->flags & (TYPEFLAG_HASUNION | TYPEFLAG_HASBITFIELD)); } if (!isStruct) { - stgdict->flags |= TYPEFLAG_HASUNION; + stginfo->flags |= TYPEFLAG_HASUNION; } - if (basedict) { - size = offset = basedict->size; - align = basedict->align; + if (baseinfo) { + size = offset = baseinfo->size; + align = baseinfo->align; union_size = 0; total_align = align ? align : 1; total_align = max(total_align, forced_alignment); - stgdict->ffi_type_pointer.type = FFI_TYPE_STRUCT; - stgdict->ffi_type_pointer.elements = PyMem_New(ffi_type *, basedict->length + len + 1); - if (stgdict->ffi_type_pointer.elements == NULL) { + stginfo->ffi_type_pointer.type = FFI_TYPE_STRUCT; + stginfo->ffi_type_pointer.elements = PyMem_New(ffi_type *, baseinfo->length + len + 1); + if (stginfo->ffi_type_pointer.elements == NULL) { PyErr_NoMemory(); return -1; } - memset(stgdict->ffi_type_pointer.elements, 0, - sizeof(ffi_type *) * (basedict->length + len + 1)); - if (basedict->length > 0) { - memcpy(stgdict->ffi_type_pointer.elements, - basedict->ffi_type_pointer.elements, - sizeof(ffi_type *) * (basedict->length)); + memset(stginfo->ffi_type_pointer.elements, 0, + sizeof(ffi_type *) * (baseinfo->length + len + 1)); + if (baseinfo->length > 0) { + memcpy(stginfo->ffi_type_pointer.elements, + baseinfo->ffi_type_pointer.elements, + sizeof(ffi_type *) * (baseinfo->length)); } - ffi_ofs = basedict->length; + ffi_ofs = baseinfo->length; } else { offset = 0; size = 0; align = 0; union_size = 0; total_align = forced_alignment; - stgdict->ffi_type_pointer.type = FFI_TYPE_STRUCT; - stgdict->ffi_type_pointer.elements = PyMem_New(ffi_type *, len + 1); - if (stgdict->ffi_type_pointer.elements == NULL) { + stginfo->ffi_type_pointer.type = FFI_TYPE_STRUCT; + stginfo->ffi_type_pointer.elements = PyMem_New(ffi_type *, len + 1); + if (stginfo->ffi_type_pointer.elements == NULL) { PyErr_NoMemory(); return -1; } - memset(stgdict->ffi_type_pointer.elements, 0, + memset(stginfo->ffi_type_pointer.elements, 0, sizeof(ffi_type *) * (len + 1)); ffi_ofs = 0; } - assert(stgdict->format == NULL); + assert(stginfo->format == NULL); if (isStruct) { - stgdict->format = _ctypes_alloc_format_string(NULL, "T{"); + stginfo->format = _ctypes_alloc_format_string(NULL, "T{"); } else { /* PEP3118 doesn't support union. Use 'B' for bytes. 
*/ - stgdict->format = _ctypes_alloc_format_string(NULL, "B"); + stginfo->format = _ctypes_alloc_format_string(NULL, "B"); } - if (stgdict->format == NULL) + if (stginfo->format == NULL) return -1; - ctypes_state *st = GLOBAL_STATE(); for (i = 0; i < len; ++i) { PyObject *name = NULL, *desc = NULL; PyObject *pair = PySequence_GetItem(fields, i); PyObject *prop; - StgDictObject *dict; int bitsize = 0; if (!pair || !PyArg_ParseTuple(pair, "UO|i", &name, &desc, &bitsize)) { @@ -551,22 +418,28 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct if (PyCArrayTypeObject_Check(st, desc)) { arrays_seen = 1; } - dict = PyType_stgdict(desc); - if (dict == NULL) { + + StgInfo *info; + if (PyStgInfo_FromType(st, desc, &info) < 0) { + Py_DECREF(pair); + return -1; + } + if (info == NULL) { Py_DECREF(pair); PyErr_Format(PyExc_TypeError, "second item in _fields_ tuple (index %zd) must be a C type", i); return -1; } - stgdict->ffi_type_pointer.elements[ffi_ofs + i] = &dict->ffi_type_pointer; - if (dict->flags & (TYPEFLAG_ISPOINTER | TYPEFLAG_HASPOINTER)) - stgdict->flags |= TYPEFLAG_HASPOINTER; - stgdict->flags |= dict->flags & (TYPEFLAG_HASUNION | TYPEFLAG_HASBITFIELD); - dict->flags |= DICTFLAG_FINAL; /* mark field type final */ + + stginfo->ffi_type_pointer.elements[ffi_ofs + i] = &info->ffi_type_pointer; + if (info->flags & (TYPEFLAG_ISPOINTER | TYPEFLAG_HASPOINTER)) + stginfo->flags |= TYPEFLAG_HASPOINTER; + stginfo->flags |= info->flags & (TYPEFLAG_HASUNION | TYPEFLAG_HASBITFIELD); + info->flags |= DICTFLAG_FINAL; /* mark field type final */ if (PyTuple_Size(pair) == 3) { /* bits specified */ - stgdict->flags |= TYPEFLAG_HASBITFIELD; - switch(dict->ffi_type_pointer.type) { + stginfo->flags |= TYPEFLAG_HASBITFIELD; + switch(info->ffi_type_pointer.type) { case FFI_TYPE_UINT8: case FFI_TYPE_UINT16: case FFI_TYPE_UINT32: @@ -577,8 +450,8 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct case FFI_TYPE_SINT8: case FFI_TYPE_SINT16: case FFI_TYPE_SINT32: - if (dict->getfunc != _ctypes_get_fielddesc("c")->getfunc - && dict->getfunc != _ctypes_get_fielddesc("u")->getfunc + if (info->getfunc != _ctypes_get_fielddesc("c")->getfunc + && info->getfunc != _ctypes_get_fielddesc("u")->getfunc ) break; /* else fall through */ @@ -589,7 +462,7 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct Py_DECREF(pair); return -1; } - if (bitsize <= 0 || bitsize > dict->size * 8) { + if (bitsize <= 0 || bitsize > info->size * 8) { PyErr_SetString(PyExc_ValueError, "number of bits invalid for bit field"); Py_DECREF(pair); @@ -599,7 +472,7 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct bitsize = 0; if (isStruct) { - const char *fieldfmt = dict->format ? dict->format : "B"; + const char *fieldfmt = info->format ? 
info->format : "B"; const char *fieldname = PyUnicode_AsUTF8(name); char *ptr; Py_ssize_t len; @@ -629,10 +502,10 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct padding = ((CFieldObject *)prop)->offset - last_size; if (padding > 0) { - ptr = stgdict->format; - stgdict->format = _ctypes_alloc_format_padding(ptr, padding); + ptr = stginfo->format; + stginfo->format = _ctypes_alloc_format_padding(ptr, padding); PyMem_Free(ptr); - if (stgdict->format == NULL) { + if (stginfo->format == NULL) { Py_DECREF(pair); Py_DECREF(prop); return -1; @@ -650,17 +523,17 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct } sprintf(buf, "%s:%s:", fieldfmt, fieldname); - ptr = stgdict->format; - if (dict->shape != NULL) { - stgdict->format = _ctypes_alloc_format_string_with_shape( - dict->ndim, dict->shape, stgdict->format, buf); + ptr = stginfo->format; + if (info->shape != NULL) { + stginfo->format = _ctypes_alloc_format_string_with_shape( + info->ndim, info->shape, stginfo->format, buf); } else { - stgdict->format = _ctypes_alloc_format_string(stgdict->format, buf); + stginfo->format = _ctypes_alloc_format_string(stginfo->format, buf); } PyMem_Free(ptr); PyMem_Free(buf); - if (stgdict->format == NULL) { + if (stginfo->format == NULL) { Py_DECREF(pair); Py_DECREF(prop); return -1; @@ -704,29 +577,29 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct /* Pad up to the full size of the struct */ padding = aligned_size - size; if (padding > 0) { - ptr = stgdict->format; - stgdict->format = _ctypes_alloc_format_padding(ptr, padding); + ptr = stginfo->format; + stginfo->format = _ctypes_alloc_format_padding(ptr, padding); PyMem_Free(ptr); - if (stgdict->format == NULL) { + if (stginfo->format == NULL) { return -1; } } - ptr = stgdict->format; - stgdict->format = _ctypes_alloc_format_string(stgdict->format, "}"); + ptr = stginfo->format; + stginfo->format = _ctypes_alloc_format_string(stginfo->format, "}"); PyMem_Free(ptr); - if (stgdict->format == NULL) + if (stginfo->format == NULL) return -1; } - stgdict->ffi_type_pointer.alignment = Py_SAFE_DOWNCAST(total_align, + stginfo->ffi_type_pointer.alignment = Py_SAFE_DOWNCAST(total_align, Py_ssize_t, unsigned short); - stgdict->ffi_type_pointer.size = aligned_size; + stginfo->ffi_type_pointer.size = aligned_size; - stgdict->size = aligned_size; - stgdict->align = total_align; - stgdict->length = ffi_ofs + len; + stginfo->size = aligned_size; + stginfo->align = total_align; + stginfo->length = ffi_ofs + len; /* * The value of MAX_STRUCT_SIZE depends on the platform Python is running on. @@ -817,7 +690,6 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct for (i = 0; i < len; ++i) { PyObject *name, *desc; PyObject *pair = PySequence_GetItem(fields, i); - StgDictObject *dict; int bitsize = 0; if (pair == NULL) { @@ -829,25 +701,34 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct Py_DECREF(pair); return -1; } - dict = PyType_stgdict(desc); - if (dict == NULL) { + + StgInfo *info; + if (PyStgInfo_FromType(st, desc, &info) < 0) { + Py_DECREF(pair); + return -1; + } + if (info == NULL) { Py_DECREF(pair); PyErr_Format(PyExc_TypeError, "second item in _fields_ tuple (index %zd) must be a C type", i); return -1; } + if (!PyCArrayTypeObject_Check(st, desc)) { /* Not an array. Just need an ffi_type pointer. */ num_ffi_type_pointers++; } else { /* It's an array. 
*/ - Py_ssize_t length = dict->length; - StgDictObject *edict; + Py_ssize_t length = info->length; - edict = PyType_stgdict(dict->proto); - if (edict == NULL) { + StgInfo *einfo; + if (PyStgInfo_FromType(st, info->proto, &einfo) < 0) { + Py_DECREF(pair); + return -1; + } + if (einfo == NULL) { Py_DECREF(pair); PyErr_Format(PyExc_TypeError, "second item in _fields_ tuple (index %zd) must be a C type", @@ -895,9 +776,9 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct if (num_ffi_types > 0) { memset(structs, 0, num_ffi_types * sizeof(ffi_type)); } - if (ffi_ofs && (basedict != NULL)) { + if (ffi_ofs && (baseinfo != NULL)) { memcpy(element_types, - basedict->ffi_type_pointer.elements, + baseinfo->ffi_type_pointer.elements, ffi_ofs * sizeof(ffi_type *)); } element_index = ffi_ofs; @@ -906,7 +787,6 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct for (i = 0; i < len; ++i) { PyObject *name, *desc; PyObject *pair = PySequence_GetItem(fields, i); - StgDictObject *dict; int bitsize = 0; if (pair == NULL) { @@ -926,9 +806,16 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct PyMem_Free(type_block); return -1; } - dict = PyType_stgdict(desc); + + StgInfo *info; + if (PyStgInfo_FromType(st, desc, &info) < 0) { + Py_DECREF(pair); + PyMem_Free(type_block); + return -1; + } + /* Possibly this check could be avoided, but see above comment. */ - if (dict == NULL) { + if (info == NULL) { Py_DECREF(pair); PyMem_Free(type_block); PyErr_Format(PyExc_TypeError, @@ -936,17 +823,21 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct i); return -1; } + assert(element_index < (ffi_ofs + len)); /* will be used below */ if (!PyCArrayTypeObject_Check(st, desc)) { /* Not an array. Just copy over the element ffi_type. */ - element_types[element_index++] = &dict->ffi_type_pointer; + element_types[element_index++] = &info->ffi_type_pointer; } else { - Py_ssize_t length = dict->length; - StgDictObject *edict; - - edict = PyType_stgdict(dict->proto); - if (edict == NULL) { + Py_ssize_t length = info->length; + StgInfo *einfo; + if (PyStgInfo_FromType(st, info->proto, &einfo) < 0) { + Py_DECREF(pair); + PyMem_Free(type_block); + return -1; + } + if (einfo == NULL) { Py_DECREF(pair); PyMem_Free(type_block); PyErr_Format(PyExc_TypeError, @@ -955,15 +846,15 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct return -1; } element_types[element_index++] = &structs[struct_index]; - structs[struct_index].size = length * edict->ffi_type_pointer.size; - structs[struct_index].alignment = edict->ffi_type_pointer.alignment; + structs[struct_index].size = length * einfo->ffi_type_pointer.size; + structs[struct_index].alignment = einfo->ffi_type_pointer.alignment; structs[struct_index].type = FFI_TYPE_STRUCT; structs[struct_index].elements = &dummy_types[dummy_index]; ++struct_index; /* Copy over the element's type, length times. */ while (length > 0) { assert(dummy_index < (num_ffi_type_pointers)); - dummy_types[dummy_index++] = &edict->ffi_type_pointer; + dummy_types[dummy_index++] = &einfo->ffi_type_pointer; length--; } assert(dummy_index < (num_ffi_type_pointers)); @@ -977,19 +868,19 @@ PyCStructUnionType_update_stgdict(PyObject *type, PyObject *fields, int isStruct * Replace the old elements with the new, taking into account * base class elements where necessary. 
*/ - assert(stgdict->ffi_type_pointer.elements); - PyMem_Free(stgdict->ffi_type_pointer.elements); - stgdict->ffi_type_pointer.elements = element_types; + assert(stginfo->ffi_type_pointer.elements); + PyMem_Free(stginfo->ffi_type_pointer.elements); + stginfo->ffi_type_pointer.elements = element_types; } /* We did check that this flag was NOT set above, it must not have been set until now. */ - if (stgdict->flags & DICTFLAG_FINAL) { + if (stginfo->flags & DICTFLAG_FINAL) { PyErr_SetString(PyExc_AttributeError, "Structure or union cannot contain itself"); return -1; } - stgdict->flags |= DICTFLAG_FINAL; + stginfo->flags |= DICTFLAG_FINAL; return MakeAnonFields(type); } diff --git a/Modules/_decimal/_decimal.c b/Modules/_decimal/_decimal.c index 5b053c73e20bc9..2481455ac0d143 100644 --- a/Modules/_decimal/_decimal.c +++ b/Modules/_decimal/_decimal.c @@ -4780,7 +4780,7 @@ _dec_hash(PyDecObject *v) return -1; } else if (mpd_isnan(MPD(v))) { - return _Py_HashPointer(v); + return PyObject_GenericHash((PyObject *)v); } else { return py_hash_inf * mpd_arith_sign(MPD(v)); diff --git a/Modules/_hacl/Hacl_Hash_MD5.c b/Modules/_hacl/Hacl_Hash_MD5.c index 222ac824f01961..ed294839ed8dc0 100644 --- a/Modules/_hacl/Hacl_Hash_MD5.c +++ b/Modules/_hacl/Hacl_Hash_MD5.c @@ -25,37 +25,29 @@ #include "internal/Hacl_Hash_MD5.h" -static uint32_t -_h0[4U] = - { (uint32_t)0x67452301U, (uint32_t)0xefcdab89U, (uint32_t)0x98badcfeU, (uint32_t)0x10325476U }; +static uint32_t _h0[4U] = { 0x67452301U, 0xefcdab89U, 0x98badcfeU, 0x10325476U }; static uint32_t _t[64U] = { - (uint32_t)0xd76aa478U, (uint32_t)0xe8c7b756U, (uint32_t)0x242070dbU, (uint32_t)0xc1bdceeeU, - (uint32_t)0xf57c0fafU, (uint32_t)0x4787c62aU, (uint32_t)0xa8304613U, (uint32_t)0xfd469501U, - (uint32_t)0x698098d8U, (uint32_t)0x8b44f7afU, (uint32_t)0xffff5bb1U, (uint32_t)0x895cd7beU, - (uint32_t)0x6b901122U, (uint32_t)0xfd987193U, (uint32_t)0xa679438eU, (uint32_t)0x49b40821U, - (uint32_t)0xf61e2562U, (uint32_t)0xc040b340U, (uint32_t)0x265e5a51U, (uint32_t)0xe9b6c7aaU, - (uint32_t)0xd62f105dU, (uint32_t)0x02441453U, (uint32_t)0xd8a1e681U, (uint32_t)0xe7d3fbc8U, - (uint32_t)0x21e1cde6U, (uint32_t)0xc33707d6U, (uint32_t)0xf4d50d87U, (uint32_t)0x455a14edU, - (uint32_t)0xa9e3e905U, (uint32_t)0xfcefa3f8U, (uint32_t)0x676f02d9U, (uint32_t)0x8d2a4c8aU, - (uint32_t)0xfffa3942U, (uint32_t)0x8771f681U, (uint32_t)0x6d9d6122U, (uint32_t)0xfde5380cU, - (uint32_t)0xa4beea44U, (uint32_t)0x4bdecfa9U, (uint32_t)0xf6bb4b60U, (uint32_t)0xbebfbc70U, - (uint32_t)0x289b7ec6U, (uint32_t)0xeaa127faU, (uint32_t)0xd4ef3085U, (uint32_t)0x4881d05U, - (uint32_t)0xd9d4d039U, (uint32_t)0xe6db99e5U, (uint32_t)0x1fa27cf8U, (uint32_t)0xc4ac5665U, - (uint32_t)0xf4292244U, (uint32_t)0x432aff97U, (uint32_t)0xab9423a7U, (uint32_t)0xfc93a039U, - (uint32_t)0x655b59c3U, (uint32_t)0x8f0ccc92U, (uint32_t)0xffeff47dU, (uint32_t)0x85845dd1U, - (uint32_t)0x6fa87e4fU, (uint32_t)0xfe2ce6e0U, (uint32_t)0xa3014314U, (uint32_t)0x4e0811a1U, - (uint32_t)0xf7537e82U, (uint32_t)0xbd3af235U, (uint32_t)0x2ad7d2bbU, (uint32_t)0xeb86d391U + 0xd76aa478U, 0xe8c7b756U, 0x242070dbU, 0xc1bdceeeU, 0xf57c0fafU, 0x4787c62aU, 0xa8304613U, + 0xfd469501U, 0x698098d8U, 0x8b44f7afU, 0xffff5bb1U, 0x895cd7beU, 0x6b901122U, 0xfd987193U, + 0xa679438eU, 0x49b40821U, 0xf61e2562U, 0xc040b340U, 0x265e5a51U, 0xe9b6c7aaU, 0xd62f105dU, + 0x02441453U, 0xd8a1e681U, 0xe7d3fbc8U, 0x21e1cde6U, 0xc33707d6U, 0xf4d50d87U, 0x455a14edU, + 0xa9e3e905U, 0xfcefa3f8U, 0x676f02d9U, 0x8d2a4c8aU, 0xfffa3942U, 0x8771f681U, 0x6d9d6122U, + 0xfde5380cU, 
0xa4beea44U, 0x4bdecfa9U, 0xf6bb4b60U, 0xbebfbc70U, 0x289b7ec6U, 0xeaa127faU, + 0xd4ef3085U, 0x4881d05U, 0xd9d4d039U, 0xe6db99e5U, 0x1fa27cf8U, 0xc4ac5665U, 0xf4292244U, + 0x432aff97U, 0xab9423a7U, 0xfc93a039U, 0x655b59c3U, 0x8f0ccc92U, 0xffeff47dU, 0x85845dd1U, + 0x6fa87e4fU, 0xfe2ce6e0U, 0xa3014314U, 0x4e0811a1U, 0xf7537e82U, 0xbd3af235U, 0x2ad7d2bbU, + 0xeb86d391U }; -void Hacl_Hash_Core_MD5_legacy_init(uint32_t *s) +void Hacl_Hash_MD5_init(uint32_t *s) { - KRML_MAYBE_FOR4(i, (uint32_t)0U, (uint32_t)4U, (uint32_t)1U, s[i] = _h0[i];); + KRML_MAYBE_FOR4(i, 0U, 4U, 1U, s[i] = _h0[i];); } -static void legacy_update(uint32_t *abcd, uint8_t *x) +static void update(uint32_t *abcd, uint8_t *x) { uint32_t aa = abcd[0U]; uint32_t bb = abcd[1U]; @@ -74,14 +66,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb0 + ((va + ((vb0 & vc0) | (~vb0 & vd0)) + xk + ti0) - << (uint32_t)7U - | (va + ((vb0 & vc0) | (~vb0 & vd0)) + xk + ti0) >> (uint32_t)25U); + << 7U + | (va + ((vb0 & vc0) | (~vb0 & vd0)) + xk + ti0) >> 25U); abcd[0U] = v; uint32_t va0 = abcd[3U]; uint32_t vb1 = abcd[0U]; uint32_t vc1 = abcd[1U]; uint32_t vd1 = abcd[2U]; - uint8_t *b1 = x + (uint32_t)4U; + uint8_t *b1 = x + 4U; uint32_t u0 = load32_le(b1); uint32_t xk0 = u0; uint32_t ti1 = _t[1U]; @@ -90,14 +82,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb1 + ((va0 + ((vb1 & vc1) | (~vb1 & vd1)) + xk0 + ti1) - << (uint32_t)12U - | (va0 + ((vb1 & vc1) | (~vb1 & vd1)) + xk0 + ti1) >> (uint32_t)20U); + << 12U + | (va0 + ((vb1 & vc1) | (~vb1 & vd1)) + xk0 + ti1) >> 20U); abcd[3U] = v0; uint32_t va1 = abcd[2U]; uint32_t vb2 = abcd[3U]; uint32_t vc2 = abcd[0U]; uint32_t vd2 = abcd[1U]; - uint8_t *b2 = x + (uint32_t)8U; + uint8_t *b2 = x + 8U; uint32_t u1 = load32_le(b2); uint32_t xk1 = u1; uint32_t ti2 = _t[2U]; @@ -106,14 +98,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb2 + ((va1 + ((vb2 & vc2) | (~vb2 & vd2)) + xk1 + ti2) - << (uint32_t)17U - | (va1 + ((vb2 & vc2) | (~vb2 & vd2)) + xk1 + ti2) >> (uint32_t)15U); + << 17U + | (va1 + ((vb2 & vc2) | (~vb2 & vd2)) + xk1 + ti2) >> 15U); abcd[2U] = v1; uint32_t va2 = abcd[1U]; uint32_t vb3 = abcd[2U]; uint32_t vc3 = abcd[3U]; uint32_t vd3 = abcd[0U]; - uint8_t *b3 = x + (uint32_t)12U; + uint8_t *b3 = x + 12U; uint32_t u2 = load32_le(b3); uint32_t xk2 = u2; uint32_t ti3 = _t[3U]; @@ -122,14 +114,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb3 + ((va2 + ((vb3 & vc3) | (~vb3 & vd3)) + xk2 + ti3) - << (uint32_t)22U - | (va2 + ((vb3 & vc3) | (~vb3 & vd3)) + xk2 + ti3) >> (uint32_t)10U); + << 22U + | (va2 + ((vb3 & vc3) | (~vb3 & vd3)) + xk2 + ti3) >> 10U); abcd[1U] = v2; uint32_t va3 = abcd[0U]; uint32_t vb4 = abcd[1U]; uint32_t vc4 = abcd[2U]; uint32_t vd4 = abcd[3U]; - uint8_t *b4 = x + (uint32_t)16U; + uint8_t *b4 = x + 16U; uint32_t u3 = load32_le(b4); uint32_t xk3 = u3; uint32_t ti4 = _t[4U]; @@ -138,14 +130,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb4 + ((va3 + ((vb4 & vc4) | (~vb4 & vd4)) + xk3 + ti4) - << (uint32_t)7U - | (va3 + ((vb4 & vc4) | (~vb4 & vd4)) + xk3 + ti4) >> (uint32_t)25U); + << 7U + | (va3 + ((vb4 & vc4) | (~vb4 & vd4)) + xk3 + ti4) >> 25U); abcd[0U] = v3; uint32_t va4 = abcd[3U]; uint32_t vb5 = abcd[0U]; uint32_t vc5 = abcd[1U]; uint32_t vd5 = abcd[2U]; - uint8_t *b5 = x + (uint32_t)20U; + uint8_t *b5 = x + 20U; uint32_t u4 = load32_le(b5); uint32_t xk4 = u4; uint32_t ti5 = _t[5U]; @@ -154,14 +146,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb5 + ((va4 + ((vb5 & vc5) | (~vb5 & vd5)) + xk4 + 
ti5) - << (uint32_t)12U - | (va4 + ((vb5 & vc5) | (~vb5 & vd5)) + xk4 + ti5) >> (uint32_t)20U); + << 12U + | (va4 + ((vb5 & vc5) | (~vb5 & vd5)) + xk4 + ti5) >> 20U); abcd[3U] = v4; uint32_t va5 = abcd[2U]; uint32_t vb6 = abcd[3U]; uint32_t vc6 = abcd[0U]; uint32_t vd6 = abcd[1U]; - uint8_t *b6 = x + (uint32_t)24U; + uint8_t *b6 = x + 24U; uint32_t u5 = load32_le(b6); uint32_t xk5 = u5; uint32_t ti6 = _t[6U]; @@ -170,14 +162,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb6 + ((va5 + ((vb6 & vc6) | (~vb6 & vd6)) + xk5 + ti6) - << (uint32_t)17U - | (va5 + ((vb6 & vc6) | (~vb6 & vd6)) + xk5 + ti6) >> (uint32_t)15U); + << 17U + | (va5 + ((vb6 & vc6) | (~vb6 & vd6)) + xk5 + ti6) >> 15U); abcd[2U] = v5; uint32_t va6 = abcd[1U]; uint32_t vb7 = abcd[2U]; uint32_t vc7 = abcd[3U]; uint32_t vd7 = abcd[0U]; - uint8_t *b7 = x + (uint32_t)28U; + uint8_t *b7 = x + 28U; uint32_t u6 = load32_le(b7); uint32_t xk6 = u6; uint32_t ti7 = _t[7U]; @@ -186,14 +178,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb7 + ((va6 + ((vb7 & vc7) | (~vb7 & vd7)) + xk6 + ti7) - << (uint32_t)22U - | (va6 + ((vb7 & vc7) | (~vb7 & vd7)) + xk6 + ti7) >> (uint32_t)10U); + << 22U + | (va6 + ((vb7 & vc7) | (~vb7 & vd7)) + xk6 + ti7) >> 10U); abcd[1U] = v6; uint32_t va7 = abcd[0U]; uint32_t vb8 = abcd[1U]; uint32_t vc8 = abcd[2U]; uint32_t vd8 = abcd[3U]; - uint8_t *b8 = x + (uint32_t)32U; + uint8_t *b8 = x + 32U; uint32_t u7 = load32_le(b8); uint32_t xk7 = u7; uint32_t ti8 = _t[8U]; @@ -202,14 +194,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb8 + ((va7 + ((vb8 & vc8) | (~vb8 & vd8)) + xk7 + ti8) - << (uint32_t)7U - | (va7 + ((vb8 & vc8) | (~vb8 & vd8)) + xk7 + ti8) >> (uint32_t)25U); + << 7U + | (va7 + ((vb8 & vc8) | (~vb8 & vd8)) + xk7 + ti8) >> 25U); abcd[0U] = v7; uint32_t va8 = abcd[3U]; uint32_t vb9 = abcd[0U]; uint32_t vc9 = abcd[1U]; uint32_t vd9 = abcd[2U]; - uint8_t *b9 = x + (uint32_t)36U; + uint8_t *b9 = x + 36U; uint32_t u8 = load32_le(b9); uint32_t xk8 = u8; uint32_t ti9 = _t[9U]; @@ -218,14 +210,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb9 + ((va8 + ((vb9 & vc9) | (~vb9 & vd9)) + xk8 + ti9) - << (uint32_t)12U - | (va8 + ((vb9 & vc9) | (~vb9 & vd9)) + xk8 + ti9) >> (uint32_t)20U); + << 12U + | (va8 + ((vb9 & vc9) | (~vb9 & vd9)) + xk8 + ti9) >> 20U); abcd[3U] = v8; uint32_t va9 = abcd[2U]; uint32_t vb10 = abcd[3U]; uint32_t vc10 = abcd[0U]; uint32_t vd10 = abcd[1U]; - uint8_t *b10 = x + (uint32_t)40U; + uint8_t *b10 = x + 40U; uint32_t u9 = load32_le(b10); uint32_t xk9 = u9; uint32_t ti10 = _t[10U]; @@ -234,14 +226,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb10 + ((va9 + ((vb10 & vc10) | (~vb10 & vd10)) + xk9 + ti10) - << (uint32_t)17U - | (va9 + ((vb10 & vc10) | (~vb10 & vd10)) + xk9 + ti10) >> (uint32_t)15U); + << 17U + | (va9 + ((vb10 & vc10) | (~vb10 & vd10)) + xk9 + ti10) >> 15U); abcd[2U] = v9; uint32_t va10 = abcd[1U]; uint32_t vb11 = abcd[2U]; uint32_t vc11 = abcd[3U]; uint32_t vd11 = abcd[0U]; - uint8_t *b11 = x + (uint32_t)44U; + uint8_t *b11 = x + 44U; uint32_t u10 = load32_le(b11); uint32_t xk10 = u10; uint32_t ti11 = _t[11U]; @@ -250,14 +242,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb11 + ((va10 + ((vb11 & vc11) | (~vb11 & vd11)) + xk10 + ti11) - << (uint32_t)22U - | (va10 + ((vb11 & vc11) | (~vb11 & vd11)) + xk10 + ti11) >> (uint32_t)10U); + << 22U + | (va10 + ((vb11 & vc11) | (~vb11 & vd11)) + xk10 + ti11) >> 10U); abcd[1U] = v10; uint32_t va11 = abcd[0U]; uint32_t vb12 = abcd[1U]; uint32_t vc12 = 
abcd[2U]; uint32_t vd12 = abcd[3U]; - uint8_t *b12 = x + (uint32_t)48U; + uint8_t *b12 = x + 48U; uint32_t u11 = load32_le(b12); uint32_t xk11 = u11; uint32_t ti12 = _t[12U]; @@ -266,14 +258,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb12 + ((va11 + ((vb12 & vc12) | (~vb12 & vd12)) + xk11 + ti12) - << (uint32_t)7U - | (va11 + ((vb12 & vc12) | (~vb12 & vd12)) + xk11 + ti12) >> (uint32_t)25U); + << 7U + | (va11 + ((vb12 & vc12) | (~vb12 & vd12)) + xk11 + ti12) >> 25U); abcd[0U] = v11; uint32_t va12 = abcd[3U]; uint32_t vb13 = abcd[0U]; uint32_t vc13 = abcd[1U]; uint32_t vd13 = abcd[2U]; - uint8_t *b13 = x + (uint32_t)52U; + uint8_t *b13 = x + 52U; uint32_t u12 = load32_le(b13); uint32_t xk12 = u12; uint32_t ti13 = _t[13U]; @@ -282,14 +274,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb13 + ((va12 + ((vb13 & vc13) | (~vb13 & vd13)) + xk12 + ti13) - << (uint32_t)12U - | (va12 + ((vb13 & vc13) | (~vb13 & vd13)) + xk12 + ti13) >> (uint32_t)20U); + << 12U + | (va12 + ((vb13 & vc13) | (~vb13 & vd13)) + xk12 + ti13) >> 20U); abcd[3U] = v12; uint32_t va13 = abcd[2U]; uint32_t vb14 = abcd[3U]; uint32_t vc14 = abcd[0U]; uint32_t vd14 = abcd[1U]; - uint8_t *b14 = x + (uint32_t)56U; + uint8_t *b14 = x + 56U; uint32_t u13 = load32_le(b14); uint32_t xk13 = u13; uint32_t ti14 = _t[14U]; @@ -298,14 +290,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb14 + ((va13 + ((vb14 & vc14) | (~vb14 & vd14)) + xk13 + ti14) - << (uint32_t)17U - | (va13 + ((vb14 & vc14) | (~vb14 & vd14)) + xk13 + ti14) >> (uint32_t)15U); + << 17U + | (va13 + ((vb14 & vc14) | (~vb14 & vd14)) + xk13 + ti14) >> 15U); abcd[2U] = v13; uint32_t va14 = abcd[1U]; uint32_t vb15 = abcd[2U]; uint32_t vc15 = abcd[3U]; uint32_t vd15 = abcd[0U]; - uint8_t *b15 = x + (uint32_t)60U; + uint8_t *b15 = x + 60U; uint32_t u14 = load32_le(b15); uint32_t xk14 = u14; uint32_t ti15 = _t[15U]; @@ -314,14 +306,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb15 + ((va14 + ((vb15 & vc15) | (~vb15 & vd15)) + xk14 + ti15) - << (uint32_t)22U - | (va14 + ((vb15 & vc15) | (~vb15 & vd15)) + xk14 + ti15) >> (uint32_t)10U); + << 22U + | (va14 + ((vb15 & vc15) | (~vb15 & vd15)) + xk14 + ti15) >> 10U); abcd[1U] = v14; uint32_t va15 = abcd[0U]; uint32_t vb16 = abcd[1U]; uint32_t vc16 = abcd[2U]; uint32_t vd16 = abcd[3U]; - uint8_t *b16 = x + (uint32_t)4U; + uint8_t *b16 = x + 4U; uint32_t u15 = load32_le(b16); uint32_t xk15 = u15; uint32_t ti16 = _t[16U]; @@ -330,14 +322,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb16 + ((va15 + ((vb16 & vd16) | (vc16 & ~vd16)) + xk15 + ti16) - << (uint32_t)5U - | (va15 + ((vb16 & vd16) | (vc16 & ~vd16)) + xk15 + ti16) >> (uint32_t)27U); + << 5U + | (va15 + ((vb16 & vd16) | (vc16 & ~vd16)) + xk15 + ti16) >> 27U); abcd[0U] = v15; uint32_t va16 = abcd[3U]; uint32_t vb17 = abcd[0U]; uint32_t vc17 = abcd[1U]; uint32_t vd17 = abcd[2U]; - uint8_t *b17 = x + (uint32_t)24U; + uint8_t *b17 = x + 24U; uint32_t u16 = load32_le(b17); uint32_t xk16 = u16; uint32_t ti17 = _t[17U]; @@ -346,14 +338,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb17 + ((va16 + ((vb17 & vd17) | (vc17 & ~vd17)) + xk16 + ti17) - << (uint32_t)9U - | (va16 + ((vb17 & vd17) | (vc17 & ~vd17)) + xk16 + ti17) >> (uint32_t)23U); + << 9U + | (va16 + ((vb17 & vd17) | (vc17 & ~vd17)) + xk16 + ti17) >> 23U); abcd[3U] = v16; uint32_t va17 = abcd[2U]; uint32_t vb18 = abcd[3U]; uint32_t vc18 = abcd[0U]; uint32_t vd18 = abcd[1U]; - uint8_t *b18 = x + (uint32_t)44U; + uint8_t *b18 = x + 44U; uint32_t u17 = 
load32_le(b18); uint32_t xk17 = u17; uint32_t ti18 = _t[18U]; @@ -362,8 +354,8 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb18 + ((va17 + ((vb18 & vd18) | (vc18 & ~vd18)) + xk17 + ti18) - << (uint32_t)14U - | (va17 + ((vb18 & vd18) | (vc18 & ~vd18)) + xk17 + ti18) >> (uint32_t)18U); + << 14U + | (va17 + ((vb18 & vd18) | (vc18 & ~vd18)) + xk17 + ti18) >> 18U); abcd[2U] = v17; uint32_t va18 = abcd[1U]; uint32_t vb19 = abcd[2U]; @@ -378,14 +370,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb19 + ((va18 + ((vb19 & vd19) | (vc19 & ~vd19)) + xk18 + ti19) - << (uint32_t)20U - | (va18 + ((vb19 & vd19) | (vc19 & ~vd19)) + xk18 + ti19) >> (uint32_t)12U); + << 20U + | (va18 + ((vb19 & vd19) | (vc19 & ~vd19)) + xk18 + ti19) >> 12U); abcd[1U] = v18; uint32_t va19 = abcd[0U]; uint32_t vb20 = abcd[1U]; uint32_t vc20 = abcd[2U]; uint32_t vd20 = abcd[3U]; - uint8_t *b20 = x + (uint32_t)20U; + uint8_t *b20 = x + 20U; uint32_t u19 = load32_le(b20); uint32_t xk19 = u19; uint32_t ti20 = _t[20U]; @@ -394,14 +386,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb20 + ((va19 + ((vb20 & vd20) | (vc20 & ~vd20)) + xk19 + ti20) - << (uint32_t)5U - | (va19 + ((vb20 & vd20) | (vc20 & ~vd20)) + xk19 + ti20) >> (uint32_t)27U); + << 5U + | (va19 + ((vb20 & vd20) | (vc20 & ~vd20)) + xk19 + ti20) >> 27U); abcd[0U] = v19; uint32_t va20 = abcd[3U]; uint32_t vb21 = abcd[0U]; uint32_t vc21 = abcd[1U]; uint32_t vd21 = abcd[2U]; - uint8_t *b21 = x + (uint32_t)40U; + uint8_t *b21 = x + 40U; uint32_t u20 = load32_le(b21); uint32_t xk20 = u20; uint32_t ti21 = _t[21U]; @@ -410,14 +402,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb21 + ((va20 + ((vb21 & vd21) | (vc21 & ~vd21)) + xk20 + ti21) - << (uint32_t)9U - | (va20 + ((vb21 & vd21) | (vc21 & ~vd21)) + xk20 + ti21) >> (uint32_t)23U); + << 9U + | (va20 + ((vb21 & vd21) | (vc21 & ~vd21)) + xk20 + ti21) >> 23U); abcd[3U] = v20; uint32_t va21 = abcd[2U]; uint32_t vb22 = abcd[3U]; uint32_t vc22 = abcd[0U]; uint32_t vd22 = abcd[1U]; - uint8_t *b22 = x + (uint32_t)60U; + uint8_t *b22 = x + 60U; uint32_t u21 = load32_le(b22); uint32_t xk21 = u21; uint32_t ti22 = _t[22U]; @@ -426,14 +418,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb22 + ((va21 + ((vb22 & vd22) | (vc22 & ~vd22)) + xk21 + ti22) - << (uint32_t)14U - | (va21 + ((vb22 & vd22) | (vc22 & ~vd22)) + xk21 + ti22) >> (uint32_t)18U); + << 14U + | (va21 + ((vb22 & vd22) | (vc22 & ~vd22)) + xk21 + ti22) >> 18U); abcd[2U] = v21; uint32_t va22 = abcd[1U]; uint32_t vb23 = abcd[2U]; uint32_t vc23 = abcd[3U]; uint32_t vd23 = abcd[0U]; - uint8_t *b23 = x + (uint32_t)16U; + uint8_t *b23 = x + 16U; uint32_t u22 = load32_le(b23); uint32_t xk22 = u22; uint32_t ti23 = _t[23U]; @@ -442,14 +434,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb23 + ((va22 + ((vb23 & vd23) | (vc23 & ~vd23)) + xk22 + ti23) - << (uint32_t)20U - | (va22 + ((vb23 & vd23) | (vc23 & ~vd23)) + xk22 + ti23) >> (uint32_t)12U); + << 20U + | (va22 + ((vb23 & vd23) | (vc23 & ~vd23)) + xk22 + ti23) >> 12U); abcd[1U] = v22; uint32_t va23 = abcd[0U]; uint32_t vb24 = abcd[1U]; uint32_t vc24 = abcd[2U]; uint32_t vd24 = abcd[3U]; - uint8_t *b24 = x + (uint32_t)36U; + uint8_t *b24 = x + 36U; uint32_t u23 = load32_le(b24); uint32_t xk23 = u23; uint32_t ti24 = _t[24U]; @@ -458,14 +450,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb24 + ((va23 + ((vb24 & vd24) | (vc24 & ~vd24)) + xk23 + ti24) - << (uint32_t)5U - | (va23 + ((vb24 & vd24) | (vc24 & ~vd24)) + xk23 + ti24) >> (uint32_t)27U); + 
<< 5U + | (va23 + ((vb24 & vd24) | (vc24 & ~vd24)) + xk23 + ti24) >> 27U); abcd[0U] = v23; uint32_t va24 = abcd[3U]; uint32_t vb25 = abcd[0U]; uint32_t vc25 = abcd[1U]; uint32_t vd25 = abcd[2U]; - uint8_t *b25 = x + (uint32_t)56U; + uint8_t *b25 = x + 56U; uint32_t u24 = load32_le(b25); uint32_t xk24 = u24; uint32_t ti25 = _t[25U]; @@ -474,14 +466,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb25 + ((va24 + ((vb25 & vd25) | (vc25 & ~vd25)) + xk24 + ti25) - << (uint32_t)9U - | (va24 + ((vb25 & vd25) | (vc25 & ~vd25)) + xk24 + ti25) >> (uint32_t)23U); + << 9U + | (va24 + ((vb25 & vd25) | (vc25 & ~vd25)) + xk24 + ti25) >> 23U); abcd[3U] = v24; uint32_t va25 = abcd[2U]; uint32_t vb26 = abcd[3U]; uint32_t vc26 = abcd[0U]; uint32_t vd26 = abcd[1U]; - uint8_t *b26 = x + (uint32_t)12U; + uint8_t *b26 = x + 12U; uint32_t u25 = load32_le(b26); uint32_t xk25 = u25; uint32_t ti26 = _t[26U]; @@ -490,14 +482,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb26 + ((va25 + ((vb26 & vd26) | (vc26 & ~vd26)) + xk25 + ti26) - << (uint32_t)14U - | (va25 + ((vb26 & vd26) | (vc26 & ~vd26)) + xk25 + ti26) >> (uint32_t)18U); + << 14U + | (va25 + ((vb26 & vd26) | (vc26 & ~vd26)) + xk25 + ti26) >> 18U); abcd[2U] = v25; uint32_t va26 = abcd[1U]; uint32_t vb27 = abcd[2U]; uint32_t vc27 = abcd[3U]; uint32_t vd27 = abcd[0U]; - uint8_t *b27 = x + (uint32_t)32U; + uint8_t *b27 = x + 32U; uint32_t u26 = load32_le(b27); uint32_t xk26 = u26; uint32_t ti27 = _t[27U]; @@ -506,14 +498,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb27 + ((va26 + ((vb27 & vd27) | (vc27 & ~vd27)) + xk26 + ti27) - << (uint32_t)20U - | (va26 + ((vb27 & vd27) | (vc27 & ~vd27)) + xk26 + ti27) >> (uint32_t)12U); + << 20U + | (va26 + ((vb27 & vd27) | (vc27 & ~vd27)) + xk26 + ti27) >> 12U); abcd[1U] = v26; uint32_t va27 = abcd[0U]; uint32_t vb28 = abcd[1U]; uint32_t vc28 = abcd[2U]; uint32_t vd28 = abcd[3U]; - uint8_t *b28 = x + (uint32_t)52U; + uint8_t *b28 = x + 52U; uint32_t u27 = load32_le(b28); uint32_t xk27 = u27; uint32_t ti28 = _t[28U]; @@ -522,14 +514,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb28 + ((va27 + ((vb28 & vd28) | (vc28 & ~vd28)) + xk27 + ti28) - << (uint32_t)5U - | (va27 + ((vb28 & vd28) | (vc28 & ~vd28)) + xk27 + ti28) >> (uint32_t)27U); + << 5U + | (va27 + ((vb28 & vd28) | (vc28 & ~vd28)) + xk27 + ti28) >> 27U); abcd[0U] = v27; uint32_t va28 = abcd[3U]; uint32_t vb29 = abcd[0U]; uint32_t vc29 = abcd[1U]; uint32_t vd29 = abcd[2U]; - uint8_t *b29 = x + (uint32_t)8U; + uint8_t *b29 = x + 8U; uint32_t u28 = load32_le(b29); uint32_t xk28 = u28; uint32_t ti29 = _t[29U]; @@ -538,14 +530,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb29 + ((va28 + ((vb29 & vd29) | (vc29 & ~vd29)) + xk28 + ti29) - << (uint32_t)9U - | (va28 + ((vb29 & vd29) | (vc29 & ~vd29)) + xk28 + ti29) >> (uint32_t)23U); + << 9U + | (va28 + ((vb29 & vd29) | (vc29 & ~vd29)) + xk28 + ti29) >> 23U); abcd[3U] = v28; uint32_t va29 = abcd[2U]; uint32_t vb30 = abcd[3U]; uint32_t vc30 = abcd[0U]; uint32_t vd30 = abcd[1U]; - uint8_t *b30 = x + (uint32_t)28U; + uint8_t *b30 = x + 28U; uint32_t u29 = load32_le(b30); uint32_t xk29 = u29; uint32_t ti30 = _t[30U]; @@ -554,14 +546,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb30 + ((va29 + ((vb30 & vd30) | (vc30 & ~vd30)) + xk29 + ti30) - << (uint32_t)14U - | (va29 + ((vb30 & vd30) | (vc30 & ~vd30)) + xk29 + ti30) >> (uint32_t)18U); + << 14U + | (va29 + ((vb30 & vd30) | (vc30 & ~vd30)) + xk29 + ti30) >> 18U); abcd[2U] = v29; uint32_t va30 = 
abcd[1U]; uint32_t vb31 = abcd[2U]; uint32_t vc31 = abcd[3U]; uint32_t vd31 = abcd[0U]; - uint8_t *b31 = x + (uint32_t)48U; + uint8_t *b31 = x + 48U; uint32_t u30 = load32_le(b31); uint32_t xk30 = u30; uint32_t ti31 = _t[31U]; @@ -570,14 +562,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb31 + ((va30 + ((vb31 & vd31) | (vc31 & ~vd31)) + xk30 + ti31) - << (uint32_t)20U - | (va30 + ((vb31 & vd31) | (vc31 & ~vd31)) + xk30 + ti31) >> (uint32_t)12U); + << 20U + | (va30 + ((vb31 & vd31) | (vc31 & ~vd31)) + xk30 + ti31) >> 12U); abcd[1U] = v30; uint32_t va31 = abcd[0U]; uint32_t vb32 = abcd[1U]; uint32_t vc32 = abcd[2U]; uint32_t vd32 = abcd[3U]; - uint8_t *b32 = x + (uint32_t)20U; + uint8_t *b32 = x + 20U; uint32_t u31 = load32_le(b32); uint32_t xk31 = u31; uint32_t ti32 = _t[32U]; @@ -586,14 +578,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb32 + ((va31 + (vb32 ^ (vc32 ^ vd32)) + xk31 + ti32) - << (uint32_t)4U - | (va31 + (vb32 ^ (vc32 ^ vd32)) + xk31 + ti32) >> (uint32_t)28U); + << 4U + | (va31 + (vb32 ^ (vc32 ^ vd32)) + xk31 + ti32) >> 28U); abcd[0U] = v31; uint32_t va32 = abcd[3U]; uint32_t vb33 = abcd[0U]; uint32_t vc33 = abcd[1U]; uint32_t vd33 = abcd[2U]; - uint8_t *b33 = x + (uint32_t)32U; + uint8_t *b33 = x + 32U; uint32_t u32 = load32_le(b33); uint32_t xk32 = u32; uint32_t ti33 = _t[33U]; @@ -602,14 +594,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb33 + ((va32 + (vb33 ^ (vc33 ^ vd33)) + xk32 + ti33) - << (uint32_t)11U - | (va32 + (vb33 ^ (vc33 ^ vd33)) + xk32 + ti33) >> (uint32_t)21U); + << 11U + | (va32 + (vb33 ^ (vc33 ^ vd33)) + xk32 + ti33) >> 21U); abcd[3U] = v32; uint32_t va33 = abcd[2U]; uint32_t vb34 = abcd[3U]; uint32_t vc34 = abcd[0U]; uint32_t vd34 = abcd[1U]; - uint8_t *b34 = x + (uint32_t)44U; + uint8_t *b34 = x + 44U; uint32_t u33 = load32_le(b34); uint32_t xk33 = u33; uint32_t ti34 = _t[34U]; @@ -618,14 +610,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb34 + ((va33 + (vb34 ^ (vc34 ^ vd34)) + xk33 + ti34) - << (uint32_t)16U - | (va33 + (vb34 ^ (vc34 ^ vd34)) + xk33 + ti34) >> (uint32_t)16U); + << 16U + | (va33 + (vb34 ^ (vc34 ^ vd34)) + xk33 + ti34) >> 16U); abcd[2U] = v33; uint32_t va34 = abcd[1U]; uint32_t vb35 = abcd[2U]; uint32_t vc35 = abcd[3U]; uint32_t vd35 = abcd[0U]; - uint8_t *b35 = x + (uint32_t)56U; + uint8_t *b35 = x + 56U; uint32_t u34 = load32_le(b35); uint32_t xk34 = u34; uint32_t ti35 = _t[35U]; @@ -634,14 +626,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb35 + ((va34 + (vb35 ^ (vc35 ^ vd35)) + xk34 + ti35) - << (uint32_t)23U - | (va34 + (vb35 ^ (vc35 ^ vd35)) + xk34 + ti35) >> (uint32_t)9U); + << 23U + | (va34 + (vb35 ^ (vc35 ^ vd35)) + xk34 + ti35) >> 9U); abcd[1U] = v34; uint32_t va35 = abcd[0U]; uint32_t vb36 = abcd[1U]; uint32_t vc36 = abcd[2U]; uint32_t vd36 = abcd[3U]; - uint8_t *b36 = x + (uint32_t)4U; + uint8_t *b36 = x + 4U; uint32_t u35 = load32_le(b36); uint32_t xk35 = u35; uint32_t ti36 = _t[36U]; @@ -650,14 +642,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb36 + ((va35 + (vb36 ^ (vc36 ^ vd36)) + xk35 + ti36) - << (uint32_t)4U - | (va35 + (vb36 ^ (vc36 ^ vd36)) + xk35 + ti36) >> (uint32_t)28U); + << 4U + | (va35 + (vb36 ^ (vc36 ^ vd36)) + xk35 + ti36) >> 28U); abcd[0U] = v35; uint32_t va36 = abcd[3U]; uint32_t vb37 = abcd[0U]; uint32_t vc37 = abcd[1U]; uint32_t vd37 = abcd[2U]; - uint8_t *b37 = x + (uint32_t)16U; + uint8_t *b37 = x + 16U; uint32_t u36 = load32_le(b37); uint32_t xk36 = u36; uint32_t ti37 = _t[37U]; @@ -666,14 +658,14 @@ static void 
legacy_update(uint32_t *abcd, uint8_t *x) vb37 + ((va36 + (vb37 ^ (vc37 ^ vd37)) + xk36 + ti37) - << (uint32_t)11U - | (va36 + (vb37 ^ (vc37 ^ vd37)) + xk36 + ti37) >> (uint32_t)21U); + << 11U + | (va36 + (vb37 ^ (vc37 ^ vd37)) + xk36 + ti37) >> 21U); abcd[3U] = v36; uint32_t va37 = abcd[2U]; uint32_t vb38 = abcd[3U]; uint32_t vc38 = abcd[0U]; uint32_t vd38 = abcd[1U]; - uint8_t *b38 = x + (uint32_t)28U; + uint8_t *b38 = x + 28U; uint32_t u37 = load32_le(b38); uint32_t xk37 = u37; uint32_t ti38 = _t[38U]; @@ -682,14 +674,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb38 + ((va37 + (vb38 ^ (vc38 ^ vd38)) + xk37 + ti38) - << (uint32_t)16U - | (va37 + (vb38 ^ (vc38 ^ vd38)) + xk37 + ti38) >> (uint32_t)16U); + << 16U + | (va37 + (vb38 ^ (vc38 ^ vd38)) + xk37 + ti38) >> 16U); abcd[2U] = v37; uint32_t va38 = abcd[1U]; uint32_t vb39 = abcd[2U]; uint32_t vc39 = abcd[3U]; uint32_t vd39 = abcd[0U]; - uint8_t *b39 = x + (uint32_t)40U; + uint8_t *b39 = x + 40U; uint32_t u38 = load32_le(b39); uint32_t xk38 = u38; uint32_t ti39 = _t[39U]; @@ -698,14 +690,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb39 + ((va38 + (vb39 ^ (vc39 ^ vd39)) + xk38 + ti39) - << (uint32_t)23U - | (va38 + (vb39 ^ (vc39 ^ vd39)) + xk38 + ti39) >> (uint32_t)9U); + << 23U + | (va38 + (vb39 ^ (vc39 ^ vd39)) + xk38 + ti39) >> 9U); abcd[1U] = v38; uint32_t va39 = abcd[0U]; uint32_t vb40 = abcd[1U]; uint32_t vc40 = abcd[2U]; uint32_t vd40 = abcd[3U]; - uint8_t *b40 = x + (uint32_t)52U; + uint8_t *b40 = x + 52U; uint32_t u39 = load32_le(b40); uint32_t xk39 = u39; uint32_t ti40 = _t[40U]; @@ -714,8 +706,8 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb40 + ((va39 + (vb40 ^ (vc40 ^ vd40)) + xk39 + ti40) - << (uint32_t)4U - | (va39 + (vb40 ^ (vc40 ^ vd40)) + xk39 + ti40) >> (uint32_t)28U); + << 4U + | (va39 + (vb40 ^ (vc40 ^ vd40)) + xk39 + ti40) >> 28U); abcd[0U] = v39; uint32_t va40 = abcd[3U]; uint32_t vb41 = abcd[0U]; @@ -730,14 +722,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb41 + ((va40 + (vb41 ^ (vc41 ^ vd41)) + xk40 + ti41) - << (uint32_t)11U - | (va40 + (vb41 ^ (vc41 ^ vd41)) + xk40 + ti41) >> (uint32_t)21U); + << 11U + | (va40 + (vb41 ^ (vc41 ^ vd41)) + xk40 + ti41) >> 21U); abcd[3U] = v40; uint32_t va41 = abcd[2U]; uint32_t vb42 = abcd[3U]; uint32_t vc42 = abcd[0U]; uint32_t vd42 = abcd[1U]; - uint8_t *b42 = x + (uint32_t)12U; + uint8_t *b42 = x + 12U; uint32_t u41 = load32_le(b42); uint32_t xk41 = u41; uint32_t ti42 = _t[42U]; @@ -746,14 +738,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb42 + ((va41 + (vb42 ^ (vc42 ^ vd42)) + xk41 + ti42) - << (uint32_t)16U - | (va41 + (vb42 ^ (vc42 ^ vd42)) + xk41 + ti42) >> (uint32_t)16U); + << 16U + | (va41 + (vb42 ^ (vc42 ^ vd42)) + xk41 + ti42) >> 16U); abcd[2U] = v41; uint32_t va42 = abcd[1U]; uint32_t vb43 = abcd[2U]; uint32_t vc43 = abcd[3U]; uint32_t vd43 = abcd[0U]; - uint8_t *b43 = x + (uint32_t)24U; + uint8_t *b43 = x + 24U; uint32_t u42 = load32_le(b43); uint32_t xk42 = u42; uint32_t ti43 = _t[43U]; @@ -762,14 +754,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb43 + ((va42 + (vb43 ^ (vc43 ^ vd43)) + xk42 + ti43) - << (uint32_t)23U - | (va42 + (vb43 ^ (vc43 ^ vd43)) + xk42 + ti43) >> (uint32_t)9U); + << 23U + | (va42 + (vb43 ^ (vc43 ^ vd43)) + xk42 + ti43) >> 9U); abcd[1U] = v42; uint32_t va43 = abcd[0U]; uint32_t vb44 = abcd[1U]; uint32_t vc44 = abcd[2U]; uint32_t vd44 = abcd[3U]; - uint8_t *b44 = x + (uint32_t)36U; + uint8_t *b44 = x + 36U; uint32_t u43 = load32_le(b44); uint32_t xk43 = 
u43; uint32_t ti44 = _t[44U]; @@ -778,14 +770,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb44 + ((va43 + (vb44 ^ (vc44 ^ vd44)) + xk43 + ti44) - << (uint32_t)4U - | (va43 + (vb44 ^ (vc44 ^ vd44)) + xk43 + ti44) >> (uint32_t)28U); + << 4U + | (va43 + (vb44 ^ (vc44 ^ vd44)) + xk43 + ti44) >> 28U); abcd[0U] = v43; uint32_t va44 = abcd[3U]; uint32_t vb45 = abcd[0U]; uint32_t vc45 = abcd[1U]; uint32_t vd45 = abcd[2U]; - uint8_t *b45 = x + (uint32_t)48U; + uint8_t *b45 = x + 48U; uint32_t u44 = load32_le(b45); uint32_t xk44 = u44; uint32_t ti45 = _t[45U]; @@ -794,14 +786,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb45 + ((va44 + (vb45 ^ (vc45 ^ vd45)) + xk44 + ti45) - << (uint32_t)11U - | (va44 + (vb45 ^ (vc45 ^ vd45)) + xk44 + ti45) >> (uint32_t)21U); + << 11U + | (va44 + (vb45 ^ (vc45 ^ vd45)) + xk44 + ti45) >> 21U); abcd[3U] = v44; uint32_t va45 = abcd[2U]; uint32_t vb46 = abcd[3U]; uint32_t vc46 = abcd[0U]; uint32_t vd46 = abcd[1U]; - uint8_t *b46 = x + (uint32_t)60U; + uint8_t *b46 = x + 60U; uint32_t u45 = load32_le(b46); uint32_t xk45 = u45; uint32_t ti46 = _t[46U]; @@ -810,14 +802,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb46 + ((va45 + (vb46 ^ (vc46 ^ vd46)) + xk45 + ti46) - << (uint32_t)16U - | (va45 + (vb46 ^ (vc46 ^ vd46)) + xk45 + ti46) >> (uint32_t)16U); + << 16U + | (va45 + (vb46 ^ (vc46 ^ vd46)) + xk45 + ti46) >> 16U); abcd[2U] = v45; uint32_t va46 = abcd[1U]; uint32_t vb47 = abcd[2U]; uint32_t vc47 = abcd[3U]; uint32_t vd47 = abcd[0U]; - uint8_t *b47 = x + (uint32_t)8U; + uint8_t *b47 = x + 8U; uint32_t u46 = load32_le(b47); uint32_t xk46 = u46; uint32_t ti47 = _t[47U]; @@ -826,8 +818,8 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb47 + ((va46 + (vb47 ^ (vc47 ^ vd47)) + xk46 + ti47) - << (uint32_t)23U - | (va46 + (vb47 ^ (vc47 ^ vd47)) + xk46 + ti47) >> (uint32_t)9U); + << 23U + | (va46 + (vb47 ^ (vc47 ^ vd47)) + xk46 + ti47) >> 9U); abcd[1U] = v46; uint32_t va47 = abcd[0U]; uint32_t vb48 = abcd[1U]; @@ -842,14 +834,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb48 + ((va47 + (vc48 ^ (vb48 | ~vd48)) + xk47 + ti48) - << (uint32_t)6U - | (va47 + (vc48 ^ (vb48 | ~vd48)) + xk47 + ti48) >> (uint32_t)26U); + << 6U + | (va47 + (vc48 ^ (vb48 | ~vd48)) + xk47 + ti48) >> 26U); abcd[0U] = v47; uint32_t va48 = abcd[3U]; uint32_t vb49 = abcd[0U]; uint32_t vc49 = abcd[1U]; uint32_t vd49 = abcd[2U]; - uint8_t *b49 = x + (uint32_t)28U; + uint8_t *b49 = x + 28U; uint32_t u48 = load32_le(b49); uint32_t xk48 = u48; uint32_t ti49 = _t[49U]; @@ -858,14 +850,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb49 + ((va48 + (vc49 ^ (vb49 | ~vd49)) + xk48 + ti49) - << (uint32_t)10U - | (va48 + (vc49 ^ (vb49 | ~vd49)) + xk48 + ti49) >> (uint32_t)22U); + << 10U + | (va48 + (vc49 ^ (vb49 | ~vd49)) + xk48 + ti49) >> 22U); abcd[3U] = v48; uint32_t va49 = abcd[2U]; uint32_t vb50 = abcd[3U]; uint32_t vc50 = abcd[0U]; uint32_t vd50 = abcd[1U]; - uint8_t *b50 = x + (uint32_t)56U; + uint8_t *b50 = x + 56U; uint32_t u49 = load32_le(b50); uint32_t xk49 = u49; uint32_t ti50 = _t[50U]; @@ -874,14 +866,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb50 + ((va49 + (vc50 ^ (vb50 | ~vd50)) + xk49 + ti50) - << (uint32_t)15U - | (va49 + (vc50 ^ (vb50 | ~vd50)) + xk49 + ti50) >> (uint32_t)17U); + << 15U + | (va49 + (vc50 ^ (vb50 | ~vd50)) + xk49 + ti50) >> 17U); abcd[2U] = v49; uint32_t va50 = abcd[1U]; uint32_t vb51 = abcd[2U]; uint32_t vc51 = abcd[3U]; uint32_t vd51 = abcd[0U]; - uint8_t *b51 = x + (uint32_t)20U; + 
uint8_t *b51 = x + 20U; uint32_t u50 = load32_le(b51); uint32_t xk50 = u50; uint32_t ti51 = _t[51U]; @@ -890,14 +882,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb51 + ((va50 + (vc51 ^ (vb51 | ~vd51)) + xk50 + ti51) - << (uint32_t)21U - | (va50 + (vc51 ^ (vb51 | ~vd51)) + xk50 + ti51) >> (uint32_t)11U); + << 21U + | (va50 + (vc51 ^ (vb51 | ~vd51)) + xk50 + ti51) >> 11U); abcd[1U] = v50; uint32_t va51 = abcd[0U]; uint32_t vb52 = abcd[1U]; uint32_t vc52 = abcd[2U]; uint32_t vd52 = abcd[3U]; - uint8_t *b52 = x + (uint32_t)48U; + uint8_t *b52 = x + 48U; uint32_t u51 = load32_le(b52); uint32_t xk51 = u51; uint32_t ti52 = _t[52U]; @@ -906,14 +898,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb52 + ((va51 + (vc52 ^ (vb52 | ~vd52)) + xk51 + ti52) - << (uint32_t)6U - | (va51 + (vc52 ^ (vb52 | ~vd52)) + xk51 + ti52) >> (uint32_t)26U); + << 6U + | (va51 + (vc52 ^ (vb52 | ~vd52)) + xk51 + ti52) >> 26U); abcd[0U] = v51; uint32_t va52 = abcd[3U]; uint32_t vb53 = abcd[0U]; uint32_t vc53 = abcd[1U]; uint32_t vd53 = abcd[2U]; - uint8_t *b53 = x + (uint32_t)12U; + uint8_t *b53 = x + 12U; uint32_t u52 = load32_le(b53); uint32_t xk52 = u52; uint32_t ti53 = _t[53U]; @@ -922,14 +914,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb53 + ((va52 + (vc53 ^ (vb53 | ~vd53)) + xk52 + ti53) - << (uint32_t)10U - | (va52 + (vc53 ^ (vb53 | ~vd53)) + xk52 + ti53) >> (uint32_t)22U); + << 10U + | (va52 + (vc53 ^ (vb53 | ~vd53)) + xk52 + ti53) >> 22U); abcd[3U] = v52; uint32_t va53 = abcd[2U]; uint32_t vb54 = abcd[3U]; uint32_t vc54 = abcd[0U]; uint32_t vd54 = abcd[1U]; - uint8_t *b54 = x + (uint32_t)40U; + uint8_t *b54 = x + 40U; uint32_t u53 = load32_le(b54); uint32_t xk53 = u53; uint32_t ti54 = _t[54U]; @@ -938,14 +930,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb54 + ((va53 + (vc54 ^ (vb54 | ~vd54)) + xk53 + ti54) - << (uint32_t)15U - | (va53 + (vc54 ^ (vb54 | ~vd54)) + xk53 + ti54) >> (uint32_t)17U); + << 15U + | (va53 + (vc54 ^ (vb54 | ~vd54)) + xk53 + ti54) >> 17U); abcd[2U] = v53; uint32_t va54 = abcd[1U]; uint32_t vb55 = abcd[2U]; uint32_t vc55 = abcd[3U]; uint32_t vd55 = abcd[0U]; - uint8_t *b55 = x + (uint32_t)4U; + uint8_t *b55 = x + 4U; uint32_t u54 = load32_le(b55); uint32_t xk54 = u54; uint32_t ti55 = _t[55U]; @@ -954,14 +946,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb55 + ((va54 + (vc55 ^ (vb55 | ~vd55)) + xk54 + ti55) - << (uint32_t)21U - | (va54 + (vc55 ^ (vb55 | ~vd55)) + xk54 + ti55) >> (uint32_t)11U); + << 21U + | (va54 + (vc55 ^ (vb55 | ~vd55)) + xk54 + ti55) >> 11U); abcd[1U] = v54; uint32_t va55 = abcd[0U]; uint32_t vb56 = abcd[1U]; uint32_t vc56 = abcd[2U]; uint32_t vd56 = abcd[3U]; - uint8_t *b56 = x + (uint32_t)32U; + uint8_t *b56 = x + 32U; uint32_t u55 = load32_le(b56); uint32_t xk55 = u55; uint32_t ti56 = _t[56U]; @@ -970,14 +962,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb56 + ((va55 + (vc56 ^ (vb56 | ~vd56)) + xk55 + ti56) - << (uint32_t)6U - | (va55 + (vc56 ^ (vb56 | ~vd56)) + xk55 + ti56) >> (uint32_t)26U); + << 6U + | (va55 + (vc56 ^ (vb56 | ~vd56)) + xk55 + ti56) >> 26U); abcd[0U] = v55; uint32_t va56 = abcd[3U]; uint32_t vb57 = abcd[0U]; uint32_t vc57 = abcd[1U]; uint32_t vd57 = abcd[2U]; - uint8_t *b57 = x + (uint32_t)60U; + uint8_t *b57 = x + 60U; uint32_t u56 = load32_le(b57); uint32_t xk56 = u56; uint32_t ti57 = _t[57U]; @@ -986,14 +978,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb57 + ((va56 + (vc57 ^ (vb57 | ~vd57)) + xk56 + ti57) - << (uint32_t)10U - | (va56 + (vc57 ^ 
(vb57 | ~vd57)) + xk56 + ti57) >> (uint32_t)22U); + << 10U + | (va56 + (vc57 ^ (vb57 | ~vd57)) + xk56 + ti57) >> 22U); abcd[3U] = v56; uint32_t va57 = abcd[2U]; uint32_t vb58 = abcd[3U]; uint32_t vc58 = abcd[0U]; uint32_t vd58 = abcd[1U]; - uint8_t *b58 = x + (uint32_t)24U; + uint8_t *b58 = x + 24U; uint32_t u57 = load32_le(b58); uint32_t xk57 = u57; uint32_t ti58 = _t[58U]; @@ -1002,14 +994,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb58 + ((va57 + (vc58 ^ (vb58 | ~vd58)) + xk57 + ti58) - << (uint32_t)15U - | (va57 + (vc58 ^ (vb58 | ~vd58)) + xk57 + ti58) >> (uint32_t)17U); + << 15U + | (va57 + (vc58 ^ (vb58 | ~vd58)) + xk57 + ti58) >> 17U); abcd[2U] = v57; uint32_t va58 = abcd[1U]; uint32_t vb59 = abcd[2U]; uint32_t vc59 = abcd[3U]; uint32_t vd59 = abcd[0U]; - uint8_t *b59 = x + (uint32_t)52U; + uint8_t *b59 = x + 52U; uint32_t u58 = load32_le(b59); uint32_t xk58 = u58; uint32_t ti59 = _t[59U]; @@ -1018,14 +1010,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb59 + ((va58 + (vc59 ^ (vb59 | ~vd59)) + xk58 + ti59) - << (uint32_t)21U - | (va58 + (vc59 ^ (vb59 | ~vd59)) + xk58 + ti59) >> (uint32_t)11U); + << 21U + | (va58 + (vc59 ^ (vb59 | ~vd59)) + xk58 + ti59) >> 11U); abcd[1U] = v58; uint32_t va59 = abcd[0U]; uint32_t vb60 = abcd[1U]; uint32_t vc60 = abcd[2U]; uint32_t vd60 = abcd[3U]; - uint8_t *b60 = x + (uint32_t)16U; + uint8_t *b60 = x + 16U; uint32_t u59 = load32_le(b60); uint32_t xk59 = u59; uint32_t ti60 = _t[60U]; @@ -1034,14 +1026,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb60 + ((va59 + (vc60 ^ (vb60 | ~vd60)) + xk59 + ti60) - << (uint32_t)6U - | (va59 + (vc60 ^ (vb60 | ~vd60)) + xk59 + ti60) >> (uint32_t)26U); + << 6U + | (va59 + (vc60 ^ (vb60 | ~vd60)) + xk59 + ti60) >> 26U); abcd[0U] = v59; uint32_t va60 = abcd[3U]; uint32_t vb61 = abcd[0U]; uint32_t vc61 = abcd[1U]; uint32_t vd61 = abcd[2U]; - uint8_t *b61 = x + (uint32_t)44U; + uint8_t *b61 = x + 44U; uint32_t u60 = load32_le(b61); uint32_t xk60 = u60; uint32_t ti61 = _t[61U]; @@ -1050,14 +1042,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb61 + ((va60 + (vc61 ^ (vb61 | ~vd61)) + xk60 + ti61) - << (uint32_t)10U - | (va60 + (vc61 ^ (vb61 | ~vd61)) + xk60 + ti61) >> (uint32_t)22U); + << 10U + | (va60 + (vc61 ^ (vb61 | ~vd61)) + xk60 + ti61) >> 22U); abcd[3U] = v60; uint32_t va61 = abcd[2U]; uint32_t vb62 = abcd[3U]; uint32_t vc62 = abcd[0U]; uint32_t vd62 = abcd[1U]; - uint8_t *b62 = x + (uint32_t)8U; + uint8_t *b62 = x + 8U; uint32_t u61 = load32_le(b62); uint32_t xk61 = u61; uint32_t ti62 = _t[62U]; @@ -1066,14 +1058,14 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb62 + ((va61 + (vc62 ^ (vb62 | ~vd62)) + xk61 + ti62) - << (uint32_t)15U - | (va61 + (vc62 ^ (vb62 | ~vd62)) + xk61 + ti62) >> (uint32_t)17U); + << 15U + | (va61 + (vc62 ^ (vb62 | ~vd62)) + xk61 + ti62) >> 17U); abcd[2U] = v61; uint32_t va62 = abcd[1U]; uint32_t vb = abcd[2U]; uint32_t vc = abcd[3U]; uint32_t vd = abcd[0U]; - uint8_t *b63 = x + (uint32_t)36U; + uint8_t *b63 = x + 36U; uint32_t u62 = load32_le(b63); uint32_t xk62 = u62; uint32_t ti = _t[63U]; @@ -1082,8 +1074,8 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) vb + ((va62 + (vc ^ (vb | ~vd)) + xk62 + ti) - << (uint32_t)21U - | (va62 + (vc ^ (vb | ~vd)) + xk62 + ti) >> (uint32_t)11U); + << 21U + | (va62 + (vc ^ (vb | ~vd)) + xk62 + ti) >> 11U); abcd[1U] = v62; uint32_t a = abcd[0U]; uint32_t b = abcd[1U]; @@ -1095,98 +1087,69 @@ static void legacy_update(uint32_t *abcd, uint8_t *x) abcd[3U] = d + dd; } -static void 
legacy_pad(uint64_t len, uint8_t *dst) +static void pad(uint64_t len, uint8_t *dst) { uint8_t *dst1 = dst; - dst1[0U] = (uint8_t)0x80U; - uint8_t *dst2 = dst + (uint32_t)1U; - for - (uint32_t - i = (uint32_t)0U; - i - < ((uint32_t)128U - ((uint32_t)9U + (uint32_t)(len % (uint64_t)(uint32_t)64U))) % (uint32_t)64U; - i++) + dst1[0U] = 0x80U; + uint8_t *dst2 = dst + 1U; + for (uint32_t i = 0U; i < (128U - (9U + (uint32_t)(len % (uint64_t)64U))) % 64U; i++) { - dst2[i] = (uint8_t)0U; + dst2[i] = 0U; } - uint8_t - *dst3 = - dst - + - (uint32_t)1U - + - ((uint32_t)128U - ((uint32_t)9U + (uint32_t)(len % (uint64_t)(uint32_t)64U))) - % (uint32_t)64U; - store64_le(dst3, len << (uint32_t)3U); + uint8_t *dst3 = dst + 1U + (128U - (9U + (uint32_t)(len % (uint64_t)64U))) % 64U; + store64_le(dst3, len << 3U); } -void Hacl_Hash_Core_MD5_legacy_finish(uint32_t *s, uint8_t *dst) +void Hacl_Hash_MD5_finish(uint32_t *s, uint8_t *dst) { - KRML_MAYBE_FOR4(i, - (uint32_t)0U, - (uint32_t)4U, - (uint32_t)1U, - store32_le(dst + i * (uint32_t)4U, s[i]);); + KRML_MAYBE_FOR4(i, 0U, 4U, 1U, store32_le(dst + i * 4U, s[i]);); } -void Hacl_Hash_MD5_legacy_update_multi(uint32_t *s, uint8_t *blocks, uint32_t n_blocks) +void Hacl_Hash_MD5_update_multi(uint32_t *s, uint8_t *blocks, uint32_t n_blocks) { - for (uint32_t i = (uint32_t)0U; i < n_blocks; i++) + for (uint32_t i = 0U; i < n_blocks; i++) { - uint32_t sz = (uint32_t)64U; + uint32_t sz = 64U; uint8_t *block = blocks + sz * i; - legacy_update(s, block); + update(s, block); } } void -Hacl_Hash_MD5_legacy_update_last( - uint32_t *s, - uint64_t prev_len, - uint8_t *input, - uint32_t input_len -) +Hacl_Hash_MD5_update_last(uint32_t *s, uint64_t prev_len, uint8_t *input, uint32_t input_len) { - uint32_t blocks_n = input_len / (uint32_t)64U; - uint32_t blocks_len = blocks_n * (uint32_t)64U; + uint32_t blocks_n = input_len / 64U; + uint32_t blocks_len = blocks_n * 64U; uint8_t *blocks = input; uint32_t rest_len = input_len - blocks_len; uint8_t *rest = input + blocks_len; - Hacl_Hash_MD5_legacy_update_multi(s, blocks, blocks_n); + Hacl_Hash_MD5_update_multi(s, blocks, blocks_n); uint64_t total_input_len = prev_len + (uint64_t)input_len; - uint32_t - pad_len = - (uint32_t)1U - + - ((uint32_t)128U - ((uint32_t)9U + (uint32_t)(total_input_len % (uint64_t)(uint32_t)64U))) - % (uint32_t)64U - + (uint32_t)8U; + uint32_t pad_len = 1U + (128U - (9U + (uint32_t)(total_input_len % (uint64_t)64U))) % 64U + 8U; uint32_t tmp_len = rest_len + pad_len; uint8_t tmp_twoblocks[128U] = { 0U }; uint8_t *tmp = tmp_twoblocks; uint8_t *tmp_rest = tmp; uint8_t *tmp_pad = tmp + rest_len; memcpy(tmp_rest, rest, rest_len * sizeof (uint8_t)); - legacy_pad(total_input_len, tmp_pad); - Hacl_Hash_MD5_legacy_update_multi(s, tmp, tmp_len / (uint32_t)64U); + pad(total_input_len, tmp_pad); + Hacl_Hash_MD5_update_multi(s, tmp, tmp_len / 64U); } -void Hacl_Hash_MD5_legacy_hash(uint8_t *input, uint32_t input_len, uint8_t *dst) +void Hacl_Hash_MD5_hash_oneshot(uint8_t *output, uint8_t *input, uint32_t input_len) { - uint32_t - s[4U] = - { (uint32_t)0x67452301U, (uint32_t)0xefcdab89U, (uint32_t)0x98badcfeU, (uint32_t)0x10325476U }; - uint32_t blocks_n0 = input_len / (uint32_t)64U; + uint32_t s[4U] = { 0x67452301U, 0xefcdab89U, 0x98badcfeU, 0x10325476U }; + uint32_t blocks_n0 = input_len / 64U; uint32_t blocks_n1; - if (input_len % (uint32_t)64U == (uint32_t)0U && blocks_n0 > (uint32_t)0U) + if (input_len % 64U == 0U && blocks_n0 > 0U) { - blocks_n1 = blocks_n0 - (uint32_t)1U; + blocks_n1 = blocks_n0 - 1U; } else { 
blocks_n1 = blocks_n0; } - uint32_t blocks_len0 = blocks_n1 * (uint32_t)64U; + uint32_t blocks_len0 = blocks_n1 * 64U; uint8_t *blocks0 = input; uint32_t rest_len0 = input_len - blocks_len0; uint8_t *rest0 = input + blocks_len0; @@ -1195,75 +1158,75 @@ void Hacl_Hash_MD5_legacy_hash(uint8_t *input, uint32_t input_len, uint8_t *dst) uint8_t *blocks = blocks0; uint32_t rest_len = rest_len0; uint8_t *rest = rest0; - Hacl_Hash_MD5_legacy_update_multi(s, blocks, blocks_n); - Hacl_Hash_MD5_legacy_update_last(s, (uint64_t)blocks_len, rest, rest_len); - Hacl_Hash_Core_MD5_legacy_finish(s, dst); + Hacl_Hash_MD5_update_multi(s, blocks, blocks_n); + Hacl_Hash_MD5_update_last(s, (uint64_t)blocks_len, rest, rest_len); + Hacl_Hash_MD5_finish(s, output); } -Hacl_Streaming_MD_state_32 *Hacl_Streaming_MD5_legacy_create_in(void) +Hacl_Streaming_MD_state_32 *Hacl_Hash_MD5_malloc(void) { - uint8_t *buf = (uint8_t *)KRML_HOST_CALLOC((uint32_t)64U, sizeof (uint8_t)); - uint32_t *block_state = (uint32_t *)KRML_HOST_CALLOC((uint32_t)4U, sizeof (uint32_t)); + uint8_t *buf = (uint8_t *)KRML_HOST_CALLOC(64U, sizeof (uint8_t)); + uint32_t *block_state = (uint32_t *)KRML_HOST_CALLOC(4U, sizeof (uint32_t)); Hacl_Streaming_MD_state_32 - s = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)(uint32_t)0U }; + s = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)0U }; Hacl_Streaming_MD_state_32 *p = (Hacl_Streaming_MD_state_32 *)KRML_HOST_MALLOC(sizeof (Hacl_Streaming_MD_state_32)); p[0U] = s; - Hacl_Hash_Core_MD5_legacy_init(block_state); + Hacl_Hash_MD5_init(block_state); return p; } -void Hacl_Streaming_MD5_legacy_init(Hacl_Streaming_MD_state_32 *s) +void Hacl_Hash_MD5_reset(Hacl_Streaming_MD_state_32 *state) { - Hacl_Streaming_MD_state_32 scrut = *s; + Hacl_Streaming_MD_state_32 scrut = *state; uint8_t *buf = scrut.buf; uint32_t *block_state = scrut.block_state; - Hacl_Hash_Core_MD5_legacy_init(block_state); + Hacl_Hash_MD5_init(block_state); Hacl_Streaming_MD_state_32 - tmp = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)(uint32_t)0U }; - s[0U] = tmp; + tmp = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)0U }; + state[0U] = tmp; } /** 0 = success, 1 = max length exceeded */ Hacl_Streaming_Types_error_code -Hacl_Streaming_MD5_legacy_update(Hacl_Streaming_MD_state_32 *p, uint8_t *data, uint32_t len) +Hacl_Hash_MD5_update(Hacl_Streaming_MD_state_32 *state, uint8_t *chunk, uint32_t chunk_len) { - Hacl_Streaming_MD_state_32 s = *p; + Hacl_Streaming_MD_state_32 s = *state; uint64_t total_len = s.total_len; - if ((uint64_t)len > (uint64_t)2305843009213693951U - total_len) + if ((uint64_t)chunk_len > 2305843009213693951ULL - total_len) { return Hacl_Streaming_Types_MaximumLengthExceeded; } uint32_t sz; - if (total_len % (uint64_t)(uint32_t)64U == (uint64_t)0U && total_len > (uint64_t)0U) + if (total_len % (uint64_t)64U == 0ULL && total_len > 0ULL) { - sz = (uint32_t)64U; + sz = 64U; } else { - sz = (uint32_t)(total_len % (uint64_t)(uint32_t)64U); + sz = (uint32_t)(total_len % (uint64_t)64U); } - if (len <= (uint32_t)64U - sz) + if (chunk_len <= 64U - sz) { - Hacl_Streaming_MD_state_32 s1 = *p; + Hacl_Streaming_MD_state_32 s1 = *state; uint32_t *block_state1 = s1.block_state; uint8_t *buf = s1.buf; uint64_t total_len1 = s1.total_len; uint32_t sz1; - if (total_len1 % (uint64_t)(uint32_t)64U == (uint64_t)0U && total_len1 > (uint64_t)0U) + if (total_len1 % (uint64_t)64U == 0ULL && total_len1 > 0ULL) { - sz1 = (uint32_t)64U; + sz1 = 64U; } else { - sz1 = 
(uint32_t)(total_len1 % (uint64_t)(uint32_t)64U); + sz1 = (uint32_t)(total_len1 % (uint64_t)64U); } uint8_t *buf2 = buf + sz1; - memcpy(buf2, data, len * sizeof (uint8_t)); - uint64_t total_len2 = total_len1 + (uint64_t)len; - *p + memcpy(buf2, chunk, chunk_len * sizeof (uint8_t)); + uint64_t total_len2 = total_len1 + (uint64_t)chunk_len; + *state = ( (Hacl_Streaming_MD_state_32){ @@ -1273,74 +1236,74 @@ Hacl_Streaming_MD5_legacy_update(Hacl_Streaming_MD_state_32 *p, uint8_t *data, u } ); } - else if (sz == (uint32_t)0U) + else if (sz == 0U) { - Hacl_Streaming_MD_state_32 s1 = *p; + Hacl_Streaming_MD_state_32 s1 = *state; uint32_t *block_state1 = s1.block_state; uint8_t *buf = s1.buf; uint64_t total_len1 = s1.total_len; uint32_t sz1; - if (total_len1 % (uint64_t)(uint32_t)64U == (uint64_t)0U && total_len1 > (uint64_t)0U) + if (total_len1 % (uint64_t)64U == 0ULL && total_len1 > 0ULL) { - sz1 = (uint32_t)64U; + sz1 = 64U; } else { - sz1 = (uint32_t)(total_len1 % (uint64_t)(uint32_t)64U); + sz1 = (uint32_t)(total_len1 % (uint64_t)64U); } - if (!(sz1 == (uint32_t)0U)) + if (!(sz1 == 0U)) { - Hacl_Hash_MD5_legacy_update_multi(block_state1, buf, (uint32_t)1U); + Hacl_Hash_MD5_update_multi(block_state1, buf, 1U); } uint32_t ite; - if ((uint64_t)len % (uint64_t)(uint32_t)64U == (uint64_t)0U && (uint64_t)len > (uint64_t)0U) + if ((uint64_t)chunk_len % (uint64_t)64U == 0ULL && (uint64_t)chunk_len > 0ULL) { - ite = (uint32_t)64U; + ite = 64U; } else { - ite = (uint32_t)((uint64_t)len % (uint64_t)(uint32_t)64U); + ite = (uint32_t)((uint64_t)chunk_len % (uint64_t)64U); } - uint32_t n_blocks = (len - ite) / (uint32_t)64U; - uint32_t data1_len = n_blocks * (uint32_t)64U; - uint32_t data2_len = len - data1_len; - uint8_t *data1 = data; - uint8_t *data2 = data + data1_len; - Hacl_Hash_MD5_legacy_update_multi(block_state1, data1, data1_len / (uint32_t)64U); + uint32_t n_blocks = (chunk_len - ite) / 64U; + uint32_t data1_len = n_blocks * 64U; + uint32_t data2_len = chunk_len - data1_len; + uint8_t *data1 = chunk; + uint8_t *data2 = chunk + data1_len; + Hacl_Hash_MD5_update_multi(block_state1, data1, data1_len / 64U); uint8_t *dst = buf; memcpy(dst, data2, data2_len * sizeof (uint8_t)); - *p + *state = ( (Hacl_Streaming_MD_state_32){ .block_state = block_state1, .buf = buf, - .total_len = total_len1 + (uint64_t)len + .total_len = total_len1 + (uint64_t)chunk_len } ); } else { - uint32_t diff = (uint32_t)64U - sz; - uint8_t *data1 = data; - uint8_t *data2 = data + diff; - Hacl_Streaming_MD_state_32 s1 = *p; + uint32_t diff = 64U - sz; + uint8_t *chunk1 = chunk; + uint8_t *chunk2 = chunk + diff; + Hacl_Streaming_MD_state_32 s1 = *state; uint32_t *block_state10 = s1.block_state; uint8_t *buf0 = s1.buf; uint64_t total_len10 = s1.total_len; uint32_t sz10; - if (total_len10 % (uint64_t)(uint32_t)64U == (uint64_t)0U && total_len10 > (uint64_t)0U) + if (total_len10 % (uint64_t)64U == 0ULL && total_len10 > 0ULL) { - sz10 = (uint32_t)64U; + sz10 = 64U; } else { - sz10 = (uint32_t)(total_len10 % (uint64_t)(uint32_t)64U); + sz10 = (uint32_t)(total_len10 % (uint64_t)64U); } uint8_t *buf2 = buf0 + sz10; - memcpy(buf2, data1, diff * sizeof (uint8_t)); + memcpy(buf2, chunk1, diff * sizeof (uint8_t)); uint64_t total_len2 = total_len10 + (uint64_t)diff; - *p + *state = ( (Hacl_Streaming_MD_state_32){ @@ -1349,114 +1312,109 @@ Hacl_Streaming_MD5_legacy_update(Hacl_Streaming_MD_state_32 *p, uint8_t *data, u .total_len = total_len2 } ); - Hacl_Streaming_MD_state_32 s10 = *p; + Hacl_Streaming_MD_state_32 s10 = *state; uint32_t 
*block_state1 = s10.block_state; uint8_t *buf = s10.buf; uint64_t total_len1 = s10.total_len; uint32_t sz1; - if (total_len1 % (uint64_t)(uint32_t)64U == (uint64_t)0U && total_len1 > (uint64_t)0U) + if (total_len1 % (uint64_t)64U == 0ULL && total_len1 > 0ULL) { - sz1 = (uint32_t)64U; + sz1 = 64U; } else { - sz1 = (uint32_t)(total_len1 % (uint64_t)(uint32_t)64U); + sz1 = (uint32_t)(total_len1 % (uint64_t)64U); } - if (!(sz1 == (uint32_t)0U)) + if (!(sz1 == 0U)) { - Hacl_Hash_MD5_legacy_update_multi(block_state1, buf, (uint32_t)1U); + Hacl_Hash_MD5_update_multi(block_state1, buf, 1U); } uint32_t ite; if - ( - (uint64_t)(len - diff) - % (uint64_t)(uint32_t)64U - == (uint64_t)0U - && (uint64_t)(len - diff) > (uint64_t)0U - ) + ((uint64_t)(chunk_len - diff) % (uint64_t)64U == 0ULL && (uint64_t)(chunk_len - diff) > 0ULL) { - ite = (uint32_t)64U; + ite = 64U; } else { - ite = (uint32_t)((uint64_t)(len - diff) % (uint64_t)(uint32_t)64U); + ite = (uint32_t)((uint64_t)(chunk_len - diff) % (uint64_t)64U); } - uint32_t n_blocks = (len - diff - ite) / (uint32_t)64U; - uint32_t data1_len = n_blocks * (uint32_t)64U; - uint32_t data2_len = len - diff - data1_len; - uint8_t *data11 = data2; - uint8_t *data21 = data2 + data1_len; - Hacl_Hash_MD5_legacy_update_multi(block_state1, data11, data1_len / (uint32_t)64U); + uint32_t n_blocks = (chunk_len - diff - ite) / 64U; + uint32_t data1_len = n_blocks * 64U; + uint32_t data2_len = chunk_len - diff - data1_len; + uint8_t *data1 = chunk2; + uint8_t *data2 = chunk2 + data1_len; + Hacl_Hash_MD5_update_multi(block_state1, data1, data1_len / 64U); uint8_t *dst = buf; - memcpy(dst, data21, data2_len * sizeof (uint8_t)); - *p + memcpy(dst, data2, data2_len * sizeof (uint8_t)); + *state = ( (Hacl_Streaming_MD_state_32){ .block_state = block_state1, .buf = buf, - .total_len = total_len1 + (uint64_t)(len - diff) + .total_len = total_len1 + (uint64_t)(chunk_len - diff) } ); } return Hacl_Streaming_Types_Success; } -void Hacl_Streaming_MD5_legacy_finish(Hacl_Streaming_MD_state_32 *p, uint8_t *dst) +void Hacl_Hash_MD5_digest(Hacl_Streaming_MD_state_32 *state, uint8_t *output) { - Hacl_Streaming_MD_state_32 scrut = *p; + Hacl_Streaming_MD_state_32 scrut = *state; uint32_t *block_state = scrut.block_state; uint8_t *buf_ = scrut.buf; uint64_t total_len = scrut.total_len; uint32_t r; - if (total_len % (uint64_t)(uint32_t)64U == (uint64_t)0U && total_len > (uint64_t)0U) + if (total_len % (uint64_t)64U == 0ULL && total_len > 0ULL) { - r = (uint32_t)64U; + r = 64U; } else { - r = (uint32_t)(total_len % (uint64_t)(uint32_t)64U); + r = (uint32_t)(total_len % (uint64_t)64U); } uint8_t *buf_1 = buf_; uint32_t tmp_block_state[4U] = { 0U }; - memcpy(tmp_block_state, block_state, (uint32_t)4U * sizeof (uint32_t)); + memcpy(tmp_block_state, block_state, 4U * sizeof (uint32_t)); uint32_t ite; - if (r % (uint32_t)64U == (uint32_t)0U && r > (uint32_t)0U) + if (r % 64U == 0U && r > 0U) { - ite = (uint32_t)64U; + ite = 64U; } else { - ite = r % (uint32_t)64U; + ite = r % 64U; } uint8_t *buf_last = buf_1 + r - ite; uint8_t *buf_multi = buf_1; - Hacl_Hash_MD5_legacy_update_multi(tmp_block_state, buf_multi, (uint32_t)0U); + Hacl_Hash_MD5_update_multi(tmp_block_state, buf_multi, 0U); uint64_t prev_len_last = total_len - (uint64_t)r; - Hacl_Hash_MD5_legacy_update_last(tmp_block_state, prev_len_last, buf_last, r); - Hacl_Hash_Core_MD5_legacy_finish(tmp_block_state, dst); + Hacl_Hash_MD5_update_last(tmp_block_state, prev_len_last, buf_last, r); + Hacl_Hash_MD5_finish(tmp_block_state, output); } -void 
Hacl_Streaming_MD5_legacy_free(Hacl_Streaming_MD_state_32 *s) +void Hacl_Hash_MD5_free(Hacl_Streaming_MD_state_32 *state) { - Hacl_Streaming_MD_state_32 scrut = *s; + Hacl_Streaming_MD_state_32 scrut = *state; uint8_t *buf = scrut.buf; uint32_t *block_state = scrut.block_state; KRML_HOST_FREE(block_state); KRML_HOST_FREE(buf); - KRML_HOST_FREE(s); + KRML_HOST_FREE(state); } -Hacl_Streaming_MD_state_32 *Hacl_Streaming_MD5_legacy_copy(Hacl_Streaming_MD_state_32 *s0) +Hacl_Streaming_MD_state_32 *Hacl_Hash_MD5_copy(Hacl_Streaming_MD_state_32 *state) { - Hacl_Streaming_MD_state_32 scrut = *s0; + Hacl_Streaming_MD_state_32 scrut = *state; uint32_t *block_state0 = scrut.block_state; uint8_t *buf0 = scrut.buf; uint64_t total_len0 = scrut.total_len; - uint8_t *buf = (uint8_t *)KRML_HOST_CALLOC((uint32_t)64U, sizeof (uint8_t)); - memcpy(buf, buf0, (uint32_t)64U * sizeof (uint8_t)); - uint32_t *block_state = (uint32_t *)KRML_HOST_CALLOC((uint32_t)4U, sizeof (uint32_t)); - memcpy(block_state, block_state0, (uint32_t)4U * sizeof (uint32_t)); + uint8_t *buf = (uint8_t *)KRML_HOST_CALLOC(64U, sizeof (uint8_t)); + memcpy(buf, buf0, 64U * sizeof (uint8_t)); + uint32_t *block_state = (uint32_t *)KRML_HOST_CALLOC(4U, sizeof (uint32_t)); + memcpy(block_state, block_state0, 4U * sizeof (uint32_t)); Hacl_Streaming_MD_state_32 s = { .block_state = block_state, .buf = buf, .total_len = total_len0 }; Hacl_Streaming_MD_state_32 @@ -1465,8 +1423,8 @@ Hacl_Streaming_MD_state_32 *Hacl_Streaming_MD5_legacy_copy(Hacl_Streaming_MD_sta return p; } -void Hacl_Streaming_MD5_legacy_hash(uint8_t *input, uint32_t input_len, uint8_t *dst) +void Hacl_Hash_MD5_hash(uint8_t *output, uint8_t *input, uint32_t input_len) { - Hacl_Hash_MD5_legacy_hash(input, input_len, dst); + Hacl_Hash_MD5_hash_oneshot(output, input, input_len); } diff --git a/Modules/_hacl/Hacl_Hash_MD5.h b/Modules/_hacl/Hacl_Hash_MD5.h index 13c19fd40f4d12..f69d6e5a81d63a 100644 --- a/Modules/_hacl/Hacl_Hash_MD5.h +++ b/Modules/_hacl/Hacl_Hash_MD5.h @@ -31,31 +31,32 @@ extern "C" { #endif #include +#include "python_hacl_namespaces.h" #include "krml/types.h" #include "krml/lowstar_endianness.h" #include "krml/internal/target.h" #include "Hacl_Streaming_Types.h" -typedef Hacl_Streaming_MD_state_32 Hacl_Streaming_MD5_state; +typedef Hacl_Streaming_MD_state_32 Hacl_Hash_MD5_state_t; -Hacl_Streaming_MD_state_32 *Hacl_Streaming_MD5_legacy_create_in(void); +Hacl_Streaming_MD_state_32 *Hacl_Hash_MD5_malloc(void); -void Hacl_Streaming_MD5_legacy_init(Hacl_Streaming_MD_state_32 *s); +void Hacl_Hash_MD5_reset(Hacl_Streaming_MD_state_32 *state); /** 0 = success, 1 = max length exceeded */ Hacl_Streaming_Types_error_code -Hacl_Streaming_MD5_legacy_update(Hacl_Streaming_MD_state_32 *p, uint8_t *data, uint32_t len); +Hacl_Hash_MD5_update(Hacl_Streaming_MD_state_32 *state, uint8_t *chunk, uint32_t chunk_len); -void Hacl_Streaming_MD5_legacy_finish(Hacl_Streaming_MD_state_32 *p, uint8_t *dst); +void Hacl_Hash_MD5_digest(Hacl_Streaming_MD_state_32 *state, uint8_t *output); -void Hacl_Streaming_MD5_legacy_free(Hacl_Streaming_MD_state_32 *s); +void Hacl_Hash_MD5_free(Hacl_Streaming_MD_state_32 *state); -Hacl_Streaming_MD_state_32 *Hacl_Streaming_MD5_legacy_copy(Hacl_Streaming_MD_state_32 *s0); +Hacl_Streaming_MD_state_32 *Hacl_Hash_MD5_copy(Hacl_Streaming_MD_state_32 *state); -void Hacl_Streaming_MD5_legacy_hash(uint8_t *input, uint32_t input_len, uint8_t *dst); +void Hacl_Hash_MD5_hash(uint8_t *output, uint8_t *input, uint32_t input_len); #if defined(__cplusplus) } diff --git 
a/Modules/_hacl/Hacl_Hash_SHA1.c b/Modules/_hacl/Hacl_Hash_SHA1.c index 5ecb3c0b3a56e0..1a8b09b1711894 100644 --- a/Modules/_hacl/Hacl_Hash_SHA1.c +++ b/Modules/_hacl/Hacl_Hash_SHA1.c @@ -25,19 +25,14 @@ #include "internal/Hacl_Hash_SHA1.h" -static uint32_t -_h0[5U] = - { - (uint32_t)0x67452301U, (uint32_t)0xefcdab89U, (uint32_t)0x98badcfeU, (uint32_t)0x10325476U, - (uint32_t)0xc3d2e1f0U - }; +static uint32_t _h0[5U] = { 0x67452301U, 0xefcdab89U, 0x98badcfeU, 0x10325476U, 0xc3d2e1f0U }; -void Hacl_Hash_Core_SHA1_legacy_init(uint32_t *s) +void Hacl_Hash_SHA1_init(uint32_t *s) { - KRML_MAYBE_FOR5(i, (uint32_t)0U, (uint32_t)5U, (uint32_t)1U, s[i] = _h0[i];); + KRML_MAYBE_FOR5(i, 0U, 5U, 1U, s[i] = _h0[i];); } -static void legacy_update(uint32_t *h, uint8_t *l) +static void update(uint32_t *h, uint8_t *l) { uint32_t ha = h[0U]; uint32_t hb = h[1U]; @@ -45,29 +40,26 @@ static void legacy_update(uint32_t *h, uint8_t *l) uint32_t hd = h[3U]; uint32_t he = h[4U]; uint32_t _w[80U] = { 0U }; - for (uint32_t i = (uint32_t)0U; i < (uint32_t)80U; i++) + for (uint32_t i = 0U; i < 80U; i++) { uint32_t v; - if (i < (uint32_t)16U) + if (i < 16U) { - uint8_t *b = l + i * (uint32_t)4U; + uint8_t *b = l + i * 4U; uint32_t u = load32_be(b); v = u; } else { - uint32_t wmit3 = _w[i - (uint32_t)3U]; - uint32_t wmit8 = _w[i - (uint32_t)8U]; - uint32_t wmit14 = _w[i - (uint32_t)14U]; - uint32_t wmit16 = _w[i - (uint32_t)16U]; - v = - (wmit3 ^ (wmit8 ^ (wmit14 ^ wmit16))) - << (uint32_t)1U - | (wmit3 ^ (wmit8 ^ (wmit14 ^ wmit16))) >> (uint32_t)31U; + uint32_t wmit3 = _w[i - 3U]; + uint32_t wmit8 = _w[i - 8U]; + uint32_t wmit14 = _w[i - 14U]; + uint32_t wmit16 = _w[i - 16U]; + v = (wmit3 ^ (wmit8 ^ (wmit14 ^ wmit16))) << 1U | (wmit3 ^ (wmit8 ^ (wmit14 ^ wmit16))) >> 31U; } _w[i] = v; } - for (uint32_t i = (uint32_t)0U; i < (uint32_t)80U; i++) + for (uint32_t i = 0U; i < 80U; i++) { uint32_t _a = h[0U]; uint32_t _b = h[1U]; @@ -76,11 +68,11 @@ static void legacy_update(uint32_t *h, uint8_t *l) uint32_t _e = h[4U]; uint32_t wmit = _w[i]; uint32_t ite0; - if (i < (uint32_t)20U) + if (i < 20U) { ite0 = (_b & _c) ^ (~_b & _d); } - else if ((uint32_t)39U < i && i < (uint32_t)60U) + else if (39U < i && i < 60U) { ite0 = (_b & _c) ^ ((_b & _d) ^ (_c & _d)); } @@ -89,32 +81,32 @@ static void legacy_update(uint32_t *h, uint8_t *l) ite0 = _b ^ (_c ^ _d); } uint32_t ite; - if (i < (uint32_t)20U) + if (i < 20U) { - ite = (uint32_t)0x5a827999U; + ite = 0x5a827999U; } - else if (i < (uint32_t)40U) + else if (i < 40U) { - ite = (uint32_t)0x6ed9eba1U; + ite = 0x6ed9eba1U; } - else if (i < (uint32_t)60U) + else if (i < 60U) { - ite = (uint32_t)0x8f1bbcdcU; + ite = 0x8f1bbcdcU; } else { - ite = (uint32_t)0xca62c1d6U; + ite = 0xca62c1d6U; } - uint32_t _T = (_a << (uint32_t)5U | _a >> (uint32_t)27U) + ite0 + _e + ite + wmit; + uint32_t _T = (_a << 5U | _a >> 27U) + ite0 + _e + ite + wmit; h[0U] = _T; h[1U] = _a; - h[2U] = _b << (uint32_t)30U | _b >> (uint32_t)2U; + h[2U] = _b << 30U | _b >> 2U; h[3U] = _c; h[4U] = _d; } - for (uint32_t i = (uint32_t)0U; i < (uint32_t)80U; i++) + for (uint32_t i = 0U; i < 80U; i++) { - _w[i] = (uint32_t)0U; + _w[i] = 0U; } uint32_t sta = h[0U]; uint32_t stb = h[1U]; @@ -128,101 +120,69 @@ static void legacy_update(uint32_t *h, uint8_t *l) h[4U] = ste + he; } -static void legacy_pad(uint64_t len, uint8_t *dst) +static void pad(uint64_t len, uint8_t *dst) { uint8_t *dst1 = dst; - dst1[0U] = (uint8_t)0x80U; - uint8_t *dst2 = dst + (uint32_t)1U; - for - (uint32_t - i = (uint32_t)0U; - i - < ((uint32_t)128U - 
((uint32_t)9U + (uint32_t)(len % (uint64_t)(uint32_t)64U))) % (uint32_t)64U; - i++) + dst1[0U] = 0x80U; + uint8_t *dst2 = dst + 1U; + for (uint32_t i = 0U; i < (128U - (9U + (uint32_t)(len % (uint64_t)64U))) % 64U; i++) { - dst2[i] = (uint8_t)0U; + dst2[i] = 0U; } - uint8_t - *dst3 = - dst - + - (uint32_t)1U - + - ((uint32_t)128U - ((uint32_t)9U + (uint32_t)(len % (uint64_t)(uint32_t)64U))) - % (uint32_t)64U; - store64_be(dst3, len << (uint32_t)3U); + uint8_t *dst3 = dst + 1U + (128U - (9U + (uint32_t)(len % (uint64_t)64U))) % 64U; + store64_be(dst3, len << 3U); } -void Hacl_Hash_Core_SHA1_legacy_finish(uint32_t *s, uint8_t *dst) +void Hacl_Hash_SHA1_finish(uint32_t *s, uint8_t *dst) { - KRML_MAYBE_FOR5(i, - (uint32_t)0U, - (uint32_t)5U, - (uint32_t)1U, - store32_be(dst + i * (uint32_t)4U, s[i]);); + KRML_MAYBE_FOR5(i, 0U, 5U, 1U, store32_be(dst + i * 4U, s[i]);); } -void Hacl_Hash_SHA1_legacy_update_multi(uint32_t *s, uint8_t *blocks, uint32_t n_blocks) +void Hacl_Hash_SHA1_update_multi(uint32_t *s, uint8_t *blocks, uint32_t n_blocks) { - for (uint32_t i = (uint32_t)0U; i < n_blocks; i++) + for (uint32_t i = 0U; i < n_blocks; i++) { - uint32_t sz = (uint32_t)64U; + uint32_t sz = 64U; uint8_t *block = blocks + sz * i; - legacy_update(s, block); + update(s, block); } } void -Hacl_Hash_SHA1_legacy_update_last( - uint32_t *s, - uint64_t prev_len, - uint8_t *input, - uint32_t input_len -) +Hacl_Hash_SHA1_update_last(uint32_t *s, uint64_t prev_len, uint8_t *input, uint32_t input_len) { - uint32_t blocks_n = input_len / (uint32_t)64U; - uint32_t blocks_len = blocks_n * (uint32_t)64U; + uint32_t blocks_n = input_len / 64U; + uint32_t blocks_len = blocks_n * 64U; uint8_t *blocks = input; uint32_t rest_len = input_len - blocks_len; uint8_t *rest = input + blocks_len; - Hacl_Hash_SHA1_legacy_update_multi(s, blocks, blocks_n); + Hacl_Hash_SHA1_update_multi(s, blocks, blocks_n); uint64_t total_input_len = prev_len + (uint64_t)input_len; - uint32_t - pad_len = - (uint32_t)1U - + - ((uint32_t)128U - ((uint32_t)9U + (uint32_t)(total_input_len % (uint64_t)(uint32_t)64U))) - % (uint32_t)64U - + (uint32_t)8U; + uint32_t pad_len = 1U + (128U - (9U + (uint32_t)(total_input_len % (uint64_t)64U))) % 64U + 8U; uint32_t tmp_len = rest_len + pad_len; uint8_t tmp_twoblocks[128U] = { 0U }; uint8_t *tmp = tmp_twoblocks; uint8_t *tmp_rest = tmp; uint8_t *tmp_pad = tmp + rest_len; memcpy(tmp_rest, rest, rest_len * sizeof (uint8_t)); - legacy_pad(total_input_len, tmp_pad); - Hacl_Hash_SHA1_legacy_update_multi(s, tmp, tmp_len / (uint32_t)64U); + pad(total_input_len, tmp_pad); + Hacl_Hash_SHA1_update_multi(s, tmp, tmp_len / 64U); } -void Hacl_Hash_SHA1_legacy_hash(uint8_t *input, uint32_t input_len, uint8_t *dst) +void Hacl_Hash_SHA1_hash_oneshot(uint8_t *output, uint8_t *input, uint32_t input_len) { - uint32_t - s[5U] = - { - (uint32_t)0x67452301U, (uint32_t)0xefcdab89U, (uint32_t)0x98badcfeU, (uint32_t)0x10325476U, - (uint32_t)0xc3d2e1f0U - }; - uint32_t blocks_n0 = input_len / (uint32_t)64U; + uint32_t s[5U] = { 0x67452301U, 0xefcdab89U, 0x98badcfeU, 0x10325476U, 0xc3d2e1f0U }; + uint32_t blocks_n0 = input_len / 64U; uint32_t blocks_n1; - if (input_len % (uint32_t)64U == (uint32_t)0U && blocks_n0 > (uint32_t)0U) + if (input_len % 64U == 0U && blocks_n0 > 0U) { - blocks_n1 = blocks_n0 - (uint32_t)1U; + blocks_n1 = blocks_n0 - 1U; } else { blocks_n1 = blocks_n0; } - uint32_t blocks_len0 = blocks_n1 * (uint32_t)64U; + uint32_t blocks_len0 = blocks_n1 * 64U; uint8_t *blocks0 = input; uint32_t rest_len0 = input_len - 
blocks_len0; uint8_t *rest0 = input + blocks_len0; @@ -231,75 +191,75 @@ void Hacl_Hash_SHA1_legacy_hash(uint8_t *input, uint32_t input_len, uint8_t *dst uint8_t *blocks = blocks0; uint32_t rest_len = rest_len0; uint8_t *rest = rest0; - Hacl_Hash_SHA1_legacy_update_multi(s, blocks, blocks_n); - Hacl_Hash_SHA1_legacy_update_last(s, (uint64_t)blocks_len, rest, rest_len); - Hacl_Hash_Core_SHA1_legacy_finish(s, dst); + Hacl_Hash_SHA1_update_multi(s, blocks, blocks_n); + Hacl_Hash_SHA1_update_last(s, (uint64_t)blocks_len, rest, rest_len); + Hacl_Hash_SHA1_finish(s, output); } -Hacl_Streaming_MD_state_32 *Hacl_Streaming_SHA1_legacy_create_in(void) +Hacl_Streaming_MD_state_32 *Hacl_Hash_SHA1_malloc(void) { - uint8_t *buf = (uint8_t *)KRML_HOST_CALLOC((uint32_t)64U, sizeof (uint8_t)); - uint32_t *block_state = (uint32_t *)KRML_HOST_CALLOC((uint32_t)5U, sizeof (uint32_t)); + uint8_t *buf = (uint8_t *)KRML_HOST_CALLOC(64U, sizeof (uint8_t)); + uint32_t *block_state = (uint32_t *)KRML_HOST_CALLOC(5U, sizeof (uint32_t)); Hacl_Streaming_MD_state_32 - s = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)(uint32_t)0U }; + s = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)0U }; Hacl_Streaming_MD_state_32 *p = (Hacl_Streaming_MD_state_32 *)KRML_HOST_MALLOC(sizeof (Hacl_Streaming_MD_state_32)); p[0U] = s; - Hacl_Hash_Core_SHA1_legacy_init(block_state); + Hacl_Hash_SHA1_init(block_state); return p; } -void Hacl_Streaming_SHA1_legacy_init(Hacl_Streaming_MD_state_32 *s) +void Hacl_Hash_SHA1_reset(Hacl_Streaming_MD_state_32 *state) { - Hacl_Streaming_MD_state_32 scrut = *s; + Hacl_Streaming_MD_state_32 scrut = *state; uint8_t *buf = scrut.buf; uint32_t *block_state = scrut.block_state; - Hacl_Hash_Core_SHA1_legacy_init(block_state); + Hacl_Hash_SHA1_init(block_state); Hacl_Streaming_MD_state_32 - tmp = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)(uint32_t)0U }; - s[0U] = tmp; + tmp = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)0U }; + state[0U] = tmp; } /** 0 = success, 1 = max length exceeded */ Hacl_Streaming_Types_error_code -Hacl_Streaming_SHA1_legacy_update(Hacl_Streaming_MD_state_32 *p, uint8_t *data, uint32_t len) +Hacl_Hash_SHA1_update(Hacl_Streaming_MD_state_32 *state, uint8_t *chunk, uint32_t chunk_len) { - Hacl_Streaming_MD_state_32 s = *p; + Hacl_Streaming_MD_state_32 s = *state; uint64_t total_len = s.total_len; - if ((uint64_t)len > (uint64_t)2305843009213693951U - total_len) + if ((uint64_t)chunk_len > 2305843009213693951ULL - total_len) { return Hacl_Streaming_Types_MaximumLengthExceeded; } uint32_t sz; - if (total_len % (uint64_t)(uint32_t)64U == (uint64_t)0U && total_len > (uint64_t)0U) + if (total_len % (uint64_t)64U == 0ULL && total_len > 0ULL) { - sz = (uint32_t)64U; + sz = 64U; } else { - sz = (uint32_t)(total_len % (uint64_t)(uint32_t)64U); + sz = (uint32_t)(total_len % (uint64_t)64U); } - if (len <= (uint32_t)64U - sz) + if (chunk_len <= 64U - sz) { - Hacl_Streaming_MD_state_32 s1 = *p; + Hacl_Streaming_MD_state_32 s1 = *state; uint32_t *block_state1 = s1.block_state; uint8_t *buf = s1.buf; uint64_t total_len1 = s1.total_len; uint32_t sz1; - if (total_len1 % (uint64_t)(uint32_t)64U == (uint64_t)0U && total_len1 > (uint64_t)0U) + if (total_len1 % (uint64_t)64U == 0ULL && total_len1 > 0ULL) { - sz1 = (uint32_t)64U; + sz1 = 64U; } else { - sz1 = (uint32_t)(total_len1 % (uint64_t)(uint32_t)64U); + sz1 = (uint32_t)(total_len1 % (uint64_t)64U); } uint8_t *buf2 = buf + sz1; - memcpy(buf2, data, len * sizeof (uint8_t)); 
- uint64_t total_len2 = total_len1 + (uint64_t)len; - *p + memcpy(buf2, chunk, chunk_len * sizeof (uint8_t)); + uint64_t total_len2 = total_len1 + (uint64_t)chunk_len; + *state = ( (Hacl_Streaming_MD_state_32){ @@ -309,74 +269,74 @@ Hacl_Streaming_SHA1_legacy_update(Hacl_Streaming_MD_state_32 *p, uint8_t *data, } ); } - else if (sz == (uint32_t)0U) + else if (sz == 0U) { - Hacl_Streaming_MD_state_32 s1 = *p; + Hacl_Streaming_MD_state_32 s1 = *state; uint32_t *block_state1 = s1.block_state; uint8_t *buf = s1.buf; uint64_t total_len1 = s1.total_len; uint32_t sz1; - if (total_len1 % (uint64_t)(uint32_t)64U == (uint64_t)0U && total_len1 > (uint64_t)0U) + if (total_len1 % (uint64_t)64U == 0ULL && total_len1 > 0ULL) { - sz1 = (uint32_t)64U; + sz1 = 64U; } else { - sz1 = (uint32_t)(total_len1 % (uint64_t)(uint32_t)64U); + sz1 = (uint32_t)(total_len1 % (uint64_t)64U); } - if (!(sz1 == (uint32_t)0U)) + if (!(sz1 == 0U)) { - Hacl_Hash_SHA1_legacy_update_multi(block_state1, buf, (uint32_t)1U); + Hacl_Hash_SHA1_update_multi(block_state1, buf, 1U); } uint32_t ite; - if ((uint64_t)len % (uint64_t)(uint32_t)64U == (uint64_t)0U && (uint64_t)len > (uint64_t)0U) + if ((uint64_t)chunk_len % (uint64_t)64U == 0ULL && (uint64_t)chunk_len > 0ULL) { - ite = (uint32_t)64U; + ite = 64U; } else { - ite = (uint32_t)((uint64_t)len % (uint64_t)(uint32_t)64U); + ite = (uint32_t)((uint64_t)chunk_len % (uint64_t)64U); } - uint32_t n_blocks = (len - ite) / (uint32_t)64U; - uint32_t data1_len = n_blocks * (uint32_t)64U; - uint32_t data2_len = len - data1_len; - uint8_t *data1 = data; - uint8_t *data2 = data + data1_len; - Hacl_Hash_SHA1_legacy_update_multi(block_state1, data1, data1_len / (uint32_t)64U); + uint32_t n_blocks = (chunk_len - ite) / 64U; + uint32_t data1_len = n_blocks * 64U; + uint32_t data2_len = chunk_len - data1_len; + uint8_t *data1 = chunk; + uint8_t *data2 = chunk + data1_len; + Hacl_Hash_SHA1_update_multi(block_state1, data1, data1_len / 64U); uint8_t *dst = buf; memcpy(dst, data2, data2_len * sizeof (uint8_t)); - *p + *state = ( (Hacl_Streaming_MD_state_32){ .block_state = block_state1, .buf = buf, - .total_len = total_len1 + (uint64_t)len + .total_len = total_len1 + (uint64_t)chunk_len } ); } else { - uint32_t diff = (uint32_t)64U - sz; - uint8_t *data1 = data; - uint8_t *data2 = data + diff; - Hacl_Streaming_MD_state_32 s1 = *p; + uint32_t diff = 64U - sz; + uint8_t *chunk1 = chunk; + uint8_t *chunk2 = chunk + diff; + Hacl_Streaming_MD_state_32 s1 = *state; uint32_t *block_state10 = s1.block_state; uint8_t *buf0 = s1.buf; uint64_t total_len10 = s1.total_len; uint32_t sz10; - if (total_len10 % (uint64_t)(uint32_t)64U == (uint64_t)0U && total_len10 > (uint64_t)0U) + if (total_len10 % (uint64_t)64U == 0ULL && total_len10 > 0ULL) { - sz10 = (uint32_t)64U; + sz10 = 64U; } else { - sz10 = (uint32_t)(total_len10 % (uint64_t)(uint32_t)64U); + sz10 = (uint32_t)(total_len10 % (uint64_t)64U); } uint8_t *buf2 = buf0 + sz10; - memcpy(buf2, data1, diff * sizeof (uint8_t)); + memcpy(buf2, chunk1, diff * sizeof (uint8_t)); uint64_t total_len2 = total_len10 + (uint64_t)diff; - *p + *state = ( (Hacl_Streaming_MD_state_32){ @@ -385,114 +345,109 @@ Hacl_Streaming_SHA1_legacy_update(Hacl_Streaming_MD_state_32 *p, uint8_t *data, .total_len = total_len2 } ); - Hacl_Streaming_MD_state_32 s10 = *p; + Hacl_Streaming_MD_state_32 s10 = *state; uint32_t *block_state1 = s10.block_state; uint8_t *buf = s10.buf; uint64_t total_len1 = s10.total_len; uint32_t sz1; - if (total_len1 % (uint64_t)(uint32_t)64U == (uint64_t)0U && 
total_len1 > (uint64_t)0U) + if (total_len1 % (uint64_t)64U == 0ULL && total_len1 > 0ULL) { - sz1 = (uint32_t)64U; + sz1 = 64U; } else { - sz1 = (uint32_t)(total_len1 % (uint64_t)(uint32_t)64U); + sz1 = (uint32_t)(total_len1 % (uint64_t)64U); } - if (!(sz1 == (uint32_t)0U)) + if (!(sz1 == 0U)) { - Hacl_Hash_SHA1_legacy_update_multi(block_state1, buf, (uint32_t)1U); + Hacl_Hash_SHA1_update_multi(block_state1, buf, 1U); } uint32_t ite; if - ( - (uint64_t)(len - diff) - % (uint64_t)(uint32_t)64U - == (uint64_t)0U - && (uint64_t)(len - diff) > (uint64_t)0U - ) + ((uint64_t)(chunk_len - diff) % (uint64_t)64U == 0ULL && (uint64_t)(chunk_len - diff) > 0ULL) { - ite = (uint32_t)64U; + ite = 64U; } else { - ite = (uint32_t)((uint64_t)(len - diff) % (uint64_t)(uint32_t)64U); + ite = (uint32_t)((uint64_t)(chunk_len - diff) % (uint64_t)64U); } - uint32_t n_blocks = (len - diff - ite) / (uint32_t)64U; - uint32_t data1_len = n_blocks * (uint32_t)64U; - uint32_t data2_len = len - diff - data1_len; - uint8_t *data11 = data2; - uint8_t *data21 = data2 + data1_len; - Hacl_Hash_SHA1_legacy_update_multi(block_state1, data11, data1_len / (uint32_t)64U); + uint32_t n_blocks = (chunk_len - diff - ite) / 64U; + uint32_t data1_len = n_blocks * 64U; + uint32_t data2_len = chunk_len - diff - data1_len; + uint8_t *data1 = chunk2; + uint8_t *data2 = chunk2 + data1_len; + Hacl_Hash_SHA1_update_multi(block_state1, data1, data1_len / 64U); uint8_t *dst = buf; - memcpy(dst, data21, data2_len * sizeof (uint8_t)); - *p + memcpy(dst, data2, data2_len * sizeof (uint8_t)); + *state = ( (Hacl_Streaming_MD_state_32){ .block_state = block_state1, .buf = buf, - .total_len = total_len1 + (uint64_t)(len - diff) + .total_len = total_len1 + (uint64_t)(chunk_len - diff) } ); } return Hacl_Streaming_Types_Success; } -void Hacl_Streaming_SHA1_legacy_finish(Hacl_Streaming_MD_state_32 *p, uint8_t *dst) +void Hacl_Hash_SHA1_digest(Hacl_Streaming_MD_state_32 *state, uint8_t *output) { - Hacl_Streaming_MD_state_32 scrut = *p; + Hacl_Streaming_MD_state_32 scrut = *state; uint32_t *block_state = scrut.block_state; uint8_t *buf_ = scrut.buf; uint64_t total_len = scrut.total_len; uint32_t r; - if (total_len % (uint64_t)(uint32_t)64U == (uint64_t)0U && total_len > (uint64_t)0U) + if (total_len % (uint64_t)64U == 0ULL && total_len > 0ULL) { - r = (uint32_t)64U; + r = 64U; } else { - r = (uint32_t)(total_len % (uint64_t)(uint32_t)64U); + r = (uint32_t)(total_len % (uint64_t)64U); } uint8_t *buf_1 = buf_; uint32_t tmp_block_state[5U] = { 0U }; - memcpy(tmp_block_state, block_state, (uint32_t)5U * sizeof (uint32_t)); + memcpy(tmp_block_state, block_state, 5U * sizeof (uint32_t)); uint32_t ite; - if (r % (uint32_t)64U == (uint32_t)0U && r > (uint32_t)0U) + if (r % 64U == 0U && r > 0U) { - ite = (uint32_t)64U; + ite = 64U; } else { - ite = r % (uint32_t)64U; + ite = r % 64U; } uint8_t *buf_last = buf_1 + r - ite; uint8_t *buf_multi = buf_1; - Hacl_Hash_SHA1_legacy_update_multi(tmp_block_state, buf_multi, (uint32_t)0U); + Hacl_Hash_SHA1_update_multi(tmp_block_state, buf_multi, 0U); uint64_t prev_len_last = total_len - (uint64_t)r; - Hacl_Hash_SHA1_legacy_update_last(tmp_block_state, prev_len_last, buf_last, r); - Hacl_Hash_Core_SHA1_legacy_finish(tmp_block_state, dst); + Hacl_Hash_SHA1_update_last(tmp_block_state, prev_len_last, buf_last, r); + Hacl_Hash_SHA1_finish(tmp_block_state, output); } -void Hacl_Streaming_SHA1_legacy_free(Hacl_Streaming_MD_state_32 *s) +void Hacl_Hash_SHA1_free(Hacl_Streaming_MD_state_32 *state) { - Hacl_Streaming_MD_state_32 
scrut = *s; + Hacl_Streaming_MD_state_32 scrut = *state; uint8_t *buf = scrut.buf; uint32_t *block_state = scrut.block_state; KRML_HOST_FREE(block_state); KRML_HOST_FREE(buf); - KRML_HOST_FREE(s); + KRML_HOST_FREE(state); } -Hacl_Streaming_MD_state_32 *Hacl_Streaming_SHA1_legacy_copy(Hacl_Streaming_MD_state_32 *s0) +Hacl_Streaming_MD_state_32 *Hacl_Hash_SHA1_copy(Hacl_Streaming_MD_state_32 *state) { - Hacl_Streaming_MD_state_32 scrut = *s0; + Hacl_Streaming_MD_state_32 scrut = *state; uint32_t *block_state0 = scrut.block_state; uint8_t *buf0 = scrut.buf; uint64_t total_len0 = scrut.total_len; - uint8_t *buf = (uint8_t *)KRML_HOST_CALLOC((uint32_t)64U, sizeof (uint8_t)); - memcpy(buf, buf0, (uint32_t)64U * sizeof (uint8_t)); - uint32_t *block_state = (uint32_t *)KRML_HOST_CALLOC((uint32_t)5U, sizeof (uint32_t)); - memcpy(block_state, block_state0, (uint32_t)5U * sizeof (uint32_t)); + uint8_t *buf = (uint8_t *)KRML_HOST_CALLOC(64U, sizeof (uint8_t)); + memcpy(buf, buf0, 64U * sizeof (uint8_t)); + uint32_t *block_state = (uint32_t *)KRML_HOST_CALLOC(5U, sizeof (uint32_t)); + memcpy(block_state, block_state0, 5U * sizeof (uint32_t)); Hacl_Streaming_MD_state_32 s = { .block_state = block_state, .buf = buf, .total_len = total_len0 }; Hacl_Streaming_MD_state_32 @@ -501,8 +456,8 @@ Hacl_Streaming_MD_state_32 *Hacl_Streaming_SHA1_legacy_copy(Hacl_Streaming_MD_st return p; } -void Hacl_Streaming_SHA1_legacy_hash(uint8_t *input, uint32_t input_len, uint8_t *dst) +void Hacl_Hash_SHA1_hash(uint8_t *output, uint8_t *input, uint32_t input_len) { - Hacl_Hash_SHA1_legacy_hash(input, input_len, dst); + Hacl_Hash_SHA1_hash_oneshot(output, input, input_len); } diff --git a/Modules/_hacl/Hacl_Hash_SHA1.h b/Modules/_hacl/Hacl_Hash_SHA1.h index dc50aa6f6d3902..ad1e8e72a739ec 100644 --- a/Modules/_hacl/Hacl_Hash_SHA1.h +++ b/Modules/_hacl/Hacl_Hash_SHA1.h @@ -31,31 +31,32 @@ extern "C" { #endif #include +#include "python_hacl_namespaces.h" #include "krml/types.h" #include "krml/lowstar_endianness.h" #include "krml/internal/target.h" #include "Hacl_Streaming_Types.h" -typedef Hacl_Streaming_MD_state_32 Hacl_Streaming_SHA1_state; +typedef Hacl_Streaming_MD_state_32 Hacl_Hash_SHA1_state_t; -Hacl_Streaming_MD_state_32 *Hacl_Streaming_SHA1_legacy_create_in(void); +Hacl_Streaming_MD_state_32 *Hacl_Hash_SHA1_malloc(void); -void Hacl_Streaming_SHA1_legacy_init(Hacl_Streaming_MD_state_32 *s); +void Hacl_Hash_SHA1_reset(Hacl_Streaming_MD_state_32 *state); /** 0 = success, 1 = max length exceeded */ Hacl_Streaming_Types_error_code -Hacl_Streaming_SHA1_legacy_update(Hacl_Streaming_MD_state_32 *p, uint8_t *data, uint32_t len); +Hacl_Hash_SHA1_update(Hacl_Streaming_MD_state_32 *state, uint8_t *chunk, uint32_t chunk_len); -void Hacl_Streaming_SHA1_legacy_finish(Hacl_Streaming_MD_state_32 *p, uint8_t *dst); +void Hacl_Hash_SHA1_digest(Hacl_Streaming_MD_state_32 *state, uint8_t *output); -void Hacl_Streaming_SHA1_legacy_free(Hacl_Streaming_MD_state_32 *s); +void Hacl_Hash_SHA1_free(Hacl_Streaming_MD_state_32 *state); -Hacl_Streaming_MD_state_32 *Hacl_Streaming_SHA1_legacy_copy(Hacl_Streaming_MD_state_32 *s0); +Hacl_Streaming_MD_state_32 *Hacl_Hash_SHA1_copy(Hacl_Streaming_MD_state_32 *state); -void Hacl_Streaming_SHA1_legacy_hash(uint8_t *input, uint32_t input_len, uint8_t *dst); +void Hacl_Hash_SHA1_hash(uint8_t *output, uint8_t *input, uint32_t input_len); #if defined(__cplusplus) } diff --git a/Modules/_hacl/Hacl_Hash_SHA2.c b/Modules/_hacl/Hacl_Hash_SHA2.c index 08e3f7edbf4ede..4b6af5fc78c680 100644 --- 
a/Modules/_hacl/Hacl_Hash_SHA2.c +++ b/Modules/_hacl/Hacl_Hash_SHA2.c @@ -27,14 +27,14 @@ -void Hacl_SHA2_Scalar32_sha256_init(uint32_t *hash) +void Hacl_Hash_SHA2_sha256_init(uint32_t *hash) { KRML_MAYBE_FOR8(i, - (uint32_t)0U, - (uint32_t)8U, - (uint32_t)1U, + 0U, + 8U, + 1U, uint32_t *os = hash; - uint32_t x = Hacl_Impl_SHA2_Generic_h256[i]; + uint32_t x = Hacl_Hash_SHA2_h256[i]; os[i] = x;); } @@ -42,49 +42,49 @@ static inline void sha256_update(uint8_t *b, uint32_t *hash) { uint32_t hash_old[8U] = { 0U }; uint32_t ws[16U] = { 0U }; - memcpy(hash_old, hash, (uint32_t)8U * sizeof (uint32_t)); + memcpy(hash_old, hash, 8U * sizeof (uint32_t)); uint8_t *b10 = b; uint32_t u = load32_be(b10); ws[0U] = u; - uint32_t u0 = load32_be(b10 + (uint32_t)4U); + uint32_t u0 = load32_be(b10 + 4U); ws[1U] = u0; - uint32_t u1 = load32_be(b10 + (uint32_t)8U); + uint32_t u1 = load32_be(b10 + 8U); ws[2U] = u1; - uint32_t u2 = load32_be(b10 + (uint32_t)12U); + uint32_t u2 = load32_be(b10 + 12U); ws[3U] = u2; - uint32_t u3 = load32_be(b10 + (uint32_t)16U); + uint32_t u3 = load32_be(b10 + 16U); ws[4U] = u3; - uint32_t u4 = load32_be(b10 + (uint32_t)20U); + uint32_t u4 = load32_be(b10 + 20U); ws[5U] = u4; - uint32_t u5 = load32_be(b10 + (uint32_t)24U); + uint32_t u5 = load32_be(b10 + 24U); ws[6U] = u5; - uint32_t u6 = load32_be(b10 + (uint32_t)28U); + uint32_t u6 = load32_be(b10 + 28U); ws[7U] = u6; - uint32_t u7 = load32_be(b10 + (uint32_t)32U); + uint32_t u7 = load32_be(b10 + 32U); ws[8U] = u7; - uint32_t u8 = load32_be(b10 + (uint32_t)36U); + uint32_t u8 = load32_be(b10 + 36U); ws[9U] = u8; - uint32_t u9 = load32_be(b10 + (uint32_t)40U); + uint32_t u9 = load32_be(b10 + 40U); ws[10U] = u9; - uint32_t u10 = load32_be(b10 + (uint32_t)44U); + uint32_t u10 = load32_be(b10 + 44U); ws[11U] = u10; - uint32_t u11 = load32_be(b10 + (uint32_t)48U); + uint32_t u11 = load32_be(b10 + 48U); ws[12U] = u11; - uint32_t u12 = load32_be(b10 + (uint32_t)52U); + uint32_t u12 = load32_be(b10 + 52U); ws[13U] = u12; - uint32_t u13 = load32_be(b10 + (uint32_t)56U); + uint32_t u13 = load32_be(b10 + 56U); ws[14U] = u13; - uint32_t u14 = load32_be(b10 + (uint32_t)60U); + uint32_t u14 = load32_be(b10 + 60U); ws[15U] = u14; KRML_MAYBE_FOR4(i0, - (uint32_t)0U, - (uint32_t)4U, - (uint32_t)1U, + 0U, + 4U, + 1U, KRML_MAYBE_FOR16(i, - (uint32_t)0U, - (uint32_t)16U, - (uint32_t)1U, - uint32_t k_t = Hacl_Impl_SHA2_Generic_k224_256[(uint32_t)16U * i0 + i]; + 0U, + 16U, + 1U, + uint32_t k_t = Hacl_Hash_SHA2_k224_256[16U * i0 + i]; uint32_t ws_t = ws[i]; uint32_t a0 = hash[0U]; uint32_t b0 = hash[1U]; @@ -98,20 +98,13 @@ static inline void sha256_update(uint8_t *b, uint32_t *hash) uint32_t t1 = h02 - + - ((e0 << (uint32_t)26U | e0 >> (uint32_t)6U) - ^ - ((e0 << (uint32_t)21U | e0 >> (uint32_t)11U) - ^ (e0 << (uint32_t)7U | e0 >> (uint32_t)25U))) + + ((e0 << 26U | e0 >> 6U) ^ ((e0 << 21U | e0 >> 11U) ^ (e0 << 7U | e0 >> 25U))) + ((e0 & f0) ^ (~e0 & g0)) + k_e_t + ws_t; uint32_t t2 = - ((a0 << (uint32_t)30U | a0 >> (uint32_t)2U) - ^ - ((a0 << (uint32_t)19U | a0 >> (uint32_t)13U) - ^ (a0 << (uint32_t)10U | a0 >> (uint32_t)22U))) + ((a0 << 30U | a0 >> 2U) ^ ((a0 << 19U | a0 >> 13U) ^ (a0 << 10U | a0 >> 22U))) + ((a0 & b0) ^ ((a0 & c0) ^ (b0 & c0))); uint32_t a1 = t1 + t2; uint32_t b1 = a0; @@ -129,74 +122,63 @@ static inline void sha256_update(uint8_t *b, uint32_t *hash) hash[5U] = f1; hash[6U] = g1; hash[7U] = h12;); - if (i0 < (uint32_t)3U) + if (i0 < 3U) { KRML_MAYBE_FOR16(i, - (uint32_t)0U, - (uint32_t)16U, - (uint32_t)1U, + 0U, + 16U, + 1U, 
uint32_t t16 = ws[i]; - uint32_t t15 = ws[(i + (uint32_t)1U) % (uint32_t)16U]; - uint32_t t7 = ws[(i + (uint32_t)9U) % (uint32_t)16U]; - uint32_t t2 = ws[(i + (uint32_t)14U) % (uint32_t)16U]; - uint32_t - s1 = - (t2 << (uint32_t)15U | t2 >> (uint32_t)17U) - ^ ((t2 << (uint32_t)13U | t2 >> (uint32_t)19U) ^ t2 >> (uint32_t)10U); - uint32_t - s0 = - (t15 << (uint32_t)25U | t15 >> (uint32_t)7U) - ^ ((t15 << (uint32_t)14U | t15 >> (uint32_t)18U) ^ t15 >> (uint32_t)3U); + uint32_t t15 = ws[(i + 1U) % 16U]; + uint32_t t7 = ws[(i + 9U) % 16U]; + uint32_t t2 = ws[(i + 14U) % 16U]; + uint32_t s1 = (t2 << 15U | t2 >> 17U) ^ ((t2 << 13U | t2 >> 19U) ^ t2 >> 10U); + uint32_t s0 = (t15 << 25U | t15 >> 7U) ^ ((t15 << 14U | t15 >> 18U) ^ t15 >> 3U); ws[i] = s1 + t7 + s0 + t16;); }); KRML_MAYBE_FOR8(i, - (uint32_t)0U, - (uint32_t)8U, - (uint32_t)1U, + 0U, + 8U, + 1U, uint32_t *os = hash; uint32_t x = hash[i] + hash_old[i]; os[i] = x;); } -void Hacl_SHA2_Scalar32_sha256_update_nblocks(uint32_t len, uint8_t *b, uint32_t *st) +void Hacl_Hash_SHA2_sha256_update_nblocks(uint32_t len, uint8_t *b, uint32_t *st) { - uint32_t blocks = len / (uint32_t)64U; - for (uint32_t i = (uint32_t)0U; i < blocks; i++) + uint32_t blocks = len / 64U; + for (uint32_t i = 0U; i < blocks; i++) { uint8_t *b0 = b; - uint8_t *mb = b0 + i * (uint32_t)64U; + uint8_t *mb = b0 + i * 64U; sha256_update(mb, st); } } void -Hacl_SHA2_Scalar32_sha256_update_last( - uint64_t totlen, - uint32_t len, - uint8_t *b, - uint32_t *hash -) +Hacl_Hash_SHA2_sha256_update_last(uint64_t totlen, uint32_t len, uint8_t *b, uint32_t *hash) { uint32_t blocks; - if (len + (uint32_t)8U + (uint32_t)1U <= (uint32_t)64U) + if (len + 8U + 1U <= 64U) { - blocks = (uint32_t)1U; + blocks = 1U; } else { - blocks = (uint32_t)2U; + blocks = 2U; } - uint32_t fin = blocks * (uint32_t)64U; + uint32_t fin = blocks * 64U; uint8_t last[128U] = { 0U }; uint8_t totlen_buf[8U] = { 0U }; - uint64_t total_len_bits = totlen << (uint32_t)3U; + uint64_t total_len_bits = totlen << 3U; store64_be(totlen_buf, total_len_bits); uint8_t *b0 = b; memcpy(last, b0, len * sizeof (uint8_t)); - last[len] = (uint8_t)0x80U; - memcpy(last + fin - (uint32_t)8U, totlen_buf, (uint32_t)8U * sizeof (uint8_t)); + last[len] = 0x80U; + memcpy(last + fin - 8U, totlen_buf, 8U * sizeof (uint8_t)); uint8_t *last00 = last; - uint8_t *last10 = last + (uint32_t)64U; + uint8_t *last10 = last + 64U; uint8_t *l0 = last00; uint8_t *l1 = last10; uint8_t *lb0 = l0; @@ -204,65 +186,56 @@ Hacl_SHA2_Scalar32_sha256_update_last( uint8_t *last0 = lb0; uint8_t *last1 = lb1; sha256_update(last0, hash); - if (blocks > (uint32_t)1U) + if (blocks > 1U) { sha256_update(last1, hash); return; } } -void Hacl_SHA2_Scalar32_sha256_finish(uint32_t *st, uint8_t *h) +void Hacl_Hash_SHA2_sha256_finish(uint32_t *st, uint8_t *h) { uint8_t hbuf[32U] = { 0U }; - KRML_MAYBE_FOR8(i, - (uint32_t)0U, - (uint32_t)8U, - (uint32_t)1U, - store32_be(hbuf + i * (uint32_t)4U, st[i]);); - memcpy(h, hbuf, (uint32_t)32U * sizeof (uint8_t)); + KRML_MAYBE_FOR8(i, 0U, 8U, 1U, store32_be(hbuf + i * 4U, st[i]);); + memcpy(h, hbuf, 32U * sizeof (uint8_t)); } -void Hacl_SHA2_Scalar32_sha224_init(uint32_t *hash) +void Hacl_Hash_SHA2_sha224_init(uint32_t *hash) { KRML_MAYBE_FOR8(i, - (uint32_t)0U, - (uint32_t)8U, - (uint32_t)1U, + 0U, + 8U, + 1U, uint32_t *os = hash; - uint32_t x = Hacl_Impl_SHA2_Generic_h224[i]; + uint32_t x = Hacl_Hash_SHA2_h224[i]; os[i] = x;); } static inline void sha224_update_nblocks(uint32_t len, uint8_t *b, uint32_t *st) { - 
Hacl_SHA2_Scalar32_sha256_update_nblocks(len, b, st); + Hacl_Hash_SHA2_sha256_update_nblocks(len, b, st); } -void -Hacl_SHA2_Scalar32_sha224_update_last(uint64_t totlen, uint32_t len, uint8_t *b, uint32_t *st) +void Hacl_Hash_SHA2_sha224_update_last(uint64_t totlen, uint32_t len, uint8_t *b, uint32_t *st) { - Hacl_SHA2_Scalar32_sha256_update_last(totlen, len, b, st); + Hacl_Hash_SHA2_sha256_update_last(totlen, len, b, st); } -void Hacl_SHA2_Scalar32_sha224_finish(uint32_t *st, uint8_t *h) +void Hacl_Hash_SHA2_sha224_finish(uint32_t *st, uint8_t *h) { uint8_t hbuf[32U] = { 0U }; - KRML_MAYBE_FOR8(i, - (uint32_t)0U, - (uint32_t)8U, - (uint32_t)1U, - store32_be(hbuf + i * (uint32_t)4U, st[i]);); - memcpy(h, hbuf, (uint32_t)28U * sizeof (uint8_t)); + KRML_MAYBE_FOR8(i, 0U, 8U, 1U, store32_be(hbuf + i * 4U, st[i]);); + memcpy(h, hbuf, 28U * sizeof (uint8_t)); } -void Hacl_SHA2_Scalar32_sha512_init(uint64_t *hash) +void Hacl_Hash_SHA2_sha512_init(uint64_t *hash) { KRML_MAYBE_FOR8(i, - (uint32_t)0U, - (uint32_t)8U, - (uint32_t)1U, + 0U, + 8U, + 1U, uint64_t *os = hash; - uint64_t x = Hacl_Impl_SHA2_Generic_h512[i]; + uint64_t x = Hacl_Hash_SHA2_h512[i]; os[i] = x;); } @@ -270,49 +243,49 @@ static inline void sha512_update(uint8_t *b, uint64_t *hash) { uint64_t hash_old[8U] = { 0U }; uint64_t ws[16U] = { 0U }; - memcpy(hash_old, hash, (uint32_t)8U * sizeof (uint64_t)); + memcpy(hash_old, hash, 8U * sizeof (uint64_t)); uint8_t *b10 = b; uint64_t u = load64_be(b10); ws[0U] = u; - uint64_t u0 = load64_be(b10 + (uint32_t)8U); + uint64_t u0 = load64_be(b10 + 8U); ws[1U] = u0; - uint64_t u1 = load64_be(b10 + (uint32_t)16U); + uint64_t u1 = load64_be(b10 + 16U); ws[2U] = u1; - uint64_t u2 = load64_be(b10 + (uint32_t)24U); + uint64_t u2 = load64_be(b10 + 24U); ws[3U] = u2; - uint64_t u3 = load64_be(b10 + (uint32_t)32U); + uint64_t u3 = load64_be(b10 + 32U); ws[4U] = u3; - uint64_t u4 = load64_be(b10 + (uint32_t)40U); + uint64_t u4 = load64_be(b10 + 40U); ws[5U] = u4; - uint64_t u5 = load64_be(b10 + (uint32_t)48U); + uint64_t u5 = load64_be(b10 + 48U); ws[6U] = u5; - uint64_t u6 = load64_be(b10 + (uint32_t)56U); + uint64_t u6 = load64_be(b10 + 56U); ws[7U] = u6; - uint64_t u7 = load64_be(b10 + (uint32_t)64U); + uint64_t u7 = load64_be(b10 + 64U); ws[8U] = u7; - uint64_t u8 = load64_be(b10 + (uint32_t)72U); + uint64_t u8 = load64_be(b10 + 72U); ws[9U] = u8; - uint64_t u9 = load64_be(b10 + (uint32_t)80U); + uint64_t u9 = load64_be(b10 + 80U); ws[10U] = u9; - uint64_t u10 = load64_be(b10 + (uint32_t)88U); + uint64_t u10 = load64_be(b10 + 88U); ws[11U] = u10; - uint64_t u11 = load64_be(b10 + (uint32_t)96U); + uint64_t u11 = load64_be(b10 + 96U); ws[12U] = u11; - uint64_t u12 = load64_be(b10 + (uint32_t)104U); + uint64_t u12 = load64_be(b10 + 104U); ws[13U] = u12; - uint64_t u13 = load64_be(b10 + (uint32_t)112U); + uint64_t u13 = load64_be(b10 + 112U); ws[14U] = u13; - uint64_t u14 = load64_be(b10 + (uint32_t)120U); + uint64_t u14 = load64_be(b10 + 120U); ws[15U] = u14; KRML_MAYBE_FOR5(i0, - (uint32_t)0U, - (uint32_t)5U, - (uint32_t)1U, + 0U, + 5U, + 1U, KRML_MAYBE_FOR16(i, - (uint32_t)0U, - (uint32_t)16U, - (uint32_t)1U, - uint64_t k_t = Hacl_Impl_SHA2_Generic_k384_512[(uint32_t)16U * i0 + i]; + 0U, + 16U, + 1U, + uint64_t k_t = Hacl_Hash_SHA2_k384_512[16U * i0 + i]; uint64_t ws_t = ws[i]; uint64_t a0 = hash[0U]; uint64_t b0 = hash[1U]; @@ -326,20 +299,13 @@ static inline void sha512_update(uint8_t *b, uint64_t *hash) uint64_t t1 = h02 - + - ((e0 << (uint32_t)50U | e0 >> (uint32_t)14U) - ^ - ((e0 << 
(uint32_t)46U | e0 >> (uint32_t)18U) - ^ (e0 << (uint32_t)23U | e0 >> (uint32_t)41U))) + + ((e0 << 50U | e0 >> 14U) ^ ((e0 << 46U | e0 >> 18U) ^ (e0 << 23U | e0 >> 41U))) + ((e0 & f0) ^ (~e0 & g0)) + k_e_t + ws_t; uint64_t t2 = - ((a0 << (uint32_t)36U | a0 >> (uint32_t)28U) - ^ - ((a0 << (uint32_t)30U | a0 >> (uint32_t)34U) - ^ (a0 << (uint32_t)25U | a0 >> (uint32_t)39U))) + ((a0 << 36U | a0 >> 28U) ^ ((a0 << 30U | a0 >> 34U) ^ (a0 << 25U | a0 >> 39U))) + ((a0 & b0) ^ ((a0 & c0) ^ (b0 & c0))); uint64_t a1 = t1 + t2; uint64_t b1 = a0; @@ -357,48 +323,42 @@ static inline void sha512_update(uint8_t *b, uint64_t *hash) hash[5U] = f1; hash[6U] = g1; hash[7U] = h12;); - if (i0 < (uint32_t)4U) + if (i0 < 4U) { KRML_MAYBE_FOR16(i, - (uint32_t)0U, - (uint32_t)16U, - (uint32_t)1U, + 0U, + 16U, + 1U, uint64_t t16 = ws[i]; - uint64_t t15 = ws[(i + (uint32_t)1U) % (uint32_t)16U]; - uint64_t t7 = ws[(i + (uint32_t)9U) % (uint32_t)16U]; - uint64_t t2 = ws[(i + (uint32_t)14U) % (uint32_t)16U]; - uint64_t - s1 = - (t2 << (uint32_t)45U | t2 >> (uint32_t)19U) - ^ ((t2 << (uint32_t)3U | t2 >> (uint32_t)61U) ^ t2 >> (uint32_t)6U); - uint64_t - s0 = - (t15 << (uint32_t)63U | t15 >> (uint32_t)1U) - ^ ((t15 << (uint32_t)56U | t15 >> (uint32_t)8U) ^ t15 >> (uint32_t)7U); + uint64_t t15 = ws[(i + 1U) % 16U]; + uint64_t t7 = ws[(i + 9U) % 16U]; + uint64_t t2 = ws[(i + 14U) % 16U]; + uint64_t s1 = (t2 << 45U | t2 >> 19U) ^ ((t2 << 3U | t2 >> 61U) ^ t2 >> 6U); + uint64_t s0 = (t15 << 63U | t15 >> 1U) ^ ((t15 << 56U | t15 >> 8U) ^ t15 >> 7U); ws[i] = s1 + t7 + s0 + t16;); }); KRML_MAYBE_FOR8(i, - (uint32_t)0U, - (uint32_t)8U, - (uint32_t)1U, + 0U, + 8U, + 1U, uint64_t *os = hash; uint64_t x = hash[i] + hash_old[i]; os[i] = x;); } -void Hacl_SHA2_Scalar32_sha512_update_nblocks(uint32_t len, uint8_t *b, uint64_t *st) +void Hacl_Hash_SHA2_sha512_update_nblocks(uint32_t len, uint8_t *b, uint64_t *st) { - uint32_t blocks = len / (uint32_t)128U; - for (uint32_t i = (uint32_t)0U; i < blocks; i++) + uint32_t blocks = len / 128U; + for (uint32_t i = 0U; i < blocks; i++) { uint8_t *b0 = b; - uint8_t *mb = b0 + i * (uint32_t)128U; + uint8_t *mb = b0 + i * 128U; sha512_update(mb, st); } } void -Hacl_SHA2_Scalar32_sha512_update_last( +Hacl_Hash_SHA2_sha512_update_last( FStar_UInt128_uint128 totlen, uint32_t len, uint8_t *b, @@ -406,25 +366,25 @@ Hacl_SHA2_Scalar32_sha512_update_last( ) { uint32_t blocks; - if (len + (uint32_t)16U + (uint32_t)1U <= (uint32_t)128U) + if (len + 16U + 1U <= 128U) { - blocks = (uint32_t)1U; + blocks = 1U; } else { - blocks = (uint32_t)2U; + blocks = 2U; } - uint32_t fin = blocks * (uint32_t)128U; + uint32_t fin = blocks * 128U; uint8_t last[256U] = { 0U }; uint8_t totlen_buf[16U] = { 0U }; - FStar_UInt128_uint128 total_len_bits = FStar_UInt128_shift_left(totlen, (uint32_t)3U); + FStar_UInt128_uint128 total_len_bits = FStar_UInt128_shift_left(totlen, 3U); store128_be(totlen_buf, total_len_bits); uint8_t *b0 = b; memcpy(last, b0, len * sizeof (uint8_t)); - last[len] = (uint8_t)0x80U; - memcpy(last + fin - (uint32_t)16U, totlen_buf, (uint32_t)16U * sizeof (uint8_t)); + last[len] = 0x80U; + memcpy(last + fin - 16U, totlen_buf, 16U * sizeof (uint8_t)); uint8_t *last00 = last; - uint8_t *last10 = last + (uint32_t)128U; + uint8_t *last10 = last + 128U; uint8_t *l0 = last00; uint8_t *l1 = last10; uint8_t *lb0 = l0; @@ -432,76 +392,68 @@ Hacl_SHA2_Scalar32_sha512_update_last( uint8_t *last0 = lb0; uint8_t *last1 = lb1; sha512_update(last0, hash); - if (blocks > (uint32_t)1U) + if (blocks > 1U) { 
sha512_update(last1, hash); return; } } -void Hacl_SHA2_Scalar32_sha512_finish(uint64_t *st, uint8_t *h) +void Hacl_Hash_SHA2_sha512_finish(uint64_t *st, uint8_t *h) { uint8_t hbuf[64U] = { 0U }; - KRML_MAYBE_FOR8(i, - (uint32_t)0U, - (uint32_t)8U, - (uint32_t)1U, - store64_be(hbuf + i * (uint32_t)8U, st[i]);); - memcpy(h, hbuf, (uint32_t)64U * sizeof (uint8_t)); + KRML_MAYBE_FOR8(i, 0U, 8U, 1U, store64_be(hbuf + i * 8U, st[i]);); + memcpy(h, hbuf, 64U * sizeof (uint8_t)); } -void Hacl_SHA2_Scalar32_sha384_init(uint64_t *hash) +void Hacl_Hash_SHA2_sha384_init(uint64_t *hash) { KRML_MAYBE_FOR8(i, - (uint32_t)0U, - (uint32_t)8U, - (uint32_t)1U, + 0U, + 8U, + 1U, uint64_t *os = hash; - uint64_t x = Hacl_Impl_SHA2_Generic_h384[i]; + uint64_t x = Hacl_Hash_SHA2_h384[i]; os[i] = x;); } -void Hacl_SHA2_Scalar32_sha384_update_nblocks(uint32_t len, uint8_t *b, uint64_t *st) +void Hacl_Hash_SHA2_sha384_update_nblocks(uint32_t len, uint8_t *b, uint64_t *st) { - Hacl_SHA2_Scalar32_sha512_update_nblocks(len, b, st); + Hacl_Hash_SHA2_sha512_update_nblocks(len, b, st); } void -Hacl_SHA2_Scalar32_sha384_update_last( +Hacl_Hash_SHA2_sha384_update_last( FStar_UInt128_uint128 totlen, uint32_t len, uint8_t *b, uint64_t *st ) { - Hacl_SHA2_Scalar32_sha512_update_last(totlen, len, b, st); + Hacl_Hash_SHA2_sha512_update_last(totlen, len, b, st); } -void Hacl_SHA2_Scalar32_sha384_finish(uint64_t *st, uint8_t *h) +void Hacl_Hash_SHA2_sha384_finish(uint64_t *st, uint8_t *h) { uint8_t hbuf[64U] = { 0U }; - KRML_MAYBE_FOR8(i, - (uint32_t)0U, - (uint32_t)8U, - (uint32_t)1U, - store64_be(hbuf + i * (uint32_t)8U, st[i]);); - memcpy(h, hbuf, (uint32_t)48U * sizeof (uint8_t)); + KRML_MAYBE_FOR8(i, 0U, 8U, 1U, store64_be(hbuf + i * 8U, st[i]);); + memcpy(h, hbuf, 48U * sizeof (uint8_t)); } /** Allocate initial state for the SHA2_256 hash. The state is to be freed by calling `free_256`. */ -Hacl_Streaming_MD_state_32 *Hacl_Streaming_SHA2_create_in_256(void) +Hacl_Streaming_MD_state_32 *Hacl_Hash_SHA2_malloc_256(void) { - uint8_t *buf = (uint8_t *)KRML_HOST_CALLOC((uint32_t)64U, sizeof (uint8_t)); - uint32_t *block_state = (uint32_t *)KRML_HOST_CALLOC((uint32_t)8U, sizeof (uint32_t)); + uint8_t *buf = (uint8_t *)KRML_HOST_CALLOC(64U, sizeof (uint8_t)); + uint32_t *block_state = (uint32_t *)KRML_HOST_CALLOC(8U, sizeof (uint32_t)); Hacl_Streaming_MD_state_32 - s = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)(uint32_t)0U }; + s = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)0U }; Hacl_Streaming_MD_state_32 *p = (Hacl_Streaming_MD_state_32 *)KRML_HOST_MALLOC(sizeof (Hacl_Streaming_MD_state_32)); p[0U] = s; - Hacl_SHA2_Scalar32_sha256_init(block_state); + Hacl_Hash_SHA2_sha256_init(block_state); return p; } @@ -511,16 +463,16 @@ The state is to be freed by calling `free_256`. Cloning the state this way is useful, for instance, if your control-flow diverges and you need to feed more (different) data into the hash in each branch. 
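A minimal sketch of the cloning pattern this comment describes, built only on the renamed `Hacl_Hash_SHA2_*` entry points declared in this patch; the `Hacl_Hash_SHA2.h` include and the example function name are assumptions, not part of the change:

```c
#include <stdint.h>
#include "Hacl_Hash_SHA2.h"   /* assumed header exposing the renamed API */

/* Hash a shared prefix once, then let two branches diverge on their own
 * copies of the state, as the doc comment above suggests. */
static void sha256_fork_sketch(uint8_t *prefix, uint32_t prefix_len,
                               uint8_t *a, uint32_t a_len,
                               uint8_t *b, uint32_t b_len,
                               uint8_t digest_a[32], uint8_t digest_b[32])
{
    Hacl_Streaming_MD_state_32 *st_a = Hacl_Hash_SHA2_malloc_256();
    (void) Hacl_Hash_SHA2_update_256(st_a, prefix, prefix_len);

    /* Clone after the common prefix; each state must be freed separately. */
    Hacl_Streaming_MD_state_32 *st_b = Hacl_Hash_SHA2_copy_256(st_a);

    (void) Hacl_Hash_SHA2_update_256(st_a, a, a_len);
    (void) Hacl_Hash_SHA2_update_256(st_b, b, b_len);

    Hacl_Hash_SHA2_digest_256(st_a, digest_a);   /* SHA-256(prefix || a) */
    Hacl_Hash_SHA2_digest_256(st_b, digest_b);   /* SHA-256(prefix || b) */

    Hacl_Hash_SHA2_free_256(st_a);
    Hacl_Hash_SHA2_free_256(st_b);
}
```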
*/ -Hacl_Streaming_MD_state_32 *Hacl_Streaming_SHA2_copy_256(Hacl_Streaming_MD_state_32 *s0) +Hacl_Streaming_MD_state_32 *Hacl_Hash_SHA2_copy_256(Hacl_Streaming_MD_state_32 *state) { - Hacl_Streaming_MD_state_32 scrut = *s0; + Hacl_Streaming_MD_state_32 scrut = *state; uint32_t *block_state0 = scrut.block_state; uint8_t *buf0 = scrut.buf; uint64_t total_len0 = scrut.total_len; - uint8_t *buf = (uint8_t *)KRML_HOST_CALLOC((uint32_t)64U, sizeof (uint8_t)); - memcpy(buf, buf0, (uint32_t)64U * sizeof (uint8_t)); - uint32_t *block_state = (uint32_t *)KRML_HOST_CALLOC((uint32_t)8U, sizeof (uint32_t)); - memcpy(block_state, block_state0, (uint32_t)8U * sizeof (uint32_t)); + uint8_t *buf = (uint8_t *)KRML_HOST_CALLOC(64U, sizeof (uint8_t)); + memcpy(buf, buf0, 64U * sizeof (uint8_t)); + uint32_t *block_state = (uint32_t *)KRML_HOST_CALLOC(8U, sizeof (uint32_t)); + memcpy(block_state, block_state0, 8U * sizeof (uint32_t)); Hacl_Streaming_MD_state_32 s = { .block_state = block_state, .buf = buf, .total_len = total_len0 }; Hacl_Streaming_MD_state_32 @@ -532,54 +484,54 @@ Hacl_Streaming_MD_state_32 *Hacl_Streaming_SHA2_copy_256(Hacl_Streaming_MD_state /** Reset an existing state to the initial hash state with empty data. */ -void Hacl_Streaming_SHA2_init_256(Hacl_Streaming_MD_state_32 *s) +void Hacl_Hash_SHA2_reset_256(Hacl_Streaming_MD_state_32 *state) { - Hacl_Streaming_MD_state_32 scrut = *s; + Hacl_Streaming_MD_state_32 scrut = *state; uint8_t *buf = scrut.buf; uint32_t *block_state = scrut.block_state; - Hacl_SHA2_Scalar32_sha256_init(block_state); + Hacl_Hash_SHA2_sha256_init(block_state); Hacl_Streaming_MD_state_32 - tmp = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)(uint32_t)0U }; - s[0U] = tmp; + tmp = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)0U }; + state[0U] = tmp; } static inline Hacl_Streaming_Types_error_code -update_224_256(Hacl_Streaming_MD_state_32 *p, uint8_t *data, uint32_t len) +update_224_256(Hacl_Streaming_MD_state_32 *state, uint8_t *chunk, uint32_t chunk_len) { - Hacl_Streaming_MD_state_32 s = *p; + Hacl_Streaming_MD_state_32 s = *state; uint64_t total_len = s.total_len; - if ((uint64_t)len > (uint64_t)2305843009213693951U - total_len) + if ((uint64_t)chunk_len > 2305843009213693951ULL - total_len) { return Hacl_Streaming_Types_MaximumLengthExceeded; } uint32_t sz; - if (total_len % (uint64_t)(uint32_t)64U == (uint64_t)0U && total_len > (uint64_t)0U) + if (total_len % (uint64_t)64U == 0ULL && total_len > 0ULL) { - sz = (uint32_t)64U; + sz = 64U; } else { - sz = (uint32_t)(total_len % (uint64_t)(uint32_t)64U); + sz = (uint32_t)(total_len % (uint64_t)64U); } - if (len <= (uint32_t)64U - sz) + if (chunk_len <= 64U - sz) { - Hacl_Streaming_MD_state_32 s1 = *p; + Hacl_Streaming_MD_state_32 s1 = *state; uint32_t *block_state1 = s1.block_state; uint8_t *buf = s1.buf; uint64_t total_len1 = s1.total_len; uint32_t sz1; - if (total_len1 % (uint64_t)(uint32_t)64U == (uint64_t)0U && total_len1 > (uint64_t)0U) + if (total_len1 % (uint64_t)64U == 0ULL && total_len1 > 0ULL) { - sz1 = (uint32_t)64U; + sz1 = 64U; } else { - sz1 = (uint32_t)(total_len1 % (uint64_t)(uint32_t)64U); + sz1 = (uint32_t)(total_len1 % (uint64_t)64U); } uint8_t *buf2 = buf + sz1; - memcpy(buf2, data, len * sizeof (uint8_t)); - uint64_t total_len2 = total_len1 + (uint64_t)len; - *p + memcpy(buf2, chunk, chunk_len * sizeof (uint8_t)); + uint64_t total_len2 = total_len1 + (uint64_t)chunk_len; + *state = ( (Hacl_Streaming_MD_state_32){ @@ -589,76 +541,74 @@ 
update_224_256(Hacl_Streaming_MD_state_32 *p, uint8_t *data, uint32_t len) } ); } - else if (sz == (uint32_t)0U) + else if (sz == 0U) { - Hacl_Streaming_MD_state_32 s1 = *p; + Hacl_Streaming_MD_state_32 s1 = *state; uint32_t *block_state1 = s1.block_state; uint8_t *buf = s1.buf; uint64_t total_len1 = s1.total_len; uint32_t sz1; - if (total_len1 % (uint64_t)(uint32_t)64U == (uint64_t)0U && total_len1 > (uint64_t)0U) + if (total_len1 % (uint64_t)64U == 0ULL && total_len1 > 0ULL) { - sz1 = (uint32_t)64U; + sz1 = 64U; } else { - sz1 = (uint32_t)(total_len1 % (uint64_t)(uint32_t)64U); + sz1 = (uint32_t)(total_len1 % (uint64_t)64U); } - if (!(sz1 == (uint32_t)0U)) + if (!(sz1 == 0U)) { - Hacl_SHA2_Scalar32_sha256_update_nblocks((uint32_t)64U, buf, block_state1); + Hacl_Hash_SHA2_sha256_update_nblocks(64U, buf, block_state1); } uint32_t ite; - if ((uint64_t)len % (uint64_t)(uint32_t)64U == (uint64_t)0U && (uint64_t)len > (uint64_t)0U) + if ((uint64_t)chunk_len % (uint64_t)64U == 0ULL && (uint64_t)chunk_len > 0ULL) { - ite = (uint32_t)64U; + ite = 64U; } else { - ite = (uint32_t)((uint64_t)len % (uint64_t)(uint32_t)64U); + ite = (uint32_t)((uint64_t)chunk_len % (uint64_t)64U); } - uint32_t n_blocks = (len - ite) / (uint32_t)64U; - uint32_t data1_len = n_blocks * (uint32_t)64U; - uint32_t data2_len = len - data1_len; - uint8_t *data1 = data; - uint8_t *data2 = data + data1_len; - Hacl_SHA2_Scalar32_sha256_update_nblocks(data1_len / (uint32_t)64U * (uint32_t)64U, - data1, - block_state1); + uint32_t n_blocks = (chunk_len - ite) / 64U; + uint32_t data1_len = n_blocks * 64U; + uint32_t data2_len = chunk_len - data1_len; + uint8_t *data1 = chunk; + uint8_t *data2 = chunk + data1_len; + Hacl_Hash_SHA2_sha256_update_nblocks(data1_len / 64U * 64U, data1, block_state1); uint8_t *dst = buf; memcpy(dst, data2, data2_len * sizeof (uint8_t)); - *p + *state = ( (Hacl_Streaming_MD_state_32){ .block_state = block_state1, .buf = buf, - .total_len = total_len1 + (uint64_t)len + .total_len = total_len1 + (uint64_t)chunk_len } ); } else { - uint32_t diff = (uint32_t)64U - sz; - uint8_t *data1 = data; - uint8_t *data2 = data + diff; - Hacl_Streaming_MD_state_32 s1 = *p; + uint32_t diff = 64U - sz; + uint8_t *chunk1 = chunk; + uint8_t *chunk2 = chunk + diff; + Hacl_Streaming_MD_state_32 s1 = *state; uint32_t *block_state10 = s1.block_state; uint8_t *buf0 = s1.buf; uint64_t total_len10 = s1.total_len; uint32_t sz10; - if (total_len10 % (uint64_t)(uint32_t)64U == (uint64_t)0U && total_len10 > (uint64_t)0U) + if (total_len10 % (uint64_t)64U == 0ULL && total_len10 > 0ULL) { - sz10 = (uint32_t)64U; + sz10 = 64U; } else { - sz10 = (uint32_t)(total_len10 % (uint64_t)(uint32_t)64U); + sz10 = (uint32_t)(total_len10 % (uint64_t)64U); } uint8_t *buf2 = buf0 + sz10; - memcpy(buf2, data1, diff * sizeof (uint8_t)); + memcpy(buf2, chunk1, diff * sizeof (uint8_t)); uint64_t total_len2 = total_len10 + (uint64_t)diff; - *p + *state = ( (Hacl_Streaming_MD_state_32){ @@ -667,55 +617,48 @@ update_224_256(Hacl_Streaming_MD_state_32 *p, uint8_t *data, uint32_t len) .total_len = total_len2 } ); - Hacl_Streaming_MD_state_32 s10 = *p; + Hacl_Streaming_MD_state_32 s10 = *state; uint32_t *block_state1 = s10.block_state; uint8_t *buf = s10.buf; uint64_t total_len1 = s10.total_len; uint32_t sz1; - if (total_len1 % (uint64_t)(uint32_t)64U == (uint64_t)0U && total_len1 > (uint64_t)0U) + if (total_len1 % (uint64_t)64U == 0ULL && total_len1 > 0ULL) { - sz1 = (uint32_t)64U; + sz1 = 64U; } else { - sz1 = (uint32_t)(total_len1 % (uint64_t)(uint32_t)64U); 
+ sz1 = (uint32_t)(total_len1 % (uint64_t)64U); } - if (!(sz1 == (uint32_t)0U)) + if (!(sz1 == 0U)) { - Hacl_SHA2_Scalar32_sha256_update_nblocks((uint32_t)64U, buf, block_state1); + Hacl_Hash_SHA2_sha256_update_nblocks(64U, buf, block_state1); } uint32_t ite; if - ( - (uint64_t)(len - diff) - % (uint64_t)(uint32_t)64U - == (uint64_t)0U - && (uint64_t)(len - diff) > (uint64_t)0U - ) + ((uint64_t)(chunk_len - diff) % (uint64_t)64U == 0ULL && (uint64_t)(chunk_len - diff) > 0ULL) { - ite = (uint32_t)64U; + ite = 64U; } else { - ite = (uint32_t)((uint64_t)(len - diff) % (uint64_t)(uint32_t)64U); + ite = (uint32_t)((uint64_t)(chunk_len - diff) % (uint64_t)64U); } - uint32_t n_blocks = (len - diff - ite) / (uint32_t)64U; - uint32_t data1_len = n_blocks * (uint32_t)64U; - uint32_t data2_len = len - diff - data1_len; - uint8_t *data11 = data2; - uint8_t *data21 = data2 + data1_len; - Hacl_SHA2_Scalar32_sha256_update_nblocks(data1_len / (uint32_t)64U * (uint32_t)64U, - data11, - block_state1); + uint32_t n_blocks = (chunk_len - diff - ite) / 64U; + uint32_t data1_len = n_blocks * 64U; + uint32_t data2_len = chunk_len - diff - data1_len; + uint8_t *data1 = chunk2; + uint8_t *data2 = chunk2 + data1_len; + Hacl_Hash_SHA2_sha256_update_nblocks(data1_len / 64U * 64U, data1, block_state1); uint8_t *dst = buf; - memcpy(dst, data21, data2_len * sizeof (uint8_t)); - *p + memcpy(dst, data2, data2_len * sizeof (uint8_t)); + *state = ( (Hacl_Streaming_MD_state_32){ .block_state = block_state1, .buf = buf, - .total_len = total_len1 + (uint64_t)(len - diff) + .total_len = total_len1 + (uint64_t)(chunk_len - diff) } ); } @@ -725,209 +668,203 @@ update_224_256(Hacl_Streaming_MD_state_32 *p, uint8_t *data, uint32_t len) /** Feed an arbitrary amount of data into the hash. This function returns 0 for success, or 1 if the combined length of all of the data passed to `update_256` -(since the last call to `init_256`) exceeds 2^61-1 bytes. +(since the last call to `reset_256`) exceeds 2^61-1 bytes. This function is identical to the update function for SHA2_224. */ Hacl_Streaming_Types_error_code -Hacl_Streaming_SHA2_update_256( - Hacl_Streaming_MD_state_32 *p, +Hacl_Hash_SHA2_update_256( + Hacl_Streaming_MD_state_32 *state, uint8_t *input, uint32_t input_len ) { - return update_224_256(p, input, input_len); + return update_224_256(state, input, input_len); } /** -Write the resulting hash into `dst`, an array of 32 bytes. The state remains -valid after a call to `finish_256`, meaning the user may feed more data into -the hash via `update_256`. (The finish_256 function operates on an internal copy of +Write the resulting hash into `output`, an array of 32 bytes. The state remains +valid after a call to `digest_256`, meaning the user may feed more data into +the hash via `update_256`. (The digest_256 function operates on an internal copy of the state and therefore does not invalidate the client-held state `p`.) 
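A minimal sketch of the property stated above: `digest_256` operates on an internal copy, so a caller can take a running digest and keep feeding data into the same state. Only entry points declared in this patch are used; the `Hacl_Hash_SHA2.h` include and the example function name are assumptions:

```c
#include <stdint.h>
#include "Hacl_Hash_SHA2.h"   /* assumed header exposing the renamed API */

static void sha256_running_digest_sketch(uint8_t *chunk1, uint32_t len1,
                                         uint8_t *chunk2, uint32_t len2,
                                         uint8_t partial[32], uint8_t full[32])
{
    Hacl_Streaming_MD_state_32 *st = Hacl_Hash_SHA2_malloc_256();

    /* update_256 returns Hacl_Streaming_Types_Success (0) unless the
     * 2^61-1 byte limit is exceeded. */
    (void) Hacl_Hash_SHA2_update_256(st, chunk1, len1);
    Hacl_Hash_SHA2_digest_256(st, partial);      /* SHA-256(chunk1) */

    /* The state is still valid, so more data can be appended. */
    (void) Hacl_Hash_SHA2_update_256(st, chunk2, len2);
    Hacl_Hash_SHA2_digest_256(st, full);         /* SHA-256(chunk1 || chunk2) */

    Hacl_Hash_SHA2_free_256(st);
}
```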
*/ -void Hacl_Streaming_SHA2_finish_256(Hacl_Streaming_MD_state_32 *p, uint8_t *dst) +void Hacl_Hash_SHA2_digest_256(Hacl_Streaming_MD_state_32 *state, uint8_t *output) { - Hacl_Streaming_MD_state_32 scrut = *p; + Hacl_Streaming_MD_state_32 scrut = *state; uint32_t *block_state = scrut.block_state; uint8_t *buf_ = scrut.buf; uint64_t total_len = scrut.total_len; uint32_t r; - if (total_len % (uint64_t)(uint32_t)64U == (uint64_t)0U && total_len > (uint64_t)0U) + if (total_len % (uint64_t)64U == 0ULL && total_len > 0ULL) { - r = (uint32_t)64U; + r = 64U; } else { - r = (uint32_t)(total_len % (uint64_t)(uint32_t)64U); + r = (uint32_t)(total_len % (uint64_t)64U); } uint8_t *buf_1 = buf_; uint32_t tmp_block_state[8U] = { 0U }; - memcpy(tmp_block_state, block_state, (uint32_t)8U * sizeof (uint32_t)); + memcpy(tmp_block_state, block_state, 8U * sizeof (uint32_t)); uint32_t ite; - if (r % (uint32_t)64U == (uint32_t)0U && r > (uint32_t)0U) + if (r % 64U == 0U && r > 0U) { - ite = (uint32_t)64U; + ite = 64U; } else { - ite = r % (uint32_t)64U; + ite = r % 64U; } uint8_t *buf_last = buf_1 + r - ite; uint8_t *buf_multi = buf_1; - Hacl_SHA2_Scalar32_sha256_update_nblocks((uint32_t)0U, buf_multi, tmp_block_state); + Hacl_Hash_SHA2_sha256_update_nblocks(0U, buf_multi, tmp_block_state); uint64_t prev_len_last = total_len - (uint64_t)r; - Hacl_SHA2_Scalar32_sha256_update_last(prev_len_last + (uint64_t)r, - r, - buf_last, - tmp_block_state); - Hacl_SHA2_Scalar32_sha256_finish(tmp_block_state, dst); + Hacl_Hash_SHA2_sha256_update_last(prev_len_last + (uint64_t)r, r, buf_last, tmp_block_state); + Hacl_Hash_SHA2_sha256_finish(tmp_block_state, output); } /** -Free a state allocated with `create_in_256`. +Free a state allocated with `malloc_256`. This function is identical to the free function for SHA2_224. */ -void Hacl_Streaming_SHA2_free_256(Hacl_Streaming_MD_state_32 *s) +void Hacl_Hash_SHA2_free_256(Hacl_Streaming_MD_state_32 *state) { - Hacl_Streaming_MD_state_32 scrut = *s; + Hacl_Streaming_MD_state_32 scrut = *state; uint8_t *buf = scrut.buf; uint32_t *block_state = scrut.block_state; KRML_HOST_FREE(block_state); KRML_HOST_FREE(buf); - KRML_HOST_FREE(s); + KRML_HOST_FREE(state); } /** -Hash `input`, of len `input_len`, into `dst`, an array of 32 bytes. +Hash `input`, of len `input_len`, into `output`, an array of 32 bytes. 
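A minimal sketch of the renamed one-shot entry point; note that the output buffer now comes first in the argument list, whereas the old `Hacl_Streaming_SHA2_hash_256` took `input, input_len, dst`. The `Hacl_Hash_SHA2.h` include and the example function name are assumptions:

```c
#include <stdint.h>
#include "Hacl_Hash_SHA2.h"   /* assumed header exposing the renamed API */

static void sha256_oneshot_sketch(void)
{
    uint8_t msg[3] = { 'a', 'b', 'c' };
    uint8_t md[32];

    /* Output first, then input and its length. */
    Hacl_Hash_SHA2_hash_256(md, msg, 3U);
}
```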
*/ -void Hacl_Streaming_SHA2_hash_256(uint8_t *input, uint32_t input_len, uint8_t *dst) +void Hacl_Hash_SHA2_hash_256(uint8_t *output, uint8_t *input, uint32_t input_len) { uint8_t *ib = input; - uint8_t *rb = dst; + uint8_t *rb = output; uint32_t st[8U] = { 0U }; - Hacl_SHA2_Scalar32_sha256_init(st); - uint32_t rem = input_len % (uint32_t)64U; + Hacl_Hash_SHA2_sha256_init(st); + uint32_t rem = input_len % 64U; uint64_t len_ = (uint64_t)input_len; - Hacl_SHA2_Scalar32_sha256_update_nblocks(input_len, ib, st); - uint32_t rem1 = input_len % (uint32_t)64U; + Hacl_Hash_SHA2_sha256_update_nblocks(input_len, ib, st); + uint32_t rem1 = input_len % 64U; uint8_t *b0 = ib; uint8_t *lb = b0 + input_len - rem1; - Hacl_SHA2_Scalar32_sha256_update_last(len_, rem, lb, st); - Hacl_SHA2_Scalar32_sha256_finish(st, rb); + Hacl_Hash_SHA2_sha256_update_last(len_, rem, lb, st); + Hacl_Hash_SHA2_sha256_finish(st, rb); } -Hacl_Streaming_MD_state_32 *Hacl_Streaming_SHA2_create_in_224(void) +Hacl_Streaming_MD_state_32 *Hacl_Hash_SHA2_malloc_224(void) { - uint8_t *buf = (uint8_t *)KRML_HOST_CALLOC((uint32_t)64U, sizeof (uint8_t)); - uint32_t *block_state = (uint32_t *)KRML_HOST_CALLOC((uint32_t)8U, sizeof (uint32_t)); + uint8_t *buf = (uint8_t *)KRML_HOST_CALLOC(64U, sizeof (uint8_t)); + uint32_t *block_state = (uint32_t *)KRML_HOST_CALLOC(8U, sizeof (uint32_t)); Hacl_Streaming_MD_state_32 - s = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)(uint32_t)0U }; + s = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)0U }; Hacl_Streaming_MD_state_32 *p = (Hacl_Streaming_MD_state_32 *)KRML_HOST_MALLOC(sizeof (Hacl_Streaming_MD_state_32)); p[0U] = s; - Hacl_SHA2_Scalar32_sha224_init(block_state); + Hacl_Hash_SHA2_sha224_init(block_state); return p; } -void Hacl_Streaming_SHA2_init_224(Hacl_Streaming_MD_state_32 *s) +void Hacl_Hash_SHA2_reset_224(Hacl_Streaming_MD_state_32 *state) { - Hacl_Streaming_MD_state_32 scrut = *s; + Hacl_Streaming_MD_state_32 scrut = *state; uint8_t *buf = scrut.buf; uint32_t *block_state = scrut.block_state; - Hacl_SHA2_Scalar32_sha224_init(block_state); + Hacl_Hash_SHA2_sha224_init(block_state); Hacl_Streaming_MD_state_32 - tmp = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)(uint32_t)0U }; - s[0U] = tmp; + tmp = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)0U }; + state[0U] = tmp; } Hacl_Streaming_Types_error_code -Hacl_Streaming_SHA2_update_224( - Hacl_Streaming_MD_state_32 *p, +Hacl_Hash_SHA2_update_224( + Hacl_Streaming_MD_state_32 *state, uint8_t *input, uint32_t input_len ) { - return update_224_256(p, input, input_len); + return update_224_256(state, input, input_len); } /** -Write the resulting hash into `dst`, an array of 28 bytes. The state remains -valid after a call to `finish_224`, meaning the user may feed more data into +Write the resulting hash into `output`, an array of 28 bytes. The state remains +valid after a call to `digest_224`, meaning the user may feed more data into the hash via `update_224`. 
*/ -void Hacl_Streaming_SHA2_finish_224(Hacl_Streaming_MD_state_32 *p, uint8_t *dst) +void Hacl_Hash_SHA2_digest_224(Hacl_Streaming_MD_state_32 *state, uint8_t *output) { - Hacl_Streaming_MD_state_32 scrut = *p; + Hacl_Streaming_MD_state_32 scrut = *state; uint32_t *block_state = scrut.block_state; uint8_t *buf_ = scrut.buf; uint64_t total_len = scrut.total_len; uint32_t r; - if (total_len % (uint64_t)(uint32_t)64U == (uint64_t)0U && total_len > (uint64_t)0U) + if (total_len % (uint64_t)64U == 0ULL && total_len > 0ULL) { - r = (uint32_t)64U; + r = 64U; } else { - r = (uint32_t)(total_len % (uint64_t)(uint32_t)64U); + r = (uint32_t)(total_len % (uint64_t)64U); } uint8_t *buf_1 = buf_; uint32_t tmp_block_state[8U] = { 0U }; - memcpy(tmp_block_state, block_state, (uint32_t)8U * sizeof (uint32_t)); + memcpy(tmp_block_state, block_state, 8U * sizeof (uint32_t)); uint32_t ite; - if (r % (uint32_t)64U == (uint32_t)0U && r > (uint32_t)0U) + if (r % 64U == 0U && r > 0U) { - ite = (uint32_t)64U; + ite = 64U; } else { - ite = r % (uint32_t)64U; + ite = r % 64U; } uint8_t *buf_last = buf_1 + r - ite; uint8_t *buf_multi = buf_1; - sha224_update_nblocks((uint32_t)0U, buf_multi, tmp_block_state); + sha224_update_nblocks(0U, buf_multi, tmp_block_state); uint64_t prev_len_last = total_len - (uint64_t)r; - Hacl_SHA2_Scalar32_sha224_update_last(prev_len_last + (uint64_t)r, - r, - buf_last, - tmp_block_state); - Hacl_SHA2_Scalar32_sha224_finish(tmp_block_state, dst); + Hacl_Hash_SHA2_sha224_update_last(prev_len_last + (uint64_t)r, r, buf_last, tmp_block_state); + Hacl_Hash_SHA2_sha224_finish(tmp_block_state, output); } -void Hacl_Streaming_SHA2_free_224(Hacl_Streaming_MD_state_32 *p) +void Hacl_Hash_SHA2_free_224(Hacl_Streaming_MD_state_32 *state) { - Hacl_Streaming_SHA2_free_256(p); + Hacl_Hash_SHA2_free_256(state); } /** -Hash `input`, of len `input_len`, into `dst`, an array of 28 bytes. +Hash `input`, of len `input_len`, into `output`, an array of 28 bytes. 
*/ -void Hacl_Streaming_SHA2_hash_224(uint8_t *input, uint32_t input_len, uint8_t *dst) +void Hacl_Hash_SHA2_hash_224(uint8_t *output, uint8_t *input, uint32_t input_len) { uint8_t *ib = input; - uint8_t *rb = dst; + uint8_t *rb = output; uint32_t st[8U] = { 0U }; - Hacl_SHA2_Scalar32_sha224_init(st); - uint32_t rem = input_len % (uint32_t)64U; + Hacl_Hash_SHA2_sha224_init(st); + uint32_t rem = input_len % 64U; uint64_t len_ = (uint64_t)input_len; sha224_update_nblocks(input_len, ib, st); - uint32_t rem1 = input_len % (uint32_t)64U; + uint32_t rem1 = input_len % 64U; uint8_t *b0 = ib; uint8_t *lb = b0 + input_len - rem1; - Hacl_SHA2_Scalar32_sha224_update_last(len_, rem, lb, st); - Hacl_SHA2_Scalar32_sha224_finish(st, rb); + Hacl_Hash_SHA2_sha224_update_last(len_, rem, lb, st); + Hacl_Hash_SHA2_sha224_finish(st, rb); } -Hacl_Streaming_MD_state_64 *Hacl_Streaming_SHA2_create_in_512(void) +Hacl_Streaming_MD_state_64 *Hacl_Hash_SHA2_malloc_512(void) { - uint8_t *buf = (uint8_t *)KRML_HOST_CALLOC((uint32_t)128U, sizeof (uint8_t)); - uint64_t *block_state = (uint64_t *)KRML_HOST_CALLOC((uint32_t)8U, sizeof (uint64_t)); + uint8_t *buf = (uint8_t *)KRML_HOST_CALLOC(128U, sizeof (uint8_t)); + uint64_t *block_state = (uint64_t *)KRML_HOST_CALLOC(8U, sizeof (uint64_t)); Hacl_Streaming_MD_state_64 - s = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)(uint32_t)0U }; + s = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)0U }; Hacl_Streaming_MD_state_64 *p = (Hacl_Streaming_MD_state_64 *)KRML_HOST_MALLOC(sizeof (Hacl_Streaming_MD_state_64)); p[0U] = s; - Hacl_SHA2_Scalar32_sha512_init(block_state); + Hacl_Hash_SHA2_sha512_init(block_state); return p; } @@ -937,16 +874,16 @@ The state is to be freed by calling `free_512`. Cloning the state this way is useful, for instance, if your control-flow diverges and you need to feed more (different) data into the hash in each branch. 
*/ -Hacl_Streaming_MD_state_64 *Hacl_Streaming_SHA2_copy_512(Hacl_Streaming_MD_state_64 *s0) +Hacl_Streaming_MD_state_64 *Hacl_Hash_SHA2_copy_512(Hacl_Streaming_MD_state_64 *state) { - Hacl_Streaming_MD_state_64 scrut = *s0; + Hacl_Streaming_MD_state_64 scrut = *state; uint64_t *block_state0 = scrut.block_state; uint8_t *buf0 = scrut.buf; uint64_t total_len0 = scrut.total_len; - uint8_t *buf = (uint8_t *)KRML_HOST_CALLOC((uint32_t)128U, sizeof (uint8_t)); - memcpy(buf, buf0, (uint32_t)128U * sizeof (uint8_t)); - uint64_t *block_state = (uint64_t *)KRML_HOST_CALLOC((uint32_t)8U, sizeof (uint64_t)); - memcpy(block_state, block_state0, (uint32_t)8U * sizeof (uint64_t)); + uint8_t *buf = (uint8_t *)KRML_HOST_CALLOC(128U, sizeof (uint8_t)); + memcpy(buf, buf0, 128U * sizeof (uint8_t)); + uint64_t *block_state = (uint64_t *)KRML_HOST_CALLOC(8U, sizeof (uint64_t)); + memcpy(block_state, block_state0, 8U * sizeof (uint64_t)); Hacl_Streaming_MD_state_64 s = { .block_state = block_state, .buf = buf, .total_len = total_len0 }; Hacl_Streaming_MD_state_64 @@ -955,54 +892,54 @@ Hacl_Streaming_MD_state_64 *Hacl_Streaming_SHA2_copy_512(Hacl_Streaming_MD_state return p; } -void Hacl_Streaming_SHA2_init_512(Hacl_Streaming_MD_state_64 *s) +void Hacl_Hash_SHA2_reset_512(Hacl_Streaming_MD_state_64 *state) { - Hacl_Streaming_MD_state_64 scrut = *s; + Hacl_Streaming_MD_state_64 scrut = *state; uint8_t *buf = scrut.buf; uint64_t *block_state = scrut.block_state; - Hacl_SHA2_Scalar32_sha512_init(block_state); + Hacl_Hash_SHA2_sha512_init(block_state); Hacl_Streaming_MD_state_64 - tmp = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)(uint32_t)0U }; - s[0U] = tmp; + tmp = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)0U }; + state[0U] = tmp; } static inline Hacl_Streaming_Types_error_code -update_384_512(Hacl_Streaming_MD_state_64 *p, uint8_t *data, uint32_t len) +update_384_512(Hacl_Streaming_MD_state_64 *state, uint8_t *chunk, uint32_t chunk_len) { - Hacl_Streaming_MD_state_64 s = *p; + Hacl_Streaming_MD_state_64 s = *state; uint64_t total_len = s.total_len; - if ((uint64_t)len > (uint64_t)18446744073709551615U - total_len) + if ((uint64_t)chunk_len > 18446744073709551615ULL - total_len) { return Hacl_Streaming_Types_MaximumLengthExceeded; } uint32_t sz; - if (total_len % (uint64_t)(uint32_t)128U == (uint64_t)0U && total_len > (uint64_t)0U) + if (total_len % (uint64_t)128U == 0ULL && total_len > 0ULL) { - sz = (uint32_t)128U; + sz = 128U; } else { - sz = (uint32_t)(total_len % (uint64_t)(uint32_t)128U); + sz = (uint32_t)(total_len % (uint64_t)128U); } - if (len <= (uint32_t)128U - sz) + if (chunk_len <= 128U - sz) { - Hacl_Streaming_MD_state_64 s1 = *p; + Hacl_Streaming_MD_state_64 s1 = *state; uint64_t *block_state1 = s1.block_state; uint8_t *buf = s1.buf; uint64_t total_len1 = s1.total_len; uint32_t sz1; - if (total_len1 % (uint64_t)(uint32_t)128U == (uint64_t)0U && total_len1 > (uint64_t)0U) + if (total_len1 % (uint64_t)128U == 0ULL && total_len1 > 0ULL) { - sz1 = (uint32_t)128U; + sz1 = 128U; } else { - sz1 = (uint32_t)(total_len1 % (uint64_t)(uint32_t)128U); + sz1 = (uint32_t)(total_len1 % (uint64_t)128U); } uint8_t *buf2 = buf + sz1; - memcpy(buf2, data, len * sizeof (uint8_t)); - uint64_t total_len2 = total_len1 + (uint64_t)len; - *p + memcpy(buf2, chunk, chunk_len * sizeof (uint8_t)); + uint64_t total_len2 = total_len1 + (uint64_t)chunk_len; + *state = ( (Hacl_Streaming_MD_state_64){ @@ -1012,76 +949,74 @@ update_384_512(Hacl_Streaming_MD_state_64 *p, uint8_t *data, 
uint32_t len) } ); } - else if (sz == (uint32_t)0U) + else if (sz == 0U) { - Hacl_Streaming_MD_state_64 s1 = *p; + Hacl_Streaming_MD_state_64 s1 = *state; uint64_t *block_state1 = s1.block_state; uint8_t *buf = s1.buf; uint64_t total_len1 = s1.total_len; uint32_t sz1; - if (total_len1 % (uint64_t)(uint32_t)128U == (uint64_t)0U && total_len1 > (uint64_t)0U) + if (total_len1 % (uint64_t)128U == 0ULL && total_len1 > 0ULL) { - sz1 = (uint32_t)128U; + sz1 = 128U; } else { - sz1 = (uint32_t)(total_len1 % (uint64_t)(uint32_t)128U); + sz1 = (uint32_t)(total_len1 % (uint64_t)128U); } - if (!(sz1 == (uint32_t)0U)) + if (!(sz1 == 0U)) { - Hacl_SHA2_Scalar32_sha512_update_nblocks((uint32_t)128U, buf, block_state1); + Hacl_Hash_SHA2_sha512_update_nblocks(128U, buf, block_state1); } uint32_t ite; - if ((uint64_t)len % (uint64_t)(uint32_t)128U == (uint64_t)0U && (uint64_t)len > (uint64_t)0U) + if ((uint64_t)chunk_len % (uint64_t)128U == 0ULL && (uint64_t)chunk_len > 0ULL) { - ite = (uint32_t)128U; + ite = 128U; } else { - ite = (uint32_t)((uint64_t)len % (uint64_t)(uint32_t)128U); + ite = (uint32_t)((uint64_t)chunk_len % (uint64_t)128U); } - uint32_t n_blocks = (len - ite) / (uint32_t)128U; - uint32_t data1_len = n_blocks * (uint32_t)128U; - uint32_t data2_len = len - data1_len; - uint8_t *data1 = data; - uint8_t *data2 = data + data1_len; - Hacl_SHA2_Scalar32_sha512_update_nblocks(data1_len / (uint32_t)128U * (uint32_t)128U, - data1, - block_state1); + uint32_t n_blocks = (chunk_len - ite) / 128U; + uint32_t data1_len = n_blocks * 128U; + uint32_t data2_len = chunk_len - data1_len; + uint8_t *data1 = chunk; + uint8_t *data2 = chunk + data1_len; + Hacl_Hash_SHA2_sha512_update_nblocks(data1_len / 128U * 128U, data1, block_state1); uint8_t *dst = buf; memcpy(dst, data2, data2_len * sizeof (uint8_t)); - *p + *state = ( (Hacl_Streaming_MD_state_64){ .block_state = block_state1, .buf = buf, - .total_len = total_len1 + (uint64_t)len + .total_len = total_len1 + (uint64_t)chunk_len } ); } else { - uint32_t diff = (uint32_t)128U - sz; - uint8_t *data1 = data; - uint8_t *data2 = data + diff; - Hacl_Streaming_MD_state_64 s1 = *p; + uint32_t diff = 128U - sz; + uint8_t *chunk1 = chunk; + uint8_t *chunk2 = chunk + diff; + Hacl_Streaming_MD_state_64 s1 = *state; uint64_t *block_state10 = s1.block_state; uint8_t *buf0 = s1.buf; uint64_t total_len10 = s1.total_len; uint32_t sz10; - if (total_len10 % (uint64_t)(uint32_t)128U == (uint64_t)0U && total_len10 > (uint64_t)0U) + if (total_len10 % (uint64_t)128U == 0ULL && total_len10 > 0ULL) { - sz10 = (uint32_t)128U; + sz10 = 128U; } else { - sz10 = (uint32_t)(total_len10 % (uint64_t)(uint32_t)128U); + sz10 = (uint32_t)(total_len10 % (uint64_t)128U); } uint8_t *buf2 = buf0 + sz10; - memcpy(buf2, data1, diff * sizeof (uint8_t)); + memcpy(buf2, chunk1, diff * sizeof (uint8_t)); uint64_t total_len2 = total_len10 + (uint64_t)diff; - *p + *state = ( (Hacl_Streaming_MD_state_64){ @@ -1090,55 +1025,48 @@ update_384_512(Hacl_Streaming_MD_state_64 *p, uint8_t *data, uint32_t len) .total_len = total_len2 } ); - Hacl_Streaming_MD_state_64 s10 = *p; + Hacl_Streaming_MD_state_64 s10 = *state; uint64_t *block_state1 = s10.block_state; uint8_t *buf = s10.buf; uint64_t total_len1 = s10.total_len; uint32_t sz1; - if (total_len1 % (uint64_t)(uint32_t)128U == (uint64_t)0U && total_len1 > (uint64_t)0U) + if (total_len1 % (uint64_t)128U == 0ULL && total_len1 > 0ULL) { - sz1 = (uint32_t)128U; + sz1 = 128U; } else { - sz1 = (uint32_t)(total_len1 % (uint64_t)(uint32_t)128U); + sz1 = 
(uint32_t)(total_len1 % (uint64_t)128U); } - if (!(sz1 == (uint32_t)0U)) + if (!(sz1 == 0U)) { - Hacl_SHA2_Scalar32_sha512_update_nblocks((uint32_t)128U, buf, block_state1); + Hacl_Hash_SHA2_sha512_update_nblocks(128U, buf, block_state1); } uint32_t ite; if - ( - (uint64_t)(len - diff) - % (uint64_t)(uint32_t)128U - == (uint64_t)0U - && (uint64_t)(len - diff) > (uint64_t)0U - ) + ((uint64_t)(chunk_len - diff) % (uint64_t)128U == 0ULL && (uint64_t)(chunk_len - diff) > 0ULL) { - ite = (uint32_t)128U; + ite = 128U; } else { - ite = (uint32_t)((uint64_t)(len - diff) % (uint64_t)(uint32_t)128U); + ite = (uint32_t)((uint64_t)(chunk_len - diff) % (uint64_t)128U); } - uint32_t n_blocks = (len - diff - ite) / (uint32_t)128U; - uint32_t data1_len = n_blocks * (uint32_t)128U; - uint32_t data2_len = len - diff - data1_len; - uint8_t *data11 = data2; - uint8_t *data21 = data2 + data1_len; - Hacl_SHA2_Scalar32_sha512_update_nblocks(data1_len / (uint32_t)128U * (uint32_t)128U, - data11, - block_state1); + uint32_t n_blocks = (chunk_len - diff - ite) / 128U; + uint32_t data1_len = n_blocks * 128U; + uint32_t data2_len = chunk_len - diff - data1_len; + uint8_t *data1 = chunk2; + uint8_t *data2 = chunk2 + data1_len; + Hacl_Hash_SHA2_sha512_update_nblocks(data1_len / 128U * 128U, data1, block_state1); uint8_t *dst = buf; - memcpy(dst, data21, data2_len * sizeof (uint8_t)); - *p + memcpy(dst, data2, data2_len * sizeof (uint8_t)); + *state = ( (Hacl_Streaming_MD_state_64){ .block_state = block_state1, .buf = buf, - .total_len = total_len1 + (uint64_t)(len - diff) + .total_len = total_len1 + (uint64_t)(chunk_len - diff) } ); } @@ -1148,198 +1076,198 @@ update_384_512(Hacl_Streaming_MD_state_64 *p, uint8_t *data, uint32_t len) /** Feed an arbitrary amount of data into the hash. This function returns 0 for success, or 1 if the combined length of all of the data passed to `update_512` -(since the last call to `init_512`) exceeds 2^125-1 bytes. +(since the last call to `reset_512`) exceeds 2^125-1 bytes. This function is identical to the update function for SHA2_384. */ Hacl_Streaming_Types_error_code -Hacl_Streaming_SHA2_update_512( - Hacl_Streaming_MD_state_64 *p, +Hacl_Hash_SHA2_update_512( + Hacl_Streaming_MD_state_64 *state, uint8_t *input, uint32_t input_len ) { - return update_384_512(p, input, input_len); + return update_384_512(state, input, input_len); } /** -Write the resulting hash into `dst`, an array of 64 bytes. The state remains -valid after a call to `finish_512`, meaning the user may feed more data into -the hash via `update_512`. (The finish_512 function operates on an internal copy of +Write the resulting hash into `output`, an array of 64 bytes. The state remains +valid after a call to `digest_512`, meaning the user may feed more data into +the hash via `update_512`. (The digest_512 function operates on an internal copy of the state and therefore does not invalidate the client-held state `p`.) 
*/ -void Hacl_Streaming_SHA2_finish_512(Hacl_Streaming_MD_state_64 *p, uint8_t *dst) +void Hacl_Hash_SHA2_digest_512(Hacl_Streaming_MD_state_64 *state, uint8_t *output) { - Hacl_Streaming_MD_state_64 scrut = *p; + Hacl_Streaming_MD_state_64 scrut = *state; uint64_t *block_state = scrut.block_state; uint8_t *buf_ = scrut.buf; uint64_t total_len = scrut.total_len; uint32_t r; - if (total_len % (uint64_t)(uint32_t)128U == (uint64_t)0U && total_len > (uint64_t)0U) + if (total_len % (uint64_t)128U == 0ULL && total_len > 0ULL) { - r = (uint32_t)128U; + r = 128U; } else { - r = (uint32_t)(total_len % (uint64_t)(uint32_t)128U); + r = (uint32_t)(total_len % (uint64_t)128U); } uint8_t *buf_1 = buf_; uint64_t tmp_block_state[8U] = { 0U }; - memcpy(tmp_block_state, block_state, (uint32_t)8U * sizeof (uint64_t)); + memcpy(tmp_block_state, block_state, 8U * sizeof (uint64_t)); uint32_t ite; - if (r % (uint32_t)128U == (uint32_t)0U && r > (uint32_t)0U) + if (r % 128U == 0U && r > 0U) { - ite = (uint32_t)128U; + ite = 128U; } else { - ite = r % (uint32_t)128U; + ite = r % 128U; } uint8_t *buf_last = buf_1 + r - ite; uint8_t *buf_multi = buf_1; - Hacl_SHA2_Scalar32_sha512_update_nblocks((uint32_t)0U, buf_multi, tmp_block_state); + Hacl_Hash_SHA2_sha512_update_nblocks(0U, buf_multi, tmp_block_state); uint64_t prev_len_last = total_len - (uint64_t)r; - Hacl_SHA2_Scalar32_sha512_update_last(FStar_UInt128_add(FStar_UInt128_uint64_to_uint128(prev_len_last), + Hacl_Hash_SHA2_sha512_update_last(FStar_UInt128_add(FStar_UInt128_uint64_to_uint128(prev_len_last), FStar_UInt128_uint64_to_uint128((uint64_t)r)), r, buf_last, tmp_block_state); - Hacl_SHA2_Scalar32_sha512_finish(tmp_block_state, dst); + Hacl_Hash_SHA2_sha512_finish(tmp_block_state, output); } /** -Free a state allocated with `create_in_512`. +Free a state allocated with `malloc_512`. This function is identical to the free function for SHA2_384. */ -void Hacl_Streaming_SHA2_free_512(Hacl_Streaming_MD_state_64 *s) +void Hacl_Hash_SHA2_free_512(Hacl_Streaming_MD_state_64 *state) { - Hacl_Streaming_MD_state_64 scrut = *s; + Hacl_Streaming_MD_state_64 scrut = *state; uint8_t *buf = scrut.buf; uint64_t *block_state = scrut.block_state; KRML_HOST_FREE(block_state); KRML_HOST_FREE(buf); - KRML_HOST_FREE(s); + KRML_HOST_FREE(state); } /** -Hash `input`, of len `input_len`, into `dst`, an array of 64 bytes. +Hash `input`, of len `input_len`, into `output`, an array of 64 bytes. 
*/ -void Hacl_Streaming_SHA2_hash_512(uint8_t *input, uint32_t input_len, uint8_t *dst) +void Hacl_Hash_SHA2_hash_512(uint8_t *output, uint8_t *input, uint32_t input_len) { uint8_t *ib = input; - uint8_t *rb = dst; + uint8_t *rb = output; uint64_t st[8U] = { 0U }; - Hacl_SHA2_Scalar32_sha512_init(st); - uint32_t rem = input_len % (uint32_t)128U; + Hacl_Hash_SHA2_sha512_init(st); + uint32_t rem = input_len % 128U; FStar_UInt128_uint128 len_ = FStar_UInt128_uint64_to_uint128((uint64_t)input_len); - Hacl_SHA2_Scalar32_sha512_update_nblocks(input_len, ib, st); - uint32_t rem1 = input_len % (uint32_t)128U; + Hacl_Hash_SHA2_sha512_update_nblocks(input_len, ib, st); + uint32_t rem1 = input_len % 128U; uint8_t *b0 = ib; uint8_t *lb = b0 + input_len - rem1; - Hacl_SHA2_Scalar32_sha512_update_last(len_, rem, lb, st); - Hacl_SHA2_Scalar32_sha512_finish(st, rb); + Hacl_Hash_SHA2_sha512_update_last(len_, rem, lb, st); + Hacl_Hash_SHA2_sha512_finish(st, rb); } -Hacl_Streaming_MD_state_64 *Hacl_Streaming_SHA2_create_in_384(void) +Hacl_Streaming_MD_state_64 *Hacl_Hash_SHA2_malloc_384(void) { - uint8_t *buf = (uint8_t *)KRML_HOST_CALLOC((uint32_t)128U, sizeof (uint8_t)); - uint64_t *block_state = (uint64_t *)KRML_HOST_CALLOC((uint32_t)8U, sizeof (uint64_t)); + uint8_t *buf = (uint8_t *)KRML_HOST_CALLOC(128U, sizeof (uint8_t)); + uint64_t *block_state = (uint64_t *)KRML_HOST_CALLOC(8U, sizeof (uint64_t)); Hacl_Streaming_MD_state_64 - s = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)(uint32_t)0U }; + s = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)0U }; Hacl_Streaming_MD_state_64 *p = (Hacl_Streaming_MD_state_64 *)KRML_HOST_MALLOC(sizeof (Hacl_Streaming_MD_state_64)); p[0U] = s; - Hacl_SHA2_Scalar32_sha384_init(block_state); + Hacl_Hash_SHA2_sha384_init(block_state); return p; } -void Hacl_Streaming_SHA2_init_384(Hacl_Streaming_MD_state_64 *s) +void Hacl_Hash_SHA2_reset_384(Hacl_Streaming_MD_state_64 *state) { - Hacl_Streaming_MD_state_64 scrut = *s; + Hacl_Streaming_MD_state_64 scrut = *state; uint8_t *buf = scrut.buf; uint64_t *block_state = scrut.block_state; - Hacl_SHA2_Scalar32_sha384_init(block_state); + Hacl_Hash_SHA2_sha384_init(block_state); Hacl_Streaming_MD_state_64 - tmp = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)(uint32_t)0U }; - s[0U] = tmp; + tmp = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)0U }; + state[0U] = tmp; } Hacl_Streaming_Types_error_code -Hacl_Streaming_SHA2_update_384( - Hacl_Streaming_MD_state_64 *p, +Hacl_Hash_SHA2_update_384( + Hacl_Streaming_MD_state_64 *state, uint8_t *input, uint32_t input_len ) { - return update_384_512(p, input, input_len); + return update_384_512(state, input, input_len); } /** -Write the resulting hash into `dst`, an array of 48 bytes. The state remains -valid after a call to `finish_384`, meaning the user may feed more data into +Write the resulting hash into `output`, an array of 48 bytes. The state remains +valid after a call to `digest_384`, meaning the user may feed more data into the hash via `update_384`. 
*/ -void Hacl_Streaming_SHA2_finish_384(Hacl_Streaming_MD_state_64 *p, uint8_t *dst) +void Hacl_Hash_SHA2_digest_384(Hacl_Streaming_MD_state_64 *state, uint8_t *output) { - Hacl_Streaming_MD_state_64 scrut = *p; + Hacl_Streaming_MD_state_64 scrut = *state; uint64_t *block_state = scrut.block_state; uint8_t *buf_ = scrut.buf; uint64_t total_len = scrut.total_len; uint32_t r; - if (total_len % (uint64_t)(uint32_t)128U == (uint64_t)0U && total_len > (uint64_t)0U) + if (total_len % (uint64_t)128U == 0ULL && total_len > 0ULL) { - r = (uint32_t)128U; + r = 128U; } else { - r = (uint32_t)(total_len % (uint64_t)(uint32_t)128U); + r = (uint32_t)(total_len % (uint64_t)128U); } uint8_t *buf_1 = buf_; uint64_t tmp_block_state[8U] = { 0U }; - memcpy(tmp_block_state, block_state, (uint32_t)8U * sizeof (uint64_t)); + memcpy(tmp_block_state, block_state, 8U * sizeof (uint64_t)); uint32_t ite; - if (r % (uint32_t)128U == (uint32_t)0U && r > (uint32_t)0U) + if (r % 128U == 0U && r > 0U) { - ite = (uint32_t)128U; + ite = 128U; } else { - ite = r % (uint32_t)128U; + ite = r % 128U; } uint8_t *buf_last = buf_1 + r - ite; uint8_t *buf_multi = buf_1; - Hacl_SHA2_Scalar32_sha384_update_nblocks((uint32_t)0U, buf_multi, tmp_block_state); + Hacl_Hash_SHA2_sha384_update_nblocks(0U, buf_multi, tmp_block_state); uint64_t prev_len_last = total_len - (uint64_t)r; - Hacl_SHA2_Scalar32_sha384_update_last(FStar_UInt128_add(FStar_UInt128_uint64_to_uint128(prev_len_last), + Hacl_Hash_SHA2_sha384_update_last(FStar_UInt128_add(FStar_UInt128_uint64_to_uint128(prev_len_last), FStar_UInt128_uint64_to_uint128((uint64_t)r)), r, buf_last, tmp_block_state); - Hacl_SHA2_Scalar32_sha384_finish(tmp_block_state, dst); + Hacl_Hash_SHA2_sha384_finish(tmp_block_state, output); } -void Hacl_Streaming_SHA2_free_384(Hacl_Streaming_MD_state_64 *p) +void Hacl_Hash_SHA2_free_384(Hacl_Streaming_MD_state_64 *state) { - Hacl_Streaming_SHA2_free_512(p); + Hacl_Hash_SHA2_free_512(state); } /** -Hash `input`, of len `input_len`, into `dst`, an array of 48 bytes. +Hash `input`, of len `input_len`, into `output`, an array of 48 bytes. 
*/ -void Hacl_Streaming_SHA2_hash_384(uint8_t *input, uint32_t input_len, uint8_t *dst) +void Hacl_Hash_SHA2_hash_384(uint8_t *output, uint8_t *input, uint32_t input_len) { uint8_t *ib = input; - uint8_t *rb = dst; + uint8_t *rb = output; uint64_t st[8U] = { 0U }; - Hacl_SHA2_Scalar32_sha384_init(st); - uint32_t rem = input_len % (uint32_t)128U; + Hacl_Hash_SHA2_sha384_init(st); + uint32_t rem = input_len % 128U; FStar_UInt128_uint128 len_ = FStar_UInt128_uint64_to_uint128((uint64_t)input_len); - Hacl_SHA2_Scalar32_sha384_update_nblocks(input_len, ib, st); - uint32_t rem1 = input_len % (uint32_t)128U; + Hacl_Hash_SHA2_sha384_update_nblocks(input_len, ib, st); + uint32_t rem1 = input_len % 128U; uint8_t *b0 = ib; uint8_t *lb = b0 + input_len - rem1; - Hacl_SHA2_Scalar32_sha384_update_last(len_, rem, lb, st); - Hacl_SHA2_Scalar32_sha384_finish(st, rb); + Hacl_Hash_SHA2_sha384_update_last(len_, rem, lb, st); + Hacl_Hash_SHA2_sha384_finish(st, rb); } diff --git a/Modules/_hacl/Hacl_Hash_SHA2.h b/Modules/_hacl/Hacl_Hash_SHA2.h index a0e731094dfaa5..d8204b504baf82 100644 --- a/Modules/_hacl/Hacl_Hash_SHA2.h +++ b/Modules/_hacl/Hacl_Hash_SHA2.h @@ -39,19 +39,19 @@ extern "C" { #include "Hacl_Streaming_Types.h" -typedef Hacl_Streaming_MD_state_32 Hacl_Streaming_SHA2_state_sha2_224; +typedef Hacl_Streaming_MD_state_32 Hacl_Hash_SHA2_state_t_224; -typedef Hacl_Streaming_MD_state_32 Hacl_Streaming_SHA2_state_sha2_256; +typedef Hacl_Streaming_MD_state_32 Hacl_Hash_SHA2_state_t_256; -typedef Hacl_Streaming_MD_state_64 Hacl_Streaming_SHA2_state_sha2_384; +typedef Hacl_Streaming_MD_state_64 Hacl_Hash_SHA2_state_t_384; -typedef Hacl_Streaming_MD_state_64 Hacl_Streaming_SHA2_state_sha2_512; +typedef Hacl_Streaming_MD_state_64 Hacl_Hash_SHA2_state_t_512; /** Allocate initial state for the SHA2_256 hash. The state is to be freed by calling `free_256`. */ -Hacl_Streaming_MD_state_32 *Hacl_Streaming_SHA2_create_in_256(void); +Hacl_Streaming_MD_state_32 *Hacl_Hash_SHA2_malloc_256(void); /** Copies the state passed as argument into a newly allocated state (deep copy). @@ -59,73 +59,73 @@ The state is to be freed by calling `free_256`. Cloning the state this way is useful, for instance, if your control-flow diverges and you need to feed more (different) data into the hash in each branch. */ -Hacl_Streaming_MD_state_32 *Hacl_Streaming_SHA2_copy_256(Hacl_Streaming_MD_state_32 *s0); +Hacl_Streaming_MD_state_32 *Hacl_Hash_SHA2_copy_256(Hacl_Streaming_MD_state_32 *state); /** Reset an existing state to the initial hash state with empty data. */ -void Hacl_Streaming_SHA2_init_256(Hacl_Streaming_MD_state_32 *s); +void Hacl_Hash_SHA2_reset_256(Hacl_Streaming_MD_state_32 *state); /** Feed an arbitrary amount of data into the hash. This function returns 0 for success, or 1 if the combined length of all of the data passed to `update_256` -(since the last call to `init_256`) exceeds 2^61-1 bytes. +(since the last call to `reset_256`) exceeds 2^61-1 bytes. This function is identical to the update function for SHA2_224. */ Hacl_Streaming_Types_error_code -Hacl_Streaming_SHA2_update_256( - Hacl_Streaming_MD_state_32 *p, +Hacl_Hash_SHA2_update_256( + Hacl_Streaming_MD_state_32 *state, uint8_t *input, uint32_t input_len ); /** -Write the resulting hash into `dst`, an array of 32 bytes. The state remains -valid after a call to `finish_256`, meaning the user may feed more data into -the hash via `update_256`. (The finish_256 function operates on an internal copy of +Write the resulting hash into `output`, an array of 32 bytes. 
The state remains +valid after a call to `digest_256`, meaning the user may feed more data into +the hash via `update_256`. (The digest_256 function operates on an internal copy of the state and therefore does not invalidate the client-held state `p`.) */ -void Hacl_Streaming_SHA2_finish_256(Hacl_Streaming_MD_state_32 *p, uint8_t *dst); +void Hacl_Hash_SHA2_digest_256(Hacl_Streaming_MD_state_32 *state, uint8_t *output); /** -Free a state allocated with `create_in_256`. +Free a state allocated with `malloc_256`. This function is identical to the free function for SHA2_224. */ -void Hacl_Streaming_SHA2_free_256(Hacl_Streaming_MD_state_32 *s); +void Hacl_Hash_SHA2_free_256(Hacl_Streaming_MD_state_32 *state); /** -Hash `input`, of len `input_len`, into `dst`, an array of 32 bytes. +Hash `input`, of len `input_len`, into `output`, an array of 32 bytes. */ -void Hacl_Streaming_SHA2_hash_256(uint8_t *input, uint32_t input_len, uint8_t *dst); +void Hacl_Hash_SHA2_hash_256(uint8_t *output, uint8_t *input, uint32_t input_len); -Hacl_Streaming_MD_state_32 *Hacl_Streaming_SHA2_create_in_224(void); +Hacl_Streaming_MD_state_32 *Hacl_Hash_SHA2_malloc_224(void); -void Hacl_Streaming_SHA2_init_224(Hacl_Streaming_MD_state_32 *s); +void Hacl_Hash_SHA2_reset_224(Hacl_Streaming_MD_state_32 *state); Hacl_Streaming_Types_error_code -Hacl_Streaming_SHA2_update_224( - Hacl_Streaming_MD_state_32 *p, +Hacl_Hash_SHA2_update_224( + Hacl_Streaming_MD_state_32 *state, uint8_t *input, uint32_t input_len ); /** -Write the resulting hash into `dst`, an array of 28 bytes. The state remains -valid after a call to `finish_224`, meaning the user may feed more data into +Write the resulting hash into `output`, an array of 28 bytes. The state remains +valid after a call to `digest_224`, meaning the user may feed more data into the hash via `update_224`. */ -void Hacl_Streaming_SHA2_finish_224(Hacl_Streaming_MD_state_32 *p, uint8_t *dst); +void Hacl_Hash_SHA2_digest_224(Hacl_Streaming_MD_state_32 *state, uint8_t *output); -void Hacl_Streaming_SHA2_free_224(Hacl_Streaming_MD_state_32 *p); +void Hacl_Hash_SHA2_free_224(Hacl_Streaming_MD_state_32 *state); /** -Hash `input`, of len `input_len`, into `dst`, an array of 28 bytes. +Hash `input`, of len `input_len`, into `output`, an array of 28 bytes. */ -void Hacl_Streaming_SHA2_hash_224(uint8_t *input, uint32_t input_len, uint8_t *dst); +void Hacl_Hash_SHA2_hash_224(uint8_t *output, uint8_t *input, uint32_t input_len); -Hacl_Streaming_MD_state_64 *Hacl_Streaming_SHA2_create_in_512(void); +Hacl_Streaming_MD_state_64 *Hacl_Hash_SHA2_malloc_512(void); /** Copies the state passed as argument into a newly allocated state (deep copy). @@ -133,68 +133,68 @@ The state is to be freed by calling `free_512`. Cloning the state this way is useful, for instance, if your control-flow diverges and you need to feed more (different) data into the hash in each branch. */ -Hacl_Streaming_MD_state_64 *Hacl_Streaming_SHA2_copy_512(Hacl_Streaming_MD_state_64 *s0); +Hacl_Streaming_MD_state_64 *Hacl_Hash_SHA2_copy_512(Hacl_Streaming_MD_state_64 *state); -void Hacl_Streaming_SHA2_init_512(Hacl_Streaming_MD_state_64 *s); +void Hacl_Hash_SHA2_reset_512(Hacl_Streaming_MD_state_64 *state); /** Feed an arbitrary amount of data into the hash. This function returns 0 for success, or 1 if the combined length of all of the data passed to `update_512` -(since the last call to `init_512`) exceeds 2^125-1 bytes. +(since the last call to `reset_512`) exceeds 2^125-1 bytes. 
This function is identical to the update function for SHA2_384. */ Hacl_Streaming_Types_error_code -Hacl_Streaming_SHA2_update_512( - Hacl_Streaming_MD_state_64 *p, +Hacl_Hash_SHA2_update_512( + Hacl_Streaming_MD_state_64 *state, uint8_t *input, uint32_t input_len ); /** -Write the resulting hash into `dst`, an array of 64 bytes. The state remains -valid after a call to `finish_512`, meaning the user may feed more data into -the hash via `update_512`. (The finish_512 function operates on an internal copy of +Write the resulting hash into `output`, an array of 64 bytes. The state remains +valid after a call to `digest_512`, meaning the user may feed more data into +the hash via `update_512`. (The digest_512 function operates on an internal copy of the state and therefore does not invalidate the client-held state `p`.) */ -void Hacl_Streaming_SHA2_finish_512(Hacl_Streaming_MD_state_64 *p, uint8_t *dst); +void Hacl_Hash_SHA2_digest_512(Hacl_Streaming_MD_state_64 *state, uint8_t *output); /** -Free a state allocated with `create_in_512`. +Free a state allocated with `malloc_512`. This function is identical to the free function for SHA2_384. */ -void Hacl_Streaming_SHA2_free_512(Hacl_Streaming_MD_state_64 *s); +void Hacl_Hash_SHA2_free_512(Hacl_Streaming_MD_state_64 *state); /** -Hash `input`, of len `input_len`, into `dst`, an array of 64 bytes. +Hash `input`, of len `input_len`, into `output`, an array of 64 bytes. */ -void Hacl_Streaming_SHA2_hash_512(uint8_t *input, uint32_t input_len, uint8_t *dst); +void Hacl_Hash_SHA2_hash_512(uint8_t *output, uint8_t *input, uint32_t input_len); -Hacl_Streaming_MD_state_64 *Hacl_Streaming_SHA2_create_in_384(void); +Hacl_Streaming_MD_state_64 *Hacl_Hash_SHA2_malloc_384(void); -void Hacl_Streaming_SHA2_init_384(Hacl_Streaming_MD_state_64 *s); +void Hacl_Hash_SHA2_reset_384(Hacl_Streaming_MD_state_64 *state); Hacl_Streaming_Types_error_code -Hacl_Streaming_SHA2_update_384( - Hacl_Streaming_MD_state_64 *p, +Hacl_Hash_SHA2_update_384( + Hacl_Streaming_MD_state_64 *state, uint8_t *input, uint32_t input_len ); /** -Write the resulting hash into `dst`, an array of 48 bytes. The state remains -valid after a call to `finish_384`, meaning the user may feed more data into +Write the resulting hash into `output`, an array of 48 bytes. The state remains +valid after a call to `digest_384`, meaning the user may feed more data into the hash via `update_384`. */ -void Hacl_Streaming_SHA2_finish_384(Hacl_Streaming_MD_state_64 *p, uint8_t *dst); +void Hacl_Hash_SHA2_digest_384(Hacl_Streaming_MD_state_64 *state, uint8_t *output); -void Hacl_Streaming_SHA2_free_384(Hacl_Streaming_MD_state_64 *p); +void Hacl_Hash_SHA2_free_384(Hacl_Streaming_MD_state_64 *state); /** -Hash `input`, of len `input_len`, into `dst`, an array of 48 bytes. +Hash `input`, of len `input_len`, into `output`, an array of 48 bytes. 
*/ -void Hacl_Streaming_SHA2_hash_384(uint8_t *input, uint32_t input_len, uint8_t *dst); +void Hacl_Hash_SHA2_hash_384(uint8_t *output, uint8_t *input, uint32_t input_len); #if defined(__cplusplus) } diff --git a/Modules/_hacl/Hacl_Hash_SHA3.c b/Modules/_hacl/Hacl_Hash_SHA3.c index b3febdfeb2b221..4f502866fe06bb 100644 --- a/Modules/_hacl/Hacl_Hash_SHA3.c +++ b/Modules/_hacl/Hacl_Hash_SHA3.c @@ -31,27 +31,27 @@ static uint32_t block_len(Spec_Hash_Definitions_hash_alg a) { case Spec_Hash_Definitions_SHA3_224: { - return (uint32_t)144U; + return 144U; } case Spec_Hash_Definitions_SHA3_256: { - return (uint32_t)136U; + return 136U; } case Spec_Hash_Definitions_SHA3_384: { - return (uint32_t)104U; + return 104U; } case Spec_Hash_Definitions_SHA3_512: { - return (uint32_t)72U; + return 72U; } case Spec_Hash_Definitions_Shake128: { - return (uint32_t)168U; + return 168U; } case Spec_Hash_Definitions_Shake256: { - return (uint32_t)136U; + return 136U; } default: { @@ -67,19 +67,19 @@ static uint32_t hash_len(Spec_Hash_Definitions_hash_alg a) { case Spec_Hash_Definitions_SHA3_224: { - return (uint32_t)28U; + return 28U; } case Spec_Hash_Definitions_SHA3_256: { - return (uint32_t)32U; + return 32U; } case Spec_Hash_Definitions_SHA3_384: { - return (uint32_t)48U; + return 48U; } case Spec_Hash_Definitions_SHA3_512: { - return (uint32_t)64U; + return 64U; } default: { @@ -97,10 +97,10 @@ Hacl_Hash_SHA3_update_multi_sha3( uint32_t n_blocks ) { - for (uint32_t i = (uint32_t)0U; i < n_blocks; i++) + for (uint32_t i = 0U; i < n_blocks; i++) { uint8_t *block = blocks + i * block_len(a); - Hacl_Impl_SHA3_absorb_inner(block_len(a), block, s); + Hacl_Hash_SHA3_absorb_inner(block_len(a), block, s); } } @@ -115,139 +115,139 @@ Hacl_Hash_SHA3_update_last_sha3( uint8_t suffix; if (a == Spec_Hash_Definitions_Shake128 || a == Spec_Hash_Definitions_Shake256) { - suffix = (uint8_t)0x1fU; + suffix = 0x1fU; } else { - suffix = (uint8_t)0x06U; + suffix = 0x06U; } uint32_t len = block_len(a); if (input_len == len) { - Hacl_Impl_SHA3_absorb_inner(len, input, s); - uint8_t *uu____0 = input + input_len; + Hacl_Hash_SHA3_absorb_inner(len, input, s); uint8_t lastBlock_[200U] = { 0U }; uint8_t *lastBlock = lastBlock_; - memcpy(lastBlock, uu____0, (uint32_t)0U * sizeof (uint8_t)); + memcpy(lastBlock, input + input_len, 0U * sizeof (uint8_t)); lastBlock[0U] = suffix; - Hacl_Impl_SHA3_loadState(len, lastBlock, s); - if (!((suffix & (uint8_t)0x80U) == (uint8_t)0U) && (uint32_t)0U == len - (uint32_t)1U) + Hacl_Hash_SHA3_loadState(len, lastBlock, s); + if (!(((uint32_t)suffix & 0x80U) == 0U) && 0U == len - 1U) { - Hacl_Impl_SHA3_state_permute(s); + Hacl_Hash_SHA3_state_permute(s); } uint8_t nextBlock_[200U] = { 0U }; uint8_t *nextBlock = nextBlock_; - nextBlock[len - (uint32_t)1U] = (uint8_t)0x80U; - Hacl_Impl_SHA3_loadState(len, nextBlock, s); - Hacl_Impl_SHA3_state_permute(s); + nextBlock[len - 1U] = 0x80U; + Hacl_Hash_SHA3_loadState(len, nextBlock, s); + Hacl_Hash_SHA3_state_permute(s); return; } uint8_t lastBlock_[200U] = { 0U }; uint8_t *lastBlock = lastBlock_; memcpy(lastBlock, input, input_len * sizeof (uint8_t)); lastBlock[input_len] = suffix; - Hacl_Impl_SHA3_loadState(len, lastBlock, s); - if (!((suffix & (uint8_t)0x80U) == (uint8_t)0U) && input_len == len - (uint32_t)1U) + Hacl_Hash_SHA3_loadState(len, lastBlock, s); + if (!(((uint32_t)suffix & 0x80U) == 0U) && input_len == len - 1U) { - Hacl_Impl_SHA3_state_permute(s); + Hacl_Hash_SHA3_state_permute(s); } uint8_t nextBlock_[200U] = { 0U }; uint8_t *nextBlock = 
nextBlock_; - nextBlock[len - (uint32_t)1U] = (uint8_t)0x80U; - Hacl_Impl_SHA3_loadState(len, nextBlock, s); - Hacl_Impl_SHA3_state_permute(s); + nextBlock[len - 1U] = 0x80U; + Hacl_Hash_SHA3_loadState(len, nextBlock, s); + Hacl_Hash_SHA3_state_permute(s); } typedef struct hash_buf2_s { - Hacl_Streaming_Keccak_hash_buf fst; - Hacl_Streaming_Keccak_hash_buf snd; + Hacl_Hash_SHA3_hash_buf fst; + Hacl_Hash_SHA3_hash_buf snd; } hash_buf2; -Spec_Hash_Definitions_hash_alg Hacl_Streaming_Keccak_get_alg(Hacl_Streaming_Keccak_state *s) +Spec_Hash_Definitions_hash_alg Hacl_Hash_SHA3_get_alg(Hacl_Hash_SHA3_state_t *s) { - Hacl_Streaming_Keccak_state scrut = *s; - Hacl_Streaming_Keccak_hash_buf block_state = scrut.block_state; + Hacl_Hash_SHA3_hash_buf block_state = (*s).block_state; return block_state.fst; } -Hacl_Streaming_Keccak_state *Hacl_Streaming_Keccak_malloc(Spec_Hash_Definitions_hash_alg a) +Hacl_Hash_SHA3_state_t *Hacl_Hash_SHA3_malloc(Spec_Hash_Definitions_hash_alg a) { KRML_CHECK_SIZE(sizeof (uint8_t), block_len(a)); uint8_t *buf0 = (uint8_t *)KRML_HOST_CALLOC(block_len(a), sizeof (uint8_t)); - uint64_t *buf = (uint64_t *)KRML_HOST_CALLOC((uint32_t)25U, sizeof (uint64_t)); - Hacl_Streaming_Keccak_hash_buf block_state = { .fst = a, .snd = buf }; - Hacl_Streaming_Keccak_state - s = { .block_state = block_state, .buf = buf0, .total_len = (uint64_t)(uint32_t)0U }; - Hacl_Streaming_Keccak_state - *p = (Hacl_Streaming_Keccak_state *)KRML_HOST_MALLOC(sizeof (Hacl_Streaming_Keccak_state)); + uint64_t *buf = (uint64_t *)KRML_HOST_CALLOC(25U, sizeof (uint64_t)); + Hacl_Hash_SHA3_hash_buf block_state = { .fst = a, .snd = buf }; + Hacl_Hash_SHA3_state_t + s = { .block_state = block_state, .buf = buf0, .total_len = (uint64_t)0U }; + Hacl_Hash_SHA3_state_t + *p = (Hacl_Hash_SHA3_state_t *)KRML_HOST_MALLOC(sizeof (Hacl_Hash_SHA3_state_t)); p[0U] = s; uint64_t *s1 = block_state.snd; - memset(s1, 0U, (uint32_t)25U * sizeof (uint64_t)); + memset(s1, 0U, 25U * sizeof (uint64_t)); return p; } -void Hacl_Streaming_Keccak_free(Hacl_Streaming_Keccak_state *s) +void Hacl_Hash_SHA3_free(Hacl_Hash_SHA3_state_t *state) { - Hacl_Streaming_Keccak_state scrut = *s; + Hacl_Hash_SHA3_state_t scrut = *state; uint8_t *buf = scrut.buf; - Hacl_Streaming_Keccak_hash_buf block_state = scrut.block_state; - uint64_t *s1 = block_state.snd; - KRML_HOST_FREE(s1); - KRML_HOST_FREE(buf); + Hacl_Hash_SHA3_hash_buf block_state = scrut.block_state; + uint64_t *s = block_state.snd; KRML_HOST_FREE(s); + KRML_HOST_FREE(buf); + KRML_HOST_FREE(state); } -Hacl_Streaming_Keccak_state *Hacl_Streaming_Keccak_copy(Hacl_Streaming_Keccak_state *s0) +Hacl_Hash_SHA3_state_t *Hacl_Hash_SHA3_copy(Hacl_Hash_SHA3_state_t *state) { - Hacl_Streaming_Keccak_state scrut0 = *s0; - Hacl_Streaming_Keccak_hash_buf block_state0 = scrut0.block_state; + Hacl_Hash_SHA3_state_t scrut0 = *state; + Hacl_Hash_SHA3_hash_buf block_state0 = scrut0.block_state; uint8_t *buf0 = scrut0.buf; uint64_t total_len0 = scrut0.total_len; Spec_Hash_Definitions_hash_alg i = block_state0.fst; KRML_CHECK_SIZE(sizeof (uint8_t), block_len(i)); uint8_t *buf1 = (uint8_t *)KRML_HOST_CALLOC(block_len(i), sizeof (uint8_t)); memcpy(buf1, buf0, block_len(i) * sizeof (uint8_t)); - uint64_t *buf = (uint64_t *)KRML_HOST_CALLOC((uint32_t)25U, sizeof (uint64_t)); - Hacl_Streaming_Keccak_hash_buf block_state = { .fst = i, .snd = buf }; + uint64_t *buf = (uint64_t *)KRML_HOST_CALLOC(25U, sizeof (uint64_t)); + Hacl_Hash_SHA3_hash_buf block_state = { .fst = i, .snd = buf }; hash_buf2 scrut = { .fst = 
block_state0, .snd = block_state }; uint64_t *s_dst = scrut.snd.snd; uint64_t *s_src = scrut.fst.snd; - memcpy(s_dst, s_src, (uint32_t)25U * sizeof (uint64_t)); - Hacl_Streaming_Keccak_state + memcpy(s_dst, s_src, 25U * sizeof (uint64_t)); + Hacl_Hash_SHA3_state_t s = { .block_state = block_state, .buf = buf1, .total_len = total_len0 }; - Hacl_Streaming_Keccak_state - *p = (Hacl_Streaming_Keccak_state *)KRML_HOST_MALLOC(sizeof (Hacl_Streaming_Keccak_state)); + Hacl_Hash_SHA3_state_t + *p = (Hacl_Hash_SHA3_state_t *)KRML_HOST_MALLOC(sizeof (Hacl_Hash_SHA3_state_t)); p[0U] = s; return p; } -void Hacl_Streaming_Keccak_reset(Hacl_Streaming_Keccak_state *s) +void Hacl_Hash_SHA3_reset(Hacl_Hash_SHA3_state_t *state) { - Hacl_Streaming_Keccak_state scrut = *s; + Hacl_Hash_SHA3_state_t scrut = *state; uint8_t *buf = scrut.buf; - Hacl_Streaming_Keccak_hash_buf block_state = scrut.block_state; - uint64_t *s1 = block_state.snd; - memset(s1, 0U, (uint32_t)25U * sizeof (uint64_t)); - Hacl_Streaming_Keccak_state - tmp = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)(uint32_t)0U }; - s[0U] = tmp; + Hacl_Hash_SHA3_hash_buf block_state = scrut.block_state; + Spec_Hash_Definitions_hash_alg i = block_state.fst; + KRML_MAYBE_UNUSED_VAR(i); + uint64_t *s = block_state.snd; + memset(s, 0U, 25U * sizeof (uint64_t)); + Hacl_Hash_SHA3_state_t + tmp = { .block_state = block_state, .buf = buf, .total_len = (uint64_t)0U }; + state[0U] = tmp; } Hacl_Streaming_Types_error_code -Hacl_Streaming_Keccak_update(Hacl_Streaming_Keccak_state *p, uint8_t *data, uint32_t len) +Hacl_Hash_SHA3_update(Hacl_Hash_SHA3_state_t *state, uint8_t *chunk, uint32_t chunk_len) { - Hacl_Streaming_Keccak_state s = *p; - Hacl_Streaming_Keccak_hash_buf block_state = s.block_state; + Hacl_Hash_SHA3_state_t s = *state; + Hacl_Hash_SHA3_hash_buf block_state = s.block_state; uint64_t total_len = s.total_len; Spec_Hash_Definitions_hash_alg i = block_state.fst; - if ((uint64_t)len > (uint64_t)0xFFFFFFFFFFFFFFFFU - total_len) + if ((uint64_t)chunk_len > 0xFFFFFFFFFFFFFFFFULL - total_len) { return Hacl_Streaming_Types_MaximumLengthExceeded; } uint32_t sz; - if (total_len % (uint64_t)block_len(i) == (uint64_t)0U && total_len > (uint64_t)0U) + if (total_len % (uint64_t)block_len(i) == 0ULL && total_len > 0ULL) { sz = block_len(i); } @@ -255,14 +255,14 @@ Hacl_Streaming_Keccak_update(Hacl_Streaming_Keccak_state *p, uint8_t *data, uint { sz = (uint32_t)(total_len % (uint64_t)block_len(i)); } - if (len <= block_len(i) - sz) + if (chunk_len <= block_len(i) - sz) { - Hacl_Streaming_Keccak_state s1 = *p; - Hacl_Streaming_Keccak_hash_buf block_state1 = s1.block_state; + Hacl_Hash_SHA3_state_t s1 = *state; + Hacl_Hash_SHA3_hash_buf block_state1 = s1.block_state; uint8_t *buf = s1.buf; uint64_t total_len1 = s1.total_len; uint32_t sz1; - if (total_len1 % (uint64_t)block_len(i) == (uint64_t)0U && total_len1 > (uint64_t)0U) + if (total_len1 % (uint64_t)block_len(i) == 0ULL && total_len1 > 0ULL) { sz1 = block_len(i); } @@ -271,26 +271,20 @@ Hacl_Streaming_Keccak_update(Hacl_Streaming_Keccak_state *p, uint8_t *data, uint sz1 = (uint32_t)(total_len1 % (uint64_t)block_len(i)); } uint8_t *buf2 = buf + sz1; - memcpy(buf2, data, len * sizeof (uint8_t)); - uint64_t total_len2 = total_len1 + (uint64_t)len; - *p + memcpy(buf2, chunk, chunk_len * sizeof (uint8_t)); + uint64_t total_len2 = total_len1 + (uint64_t)chunk_len; + *state = - ( - (Hacl_Streaming_Keccak_state){ - .block_state = block_state1, - .buf = buf, - .total_len = total_len2 - } - ); + 
((Hacl_Hash_SHA3_state_t){ .block_state = block_state1, .buf = buf, .total_len = total_len2 }); } - else if (sz == (uint32_t)0U) + else if (sz == 0U) { - Hacl_Streaming_Keccak_state s1 = *p; - Hacl_Streaming_Keccak_hash_buf block_state1 = s1.block_state; + Hacl_Hash_SHA3_state_t s1 = *state; + Hacl_Hash_SHA3_hash_buf block_state1 = s1.block_state; uint8_t *buf = s1.buf; uint64_t total_len1 = s1.total_len; uint32_t sz1; - if (total_len1 % (uint64_t)block_len(i) == (uint64_t)0U && total_len1 > (uint64_t)0U) + if (total_len1 % (uint64_t)block_len(i) == 0ULL && total_len1 > 0ULL) { sz1 = block_len(i); } @@ -298,52 +292,52 @@ Hacl_Streaming_Keccak_update(Hacl_Streaming_Keccak_state *p, uint8_t *data, uint { sz1 = (uint32_t)(total_len1 % (uint64_t)block_len(i)); } - if (!(sz1 == (uint32_t)0U)) + if (!(sz1 == 0U)) { Spec_Hash_Definitions_hash_alg a1 = block_state1.fst; uint64_t *s2 = block_state1.snd; Hacl_Hash_SHA3_update_multi_sha3(a1, s2, buf, block_len(i) / block_len(a1)); } uint32_t ite; - if ((uint64_t)len % (uint64_t)block_len(i) == (uint64_t)0U && (uint64_t)len > (uint64_t)0U) + if ((uint64_t)chunk_len % (uint64_t)block_len(i) == 0ULL && (uint64_t)chunk_len > 0ULL) { ite = block_len(i); } else { - ite = (uint32_t)((uint64_t)len % (uint64_t)block_len(i)); + ite = (uint32_t)((uint64_t)chunk_len % (uint64_t)block_len(i)); } - uint32_t n_blocks = (len - ite) / block_len(i); + uint32_t n_blocks = (chunk_len - ite) / block_len(i); uint32_t data1_len = n_blocks * block_len(i); - uint32_t data2_len = len - data1_len; - uint8_t *data1 = data; - uint8_t *data2 = data + data1_len; + uint32_t data2_len = chunk_len - data1_len; + uint8_t *data1 = chunk; + uint8_t *data2 = chunk + data1_len; Spec_Hash_Definitions_hash_alg a1 = block_state1.fst; uint64_t *s2 = block_state1.snd; Hacl_Hash_SHA3_update_multi_sha3(a1, s2, data1, data1_len / block_len(a1)); uint8_t *dst = buf; memcpy(dst, data2, data2_len * sizeof (uint8_t)); - *p + *state = ( - (Hacl_Streaming_Keccak_state){ + (Hacl_Hash_SHA3_state_t){ .block_state = block_state1, .buf = buf, - .total_len = total_len1 + (uint64_t)len + .total_len = total_len1 + (uint64_t)chunk_len } ); } else { uint32_t diff = block_len(i) - sz; - uint8_t *data1 = data; - uint8_t *data2 = data + diff; - Hacl_Streaming_Keccak_state s1 = *p; - Hacl_Streaming_Keccak_hash_buf block_state10 = s1.block_state; + uint8_t *chunk1 = chunk; + uint8_t *chunk2 = chunk + diff; + Hacl_Hash_SHA3_state_t s1 = *state; + Hacl_Hash_SHA3_hash_buf block_state10 = s1.block_state; uint8_t *buf0 = s1.buf; uint64_t total_len10 = s1.total_len; uint32_t sz10; - if (total_len10 % (uint64_t)block_len(i) == (uint64_t)0U && total_len10 > (uint64_t)0U) + if (total_len10 % (uint64_t)block_len(i) == 0ULL && total_len10 > 0ULL) { sz10 = block_len(i); } @@ -352,23 +346,23 @@ Hacl_Streaming_Keccak_update(Hacl_Streaming_Keccak_state *p, uint8_t *data, uint sz10 = (uint32_t)(total_len10 % (uint64_t)block_len(i)); } uint8_t *buf2 = buf0 + sz10; - memcpy(buf2, data1, diff * sizeof (uint8_t)); + memcpy(buf2, chunk1, diff * sizeof (uint8_t)); uint64_t total_len2 = total_len10 + (uint64_t)diff; - *p + *state = ( - (Hacl_Streaming_Keccak_state){ + (Hacl_Hash_SHA3_state_t){ .block_state = block_state10, .buf = buf0, .total_len = total_len2 } ); - Hacl_Streaming_Keccak_state s10 = *p; - Hacl_Streaming_Keccak_hash_buf block_state1 = s10.block_state; + Hacl_Hash_SHA3_state_t s10 = *state; + Hacl_Hash_SHA3_hash_buf block_state1 = s10.block_state; uint8_t *buf = s10.buf; uint64_t total_len1 = s10.total_len; uint32_t sz1; - 
if (total_len1 % (uint64_t)block_len(i) == (uint64_t)0U && total_len1 > (uint64_t)0U) + if (total_len1 % (uint64_t)block_len(i) == 0ULL && total_len1 > 0ULL) { sz1 = block_len(i); } @@ -376,7 +370,7 @@ Hacl_Streaming_Keccak_update(Hacl_Streaming_Keccak_state *p, uint8_t *data, uint { sz1 = (uint32_t)(total_len1 % (uint64_t)block_len(i)); } - if (!(sz1 == (uint32_t)0U)) + if (!(sz1 == 0U)) { Spec_Hash_Definitions_hash_alg a1 = block_state1.fst; uint64_t *s2 = block_state1.snd; @@ -385,35 +379,35 @@ Hacl_Streaming_Keccak_update(Hacl_Streaming_Keccak_state *p, uint8_t *data, uint uint32_t ite; if ( - (uint64_t)(len - diff) + (uint64_t)(chunk_len - diff) % (uint64_t)block_len(i) - == (uint64_t)0U - && (uint64_t)(len - diff) > (uint64_t)0U + == 0ULL + && (uint64_t)(chunk_len - diff) > 0ULL ) { ite = block_len(i); } else { - ite = (uint32_t)((uint64_t)(len - diff) % (uint64_t)block_len(i)); + ite = (uint32_t)((uint64_t)(chunk_len - diff) % (uint64_t)block_len(i)); } - uint32_t n_blocks = (len - diff - ite) / block_len(i); + uint32_t n_blocks = (chunk_len - diff - ite) / block_len(i); uint32_t data1_len = n_blocks * block_len(i); - uint32_t data2_len = len - diff - data1_len; - uint8_t *data11 = data2; - uint8_t *data21 = data2 + data1_len; + uint32_t data2_len = chunk_len - diff - data1_len; + uint8_t *data1 = chunk2; + uint8_t *data2 = chunk2 + data1_len; Spec_Hash_Definitions_hash_alg a1 = block_state1.fst; uint64_t *s2 = block_state1.snd; - Hacl_Hash_SHA3_update_multi_sha3(a1, s2, data11, data1_len / block_len(a1)); + Hacl_Hash_SHA3_update_multi_sha3(a1, s2, data1, data1_len / block_len(a1)); uint8_t *dst = buf; - memcpy(dst, data21, data2_len * sizeof (uint8_t)); - *p + memcpy(dst, data2, data2_len * sizeof (uint8_t)); + *state = ( - (Hacl_Streaming_Keccak_state){ + (Hacl_Hash_SHA3_state_t){ .block_state = block_state1, .buf = buf, - .total_len = total_len1 + (uint64_t)(len - diff) + .total_len = total_len1 + (uint64_t)(chunk_len - diff) } ); } @@ -421,19 +415,19 @@ Hacl_Streaming_Keccak_update(Hacl_Streaming_Keccak_state *p, uint8_t *data, uint } static void -finish_( +digest_( Spec_Hash_Definitions_hash_alg a, - Hacl_Streaming_Keccak_state *p, - uint8_t *dst, + Hacl_Hash_SHA3_state_t *state, + uint8_t *output, uint32_t l ) { - Hacl_Streaming_Keccak_state scrut0 = *p; - Hacl_Streaming_Keccak_hash_buf block_state = scrut0.block_state; + Hacl_Hash_SHA3_state_t scrut0 = *state; + Hacl_Hash_SHA3_hash_buf block_state = scrut0.block_state; uint8_t *buf_ = scrut0.buf; uint64_t total_len = scrut0.total_len; uint32_t r; - if (total_len % (uint64_t)block_len(a) == (uint64_t)0U && total_len > (uint64_t)0U) + if (total_len % (uint64_t)block_len(a) == 0ULL && total_len > 0ULL) { r = block_len(a); } @@ -443,25 +437,25 @@ finish_( } uint8_t *buf_1 = buf_; uint64_t buf[25U] = { 0U }; - Hacl_Streaming_Keccak_hash_buf tmp_block_state = { .fst = a, .snd = buf }; + Hacl_Hash_SHA3_hash_buf tmp_block_state = { .fst = a, .snd = buf }; hash_buf2 scrut = { .fst = block_state, .snd = tmp_block_state }; uint64_t *s_dst = scrut.snd.snd; uint64_t *s_src = scrut.fst.snd; - memcpy(s_dst, s_src, (uint32_t)25U * sizeof (uint64_t)); - uint32_t ite0; - if (r % block_len(a) == (uint32_t)0U && r > (uint32_t)0U) + memcpy(s_dst, s_src, 25U * sizeof (uint64_t)); + uint32_t ite; + if (r % block_len(a) == 0U && r > 0U) { - ite0 = block_len(a); + ite = block_len(a); } else { - ite0 = r % block_len(a); + ite = r % block_len(a); } - uint8_t *buf_last = buf_1 + r - ite0; + uint8_t *buf_last = buf_1 + r - ite; uint8_t *buf_multi = 
buf_1; Spec_Hash_Definitions_hash_alg a1 = tmp_block_state.fst; uint64_t *s0 = tmp_block_state.snd; - Hacl_Hash_SHA3_update_multi_sha3(a1, s0, buf_multi, (uint32_t)0U / block_len(a1)); + Hacl_Hash_SHA3_update_multi_sha3(a1, s0, buf_multi, 0U / block_len(a1)); Spec_Hash_Definitions_hash_alg a10 = tmp_block_state.fst; uint64_t *s1 = tmp_block_state.snd; Hacl_Hash_SHA3_update_last_sha3(a10, s1, buf_last, r); @@ -469,267 +463,182 @@ finish_( uint64_t *s = tmp_block_state.snd; if (a11 == Spec_Hash_Definitions_Shake128 || a11 == Spec_Hash_Definitions_Shake256) { - uint32_t ite; - if (a11 == Spec_Hash_Definitions_Shake128 || a11 == Spec_Hash_Definitions_Shake256) - { - ite = l; - } - else - { - ite = hash_len(a11); - } - Hacl_Impl_SHA3_squeeze(s, block_len(a11), ite, dst); + Hacl_Hash_SHA3_squeeze0(s, block_len(a11), l, output); return; } - Hacl_Impl_SHA3_squeeze(s, block_len(a11), hash_len(a11), dst); + Hacl_Hash_SHA3_squeeze0(s, block_len(a11), hash_len(a11), output); } Hacl_Streaming_Types_error_code -Hacl_Streaming_Keccak_finish(Hacl_Streaming_Keccak_state *s, uint8_t *dst) +Hacl_Hash_SHA3_digest(Hacl_Hash_SHA3_state_t *state, uint8_t *output) { - Spec_Hash_Definitions_hash_alg a1 = Hacl_Streaming_Keccak_get_alg(s); + Spec_Hash_Definitions_hash_alg a1 = Hacl_Hash_SHA3_get_alg(state); if (a1 == Spec_Hash_Definitions_Shake128 || a1 == Spec_Hash_Definitions_Shake256) { return Hacl_Streaming_Types_InvalidAlgorithm; } - finish_(a1, s, dst, hash_len(a1)); + digest_(a1, state, output, hash_len(a1)); return Hacl_Streaming_Types_Success; } Hacl_Streaming_Types_error_code -Hacl_Streaming_Keccak_squeeze(Hacl_Streaming_Keccak_state *s, uint8_t *dst, uint32_t l) +Hacl_Hash_SHA3_squeeze(Hacl_Hash_SHA3_state_t *s, uint8_t *dst, uint32_t l) { - Spec_Hash_Definitions_hash_alg a1 = Hacl_Streaming_Keccak_get_alg(s); + Spec_Hash_Definitions_hash_alg a1 = Hacl_Hash_SHA3_get_alg(s); if (!(a1 == Spec_Hash_Definitions_Shake128 || a1 == Spec_Hash_Definitions_Shake256)) { return Hacl_Streaming_Types_InvalidAlgorithm; } - if (l == (uint32_t)0U) + if (l == 0U) { return Hacl_Streaming_Types_InvalidLength; } - finish_(a1, s, dst, l); + digest_(a1, s, dst, l); return Hacl_Streaming_Types_Success; } -uint32_t Hacl_Streaming_Keccak_block_len(Hacl_Streaming_Keccak_state *s) +uint32_t Hacl_Hash_SHA3_block_len(Hacl_Hash_SHA3_state_t *s) { - Spec_Hash_Definitions_hash_alg a1 = Hacl_Streaming_Keccak_get_alg(s); + Spec_Hash_Definitions_hash_alg a1 = Hacl_Hash_SHA3_get_alg(s); return block_len(a1); } -uint32_t Hacl_Streaming_Keccak_hash_len(Hacl_Streaming_Keccak_state *s) +uint32_t Hacl_Hash_SHA3_hash_len(Hacl_Hash_SHA3_state_t *s) { - Spec_Hash_Definitions_hash_alg a1 = Hacl_Streaming_Keccak_get_alg(s); + Spec_Hash_Definitions_hash_alg a1 = Hacl_Hash_SHA3_get_alg(s); return hash_len(a1); } -bool Hacl_Streaming_Keccak_is_shake(Hacl_Streaming_Keccak_state *s) +bool Hacl_Hash_SHA3_is_shake(Hacl_Hash_SHA3_state_t *s) { - Spec_Hash_Definitions_hash_alg uu____0 = Hacl_Streaming_Keccak_get_alg(s); + Spec_Hash_Definitions_hash_alg uu____0 = Hacl_Hash_SHA3_get_alg(s); return uu____0 == Spec_Hash_Definitions_Shake128 || uu____0 == Spec_Hash_Definitions_Shake256; } void -Hacl_SHA3_shake128_hacl( +Hacl_Hash_SHA3_shake128_hacl( uint32_t inputByteLen, uint8_t *input, uint32_t outputByteLen, uint8_t *output ) { - Hacl_Impl_SHA3_keccak((uint32_t)1344U, - (uint32_t)256U, - inputByteLen, - input, - (uint8_t)0x1FU, - outputByteLen, - output); + Hacl_Hash_SHA3_keccak(1344U, 256U, inputByteLen, input, 0x1FU, outputByteLen, output); } void 
-Hacl_SHA3_shake256_hacl( +Hacl_Hash_SHA3_shake256_hacl( uint32_t inputByteLen, uint8_t *input, uint32_t outputByteLen, uint8_t *output ) { - Hacl_Impl_SHA3_keccak((uint32_t)1088U, - (uint32_t)512U, - inputByteLen, - input, - (uint8_t)0x1FU, - outputByteLen, - output); + Hacl_Hash_SHA3_keccak(1088U, 512U, inputByteLen, input, 0x1FU, outputByteLen, output); } -void Hacl_SHA3_sha3_224(uint32_t inputByteLen, uint8_t *input, uint8_t *output) +void Hacl_Hash_SHA3_sha3_224(uint8_t *output, uint8_t *input, uint32_t input_len) { - Hacl_Impl_SHA3_keccak((uint32_t)1152U, - (uint32_t)448U, - inputByteLen, - input, - (uint8_t)0x06U, - (uint32_t)28U, - output); + Hacl_Hash_SHA3_keccak(1152U, 448U, input_len, input, 0x06U, 28U, output); } -void Hacl_SHA3_sha3_256(uint32_t inputByteLen, uint8_t *input, uint8_t *output) +void Hacl_Hash_SHA3_sha3_256(uint8_t *output, uint8_t *input, uint32_t input_len) { - Hacl_Impl_SHA3_keccak((uint32_t)1088U, - (uint32_t)512U, - inputByteLen, - input, - (uint8_t)0x06U, - (uint32_t)32U, - output); + Hacl_Hash_SHA3_keccak(1088U, 512U, input_len, input, 0x06U, 32U, output); } -void Hacl_SHA3_sha3_384(uint32_t inputByteLen, uint8_t *input, uint8_t *output) +void Hacl_Hash_SHA3_sha3_384(uint8_t *output, uint8_t *input, uint32_t input_len) { - Hacl_Impl_SHA3_keccak((uint32_t)832U, - (uint32_t)768U, - inputByteLen, - input, - (uint8_t)0x06U, - (uint32_t)48U, - output); + Hacl_Hash_SHA3_keccak(832U, 768U, input_len, input, 0x06U, 48U, output); } -void Hacl_SHA3_sha3_512(uint32_t inputByteLen, uint8_t *input, uint8_t *output) +void Hacl_Hash_SHA3_sha3_512(uint8_t *output, uint8_t *input, uint32_t input_len) { - Hacl_Impl_SHA3_keccak((uint32_t)576U, - (uint32_t)1024U, - inputByteLen, - input, - (uint8_t)0x06U, - (uint32_t)64U, - output); + Hacl_Hash_SHA3_keccak(576U, 1024U, input_len, input, 0x06U, 64U, output); } static const uint32_t keccak_rotc[24U] = { - (uint32_t)1U, (uint32_t)3U, (uint32_t)6U, (uint32_t)10U, (uint32_t)15U, (uint32_t)21U, - (uint32_t)28U, (uint32_t)36U, (uint32_t)45U, (uint32_t)55U, (uint32_t)2U, (uint32_t)14U, - (uint32_t)27U, (uint32_t)41U, (uint32_t)56U, (uint32_t)8U, (uint32_t)25U, (uint32_t)43U, - (uint32_t)62U, (uint32_t)18U, (uint32_t)39U, (uint32_t)61U, (uint32_t)20U, (uint32_t)44U + 1U, 3U, 6U, 10U, 15U, 21U, 28U, 36U, 45U, 55U, 2U, 14U, 27U, 41U, 56U, 8U, 25U, 43U, 62U, 18U, + 39U, 61U, 20U, 44U }; static const uint32_t keccak_piln[24U] = { - (uint32_t)10U, (uint32_t)7U, (uint32_t)11U, (uint32_t)17U, (uint32_t)18U, (uint32_t)3U, - (uint32_t)5U, (uint32_t)16U, (uint32_t)8U, (uint32_t)21U, (uint32_t)24U, (uint32_t)4U, - (uint32_t)15U, (uint32_t)23U, (uint32_t)19U, (uint32_t)13U, (uint32_t)12U, (uint32_t)2U, - (uint32_t)20U, (uint32_t)14U, (uint32_t)22U, (uint32_t)9U, (uint32_t)6U, (uint32_t)1U + 10U, 7U, 11U, 17U, 18U, 3U, 5U, 16U, 8U, 21U, 24U, 4U, 15U, 23U, 19U, 13U, 12U, 2U, 20U, 14U, + 22U, 9U, 6U, 1U }; static const uint64_t keccak_rndc[24U] = { - (uint64_t)0x0000000000000001U, (uint64_t)0x0000000000008082U, (uint64_t)0x800000000000808aU, - (uint64_t)0x8000000080008000U, (uint64_t)0x000000000000808bU, (uint64_t)0x0000000080000001U, - (uint64_t)0x8000000080008081U, (uint64_t)0x8000000000008009U, (uint64_t)0x000000000000008aU, - (uint64_t)0x0000000000000088U, (uint64_t)0x0000000080008009U, (uint64_t)0x000000008000000aU, - (uint64_t)0x000000008000808bU, (uint64_t)0x800000000000008bU, (uint64_t)0x8000000000008089U, - (uint64_t)0x8000000000008003U, (uint64_t)0x8000000000008002U, (uint64_t)0x8000000000000080U, - (uint64_t)0x000000000000800aU, 
(uint64_t)0x800000008000000aU, (uint64_t)0x8000000080008081U, - (uint64_t)0x8000000000008080U, (uint64_t)0x0000000080000001U, (uint64_t)0x8000000080008008U + 0x0000000000000001ULL, 0x0000000000008082ULL, 0x800000000000808aULL, 0x8000000080008000ULL, + 0x000000000000808bULL, 0x0000000080000001ULL, 0x8000000080008081ULL, 0x8000000000008009ULL, + 0x000000000000008aULL, 0x0000000000000088ULL, 0x0000000080008009ULL, 0x000000008000000aULL, + 0x000000008000808bULL, 0x800000000000008bULL, 0x8000000000008089ULL, 0x8000000000008003ULL, + 0x8000000000008002ULL, 0x8000000000000080ULL, 0x000000000000800aULL, 0x800000008000000aULL, + 0x8000000080008081ULL, 0x8000000000008080ULL, 0x0000000080000001ULL, 0x8000000080008008ULL }; -void Hacl_Impl_SHA3_state_permute(uint64_t *s) +void Hacl_Hash_SHA3_state_permute(uint64_t *s) { - for (uint32_t i0 = (uint32_t)0U; i0 < (uint32_t)24U; i0++) + for (uint32_t i0 = 0U; i0 < 24U; i0++) { uint64_t _C[5U] = { 0U }; KRML_MAYBE_FOR5(i, - (uint32_t)0U, - (uint32_t)5U, - (uint32_t)1U, - _C[i] = - s[i - + (uint32_t)0U] - ^ - (s[i - + (uint32_t)5U] - ^ (s[i + (uint32_t)10U] ^ (s[i + (uint32_t)15U] ^ s[i + (uint32_t)20U])));); + 0U, + 5U, + 1U, + _C[i] = s[i + 0U] ^ (s[i + 5U] ^ (s[i + 10U] ^ (s[i + 15U] ^ s[i + 20U])));); KRML_MAYBE_FOR5(i1, - (uint32_t)0U, - (uint32_t)5U, - (uint32_t)1U, - uint64_t uu____0 = _C[(i1 + (uint32_t)1U) % (uint32_t)5U]; - uint64_t - _D = - _C[(i1 + (uint32_t)4U) - % (uint32_t)5U] - ^ (uu____0 << (uint32_t)1U | uu____0 >> (uint32_t)63U); - KRML_MAYBE_FOR5(i, - (uint32_t)0U, - (uint32_t)5U, - (uint32_t)1U, - s[i1 + (uint32_t)5U * i] = s[i1 + (uint32_t)5U * i] ^ _D;);); + 0U, + 5U, + 1U, + uint64_t uu____0 = _C[(i1 + 1U) % 5U]; + uint64_t _D = _C[(i1 + 4U) % 5U] ^ (uu____0 << 1U | uu____0 >> 63U); + KRML_MAYBE_FOR5(i, 0U, 5U, 1U, s[i1 + 5U * i] = s[i1 + 5U * i] ^ _D;);); uint64_t x = s[1U]; uint64_t current = x; - for (uint32_t i = (uint32_t)0U; i < (uint32_t)24U; i++) + for (uint32_t i = 0U; i < 24U; i++) { uint32_t _Y = keccak_piln[i]; uint32_t r = keccak_rotc[i]; uint64_t temp = s[_Y]; uint64_t uu____1 = current; - s[_Y] = uu____1 << r | uu____1 >> ((uint32_t)64U - r); + s[_Y] = uu____1 << r | uu____1 >> (64U - r); current = temp; } KRML_MAYBE_FOR5(i, - (uint32_t)0U, - (uint32_t)5U, - (uint32_t)1U, - uint64_t - v0 = - s[(uint32_t)0U - + (uint32_t)5U * i] - ^ (~s[(uint32_t)1U + (uint32_t)5U * i] & s[(uint32_t)2U + (uint32_t)5U * i]); - uint64_t - v1 = - s[(uint32_t)1U - + (uint32_t)5U * i] - ^ (~s[(uint32_t)2U + (uint32_t)5U * i] & s[(uint32_t)3U + (uint32_t)5U * i]); - uint64_t - v2 = - s[(uint32_t)2U - + (uint32_t)5U * i] - ^ (~s[(uint32_t)3U + (uint32_t)5U * i] & s[(uint32_t)4U + (uint32_t)5U * i]); - uint64_t - v3 = - s[(uint32_t)3U - + (uint32_t)5U * i] - ^ (~s[(uint32_t)4U + (uint32_t)5U * i] & s[(uint32_t)0U + (uint32_t)5U * i]); - uint64_t - v4 = - s[(uint32_t)4U - + (uint32_t)5U * i] - ^ (~s[(uint32_t)0U + (uint32_t)5U * i] & s[(uint32_t)1U + (uint32_t)5U * i]); - s[(uint32_t)0U + (uint32_t)5U * i] = v0; - s[(uint32_t)1U + (uint32_t)5U * i] = v1; - s[(uint32_t)2U + (uint32_t)5U * i] = v2; - s[(uint32_t)3U + (uint32_t)5U * i] = v3; - s[(uint32_t)4U + (uint32_t)5U * i] = v4;); + 0U, + 5U, + 1U, + uint64_t v0 = s[0U + 5U * i] ^ (~s[1U + 5U * i] & s[2U + 5U * i]); + uint64_t v1 = s[1U + 5U * i] ^ (~s[2U + 5U * i] & s[3U + 5U * i]); + uint64_t v2 = s[2U + 5U * i] ^ (~s[3U + 5U * i] & s[4U + 5U * i]); + uint64_t v3 = s[3U + 5U * i] ^ (~s[4U + 5U * i] & s[0U + 5U * i]); + uint64_t v4 = s[4U + 5U * i] ^ (~s[0U + 5U * i] & s[1U + 5U * i]); + s[0U 
+ 5U * i] = v0; + s[1U + 5U * i] = v1; + s[2U + 5U * i] = v2; + s[3U + 5U * i] = v3; + s[4U + 5U * i] = v4;); uint64_t c = keccak_rndc[i0]; s[0U] = s[0U] ^ c; } } -void Hacl_Impl_SHA3_loadState(uint32_t rateInBytes, uint8_t *input, uint64_t *s) +void Hacl_Hash_SHA3_loadState(uint32_t rateInBytes, uint8_t *input, uint64_t *s) { uint8_t block[200U] = { 0U }; memcpy(block, input, rateInBytes * sizeof (uint8_t)); - for (uint32_t i = (uint32_t)0U; i < (uint32_t)25U; i++) + for (uint32_t i = 0U; i < 25U; i++) { - uint64_t u = load64_le(block + i * (uint32_t)8U); + uint64_t u = load64_le(block + i * 8U); uint64_t x = u; s[i] = s[i] ^ x; } @@ -738,18 +647,18 @@ void Hacl_Impl_SHA3_loadState(uint32_t rateInBytes, uint8_t *input, uint64_t *s) static void storeState(uint32_t rateInBytes, uint64_t *s, uint8_t *res) { uint8_t block[200U] = { 0U }; - for (uint32_t i = (uint32_t)0U; i < (uint32_t)25U; i++) + for (uint32_t i = 0U; i < 25U; i++) { uint64_t sj = s[i]; - store64_le(block + i * (uint32_t)8U, sj); + store64_le(block + i * 8U, sj); } memcpy(res, block, rateInBytes * sizeof (uint8_t)); } -void Hacl_Impl_SHA3_absorb_inner(uint32_t rateInBytes, uint8_t *block, uint64_t *s) +void Hacl_Hash_SHA3_absorb_inner(uint32_t rateInBytes, uint8_t *block, uint64_t *s) { - Hacl_Impl_SHA3_loadState(rateInBytes, block, s); - Hacl_Impl_SHA3_state_permute(s); + Hacl_Hash_SHA3_loadState(rateInBytes, block, s); + Hacl_Hash_SHA3_state_permute(s); } static void @@ -763,30 +672,30 @@ absorb( { uint32_t n_blocks = inputByteLen / rateInBytes; uint32_t rem = inputByteLen % rateInBytes; - for (uint32_t i = (uint32_t)0U; i < n_blocks; i++) + for (uint32_t i = 0U; i < n_blocks; i++) { uint8_t *block = input + i * rateInBytes; - Hacl_Impl_SHA3_absorb_inner(rateInBytes, block, s); + Hacl_Hash_SHA3_absorb_inner(rateInBytes, block, s); } uint8_t *last = input + n_blocks * rateInBytes; uint8_t lastBlock_[200U] = { 0U }; uint8_t *lastBlock = lastBlock_; memcpy(lastBlock, last, rem * sizeof (uint8_t)); lastBlock[rem] = delimitedSuffix; - Hacl_Impl_SHA3_loadState(rateInBytes, lastBlock, s); - if (!((delimitedSuffix & (uint8_t)0x80U) == (uint8_t)0U) && rem == rateInBytes - (uint32_t)1U) + Hacl_Hash_SHA3_loadState(rateInBytes, lastBlock, s); + if (!(((uint32_t)delimitedSuffix & 0x80U) == 0U) && rem == rateInBytes - 1U) { - Hacl_Impl_SHA3_state_permute(s); + Hacl_Hash_SHA3_state_permute(s); } uint8_t nextBlock_[200U] = { 0U }; uint8_t *nextBlock = nextBlock_; - nextBlock[rateInBytes - (uint32_t)1U] = (uint8_t)0x80U; - Hacl_Impl_SHA3_loadState(rateInBytes, nextBlock, s); - Hacl_Impl_SHA3_state_permute(s); + nextBlock[rateInBytes - 1U] = 0x80U; + Hacl_Hash_SHA3_loadState(rateInBytes, nextBlock, s); + Hacl_Hash_SHA3_state_permute(s); } void -Hacl_Impl_SHA3_squeeze( +Hacl_Hash_SHA3_squeeze0( uint64_t *s, uint32_t rateInBytes, uint32_t outputByteLen, @@ -797,16 +706,16 @@ Hacl_Impl_SHA3_squeeze( uint32_t remOut = outputByteLen % rateInBytes; uint8_t *last = output + outputByteLen - remOut; uint8_t *blocks = output; - for (uint32_t i = (uint32_t)0U; i < outBlocks; i++) + for (uint32_t i = 0U; i < outBlocks; i++) { storeState(rateInBytes, s, blocks + i * rateInBytes); - Hacl_Impl_SHA3_state_permute(s); + Hacl_Hash_SHA3_state_permute(s); } storeState(remOut, s, last); } void -Hacl_Impl_SHA3_keccak( +Hacl_Hash_SHA3_keccak( uint32_t rate, uint32_t capacity, uint32_t inputByteLen, @@ -816,9 +725,10 @@ Hacl_Impl_SHA3_keccak( uint8_t *output ) { - uint32_t rateInBytes = rate / (uint32_t)8U; + KRML_MAYBE_UNUSED_VAR(capacity); + uint32_t rateInBytes 
= rate / 8U; uint64_t s[25U] = { 0U }; absorb(s, rateInBytes, inputByteLen, input, delimitedSuffix); - Hacl_Impl_SHA3_squeeze(s, rateInBytes, outputByteLen, output); + Hacl_Hash_SHA3_squeeze0(s, rateInBytes, outputByteLen, output); } diff --git a/Modules/_hacl/Hacl_Hash_SHA3.h b/Modules/_hacl/Hacl_Hash_SHA3.h index 681b6af4a80e77..678e9f2fbe15e8 100644 --- a/Modules/_hacl/Hacl_Hash_SHA3.h +++ b/Modules/_hacl/Hacl_Hash_SHA3.h @@ -31,54 +31,55 @@ extern "C" { #endif #include +#include "python_hacl_namespaces.h" #include "krml/types.h" #include "krml/lowstar_endianness.h" #include "krml/internal/target.h" #include "Hacl_Streaming_Types.h" -typedef struct Hacl_Streaming_Keccak_hash_buf_s +typedef struct Hacl_Hash_SHA3_hash_buf_s { Spec_Hash_Definitions_hash_alg fst; uint64_t *snd; } -Hacl_Streaming_Keccak_hash_buf; +Hacl_Hash_SHA3_hash_buf; -typedef struct Hacl_Streaming_Keccak_state_s +typedef struct Hacl_Hash_SHA3_state_t_s { - Hacl_Streaming_Keccak_hash_buf block_state; + Hacl_Hash_SHA3_hash_buf block_state; uint8_t *buf; uint64_t total_len; } -Hacl_Streaming_Keccak_state; +Hacl_Hash_SHA3_state_t; -Spec_Hash_Definitions_hash_alg Hacl_Streaming_Keccak_get_alg(Hacl_Streaming_Keccak_state *s); +Spec_Hash_Definitions_hash_alg Hacl_Hash_SHA3_get_alg(Hacl_Hash_SHA3_state_t *s); -Hacl_Streaming_Keccak_state *Hacl_Streaming_Keccak_malloc(Spec_Hash_Definitions_hash_alg a); +Hacl_Hash_SHA3_state_t *Hacl_Hash_SHA3_malloc(Spec_Hash_Definitions_hash_alg a); -void Hacl_Streaming_Keccak_free(Hacl_Streaming_Keccak_state *s); +void Hacl_Hash_SHA3_free(Hacl_Hash_SHA3_state_t *state); -Hacl_Streaming_Keccak_state *Hacl_Streaming_Keccak_copy(Hacl_Streaming_Keccak_state *s0); +Hacl_Hash_SHA3_state_t *Hacl_Hash_SHA3_copy(Hacl_Hash_SHA3_state_t *state); -void Hacl_Streaming_Keccak_reset(Hacl_Streaming_Keccak_state *s); +void Hacl_Hash_SHA3_reset(Hacl_Hash_SHA3_state_t *state); Hacl_Streaming_Types_error_code -Hacl_Streaming_Keccak_update(Hacl_Streaming_Keccak_state *p, uint8_t *data, uint32_t len); +Hacl_Hash_SHA3_update(Hacl_Hash_SHA3_state_t *state, uint8_t *chunk, uint32_t chunk_len); Hacl_Streaming_Types_error_code -Hacl_Streaming_Keccak_finish(Hacl_Streaming_Keccak_state *s, uint8_t *dst); +Hacl_Hash_SHA3_digest(Hacl_Hash_SHA3_state_t *state, uint8_t *output); Hacl_Streaming_Types_error_code -Hacl_Streaming_Keccak_squeeze(Hacl_Streaming_Keccak_state *s, uint8_t *dst, uint32_t l); +Hacl_Hash_SHA3_squeeze(Hacl_Hash_SHA3_state_t *s, uint8_t *dst, uint32_t l); -uint32_t Hacl_Streaming_Keccak_block_len(Hacl_Streaming_Keccak_state *s); +uint32_t Hacl_Hash_SHA3_block_len(Hacl_Hash_SHA3_state_t *s); -uint32_t Hacl_Streaming_Keccak_hash_len(Hacl_Streaming_Keccak_state *s); +uint32_t Hacl_Hash_SHA3_hash_len(Hacl_Hash_SHA3_state_t *s); -bool Hacl_Streaming_Keccak_is_shake(Hacl_Streaming_Keccak_state *s); +bool Hacl_Hash_SHA3_is_shake(Hacl_Hash_SHA3_state_t *s); void -Hacl_SHA3_shake128_hacl( +Hacl_Hash_SHA3_shake128_hacl( uint32_t inputByteLen, uint8_t *input, uint32_t outputByteLen, @@ -86,25 +87,25 @@ Hacl_SHA3_shake128_hacl( ); void -Hacl_SHA3_shake256_hacl( +Hacl_Hash_SHA3_shake256_hacl( uint32_t inputByteLen, uint8_t *input, uint32_t outputByteLen, uint8_t *output ); -void Hacl_SHA3_sha3_224(uint32_t inputByteLen, uint8_t *input, uint8_t *output); +void Hacl_Hash_SHA3_sha3_224(uint8_t *output, uint8_t *input, uint32_t input_len); -void Hacl_SHA3_sha3_256(uint32_t inputByteLen, uint8_t *input, uint8_t *output); +void Hacl_Hash_SHA3_sha3_256(uint8_t *output, uint8_t *input, uint32_t input_len); -void 
Hacl_SHA3_sha3_384(uint32_t inputByteLen, uint8_t *input, uint8_t *output); +void Hacl_Hash_SHA3_sha3_384(uint8_t *output, uint8_t *input, uint32_t input_len); -void Hacl_SHA3_sha3_512(uint32_t inputByteLen, uint8_t *input, uint8_t *output); +void Hacl_Hash_SHA3_sha3_512(uint8_t *output, uint8_t *input, uint32_t input_len); -void Hacl_Impl_SHA3_absorb_inner(uint32_t rateInBytes, uint8_t *block, uint64_t *s); +void Hacl_Hash_SHA3_absorb_inner(uint32_t rateInBytes, uint8_t *block, uint64_t *s); void -Hacl_Impl_SHA3_squeeze( +Hacl_Hash_SHA3_squeeze0( uint64_t *s, uint32_t rateInBytes, uint32_t outputByteLen, @@ -112,7 +113,7 @@ Hacl_Impl_SHA3_squeeze( ); void -Hacl_Impl_SHA3_keccak( +Hacl_Hash_SHA3_keccak( uint32_t rate, uint32_t capacity, uint32_t inputByteLen, diff --git a/Modules/_hacl/include/krml/FStar_UInt128_Verified.h b/Modules/_hacl/include/krml/FStar_UInt128_Verified.h index 3d36d440735530..bdf25898f2bc25 100644 --- a/Modules/_hacl/include/krml/FStar_UInt128_Verified.h +++ b/Modules/_hacl/include/krml/FStar_UInt128_Verified.h @@ -15,7 +15,7 @@ static inline uint64_t FStar_UInt128_constant_time_carry(uint64_t a, uint64_t b) { - return (a ^ ((a ^ b) | ((a - b) ^ b))) >> (uint32_t)63U; + return (a ^ ((a ^ b) | ((a - b) ^ b))) >> 63U; } static inline uint64_t FStar_UInt128_carry(uint64_t a, uint64_t b) @@ -118,7 +118,7 @@ static inline FStar_UInt128_uint128 FStar_UInt128_lognot(FStar_UInt128_uint128 a return lit; } -static uint32_t FStar_UInt128_u32_64 = (uint32_t)64U; +static uint32_t FStar_UInt128_u32_64 = 64U; static inline uint64_t FStar_UInt128_add_u64_shift_left(uint64_t hi, uint64_t lo, uint32_t s) { @@ -134,7 +134,7 @@ FStar_UInt128_add_u64_shift_left_respec(uint64_t hi, uint64_t lo, uint32_t s) static inline FStar_UInt128_uint128 FStar_UInt128_shift_left_small(FStar_UInt128_uint128 a, uint32_t s) { - if (s == (uint32_t)0U) + if (s == 0U) { return a; } @@ -151,7 +151,7 @@ static inline FStar_UInt128_uint128 FStar_UInt128_shift_left_large(FStar_UInt128_uint128 a, uint32_t s) { FStar_UInt128_uint128 lit; - lit.low = (uint64_t)0U; + lit.low = 0ULL; lit.high = a.low << (s - FStar_UInt128_u32_64); return lit; } @@ -183,7 +183,7 @@ FStar_UInt128_add_u64_shift_right_respec(uint64_t hi, uint64_t lo, uint32_t s) static inline FStar_UInt128_uint128 FStar_UInt128_shift_right_small(FStar_UInt128_uint128 a, uint32_t s) { - if (s == (uint32_t)0U) + if (s == 0U) { return a; } @@ -201,7 +201,7 @@ FStar_UInt128_shift_right_large(FStar_UInt128_uint128 a, uint32_t s) { FStar_UInt128_uint128 lit; lit.low = a.high >> (s - FStar_UInt128_u32_64); - lit.high = (uint64_t)0U; + lit.high = 0ULL; return lit; } @@ -269,7 +269,7 @@ static inline FStar_UInt128_uint128 FStar_UInt128_uint64_to_uint128(uint64_t a) { FStar_UInt128_uint128 lit; lit.low = a; - lit.high = (uint64_t)0U; + lit.high = 0ULL; return lit; } @@ -280,10 +280,10 @@ static inline uint64_t FStar_UInt128_uint128_to_uint64(FStar_UInt128_uint128 a) static inline uint64_t FStar_UInt128_u64_mod_32(uint64_t a) { - return a & (uint64_t)0xffffffffU; + return a & 0xffffffffULL; } -static uint32_t FStar_UInt128_u32_32 = (uint32_t)32U; +static uint32_t FStar_UInt128_u32_32 = 32U; static inline uint64_t FStar_UInt128_u32_combine(uint64_t hi, uint64_t lo) { diff --git a/Modules/_hacl/include/krml/FStar_UInt_8_16_32_64.h b/Modules/_hacl/include/krml/FStar_UInt_8_16_32_64.h index a56c7d613498b7..1bdec972a2f249 100644 --- a/Modules/_hacl/include/krml/FStar_UInt_8_16_32_64.h +++ b/Modules/_hacl/include/krml/FStar_UInt_8_16_32_64.h @@ -14,16 +14,16 @@ #include 
"krml/types.h" #include "krml/internal/target.h" -static inline uint64_t FStar_UInt64_eq_mask(uint64_t a, uint64_t b) +static KRML_NOINLINE uint64_t FStar_UInt64_eq_mask(uint64_t a, uint64_t b) { uint64_t x = a ^ b; - uint64_t minus_x = ~x + (uint64_t)1U; + uint64_t minus_x = ~x + 1ULL; uint64_t x_or_minus_x = x | minus_x; - uint64_t xnx = x_or_minus_x >> (uint32_t)63U; - return xnx - (uint64_t)1U; + uint64_t xnx = x_or_minus_x >> 63U; + return xnx - 1ULL; } -static inline uint64_t FStar_UInt64_gte_mask(uint64_t a, uint64_t b) +static KRML_NOINLINE uint64_t FStar_UInt64_gte_mask(uint64_t a, uint64_t b) { uint64_t x = a; uint64_t y = b; @@ -32,20 +32,20 @@ static inline uint64_t FStar_UInt64_gte_mask(uint64_t a, uint64_t b) uint64_t x_sub_y_xor_y = x_sub_y ^ y; uint64_t q = x_xor_y | x_sub_y_xor_y; uint64_t x_xor_q = x ^ q; - uint64_t x_xor_q_ = x_xor_q >> (uint32_t)63U; - return x_xor_q_ - (uint64_t)1U; + uint64_t x_xor_q_ = x_xor_q >> 63U; + return x_xor_q_ - 1ULL; } -static inline uint32_t FStar_UInt32_eq_mask(uint32_t a, uint32_t b) +static KRML_NOINLINE uint32_t FStar_UInt32_eq_mask(uint32_t a, uint32_t b) { uint32_t x = a ^ b; - uint32_t minus_x = ~x + (uint32_t)1U; + uint32_t minus_x = ~x + 1U; uint32_t x_or_minus_x = x | minus_x; - uint32_t xnx = x_or_minus_x >> (uint32_t)31U; - return xnx - (uint32_t)1U; + uint32_t xnx = x_or_minus_x >> 31U; + return xnx - 1U; } -static inline uint32_t FStar_UInt32_gte_mask(uint32_t a, uint32_t b) +static KRML_NOINLINE uint32_t FStar_UInt32_gte_mask(uint32_t a, uint32_t b) { uint32_t x = a; uint32_t y = b; @@ -54,52 +54,52 @@ static inline uint32_t FStar_UInt32_gte_mask(uint32_t a, uint32_t b) uint32_t x_sub_y_xor_y = x_sub_y ^ y; uint32_t q = x_xor_y | x_sub_y_xor_y; uint32_t x_xor_q = x ^ q; - uint32_t x_xor_q_ = x_xor_q >> (uint32_t)31U; - return x_xor_q_ - (uint32_t)1U; + uint32_t x_xor_q_ = x_xor_q >> 31U; + return x_xor_q_ - 1U; } -static inline uint16_t FStar_UInt16_eq_mask(uint16_t a, uint16_t b) +static KRML_NOINLINE uint16_t FStar_UInt16_eq_mask(uint16_t a, uint16_t b) { - uint16_t x = a ^ b; - uint16_t minus_x = ~x + (uint16_t)1U; - uint16_t x_or_minus_x = x | minus_x; - uint16_t xnx = x_or_minus_x >> (uint32_t)15U; - return xnx - (uint16_t)1U; + uint16_t x = (uint32_t)a ^ (uint32_t)b; + uint16_t minus_x = (uint32_t)~x + 1U; + uint16_t x_or_minus_x = (uint32_t)x | (uint32_t)minus_x; + uint16_t xnx = (uint32_t)x_or_minus_x >> 15U; + return (uint32_t)xnx - 1U; } -static inline uint16_t FStar_UInt16_gte_mask(uint16_t a, uint16_t b) +static KRML_NOINLINE uint16_t FStar_UInt16_gte_mask(uint16_t a, uint16_t b) { uint16_t x = a; uint16_t y = b; - uint16_t x_xor_y = x ^ y; - uint16_t x_sub_y = x - y; - uint16_t x_sub_y_xor_y = x_sub_y ^ y; - uint16_t q = x_xor_y | x_sub_y_xor_y; - uint16_t x_xor_q = x ^ q; - uint16_t x_xor_q_ = x_xor_q >> (uint32_t)15U; - return x_xor_q_ - (uint16_t)1U; + uint16_t x_xor_y = (uint32_t)x ^ (uint32_t)y; + uint16_t x_sub_y = (uint32_t)x - (uint32_t)y; + uint16_t x_sub_y_xor_y = (uint32_t)x_sub_y ^ (uint32_t)y; + uint16_t q = (uint32_t)x_xor_y | (uint32_t)x_sub_y_xor_y; + uint16_t x_xor_q = (uint32_t)x ^ (uint32_t)q; + uint16_t x_xor_q_ = (uint32_t)x_xor_q >> 15U; + return (uint32_t)x_xor_q_ - 1U; } -static inline uint8_t FStar_UInt8_eq_mask(uint8_t a, uint8_t b) +static KRML_NOINLINE uint8_t FStar_UInt8_eq_mask(uint8_t a, uint8_t b) { - uint8_t x = a ^ b; - uint8_t minus_x = ~x + (uint8_t)1U; - uint8_t x_or_minus_x = x | minus_x; - uint8_t xnx = x_or_minus_x >> (uint32_t)7U; - return xnx - (uint8_t)1U; + uint8_t x = 
(uint32_t)a ^ (uint32_t)b; + uint8_t minus_x = (uint32_t)~x + 1U; + uint8_t x_or_minus_x = (uint32_t)x | (uint32_t)minus_x; + uint8_t xnx = (uint32_t)x_or_minus_x >> 7U; + return (uint32_t)xnx - 1U; } -static inline uint8_t FStar_UInt8_gte_mask(uint8_t a, uint8_t b) +static KRML_NOINLINE uint8_t FStar_UInt8_gte_mask(uint8_t a, uint8_t b) { uint8_t x = a; uint8_t y = b; - uint8_t x_xor_y = x ^ y; - uint8_t x_sub_y = x - y; - uint8_t x_sub_y_xor_y = x_sub_y ^ y; - uint8_t q = x_xor_y | x_sub_y_xor_y; - uint8_t x_xor_q = x ^ q; - uint8_t x_xor_q_ = x_xor_q >> (uint32_t)7U; - return x_xor_q_ - (uint8_t)1U; + uint8_t x_xor_y = (uint32_t)x ^ (uint32_t)y; + uint8_t x_sub_y = (uint32_t)x - (uint32_t)y; + uint8_t x_sub_y_xor_y = (uint32_t)x_sub_y ^ (uint32_t)y; + uint8_t q = (uint32_t)x_xor_y | (uint32_t)x_sub_y_xor_y; + uint8_t x_xor_q = (uint32_t)x ^ (uint32_t)q; + uint8_t x_xor_q_ = (uint32_t)x_xor_q >> 7U; + return (uint32_t)x_xor_q_ - 1U; } diff --git a/Modules/_hacl/include/krml/internal/target.h b/Modules/_hacl/include/krml/internal/target.h index 5a2f94eb2ec8da..c7fcc0151e6f10 100644 --- a/Modules/_hacl/include/krml/internal/target.h +++ b/Modules/_hacl/include/krml/internal/target.h @@ -4,13 +4,13 @@ #ifndef __KRML_TARGET_H #define __KRML_TARGET_H -#include -#include -#include -#include +#include #include #include -#include +#include +#include +#include +#include /* Since KaRaMeL emits the inline keyword unconditionally, we follow the * guidelines at https://gcc.gnu.org/onlinedocs/gcc/Inline.html and make this @@ -57,6 +57,31 @@ # define KRML_HOST_IGNORE(x) (void)(x) #endif +#ifndef KRML_MAYBE_UNUSED_VAR +# define KRML_MAYBE_UNUSED_VAR(x) KRML_HOST_IGNORE(x) +#endif + +#ifndef KRML_MAYBE_UNUSED +# if defined(__GNUC__) +# define KRML_MAYBE_UNUSED __attribute__((unused)) +# else +# define KRML_MAYBE_UNUSED +# endif +#endif + +#ifndef KRML_NOINLINE +# if defined(_MSC_VER) +# define KRML_NOINLINE __declspec(noinline) +# elif defined (__GNUC__) +# define KRML_NOINLINE __attribute__((noinline,unused)) +# else +# define KRML_NOINLINE +# warning "The KRML_NOINLINE macro is not defined for this toolchain!" +# warning "The compiler may defeat side-channel resistance with optimizations." +# warning "Please locate target.h and try to fill it out with a suitable definition for this compiler." +# endif +#endif + /* In FStar.Buffer.fst, the size of arrays is uint32_t, but it's a number of * *elements*. Do an ugly, run-time check (some of which KaRaMeL can eliminate). 
*/ @@ -83,184 +108,186 @@ #define KRML_LOOP1(i, n, x) { \ x \ i += n; \ + (void) i; \ } -#define KRML_LOOP2(i, n, x) \ - KRML_LOOP1(i, n, x) \ +#define KRML_LOOP2(i, n, x) \ + KRML_LOOP1(i, n, x) \ KRML_LOOP1(i, n, x) -#define KRML_LOOP3(i, n, x) \ - KRML_LOOP2(i, n, x) \ +#define KRML_LOOP3(i, n, x) \ + KRML_LOOP2(i, n, x) \ KRML_LOOP1(i, n, x) -#define KRML_LOOP4(i, n, x) \ - KRML_LOOP2(i, n, x) \ +#define KRML_LOOP4(i, n, x) \ + KRML_LOOP2(i, n, x) \ KRML_LOOP2(i, n, x) -#define KRML_LOOP5(i, n, x) \ - KRML_LOOP4(i, n, x) \ +#define KRML_LOOP5(i, n, x) \ + KRML_LOOP4(i, n, x) \ KRML_LOOP1(i, n, x) -#define KRML_LOOP6(i, n, x) \ - KRML_LOOP4(i, n, x) \ +#define KRML_LOOP6(i, n, x) \ + KRML_LOOP4(i, n, x) \ KRML_LOOP2(i, n, x) -#define KRML_LOOP7(i, n, x) \ - KRML_LOOP4(i, n, x) \ +#define KRML_LOOP7(i, n, x) \ + KRML_LOOP4(i, n, x) \ KRML_LOOP3(i, n, x) -#define KRML_LOOP8(i, n, x) \ - KRML_LOOP4(i, n, x) \ +#define KRML_LOOP8(i, n, x) \ + KRML_LOOP4(i, n, x) \ KRML_LOOP4(i, n, x) -#define KRML_LOOP9(i, n, x) \ - KRML_LOOP8(i, n, x) \ +#define KRML_LOOP9(i, n, x) \ + KRML_LOOP8(i, n, x) \ KRML_LOOP1(i, n, x) -#define KRML_LOOP10(i, n, x) \ - KRML_LOOP8(i, n, x) \ +#define KRML_LOOP10(i, n, x) \ + KRML_LOOP8(i, n, x) \ KRML_LOOP2(i, n, x) -#define KRML_LOOP11(i, n, x) \ - KRML_LOOP8(i, n, x) \ +#define KRML_LOOP11(i, n, x) \ + KRML_LOOP8(i, n, x) \ KRML_LOOP3(i, n, x) -#define KRML_LOOP12(i, n, x) \ - KRML_LOOP8(i, n, x) \ +#define KRML_LOOP12(i, n, x) \ + KRML_LOOP8(i, n, x) \ KRML_LOOP4(i, n, x) -#define KRML_LOOP13(i, n, x) \ - KRML_LOOP8(i, n, x) \ +#define KRML_LOOP13(i, n, x) \ + KRML_LOOP8(i, n, x) \ KRML_LOOP5(i, n, x) -#define KRML_LOOP14(i, n, x) \ - KRML_LOOP8(i, n, x) \ +#define KRML_LOOP14(i, n, x) \ + KRML_LOOP8(i, n, x) \ KRML_LOOP6(i, n, x) -#define KRML_LOOP15(i, n, x) \ - KRML_LOOP8(i, n, x) \ +#define KRML_LOOP15(i, n, x) \ + KRML_LOOP8(i, n, x) \ KRML_LOOP7(i, n, x) -#define KRML_LOOP16(i, n, x) \ - KRML_LOOP8(i, n, x) \ +#define KRML_LOOP16(i, n, x) \ + KRML_LOOP8(i, n, x) \ KRML_LOOP8(i, n, x) -#define KRML_UNROLL_FOR(i, z, n, k, x) do { \ - uint32_t i = z; \ - KRML_LOOP##n(i, k, x) \ -} while (0) +#define KRML_UNROLL_FOR(i, z, n, k, x) \ + do { \ + uint32_t i = z; \ + KRML_LOOP##n(i, k, x) \ + } while (0) -#define KRML_ACTUAL_FOR(i, z, n, k, x) \ - do { \ - for (uint32_t i = z; i < n; i += k) { \ - x \ - } \ +#define KRML_ACTUAL_FOR(i, z, n, k, x) \ + do { \ + for (uint32_t i = z; i < n; i += k) { \ + x \ + } \ } while (0) #ifndef KRML_UNROLL_MAX -#define KRML_UNROLL_MAX 16 +# define KRML_UNROLL_MAX 16 #endif /* 1 is the number of loop iterations, i.e. 
(n - z)/k as evaluated by krml */ #if 0 <= KRML_UNROLL_MAX -#define KRML_MAYBE_FOR0(i, z, n, k, x) +# define KRML_MAYBE_FOR0(i, z, n, k, x) #else -#define KRML_MAYBE_FOR0(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) +# define KRML_MAYBE_FOR0(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 1 <= KRML_UNROLL_MAX -#define KRML_MAYBE_FOR1(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 1, k, x) +# define KRML_MAYBE_FOR1(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 1, k, x) #else -#define KRML_MAYBE_FOR1(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) +# define KRML_MAYBE_FOR1(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 2 <= KRML_UNROLL_MAX -#define KRML_MAYBE_FOR2(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 2, k, x) +# define KRML_MAYBE_FOR2(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 2, k, x) #else -#define KRML_MAYBE_FOR2(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) +# define KRML_MAYBE_FOR2(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 3 <= KRML_UNROLL_MAX -#define KRML_MAYBE_FOR3(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 3, k, x) +# define KRML_MAYBE_FOR3(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 3, k, x) #else -#define KRML_MAYBE_FOR3(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) +# define KRML_MAYBE_FOR3(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 4 <= KRML_UNROLL_MAX -#define KRML_MAYBE_FOR4(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 4, k, x) +# define KRML_MAYBE_FOR4(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 4, k, x) #else -#define KRML_MAYBE_FOR4(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) +# define KRML_MAYBE_FOR4(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 5 <= KRML_UNROLL_MAX -#define KRML_MAYBE_FOR5(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 5, k, x) +# define KRML_MAYBE_FOR5(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 5, k, x) #else -#define KRML_MAYBE_FOR5(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) +# define KRML_MAYBE_FOR5(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 6 <= KRML_UNROLL_MAX -#define KRML_MAYBE_FOR6(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 6, k, x) +# define KRML_MAYBE_FOR6(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 6, k, x) #else -#define KRML_MAYBE_FOR6(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) +# define KRML_MAYBE_FOR6(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 7 <= KRML_UNROLL_MAX -#define KRML_MAYBE_FOR7(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 7, k, x) +# define KRML_MAYBE_FOR7(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 7, k, x) #else -#define KRML_MAYBE_FOR7(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) +# define KRML_MAYBE_FOR7(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 8 <= KRML_UNROLL_MAX -#define KRML_MAYBE_FOR8(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 8, k, x) +# define KRML_MAYBE_FOR8(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 8, k, x) #else -#define KRML_MAYBE_FOR8(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) +# define KRML_MAYBE_FOR8(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 9 <= KRML_UNROLL_MAX -#define KRML_MAYBE_FOR9(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 9, k, x) +# define KRML_MAYBE_FOR9(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 9, k, x) #else -#define KRML_MAYBE_FOR9(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) +# define KRML_MAYBE_FOR9(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 10 <= KRML_UNROLL_MAX -#define KRML_MAYBE_FOR10(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 10, k, x) +# define KRML_MAYBE_FOR10(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 10, k, x) #else -#define KRML_MAYBE_FOR10(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) +# define KRML_MAYBE_FOR10(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 11 <= 
KRML_UNROLL_MAX -#define KRML_MAYBE_FOR11(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 11, k, x) +# define KRML_MAYBE_FOR11(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 11, k, x) #else -#define KRML_MAYBE_FOR11(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) +# define KRML_MAYBE_FOR11(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 12 <= KRML_UNROLL_MAX -#define KRML_MAYBE_FOR12(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 12, k, x) +# define KRML_MAYBE_FOR12(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 12, k, x) #else -#define KRML_MAYBE_FOR12(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) +# define KRML_MAYBE_FOR12(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 13 <= KRML_UNROLL_MAX -#define KRML_MAYBE_FOR13(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 13, k, x) +# define KRML_MAYBE_FOR13(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 13, k, x) #else -#define KRML_MAYBE_FOR13(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) +# define KRML_MAYBE_FOR13(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 14 <= KRML_UNROLL_MAX -#define KRML_MAYBE_FOR14(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 14, k, x) +# define KRML_MAYBE_FOR14(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 14, k, x) #else -#define KRML_MAYBE_FOR14(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) +# define KRML_MAYBE_FOR14(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 15 <= KRML_UNROLL_MAX -#define KRML_MAYBE_FOR15(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 15, k, x) +# define KRML_MAYBE_FOR15(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 15, k, x) #else -#define KRML_MAYBE_FOR15(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) +# define KRML_MAYBE_FOR15(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #if 16 <= KRML_UNROLL_MAX -#define KRML_MAYBE_FOR16(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 16, k, x) +# define KRML_MAYBE_FOR16(i, z, n, k, x) KRML_UNROLL_FOR(i, z, 16, k, x) #else -#define KRML_MAYBE_FOR16(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) +# define KRML_MAYBE_FOR16(i, z, n, k, x) KRML_ACTUAL_FOR(i, z, n, k, x) #endif #endif diff --git a/Modules/_hacl/internal/Hacl_Hash_MD5.h b/Modules/_hacl/internal/Hacl_Hash_MD5.h index 87ad4cf228d91b..a50ec407f53e39 100644 --- a/Modules/_hacl/internal/Hacl_Hash_MD5.h +++ b/Modules/_hacl/internal/Hacl_Hash_MD5.h @@ -37,21 +37,16 @@ extern "C" { #include "../Hacl_Hash_MD5.h" -void Hacl_Hash_Core_MD5_legacy_init(uint32_t *s); +void Hacl_Hash_MD5_init(uint32_t *s); -void Hacl_Hash_Core_MD5_legacy_finish(uint32_t *s, uint8_t *dst); +void Hacl_Hash_MD5_finish(uint32_t *s, uint8_t *dst); -void Hacl_Hash_MD5_legacy_update_multi(uint32_t *s, uint8_t *blocks, uint32_t n_blocks); +void Hacl_Hash_MD5_update_multi(uint32_t *s, uint8_t *blocks, uint32_t n_blocks); void -Hacl_Hash_MD5_legacy_update_last( - uint32_t *s, - uint64_t prev_len, - uint8_t *input, - uint32_t input_len -); - -void Hacl_Hash_MD5_legacy_hash(uint8_t *input, uint32_t input_len, uint8_t *dst); +Hacl_Hash_MD5_update_last(uint32_t *s, uint64_t prev_len, uint8_t *input, uint32_t input_len); + +void Hacl_Hash_MD5_hash_oneshot(uint8_t *output, uint8_t *input, uint32_t input_len); #if defined(__cplusplus) } diff --git a/Modules/_hacl/internal/Hacl_Hash_SHA1.h b/Modules/_hacl/internal/Hacl_Hash_SHA1.h index d2d9df44c6c14c..b39bad3f3b93e8 100644 --- a/Modules/_hacl/internal/Hacl_Hash_SHA1.h +++ b/Modules/_hacl/internal/Hacl_Hash_SHA1.h @@ -37,21 +37,16 @@ extern "C" { #include "../Hacl_Hash_SHA1.h" -void Hacl_Hash_Core_SHA1_legacy_init(uint32_t *s); +void Hacl_Hash_SHA1_init(uint32_t *s); -void Hacl_Hash_Core_SHA1_legacy_finish(uint32_t *s, uint8_t *dst); +void 
Hacl_Hash_SHA1_finish(uint32_t *s, uint8_t *dst); -void Hacl_Hash_SHA1_legacy_update_multi(uint32_t *s, uint8_t *blocks, uint32_t n_blocks); +void Hacl_Hash_SHA1_update_multi(uint32_t *s, uint8_t *blocks, uint32_t n_blocks); void -Hacl_Hash_SHA1_legacy_update_last( - uint32_t *s, - uint64_t prev_len, - uint8_t *input, - uint32_t input_len -); - -void Hacl_Hash_SHA1_legacy_hash(uint8_t *input, uint32_t input_len, uint8_t *dst); +Hacl_Hash_SHA1_update_last(uint32_t *s, uint64_t prev_len, uint8_t *input, uint32_t input_len); + +void Hacl_Hash_SHA1_hash_oneshot(uint8_t *output, uint8_t *input, uint32_t input_len); #if defined(__cplusplus) } diff --git a/Modules/_hacl/internal/Hacl_Hash_SHA2.h b/Modules/_hacl/internal/Hacl_Hash_SHA2.h index 851f7dc60c94c2..0127f4373fb1a1 100644 --- a/Modules/_hacl/internal/Hacl_Hash_SHA2.h +++ b/Modules/_hacl/internal/Hacl_Hash_SHA2.h @@ -40,141 +40,121 @@ extern "C" { static const uint32_t -Hacl_Impl_SHA2_Generic_h224[8U] = +Hacl_Hash_SHA2_h224[8U] = { - (uint32_t)0xc1059ed8U, (uint32_t)0x367cd507U, (uint32_t)0x3070dd17U, (uint32_t)0xf70e5939U, - (uint32_t)0xffc00b31U, (uint32_t)0x68581511U, (uint32_t)0x64f98fa7U, (uint32_t)0xbefa4fa4U + 0xc1059ed8U, 0x367cd507U, 0x3070dd17U, 0xf70e5939U, 0xffc00b31U, 0x68581511U, 0x64f98fa7U, + 0xbefa4fa4U }; static const uint32_t -Hacl_Impl_SHA2_Generic_h256[8U] = +Hacl_Hash_SHA2_h256[8U] = { - (uint32_t)0x6a09e667U, (uint32_t)0xbb67ae85U, (uint32_t)0x3c6ef372U, (uint32_t)0xa54ff53aU, - (uint32_t)0x510e527fU, (uint32_t)0x9b05688cU, (uint32_t)0x1f83d9abU, (uint32_t)0x5be0cd19U + 0x6a09e667U, 0xbb67ae85U, 0x3c6ef372U, 0xa54ff53aU, 0x510e527fU, 0x9b05688cU, 0x1f83d9abU, + 0x5be0cd19U }; static const uint64_t -Hacl_Impl_SHA2_Generic_h384[8U] = +Hacl_Hash_SHA2_h384[8U] = { - (uint64_t)0xcbbb9d5dc1059ed8U, (uint64_t)0x629a292a367cd507U, (uint64_t)0x9159015a3070dd17U, - (uint64_t)0x152fecd8f70e5939U, (uint64_t)0x67332667ffc00b31U, (uint64_t)0x8eb44a8768581511U, - (uint64_t)0xdb0c2e0d64f98fa7U, (uint64_t)0x47b5481dbefa4fa4U + 0xcbbb9d5dc1059ed8ULL, 0x629a292a367cd507ULL, 0x9159015a3070dd17ULL, 0x152fecd8f70e5939ULL, + 0x67332667ffc00b31ULL, 0x8eb44a8768581511ULL, 0xdb0c2e0d64f98fa7ULL, 0x47b5481dbefa4fa4ULL }; static const uint64_t -Hacl_Impl_SHA2_Generic_h512[8U] = +Hacl_Hash_SHA2_h512[8U] = { - (uint64_t)0x6a09e667f3bcc908U, (uint64_t)0xbb67ae8584caa73bU, (uint64_t)0x3c6ef372fe94f82bU, - (uint64_t)0xa54ff53a5f1d36f1U, (uint64_t)0x510e527fade682d1U, (uint64_t)0x9b05688c2b3e6c1fU, - (uint64_t)0x1f83d9abfb41bd6bU, (uint64_t)0x5be0cd19137e2179U + 0x6a09e667f3bcc908ULL, 0xbb67ae8584caa73bULL, 0x3c6ef372fe94f82bULL, 0xa54ff53a5f1d36f1ULL, + 0x510e527fade682d1ULL, 0x9b05688c2b3e6c1fULL, 0x1f83d9abfb41bd6bULL, 0x5be0cd19137e2179ULL }; static const uint32_t -Hacl_Impl_SHA2_Generic_k224_256[64U] = +Hacl_Hash_SHA2_k224_256[64U] = { - (uint32_t)0x428a2f98U, (uint32_t)0x71374491U, (uint32_t)0xb5c0fbcfU, (uint32_t)0xe9b5dba5U, - (uint32_t)0x3956c25bU, (uint32_t)0x59f111f1U, (uint32_t)0x923f82a4U, (uint32_t)0xab1c5ed5U, - (uint32_t)0xd807aa98U, (uint32_t)0x12835b01U, (uint32_t)0x243185beU, (uint32_t)0x550c7dc3U, - (uint32_t)0x72be5d74U, (uint32_t)0x80deb1feU, (uint32_t)0x9bdc06a7U, (uint32_t)0xc19bf174U, - (uint32_t)0xe49b69c1U, (uint32_t)0xefbe4786U, (uint32_t)0x0fc19dc6U, (uint32_t)0x240ca1ccU, - (uint32_t)0x2de92c6fU, (uint32_t)0x4a7484aaU, (uint32_t)0x5cb0a9dcU, (uint32_t)0x76f988daU, - (uint32_t)0x983e5152U, (uint32_t)0xa831c66dU, (uint32_t)0xb00327c8U, (uint32_t)0xbf597fc7U, - (uint32_t)0xc6e00bf3U, (uint32_t)0xd5a79147U, 
(uint32_t)0x06ca6351U, (uint32_t)0x14292967U, - (uint32_t)0x27b70a85U, (uint32_t)0x2e1b2138U, (uint32_t)0x4d2c6dfcU, (uint32_t)0x53380d13U, - (uint32_t)0x650a7354U, (uint32_t)0x766a0abbU, (uint32_t)0x81c2c92eU, (uint32_t)0x92722c85U, - (uint32_t)0xa2bfe8a1U, (uint32_t)0xa81a664bU, (uint32_t)0xc24b8b70U, (uint32_t)0xc76c51a3U, - (uint32_t)0xd192e819U, (uint32_t)0xd6990624U, (uint32_t)0xf40e3585U, (uint32_t)0x106aa070U, - (uint32_t)0x19a4c116U, (uint32_t)0x1e376c08U, (uint32_t)0x2748774cU, (uint32_t)0x34b0bcb5U, - (uint32_t)0x391c0cb3U, (uint32_t)0x4ed8aa4aU, (uint32_t)0x5b9cca4fU, (uint32_t)0x682e6ff3U, - (uint32_t)0x748f82eeU, (uint32_t)0x78a5636fU, (uint32_t)0x84c87814U, (uint32_t)0x8cc70208U, - (uint32_t)0x90befffaU, (uint32_t)0xa4506cebU, (uint32_t)0xbef9a3f7U, (uint32_t)0xc67178f2U + 0x428a2f98U, 0x71374491U, 0xb5c0fbcfU, 0xe9b5dba5U, 0x3956c25bU, 0x59f111f1U, 0x923f82a4U, + 0xab1c5ed5U, 0xd807aa98U, 0x12835b01U, 0x243185beU, 0x550c7dc3U, 0x72be5d74U, 0x80deb1feU, + 0x9bdc06a7U, 0xc19bf174U, 0xe49b69c1U, 0xefbe4786U, 0x0fc19dc6U, 0x240ca1ccU, 0x2de92c6fU, + 0x4a7484aaU, 0x5cb0a9dcU, 0x76f988daU, 0x983e5152U, 0xa831c66dU, 0xb00327c8U, 0xbf597fc7U, + 0xc6e00bf3U, 0xd5a79147U, 0x06ca6351U, 0x14292967U, 0x27b70a85U, 0x2e1b2138U, 0x4d2c6dfcU, + 0x53380d13U, 0x650a7354U, 0x766a0abbU, 0x81c2c92eU, 0x92722c85U, 0xa2bfe8a1U, 0xa81a664bU, + 0xc24b8b70U, 0xc76c51a3U, 0xd192e819U, 0xd6990624U, 0xf40e3585U, 0x106aa070U, 0x19a4c116U, + 0x1e376c08U, 0x2748774cU, 0x34b0bcb5U, 0x391c0cb3U, 0x4ed8aa4aU, 0x5b9cca4fU, 0x682e6ff3U, + 0x748f82eeU, 0x78a5636fU, 0x84c87814U, 0x8cc70208U, 0x90befffaU, 0xa4506cebU, 0xbef9a3f7U, + 0xc67178f2U }; static const uint64_t -Hacl_Impl_SHA2_Generic_k384_512[80U] = +Hacl_Hash_SHA2_k384_512[80U] = { - (uint64_t)0x428a2f98d728ae22U, (uint64_t)0x7137449123ef65cdU, (uint64_t)0xb5c0fbcfec4d3b2fU, - (uint64_t)0xe9b5dba58189dbbcU, (uint64_t)0x3956c25bf348b538U, (uint64_t)0x59f111f1b605d019U, - (uint64_t)0x923f82a4af194f9bU, (uint64_t)0xab1c5ed5da6d8118U, (uint64_t)0xd807aa98a3030242U, - (uint64_t)0x12835b0145706fbeU, (uint64_t)0x243185be4ee4b28cU, (uint64_t)0x550c7dc3d5ffb4e2U, - (uint64_t)0x72be5d74f27b896fU, (uint64_t)0x80deb1fe3b1696b1U, (uint64_t)0x9bdc06a725c71235U, - (uint64_t)0xc19bf174cf692694U, (uint64_t)0xe49b69c19ef14ad2U, (uint64_t)0xefbe4786384f25e3U, - (uint64_t)0x0fc19dc68b8cd5b5U, (uint64_t)0x240ca1cc77ac9c65U, (uint64_t)0x2de92c6f592b0275U, - (uint64_t)0x4a7484aa6ea6e483U, (uint64_t)0x5cb0a9dcbd41fbd4U, (uint64_t)0x76f988da831153b5U, - (uint64_t)0x983e5152ee66dfabU, (uint64_t)0xa831c66d2db43210U, (uint64_t)0xb00327c898fb213fU, - (uint64_t)0xbf597fc7beef0ee4U, (uint64_t)0xc6e00bf33da88fc2U, (uint64_t)0xd5a79147930aa725U, - (uint64_t)0x06ca6351e003826fU, (uint64_t)0x142929670a0e6e70U, (uint64_t)0x27b70a8546d22ffcU, - (uint64_t)0x2e1b21385c26c926U, (uint64_t)0x4d2c6dfc5ac42aedU, (uint64_t)0x53380d139d95b3dfU, - (uint64_t)0x650a73548baf63deU, (uint64_t)0x766a0abb3c77b2a8U, (uint64_t)0x81c2c92e47edaee6U, - (uint64_t)0x92722c851482353bU, (uint64_t)0xa2bfe8a14cf10364U, (uint64_t)0xa81a664bbc423001U, - (uint64_t)0xc24b8b70d0f89791U, (uint64_t)0xc76c51a30654be30U, (uint64_t)0xd192e819d6ef5218U, - (uint64_t)0xd69906245565a910U, (uint64_t)0xf40e35855771202aU, (uint64_t)0x106aa07032bbd1b8U, - (uint64_t)0x19a4c116b8d2d0c8U, (uint64_t)0x1e376c085141ab53U, (uint64_t)0x2748774cdf8eeb99U, - (uint64_t)0x34b0bcb5e19b48a8U, (uint64_t)0x391c0cb3c5c95a63U, (uint64_t)0x4ed8aa4ae3418acbU, - (uint64_t)0x5b9cca4f7763e373U, (uint64_t)0x682e6ff3d6b2b8a3U, (uint64_t)0x748f82ee5defb2fcU, - 
(uint64_t)0x78a5636f43172f60U, (uint64_t)0x84c87814a1f0ab72U, (uint64_t)0x8cc702081a6439ecU, - (uint64_t)0x90befffa23631e28U, (uint64_t)0xa4506cebde82bde9U, (uint64_t)0xbef9a3f7b2c67915U, - (uint64_t)0xc67178f2e372532bU, (uint64_t)0xca273eceea26619cU, (uint64_t)0xd186b8c721c0c207U, - (uint64_t)0xeada7dd6cde0eb1eU, (uint64_t)0xf57d4f7fee6ed178U, (uint64_t)0x06f067aa72176fbaU, - (uint64_t)0x0a637dc5a2c898a6U, (uint64_t)0x113f9804bef90daeU, (uint64_t)0x1b710b35131c471bU, - (uint64_t)0x28db77f523047d84U, (uint64_t)0x32caab7b40c72493U, (uint64_t)0x3c9ebe0a15c9bebcU, - (uint64_t)0x431d67c49c100d4cU, (uint64_t)0x4cc5d4becb3e42b6U, (uint64_t)0x597f299cfc657e2aU, - (uint64_t)0x5fcb6fab3ad6faecU, (uint64_t)0x6c44198c4a475817U + 0x428a2f98d728ae22ULL, 0x7137449123ef65cdULL, 0xb5c0fbcfec4d3b2fULL, 0xe9b5dba58189dbbcULL, + 0x3956c25bf348b538ULL, 0x59f111f1b605d019ULL, 0x923f82a4af194f9bULL, 0xab1c5ed5da6d8118ULL, + 0xd807aa98a3030242ULL, 0x12835b0145706fbeULL, 0x243185be4ee4b28cULL, 0x550c7dc3d5ffb4e2ULL, + 0x72be5d74f27b896fULL, 0x80deb1fe3b1696b1ULL, 0x9bdc06a725c71235ULL, 0xc19bf174cf692694ULL, + 0xe49b69c19ef14ad2ULL, 0xefbe4786384f25e3ULL, 0x0fc19dc68b8cd5b5ULL, 0x240ca1cc77ac9c65ULL, + 0x2de92c6f592b0275ULL, 0x4a7484aa6ea6e483ULL, 0x5cb0a9dcbd41fbd4ULL, 0x76f988da831153b5ULL, + 0x983e5152ee66dfabULL, 0xa831c66d2db43210ULL, 0xb00327c898fb213fULL, 0xbf597fc7beef0ee4ULL, + 0xc6e00bf33da88fc2ULL, 0xd5a79147930aa725ULL, 0x06ca6351e003826fULL, 0x142929670a0e6e70ULL, + 0x27b70a8546d22ffcULL, 0x2e1b21385c26c926ULL, 0x4d2c6dfc5ac42aedULL, 0x53380d139d95b3dfULL, + 0x650a73548baf63deULL, 0x766a0abb3c77b2a8ULL, 0x81c2c92e47edaee6ULL, 0x92722c851482353bULL, + 0xa2bfe8a14cf10364ULL, 0xa81a664bbc423001ULL, 0xc24b8b70d0f89791ULL, 0xc76c51a30654be30ULL, + 0xd192e819d6ef5218ULL, 0xd69906245565a910ULL, 0xf40e35855771202aULL, 0x106aa07032bbd1b8ULL, + 0x19a4c116b8d2d0c8ULL, 0x1e376c085141ab53ULL, 0x2748774cdf8eeb99ULL, 0x34b0bcb5e19b48a8ULL, + 0x391c0cb3c5c95a63ULL, 0x4ed8aa4ae3418acbULL, 0x5b9cca4f7763e373ULL, 0x682e6ff3d6b2b8a3ULL, + 0x748f82ee5defb2fcULL, 0x78a5636f43172f60ULL, 0x84c87814a1f0ab72ULL, 0x8cc702081a6439ecULL, + 0x90befffa23631e28ULL, 0xa4506cebde82bde9ULL, 0xbef9a3f7b2c67915ULL, 0xc67178f2e372532bULL, + 0xca273eceea26619cULL, 0xd186b8c721c0c207ULL, 0xeada7dd6cde0eb1eULL, 0xf57d4f7fee6ed178ULL, + 0x06f067aa72176fbaULL, 0x0a637dc5a2c898a6ULL, 0x113f9804bef90daeULL, 0x1b710b35131c471bULL, + 0x28db77f523047d84ULL, 0x32caab7b40c72493ULL, 0x3c9ebe0a15c9bebcULL, 0x431d67c49c100d4cULL, + 0x4cc5d4becb3e42b6ULL, 0x597f299cfc657e2aULL, 0x5fcb6fab3ad6faecULL, 0x6c44198c4a475817ULL }; -void Hacl_SHA2_Scalar32_sha256_init(uint32_t *hash); +void Hacl_Hash_SHA2_sha256_init(uint32_t *hash); -void Hacl_SHA2_Scalar32_sha256_update_nblocks(uint32_t len, uint8_t *b, uint32_t *st); +void Hacl_Hash_SHA2_sha256_update_nblocks(uint32_t len, uint8_t *b, uint32_t *st); void -Hacl_SHA2_Scalar32_sha256_update_last( - uint64_t totlen, - uint32_t len, - uint8_t *b, - uint32_t *hash -); +Hacl_Hash_SHA2_sha256_update_last(uint64_t totlen, uint32_t len, uint8_t *b, uint32_t *hash); -void Hacl_SHA2_Scalar32_sha256_finish(uint32_t *st, uint8_t *h); +void Hacl_Hash_SHA2_sha256_finish(uint32_t *st, uint8_t *h); -void Hacl_SHA2_Scalar32_sha224_init(uint32_t *hash); +void Hacl_Hash_SHA2_sha224_init(uint32_t *hash); void -Hacl_SHA2_Scalar32_sha224_update_last(uint64_t totlen, uint32_t len, uint8_t *b, uint32_t *st); +Hacl_Hash_SHA2_sha224_update_last(uint64_t totlen, uint32_t len, uint8_t *b, uint32_t *st); -void 
Hacl_SHA2_Scalar32_sha224_finish(uint32_t *st, uint8_t *h); +void Hacl_Hash_SHA2_sha224_finish(uint32_t *st, uint8_t *h); -void Hacl_SHA2_Scalar32_sha512_init(uint64_t *hash); +void Hacl_Hash_SHA2_sha512_init(uint64_t *hash); -void Hacl_SHA2_Scalar32_sha512_update_nblocks(uint32_t len, uint8_t *b, uint64_t *st); +void Hacl_Hash_SHA2_sha512_update_nblocks(uint32_t len, uint8_t *b, uint64_t *st); void -Hacl_SHA2_Scalar32_sha512_update_last( +Hacl_Hash_SHA2_sha512_update_last( FStar_UInt128_uint128 totlen, uint32_t len, uint8_t *b, uint64_t *hash ); -void Hacl_SHA2_Scalar32_sha512_finish(uint64_t *st, uint8_t *h); +void Hacl_Hash_SHA2_sha512_finish(uint64_t *st, uint8_t *h); -void Hacl_SHA2_Scalar32_sha384_init(uint64_t *hash); +void Hacl_Hash_SHA2_sha384_init(uint64_t *hash); -void Hacl_SHA2_Scalar32_sha384_update_nblocks(uint32_t len, uint8_t *b, uint64_t *st); +void Hacl_Hash_SHA2_sha384_update_nblocks(uint32_t len, uint8_t *b, uint64_t *st); void -Hacl_SHA2_Scalar32_sha384_update_last( +Hacl_Hash_SHA2_sha384_update_last( FStar_UInt128_uint128 totlen, uint32_t len, uint8_t *b, uint64_t *st ); -void Hacl_SHA2_Scalar32_sha384_finish(uint64_t *st, uint8_t *h); +void Hacl_Hash_SHA2_sha384_finish(uint64_t *st, uint8_t *h); #if defined(__cplusplus) } diff --git a/Modules/_hacl/internal/Hacl_Hash_SHA3.h b/Modules/_hacl/internal/Hacl_Hash_SHA3.h index 1c9808b8dd497c..b80e81fafb9780 100644 --- a/Modules/_hacl/internal/Hacl_Hash_SHA3.h +++ b/Modules/_hacl/internal/Hacl_Hash_SHA3.h @@ -53,9 +53,9 @@ Hacl_Hash_SHA3_update_last_sha3( uint32_t input_len ); -void Hacl_Impl_SHA3_state_permute(uint64_t *s); +void Hacl_Hash_SHA3_state_permute(uint64_t *s); -void Hacl_Impl_SHA3_loadState(uint32_t rateInBytes, uint8_t *input, uint64_t *s); +void Hacl_Hash_SHA3_loadState(uint32_t rateInBytes, uint8_t *input, uint64_t *s); #if defined(__cplusplus) } diff --git a/Modules/_hacl/python_hacl_namespaces.h b/Modules/_hacl/python_hacl_namespaces.h index 0df236282ac509..684e7fd2fbefbc 100644 --- a/Modules/_hacl/python_hacl_namespaces.h +++ b/Modules/_hacl/python_hacl_namespaces.h @@ -5,59 +5,61 @@ * C's excuse for namespaces: Use globally unique names to avoid linkage * conflicts with builds linking or dynamically loading other code potentially * using HACL* libraries. 
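 *
 * As an illustrative sketch: given a rename such as
 *
 *     #define Hacl_Hash_SHA2_sha256 python_hashlib_Hacl_Hash_SHA2_sha256
 *
 * both the definition compiled from the vendored HACL* sources and every
 * call site in the CPython modules that include this header refer to the
 * symbol python_hashlib_Hacl_Hash_SHA2_sha256; the unprefixed name never
 * reaches the object files, so it cannot collide with another copy of
 * HACL* loaded into the same process.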
+ * + * To make sure this is effective: cd Modules && nm -a *.o | grep Hacl */ -#define Hacl_Streaming_SHA2_state_sha2_224_s python_hashlib_Hacl_Streaming_SHA2_state_sha2_224_s -#define Hacl_Streaming_SHA2_state_sha2_224 python_hashlib_Hacl_Streaming_SHA2_state_sha2_224 -#define Hacl_Streaming_SHA2_state_sha2_256 python_hashlib_Hacl_Streaming_SHA2_state_sha2_256 -#define Hacl_Streaming_SHA2_state_sha2_384_s python_hashlib_Hacl_Streaming_SHA2_state_sha2_384_s -#define Hacl_Streaming_SHA2_state_sha2_384 python_hashlib_Hacl_Streaming_SHA2_state_sha2_384 -#define Hacl_Streaming_SHA2_state_sha2_512 python_hashlib_Hacl_Streaming_SHA2_state_sha2_512 -#define Hacl_Streaming_SHA2_create_in_256 python_hashlib_Hacl_Streaming_SHA2_create_in_256 -#define Hacl_Streaming_SHA2_create_in_224 python_hashlib_Hacl_Streaming_SHA2_create_in_224 -#define Hacl_Streaming_SHA2_create_in_512 python_hashlib_Hacl_Streaming_SHA2_create_in_512 -#define Hacl_Streaming_SHA2_create_in_384 python_hashlib_Hacl_Streaming_SHA2_create_in_384 -#define Hacl_Streaming_SHA2_copy_256 python_hashlib_Hacl_Streaming_SHA2_copy_256 -#define Hacl_Streaming_SHA2_copy_224 python_hashlib_Hacl_Streaming_SHA2_copy_224 -#define Hacl_Streaming_SHA2_copy_512 python_hashlib_Hacl_Streaming_SHA2_copy_512 -#define Hacl_Streaming_SHA2_copy_384 python_hashlib_Hacl_Streaming_SHA2_copy_384 -#define Hacl_Streaming_SHA2_init_256 python_hashlib_Hacl_Streaming_SHA2_init_256 -#define Hacl_Streaming_SHA2_init_224 python_hashlib_Hacl_Streaming_SHA2_init_224 -#define Hacl_Streaming_SHA2_init_512 python_hashlib_Hacl_Streaming_SHA2_init_512 -#define Hacl_Streaming_SHA2_init_384 python_hashlib_Hacl_Streaming_SHA2_init_384 +#define Hacl_Hash_SHA2_state_sha2_224_s python_hashlib_Hacl_Hash_SHA2_state_sha2_224_s +#define Hacl_Hash_SHA2_state_sha2_224 python_hashlib_Hacl_Hash_SHA2_state_sha2_224 +#define Hacl_Hash_SHA2_state_sha2_256 python_hashlib_Hacl_Hash_SHA2_state_sha2_256 +#define Hacl_Hash_SHA2_state_sha2_384_s python_hashlib_Hacl_Hash_SHA2_state_sha2_384_s +#define Hacl_Hash_SHA2_state_sha2_384 python_hashlib_Hacl_Hash_SHA2_state_sha2_384 +#define Hacl_Hash_SHA2_state_sha2_512 python_hashlib_Hacl_Hash_SHA2_state_sha2_512 +#define Hacl_Hash_SHA2_malloc_256 python_hashlib_Hacl_Hash_SHA2_malloc_256 +#define Hacl_Hash_SHA2_malloc_224 python_hashlib_Hacl_Hash_SHA2_malloc_224 +#define Hacl_Hash_SHA2_malloc_512 python_hashlib_Hacl_Hash_SHA2_malloc_512 +#define Hacl_Hash_SHA2_malloc_384 python_hashlib_Hacl_Hash_SHA2_malloc_384 +#define Hacl_Hash_SHA2_copy_256 python_hashlib_Hacl_Hash_SHA2_copy_256 +#define Hacl_Hash_SHA2_copy_224 python_hashlib_Hacl_Hash_SHA2_copy_224 +#define Hacl_Hash_SHA2_copy_512 python_hashlib_Hacl_Hash_SHA2_copy_512 +#define Hacl_Hash_SHA2_copy_384 python_hashlib_Hacl_Hash_SHA2_copy_384 +#define Hacl_Hash_SHA2_init_256 python_hashlib_Hacl_Hash_SHA2_init_256 +#define Hacl_Hash_SHA2_init_224 python_hashlib_Hacl_Hash_SHA2_init_224 +#define Hacl_Hash_SHA2_init_512 python_hashlib_Hacl_Hash_SHA2_init_512 +#define Hacl_Hash_SHA2_init_384 python_hashlib_Hacl_Hash_SHA2_init_384 #define Hacl_SHA2_Scalar32_sha512_init python_hashlib_Hacl_SHA2_Scalar32_sha512_init -#define Hacl_Streaming_SHA2_update_256 python_hashlib_Hacl_Streaming_SHA2_update_256 -#define Hacl_Streaming_SHA2_update_224 python_hashlib_Hacl_Streaming_SHA2_update_224 -#define Hacl_Streaming_SHA2_update_512 python_hashlib_Hacl_Streaming_SHA2_update_512 -#define Hacl_Streaming_SHA2_update_384 python_hashlib_Hacl_Streaming_SHA2_update_384 -#define Hacl_Streaming_SHA2_finish_256 
python_hashlib_Hacl_Streaming_SHA2_finish_256 -#define Hacl_Streaming_SHA2_finish_224 python_hashlib_Hacl_Streaming_SHA2_finish_224 -#define Hacl_Streaming_SHA2_finish_512 python_hashlib_Hacl_Streaming_SHA2_finish_512 -#define Hacl_Streaming_SHA2_finish_384 python_hashlib_Hacl_Streaming_SHA2_finish_384 -#define Hacl_Streaming_SHA2_free_256 python_hashlib_Hacl_Streaming_SHA2_free_256 -#define Hacl_Streaming_SHA2_free_224 python_hashlib_Hacl_Streaming_SHA2_free_224 -#define Hacl_Streaming_SHA2_free_512 python_hashlib_Hacl_Streaming_SHA2_free_512 -#define Hacl_Streaming_SHA2_free_384 python_hashlib_Hacl_Streaming_SHA2_free_384 -#define Hacl_Streaming_SHA2_sha256 python_hashlib_Hacl_Streaming_SHA2_sha256 -#define Hacl_Streaming_SHA2_sha224 python_hashlib_Hacl_Streaming_SHA2_sha224 -#define Hacl_Streaming_SHA2_sha512 python_hashlib_Hacl_Streaming_SHA2_sha512 -#define Hacl_Streaming_SHA2_sha384 python_hashlib_Hacl_Streaming_SHA2_sha384 +#define Hacl_Hash_SHA2_update_256 python_hashlib_Hacl_Hash_SHA2_update_256 +#define Hacl_Hash_SHA2_update_224 python_hashlib_Hacl_Hash_SHA2_update_224 +#define Hacl_Hash_SHA2_update_512 python_hashlib_Hacl_Hash_SHA2_update_512 +#define Hacl_Hash_SHA2_update_384 python_hashlib_Hacl_Hash_SHA2_update_384 +#define Hacl_Hash_SHA2_digest_256 python_hashlib_Hacl_Hash_SHA2_digest_256 +#define Hacl_Hash_SHA2_digest_224 python_hashlib_Hacl_Hash_SHA2_digest_224 +#define Hacl_Hash_SHA2_digest_512 python_hashlib_Hacl_Hash_SHA2_digest_512 +#define Hacl_Hash_SHA2_digest_384 python_hashlib_Hacl_Hash_SHA2_digest_384 +#define Hacl_Hash_SHA2_free_256 python_hashlib_Hacl_Hash_SHA2_free_256 +#define Hacl_Hash_SHA2_free_224 python_hashlib_Hacl_Hash_SHA2_free_224 +#define Hacl_Hash_SHA2_free_512 python_hashlib_Hacl_Hash_SHA2_free_512 +#define Hacl_Hash_SHA2_free_384 python_hashlib_Hacl_Hash_SHA2_free_384 +#define Hacl_Hash_SHA2_sha256 python_hashlib_Hacl_Hash_SHA2_sha256 +#define Hacl_Hash_SHA2_sha224 python_hashlib_Hacl_Hash_SHA2_sha224 +#define Hacl_Hash_SHA2_sha512 python_hashlib_Hacl_Hash_SHA2_sha512 +#define Hacl_Hash_SHA2_sha384 python_hashlib_Hacl_Hash_SHA2_sha384 -#define Hacl_Streaming_MD5_legacy_create_in python_hashlib_Hacl_Streaming_MD5_legacy_create_in -#define Hacl_Streaming_MD5_legacy_init python_hashlib_Hacl_Streaming_MD5_legacy_init -#define Hacl_Streaming_MD5_legacy_update python_hashlib_Hacl_Streaming_MD5_legacy_update -#define Hacl_Streaming_MD5_legacy_finish python_hashlib_Hacl_Streaming_MD5_legacy_finish -#define Hacl_Streaming_MD5_legacy_free python_hashlib_Hacl_Streaming_MD5_legacy_free -#define Hacl_Streaming_MD5_legacy_copy python_hashlib_Hacl_Streaming_MD5_legacy_copy -#define Hacl_Streaming_MD5_legacy_hash python_hashlib_Hacl_Streaming_MD5_legacy_hash +#define Hacl_Hash_MD5_malloc python_hashlib_Hacl_Hash_MD5_malloc +#define Hacl_Hash_MD5_init python_hashlib_Hacl_Hash_MD5_init +#define Hacl_Hash_MD5_update python_hashlib_Hacl_Hash_MD5_update +#define Hacl_Hash_MD5_digest python_hashlib_Hacl_Hash_MD5_digest +#define Hacl_Hash_MD5_free python_hashlib_Hacl_Hash_MD5_free +#define Hacl_Hash_MD5_copy python_hashlib_Hacl_Hash_MD5_copy +#define Hacl_Hash_MD5_hash python_hashlib_Hacl_Hash_MD5_hash -#define Hacl_Streaming_SHA1_legacy_create_in python_hashlib_Hacl_Streaming_SHA1_legacy_create_in -#define Hacl_Streaming_SHA1_legacy_init python_hashlib_Hacl_Streaming_SHA1_legacy_init -#define Hacl_Streaming_SHA1_legacy_update python_hashlib_Hacl_Streaming_SHA1_legacy_update -#define Hacl_Streaming_SHA1_legacy_finish python_hashlib_Hacl_Streaming_SHA1_legacy_finish 
-#define Hacl_Streaming_SHA1_legacy_free python_hashlib_Hacl_Streaming_SHA1_legacy_free -#define Hacl_Streaming_SHA1_legacy_copy python_hashlib_Hacl_Streaming_SHA1_legacy_copy -#define Hacl_Streaming_SHA1_legacy_hash python_hashlib_Hacl_Streaming_SHA1_legacy_hash +#define Hacl_Hash_SHA1_malloc python_hashlib_Hacl_Hash_SHA1_malloc +#define Hacl_Hash_SHA1_init python_hashlib_Hacl_Hash_SHA1_init +#define Hacl_Hash_SHA1_update python_hashlib_Hacl_Hash_SHA1_update +#define Hacl_Hash_SHA1_digest python_hashlib_Hacl_Hash_SHA1_digest +#define Hacl_Hash_SHA1_free python_hashlib_Hacl_Hash_SHA1_free +#define Hacl_Hash_SHA1_copy python_hashlib_Hacl_Hash_SHA1_copy +#define Hacl_Hash_SHA1_hash python_hashlib_Hacl_Hash_SHA1_hash #define Hacl_Hash_SHA3_update_last_sha3 python_hashlib_Hacl_Hash_SHA3_update_last_sha3 #define Hacl_Hash_SHA3_update_multi_sha3 python_hashlib_Hacl_Hash_SHA3_update_multi_sha3 @@ -72,15 +74,16 @@ #define Hacl_SHA3_sha3_512 python_hashlib_Hacl_SHA3_sha3_512 #define Hacl_SHA3_shake128_hacl python_hashlib_Hacl_SHA3_shake128_hacl #define Hacl_SHA3_shake256_hacl python_hashlib_Hacl_SHA3_shake256_hacl -#define Hacl_Streaming_Keccak_block_len python_hashlib_Hacl_Streaming_Keccak_block_len -#define Hacl_Streaming_Keccak_copy python_hashlib_Hacl_Streaming_Keccak_copy -#define Hacl_Streaming_Keccak_finish python_hashlib_Hacl_Streaming_Keccak_finish -#define Hacl_Streaming_Keccak_free python_hashlib_Hacl_Streaming_Keccak_free -#define Hacl_Streaming_Keccak_get_alg python_hashlib_Hacl_Streaming_Keccak_get_alg -#define Hacl_Streaming_Keccak_hash_len python_hashlib_Hacl_Streaming_Keccak_hash_len -#define Hacl_Streaming_Keccak_is_shake python_hashlib_Hacl_Streaming_Keccak_is_shake -#define Hacl_Streaming_Keccak_malloc python_hashlib_Hacl_Streaming_Keccak_malloc -#define Hacl_Streaming_Keccak_reset python_hashlib_Hacl_Streaming_Keccak_reset -#define Hacl_Streaming_Keccak_update python_hashlib_Hacl_Streaming_Keccak_update +#define Hacl_Hash_SHA3_block_len python_hashlib_Hacl_Hash_SHA3_block_len +#define Hacl_Hash_SHA3_copy python_hashlib_Hacl_Hash_SHA3_copy +#define Hacl_Hash_SHA3_digest python_hashlib_Hacl_Hash_SHA3_digest +#define Hacl_Hash_SHA3_free python_hashlib_Hacl_Hash_SHA3_free +#define Hacl_Hash_SHA3_get_alg python_hashlib_Hacl_Hash_SHA3_get_alg +#define Hacl_Hash_SHA3_hash_len python_hashlib_Hacl_Hash_SHA3_hash_len +#define Hacl_Hash_SHA3_is_shake python_hashlib_Hacl_Hash_SHA3_is_shake +#define Hacl_Hash_SHA3_malloc python_hashlib_Hacl_Hash_SHA3_malloc +#define Hacl_Hash_SHA3_reset python_hashlib_Hacl_Hash_SHA3_reset +#define Hacl_Hash_SHA3_update python_hashlib_Hacl_Hash_SHA3_update +#define Hacl_Hash_SHA3_squeeze python_hashlib_Hacl_Hash_SHA3_squeeze #endif // _PYTHON_HACL_NAMESPACES_H diff --git a/Modules/_hacl/refresh.sh b/Modules/_hacl/refresh.sh index c1b3e37f3afb9d..3878e02af31a21 100755 --- a/Modules/_hacl/refresh.sh +++ b/Modules/_hacl/refresh.sh @@ -22,7 +22,7 @@ fi # Update this when updating to a new version after verifying that the changes # the update brings in are good. -expected_hacl_star_rev=521af282fdf6d60227335120f18ae9309a4b8e8c +expected_hacl_star_rev=bb3d0dc8d9d15a5cd51094d5b69e70aa09005ff0 hacl_dir="$(realpath "$1")" cd "$(dirname "$0")" @@ -127,7 +127,7 @@ $sed -i -z 's!\(extern\|typedef\)[^;]*;\n\n!!g' include/krml/FStar_UInt_8_16_32_ $sed -i 's!#include.*Hacl_Krmllib.h"!!g' "${all_files[@]}" # Use globally unique names for the Hacl_ C APIs to avoid linkage conflicts. -$sed -i -z 's!#include \n!#include \n#include "python_hacl_namespaces.h"\n!' 
Hacl_Hash_SHA2.h +$sed -i -z 's!#include \n!#include \n#include "python_hacl_namespaces.h"\n!' Hacl_Hash_*.h # Finally, we remove a bunch of ifdefs from target.h that are, again, useful in # the general case, but not exercised by the subset of HACL* that we vendor. diff --git a/Modules/_interpreters_common.h b/Modules/_interpreters_common.h index 07120f6ccc7207..de9a60ce657e0c 100644 --- a/Modules/_interpreters_common.h +++ b/Modules/_interpreters_common.h @@ -19,3 +19,20 @@ clear_xid_class(PyTypeObject *cls) return _PyCrossInterpreterData_UnregisterClass(cls); } #endif + + +#ifdef RETURNS_INTERPID_OBJECT +static PyObject * +get_interpid_obj(PyInterpreterState *interp) +{ + if (_PyInterpreterState_IDInitref(interp) != 0) { + return NULL; + }; + int64_t id = PyInterpreterState_GetID(interp); + if (id < 0) { + return NULL; + } + assert(id < LLONG_MAX); + return PyLong_FromLongLong(id); +} +#endif diff --git a/Modules/_io/bytesio.c b/Modules/_io/bytesio.c index 4a15c8e841f25f..fb66d3db0f7a1f 100644 --- a/Modules/_io/bytesio.c +++ b/Modules/_io/bytesio.c @@ -155,9 +155,6 @@ resize_buffer(bytesio *self, size_t size) alloc = size + 1; } - if (alloc > ((size_t)-1) / sizeof(char)) - goto overflow; - if (SHARED_BUF(self)) { if (unshare_buffer(self, alloc) < 0) return -1; diff --git a/Modules/_opcode.c b/Modules/_opcode.c index 93c71377f03a76..5350adb456b859 100644 --- a/Modules/_opcode.c +++ b/Modules/_opcode.c @@ -347,6 +347,28 @@ _opcode_get_intrinsic2_descs_impl(PyObject *module) return list; } +/*[clinic input] + +_opcode.get_executor + + code: object + offset: int + +Return the executor object at offset in code if exists, None otherwise. +[clinic start generated code]*/ + +static PyObject * +_opcode_get_executor_impl(PyObject *module, PyObject *code, int offset) +/*[clinic end generated code: output=c035c7a47b16648f input=85eff93ea7aac282]*/ +{ + if (!PyCode_Check(code)) { + PyErr_Format(PyExc_TypeError, + "expected a code object, not '%.100s'", + Py_TYPE(code)->tp_name); + return NULL; + } + return (PyObject *)PyUnstable_GetExecutor((PyCodeObject *)code, offset); +} static PyMethodDef opcode_functions[] = { @@ -363,6 +385,7 @@ opcode_functions[] = { _OPCODE_GET_NB_OPS_METHODDEF _OPCODE_GET_INTRINSIC1_DESCS_METHODDEF _OPCODE_GET_INTRINSIC2_DESCS_METHODDEF + _OPCODE_GET_EXECUTOR_METHODDEF {NULL, NULL, 0, NULL} }; diff --git a/Modules/_posixsubprocess.c b/Modules/_posixsubprocess.c index bcbbe70680b8e7..b160cd78177a17 100644 --- a/Modules/_posixsubprocess.c +++ b/Modules/_posixsubprocess.c @@ -1031,7 +1031,9 @@ subprocess_fork_exec_impl(PyObject *module, PyObject *process_args, Py_ssize_t fds_to_keep_len = PyTuple_GET_SIZE(py_fds_to_keep); PyInterpreterState *interp = _PyInterpreterState_GET(); - if ((preexec_fn != Py_None) && interp->finalizing) { + if ((preexec_fn != Py_None) && + _PyInterpreterState_GetFinalizing(interp) != NULL) + { PyErr_SetString(PyExc_PythonFinalizationError, "preexec_fn not supported at interpreter shutdown"); return NULL; diff --git a/Modules/_ssl.c b/Modules/_ssl.c index d00f407b569fb6..fbf914c4321922 100644 --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -599,7 +599,7 @@ PySSL_ChainExceptions(PySSLSocket *sslsock) { } static PyObject * -PySSL_SetError(PySSLSocket *sslsock, int ret, const char *filename, int lineno) +PySSL_SetError(PySSLSocket *sslsock, const char *filename, int lineno) { PyObject *type; char *errstr = NULL; @@ -612,7 +612,6 @@ PySSL_SetError(PySSLSocket *sslsock, int ret, const char *filename, int lineno) _sslmodulestate *state = get_state_sock(sslsock); 
type = state->PySSLErrorObject; - assert(ret <= 0); e = ERR_peek_last_error(); if (sslsock->ssl != NULL) { @@ -645,32 +644,21 @@ PySSL_SetError(PySSLSocket *sslsock, int ret, const char *filename, int lineno) case SSL_ERROR_SYSCALL: { if (e == 0) { - PySocketSockObject *s = GET_SOCKET(sslsock); - if (ret == 0 || (((PyObject *)s) == Py_None)) { + /* underlying BIO reported an I/O error */ + ERR_clear_error(); +#ifdef MS_WINDOWS + if (err.ws) { + return PyErr_SetFromWindowsErr(err.ws); + } +#endif + if (err.c) { + errno = err.c; + return PyErr_SetFromErrno(PyExc_OSError); + } + else { p = PY_SSL_ERROR_EOF; type = state->PySSLEOFErrorObject; errstr = "EOF occurred in violation of protocol"; - } else if (s && ret == -1) { - /* underlying BIO reported an I/O error */ - ERR_clear_error(); -#ifdef MS_WINDOWS - if (err.ws) { - return PyErr_SetFromWindowsErr(err.ws); - } -#endif - if (err.c) { - errno = err.c; - return PyErr_SetFromErrno(PyExc_OSError); - } - else { - p = PY_SSL_ERROR_EOF; - type = state->PySSLEOFErrorObject; - errstr = "EOF occurred in violation of protocol"; - } - } else { /* possible? */ - p = PY_SSL_ERROR_SYSCALL; - type = state->PySSLSyscallErrorObject; - errstr = "Some I/O error occurred"; } } else { if (ERR_GET_LIB(e) == ERR_LIB_SSL && @@ -1030,7 +1018,7 @@ _ssl__SSLSocket_do_handshake_impl(PySSLSocket *self) err.ssl == SSL_ERROR_WANT_WRITE); Py_XDECREF(sock); if (ret < 1) - return PySSL_SetError(self, ret, __FILE__, __LINE__); + return PySSL_SetError(self, __FILE__, __LINE__); if (PySSL_ChainExceptions(self) < 0) return NULL; Py_RETURN_NONE; @@ -2437,7 +2425,7 @@ _ssl__SSLSocket_write_impl(PySSLSocket *self, Py_buffer *b) Py_XDECREF(sock); if (retval == 0) - return PySSL_SetError(self, retval, __FILE__, __LINE__); + return PySSL_SetError(self, __FILE__, __LINE__); if (PySSL_ChainExceptions(self) < 0) return NULL; return PyLong_FromSize_t(count); @@ -2467,7 +2455,7 @@ _ssl__SSLSocket_pending_impl(PySSLSocket *self) self->err = err; if (count < 0) - return PySSL_SetError(self, count, __FILE__, __LINE__); + return PySSL_SetError(self, __FILE__, __LINE__); else return PyLong_FromLong(count); } @@ -2590,7 +2578,7 @@ _ssl__SSLSocket_read_impl(PySSLSocket *self, Py_ssize_t len, err.ssl == SSL_ERROR_WANT_WRITE); if (retval == 0) { - PySSL_SetError(self, retval, __FILE__, __LINE__); + PySSL_SetError(self, __FILE__, __LINE__); goto error; } if (self->exc != NULL) @@ -2716,7 +2704,7 @@ _ssl__SSLSocket_shutdown_impl(PySSLSocket *self) } if (ret < 0) { Py_XDECREF(sock); - PySSL_SetError(self, ret, __FILE__, __LINE__); + PySSL_SetError(self, __FILE__, __LINE__); return NULL; } if (self->exc != NULL) @@ -3178,7 +3166,6 @@ _ssl__SSLContext_impl(PyTypeObject *type, int proto_version) result = SSL_CTX_set_cipher_list(ctx, "HIGH:!aNULL:!eNULL"); } if (result == 0) { - Py_DECREF(self); ERR_clear_error(); PyErr_SetString(get_state_ctx(self)->PySSLErrorObject, "No cipher can be selected."); diff --git a/Modules/_testcapi/abstract.c b/Modules/_testcapi/abstract.c index a8ba009eb6a54b..b126aee5b9777b 100644 --- a/Modules/_testcapi/abstract.c +++ b/Modules/_testcapi/abstract.c @@ -2,59 +2,6 @@ #include "util.h" -static PyObject * -object_repr(PyObject *self, PyObject *arg) -{ - NULLABLE(arg); - return PyObject_Repr(arg); -} - -static PyObject * -object_ascii(PyObject *self, PyObject *arg) -{ - NULLABLE(arg); - return PyObject_ASCII(arg); -} - -static PyObject * -object_str(PyObject *self, PyObject *arg) -{ - NULLABLE(arg); - return PyObject_Str(arg); -} - -static PyObject * -object_bytes(PyObject 
*self, PyObject *arg) -{ - NULLABLE(arg); - return PyObject_Bytes(arg); -} - -static PyObject * -object_getattr(PyObject *self, PyObject *args) -{ - PyObject *obj, *attr_name; - if (!PyArg_ParseTuple(args, "OO", &obj, &attr_name)) { - return NULL; - } - NULLABLE(obj); - NULLABLE(attr_name); - return PyObject_GetAttr(obj, attr_name); -} - -static PyObject * -object_getattrstring(PyObject *self, PyObject *args) -{ - PyObject *obj; - const char *attr_name; - Py_ssize_t size; - if (!PyArg_ParseTuple(args, "Oz#", &obj, &attr_name, &size)) { - return NULL; - } - NULLABLE(obj); - return PyObject_GetAttrString(obj, attr_name); -} - static PyObject * object_getoptionalattr(PyObject *self, PyObject *args) { @@ -106,31 +53,6 @@ object_getoptionalattrstring(PyObject *self, PyObject *args) } } -static PyObject * -object_hasattr(PyObject *self, PyObject *args) -{ - PyObject *obj, *attr_name; - if (!PyArg_ParseTuple(args, "OO", &obj, &attr_name)) { - return NULL; - } - NULLABLE(obj); - NULLABLE(attr_name); - return PyLong_FromLong(PyObject_HasAttr(obj, attr_name)); -} - -static PyObject * -object_hasattrstring(PyObject *self, PyObject *args) -{ - PyObject *obj; - const char *attr_name; - Py_ssize_t size; - if (!PyArg_ParseTuple(args, "Oz#", &obj, &attr_name, &size)) { - return NULL; - } - NULLABLE(obj); - return PyLong_FromLong(PyObject_HasAttrString(obj, attr_name)); -} - static PyObject * object_hasattrwitherror(PyObject *self, PyObject *args) { @@ -157,121 +79,17 @@ object_hasattrstringwitherror(PyObject *self, PyObject *args) } static PyObject * -object_setattr(PyObject *self, PyObject *args) -{ - PyObject *obj, *attr_name, *value; - if (!PyArg_ParseTuple(args, "OOO", &obj, &attr_name, &value)) { - return NULL; - } - NULLABLE(obj); - NULLABLE(attr_name); - NULLABLE(value); - RETURN_INT(PyObject_SetAttr(obj, attr_name, value)); -} - -static PyObject * -object_setattrstring(PyObject *self, PyObject *args) -{ - PyObject *obj, *value; - const char *attr_name; - Py_ssize_t size; - if (!PyArg_ParseTuple(args, "Oz#O", &obj, &attr_name, &size, &value)) { - return NULL; - } - NULLABLE(obj); - NULLABLE(value); - RETURN_INT(PyObject_SetAttrString(obj, attr_name, value)); -} - -static PyObject * -object_delattr(PyObject *self, PyObject *args) -{ - PyObject *obj, *attr_name; -if (!PyArg_ParseTuple(args, "OO", &obj, &attr_name)) { - return NULL; - } - NULLABLE(obj); - NULLABLE(attr_name); - RETURN_INT(PyObject_DelAttr(obj, attr_name)); -} - -static PyObject * -object_delattrstring(PyObject *self, PyObject *args) +mapping_getoptionalitemstring(PyObject *self, PyObject *args) { - PyObject *obj; + PyObject *obj, *value = UNINITIALIZED_PTR; const char *attr_name; Py_ssize_t size; if (!PyArg_ParseTuple(args, "Oz#", &obj, &attr_name, &size)) { return NULL; } NULLABLE(obj); - RETURN_INT(PyObject_DelAttrString(obj, attr_name)); -} - -static PyObject * -number_check(PyObject *self, PyObject *obj) -{ - NULLABLE(obj); - return PyBool_FromLong(PyNumber_Check(obj)); -} - -static PyObject * -mapping_check(PyObject *self, PyObject *obj) -{ - NULLABLE(obj); - return PyLong_FromLong(PyMapping_Check(obj)); -} - -static PyObject * -mapping_size(PyObject *self, PyObject *obj) -{ - NULLABLE(obj); - RETURN_SIZE(PyMapping_Size(obj)); -} - -static PyObject * -mapping_length(PyObject *self, PyObject *obj) -{ - NULLABLE(obj); - RETURN_SIZE(PyMapping_Length(obj)); -} - -static PyObject * -object_getitem(PyObject *self, PyObject *args) -{ - PyObject *mapping, *key; - if (!PyArg_ParseTuple(args, "OO", &mapping, &key)) { - return NULL; - } - 
NULLABLE(mapping); - NULLABLE(key); - return PyObject_GetItem(mapping, key); -} - -static PyObject * -mapping_getitemstring(PyObject *self, PyObject *args) -{ - PyObject *mapping; - const char *key; - Py_ssize_t size; - if (!PyArg_ParseTuple(args, "Oz#", &mapping, &key, &size)) { - return NULL; - } - NULLABLE(mapping); - return PyMapping_GetItemString(mapping, key); -} - -static PyObject * -mapping_getoptionalitem(PyObject *self, PyObject *args) -{ - PyObject *obj, *attr_name, *value = UNINITIALIZED_PTR; - if (!PyArg_ParseTuple(args, "OO", &obj, &attr_name)) { - return NULL; - } - NULLABLE(obj); - NULLABLE(attr_name); - switch (PyMapping_GetOptionalItem(obj, attr_name, &value)) { + switch (PyMapping_GetOptionalItemString(obj, attr_name, &value)) { case -1: assert(value == NULL); return NULL; @@ -281,23 +99,22 @@ mapping_getoptionalitem(PyObject *self, PyObject *args) case 1: return value; default: - Py_FatalError("PyMapping_GetOptionalItem() returned invalid code"); + Py_FatalError("PyMapping_GetOptionalItemString() returned invalid code"); Py_UNREACHABLE(); } } static PyObject * -mapping_getoptionalitemstring(PyObject *self, PyObject *args) +mapping_getoptionalitem(PyObject *self, PyObject *args) { - PyObject *obj, *value = UNINITIALIZED_PTR; - const char *attr_name; - Py_ssize_t size; - if (!PyArg_ParseTuple(args, "Oz#", &obj, &attr_name, &size)) { + PyObject *obj, *attr_name, *value = UNINITIALIZED_PTR; + if (!PyArg_ParseTuple(args, "OO", &obj, &attr_name)) { return NULL; } NULLABLE(obj); + NULLABLE(attr_name); - switch (PyMapping_GetOptionalItemString(obj, attr_name, &value)) { + switch (PyMapping_GetOptionalItem(obj, attr_name, &value)) { case -1: assert(value == NULL); return NULL; @@ -307,399 +124,19 @@ mapping_getoptionalitemstring(PyObject *self, PyObject *args) case 1: return value; default: - Py_FatalError("PyMapping_GetOptionalItemString() returned invalid code"); + Py_FatalError("PyMapping_GetOptionalItem() returned invalid code"); Py_UNREACHABLE(); } } -static PyObject * -mapping_haskey(PyObject *self, PyObject *args) -{ - PyObject *mapping, *key; - if (!PyArg_ParseTuple(args, "OO", &mapping, &key)) { - return NULL; - } - NULLABLE(mapping); - NULLABLE(key); - return PyLong_FromLong(PyMapping_HasKey(mapping, key)); -} - -static PyObject * -mapping_haskeystring(PyObject *self, PyObject *args) -{ - PyObject *mapping; - const char *key; - Py_ssize_t size; - if (!PyArg_ParseTuple(args, "Oz#", &mapping, &key, &size)) { - return NULL; - } - NULLABLE(mapping); - return PyLong_FromLong(PyMapping_HasKeyString(mapping, key)); -} - -static PyObject * -mapping_haskeywitherror(PyObject *self, PyObject *args) -{ - PyObject *mapping, *key; - if (!PyArg_ParseTuple(args, "OO", &mapping, &key)) { - return NULL; - } - NULLABLE(mapping); - NULLABLE(key); - RETURN_INT(PyMapping_HasKeyWithError(mapping, key)); -} - -static PyObject * -mapping_haskeystringwitherror(PyObject *self, PyObject *args) -{ - PyObject *mapping; - const char *key; - Py_ssize_t size; - if (!PyArg_ParseTuple(args, "Oz#", &mapping, &key, &size)) { - return NULL; - } - NULLABLE(mapping); - RETURN_INT(PyMapping_HasKeyStringWithError(mapping, key)); -} - -static PyObject * -object_setitem(PyObject *self, PyObject *args) -{ - PyObject *mapping, *key, *value; - if (!PyArg_ParseTuple(args, "OOO", &mapping, &key, &value)) { - return NULL; - } - NULLABLE(mapping); - NULLABLE(key); - NULLABLE(value); - RETURN_INT(PyObject_SetItem(mapping, key, value)); -} - -static PyObject * -mapping_setitemstring(PyObject *self, PyObject *args) -{ - 
PyObject *mapping, *value; - const char *key; - Py_ssize_t size; - if (!PyArg_ParseTuple(args, "Oz#O", &mapping, &key, &size, &value)) { - return NULL; - } - NULLABLE(mapping); - NULLABLE(value); - RETURN_INT(PyMapping_SetItemString(mapping, key, value)); -} - -static PyObject * -object_delitem(PyObject *self, PyObject *args) -{ - PyObject *mapping, *key; - if (!PyArg_ParseTuple(args, "OO", &mapping, &key)) { - return NULL; - } - NULLABLE(mapping); - NULLABLE(key); - RETURN_INT(PyObject_DelItem(mapping, key)); -} - -static PyObject * -mapping_delitem(PyObject *self, PyObject *args) -{ - PyObject *mapping, *key; - if (!PyArg_ParseTuple(args, "OO", &mapping, &key)) { - return NULL; - } - NULLABLE(mapping); - NULLABLE(key); - RETURN_INT(PyMapping_DelItem(mapping, key)); -} - -static PyObject * -mapping_delitemstring(PyObject *self, PyObject *args) -{ - PyObject *mapping; - const char *key; - Py_ssize_t size; - if (!PyArg_ParseTuple(args, "Oz#", &mapping, &key, &size)) { - return NULL; - } - NULLABLE(mapping); - RETURN_INT(PyMapping_DelItemString(mapping, key)); -} - -static PyObject * -mapping_keys(PyObject *self, PyObject *obj) -{ - NULLABLE(obj); - return PyMapping_Keys(obj); -} - -static PyObject * -mapping_values(PyObject *self, PyObject *obj) -{ - NULLABLE(obj); - return PyMapping_Values(obj); -} - -static PyObject * -mapping_items(PyObject *self, PyObject *obj) -{ - NULLABLE(obj); - return PyMapping_Items(obj); -} - - -static PyObject * -sequence_check(PyObject* self, PyObject *obj) -{ - NULLABLE(obj); - return PyLong_FromLong(PySequence_Check(obj)); -} - -static PyObject * -sequence_size(PyObject* self, PyObject *obj) -{ - NULLABLE(obj); - RETURN_SIZE(PySequence_Size(obj)); -} - -static PyObject * -sequence_length(PyObject* self, PyObject *obj) -{ - NULLABLE(obj); - RETURN_SIZE(PySequence_Length(obj)); -} - -static PyObject * -sequence_concat(PyObject *self, PyObject *args) -{ - PyObject *seq1, *seq2; - if (!PyArg_ParseTuple(args, "OO", &seq1, &seq2)) { - return NULL; - } - NULLABLE(seq1); - NULLABLE(seq2); - - return PySequence_Concat(seq1, seq2); -} - -static PyObject * -sequence_repeat(PyObject *self, PyObject *args) -{ - PyObject *seq; - Py_ssize_t count; - if (!PyArg_ParseTuple(args, "On", &seq, &count)) { - return NULL; - } - NULLABLE(seq); - - return PySequence_Repeat(seq, count); -} - -static PyObject * -sequence_inplaceconcat(PyObject *self, PyObject *args) -{ - PyObject *seq1, *seq2; - if (!PyArg_ParseTuple(args, "OO", &seq1, &seq2)) { - return NULL; - } - NULLABLE(seq1); - NULLABLE(seq2); - - return PySequence_InPlaceConcat(seq1, seq2); -} - -static PyObject * -sequence_inplacerepeat(PyObject *self, PyObject *args) -{ - PyObject *seq; - Py_ssize_t count; - if (!PyArg_ParseTuple(args, "On", &seq, &count)) { - return NULL; - } - NULLABLE(seq); - - return PySequence_InPlaceRepeat(seq, count); -} - -static PyObject * -sequence_getitem(PyObject *self, PyObject *args) -{ - PyObject *seq; - Py_ssize_t i; - if (!PyArg_ParseTuple(args, "On", &seq, &i)) { - return NULL; - } - NULLABLE(seq); - - return PySequence_GetItem(seq, i); -} - -static PyObject * -sequence_setitem(PyObject *self, PyObject *args) -{ - Py_ssize_t i; - PyObject *seq, *val; - if (!PyArg_ParseTuple(args, "OnO", &seq, &i, &val)) { - return NULL; - } - NULLABLE(seq); - NULLABLE(val); - - RETURN_INT(PySequence_SetItem(seq, i, val)); -} - - -static PyObject * -sequence_delitem(PyObject *self, PyObject *args) -{ - Py_ssize_t i; - PyObject *seq; - if (!PyArg_ParseTuple(args, "On", &seq, &i)) { - return NULL; - } - 
NULLABLE(seq); - - RETURN_INT(PySequence_DelItem(seq, i)); -} - -static PyObject * -sequence_setslice(PyObject* self, PyObject *args) -{ - PyObject *sequence, *obj; - Py_ssize_t i1, i2; - if (!PyArg_ParseTuple(args, "OnnO", &sequence, &i1, &i2, &obj)) { - return NULL; - } - NULLABLE(sequence); - NULLABLE(obj); - - RETURN_INT(PySequence_SetSlice(sequence, i1, i2, obj)); -} - -static PyObject * -sequence_delslice(PyObject *self, PyObject *args) -{ - PyObject *sequence; - Py_ssize_t i1, i2; - if (!PyArg_ParseTuple(args, "Onn", &sequence, &i1, &i2)) { - return NULL; - } - NULLABLE(sequence); - - RETURN_INT(PySequence_DelSlice(sequence, i1, i2)); -} - -static PyObject * -sequence_count(PyObject *self, PyObject *args) -{ - PyObject *seq, *value; - if (!PyArg_ParseTuple(args, "OO", &seq, &value)) { - return NULL; - } - NULLABLE(seq); - NULLABLE(value); - - RETURN_SIZE(PySequence_Count(seq, value)); -} - -static PyObject * -sequence_contains(PyObject *self, PyObject *args) -{ - PyObject *seq, *value; - if (!PyArg_ParseTuple(args, "OO", &seq, &value)) { - return NULL; - } - NULLABLE(seq); - NULLABLE(value); - - RETURN_INT(PySequence_Contains(seq, value)); -} - -static PyObject * -sequence_index(PyObject *self, PyObject *args) -{ - PyObject *seq, *value; - if (!PyArg_ParseTuple(args, "OO", &seq, &value)) { - return NULL; - } - NULLABLE(seq); - NULLABLE(value); - - RETURN_SIZE(PySequence_Index(seq, value)); -} - -static PyObject * -sequence_list(PyObject *self, PyObject *obj) -{ - NULLABLE(obj); - return PySequence_List(obj); -} - -static PyObject * -sequence_tuple(PyObject *self, PyObject *obj) -{ - NULLABLE(obj); - return PySequence_Tuple(obj); -} - static PyMethodDef test_methods[] = { - {"object_repr", object_repr, METH_O}, - {"object_ascii", object_ascii, METH_O}, - {"object_str", object_str, METH_O}, - {"object_bytes", object_bytes, METH_O}, - - {"object_getattr", object_getattr, METH_VARARGS}, - {"object_getattrstring", object_getattrstring, METH_VARARGS}, {"object_getoptionalattr", object_getoptionalattr, METH_VARARGS}, {"object_getoptionalattrstring", object_getoptionalattrstring, METH_VARARGS}, - {"object_hasattr", object_hasattr, METH_VARARGS}, - {"object_hasattrstring", object_hasattrstring, METH_VARARGS}, {"object_hasattrwitherror", object_hasattrwitherror, METH_VARARGS}, {"object_hasattrstringwitherror", object_hasattrstringwitherror, METH_VARARGS}, - {"object_setattr", object_setattr, METH_VARARGS}, - {"object_setattrstring", object_setattrstring, METH_VARARGS}, - {"object_delattr", object_delattr, METH_VARARGS}, - {"object_delattrstring", object_delattrstring, METH_VARARGS}, - - {"number_check", number_check, METH_O}, - {"mapping_check", mapping_check, METH_O}, - {"mapping_size", mapping_size, METH_O}, - {"mapping_length", mapping_length, METH_O}, - {"object_getitem", object_getitem, METH_VARARGS}, - {"mapping_getitemstring", mapping_getitemstring, METH_VARARGS}, {"mapping_getoptionalitem", mapping_getoptionalitem, METH_VARARGS}, {"mapping_getoptionalitemstring", mapping_getoptionalitemstring, METH_VARARGS}, - {"mapping_haskey", mapping_haskey, METH_VARARGS}, - {"mapping_haskeystring", mapping_haskeystring, METH_VARARGS}, - {"mapping_haskeywitherror", mapping_haskeywitherror, METH_VARARGS}, - {"mapping_haskeystringwitherror", mapping_haskeystringwitherror, METH_VARARGS}, - {"object_setitem", object_setitem, METH_VARARGS}, - {"mapping_setitemstring", mapping_setitemstring, METH_VARARGS}, - {"object_delitem", object_delitem, METH_VARARGS}, - {"mapping_delitem", mapping_delitem, 
METH_VARARGS}, - {"mapping_delitemstring", mapping_delitemstring, METH_VARARGS}, - {"mapping_keys", mapping_keys, METH_O}, - {"mapping_values", mapping_values, METH_O}, - {"mapping_items", mapping_items, METH_O}, - - {"sequence_check", sequence_check, METH_O}, - {"sequence_size", sequence_size, METH_O}, - {"sequence_length", sequence_length, METH_O}, - {"sequence_concat", sequence_concat, METH_VARARGS}, - {"sequence_repeat", sequence_repeat, METH_VARARGS}, - {"sequence_inplaceconcat", sequence_inplaceconcat, METH_VARARGS}, - {"sequence_inplacerepeat", sequence_inplacerepeat, METH_VARARGS}, - {"sequence_getitem", sequence_getitem, METH_VARARGS}, - {"sequence_setitem", sequence_setitem, METH_VARARGS}, - {"sequence_delitem", sequence_delitem, METH_VARARGS}, - {"sequence_setslice", sequence_setslice, METH_VARARGS}, - {"sequence_delslice", sequence_delslice, METH_VARARGS}, - {"sequence_count", sequence_count, METH_VARARGS}, - {"sequence_contains", sequence_contains, METH_VARARGS}, - {"sequence_index", sequence_index, METH_VARARGS}, - {"sequence_list", sequence_list, METH_O}, - {"sequence_tuple", sequence_tuple, METH_O}, {NULL}, }; diff --git a/Modules/_testcapi/bytes.c b/Modules/_testcapi/bytes.c new file mode 100644 index 00000000000000..02294d8887abb7 --- /dev/null +++ b/Modules/_testcapi/bytes.c @@ -0,0 +1,53 @@ +#include "parts.h" +#include "util.h" + + +/* Test _PyBytes_Resize() */ +static PyObject * +bytes_resize(PyObject *Py_UNUSED(module), PyObject *args) +{ + PyObject *obj; + Py_ssize_t newsize; + int new; + + if (!PyArg_ParseTuple(args, "Onp", &obj, &newsize, &new)) + return NULL; + + NULLABLE(obj); + if (new) { + assert(obj != NULL); + assert(PyBytes_CheckExact(obj)); + PyObject *newobj = PyBytes_FromStringAndSize(NULL, PyBytes_Size(obj)); + if (newobj == NULL) { + return NULL; + } + memcpy(PyBytes_AsString(newobj), PyBytes_AsString(obj), PyBytes_Size(obj)); + obj = newobj; + } + else { + Py_XINCREF(obj); + } + if (_PyBytes_Resize(&obj, newsize) < 0) { + assert(obj == NULL); + } + else { + assert(obj != NULL); + } + return obj; +} + + +static PyMethodDef test_methods[] = { + {"bytes_resize", bytes_resize, METH_VARARGS}, + {NULL}, +}; + +int +_PyTestCapi_Init_Bytes(PyObject *m) +{ + if (PyModule_AddFunctions(m, test_methods) < 0) { + return -1; + } + + return 0; +} diff --git a/Modules/_testcapi/clinic/long.c.h b/Modules/_testcapi/clinic/long.c.h index e2f7042be12c48..767c671abb8eae 100644 --- a/Modules/_testcapi/clinic/long.c.h +++ b/Modules/_testcapi/clinic/long.c.h @@ -2,137 +2,6 @@ preserve [clinic start generated code]*/ -PyDoc_STRVAR(_testcapi_test_long_api__doc__, -"test_long_api($module, /)\n" -"--\n" -"\n"); - -#define _TESTCAPI_TEST_LONG_API_METHODDEF \ - {"test_long_api", (PyCFunction)_testcapi_test_long_api, METH_NOARGS, _testcapi_test_long_api__doc__}, - -static PyObject * -_testcapi_test_long_api_impl(PyObject *module); - -static PyObject * -_testcapi_test_long_api(PyObject *module, PyObject *Py_UNUSED(ignored)) -{ - return _testcapi_test_long_api_impl(module); -} - -PyDoc_STRVAR(_testcapi_test_longlong_api__doc__, -"test_longlong_api($module, /)\n" -"--\n" -"\n"); - -#define _TESTCAPI_TEST_LONGLONG_API_METHODDEF \ - {"test_longlong_api", (PyCFunction)_testcapi_test_longlong_api, METH_NOARGS, _testcapi_test_longlong_api__doc__}, - -static PyObject * -_testcapi_test_longlong_api_impl(PyObject *module); - -static PyObject * -_testcapi_test_longlong_api(PyObject *module, PyObject *Py_UNUSED(ignored)) -{ - return _testcapi_test_longlong_api_impl(module); -} - 
-PyDoc_STRVAR(_testcapi_test_long_and_overflow__doc__, -"test_long_and_overflow($module, /)\n" -"--\n" -"\n" -"Test the PyLong_AsLongAndOverflow API.\n" -"\n" -"General conversion to PY_LONG is tested by test_long_api_inner.\n" -"This test will concentrate on proper handling of overflow."); - -#define _TESTCAPI_TEST_LONG_AND_OVERFLOW_METHODDEF \ - {"test_long_and_overflow", (PyCFunction)_testcapi_test_long_and_overflow, METH_NOARGS, _testcapi_test_long_and_overflow__doc__}, - -static PyObject * -_testcapi_test_long_and_overflow_impl(PyObject *module); - -static PyObject * -_testcapi_test_long_and_overflow(PyObject *module, PyObject *Py_UNUSED(ignored)) -{ - return _testcapi_test_long_and_overflow_impl(module); -} - -PyDoc_STRVAR(_testcapi_test_long_long_and_overflow__doc__, -"test_long_long_and_overflow($module, /)\n" -"--\n" -"\n" -"Test the PyLong_AsLongLongAndOverflow API.\n" -"\n" -"General conversion to long long is tested by test_long_api_inner.\n" -"This test will concentrate on proper handling of overflow."); - -#define _TESTCAPI_TEST_LONG_LONG_AND_OVERFLOW_METHODDEF \ - {"test_long_long_and_overflow", (PyCFunction)_testcapi_test_long_long_and_overflow, METH_NOARGS, _testcapi_test_long_long_and_overflow__doc__}, - -static PyObject * -_testcapi_test_long_long_and_overflow_impl(PyObject *module); - -static PyObject * -_testcapi_test_long_long_and_overflow(PyObject *module, PyObject *Py_UNUSED(ignored)) -{ - return _testcapi_test_long_long_and_overflow_impl(module); -} - -PyDoc_STRVAR(_testcapi_test_long_as_size_t__doc__, -"test_long_as_size_t($module, /)\n" -"--\n" -"\n" -"Test the PyLong_As{Size,Ssize}_t API.\n" -"\n" -"At present this just tests that non-integer arguments are handled correctly.\n" -"It should be extended to test overflow handling."); - -#define _TESTCAPI_TEST_LONG_AS_SIZE_T_METHODDEF \ - {"test_long_as_size_t", (PyCFunction)_testcapi_test_long_as_size_t, METH_NOARGS, _testcapi_test_long_as_size_t__doc__}, - -static PyObject * -_testcapi_test_long_as_size_t_impl(PyObject *module); - -static PyObject * -_testcapi_test_long_as_size_t(PyObject *module, PyObject *Py_UNUSED(ignored)) -{ - return _testcapi_test_long_as_size_t_impl(module); -} - -PyDoc_STRVAR(_testcapi_test_long_as_unsigned_long_long_mask__doc__, -"test_long_as_unsigned_long_long_mask($module, /)\n" -"--\n" -"\n"); - -#define _TESTCAPI_TEST_LONG_AS_UNSIGNED_LONG_LONG_MASK_METHODDEF \ - {"test_long_as_unsigned_long_long_mask", (PyCFunction)_testcapi_test_long_as_unsigned_long_long_mask, METH_NOARGS, _testcapi_test_long_as_unsigned_long_long_mask__doc__}, - -static PyObject * -_testcapi_test_long_as_unsigned_long_long_mask_impl(PyObject *module); - -static PyObject * -_testcapi_test_long_as_unsigned_long_long_mask(PyObject *module, PyObject *Py_UNUSED(ignored)) -{ - return _testcapi_test_long_as_unsigned_long_long_mask_impl(module); -} - -PyDoc_STRVAR(_testcapi_test_long_as_double__doc__, -"test_long_as_double($module, /)\n" -"--\n" -"\n"); - -#define _TESTCAPI_TEST_LONG_AS_DOUBLE_METHODDEF \ - {"test_long_as_double", (PyCFunction)_testcapi_test_long_as_double, METH_NOARGS, _testcapi_test_long_as_double__doc__}, - -static PyObject * -_testcapi_test_long_as_double_impl(PyObject *module); - -static PyObject * -_testcapi_test_long_as_double(PyObject *module, PyObject *Py_UNUSED(ignored)) -{ - return _testcapi_test_long_as_double_impl(module); -} - PyDoc_STRVAR(_testcapi_call_long_compact_api__doc__, "call_long_compact_api($module, arg, /)\n" "--\n" @@ -140,12 +9,4 @@ 
PyDoc_STRVAR(_testcapi_call_long_compact_api__doc__, #define _TESTCAPI_CALL_LONG_COMPACT_API_METHODDEF \ {"call_long_compact_api", (PyCFunction)_testcapi_call_long_compact_api, METH_O, _testcapi_call_long_compact_api__doc__}, - -PyDoc_STRVAR(_testcapi_PyLong_AsInt__doc__, -"PyLong_AsInt($module, arg, /)\n" -"--\n" -"\n"); - -#define _TESTCAPI_PYLONG_ASINT_METHODDEF \ - {"PyLong_AsInt", (PyCFunction)_testcapi_PyLong_AsInt, METH_O, _testcapi_PyLong_AsInt__doc__}, -/*[clinic end generated code: output=de762870526e241d input=a9049054013a1b77]*/ +/*[clinic end generated code: output=0ddcbc6ea06e5e21 input=a9049054013a1b77]*/ diff --git a/Modules/_testcapi/complex.c b/Modules/_testcapi/complex.c index 4a70217eb90d62..eceb1310bfe874 100644 --- a/Modules/_testcapi/complex.c +++ b/Modules/_testcapi/complex.c @@ -2,20 +2,6 @@ #include "util.h" -static PyObject * -complex_check(PyObject *Py_UNUSED(module), PyObject *obj) -{ - NULLABLE(obj); - return PyLong_FromLong(PyComplex_Check(obj)); -} - -static PyObject * -complex_checkexact(PyObject *Py_UNUSED(module), PyObject *obj) -{ - NULLABLE(obj); - return PyLong_FromLong(PyComplex_CheckExact(obj)); -} - static PyObject * complex_fromccomplex(PyObject *Py_UNUSED(module), PyObject *obj) { @@ -28,48 +14,6 @@ complex_fromccomplex(PyObject *Py_UNUSED(module), PyObject *obj) return PyComplex_FromCComplex(complex); } -static PyObject * -complex_fromdoubles(PyObject *Py_UNUSED(module), PyObject *args) -{ - double real, imag; - - if (!PyArg_ParseTuple(args, "dd", &real, &imag)) { - return NULL; - } - - return PyComplex_FromDoubles(real, imag); -} - -static PyObject * -complex_realasdouble(PyObject *Py_UNUSED(module), PyObject *obj) -{ - double real; - - NULLABLE(obj); - real = PyComplex_RealAsDouble(obj); - - if (real == -1. && PyErr_Occurred()) { - return NULL; - } - - return PyFloat_FromDouble(real); -} - -static PyObject * -complex_imagasdouble(PyObject *Py_UNUSED(module), PyObject *obj) -{ - double imag; - - NULLABLE(obj); - imag = PyComplex_ImagAsDouble(obj); - - if (imag == -1. 
&& PyErr_Occurred()) { - return NULL; - } - - return PyFloat_FromDouble(imag); -} - static PyObject * complex_asccomplex(PyObject *Py_UNUSED(module), PyObject *obj) { @@ -139,12 +83,7 @@ _py_c_abs(PyObject *Py_UNUSED(module), PyObject* obj) static PyMethodDef test_methods[] = { - {"complex_check", complex_check, METH_O}, - {"complex_checkexact", complex_checkexact, METH_O}, {"complex_fromccomplex", complex_fromccomplex, METH_O}, - {"complex_fromdoubles", complex_fromdoubles, METH_VARARGS}, - {"complex_realasdouble", complex_realasdouble, METH_O}, - {"complex_imagasdouble", complex_imagasdouble, METH_O}, {"complex_asccomplex", complex_asccomplex, METH_O}, {"_py_c_sum", _py_c_sum, METH_VARARGS}, {"_py_c_diff", _py_c_diff, METH_VARARGS}, diff --git a/Modules/_testcapi/dict.c b/Modules/_testcapi/dict.c index fe03c24f75e196..4319906dc4fee0 100644 --- a/Modules/_testcapi/dict.c +++ b/Modules/_testcapi/dict.c @@ -2,59 +2,6 @@ #include "util.h" -static PyObject * -dict_check(PyObject *self, PyObject *obj) -{ - NULLABLE(obj); - return PyLong_FromLong(PyDict_Check(obj)); -} - -static PyObject * -dict_checkexact(PyObject *self, PyObject *obj) -{ - NULLABLE(obj); - return PyLong_FromLong(PyDict_CheckExact(obj)); -} - -static PyObject * -dict_new(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - return PyDict_New(); -} - -static PyObject * -dictproxy_new(PyObject *self, PyObject *obj) -{ - NULLABLE(obj); - return PyDictProxy_New(obj); -} - -static PyObject * -dict_clear(PyObject *self, PyObject *obj) -{ - PyDict_Clear(obj); - Py_RETURN_NONE; -} - -static PyObject * -dict_copy(PyObject *self, PyObject *obj) -{ - NULLABLE(obj); - return PyDict_Copy(obj); -} - -static PyObject * -dict_contains(PyObject *self, PyObject *args) -{ - PyObject *obj, *key; - if (!PyArg_ParseTuple(args, "OO", &obj, &key)) { - return NULL; - } - NULLABLE(obj); - NULLABLE(key); - RETURN_INT(PyDict_Contains(obj, key)); -} - static PyObject * dict_containsstring(PyObject *self, PyObject *args) { @@ -68,72 +15,6 @@ dict_containsstring(PyObject *self, PyObject *args) RETURN_INT(PyDict_ContainsString(obj, key)); } -static PyObject * -dict_size(PyObject *self, PyObject *obj) -{ - NULLABLE(obj); - RETURN_SIZE(PyDict_Size(obj)); -} - -static PyObject * -dict_getitem(PyObject *self, PyObject *args) -{ - PyObject *mapping, *key; - if (!PyArg_ParseTuple(args, "OO", &mapping, &key)) { - return NULL; - } - NULLABLE(mapping); - NULLABLE(key); - PyObject *value = PyDict_GetItem(mapping, key); - if (value == NULL) { - if (PyErr_Occurred()) { - return NULL; - } - return Py_NewRef(PyExc_KeyError); - } - return Py_NewRef(value); -} - -static PyObject * -dict_getitemstring(PyObject *self, PyObject *args) -{ - PyObject *mapping; - const char *key; - Py_ssize_t size; - if (!PyArg_ParseTuple(args, "Oz#", &mapping, &key, &size)) { - return NULL; - } - NULLABLE(mapping); - PyObject *value = PyDict_GetItemString(mapping, key); - if (value == NULL) { - if (PyErr_Occurred()) { - return NULL; - } - return Py_NewRef(PyExc_KeyError); - } - return Py_NewRef(value); -} - -static PyObject * -dict_getitemwitherror(PyObject *self, PyObject *args) -{ - PyObject *mapping, *key; - if (!PyArg_ParseTuple(args, "OO", &mapping, &key)) { - return NULL; - } - NULLABLE(mapping); - NULLABLE(key); - PyObject *value = PyDict_GetItemWithError(mapping, key); - if (value == NULL) { - if (PyErr_Occurred()) { - return NULL; - } - return Py_NewRef(PyExc_KeyError); - } - return Py_NewRef(value); -} - - static PyObject * dict_getitemref(PyObject *self, PyObject *args) { @@ -185,33 
+66,6 @@ dict_getitemstringref(PyObject *self, PyObject *args) } } -static PyObject * -dict_setitem(PyObject *self, PyObject *args) -{ - PyObject *mapping, *key, *value; - if (!PyArg_ParseTuple(args, "OOO", &mapping, &key, &value)) { - return NULL; - } - NULLABLE(mapping); - NULLABLE(key); - NULLABLE(value); - RETURN_INT(PyDict_SetItem(mapping, key, value)); -} - -static PyObject * -dict_setitemstring(PyObject *self, PyObject *args) -{ - PyObject *mapping, *value; - const char *key; - Py_ssize_t size; - if (!PyArg_ParseTuple(args, "Oz#O", &mapping, &key, &size, &value)) { - return NULL; - } - NULLABLE(mapping); - NULLABLE(value); - RETURN_INT(PyDict_SetItemString(mapping, key, value)); -} - static PyObject * dict_setdefault(PyObject *self, PyObject *args) { @@ -250,112 +104,6 @@ dict_setdefaultref(PyObject *self, PyObject *args) } } -static PyObject * -dict_delitem(PyObject *self, PyObject *args) -{ - PyObject *mapping, *key; - if (!PyArg_ParseTuple(args, "OO", &mapping, &key)) { - return NULL; - } - NULLABLE(mapping); - NULLABLE(key); - RETURN_INT(PyDict_DelItem(mapping, key)); -} - -static PyObject * -dict_delitemstring(PyObject *self, PyObject *args) -{ - PyObject *mapping; - const char *key; - Py_ssize_t size; - if (!PyArg_ParseTuple(args, "Oz#", &mapping, &key, &size)) { - return NULL; - } - NULLABLE(mapping); - RETURN_INT(PyDict_DelItemString(mapping, key)); -} - -static PyObject * -dict_keys(PyObject *self, PyObject *obj) -{ - NULLABLE(obj); - return PyDict_Keys(obj); -} - -static PyObject * -dict_values(PyObject *self, PyObject *obj) -{ - NULLABLE(obj); - return PyDict_Values(obj); -} - -static PyObject * -dict_items(PyObject *self, PyObject *obj) -{ - NULLABLE(obj); - return PyDict_Items(obj); -} - -static PyObject * -dict_next(PyObject *self, PyObject *args) -{ - PyObject *mapping, *key = UNINITIALIZED_PTR, *value = UNINITIALIZED_PTR; - Py_ssize_t pos; - if (!PyArg_ParseTuple(args, "On", &mapping, &pos)) { - return NULL; - } - NULLABLE(mapping); - int rc = PyDict_Next(mapping, &pos, &key, &value); - if (rc != 0) { - return Py_BuildValue("inOO", rc, pos, key, value); - } - assert(key == UNINITIALIZED_PTR); - assert(value == UNINITIALIZED_PTR); - if (PyErr_Occurred()) { - return NULL; - } - Py_RETURN_NONE; -} - -static PyObject * -dict_merge(PyObject *self, PyObject *args) -{ - PyObject *mapping, *mapping2; - int override; - if (!PyArg_ParseTuple(args, "OOi", &mapping, &mapping2, &override)) { - return NULL; - } - NULLABLE(mapping); - NULLABLE(mapping2); - RETURN_INT(PyDict_Merge(mapping, mapping2, override)); -} - -static PyObject * -dict_update(PyObject *self, PyObject *args) -{ - PyObject *mapping, *mapping2; - if (!PyArg_ParseTuple(args, "OO", &mapping, &mapping2)) { - return NULL; - } - NULLABLE(mapping); - NULLABLE(mapping2); - RETURN_INT(PyDict_Update(mapping, mapping2)); -} - -static PyObject * -dict_mergefromseq2(PyObject *self, PyObject *args) -{ - PyObject *mapping, *seq; - int override; - if (!PyArg_ParseTuple(args, "OOi", &mapping, &seq, &override)) { - return NULL; - } - NULLABLE(mapping); - NULLABLE(seq); - RETURN_INT(PyDict_MergeFromSeq2(mapping, seq, override)); -} - - static PyObject * dict_pop(PyObject *self, PyObject *args) { @@ -382,7 +130,6 @@ dict_pop(PyObject *self, PyObject *args) return Py_BuildValue("iN", res, result); } - static PyObject * dict_pop_null(PyObject *self, PyObject *args) { @@ -396,7 +143,6 @@ dict_pop_null(PyObject *self, PyObject *args) RETURN_INT(PyDict_Pop(dict, key, NULL)); } - static PyObject * dict_popstring(PyObject *self, PyObject 
*args) { @@ -423,7 +169,6 @@ dict_popstring(PyObject *self, PyObject *args) return Py_BuildValue("iN", res, result); } - static PyObject * dict_popstring_null(PyObject *self, PyObject *args) { @@ -439,33 +184,11 @@ dict_popstring_null(PyObject *self, PyObject *args) static PyMethodDef test_methods[] = { - {"dict_check", dict_check, METH_O}, - {"dict_checkexact", dict_checkexact, METH_O}, - {"dict_new", dict_new, METH_NOARGS}, - {"dictproxy_new", dictproxy_new, METH_O}, - {"dict_clear", dict_clear, METH_O}, - {"dict_copy", dict_copy, METH_O}, - {"dict_size", dict_size, METH_O}, - {"dict_getitem", dict_getitem, METH_VARARGS}, - {"dict_getitemwitherror", dict_getitemwitherror, METH_VARARGS}, - {"dict_getitemstring", dict_getitemstring, METH_VARARGS}, + {"dict_containsstring", dict_containsstring, METH_VARARGS}, {"dict_getitemref", dict_getitemref, METH_VARARGS}, {"dict_getitemstringref", dict_getitemstringref, METH_VARARGS}, - {"dict_contains", dict_contains, METH_VARARGS}, - {"dict_containsstring", dict_containsstring, METH_VARARGS}, - {"dict_setitem", dict_setitem, METH_VARARGS}, - {"dict_setitemstring", dict_setitemstring, METH_VARARGS}, - {"dict_delitem", dict_delitem, METH_VARARGS}, - {"dict_delitemstring", dict_delitemstring, METH_VARARGS}, {"dict_setdefault", dict_setdefault, METH_VARARGS}, {"dict_setdefaultref", dict_setdefaultref, METH_VARARGS}, - {"dict_keys", dict_keys, METH_O}, - {"dict_values", dict_values, METH_O}, - {"dict_items", dict_items, METH_O}, - {"dict_next", dict_next, METH_VARARGS}, - {"dict_merge", dict_merge, METH_VARARGS}, - {"dict_update", dict_update, METH_VARARGS}, - {"dict_mergefromseq2", dict_mergefromseq2, METH_VARARGS}, {"dict_pop", dict_pop, METH_VARARGS}, {"dict_pop_null", dict_pop_null, METH_VARARGS}, {"dict_popstring", dict_popstring, METH_VARARGS}, diff --git a/Modules/_testcapi/float.c b/Modules/_testcapi/float.c index 4fcbaf3bb2aa1e..15ea97ec4520b7 100644 --- a/Modules/_testcapi/float.c +++ b/Modules/_testcapi/float.c @@ -6,71 +6,6 @@ #include "clinic/float.c.h" -static PyObject * -float_check(PyObject *Py_UNUSED(module), PyObject *obj) -{ - NULLABLE(obj); - return PyLong_FromLong(PyFloat_Check(obj)); -} - -static PyObject * -float_checkexact(PyObject *Py_UNUSED(module), PyObject *obj) -{ - NULLABLE(obj); - return PyLong_FromLong(PyFloat_CheckExact(obj)); -} - -static PyObject * -float_fromstring(PyObject *Py_UNUSED(module), PyObject *obj) -{ - NULLABLE(obj); - return PyFloat_FromString(obj); -} - -static PyObject * -float_fromdouble(PyObject *Py_UNUSED(module), PyObject *obj) -{ - double d; - - if (!PyArg_Parse(obj, "d", &d)) { - return NULL; - } - - return PyFloat_FromDouble(d); -} - -static PyObject * -float_asdouble(PyObject *Py_UNUSED(module), PyObject *obj) -{ - double d; - - NULLABLE(obj); - d = PyFloat_AsDouble(obj); - if (d == -1. 
&& PyErr_Occurred()) { - return NULL; - } - - return PyFloat_FromDouble(d); -} - -static PyObject * -float_getinfo(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(arg)) -{ - return PyFloat_GetInfo(); -} - -static PyObject * -float_getmax(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(arg)) -{ - return PyFloat_FromDouble(PyFloat_GetMax()); -} - -static PyObject * -float_getmin(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(arg)) -{ - return PyFloat_FromDouble(PyFloat_GetMin()); -} - /*[clinic input] module _testcapi [clinic start generated code]*/ @@ -165,14 +100,6 @@ _testcapi_float_unpack_impl(PyObject *module, const char *data, } static PyMethodDef test_methods[] = { - {"float_check", float_check, METH_O}, - {"float_checkexact", float_checkexact, METH_O}, - {"float_fromstring", float_fromstring, METH_O}, - {"float_fromdouble", float_fromdouble, METH_O}, - {"float_asdouble", float_asdouble, METH_O}, - {"float_getinfo", float_getinfo, METH_NOARGS}, - {"float_getmax", float_getmax, METH_NOARGS}, - {"float_getmin", float_getmin, METH_NOARGS}, _TESTCAPI_FLOAT_PACK_METHODDEF _TESTCAPI_FLOAT_UNPACK_METHODDEF {NULL}, diff --git a/Modules/_testcapi/hash.c b/Modules/_testcapi/hash.c index aee76787dcddb3..809d537bfef0d3 100644 --- a/Modules/_testcapi/hash.c +++ b/Modules/_testcapi/hash.c @@ -59,9 +59,20 @@ hash_pointer(PyObject *Py_UNUSED(module), PyObject *arg) } +static PyObject * +object_generichash(PyObject *Py_UNUSED(module), PyObject *arg) +{ + NULLABLE(arg); + Py_hash_t hash = PyObject_GenericHash(arg); + Py_BUILD_ASSERT(sizeof(long long) >= sizeof(hash)); + return PyLong_FromLongLong(hash); +} + + static PyMethodDef test_methods[] = { {"hash_getfuncdef", hash_getfuncdef, METH_NOARGS}, {"hash_pointer", hash_pointer, METH_O}, + {"object_generichash", object_generichash, METH_O}, {NULL}, }; diff --git a/Modules/_testcapi/list.c b/Modules/_testcapi/list.c index 2cb6499e28336d..09cec4c30c8c36 100644 --- a/Modules/_testcapi/list.c +++ b/Modules/_testcapi/list.c @@ -1,32 +1,6 @@ #include "parts.h" #include "util.h" -static PyObject * -list_check(PyObject* Py_UNUSED(module), PyObject *obj) -{ - NULLABLE(obj); - return PyLong_FromLong(PyList_Check(obj)); -} - -static PyObject * -list_check_exact(PyObject* Py_UNUSED(module), PyObject *obj) -{ - NULLABLE(obj); - return PyLong_FromLong(PyList_CheckExact(obj)); -} - -static PyObject * -list_new(PyObject* Py_UNUSED(module), PyObject *obj) -{ - return PyList_New(PyLong_AsSsize_t(obj)); -} - -static PyObject * -list_size(PyObject *Py_UNUSED(module), PyObject *obj) -{ - NULLABLE(obj); - RETURN_SIZE(PyList_Size(obj)); -} static PyObject * list_get_size(PyObject *Py_UNUSED(module), PyObject *obj) @@ -35,17 +9,6 @@ list_get_size(PyObject *Py_UNUSED(module), PyObject *obj) RETURN_SIZE(PyList_GET_SIZE(obj)); } -static PyObject * -list_getitem(PyObject *Py_UNUSED(module), PyObject *args) -{ - PyObject *obj; - Py_ssize_t i; - if (!PyArg_ParseTuple(args, "On", &obj, &i)) { - return NULL; - } - NULLABLE(obj); - return Py_XNewRef(PyList_GetItem(obj, i)); -} static PyObject * list_get_item(PyObject *Py_UNUSED(module), PyObject *args) @@ -59,31 +22,6 @@ list_get_item(PyObject *Py_UNUSED(module), PyObject *args) return Py_XNewRef(PyList_GET_ITEM(obj, i)); } -static PyObject * -list_get_item_ref(PyObject *Py_UNUSED(module), PyObject *args) -{ - PyObject *obj; - Py_ssize_t i; - if (!PyArg_ParseTuple(args, "On", &obj, &i)) { - return NULL; - } - NULLABLE(obj); - return PyList_GetItemRef(obj, i); -} - -static PyObject * -list_setitem(PyObject *Py_UNUSED(module), 
PyObject *args) -{ - PyObject *obj, *value; - Py_ssize_t i; - if (!PyArg_ParseTuple(args, "OnO", &obj, &i, &value)) { - return NULL; - } - NULLABLE(obj); - NULLABLE(value); - RETURN_INT(PyList_SetItem(obj, i, Py_XNewRef(value))); - -} static PyObject * list_set_item(PyObject *Py_UNUSED(module), PyObject *args) @@ -100,79 +38,6 @@ list_set_item(PyObject *Py_UNUSED(module), PyObject *args) } -static PyObject * -list_insert(PyObject *Py_UNUSED(module), PyObject *args) -{ - PyObject *obj, *value; - Py_ssize_t where; - if (!PyArg_ParseTuple(args, "OnO", &obj, &where, &value)) { - return NULL; - } - NULLABLE(obj); - NULLABLE(value); - RETURN_INT(PyList_Insert(obj, where, Py_XNewRef(value))); - -} - -static PyObject * -list_append(PyObject *Py_UNUSED(module), PyObject *args) -{ - PyObject *obj, *value; - if (!PyArg_ParseTuple(args, "OO", &obj, &value)) { - return NULL; - } - NULLABLE(obj); - NULLABLE(value); - RETURN_INT(PyList_Append(obj, value)); -} - -static PyObject * -list_getslice(PyObject *Py_UNUSED(module), PyObject *args) -{ - PyObject *obj; - Py_ssize_t ilow, ihigh; - if (!PyArg_ParseTuple(args, "Onn", &obj, &ilow, &ihigh)) { - return NULL; - } - NULLABLE(obj); - return PyList_GetSlice(obj, ilow, ihigh); - -} - -static PyObject * -list_setslice(PyObject *Py_UNUSED(module), PyObject *args) -{ - PyObject *obj, *value; - Py_ssize_t ilow, ihigh; - if (!PyArg_ParseTuple(args, "OnnO", &obj, &ilow, &ihigh, &value)) { - return NULL; - } - NULLABLE(obj); - NULLABLE(value); - RETURN_INT(PyList_SetSlice(obj, ilow, ihigh, value)); -} - -static PyObject * -list_sort(PyObject* Py_UNUSED(module), PyObject *obj) -{ - NULLABLE(obj); - RETURN_INT(PyList_Sort(obj)); -} - -static PyObject * -list_reverse(PyObject* Py_UNUSED(module), PyObject *obj) -{ - NULLABLE(obj); - RETURN_INT(PyList_Reverse(obj)); -} - -static PyObject * -list_astuple(PyObject* Py_UNUSED(module), PyObject *obj) -{ - NULLABLE(obj); - return PyList_AsTuple(obj); -} - static PyObject * list_clear(PyObject* Py_UNUSED(module), PyObject *obj) @@ -196,25 +61,12 @@ list_extend(PyObject* Py_UNUSED(module), PyObject *args) static PyMethodDef test_methods[] = { - {"list_check", list_check, METH_O}, - {"list_check_exact", list_check_exact, METH_O}, - {"list_new", list_new, METH_O}, - {"list_size", list_size, METH_O}, {"list_get_size", list_get_size, METH_O}, - {"list_getitem", list_getitem, METH_VARARGS}, {"list_get_item", list_get_item, METH_VARARGS}, - {"list_get_item_ref", list_get_item_ref, METH_VARARGS}, - {"list_setitem", list_setitem, METH_VARARGS}, {"list_set_item", list_set_item, METH_VARARGS}, - {"list_insert", list_insert, METH_VARARGS}, - {"list_append", list_append, METH_VARARGS}, - {"list_getslice", list_getslice, METH_VARARGS}, - {"list_setslice", list_setslice, METH_VARARGS}, - {"list_sort", list_sort, METH_O}, - {"list_reverse", list_reverse, METH_O}, - {"list_astuple", list_astuple, METH_O}, {"list_clear", list_clear, METH_O}, {"list_extend", list_extend, METH_VARARGS}, + {NULL}, }; diff --git a/Modules/_testcapi/long.c b/Modules/_testcapi/long.c index dc21cf9f475228..28dca01bee09a0 100644 --- a/Modules/_testcapi/long.c +++ b/Modules/_testcapi/long.c @@ -12,529 +12,6 @@ module _testcapi /*[clinic end generated code: output=da39a3ee5e6b4b0d input=6361033e795369fc]*/ -static PyObject * -raiseTestError(const char* test_name, const char* msg) -{ - PyErr_Format(PyExc_AssertionError, "%s: %s", test_name, msg); - return NULL; -} - -/* Tests of PyLong_{As, From}{Unsigned,}Long(), and - PyLong_{As, From}{Unsigned,}LongLong(). 
- - Note that the meat of the test is contained in testcapi_long.h. - This is revolting, but delicate code duplication is worse: "almost - exactly the same" code is needed to test long long, but the ubiquitous - dependence on type names makes it impossible to use a parameterized - function. A giant macro would be even worse than this. A C++ template - would be perfect. - - The "report an error" functions are deliberately not part of the #include - file: if the test fails, you can set a breakpoint in the appropriate - error function directly, and crawl back from there in the debugger. -*/ - -#define UNBIND(X) Py_DECREF(X); (X) = NULL - -static PyObject * -raise_test_long_error(const char* msg) -{ - return raiseTestError("test_long_api", msg); -} - -// Test PyLong_FromLong()/PyLong_AsLong() -// and PyLong_FromUnsignedLong()/PyLong_AsUnsignedLong(). - -#define TESTNAME test_long_api_inner -#define TYPENAME long -#define F_S_TO_PY PyLong_FromLong -#define F_PY_TO_S PyLong_AsLong -#define F_U_TO_PY PyLong_FromUnsignedLong -#define F_PY_TO_U PyLong_AsUnsignedLong - -#include "testcapi_long.h" - -/*[clinic input] -_testcapi.test_long_api -[clinic start generated code]*/ - -static PyObject * -_testcapi_test_long_api_impl(PyObject *module) -/*[clinic end generated code: output=4405798ca1e9f444 input=e9b8880d7331c688]*/ -{ - return TESTNAME(raise_test_long_error); -} - -#undef TESTNAME -#undef TYPENAME -#undef F_S_TO_PY -#undef F_PY_TO_S -#undef F_U_TO_PY -#undef F_PY_TO_U - -// Test PyLong_FromLongLong()/PyLong_AsLongLong() -// and PyLong_FromUnsignedLongLong()/PyLong_AsUnsignedLongLong(). - -static PyObject * -raise_test_longlong_error(const char* msg) -{ - return raiseTestError("test_longlong_api", msg); -} - -#define TESTNAME test_longlong_api_inner -#define TYPENAME long long -#define F_S_TO_PY PyLong_FromLongLong -#define F_PY_TO_S PyLong_AsLongLong -#define F_U_TO_PY PyLong_FromUnsignedLongLong -#define F_PY_TO_U PyLong_AsUnsignedLongLong - -#include "testcapi_long.h" - -/*[clinic input] -_testcapi.test_longlong_api -[clinic start generated code]*/ - -static PyObject * -_testcapi_test_longlong_api_impl(PyObject *module) -/*[clinic end generated code: output=2b3414ba8c31dfe6 input=ccbb2a48c2b3c4a5]*/ -{ - return TESTNAME(raise_test_longlong_error); -} - -#undef TESTNAME -#undef TYPENAME -#undef F_S_TO_PY -#undef F_PY_TO_S -#undef F_U_TO_PY -#undef F_PY_TO_U - - -/*[clinic input] -_testcapi.test_long_and_overflow - -Test the PyLong_AsLongAndOverflow API. - -General conversion to PY_LONG is tested by test_long_api_inner. -This test will concentrate on proper handling of overflow. -[clinic start generated code]*/ - -static PyObject * -_testcapi_test_long_and_overflow_impl(PyObject *module) -/*[clinic end generated code: output=f8460ca115e31d8e input=762f6b62da0a3cdc]*/ -{ - PyObject *num, *one, *temp; - long value; - int overflow; - - /* Test that overflow is set properly for a large value. 
*/ - /* num is a number larger than LONG_MAX even on 64-bit platforms */ - num = PyLong_FromString("FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); - if (num == NULL) - return NULL; - overflow = 1234; - value = PyLong_AsLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -1) - return raiseTestError("test_long_and_overflow", - "return value was not set to -1"); - if (overflow != 1) - return raiseTestError("test_long_and_overflow", - "overflow was not set to 1"); - - /* Same again, with num = LONG_MAX + 1 */ - num = PyLong_FromLong(LONG_MAX); - if (num == NULL) - return NULL; - one = PyLong_FromLong(1L); - if (one == NULL) { - Py_DECREF(num); - return NULL; - } - temp = PyNumber_Add(num, one); - Py_DECREF(one); - Py_SETREF(num, temp); - if (num == NULL) - return NULL; - overflow = 0; - value = PyLong_AsLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -1) - return raiseTestError("test_long_and_overflow", - "return value was not set to -1"); - if (overflow != 1) - return raiseTestError("test_long_and_overflow", - "overflow was not set to 1"); - - /* Test that overflow is set properly for a large negative value. */ - /* num is a number smaller than LONG_MIN even on 64-bit platforms */ - num = PyLong_FromString("-FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); - if (num == NULL) - return NULL; - overflow = 1234; - value = PyLong_AsLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -1) - return raiseTestError("test_long_and_overflow", - "return value was not set to -1"); - if (overflow != -1) - return raiseTestError("test_long_and_overflow", - "overflow was not set to -1"); - - /* Same again, with num = LONG_MIN - 1 */ - num = PyLong_FromLong(LONG_MIN); - if (num == NULL) - return NULL; - one = PyLong_FromLong(1L); - if (one == NULL) { - Py_DECREF(num); - return NULL; - } - temp = PyNumber_Subtract(num, one); - Py_DECREF(one); - Py_SETREF(num, temp); - if (num == NULL) - return NULL; - overflow = 0; - value = PyLong_AsLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -1) - return raiseTestError("test_long_and_overflow", - "return value was not set to -1"); - if (overflow != -1) - return raiseTestError("test_long_and_overflow", - "overflow was not set to -1"); - - /* Test that overflow is cleared properly for small values. 
*/ - num = PyLong_FromString("FF", NULL, 16); - if (num == NULL) - return NULL; - overflow = 1234; - value = PyLong_AsLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != 0xFF) - return raiseTestError("test_long_and_overflow", - "expected return value 0xFF"); - if (overflow != 0) - return raiseTestError("test_long_and_overflow", - "overflow was not cleared"); - - num = PyLong_FromString("-FF", NULL, 16); - if (num == NULL) - return NULL; - overflow = 0; - value = PyLong_AsLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -0xFF) - return raiseTestError("test_long_and_overflow", - "expected return value 0xFF"); - if (overflow != 0) - return raiseTestError("test_long_and_overflow", - "overflow was set incorrectly"); - - num = PyLong_FromLong(LONG_MAX); - if (num == NULL) - return NULL; - overflow = 1234; - value = PyLong_AsLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != LONG_MAX) - return raiseTestError("test_long_and_overflow", - "expected return value LONG_MAX"); - if (overflow != 0) - return raiseTestError("test_long_and_overflow", - "overflow was not cleared"); - - num = PyLong_FromLong(LONG_MIN); - if (num == NULL) - return NULL; - overflow = 0; - value = PyLong_AsLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != LONG_MIN) - return raiseTestError("test_long_and_overflow", - "expected return value LONG_MIN"); - if (overflow != 0) - return raiseTestError("test_long_and_overflow", - "overflow was not cleared"); - - Py_RETURN_NONE; -} - -/*[clinic input] -_testcapi.test_long_long_and_overflow - -Test the PyLong_AsLongLongAndOverflow API. - -General conversion to long long is tested by test_long_api_inner. -This test will concentrate on proper handling of overflow. -[clinic start generated code]*/ - -static PyObject * -_testcapi_test_long_long_and_overflow_impl(PyObject *module) -/*[clinic end generated code: output=0b92330786f45483 input=544bb0aefe5e8a9e]*/ -{ - PyObject *num, *one, *temp; - long long value; - int overflow; - - /* Test that overflow is set properly for a large value. */ - /* num is a number larger than LLONG_MAX on a typical machine. 
*/ - num = PyLong_FromString("FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); - if (num == NULL) - return NULL; - overflow = 1234; - value = PyLong_AsLongLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -1) - return raiseTestError("test_long_long_and_overflow", - "return value was not set to -1"); - if (overflow != 1) - return raiseTestError("test_long_long_and_overflow", - "overflow was not set to 1"); - - /* Same again, with num = LLONG_MAX + 1 */ - num = PyLong_FromLongLong(LLONG_MAX); - if (num == NULL) - return NULL; - one = PyLong_FromLong(1L); - if (one == NULL) { - Py_DECREF(num); - return NULL; - } - temp = PyNumber_Add(num, one); - Py_DECREF(one); - Py_SETREF(num, temp); - if (num == NULL) - return NULL; - overflow = 0; - value = PyLong_AsLongLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -1) - return raiseTestError("test_long_long_and_overflow", - "return value was not set to -1"); - if (overflow != 1) - return raiseTestError("test_long_long_and_overflow", - "overflow was not set to 1"); - - /* Test that overflow is set properly for a large negative value. */ - /* num is a number smaller than LLONG_MIN on a typical platform */ - num = PyLong_FromString("-FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); - if (num == NULL) - return NULL; - overflow = 1234; - value = PyLong_AsLongLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -1) - return raiseTestError("test_long_long_and_overflow", - "return value was not set to -1"); - if (overflow != -1) - return raiseTestError("test_long_long_and_overflow", - "overflow was not set to -1"); - - /* Same again, with num = LLONG_MIN - 1 */ - num = PyLong_FromLongLong(LLONG_MIN); - if (num == NULL) - return NULL; - one = PyLong_FromLong(1L); - if (one == NULL) { - Py_DECREF(num); - return NULL; - } - temp = PyNumber_Subtract(num, one); - Py_DECREF(one); - Py_SETREF(num, temp); - if (num == NULL) - return NULL; - overflow = 0; - value = PyLong_AsLongLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -1) - return raiseTestError("test_long_long_and_overflow", - "return value was not set to -1"); - if (overflow != -1) - return raiseTestError("test_long_long_and_overflow", - "overflow was not set to -1"); - - /* Test that overflow is cleared properly for small values. 
*/ - num = PyLong_FromString("FF", NULL, 16); - if (num == NULL) - return NULL; - overflow = 1234; - value = PyLong_AsLongLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != 0xFF) - return raiseTestError("test_long_long_and_overflow", - "expected return value 0xFF"); - if (overflow != 0) - return raiseTestError("test_long_long_and_overflow", - "overflow was not cleared"); - - num = PyLong_FromString("-FF", NULL, 16); - if (num == NULL) - return NULL; - overflow = 0; - value = PyLong_AsLongLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != -0xFF) - return raiseTestError("test_long_long_and_overflow", - "expected return value 0xFF"); - if (overflow != 0) - return raiseTestError("test_long_long_and_overflow", - "overflow was set incorrectly"); - - num = PyLong_FromLongLong(LLONG_MAX); - if (num == NULL) - return NULL; - overflow = 1234; - value = PyLong_AsLongLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != LLONG_MAX) - return raiseTestError("test_long_long_and_overflow", - "expected return value LLONG_MAX"); - if (overflow != 0) - return raiseTestError("test_long_long_and_overflow", - "overflow was not cleared"); - - num = PyLong_FromLongLong(LLONG_MIN); - if (num == NULL) - return NULL; - overflow = 0; - value = PyLong_AsLongLongAndOverflow(num, &overflow); - Py_DECREF(num); - if (value == -1 && PyErr_Occurred()) - return NULL; - if (value != LLONG_MIN) - return raiseTestError("test_long_long_and_overflow", - "expected return value LLONG_MIN"); - if (overflow != 0) - return raiseTestError("test_long_long_and_overflow", - "overflow was not cleared"); - - Py_RETURN_NONE; -} - -/*[clinic input] -_testcapi.test_long_as_size_t - -Test the PyLong_As{Size,Ssize}_t API. - -At present this just tests that non-integer arguments are handled correctly. -It should be extended to test overflow handling. -[clinic start generated code]*/ - -static PyObject * -_testcapi_test_long_as_size_t_impl(PyObject *module) -/*[clinic end generated code: output=f6490ea2b41e6173 input=922990c4a3edfb0d]*/ -{ - size_t out_u; - Py_ssize_t out_s; - - Py_INCREF(Py_None); - - out_u = PyLong_AsSize_t(Py_None); - if (out_u != (size_t)-1 || !PyErr_Occurred()) - return raiseTestError("test_long_as_size_t", - "PyLong_AsSize_t(None) didn't complain"); - if (!PyErr_ExceptionMatches(PyExc_TypeError)) - return raiseTestError("test_long_as_size_t", - "PyLong_AsSize_t(None) raised " - "something other than TypeError"); - PyErr_Clear(); - - out_s = PyLong_AsSsize_t(Py_None); - if (out_s != (Py_ssize_t)-1 || !PyErr_Occurred()) - return raiseTestError("test_long_as_size_t", - "PyLong_AsSsize_t(None) didn't complain"); - if (!PyErr_ExceptionMatches(PyExc_TypeError)) - return raiseTestError("test_long_as_size_t", - "PyLong_AsSsize_t(None) raised " - "something other than TypeError"); - PyErr_Clear(); - - /* Py_INCREF(Py_None) omitted - we already have a reference to it. 
*/ - return Py_None; -} - -/*[clinic input] -_testcapi.test_long_as_unsigned_long_long_mask -[clinic start generated code]*/ - -static PyObject * -_testcapi_test_long_as_unsigned_long_long_mask_impl(PyObject *module) -/*[clinic end generated code: output=e3e16cd0189440cc input=eb2438493ae7b9af]*/ -{ - unsigned long long res = PyLong_AsUnsignedLongLongMask(NULL); - - if (res != (unsigned long long)-1 || !PyErr_Occurred()) { - return raiseTestError("test_long_as_unsigned_long_long_mask", - "PyLong_AsUnsignedLongLongMask(NULL) didn't " - "complain"); - } - if (!PyErr_ExceptionMatches(PyExc_SystemError)) { - return raiseTestError("test_long_as_unsigned_long_long_mask", - "PyLong_AsUnsignedLongLongMask(NULL) raised " - "something other than SystemError"); - } - PyErr_Clear(); - Py_RETURN_NONE; -} - -/*[clinic input] -_testcapi.test_long_as_double -[clinic start generated code]*/ - -static PyObject * -_testcapi_test_long_as_double_impl(PyObject *module) -/*[clinic end generated code: output=deca0898e15adde5 input=c77bc88ef5a1de76]*/ -{ - double out; - - Py_INCREF(Py_None); - - out = PyLong_AsDouble(Py_None); - if (out != -1.0 || !PyErr_Occurred()) - return raiseTestError("test_long_as_double", - "PyLong_AsDouble(None) didn't complain"); - if (!PyErr_ExceptionMatches(PyExc_TypeError)) - return raiseTestError("test_long_as_double", - "PyLong_AsDouble(None) raised " - "something other than TypeError"); - PyErr_Clear(); - - /* Py_INCREF(Py_None) omitted - we already have a reference to it. */ - return Py_None; -} - /*[clinic input] _testcapi.call_long_compact_api arg: object @@ -555,48 +32,6 @@ _testcapi_call_long_compact_api(PyObject *module, PyObject *arg) return Py_BuildValue("in", is_compact, value); } -static PyObject * -pylong_check(PyObject *module, PyObject *obj) -{ - NULLABLE(obj); - return PyLong_FromLong(PyLong_Check(obj)); -} - -static PyObject * -pylong_checkexact(PyObject *module, PyObject *obj) -{ - NULLABLE(obj); - return PyLong_FromLong(PyLong_CheckExact(obj)); -} - -static PyObject * -pylong_fromdouble(PyObject *module, PyObject *arg) -{ - double value; - if (!PyArg_Parse(arg, "d", &value)) { - return NULL; - } - return PyLong_FromDouble(value); -} - -static PyObject * -pylong_fromstring(PyObject *module, PyObject *args) -{ - const char *str; - Py_ssize_t len; - int base; - char *end = UNINITIALIZED_PTR; - if (!PyArg_ParseTuple(args, "z#i", &str, &len, &base)) { - return NULL; - } - - PyObject *result = PyLong_FromString(str, &end, base); - if (result == NULL) { - // XXX 'end' is not always set. 
- return NULL; - } - return Py_BuildValue("Nn", result, (Py_ssize_t)(end - str)); -} static PyObject * pylong_fromunicodeobject(PyObject *module, PyObject *args) @@ -611,170 +46,6 @@ pylong_fromunicodeobject(PyObject *module, PyObject *args) return PyLong_FromUnicodeObject(unicode, base); } -static PyObject * -pylong_fromvoidptr(PyObject *module, PyObject *arg) -{ - NULLABLE(arg); - return PyLong_FromVoidPtr((void *)arg); -} - -/*[clinic input] -_testcapi.PyLong_AsInt - arg: object - / -[clinic start generated code]*/ - -static PyObject * -_testcapi_PyLong_AsInt(PyObject *module, PyObject *arg) -/*[clinic end generated code: output=0df9f19de5fa575b input=9561b97105493a67]*/ -{ - NULLABLE(arg); - assert(!PyErr_Occurred()); - int value = PyLong_AsInt(arg); - if (value == -1 && PyErr_Occurred()) { - return NULL; - } - return PyLong_FromLong(value); -} - -static PyObject * -pylong_aslong(PyObject *module, PyObject *arg) -{ - NULLABLE(arg); - long value = PyLong_AsLong(arg); - if (value == -1 && PyErr_Occurred()) { - return NULL; - } - return PyLong_FromLong(value); -} - -static PyObject * -pylong_aslongandoverflow(PyObject *module, PyObject *arg) -{ - NULLABLE(arg); - int overflow = UNINITIALIZED_INT; - long value = PyLong_AsLongAndOverflow(arg, &overflow); - if (value == -1 && PyErr_Occurred()) { - assert(overflow == -1); - return NULL; - } - return Py_BuildValue("li", value, overflow); -} - -static PyObject * -pylong_asunsignedlong(PyObject *module, PyObject *arg) -{ - NULLABLE(arg); - unsigned long value = PyLong_AsUnsignedLong(arg); - if (value == (unsigned long)-1 && PyErr_Occurred()) { - return NULL; - } - return PyLong_FromUnsignedLong(value); -} - -static PyObject * -pylong_asunsignedlongmask(PyObject *module, PyObject *arg) -{ - NULLABLE(arg); - unsigned long value = PyLong_AsUnsignedLongMask(arg); - if (value == (unsigned long)-1 && PyErr_Occurred()) { - return NULL; - } - return PyLong_FromUnsignedLong(value); -} - -static PyObject * -pylong_aslonglong(PyObject *module, PyObject *arg) -{ - NULLABLE(arg); - long long value = PyLong_AsLongLong(arg); - if (value == -1 && PyErr_Occurred()) { - return NULL; - } - return PyLong_FromLongLong(value); -} - -static PyObject * -pylong_aslonglongandoverflow(PyObject *module, PyObject *arg) -{ - NULLABLE(arg); - int overflow = UNINITIALIZED_INT; - long long value = PyLong_AsLongLongAndOverflow(arg, &overflow); - if (value == -1 && PyErr_Occurred()) { - assert(overflow == -1); - return NULL; - } - return Py_BuildValue("Li", value, overflow); -} - -static PyObject * -pylong_asunsignedlonglong(PyObject *module, PyObject *arg) -{ - NULLABLE(arg); - unsigned long long value = PyLong_AsUnsignedLongLong(arg); - if (value == (unsigned long long)-1 && PyErr_Occurred()) { - return NULL; - } - return PyLong_FromUnsignedLongLong(value); -} - -static PyObject * -pylong_asunsignedlonglongmask(PyObject *module, PyObject *arg) -{ - NULLABLE(arg); - unsigned long long value = PyLong_AsUnsignedLongLongMask(arg); - if (value == (unsigned long long)-1 && PyErr_Occurred()) { - return NULL; - } - return PyLong_FromUnsignedLongLong(value); -} - -static PyObject * -pylong_as_ssize_t(PyObject *module, PyObject *arg) -{ - NULLABLE(arg); - Py_ssize_t value = PyLong_AsSsize_t(arg); - if (value == -1 && PyErr_Occurred()) { - return NULL; - } - return PyLong_FromSsize_t(value); -} - -static PyObject * -pylong_as_size_t(PyObject *module, PyObject *arg) -{ - NULLABLE(arg); - size_t value = PyLong_AsSize_t(arg); - if (value == (size_t)-1 && PyErr_Occurred()) { - return NULL; - 
} - return PyLong_FromSize_t(value); -} - -static PyObject * -pylong_asdouble(PyObject *module, PyObject *arg) -{ - NULLABLE(arg); - double value = PyLong_AsDouble(arg); - if (value == -1.0 && PyErr_Occurred()) { - return NULL; - } - return PyFloat_FromDouble(value); -} - -static PyObject * -pylong_asvoidptr(PyObject *module, PyObject *arg) -{ - NULLABLE(arg); - void *value = PyLong_AsVoidPtr(arg); - if (value == NULL) { - if (PyErr_Occurred()) { - return NULL; - } - Py_RETURN_NONE; - } - return Py_NewRef((PyObject *)value); -} static PyObject * pylong_asnativebytes(PyObject *module, PyObject *args) @@ -800,6 +71,7 @@ pylong_asnativebytes(PyObject *module, PyObject *args) return res >= 0 ? PyLong_FromSsize_t(res) : NULL; } + static PyObject * pylong_fromnativebytes(PyObject *module, PyObject *args) { @@ -820,37 +92,24 @@ pylong_fromnativebytes(PyObject *module, PyObject *args) return res; } +static PyObject * +pylong_aspid(PyObject *module, PyObject *arg) +{ + NULLABLE(arg); + pid_t value = PyLong_AsPid(arg); + if (value == -1 && PyErr_Occurred()) { + return NULL; + } + return PyLong_FromPid(value); +} + static PyMethodDef test_methods[] = { - _TESTCAPI_TEST_LONG_AND_OVERFLOW_METHODDEF - _TESTCAPI_TEST_LONG_API_METHODDEF - _TESTCAPI_TEST_LONG_AS_DOUBLE_METHODDEF - _TESTCAPI_TEST_LONG_AS_SIZE_T_METHODDEF - _TESTCAPI_TEST_LONG_AS_UNSIGNED_LONG_LONG_MASK_METHODDEF - _TESTCAPI_TEST_LONG_LONG_AND_OVERFLOW_METHODDEF - _TESTCAPI_TEST_LONGLONG_API_METHODDEF _TESTCAPI_CALL_LONG_COMPACT_API_METHODDEF - {"pylong_check", pylong_check, METH_O}, - {"pylong_checkexact", pylong_checkexact, METH_O}, - {"pylong_fromdouble", pylong_fromdouble, METH_O}, - {"pylong_fromstring", pylong_fromstring, METH_VARARGS}, {"pylong_fromunicodeobject", pylong_fromunicodeobject, METH_VARARGS}, - {"pylong_fromvoidptr", pylong_fromvoidptr, METH_O}, - _TESTCAPI_PYLONG_ASINT_METHODDEF - {"pylong_aslong", pylong_aslong, METH_O}, - {"pylong_aslongandoverflow", pylong_aslongandoverflow, METH_O}, - {"pylong_asunsignedlong", pylong_asunsignedlong, METH_O}, - {"pylong_asunsignedlongmask", pylong_asunsignedlongmask, METH_O}, - {"pylong_aslonglong", pylong_aslonglong, METH_O}, - {"pylong_aslonglongandoverflow", pylong_aslonglongandoverflow, METH_O}, - {"pylong_asunsignedlonglong", pylong_asunsignedlonglong, METH_O}, - {"pylong_asunsignedlonglongmask", pylong_asunsignedlonglongmask, METH_O}, - {"pylong_as_ssize_t", pylong_as_ssize_t, METH_O}, - {"pylong_as_size_t", pylong_as_size_t, METH_O}, - {"pylong_asdouble", pylong_asdouble, METH_O}, - {"pylong_asvoidptr", pylong_asvoidptr, METH_O}, {"pylong_asnativebytes", pylong_asnativebytes, METH_VARARGS}, {"pylong_fromnativebytes", pylong_fromnativebytes, METH_VARARGS}, + {"pylong_aspid", pylong_aspid, METH_O}, {NULL}, }; diff --git a/Modules/_testcapi/parts.h b/Modules/_testcapi/parts.h index f9bdd830775a75..e7c868f6bcff6e 100644 --- a/Modules/_testcapi/parts.h +++ b/Modules/_testcapi/parts.h @@ -31,6 +31,7 @@ int _PyTestCapi_Init_Vectorcall(PyObject *module); int _PyTestCapi_Init_Heaptype(PyObject *module); int _PyTestCapi_Init_Abstract(PyObject *module); +int _PyTestCapi_Init_Bytes(PyObject *module); int _PyTestCapi_Init_Unicode(PyObject *module); int _PyTestCapi_Init_GetArgs(PyObject *module); int _PyTestCapi_Init_DateTime(PyObject *module); diff --git a/Modules/_testcapi/set.c b/Modules/_testcapi/set.c index 2fbd0aeffcd9f9..31b52cee5e9623 100644 --- a/Modules/_testcapi/set.c +++ b/Modules/_testcapi/set.c @@ -1,75 +1,6 @@ #include "parts.h" #include "util.h" -static PyObject * 
-set_check(PyObject *self, PyObject *obj) -{ - NULLABLE(obj); - RETURN_INT(PySet_Check(obj)); -} - -static PyObject * -set_checkexact(PyObject *self, PyObject *obj) -{ - NULLABLE(obj); - RETURN_INT(PySet_CheckExact(obj)); -} - -static PyObject * -frozenset_check(PyObject *self, PyObject *obj) -{ - NULLABLE(obj); - RETURN_INT(PyFrozenSet_Check(obj)); -} - -static PyObject * -frozenset_checkexact(PyObject *self, PyObject *obj) -{ - NULLABLE(obj); - RETURN_INT(PyFrozenSet_CheckExact(obj)); -} - -static PyObject * -anyset_check(PyObject *self, PyObject *obj) -{ - NULLABLE(obj); - RETURN_INT(PyAnySet_Check(obj)); -} - -static PyObject * -anyset_checkexact(PyObject *self, PyObject *obj) -{ - NULLABLE(obj); - RETURN_INT(PyAnySet_CheckExact(obj)); -} - -static PyObject * -set_new(PyObject *self, PyObject *args) -{ - PyObject *iterable = NULL; - if (!PyArg_ParseTuple(args, "|O", &iterable)) { - return NULL; - } - return PySet_New(iterable); -} - -static PyObject * -frozenset_new(PyObject *self, PyObject *args) -{ - PyObject *iterable = NULL; - if (!PyArg_ParseTuple(args, "|O", &iterable)) { - return NULL; - } - return PyFrozenSet_New(iterable); -} - -static PyObject * -set_size(PyObject *self, PyObject *obj) -{ - NULLABLE(obj); - RETURN_SIZE(PySet_Size(obj)); -} - static PyObject * set_get_size(PyObject *self, PyObject *obj) { @@ -77,111 +8,8 @@ set_get_size(PyObject *self, PyObject *obj) RETURN_SIZE(PySet_GET_SIZE(obj)); } -static PyObject * -set_contains(PyObject *self, PyObject *args) -{ - PyObject *obj, *item; - if (!PyArg_ParseTuple(args, "OO", &obj, &item)) { - return NULL; - } - NULLABLE(obj); - NULLABLE(item); - RETURN_INT(PySet_Contains(obj, item)); -} - -static PyObject * -set_add(PyObject *self, PyObject *args) -{ - PyObject *obj, *item; - if (!PyArg_ParseTuple(args, "OO", &obj, &item)) { - return NULL; - } - NULLABLE(obj); - NULLABLE(item); - RETURN_INT(PySet_Add(obj, item)); -} - -static PyObject * -set_discard(PyObject *self, PyObject *args) -{ - PyObject *obj, *item; - if (!PyArg_ParseTuple(args, "OO", &obj, &item)) { - return NULL; - } - NULLABLE(obj); - NULLABLE(item); - RETURN_INT(PySet_Discard(obj, item)); -} - -static PyObject * -set_pop(PyObject *self, PyObject *obj) -{ - NULLABLE(obj); - return PySet_Pop(obj); -} - -static PyObject * -set_clear(PyObject *self, PyObject *obj) -{ - NULLABLE(obj); - RETURN_INT(PySet_Clear(obj)); -} - -static PyObject * -test_frozenset_add_in_capi(PyObject *self, PyObject *Py_UNUSED(obj)) -{ - // Test that `frozenset` can be used with `PySet_Add`, - // when frozenset is just created in CAPI. 
- PyObject *fs = PyFrozenSet_New(NULL); - if (fs == NULL) { - return NULL; - } - PyObject *num = PyLong_FromLong(1); - if (num == NULL) { - goto error; - } - if (PySet_Add(fs, num) < 0) { - goto error; - } - int contains = PySet_Contains(fs, num); - if (contains < 0) { - goto error; - } - else if (contains == 0) { - goto unexpected; - } - Py_DECREF(fs); - Py_DECREF(num); - Py_RETURN_NONE; - -unexpected: - PyErr_SetString(PyExc_ValueError, "set does not contain expected value"); -error: - Py_DECREF(fs); - Py_XDECREF(num); - return NULL; -} - static PyMethodDef test_methods[] = { - {"set_check", set_check, METH_O}, - {"set_checkexact", set_checkexact, METH_O}, - {"frozenset_check", frozenset_check, METH_O}, - {"frozenset_checkexact", frozenset_checkexact, METH_O}, - {"anyset_check", anyset_check, METH_O}, - {"anyset_checkexact", anyset_checkexact, METH_O}, - - {"set_new", set_new, METH_VARARGS}, - {"frozenset_new", frozenset_new, METH_VARARGS}, - - {"set_size", set_size, METH_O}, {"set_get_size", set_get_size, METH_O}, - {"set_contains", set_contains, METH_VARARGS}, - {"set_add", set_add, METH_VARARGS}, - {"set_discard", set_discard, METH_VARARGS}, - {"set_pop", set_pop, METH_O}, - {"set_clear", set_clear, METH_O}, - - {"test_frozenset_add_in_capi", test_frozenset_add_in_capi, METH_NOARGS}, {NULL}, }; diff --git a/Modules/_testcapi/unicode.c b/Modules/_testcapi/unicode.c index d0954bbe36ff9d..015db9017139d0 100644 --- a/Modules/_testcapi/unicode.c +++ b/Modules/_testcapi/unicode.c @@ -3,101 +3,29 @@ #include "parts.h" #include "util.h" +/* Test PyUnicode_New() */ static PyObject * -codec_incrementalencoder(PyObject *self, PyObject *args) -{ - const char *encoding, *errors = NULL; - if (!PyArg_ParseTuple(args, "s|s:test_incrementalencoder", - &encoding, &errors)) - return NULL; - return PyCodec_IncrementalEncoder(encoding, errors); -} - -static PyObject * -codec_incrementaldecoder(PyObject *self, PyObject *args) -{ - const char *encoding, *errors = NULL; - if (!PyArg_ParseTuple(args, "s|s:test_incrementaldecoder", - &encoding, &errors)) - return NULL; - return PyCodec_IncrementalDecoder(encoding, errors); -} - -static PyObject * -test_unicode_compare_with_ascii(PyObject *self, PyObject *Py_UNUSED(ignored)) { - PyObject *py_s = PyUnicode_FromStringAndSize("str\0", 4); - int result; - if (py_s == NULL) - return NULL; - result = PyUnicode_CompareWithASCIIString(py_s, "str"); - Py_DECREF(py_s); - if (!result) { - PyErr_SetString(PyExc_AssertionError, "Python string ending in NULL " - "should not compare equal to c string."); - return NULL; - } - Py_RETURN_NONE; -} - -static PyObject * -test_widechar(PyObject *self, PyObject *Py_UNUSED(ignored)) +unicode_new(PyObject *self, PyObject *args) { -#if defined(SIZEOF_WCHAR_T) && (SIZEOF_WCHAR_T == 4) - const wchar_t wtext[2] = {(wchar_t)0x10ABCDu}; - size_t wtextlen = 1; - const wchar_t invalid[1] = {(wchar_t)0x110000u}; -#else - const wchar_t wtext[3] = {(wchar_t)0xDBEAu, (wchar_t)0xDFCDu}; - size_t wtextlen = 2; -#endif - PyObject *wide, *utf8; - - wide = PyUnicode_FromWideChar(wtext, wtextlen); - if (wide == NULL) - return NULL; + Py_ssize_t size; + unsigned int maxchar; + PyObject *result; - utf8 = PyUnicode_FromString("\xf4\x8a\xaf\x8d"); - if (utf8 == NULL) { - Py_DECREF(wide); + if (!PyArg_ParseTuple(args, "nI", &size, &maxchar)) { return NULL; } - if (PyUnicode_GET_LENGTH(wide) != PyUnicode_GET_LENGTH(utf8)) { - Py_DECREF(wide); - Py_DECREF(utf8); - PyErr_SetString(PyExc_AssertionError, - "test_widechar: " - "wide string and utf8 string " - 
"have different length"); - return NULL; - } - if (PyUnicode_Compare(wide, utf8)) { - Py_DECREF(wide); - Py_DECREF(utf8); - if (PyErr_Occurred()) - return NULL; - PyErr_SetString(PyExc_AssertionError, - "test_widechar: " - "wide string and utf8 string " - "are different"); + result = PyUnicode_New(size, (Py_UCS4)maxchar); + if (!result) { return NULL; } - - Py_DECREF(wide); - Py_DECREF(utf8); - -#if defined(SIZEOF_WCHAR_T) && (SIZEOF_WCHAR_T == 4) - wide = PyUnicode_FromWideChar(invalid, 1); - if (wide == NULL) - PyErr_Clear(); - else { - PyErr_SetString(PyExc_AssertionError, - "test_widechar: " - "PyUnicode_FromWideChar(L\"\\U00110000\", 1) didn't fail"); + if (size > 0 && maxchar <= 0x10ffff && + PyUnicode_Fill(result, 0, size, (Py_UCS4)maxchar) < 0) + { + Py_DECREF(result); return NULL; } -#endif - Py_RETURN_NONE; + return result; } @@ -128,30 +56,6 @@ unicode_copy(PyObject *unicode) return copy; } -/* Test PyUnicode_New() */ -static PyObject * -unicode_new(PyObject *self, PyObject *args) -{ - Py_ssize_t size; - unsigned int maxchar; - PyObject *result; - - if (!PyArg_ParseTuple(args, "nI", &size, &maxchar)) { - return NULL; - } - - result = PyUnicode_New(size, (Py_UCS4)maxchar); - if (!result) { - return NULL; - } - if (size > 0 && maxchar <= 0x10ffff && - PyUnicode_Fill(result, 0, size, (Py_UCS4)maxchar) < 0) - { - Py_DECREF(result); - return NULL; - } - return result; -} /* Test PyUnicode_Fill() */ static PyObject * @@ -178,129 +82,6 @@ unicode_fill(PyObject *self, PyObject *args) return Py_BuildValue("(Nn)", to_copy, filled); } -/* Test PyUnicode_WriteChar() */ -static PyObject * -unicode_writechar(PyObject *self, PyObject *args) -{ - PyObject *to, *to_copy; - Py_ssize_t index; - unsigned int character; - int result; - - if (!PyArg_ParseTuple(args, "OnI", &to, &index, &character)) { - return NULL; - } - - NULLABLE(to); - if (!(to_copy = unicode_copy(to)) && to) { - return NULL; - } - - result = PyUnicode_WriteChar(to_copy, index, (Py_UCS4)character); - if (result == -1 && PyErr_Occurred()) { - Py_DECREF(to_copy); - return NULL; - } - return Py_BuildValue("(Ni)", to_copy, result); -} - -/* Test PyUnicode_Resize() */ -static PyObject * -unicode_resize(PyObject *self, PyObject *args) -{ - PyObject *obj, *copy; - Py_ssize_t length; - int result; - - if (!PyArg_ParseTuple(args, "On", &obj, &length)) { - return NULL; - } - - NULLABLE(obj); - if (!(copy = unicode_copy(obj)) && obj) { - return NULL; - } - result = PyUnicode_Resize(©, length); - if (result == -1 && PyErr_Occurred()) { - Py_XDECREF(copy); - return NULL; - } - if (obj && PyUnicode_Check(obj) && length > PyUnicode_GET_LENGTH(obj)) { - if (PyUnicode_Fill(copy, PyUnicode_GET_LENGTH(obj), length, 0U) < 0) { - Py_DECREF(copy); - return NULL; - } - } - return Py_BuildValue("(Ni)", copy, result); -} - -/* Test PyUnicode_Append() */ -static PyObject * -unicode_append(PyObject *self, PyObject *args) -{ - PyObject *left, *right, *left_copy; - - if (!PyArg_ParseTuple(args, "OO", &left, &right)) - return NULL; - - NULLABLE(left); - NULLABLE(right); - if (!(left_copy = unicode_copy(left)) && left) { - return NULL; - } - PyUnicode_Append(&left_copy, right); - return left_copy; -} - -/* Test PyUnicode_AppendAndDel() */ -static PyObject * -unicode_appendanddel(PyObject *self, PyObject *args) -{ - PyObject *left, *right, *left_copy; - - if (!PyArg_ParseTuple(args, "OO", &left, &right)) - return NULL; - - NULLABLE(left); - NULLABLE(right); - if (!(left_copy = unicode_copy(left)) && left) { - return NULL; - } - Py_XINCREF(right); - 
PyUnicode_AppendAndDel(&left_copy, right); - return left_copy; -} - -/* Test PyUnicode_FromStringAndSize() */ -static PyObject * -unicode_fromstringandsize(PyObject *self, PyObject *args) -{ - const char *s; - Py_ssize_t bsize; - Py_ssize_t size = -100; - - if (!PyArg_ParseTuple(args, "z#|n", &s, &bsize, &size)) { - return NULL; - } - - if (size == -100) { - size = bsize; - } - return PyUnicode_FromStringAndSize(s, size); -} - -/* Test PyUnicode_FromString() */ -static PyObject * -unicode_fromstring(PyObject *self, PyObject *arg) -{ - const char *s; - Py_ssize_t size; - - if (!PyArg_Parse(arg, "z#", &s, &size)) { - return NULL; - } - return PyUnicode_FromString(s); -} /* Test PyUnicode_FromKindAndData() */ static PyObject * @@ -326,211 +107,9 @@ unicode_fromkindanddata(PyObject *self, PyObject *args) return PyUnicode_FromKindAndData(kind, buffer, kind ? size / kind : 0); } -/* Test PyUnicode_Substring() */ -static PyObject * -unicode_substring(PyObject *self, PyObject *args) -{ - PyObject *str; - Py_ssize_t start, end; - - if (!PyArg_ParseTuple(args, "Onn", &str, &start, &end)) { - return NULL; - } - - NULLABLE(str); - return PyUnicode_Substring(str, start, end); -} - -/* Test PyUnicode_GetLength() */ -static PyObject * -unicode_getlength(PyObject *self, PyObject *arg) -{ - NULLABLE(arg); - RETURN_SIZE(PyUnicode_GetLength(arg)); -} - -/* Test PyUnicode_ReadChar() */ -static PyObject * -unicode_readchar(PyObject *self, PyObject *args) -{ - PyObject *unicode; - Py_ssize_t index; - Py_UCS4 result; - - if (!PyArg_ParseTuple(args, "On", &unicode, &index)) { - return NULL; - } - - NULLABLE(unicode); - result = PyUnicode_ReadChar(unicode, index); - if (result == (Py_UCS4)-1) - return NULL; - return PyLong_FromUnsignedLong(result); -} - -/* Test PyUnicode_FromEncodedObject() */ -static PyObject * -unicode_fromencodedobject(PyObject *self, PyObject *args) -{ - PyObject *obj; - const char *encoding; - const char *errors = NULL; - - if (!PyArg_ParseTuple(args, "Oz|z", &obj, &encoding, &errors)) { - return NULL; - } - - NULLABLE(obj); - return PyUnicode_FromEncodedObject(obj, encoding, errors); -} - -/* Test PyUnicode_FromObject() */ -static PyObject * -unicode_fromobject(PyObject *self, PyObject *arg) -{ - NULLABLE(arg); - return PyUnicode_FromObject(arg); -} - -/* Test PyUnicode_InternInPlace() */ -static PyObject * -unicode_interninplace(PyObject *self, PyObject *arg) -{ - NULLABLE(arg); - Py_XINCREF(arg); - PyUnicode_InternInPlace(&arg); - return arg; -} - -/* Test PyUnicode_InternFromString() */ -static PyObject * -unicode_internfromstring(PyObject *self, PyObject *arg) -{ - const char *s; - Py_ssize_t size; - - if (!PyArg_Parse(arg, "z#", &s, &size)) { - return NULL; - } - return PyUnicode_InternFromString(s); -} - -/* Test PyUnicode_FromWideChar() */ -static PyObject * -unicode_fromwidechar(PyObject *self, PyObject *args) -{ - const char *s; - Py_ssize_t bsize; - Py_ssize_t size = -100; - - if (!PyArg_ParseTuple(args, "z#|n", &s, &bsize, &size)) { - return NULL; - } - if (size == -100) { - if (bsize % SIZEOF_WCHAR_T) { - PyErr_SetString(PyExc_AssertionError, - "invalid size in unicode_fromwidechar()"); - return NULL; - } - size = bsize / SIZEOF_WCHAR_T; - } - return PyUnicode_FromWideChar((const wchar_t *)s, size); -} - -/* Test PyUnicode_AsWideChar() */ -static PyObject * -unicode_aswidechar(PyObject *self, PyObject *args) -{ - PyObject *unicode, *result; - Py_ssize_t buflen, size; - wchar_t *buffer; - - if (!PyArg_ParseTuple(args, "On", &unicode, &buflen)) - return NULL; - NULLABLE(unicode); 
- buffer = PyMem_New(wchar_t, buflen); - if (buffer == NULL) - return PyErr_NoMemory(); - - size = PyUnicode_AsWideChar(unicode, buffer, buflen); - if (size == -1) { - PyMem_Free(buffer); - return NULL; - } - - if (size < buflen) - buflen = size + 1; - else - buflen = size; - result = PyUnicode_FromWideChar(buffer, buflen); - PyMem_Free(buffer); - if (result == NULL) - return NULL; - - return Py_BuildValue("(Nn)", result, size); -} - -/* Test PyUnicode_AsWideCharString() with NULL as buffer */ -static PyObject * -unicode_aswidechar_null(PyObject *self, PyObject *args) -{ - PyObject *unicode; - Py_ssize_t buflen; - - if (!PyArg_ParseTuple(args, "On", &unicode, &buflen)) - return NULL; - NULLABLE(unicode); - RETURN_SIZE(PyUnicode_AsWideChar(unicode, NULL, buflen)); -} - -/* Test PyUnicode_AsWideCharString() */ -static PyObject * -unicode_aswidecharstring(PyObject *self, PyObject *args) -{ - PyObject *unicode, *result; - Py_ssize_t size = UNINITIALIZED_SIZE; - wchar_t *buffer; - - if (!PyArg_ParseTuple(args, "O", &unicode)) - return NULL; - - NULLABLE(unicode); - buffer = PyUnicode_AsWideCharString(unicode, &size); - if (buffer == NULL) { - assert(size == UNINITIALIZED_SIZE); - return NULL; - } - - result = PyUnicode_FromWideChar(buffer, size + 1); - PyMem_Free(buffer); - if (result == NULL) - return NULL; - return Py_BuildValue("(Nn)", result, size); -} - -/* Test PyUnicode_AsWideCharString() with NULL as the size address */ -static PyObject * -unicode_aswidecharstring_null(PyObject *self, PyObject *args) -{ - PyObject *unicode, *result; - wchar_t *buffer; - - if (!PyArg_ParseTuple(args, "O", &unicode)) - return NULL; - - NULLABLE(unicode); - buffer = PyUnicode_AsWideCharString(unicode, NULL); - if (buffer == NULL) - return NULL; - - result = PyUnicode_FromWideChar(buffer, -1); - PyMem_Free(buffer); - if (result == NULL) - return NULL; - return result; -} -/* Test PyUnicode_AsUCS4() */ +// Test PyUnicode_AsUCS4(). +// Part of the limited C API, but the test needs PyUnicode_FromKindAndData(). static PyObject * unicode_asucs4(PyObject *self, PyObject *args) { @@ -562,7 +141,9 @@ unicode_asucs4(PyObject *self, PyObject *args) return result; } -/* Test PyUnicode_AsUCS4Copy() */ + +// Test PyUnicode_AsUCS4Copy(). +// Part of the limited C API, but the test needs PyUnicode_FromKindAndData(). 
static PyObject * unicode_asucs4copy(PyObject *self, PyObject *args) { @@ -586,17 +167,6 @@ unicode_asucs4copy(PyObject *self, PyObject *args) return result; } -/* Test PyUnicode_FromOrdinal() */ -static PyObject * -unicode_fromordinal(PyObject *self, PyObject *args) -{ - int ordinal; - - if (!PyArg_ParseTuple(args, "i", &ordinal)) - return NULL; - - return PyUnicode_FromOrdinal(ordinal); -} /* Test PyUnicode_AsUTF8() */ static PyObject * @@ -617,911 +187,6 @@ unicode_asutf8(PyObject *self, PyObject *args) return PyBytes_FromStringAndSize(s, buflen); } -/* Test PyUnicode_AsUTF8AndSize() */ -static PyObject * -unicode_asutf8andsize(PyObject *self, PyObject *args) -{ - PyObject *unicode; - Py_ssize_t buflen; - const char *s; - Py_ssize_t size = UNINITIALIZED_SIZE; - - if (!PyArg_ParseTuple(args, "On", &unicode, &buflen)) - return NULL; - - NULLABLE(unicode); - s = PyUnicode_AsUTF8AndSize(unicode, &size); - if (s == NULL) { - assert(size == -1); - return NULL; - } - - return Py_BuildValue("(y#n)", s, buflen, size); -} - -/* Test PyUnicode_AsUTF8AndSize() with NULL as the size address */ -static PyObject * -unicode_asutf8andsize_null(PyObject *self, PyObject *args) -{ - PyObject *unicode; - Py_ssize_t buflen; - const char *s; - - if (!PyArg_ParseTuple(args, "On", &unicode, &buflen)) - return NULL; - - NULLABLE(unicode); - s = PyUnicode_AsUTF8AndSize(unicode, NULL); - if (s == NULL) - return NULL; - - return PyBytes_FromStringAndSize(s, buflen); -} - -/* Test PyUnicode_GetDefaultEncoding() */ -static PyObject * -unicode_getdefaultencoding(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - const char *s = PyUnicode_GetDefaultEncoding(); - if (s == NULL) - return NULL; - - return PyBytes_FromString(s); -} - -/* Test PyUnicode_Decode() */ -static PyObject * -unicode_decode(PyObject *self, PyObject *args) -{ - const char *s; - Py_ssize_t size; - const char *encoding; - const char *errors = NULL; - - if (!PyArg_ParseTuple(args, "y#z|z", &s, &size, &encoding, &errors)) - return NULL; - - return PyUnicode_Decode(s, size, encoding, errors); -} - -/* Test PyUnicode_AsEncodedString() */ -static PyObject * -unicode_asencodedstring(PyObject *self, PyObject *args) -{ - PyObject *unicode; - const char *encoding; - const char *errors = NULL; - - if (!PyArg_ParseTuple(args, "Oz|z", &unicode, &encoding, &errors)) - return NULL; - - NULLABLE(unicode); - return PyUnicode_AsEncodedString(unicode, encoding, errors); -} - -/* Test PyUnicode_BuildEncodingMap() */ -static PyObject * -unicode_buildencodingmap(PyObject *self, PyObject *arg) -{ - NULLABLE(arg); - return PyUnicode_BuildEncodingMap(arg); -} - -/* Test PyUnicode_DecodeUTF7() */ -static PyObject * -unicode_decodeutf7(PyObject *self, PyObject *args) -{ - const char *data; - Py_ssize_t size; - const char *errors = NULL; - - if (!PyArg_ParseTuple(args, "y#|z", &data, &size, &errors)) - return NULL; - - return PyUnicode_DecodeUTF7(data, size, errors); -} - -/* Test PyUnicode_DecodeUTF7Stateful() */ -static PyObject * -unicode_decodeutf7stateful(PyObject *self, PyObject *args) -{ - const char *data; - Py_ssize_t size; - const char *errors = NULL; - Py_ssize_t consumed = UNINITIALIZED_SIZE; - PyObject *result; - - if (!PyArg_ParseTuple(args, "y#|z", &data, &size, &errors)) - return NULL; - - result = PyUnicode_DecodeUTF7Stateful(data, size, errors, &consumed); - if (!result) { - assert(consumed == UNINITIALIZED_SIZE); - return NULL; - } - return Py_BuildValue("(Nn)", result, consumed); -} - -/* Test PyUnicode_DecodeUTF8() */ -static PyObject * 
-unicode_decodeutf8(PyObject *self, PyObject *args) -{ - const char *data; - Py_ssize_t size; - const char *errors = NULL; - - if (!PyArg_ParseTuple(args, "y#|z", &data, &size, &errors)) - return NULL; - - return PyUnicode_DecodeUTF8(data, size, errors); -} - -/* Test PyUnicode_DecodeUTF8Stateful() */ -static PyObject * -unicode_decodeutf8stateful(PyObject *self, PyObject *args) -{ - const char *data; - Py_ssize_t size; - const char *errors = NULL; - Py_ssize_t consumed = UNINITIALIZED_SIZE; - PyObject *result; - - if (!PyArg_ParseTuple(args, "y#|z", &data, &size, &errors)) - return NULL; - - result = PyUnicode_DecodeUTF8Stateful(data, size, errors, &consumed); - if (!result) { - assert(consumed == UNINITIALIZED_SIZE); - return NULL; - } - return Py_BuildValue("(Nn)", result, consumed); -} - -/* Test PyUnicode_AsUTF8String() */ -static PyObject * -unicode_asutf8string(PyObject *self, PyObject *arg) -{ - NULLABLE(arg); - return PyUnicode_AsUTF8String(arg); -} - -/* Test PyUnicode_DecodeUTF32() */ -static PyObject * -unicode_decodeutf32(PyObject *self, PyObject *args) -{ - const char *data; - Py_ssize_t size; - const char *errors = NULL; - int byteorder = UNINITIALIZED_INT; - PyObject *result; - - if (!PyArg_ParseTuple(args, "iy#|z", &byteorder, &data, &size, &errors)) - return NULL; - - result = PyUnicode_DecodeUTF32(data, size, errors, &byteorder); - if (!result) { - return NULL; - } - return Py_BuildValue("(iN)", byteorder, result); -} - -/* Test PyUnicode_DecodeUTF32Stateful() */ -static PyObject * -unicode_decodeutf32stateful(PyObject *self, PyObject *args) -{ - const char *data; - Py_ssize_t size; - const char *errors = NULL; - int byteorder = UNINITIALIZED_INT; - Py_ssize_t consumed = UNINITIALIZED_SIZE; - PyObject *result; - - if (!PyArg_ParseTuple(args, "iy#|z", &byteorder, &data, &size, &errors)) - return NULL; - - result = PyUnicode_DecodeUTF32Stateful(data, size, errors, &byteorder, &consumed); - if (!result) { - assert(consumed == UNINITIALIZED_SIZE); - return NULL; - } - return Py_BuildValue("(iNn)", byteorder, result, consumed); -} - -/* Test PyUnicode_AsUTF32String() */ -static PyObject * -unicode_asutf32string(PyObject *self, PyObject *arg) -{ - NULLABLE(arg); - return PyUnicode_AsUTF32String(arg); -} - -/* Test PyUnicode_DecodeUTF16() */ -static PyObject * -unicode_decodeutf16(PyObject *self, PyObject *args) -{ - const char *data; - Py_ssize_t size; - const char *errors = NULL; - int byteorder = UNINITIALIZED_INT; - PyObject *result; - - if (!PyArg_ParseTuple(args, "iy#|z", &byteorder, &data, &size, &errors)) - return NULL; - - result = PyUnicode_DecodeUTF16(data, size, errors, &byteorder); - if (!result) { - return NULL; - } - return Py_BuildValue("(iN)", byteorder, result); -} - -/* Test PyUnicode_DecodeUTF16Stateful() */ -static PyObject * -unicode_decodeutf16stateful(PyObject *self, PyObject *args) -{ - const char *data; - Py_ssize_t size; - const char *errors = NULL; - int byteorder = UNINITIALIZED_INT; - Py_ssize_t consumed = UNINITIALIZED_SIZE; - PyObject *result; - - if (!PyArg_ParseTuple(args, "iy#|z", &byteorder, &data, &size, &errors)) - return NULL; - - result = PyUnicode_DecodeUTF16Stateful(data, size, errors, &byteorder, &consumed); - if (!result) { - assert(consumed == UNINITIALIZED_SIZE); - return NULL; - } - return Py_BuildValue("(iNn)", byteorder, result, consumed); -} - -/* Test PyUnicode_AsUTF16String() */ -static PyObject * -unicode_asutf16string(PyObject *self, PyObject *arg) -{ - NULLABLE(arg); - return PyUnicode_AsUTF16String(arg); -} - -/* Test 
PyUnicode_DecodeUnicodeEscape() */ -static PyObject * -unicode_decodeunicodeescape(PyObject *self, PyObject *args) -{ - const char *data; - Py_ssize_t size; - const char *errors = NULL; - - if (!PyArg_ParseTuple(args, "y#|z", &data, &size, &errors)) - return NULL; - - return PyUnicode_DecodeUnicodeEscape(data, size, errors); -} - -/* Test PyUnicode_AsUnicodeEscapeString() */ -static PyObject * -unicode_asunicodeescapestring(PyObject *self, PyObject *arg) -{ - NULLABLE(arg); - return PyUnicode_AsUnicodeEscapeString(arg); -} - -static PyObject * -unicode_decoderawunicodeescape(PyObject *self, PyObject *args) -{ - const char *data; - Py_ssize_t size; - const char *errors = NULL; - - if (!PyArg_ParseTuple(args, "y#|z", &data, &size, &errors)) - return NULL; - - return PyUnicode_DecodeRawUnicodeEscape(data, size, errors); -} - -/* Test PyUnicode_AsRawUnicodeEscapeString() */ -static PyObject * -unicode_asrawunicodeescapestring(PyObject *self, PyObject *arg) -{ - NULLABLE(arg); - return PyUnicode_AsRawUnicodeEscapeString(arg); -} - -static PyObject * -unicode_decodelatin1(PyObject *self, PyObject *args) -{ - const char *data; - Py_ssize_t size; - const char *errors = NULL; - - if (!PyArg_ParseTuple(args, "y#|z", &data, &size, &errors)) - return NULL; - - return PyUnicode_DecodeLatin1(data, size, errors); -} - -/* Test PyUnicode_AsLatin1String() */ -static PyObject * -unicode_aslatin1string(PyObject *self, PyObject *arg) -{ - NULLABLE(arg); - return PyUnicode_AsLatin1String(arg); -} - -/* Test PyUnicode_DecodeASCII() */ -static PyObject * -unicode_decodeascii(PyObject *self, PyObject *args) -{ - const char *data; - Py_ssize_t size; - const char *errors = NULL; - - if (!PyArg_ParseTuple(args, "y#|z", &data, &size, &errors)) - return NULL; - - return PyUnicode_DecodeASCII(data, size, errors); -} - -/* Test PyUnicode_AsASCIIString() */ -static PyObject * -unicode_asasciistring(PyObject *self, PyObject *arg) -{ - NULLABLE(arg); - return PyUnicode_AsASCIIString(arg); -} - -/* Test PyUnicode_DecodeCharmap() */ -static PyObject * -unicode_decodecharmap(PyObject *self, PyObject *args) -{ - const char *data; - Py_ssize_t size; - PyObject *mapping; - const char *errors = NULL; - - if (!PyArg_ParseTuple(args, "y#O|z", &data, &size, &mapping, &errors)) - return NULL; - - NULLABLE(mapping); - return PyUnicode_DecodeCharmap(data, size, mapping, errors); -} - -/* Test PyUnicode_AsCharmapString() */ -static PyObject * -unicode_ascharmapstring(PyObject *self, PyObject *args) -{ - PyObject *unicode; - PyObject *mapping; - - if (!PyArg_ParseTuple(args, "OO", &unicode, &mapping)) - return NULL; - - NULLABLE(unicode); - NULLABLE(mapping); - return PyUnicode_AsCharmapString(unicode, mapping); -} - -#ifdef MS_WINDOWS - -/* Test PyUnicode_DecodeMBCS() */ -static PyObject * -unicode_decodembcs(PyObject *self, PyObject *args) -{ - const char *data; - Py_ssize_t size; - const char *errors = NULL; - - if (!PyArg_ParseTuple(args, "y#|z", &data, &size, &errors)) - return NULL; - - return PyUnicode_DecodeMBCS(data, size, errors); -} - -/* Test PyUnicode_DecodeMBCSStateful() */ -static PyObject * -unicode_decodembcsstateful(PyObject *self, PyObject *args) -{ - const char *data; - Py_ssize_t size; - const char *errors = NULL; - Py_ssize_t consumed = UNINITIALIZED_SIZE; - PyObject *result; - - if (!PyArg_ParseTuple(args, "y#|z", &data, &size, &errors)) - return NULL; - - result = PyUnicode_DecodeMBCSStateful(data, size, errors, &consumed); - if (!result) { - assert(consumed == UNINITIALIZED_SIZE); - return NULL; - } - return 
Py_BuildValue("(Nn)", result, consumed); -} - -/* Test PyUnicode_DecodeCodePageStateful() */ -static PyObject * -unicode_decodecodepagestateful(PyObject *self, PyObject *args) -{ - int code_page; - const char *data; - Py_ssize_t size; - const char *errors = NULL; - Py_ssize_t consumed = UNINITIALIZED_SIZE; - PyObject *result; - - if (!PyArg_ParseTuple(args, "iy#|z", &code_page, &data, &size, &errors)) - return NULL; - - result = PyUnicode_DecodeCodePageStateful(code_page, data, size, errors, &consumed); - if (!result) { - assert(consumed == UNINITIALIZED_SIZE); - return NULL; - } - return Py_BuildValue("(Nn)", result, consumed); -} - -/* Test PyUnicode_AsMBCSString() */ -static PyObject * -unicode_asmbcsstring(PyObject *self, PyObject *arg) -{ - NULLABLE(arg); - return PyUnicode_AsMBCSString(arg); -} - -/* Test PyUnicode_EncodeCodePage() */ -static PyObject * -unicode_encodecodepage(PyObject *self, PyObject *args) -{ - int code_page; - PyObject *unicode; - const char *errors; - - if (!PyArg_ParseTuple(args, "iO|z", &code_page, &unicode, &errors)) - return NULL; - - NULLABLE(unicode); - return PyUnicode_EncodeCodePage(code_page, unicode, errors); -} - -#endif /* MS_WINDOWS */ - -/* Test PyUnicode_DecodeLocaleAndSize() */ -static PyObject * -unicode_decodelocaleandsize(PyObject *self, PyObject *args) -{ - const char *data; - Py_ssize_t size; - const char *errors; - - if (!PyArg_ParseTuple(args, "y#|z", &data, &size, &errors)) - return NULL; - - return PyUnicode_DecodeLocaleAndSize(data, size, errors); -} - -/* Test PyUnicode_DecodeLocale() */ -static PyObject * -unicode_decodelocale(PyObject *self, PyObject *args) -{ - const char *data; - Py_ssize_t size; - const char *errors; - - if (!PyArg_ParseTuple(args, "y#|z", &data, &size, &errors)) - return NULL; - - return PyUnicode_DecodeLocale(data, errors); -} - -/* Test PyUnicode_EncodeLocale() */ -static PyObject * -unicode_encodelocale(PyObject *self, PyObject *args) -{ - PyObject *unicode; - const char *errors; - - if (!PyArg_ParseTuple(args, "O|z", &unicode, &errors)) - return NULL; - - NULLABLE(unicode); - return PyUnicode_EncodeLocale(unicode, errors); -} - -/* Test PyUnicode_DecodeFSDefault() */ -static PyObject * -unicode_decodefsdefault(PyObject *self, PyObject *args) -{ - const char *data; - Py_ssize_t size; - - if (!PyArg_ParseTuple(args, "y#", &data, &size)) - return NULL; - - return PyUnicode_DecodeFSDefault(data); -} - -/* Test PyUnicode_DecodeFSDefaultAndSize() */ -static PyObject * -unicode_decodefsdefaultandsize(PyObject *self, PyObject *args) -{ - const char *data; - Py_ssize_t size; - - if (!PyArg_ParseTuple(args, "y#|n", &data, &size, &size)) - return NULL; - - return PyUnicode_DecodeFSDefaultAndSize(data, size); -} - -/* Test PyUnicode_EncodeFSDefault() */ -static PyObject * -unicode_encodefsdefault(PyObject *self, PyObject *arg) -{ - NULLABLE(arg); - return PyUnicode_EncodeFSDefault(arg); -} - -/* Test PyUnicode_Concat() */ -static PyObject * -unicode_concat(PyObject *self, PyObject *args) -{ - PyObject *left; - PyObject *right; - - if (!PyArg_ParseTuple(args, "OO", &left, &right)) - return NULL; - - NULLABLE(left); - NULLABLE(right); - return PyUnicode_Concat(left, right); -} - -/* Test PyUnicode_Split() */ -static PyObject * -unicode_split(PyObject *self, PyObject *args) -{ - PyObject *s; - PyObject *sep; - Py_ssize_t maxsplit = -1; - - if (!PyArg_ParseTuple(args, "OO|n", &s, &sep, &maxsplit)) - return NULL; - - NULLABLE(s); - NULLABLE(sep); - return PyUnicode_Split(s, sep, maxsplit); -} - -/* Test PyUnicode_RSplit() */ 
-static PyObject * -unicode_rsplit(PyObject *self, PyObject *args) -{ - PyObject *s; - PyObject *sep; - Py_ssize_t maxsplit = -1; - - if (!PyArg_ParseTuple(args, "OO|n", &s, &sep, &maxsplit)) - return NULL; - - NULLABLE(s); - NULLABLE(sep); - return PyUnicode_RSplit(s, sep, maxsplit); -} - -/* Test PyUnicode_Splitlines() */ -static PyObject * -unicode_splitlines(PyObject *self, PyObject *args) -{ - PyObject *s; - int keepends = 0; - - if (!PyArg_ParseTuple(args, "O|i", &s, &keepends)) - return NULL; - - NULLABLE(s); - return PyUnicode_Splitlines(s, keepends); -} - -/* Test PyUnicode_Partition() */ -static PyObject * -unicode_partition(PyObject *self, PyObject *args) -{ - PyObject *s; - PyObject *sep; - - if (!PyArg_ParseTuple(args, "OO", &s, &sep)) - return NULL; - - NULLABLE(s); - NULLABLE(sep); - return PyUnicode_Partition(s, sep); -} - -/* Test PyUnicode_RPartition() */ -static PyObject * -unicode_rpartition(PyObject *self, PyObject *args) -{ - PyObject *s; - PyObject *sep; - - if (!PyArg_ParseTuple(args, "OO", &s, &sep)) - return NULL; - - NULLABLE(s); - NULLABLE(sep); - return PyUnicode_RPartition(s, sep); -} - -/* Test PyUnicode_Translate() */ -static PyObject * -unicode_translate(PyObject *self, PyObject *args) -{ - PyObject *obj; - PyObject *table; - const char *errors = NULL; - - if (!PyArg_ParseTuple(args, "OO|z", &obj, &table, &errors)) - return NULL; - - NULLABLE(obj); - NULLABLE(table); - return PyUnicode_Translate(obj, table, errors); -} - -/* Test PyUnicode_Join() */ -static PyObject * -unicode_join(PyObject *self, PyObject *args) -{ - PyObject *sep; - PyObject *seq; - - if (!PyArg_ParseTuple(args, "OO", &sep, &seq)) - return NULL; - - NULLABLE(sep); - NULLABLE(seq); - return PyUnicode_Join(sep, seq); -} - -/* Test PyUnicode_Count() */ -static PyObject * -unicode_count(PyObject *self, PyObject *args) -{ - PyObject *str; - PyObject *substr; - Py_ssize_t start; - Py_ssize_t end; - - if (!PyArg_ParseTuple(args, "OOnn", &str, &substr, &start, &end)) - return NULL; - - NULLABLE(str); - NULLABLE(substr); - RETURN_SIZE(PyUnicode_Count(str, substr, start, end)); -} - -/* Test PyUnicode_Find() */ -static PyObject * -unicode_find(PyObject *self, PyObject *args) -{ - PyObject *str; - PyObject *substr; - Py_ssize_t start; - Py_ssize_t end; - int direction; - Py_ssize_t result; - - if (!PyArg_ParseTuple(args, "OOnni", &str, &substr, &start, &end, &direction)) - return NULL; - - NULLABLE(str); - NULLABLE(substr); - result = PyUnicode_Find(str, substr, start, end, direction); - if (result == -2) { - assert(PyErr_Occurred()); - return NULL; - } - assert(!PyErr_Occurred()); - return PyLong_FromSsize_t(result); -} - -/* Test PyUnicode_Tailmatch() */ -static PyObject * -unicode_tailmatch(PyObject *self, PyObject *args) -{ - PyObject *str; - PyObject *substr; - Py_ssize_t start; - Py_ssize_t end; - int direction; - - if (!PyArg_ParseTuple(args, "OOnni", &str, &substr, &start, &end, &direction)) - return NULL; - - NULLABLE(str); - NULLABLE(substr); - RETURN_SIZE(PyUnicode_Tailmatch(str, substr, start, end, direction)); -} - -/* Test PyUnicode_FindChar() */ -static PyObject * -unicode_findchar(PyObject *self, PyObject *args) -{ - PyObject *str; - int direction; - unsigned int ch; - Py_ssize_t result; - Py_ssize_t start, end; - - if (!PyArg_ParseTuple(args, "OInni:unicode_findchar", &str, &ch, - &start, &end, &direction)) { - return NULL; - } - NULLABLE(str); - result = PyUnicode_FindChar(str, (Py_UCS4)ch, start, end, direction); - if (result == -2) { - assert(PyErr_Occurred()); - return NULL; - } 
- assert(!PyErr_Occurred()); - return PyLong_FromSsize_t(result); -} - -/* Test PyUnicode_Replace() */ -static PyObject * -unicode_replace(PyObject *self, PyObject *args) -{ - PyObject *str; - PyObject *substr; - PyObject *replstr; - Py_ssize_t maxcount = -1; - - if (!PyArg_ParseTuple(args, "OOO|n", &str, &substr, &replstr, &maxcount)) - return NULL; - - NULLABLE(str); - NULLABLE(substr); - NULLABLE(replstr); - return PyUnicode_Replace(str, substr, replstr, maxcount); -} - -/* Test PyUnicode_Compare() */ -static PyObject * -unicode_compare(PyObject *self, PyObject *args) -{ - PyObject *left; - PyObject *right; - int result; - - if (!PyArg_ParseTuple(args, "OO", &left, &right)) - return NULL; - - NULLABLE(left); - NULLABLE(right); - result = PyUnicode_Compare(left, right); - if (result == -1 && PyErr_Occurred()) { - return NULL; - } - assert(!PyErr_Occurred()); - return PyLong_FromLong(result); -} - -/* Test PyUnicode_CompareWithASCIIString() */ -static PyObject * -unicode_comparewithasciistring(PyObject *self, PyObject *args) -{ - PyObject *left; - const char *right = NULL; - Py_ssize_t right_len; - int result; - - if (!PyArg_ParseTuple(args, "O|y#", &left, &right, &right_len)) - return NULL; - - NULLABLE(left); - result = PyUnicode_CompareWithASCIIString(left, right); - if (result == -1 && PyErr_Occurred()) { - return NULL; - } - return PyLong_FromLong(result); -} - -/* Test PyUnicode_EqualToUTF8() */ -static PyObject * -unicode_equaltoutf8(PyObject *self, PyObject *args) -{ - PyObject *left; - const char *right = NULL; - Py_ssize_t right_len; - int result; - - if (!PyArg_ParseTuple(args, "Oz#", &left, &right, &right_len)) { - return NULL; - } - - NULLABLE(left); - result = PyUnicode_EqualToUTF8(left, right); - assert(!PyErr_Occurred()); - return PyLong_FromLong(result); -} - -/* Test PyUnicode_EqualToUTF8AndSize() */ -static PyObject * -unicode_equaltoutf8andsize(PyObject *self, PyObject *args) -{ - PyObject *left; - const char *right = NULL; - Py_ssize_t right_len; - Py_ssize_t size = -100; - int result; - - if (!PyArg_ParseTuple(args, "Oz#|n", &left, &right, &right_len, &size)) { - return NULL; - } - - NULLABLE(left); - if (size == -100) { - size = right_len; - } - result = PyUnicode_EqualToUTF8AndSize(left, right, size); - assert(!PyErr_Occurred()); - return PyLong_FromLong(result); -} - -/* Test PyUnicode_RichCompare() */ -static PyObject * -unicode_richcompare(PyObject *self, PyObject *args) -{ - PyObject *left; - PyObject *right; - int op; - - if (!PyArg_ParseTuple(args, "OOi", &left, &right, &op)) - return NULL; - - NULLABLE(left); - NULLABLE(right); - return PyUnicode_RichCompare(left, right, op); -} - -/* Test PyUnicode_Format() */ -static PyObject * -unicode_format(PyObject *self, PyObject *args) -{ - PyObject *format; - PyObject *fargs; - - if (!PyArg_ParseTuple(args, "OO", &format, &fargs)) - return NULL; - - NULLABLE(format); - NULLABLE(fargs); - return PyUnicode_Format(format, fargs); -} - -/* Test PyUnicode_Contains() */ -static PyObject * -unicode_contains(PyObject *self, PyObject *args) -{ - PyObject *container; - PyObject *element; - - if (!PyArg_ParseTuple(args, "OO", &container, &element)) - return NULL; - - NULLABLE(container); - NULLABLE(element); - RETURN_INT(PyUnicode_Contains(container, element)); -} - -/* Test PyUnicode_IsIdentifier() */ -static PyObject * -unicode_isidentifier(PyObject *self, PyObject *arg) -{ - NULLABLE(arg); - RETURN_INT(PyUnicode_IsIdentifier(arg)); -} /* Test PyUnicode_CopyCharacters() */ static PyObject * @@ -1555,541 +220,14 @@ 
unicode_copycharacters(PyObject *self, PyObject *args) return Py_BuildValue("(Nn)", to_copy, copied); } -static int -check_raised_systemerror(PyObject *result, char* msg) -{ - if (result) { - // no exception - PyErr_Format(PyExc_AssertionError, - "SystemError not raised: %s", - msg); - return 0; - } - if (PyErr_ExceptionMatches(PyExc_SystemError)) { - // expected exception - PyErr_Clear(); - return 1; - } - // unexpected exception - return 0; -} - -static PyObject * -test_string_from_format(PyObject *self, PyObject *Py_UNUSED(ignored)) -{ - PyObject *result; - PyObject *unicode = PyUnicode_FromString("None"); - -#define CHECK_FORMAT_2(FORMAT, EXPECTED, ARG1, ARG2) \ - result = PyUnicode_FromFormat(FORMAT, ARG1, ARG2); \ - if (EXPECTED == NULL) { \ - if (!check_raised_systemerror(result, FORMAT)) { \ - goto Fail; \ - } \ - } \ - else if (result == NULL) \ - return NULL; \ - else if (PyUnicode_CompareWithASCIIString(result, EXPECTED) != 0) { \ - PyErr_Format(PyExc_AssertionError, \ - "test_string_from_format: failed at \"%s\" " \ - "expected \"%s\" got \"%s\"", \ - FORMAT, EXPECTED, PyUnicode_AsUTF8(result)); \ - goto Fail; \ - } \ - Py_XDECREF(result) - -#define CHECK_FORMAT_1(FORMAT, EXPECTED, ARG) \ - CHECK_FORMAT_2(FORMAT, EXPECTED, ARG, 0) - -#define CHECK_FORMAT_0(FORMAT, EXPECTED) \ - CHECK_FORMAT_2(FORMAT, EXPECTED, 0, 0) - - // Unrecognized - CHECK_FORMAT_2("%u %? %u", NULL, 1, 2); - - // "%%" (options are rejected) - CHECK_FORMAT_0( "%%", "%"); - CHECK_FORMAT_0( "%0%", NULL); - CHECK_FORMAT_0("%00%", NULL); - CHECK_FORMAT_0( "%2%", NULL); - CHECK_FORMAT_0("%02%", NULL); - CHECK_FORMAT_0("%.0%", NULL); - CHECK_FORMAT_0("%.2%", NULL); - - // "%c" - CHECK_FORMAT_1( "%c", "c", 'c'); - CHECK_FORMAT_1( "%0c", "c", 'c'); - CHECK_FORMAT_1("%00c", "c", 'c'); - CHECK_FORMAT_1( "%2c", NULL, 'c'); - CHECK_FORMAT_1("%02c", NULL, 'c'); - CHECK_FORMAT_1("%.0c", NULL, 'c'); - CHECK_FORMAT_1("%.2c", NULL, 'c'); - - // Integers - CHECK_FORMAT_1("%d", "123", (int)123); - CHECK_FORMAT_1("%i", "123", (int)123); - CHECK_FORMAT_1("%u", "123", (unsigned int)123); - CHECK_FORMAT_1("%x", "7b", (unsigned int)123); - CHECK_FORMAT_1("%X", "7B", (unsigned int)123); - CHECK_FORMAT_1("%o", "173", (unsigned int)123); - CHECK_FORMAT_1("%ld", "123", (long)123); - CHECK_FORMAT_1("%li", "123", (long)123); - CHECK_FORMAT_1("%lu", "123", (unsigned long)123); - CHECK_FORMAT_1("%lx", "7b", (unsigned long)123); - CHECK_FORMAT_1("%lX", "7B", (unsigned long)123); - CHECK_FORMAT_1("%lo", "173", (unsigned long)123); - CHECK_FORMAT_1("%lld", "123", (long long)123); - CHECK_FORMAT_1("%lli", "123", (long long)123); - CHECK_FORMAT_1("%llu", "123", (unsigned long long)123); - CHECK_FORMAT_1("%llx", "7b", (unsigned long long)123); - CHECK_FORMAT_1("%llX", "7B", (unsigned long long)123); - CHECK_FORMAT_1("%llo", "173", (unsigned long long)123); - CHECK_FORMAT_1("%zd", "123", (Py_ssize_t)123); - CHECK_FORMAT_1("%zi", "123", (Py_ssize_t)123); - CHECK_FORMAT_1("%zu", "123", (size_t)123); - CHECK_FORMAT_1("%zx", "7b", (size_t)123); - CHECK_FORMAT_1("%zX", "7B", (size_t)123); - CHECK_FORMAT_1("%zo", "173", (size_t)123); - CHECK_FORMAT_1("%td", "123", (ptrdiff_t)123); - CHECK_FORMAT_1("%ti", "123", (ptrdiff_t)123); - CHECK_FORMAT_1("%tu", "123", (ptrdiff_t)123); - CHECK_FORMAT_1("%tx", "7b", (ptrdiff_t)123); - CHECK_FORMAT_1("%tX", "7B", (ptrdiff_t)123); - CHECK_FORMAT_1("%to", "173", (ptrdiff_t)123); - CHECK_FORMAT_1("%jd", "123", (intmax_t)123); - CHECK_FORMAT_1("%ji", "123", (intmax_t)123); - CHECK_FORMAT_1("%ju", "123", 
(uintmax_t)123); - CHECK_FORMAT_1("%jx", "7b", (uintmax_t)123); - CHECK_FORMAT_1("%jX", "7B", (uintmax_t)123); - CHECK_FORMAT_1("%jo", "173", (uintmax_t)123); - - CHECK_FORMAT_1("%d", "-123", (int)-123); - CHECK_FORMAT_1("%i", "-123", (int)-123); - CHECK_FORMAT_1("%ld", "-123", (long)-123); - CHECK_FORMAT_1("%li", "-123", (long)-123); - CHECK_FORMAT_1("%lld", "-123", (long long)-123); - CHECK_FORMAT_1("%lli", "-123", (long long)-123); - CHECK_FORMAT_1("%zd", "-123", (Py_ssize_t)-123); - CHECK_FORMAT_1("%zi", "-123", (Py_ssize_t)-123); - CHECK_FORMAT_1("%td", "-123", (ptrdiff_t)-123); - CHECK_FORMAT_1("%ti", "-123", (ptrdiff_t)-123); - CHECK_FORMAT_1("%jd", "-123", (intmax_t)-123); - CHECK_FORMAT_1("%ji", "-123", (intmax_t)-123); - - // Integers: width < length - CHECK_FORMAT_1("%1d", "123", (int)123); - CHECK_FORMAT_1("%1i", "123", (int)123); - CHECK_FORMAT_1("%1u", "123", (unsigned int)123); - CHECK_FORMAT_1("%1ld", "123", (long)123); - CHECK_FORMAT_1("%1li", "123", (long)123); - CHECK_FORMAT_1("%1lu", "123", (unsigned long)123); - CHECK_FORMAT_1("%1lld", "123", (long long)123); - CHECK_FORMAT_1("%1lli", "123", (long long)123); - CHECK_FORMAT_1("%1llu", "123", (unsigned long long)123); - CHECK_FORMAT_1("%1zd", "123", (Py_ssize_t)123); - CHECK_FORMAT_1("%1zi", "123", (Py_ssize_t)123); - CHECK_FORMAT_1("%1zu", "123", (size_t)123); - CHECK_FORMAT_1("%1x", "7b", (int)123); - - CHECK_FORMAT_1("%1d", "-123", (int)-123); - CHECK_FORMAT_1("%1i", "-123", (int)-123); - CHECK_FORMAT_1("%1ld", "-123", (long)-123); - CHECK_FORMAT_1("%1li", "-123", (long)-123); - CHECK_FORMAT_1("%1lld", "-123", (long long)-123); - CHECK_FORMAT_1("%1lli", "-123", (long long)-123); - CHECK_FORMAT_1("%1zd", "-123", (Py_ssize_t)-123); - CHECK_FORMAT_1("%1zi", "-123", (Py_ssize_t)-123); - - // Integers: width > length - CHECK_FORMAT_1("%5d", " 123", (int)123); - CHECK_FORMAT_1("%5i", " 123", (int)123); - CHECK_FORMAT_1("%5u", " 123", (unsigned int)123); - CHECK_FORMAT_1("%5ld", " 123", (long)123); - CHECK_FORMAT_1("%5li", " 123", (long)123); - CHECK_FORMAT_1("%5lu", " 123", (unsigned long)123); - CHECK_FORMAT_1("%5lld", " 123", (long long)123); - CHECK_FORMAT_1("%5lli", " 123", (long long)123); - CHECK_FORMAT_1("%5llu", " 123", (unsigned long long)123); - CHECK_FORMAT_1("%5zd", " 123", (Py_ssize_t)123); - CHECK_FORMAT_1("%5zi", " 123", (Py_ssize_t)123); - CHECK_FORMAT_1("%5zu", " 123", (size_t)123); - CHECK_FORMAT_1("%5x", " 7b", (int)123); - - CHECK_FORMAT_1("%5d", " -123", (int)-123); - CHECK_FORMAT_1("%5i", " -123", (int)-123); - CHECK_FORMAT_1("%5ld", " -123", (long)-123); - CHECK_FORMAT_1("%5li", " -123", (long)-123); - CHECK_FORMAT_1("%5lld", " -123", (long long)-123); - CHECK_FORMAT_1("%5lli", " -123", (long long)-123); - CHECK_FORMAT_1("%5zd", " -123", (Py_ssize_t)-123); - CHECK_FORMAT_1("%5zi", " -123", (Py_ssize_t)-123); - - // Integers: width > length, 0-flag - CHECK_FORMAT_1("%05d", "00123", (int)123); - CHECK_FORMAT_1("%05i", "00123", (int)123); - CHECK_FORMAT_1("%05u", "00123", (unsigned int)123); - CHECK_FORMAT_1("%05ld", "00123", (long)123); - CHECK_FORMAT_1("%05li", "00123", (long)123); - CHECK_FORMAT_1("%05lu", "00123", (unsigned long)123); - CHECK_FORMAT_1("%05lld", "00123", (long long)123); - CHECK_FORMAT_1("%05lli", "00123", (long long)123); - CHECK_FORMAT_1("%05llu", "00123", (unsigned long long)123); - CHECK_FORMAT_1("%05zd", "00123", (Py_ssize_t)123); - CHECK_FORMAT_1("%05zi", "00123", (Py_ssize_t)123); - CHECK_FORMAT_1("%05zu", "00123", (size_t)123); - CHECK_FORMAT_1("%05x", "0007b", (int)123); - - 
CHECK_FORMAT_1("%05d", "-0123", (int)-123); - CHECK_FORMAT_1("%05i", "-0123", (int)-123); - CHECK_FORMAT_1("%05ld", "-0123", (long)-123); - CHECK_FORMAT_1("%05li", "-0123", (long)-123); - CHECK_FORMAT_1("%05lld", "-0123", (long long)-123); - CHECK_FORMAT_1("%05lli", "-0123", (long long)-123); - CHECK_FORMAT_1("%05zd", "-0123", (Py_ssize_t)-123); - CHECK_FORMAT_1("%05zi", "-0123", (Py_ssize_t)-123); - - // Integers: precision < length - CHECK_FORMAT_1("%.1d", "123", (int)123); - CHECK_FORMAT_1("%.1i", "123", (int)123); - CHECK_FORMAT_1("%.1u", "123", (unsigned int)123); - CHECK_FORMAT_1("%.1ld", "123", (long)123); - CHECK_FORMAT_1("%.1li", "123", (long)123); - CHECK_FORMAT_1("%.1lu", "123", (unsigned long)123); - CHECK_FORMAT_1("%.1lld", "123", (long long)123); - CHECK_FORMAT_1("%.1lli", "123", (long long)123); - CHECK_FORMAT_1("%.1llu", "123", (unsigned long long)123); - CHECK_FORMAT_1("%.1zd", "123", (Py_ssize_t)123); - CHECK_FORMAT_1("%.1zi", "123", (Py_ssize_t)123); - CHECK_FORMAT_1("%.1zu", "123", (size_t)123); - CHECK_FORMAT_1("%.1x", "7b", (int)123); - - CHECK_FORMAT_1("%.1d", "-123", (int)-123); - CHECK_FORMAT_1("%.1i", "-123", (int)-123); - CHECK_FORMAT_1("%.1ld", "-123", (long)-123); - CHECK_FORMAT_1("%.1li", "-123", (long)-123); - CHECK_FORMAT_1("%.1lld", "-123", (long long)-123); - CHECK_FORMAT_1("%.1lli", "-123", (long long)-123); - CHECK_FORMAT_1("%.1zd", "-123", (Py_ssize_t)-123); - CHECK_FORMAT_1("%.1zi", "-123", (Py_ssize_t)-123); - - // Integers: precision > length - CHECK_FORMAT_1("%.5d", "00123", (int)123); - CHECK_FORMAT_1("%.5i", "00123", (int)123); - CHECK_FORMAT_1("%.5u", "00123", (unsigned int)123); - CHECK_FORMAT_1("%.5ld", "00123", (long)123); - CHECK_FORMAT_1("%.5li", "00123", (long)123); - CHECK_FORMAT_1("%.5lu", "00123", (unsigned long)123); - CHECK_FORMAT_1("%.5lld", "00123", (long long)123); - CHECK_FORMAT_1("%.5lli", "00123", (long long)123); - CHECK_FORMAT_1("%.5llu", "00123", (unsigned long long)123); - CHECK_FORMAT_1("%.5zd", "00123", (Py_ssize_t)123); - CHECK_FORMAT_1("%.5zi", "00123", (Py_ssize_t)123); - CHECK_FORMAT_1("%.5zu", "00123", (size_t)123); - CHECK_FORMAT_1("%.5x", "0007b", (int)123); - - CHECK_FORMAT_1("%.5d", "-00123", (int)-123); - CHECK_FORMAT_1("%.5i", "-00123", (int)-123); - CHECK_FORMAT_1("%.5ld", "-00123", (long)-123); - CHECK_FORMAT_1("%.5li", "-00123", (long)-123); - CHECK_FORMAT_1("%.5lld", "-00123", (long long)-123); - CHECK_FORMAT_1("%.5lli", "-00123", (long long)-123); - CHECK_FORMAT_1("%.5zd", "-00123", (Py_ssize_t)-123); - CHECK_FORMAT_1("%.5zi", "-00123", (Py_ssize_t)-123); - - // Integers: width > precision > length - CHECK_FORMAT_1("%7.5d", " 00123", (int)123); - CHECK_FORMAT_1("%7.5i", " 00123", (int)123); - CHECK_FORMAT_1("%7.5u", " 00123", (unsigned int)123); - CHECK_FORMAT_1("%7.5ld", " 00123", (long)123); - CHECK_FORMAT_1("%7.5li", " 00123", (long)123); - CHECK_FORMAT_1("%7.5lu", " 00123", (unsigned long)123); - CHECK_FORMAT_1("%7.5lld", " 00123", (long long)123); - CHECK_FORMAT_1("%7.5lli", " 00123", (long long)123); - CHECK_FORMAT_1("%7.5llu", " 00123", (unsigned long long)123); - CHECK_FORMAT_1("%7.5zd", " 00123", (Py_ssize_t)123); - CHECK_FORMAT_1("%7.5zi", " 00123", (Py_ssize_t)123); - CHECK_FORMAT_1("%7.5zu", " 00123", (size_t)123); - CHECK_FORMAT_1("%7.5x", " 0007b", (int)123); - - CHECK_FORMAT_1("%7.5d", " -00123", (int)-123); - CHECK_FORMAT_1("%7.5i", " -00123", (int)-123); - CHECK_FORMAT_1("%7.5ld", " -00123", (long)-123); - CHECK_FORMAT_1("%7.5li", " -00123", (long)-123); - CHECK_FORMAT_1("%7.5lld", " 
-00123", (long long)-123); - CHECK_FORMAT_1("%7.5lli", " -00123", (long long)-123); - CHECK_FORMAT_1("%7.5zd", " -00123", (Py_ssize_t)-123); - CHECK_FORMAT_1("%7.5zi", " -00123", (Py_ssize_t)-123); - - // Integers: width > precision > length, 0-flag - CHECK_FORMAT_1("%07.5d", "0000123", (int)123); - CHECK_FORMAT_1("%07.5i", "0000123", (int)123); - CHECK_FORMAT_1("%07.5u", "0000123", (unsigned int)123); - CHECK_FORMAT_1("%07.5ld", "0000123", (long)123); - CHECK_FORMAT_1("%07.5li", "0000123", (long)123); - CHECK_FORMAT_1("%07.5lu", "0000123", (unsigned long)123); - CHECK_FORMAT_1("%07.5lld", "0000123", (long long)123); - CHECK_FORMAT_1("%07.5lli", "0000123", (long long)123); - CHECK_FORMAT_1("%07.5llu", "0000123", (unsigned long long)123); - CHECK_FORMAT_1("%07.5zd", "0000123", (Py_ssize_t)123); - CHECK_FORMAT_1("%07.5zi", "0000123", (Py_ssize_t)123); - CHECK_FORMAT_1("%07.5zu", "0000123", (size_t)123); - CHECK_FORMAT_1("%07.5x", "000007b", (int)123); - - CHECK_FORMAT_1("%07.5d", "-000123", (int)-123); - CHECK_FORMAT_1("%07.5i", "-000123", (int)-123); - CHECK_FORMAT_1("%07.5ld", "-000123", (long)-123); - CHECK_FORMAT_1("%07.5li", "-000123", (long)-123); - CHECK_FORMAT_1("%07.5lld", "-000123", (long long)-123); - CHECK_FORMAT_1("%07.5lli", "-000123", (long long)-123); - CHECK_FORMAT_1("%07.5zd", "-000123", (Py_ssize_t)-123); - CHECK_FORMAT_1("%07.5zi", "-000123", (Py_ssize_t)-123); - - // Integers: precision > width > length - CHECK_FORMAT_1("%5.7d", "0000123", (int)123); - CHECK_FORMAT_1("%5.7i", "0000123", (int)123); - CHECK_FORMAT_1("%5.7u", "0000123", (unsigned int)123); - CHECK_FORMAT_1("%5.7ld", "0000123", (long)123); - CHECK_FORMAT_1("%5.7li", "0000123", (long)123); - CHECK_FORMAT_1("%5.7lu", "0000123", (unsigned long)123); - CHECK_FORMAT_1("%5.7lld", "0000123", (long long)123); - CHECK_FORMAT_1("%5.7lli", "0000123", (long long)123); - CHECK_FORMAT_1("%5.7llu", "0000123", (unsigned long long)123); - CHECK_FORMAT_1("%5.7zd", "0000123", (Py_ssize_t)123); - CHECK_FORMAT_1("%5.7zi", "0000123", (Py_ssize_t)123); - CHECK_FORMAT_1("%5.7zu", "0000123", (size_t)123); - CHECK_FORMAT_1("%5.7x", "000007b", (int)123); - - CHECK_FORMAT_1("%5.7d", "-0000123", (int)-123); - CHECK_FORMAT_1("%5.7i", "-0000123", (int)-123); - CHECK_FORMAT_1("%5.7ld", "-0000123", (long)-123); - CHECK_FORMAT_1("%5.7li", "-0000123", (long)-123); - CHECK_FORMAT_1("%5.7lld", "-0000123", (long long)-123); - CHECK_FORMAT_1("%5.7lli", "-0000123", (long long)-123); - CHECK_FORMAT_1("%5.7zd", "-0000123", (Py_ssize_t)-123); - CHECK_FORMAT_1("%5.7zi", "-0000123", (Py_ssize_t)-123); - - // Integers: precision > width > length, 0-flag - CHECK_FORMAT_1("%05.7d", "0000123", (int)123); - CHECK_FORMAT_1("%05.7i", "0000123", (int)123); - CHECK_FORMAT_1("%05.7u", "0000123", (unsigned int)123); - CHECK_FORMAT_1("%05.7ld", "0000123", (long)123); - CHECK_FORMAT_1("%05.7li", "0000123", (long)123); - CHECK_FORMAT_1("%05.7lu", "0000123", (unsigned long)123); - CHECK_FORMAT_1("%05.7lld", "0000123", (long long)123); - CHECK_FORMAT_1("%05.7lli", "0000123", (long long)123); - CHECK_FORMAT_1("%05.7llu", "0000123", (unsigned long long)123); - CHECK_FORMAT_1("%05.7zd", "0000123", (Py_ssize_t)123); - CHECK_FORMAT_1("%05.7zi", "0000123", (Py_ssize_t)123); - CHECK_FORMAT_1("%05.7zu", "0000123", (size_t)123); - CHECK_FORMAT_1("%05.7x", "000007b", (int)123); - - CHECK_FORMAT_1("%05.7d", "-0000123", (int)-123); - CHECK_FORMAT_1("%05.7i", "-0000123", (int)-123); - CHECK_FORMAT_1("%05.7ld", "-0000123", (long)-123); - CHECK_FORMAT_1("%05.7li", "-0000123", 
(long)-123); - CHECK_FORMAT_1("%05.7lld", "-0000123", (long long)-123); - CHECK_FORMAT_1("%05.7lli", "-0000123", (long long)-123); - CHECK_FORMAT_1("%05.7zd", "-0000123", (Py_ssize_t)-123); - CHECK_FORMAT_1("%05.7zi", "-0000123", (Py_ssize_t)-123); - - // Integers: precision = 0, arg = 0 (empty string in C) - CHECK_FORMAT_1("%.0d", "0", (int)0); - CHECK_FORMAT_1("%.0i", "0", (int)0); - CHECK_FORMAT_1("%.0u", "0", (unsigned int)0); - CHECK_FORMAT_1("%.0ld", "0", (long)0); - CHECK_FORMAT_1("%.0li", "0", (long)0); - CHECK_FORMAT_1("%.0lu", "0", (unsigned long)0); - CHECK_FORMAT_1("%.0lld", "0", (long long)0); - CHECK_FORMAT_1("%.0lli", "0", (long long)0); - CHECK_FORMAT_1("%.0llu", "0", (unsigned long long)0); - CHECK_FORMAT_1("%.0zd", "0", (Py_ssize_t)0); - CHECK_FORMAT_1("%.0zi", "0", (Py_ssize_t)0); - CHECK_FORMAT_1("%.0zu", "0", (size_t)0); - CHECK_FORMAT_1("%.0x", "0", (int)0); - - // Strings - CHECK_FORMAT_1("%s", "None", "None"); - CHECK_FORMAT_1("%ls", "None", L"None"); - CHECK_FORMAT_1("%U", "None", unicode); - CHECK_FORMAT_1("%A", "None", Py_None); - CHECK_FORMAT_1("%S", "None", Py_None); - CHECK_FORMAT_1("%R", "None", Py_None); - CHECK_FORMAT_2("%V", "None", unicode, "ignored"); - CHECK_FORMAT_2("%V", "None", NULL, "None"); - CHECK_FORMAT_2("%lV", "None", NULL, L"None"); - - // Strings: width < length - CHECK_FORMAT_1("%1s", "None", "None"); - CHECK_FORMAT_1("%1ls", "None", L"None"); - CHECK_FORMAT_1("%1U", "None", unicode); - CHECK_FORMAT_1("%1A", "None", Py_None); - CHECK_FORMAT_1("%1S", "None", Py_None); - CHECK_FORMAT_1("%1R", "None", Py_None); - CHECK_FORMAT_2("%1V", "None", unicode, "ignored"); - CHECK_FORMAT_2("%1V", "None", NULL, "None"); - CHECK_FORMAT_2("%1lV", "None", NULL, L"None"); - - // Strings: width > length - CHECK_FORMAT_1("%5s", " None", "None"); - CHECK_FORMAT_1("%5ls", " None", L"None"); - CHECK_FORMAT_1("%5U", " None", unicode); - CHECK_FORMAT_1("%5A", " None", Py_None); - CHECK_FORMAT_1("%5S", " None", Py_None); - CHECK_FORMAT_1("%5R", " None", Py_None); - CHECK_FORMAT_2("%5V", " None", unicode, "ignored"); - CHECK_FORMAT_2("%5V", " None", NULL, "None"); - CHECK_FORMAT_2("%5lV", " None", NULL, L"None"); - - // Strings: precision < length - CHECK_FORMAT_1("%.1s", "N", "None"); - CHECK_FORMAT_1("%.1ls", "N", L"None"); - CHECK_FORMAT_1("%.1U", "N", unicode); - CHECK_FORMAT_1("%.1A", "N", Py_None); - CHECK_FORMAT_1("%.1S", "N", Py_None); - CHECK_FORMAT_1("%.1R", "N", Py_None); - CHECK_FORMAT_2("%.1V", "N", unicode, "ignored"); - CHECK_FORMAT_2("%.1V", "N", NULL, "None"); - CHECK_FORMAT_2("%.1lV", "N", NULL, L"None"); - - // Strings: precision > length - CHECK_FORMAT_1("%.5s", "None", "None"); - CHECK_FORMAT_1("%.5ls", "None", L"None"); - CHECK_FORMAT_1("%.5U", "None", unicode); - CHECK_FORMAT_1("%.5A", "None", Py_None); - CHECK_FORMAT_1("%.5S", "None", Py_None); - CHECK_FORMAT_1("%.5R", "None", Py_None); - CHECK_FORMAT_2("%.5V", "None", unicode, "ignored"); - CHECK_FORMAT_2("%.5V", "None", NULL, "None"); - CHECK_FORMAT_2("%.5lV", "None", NULL, L"None"); - - // Strings: precision < length, width > length - CHECK_FORMAT_1("%5.1s", " N", "None"); - CHECK_FORMAT_1("%5.1ls"," N", L"None"); - CHECK_FORMAT_1("%5.1U", " N", unicode); - CHECK_FORMAT_1("%5.1A", " N", Py_None); - CHECK_FORMAT_1("%5.1S", " N", Py_None); - CHECK_FORMAT_1("%5.1R", " N", Py_None); - CHECK_FORMAT_2("%5.1V", " N", unicode, "ignored"); - CHECK_FORMAT_2("%5.1V", " N", NULL, "None"); - CHECK_FORMAT_2("%5.1lV"," N", NULL, L"None"); - - // Strings: width < length, precision > length - 
CHECK_FORMAT_1("%1.5s", "None", "None"); - CHECK_FORMAT_1("%1.5ls", "None", L"None"); - CHECK_FORMAT_1("%1.5U", "None", unicode); - CHECK_FORMAT_1("%1.5A", "None", Py_None); - CHECK_FORMAT_1("%1.5S", "None", Py_None); - CHECK_FORMAT_1("%1.5R", "None", Py_None); - CHECK_FORMAT_2("%1.5V", "None", unicode, "ignored"); - CHECK_FORMAT_2("%1.5V", "None", NULL, "None"); - CHECK_FORMAT_2("%1.5lV", "None", NULL, L"None"); - - Py_XDECREF(unicode); - Py_RETURN_NONE; - - Fail: - Py_XDECREF(result); - Py_XDECREF(unicode); - return NULL; - -#undef CHECK_FORMAT_2 -#undef CHECK_FORMAT_1 -#undef CHECK_FORMAT_0 -} static PyMethodDef TestMethods[] = { - {"codec_incrementalencoder", codec_incrementalencoder, METH_VARARGS}, - {"codec_incrementaldecoder", codec_incrementaldecoder, METH_VARARGS}, - {"test_unicode_compare_with_ascii", - test_unicode_compare_with_ascii, METH_NOARGS}, - {"test_string_from_format", test_string_from_format, METH_NOARGS}, - {"test_widechar", test_widechar, METH_NOARGS}, {"unicode_new", unicode_new, METH_VARARGS}, {"unicode_fill", unicode_fill, METH_VARARGS}, - {"unicode_writechar", unicode_writechar, METH_VARARGS}, - {"unicode_resize", unicode_resize, METH_VARARGS}, - {"unicode_append", unicode_append, METH_VARARGS}, - {"unicode_appendanddel", unicode_appendanddel, METH_VARARGS}, - {"unicode_fromstringandsize",unicode_fromstringandsize, METH_VARARGS}, - {"unicode_fromstring", unicode_fromstring, METH_O}, {"unicode_fromkindanddata", unicode_fromkindanddata, METH_VARARGS}, - {"unicode_substring", unicode_substring, METH_VARARGS}, - {"unicode_getlength", unicode_getlength, METH_O}, - {"unicode_readchar", unicode_readchar, METH_VARARGS}, - {"unicode_fromencodedobject",unicode_fromencodedobject, METH_VARARGS}, - {"unicode_fromobject", unicode_fromobject, METH_O}, - {"unicode_interninplace", unicode_interninplace, METH_O}, - {"unicode_internfromstring", unicode_internfromstring, METH_O}, - {"unicode_fromwidechar", unicode_fromwidechar, METH_VARARGS}, - {"unicode_aswidechar", unicode_aswidechar, METH_VARARGS}, - {"unicode_aswidechar_null", unicode_aswidechar_null, METH_VARARGS}, - {"unicode_aswidecharstring", unicode_aswidecharstring, METH_VARARGS}, - {"unicode_aswidecharstring_null",unicode_aswidecharstring_null,METH_VARARGS}, {"unicode_asucs4", unicode_asucs4, METH_VARARGS}, {"unicode_asucs4copy", unicode_asucs4copy, METH_VARARGS}, - {"unicode_fromordinal", unicode_fromordinal, METH_VARARGS}, {"unicode_asutf8", unicode_asutf8, METH_VARARGS}, - {"unicode_asutf8andsize", unicode_asutf8andsize, METH_VARARGS}, - {"unicode_asutf8andsize_null",unicode_asutf8andsize_null, METH_VARARGS}, - {"unicode_getdefaultencoding",unicode_getdefaultencoding, METH_NOARGS}, - {"unicode_decode", unicode_decode, METH_VARARGS}, - {"unicode_asencodedstring", unicode_asencodedstring, METH_VARARGS}, - {"unicode_buildencodingmap", unicode_buildencodingmap, METH_O}, - {"unicode_decodeutf7", unicode_decodeutf7, METH_VARARGS}, - {"unicode_decodeutf7stateful",unicode_decodeutf7stateful, METH_VARARGS}, - {"unicode_decodeutf8", unicode_decodeutf8, METH_VARARGS}, - {"unicode_decodeutf8stateful",unicode_decodeutf8stateful, METH_VARARGS}, - {"unicode_asutf8string", unicode_asutf8string, METH_O}, - {"unicode_decodeutf16", unicode_decodeutf16, METH_VARARGS}, - {"unicode_decodeutf16stateful",unicode_decodeutf16stateful, METH_VARARGS}, - {"unicode_asutf16string", unicode_asutf16string, METH_O}, - {"unicode_decodeutf32", unicode_decodeutf32, METH_VARARGS}, - {"unicode_decodeutf32stateful",unicode_decodeutf32stateful, METH_VARARGS}, 
- {"unicode_asutf32string", unicode_asutf32string, METH_O}, - {"unicode_decodeunicodeescape",unicode_decodeunicodeescape, METH_VARARGS}, - {"unicode_asunicodeescapestring",unicode_asunicodeescapestring,METH_O}, - {"unicode_decoderawunicodeescape",unicode_decoderawunicodeescape,METH_VARARGS}, - {"unicode_asrawunicodeescapestring",unicode_asrawunicodeescapestring,METH_O}, - {"unicode_decodelatin1", unicode_decodelatin1, METH_VARARGS}, - {"unicode_aslatin1string", unicode_aslatin1string, METH_O}, - {"unicode_decodeascii", unicode_decodeascii, METH_VARARGS}, - {"unicode_asasciistring", unicode_asasciistring, METH_O}, - {"unicode_decodecharmap", unicode_decodecharmap, METH_VARARGS}, - {"unicode_ascharmapstring", unicode_ascharmapstring, METH_VARARGS}, -#ifdef MS_WINDOWS - {"unicode_decodembcs", unicode_decodembcs, METH_VARARGS}, - {"unicode_decodembcsstateful",unicode_decodembcsstateful, METH_VARARGS}, - {"unicode_decodecodepagestateful",unicode_decodecodepagestateful,METH_VARARGS}, - {"unicode_asmbcsstring", unicode_asmbcsstring, METH_O}, - {"unicode_encodecodepage", unicode_encodecodepage, METH_VARARGS}, -#endif /* MS_WINDOWS */ - {"unicode_decodelocaleandsize",unicode_decodelocaleandsize, METH_VARARGS}, - {"unicode_decodelocale", unicode_decodelocale, METH_VARARGS}, - {"unicode_encodelocale", unicode_encodelocale, METH_VARARGS}, - {"unicode_decodefsdefault", unicode_decodefsdefault, METH_VARARGS}, - {"unicode_decodefsdefaultandsize",unicode_decodefsdefaultandsize,METH_VARARGS}, - {"unicode_encodefsdefault", unicode_encodefsdefault, METH_O}, - {"unicode_concat", unicode_concat, METH_VARARGS}, - {"unicode_splitlines", unicode_splitlines, METH_VARARGS}, - {"unicode_split", unicode_split, METH_VARARGS}, - {"unicode_rsplit", unicode_rsplit, METH_VARARGS}, - {"unicode_partition", unicode_partition, METH_VARARGS}, - {"unicode_rpartition", unicode_rpartition, METH_VARARGS}, - {"unicode_translate", unicode_translate, METH_VARARGS}, - {"unicode_join", unicode_join, METH_VARARGS}, - {"unicode_count", unicode_count, METH_VARARGS}, - {"unicode_tailmatch", unicode_tailmatch, METH_VARARGS}, - {"unicode_find", unicode_find, METH_VARARGS}, - {"unicode_findchar", unicode_findchar, METH_VARARGS}, - {"unicode_replace", unicode_replace, METH_VARARGS}, - {"unicode_compare", unicode_compare, METH_VARARGS}, - {"unicode_comparewithasciistring",unicode_comparewithasciistring,METH_VARARGS}, - {"unicode_equaltoutf8", unicode_equaltoutf8, METH_VARARGS}, - {"unicode_equaltoutf8andsize",unicode_equaltoutf8andsize, METH_VARARGS}, - {"unicode_richcompare", unicode_richcompare, METH_VARARGS}, - {"unicode_format", unicode_format, METH_VARARGS}, - {"unicode_contains", unicode_contains, METH_VARARGS}, - {"unicode_isidentifier", unicode_isidentifier, METH_O}, {"unicode_copycharacters", unicode_copycharacters, METH_VARARGS}, {NULL}, }; @@ -2099,6 +237,5 @@ _PyTestCapi_Init_Unicode(PyObject *m) { if (PyModule_AddFunctions(m, TestMethods) < 0) { return -1; } - return 0; } diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index 7928cd7d6fe1ae..3c30381be6d538 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -13,7 +13,6 @@ #include "_testcapi/parts.h" #include "frameobject.h" // PyFrame_New() -#include "interpreteridobject.h" // PyInterpreterID_Type #include "marshal.h" // PyMarshal_WriteLongToFile() #include // FLT_MAX @@ -1449,36 +1448,6 @@ run_in_subinterp(PyObject *self, PyObject *args) return PyLong_FromLong(r); } -static PyObject * -get_interpreterid_type(PyObject *self, PyObject 
*Py_UNUSED(ignored)) -{ - return Py_NewRef(&PyInterpreterID_Type); -} - -static PyObject * -link_interpreter_refcount(PyObject *self, PyObject *idobj) -{ - PyInterpreterState *interp = PyInterpreterID_LookUp(idobj); - if (interp == NULL) { - assert(PyErr_Occurred()); - return NULL; - } - _PyInterpreterState_RequireIDRef(interp, 1); - Py_RETURN_NONE; -} - -static PyObject * -unlink_interpreter_refcount(PyObject *self, PyObject *idobj) -{ - PyInterpreterState *interp = PyInterpreterID_LookUp(idobj); - if (interp == NULL) { - assert(PyErr_Occurred()); - return NULL; - } - _PyInterpreterState_RequireIDRef(interp, 0); - Py_RETURN_NONE; -} - static PyMethodDef ml; static PyObject * @@ -3094,6 +3063,33 @@ function_set_kw_defaults(PyObject *self, PyObject *args) Py_RETURN_NONE; } +static PyObject * +function_get_closure(PyObject *self, PyObject *func) +{ + PyObject *closure = PyFunction_GetClosure(func); + if (closure != NULL) { + return Py_NewRef(closure); + } else if (PyErr_Occurred()) { + return NULL; + } else { + Py_RETURN_NONE; // This can happen when `closure` is set to `None` + } +} + +static PyObject * +function_set_closure(PyObject *self, PyObject *args) +{ + PyObject *func = NULL, *closure = NULL; + if (!PyArg_ParseTuple(args, "OO", &func, &closure)) { + return NULL; + } + int result = PyFunction_SetClosure(func, closure); + if (result == -1) { + return NULL; + } + Py_RETURN_NONE; +} + static PyObject * check_pyimport_addmodule(PyObject *self, PyObject *args) { @@ -3296,9 +3292,6 @@ static PyMethodDef TestMethods[] = { {"crash_no_current_thread", crash_no_current_thread, METH_NOARGS}, {"test_current_tstate_matches", test_current_tstate_matches, METH_NOARGS}, {"run_in_subinterp", run_in_subinterp, METH_VARARGS}, - {"get_interpreterid_type", get_interpreterid_type, METH_NOARGS}, - {"link_interpreter_refcount", link_interpreter_refcount, METH_O}, - {"unlink_interpreter_refcount", unlink_interpreter_refcount, METH_O}, {"create_cfunction", create_cfunction, METH_NOARGS}, {"call_in_temporary_c_thread", call_in_temporary_c_thread, METH_VARARGS, PyDoc_STR("set_error_class(error_class) -> None")}, @@ -3379,6 +3372,8 @@ static PyMethodDef TestMethods[] = { {"function_set_defaults", function_set_defaults, METH_VARARGS, NULL}, {"function_get_kw_defaults", function_get_kw_defaults, METH_O, NULL}, {"function_set_kw_defaults", function_set_kw_defaults, METH_VARARGS, NULL}, + {"function_get_closure", function_get_closure, METH_O, NULL}, + {"function_set_closure", function_set_closure, METH_VARARGS, NULL}, {"check_pyimport_addmodule", check_pyimport_addmodule, METH_VARARGS}, {"test_weakref_capi", test_weakref_capi, METH_NOARGS}, {NULL, NULL} /* sentinel */ @@ -3946,6 +3941,7 @@ PyInit__testcapi(void) PyModule_AddObject(m, "SIZEOF_WCHAR_T", PyLong_FromSsize_t(sizeof(wchar_t))); PyModule_AddObject(m, "SIZEOF_VOID_P", PyLong_FromSsize_t(sizeof(void*))); PyModule_AddObject(m, "SIZEOF_TIME_T", PyLong_FromSsize_t(sizeof(time_t))); + PyModule_AddObject(m, "SIZEOF_PID_T", PyLong_FromSsize_t(sizeof(pid_t))); PyModule_AddObject(m, "Py_Version", PyLong_FromUnsignedLong(Py_Version)); Py_INCREF(&PyInstanceMethod_Type); PyModule_AddObject(m, "instancemethod", (PyObject *)&PyInstanceMethod_Type); @@ -3975,6 +3971,9 @@ PyInit__testcapi(void) if (_PyTestCapi_Init_Abstract(m) < 0) { return NULL; } + if (_PyTestCapi_Init_Bytes(m) < 0) { + return NULL; + } if (_PyTestCapi_Init_Unicode(m) < 0) { return NULL; } diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c index 1c10dd02138f3a..c07652facc0ae2 100644 
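The `function_get_closure` and `function_set_closure` hooks added above are thin wrappers over `PyFunction_GetClosure()` and `PyFunction_SetClosure()`. As a rough illustration of the calling convention they exercise (a minimal sketch, not part of this change; it assumes an initialized interpreter and a borrowed reference `func` to some existing function object):

```c
#include <Python.h>

/* Hypothetical helper: read a function's closure and write it back.
 * PyFunction_GetClosure() returns a *borrowed* reference (NULL when the
 * function has no free variables); PyFunction_SetClosure() accepts Py_None
 * or a tuple of cell objects and returns 0 on success, -1 on error. */
static int
closure_roundtrip(PyObject *func)
{
    if (!PyFunction_Check(func)) {
        PyErr_SetString(PyExc_TypeError, "expected a function object");
        return -1;
    }

    PyObject *closure = PyFunction_GetClosure(func);  /* borrowed, may be NULL */
    if (closure == NULL && PyErr_Occurred()) {
        return -1;
    }
    if (closure != NULL && !PyTuple_Check(closure)) {
        PyErr_SetString(PyExc_TypeError, "closure should be a tuple of cells");
        return -1;
    }

    /* Writing the same value back succeeds; Py_None is also accepted. */
    if (PyFunction_SetClosure(func, closure != NULL ? closure : Py_None) < 0) {
        return -1;
    }
    return 0;
}
```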
--- a/Modules/_testinternalcapi.c +++ b/Modules/_testinternalcapi.c @@ -29,8 +29,6 @@ #include "pycore_pyerrors.h" // _PyErr_ChainExceptions1() #include "pycore_pystate.h" // _PyThreadState_GET() -#include "interpreteridobject.h" // PyInterpreterID_LookUp() - #include "clinic/_testinternalcapi.c.h" // Include test definitions from _testinternalcapi/ @@ -993,26 +991,6 @@ get_optimizer(PyObject *self, PyObject *Py_UNUSED(ignored)) return opt; } -static PyObject * -get_executor(PyObject *self, PyObject *const *args, Py_ssize_t nargs) -{ - - if (!_PyArg_CheckPositional("get_executor", nargs, 2, 2)) { - return NULL; - } - PyObject *code = args[0]; - PyObject *offset = args[1]; - long ioffset = PyLong_AsLong(offset); - if (ioffset == -1 && PyErr_Occurred()) { - return NULL; - } - if (!PyCode_Check(code)) { - PyErr_SetString(PyExc_TypeError, "first argument must be a code object"); - return NULL; - } - return (PyObject *)PyUnstable_GetExecutor((PyCodeObject *)code, ioffset); -} - static PyObject * add_executor_dependency(PyObject *self, PyObject *args) { @@ -1112,7 +1090,7 @@ pending_identify(PyObject *self, PyObject *args) if (!PyArg_ParseTuple(args, "O:pending_identify", &interpid)) { return NULL; } - PyInterpreterState *interp = PyInterpreterID_LookUp(interpid); + PyInterpreterState *interp = _PyInterpreterState_LookUpIDObject(interpid); if (interp == NULL) { if (!PyErr_Occurred()) { PyErr_SetString(PyExc_ValueError, "interpreter not found"); @@ -1477,16 +1455,152 @@ run_in_subinterp_with_config(PyObject *self, PyObject *args, PyObject *kwargs) } +static PyObject * +normalize_interp_id(PyObject *self, PyObject *idobj) +{ + int64_t interpid = _PyInterpreterState_ObjectToID(idobj); + if (interpid < 0) { + return NULL; + } + return PyLong_FromLongLong(interpid); +} + +static PyObject * +unused_interpreter_id(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + int64_t interpid = INT64_MAX; + assert(interpid > _PyRuntime.interpreters.next_id); + return PyLong_FromLongLong(interpid); +} + +static PyObject * +new_interpreter(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + // Unlike _interpreters.create(), we do not automatically link + // the interpreter to its refcount. 
+ PyThreadState *save_tstate = PyThreadState_Get(); + const PyInterpreterConfig config = \ + (PyInterpreterConfig)_PyInterpreterConfig_INIT; + PyThreadState *tstate = NULL; + PyStatus status = Py_NewInterpreterFromConfig(&tstate, &config); + PyThreadState_Swap(save_tstate); + if (PyStatus_Exception(status)) { + _PyErr_SetFromPyStatus(status); + return NULL; + } + PyInterpreterState *interp = PyThreadState_GetInterpreter(tstate); + + if (_PyInterpreterState_IDInitref(interp) < 0) { + goto error; + } + + int64_t interpid = PyInterpreterState_GetID(interp); + if (interpid < 0) { + goto error; + } + PyObject *idobj = PyLong_FromLongLong(interpid); + if (idobj == NULL) { + goto error; + } + + PyThreadState_Swap(tstate); + PyThreadState_Clear(tstate); + PyThreadState_Swap(save_tstate); + PyThreadState_Delete(tstate); + + return idobj; + +error: + save_tstate = PyThreadState_Swap(tstate); + Py_EndInterpreter(tstate); + PyThreadState_Swap(save_tstate); + return NULL; +} + +static PyObject * +interpreter_exists(PyObject *self, PyObject *idobj) +{ + PyInterpreterState *interp = _PyInterpreterState_LookUpIDObject(idobj); + if (interp == NULL) { + if (PyErr_ExceptionMatches(PyExc_InterpreterNotFoundError)) { + PyErr_Clear(); + Py_RETURN_FALSE; + } + assert(PyErr_Occurred()); + return NULL; + } + Py_RETURN_TRUE; +} + static PyObject * get_interpreter_refcount(PyObject *self, PyObject *idobj) { - PyInterpreterState *interp = PyInterpreterID_LookUp(idobj); + PyInterpreterState *interp = _PyInterpreterState_LookUpIDObject(idobj); if (interp == NULL) { return NULL; } return PyLong_FromLongLong(interp->id_refcount); } +static PyObject * +link_interpreter_refcount(PyObject *self, PyObject *idobj) +{ + PyInterpreterState *interp = _PyInterpreterState_LookUpIDObject(idobj); + if (interp == NULL) { + assert(PyErr_Occurred()); + return NULL; + } + _PyInterpreterState_RequireIDRef(interp, 1); + Py_RETURN_NONE; +} + +static PyObject * +unlink_interpreter_refcount(PyObject *self, PyObject *idobj) +{ + PyInterpreterState *interp = _PyInterpreterState_LookUpIDObject(idobj); + if (interp == NULL) { + assert(PyErr_Occurred()); + return NULL; + } + _PyInterpreterState_RequireIDRef(interp, 0); + Py_RETURN_NONE; +} + +static PyObject * +interpreter_refcount_linked(PyObject *self, PyObject *idobj) +{ + PyInterpreterState *interp = _PyInterpreterState_LookUpIDObject(idobj); + if (interp == NULL) { + return NULL; + } + if (_PyInterpreterState_RequiresIDRef(interp)) { + Py_RETURN_TRUE; + } + Py_RETURN_FALSE; +} + +static PyObject * +interpreter_incref(PyObject *self, PyObject *idobj) +{ + PyInterpreterState *interp = _PyInterpreterState_LookUpIDObject(idobj); + if (interp == NULL) { + return NULL; + } + _PyInterpreterState_IDIncref(interp); + Py_RETURN_NONE; +} + +static PyObject * +interpreter_decref(PyObject *self, PyObject *idobj) +{ + PyInterpreterState *interp = _PyInterpreterState_LookUpIDObject(idobj); + if (interp == NULL) { + return NULL; + } + _PyInterpreterState_IDDecref(interp); + Py_RETURN_NONE; +} + static void _xid_capsule_destructor(PyObject *capsule) @@ -1702,7 +1816,6 @@ static PyMethodDef module_functions[] = { {"iframe_getlasti", iframe_getlasti, METH_O, NULL}, {"get_optimizer", get_optimizer, METH_NOARGS, NULL}, {"set_optimizer", set_optimizer, METH_O, NULL}, - {"get_executor", _PyCFunction_CAST(get_executor), METH_FASTCALL, NULL}, {"new_counter_optimizer", new_counter_optimizer, METH_NOARGS, NULL}, {"new_uop_optimizer", new_uop_optimizer, METH_NOARGS, NULL}, {"add_executor_dependency", 
add_executor_dependency, METH_VARARGS, NULL}, @@ -1727,7 +1840,16 @@ static PyMethodDef module_functions[] = { {"run_in_subinterp_with_config", _PyCFunction_CAST(run_in_subinterp_with_config), METH_VARARGS | METH_KEYWORDS}, + {"normalize_interp_id", normalize_interp_id, METH_O}, + {"unused_interpreter_id", unused_interpreter_id, METH_NOARGS}, + {"new_interpreter", new_interpreter, METH_NOARGS}, + {"interpreter_exists", interpreter_exists, METH_O}, {"get_interpreter_refcount", get_interpreter_refcount, METH_O}, + {"link_interpreter_refcount", link_interpreter_refcount, METH_O}, + {"unlink_interpreter_refcount", unlink_interpreter_refcount, METH_O}, + {"interpreter_refcount_linked", interpreter_refcount_linked, METH_O}, + {"interpreter_incref", interpreter_incref, METH_O}, + {"interpreter_decref", interpreter_decref, METH_O}, {"compile_perf_trampoline_entry", compile_perf_trampoline_entry, METH_VARARGS}, {"perf_trampoline_set_persist_after_fork", perf_trampoline_set_persist_after_fork, METH_VARARGS}, {"get_crossinterp_data", get_crossinterp_data, METH_VARARGS}, diff --git a/Modules/_testlimitedcapi.c b/Modules/_testlimitedcapi.c index 49bf6a3ea39613..598071fe0ddbad 100644 --- a/Modules/_testlimitedcapi.c +++ b/Modules/_testlimitedcapi.c @@ -26,22 +26,49 @@ PyInit__testlimitedcapi(void) return NULL; } - if (_PyTestCapi_Init_ByteArray(mod) < 0) { + if (_PyTestLimitedCAPI_Init_Abstract(mod) < 0) { return NULL; } - if (_PyTestCapi_Init_Bytes(mod) < 0) { + if (_PyTestLimitedCAPI_Init_ByteArray(mod) < 0) { return NULL; } - if (_PyTestCapi_Init_HeaptypeRelative(mod) < 0) { + if (_PyTestLimitedCAPI_Init_Bytes(mod) < 0) { return NULL; } - if (_PyTestCapi_Init_PyOS(mod) < 0) { + if (_PyTestLimitedCAPI_Init_Complex(mod) < 0) { return NULL; } - if (_PyTestCapi_Init_Sys(mod) < 0) { + if (_PyTestLimitedCAPI_Init_Dict(mod) < 0) { return NULL; } - if (_PyTestCapi_Init_VectorcallLimited(mod) < 0) { + if (_PyTestLimitedCAPI_Init_Float(mod) < 0) { + return NULL; + } + if (_PyTestLimitedCAPI_Init_HeaptypeRelative(mod) < 0) { + return NULL; + } + if (_PyTestLimitedCAPI_Init_List(mod) < 0) { + return NULL; + } + if (_PyTestLimitedCAPI_Init_Long(mod) < 0) { + return NULL; + } + if (_PyTestLimitedCAPI_Init_Object(mod) < 0) { + return NULL; + } + if (_PyTestLimitedCAPI_Init_PyOS(mod) < 0) { + return NULL; + } + if (_PyTestLimitedCAPI_Init_Set(mod) < 0) { + return NULL; + } + if (_PyTestLimitedCAPI_Init_Sys(mod) < 0) { + return NULL; + } + if (_PyTestLimitedCAPI_Init_Unicode(mod) < 0) { + return NULL; + } + if (_PyTestLimitedCAPI_Init_VectorcallLimited(mod) < 0) { return NULL; } return mod; diff --git a/Modules/_testlimitedcapi/abstract.c b/Modules/_testlimitedcapi/abstract.c new file mode 100644 index 00000000000000..6056dd100d6069 --- /dev/null +++ b/Modules/_testlimitedcapi/abstract.c @@ -0,0 +1,582 @@ +#include "parts.h" +#include "util.h" + + +static PyObject * +object_repr(PyObject *self, PyObject *arg) +{ + NULLABLE(arg); + return PyObject_Repr(arg); +} + +static PyObject * +object_ascii(PyObject *self, PyObject *arg) +{ + NULLABLE(arg); + return PyObject_ASCII(arg); +} + +static PyObject * +object_str(PyObject *self, PyObject *arg) +{ + NULLABLE(arg); + return PyObject_Str(arg); +} + +static PyObject * +object_bytes(PyObject *self, PyObject *arg) +{ + NULLABLE(arg); + return PyObject_Bytes(arg); +} + +static PyObject * +object_getattr(PyObject *self, PyObject *args) +{ + PyObject *obj, *attr_name; + if (!PyArg_ParseTuple(args, "OO", &obj, &attr_name)) { + return NULL; + } + NULLABLE(obj); + 
NULLABLE(attr_name); + return PyObject_GetAttr(obj, attr_name); +} + +static PyObject * +object_getattrstring(PyObject *self, PyObject *args) +{ + PyObject *obj; + const char *attr_name; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "Oz#", &obj, &attr_name, &size)) { + return NULL; + } + NULLABLE(obj); + return PyObject_GetAttrString(obj, attr_name); +} + +static PyObject * +object_hasattr(PyObject *self, PyObject *args) +{ + PyObject *obj, *attr_name; + if (!PyArg_ParseTuple(args, "OO", &obj, &attr_name)) { + return NULL; + } + NULLABLE(obj); + NULLABLE(attr_name); + return PyLong_FromLong(PyObject_HasAttr(obj, attr_name)); +} + +static PyObject * +object_hasattrstring(PyObject *self, PyObject *args) +{ + PyObject *obj; + const char *attr_name; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "Oz#", &obj, &attr_name, &size)) { + return NULL; + } + NULLABLE(obj); + return PyLong_FromLong(PyObject_HasAttrString(obj, attr_name)); +} + +static PyObject * +object_setattr(PyObject *self, PyObject *args) +{ + PyObject *obj, *attr_name, *value; + if (!PyArg_ParseTuple(args, "OOO", &obj, &attr_name, &value)) { + return NULL; + } + NULLABLE(obj); + NULLABLE(attr_name); + NULLABLE(value); + RETURN_INT(PyObject_SetAttr(obj, attr_name, value)); +} + +static PyObject * +object_setattrstring(PyObject *self, PyObject *args) +{ + PyObject *obj, *value; + const char *attr_name; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "Oz#O", &obj, &attr_name, &size, &value)) { + return NULL; + } + NULLABLE(obj); + NULLABLE(value); + RETURN_INT(PyObject_SetAttrString(obj, attr_name, value)); +} + +static PyObject * +object_delattr(PyObject *self, PyObject *args) +{ + PyObject *obj, *attr_name; +if (!PyArg_ParseTuple(args, "OO", &obj, &attr_name)) { + return NULL; + } + NULLABLE(obj); + NULLABLE(attr_name); + RETURN_INT(PyObject_DelAttr(obj, attr_name)); +} + +static PyObject * +object_delattrstring(PyObject *self, PyObject *args) +{ + PyObject *obj; + const char *attr_name; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "Oz#", &obj, &attr_name, &size)) { + return NULL; + } + NULLABLE(obj); + RETURN_INT(PyObject_DelAttrString(obj, attr_name)); +} + +static PyObject * +number_check(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PyBool_FromLong(PyNumber_Check(obj)); +} + +static PyObject * +mapping_check(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PyLong_FromLong(PyMapping_Check(obj)); +} + +static PyObject * +mapping_size(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + RETURN_SIZE(PyMapping_Size(obj)); +} + +static PyObject * +mapping_length(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + RETURN_SIZE(PyMapping_Length(obj)); +} + +static PyObject * +object_getitem(PyObject *self, PyObject *args) +{ + PyObject *mapping, *key; + if (!PyArg_ParseTuple(args, "OO", &mapping, &key)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(key); + return PyObject_GetItem(mapping, key); +} + +static PyObject * +mapping_getitemstring(PyObject *self, PyObject *args) +{ + PyObject *mapping; + const char *key; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "Oz#", &mapping, &key, &size)) { + return NULL; + } + NULLABLE(mapping); + return PyMapping_GetItemString(mapping, key); +} + +static PyObject * +mapping_haskey(PyObject *self, PyObject *args) +{ + PyObject *mapping, *key; + if (!PyArg_ParseTuple(args, "OO", &mapping, &key)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(key); + return PyLong_FromLong(PyMapping_HasKey(mapping, key)); +} + +static PyObject * 
+mapping_haskeystring(PyObject *self, PyObject *args) +{ + PyObject *mapping; + const char *key; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "Oz#", &mapping, &key, &size)) { + return NULL; + } + NULLABLE(mapping); + return PyLong_FromLong(PyMapping_HasKeyString(mapping, key)); +} + +static PyObject * +mapping_haskeywitherror(PyObject *self, PyObject *args) +{ + PyObject *mapping, *key; + if (!PyArg_ParseTuple(args, "OO", &mapping, &key)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(key); + RETURN_INT(PyMapping_HasKeyWithError(mapping, key)); +} + +static PyObject * +mapping_haskeystringwitherror(PyObject *self, PyObject *args) +{ + PyObject *mapping; + const char *key; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "Oz#", &mapping, &key, &size)) { + return NULL; + } + NULLABLE(mapping); + RETURN_INT(PyMapping_HasKeyStringWithError(mapping, key)); +} + +static PyObject * +object_setitem(PyObject *self, PyObject *args) +{ + PyObject *mapping, *key, *value; + if (!PyArg_ParseTuple(args, "OOO", &mapping, &key, &value)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(key); + NULLABLE(value); + RETURN_INT(PyObject_SetItem(mapping, key, value)); +} + +static PyObject * +mapping_setitemstring(PyObject *self, PyObject *args) +{ + PyObject *mapping, *value; + const char *key; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "Oz#O", &mapping, &key, &size, &value)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(value); + RETURN_INT(PyMapping_SetItemString(mapping, key, value)); +} + +static PyObject * +object_delitem(PyObject *self, PyObject *args) +{ + PyObject *mapping, *key; + if (!PyArg_ParseTuple(args, "OO", &mapping, &key)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(key); + RETURN_INT(PyObject_DelItem(mapping, key)); +} + +static PyObject * +mapping_delitem(PyObject *self, PyObject *args) +{ + PyObject *mapping, *key; + if (!PyArg_ParseTuple(args, "OO", &mapping, &key)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(key); + RETURN_INT(PyMapping_DelItem(mapping, key)); +} + +static PyObject * +mapping_delitemstring(PyObject *self, PyObject *args) +{ + PyObject *mapping; + const char *key; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "Oz#", &mapping, &key, &size)) { + return NULL; + } + NULLABLE(mapping); + RETURN_INT(PyMapping_DelItemString(mapping, key)); +} + +static PyObject * +mapping_keys(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PyMapping_Keys(obj); +} + +static PyObject * +mapping_values(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PyMapping_Values(obj); +} + +static PyObject * +mapping_items(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PyMapping_Items(obj); +} + + +static PyObject * +sequence_check(PyObject* self, PyObject *obj) +{ + NULLABLE(obj); + return PyLong_FromLong(PySequence_Check(obj)); +} + +static PyObject * +sequence_size(PyObject* self, PyObject *obj) +{ + NULLABLE(obj); + RETURN_SIZE(PySequence_Size(obj)); +} + +static PyObject * +sequence_length(PyObject* self, PyObject *obj) +{ + NULLABLE(obj); + RETURN_SIZE(PySequence_Length(obj)); +} + +static PyObject * +sequence_concat(PyObject *self, PyObject *args) +{ + PyObject *seq1, *seq2; + if (!PyArg_ParseTuple(args, "OO", &seq1, &seq2)) { + return NULL; + } + NULLABLE(seq1); + NULLABLE(seq2); + + return PySequence_Concat(seq1, seq2); +} + +static PyObject * +sequence_repeat(PyObject *self, PyObject *args) +{ + PyObject *seq; + Py_ssize_t count; + if (!PyArg_ParseTuple(args, "On", &seq, &count)) { + return NULL; + } + NULLABLE(seq); + 
+ return PySequence_Repeat(seq, count); +} + +static PyObject * +sequence_inplaceconcat(PyObject *self, PyObject *args) +{ + PyObject *seq1, *seq2; + if (!PyArg_ParseTuple(args, "OO", &seq1, &seq2)) { + return NULL; + } + NULLABLE(seq1); + NULLABLE(seq2); + + return PySequence_InPlaceConcat(seq1, seq2); +} + +static PyObject * +sequence_inplacerepeat(PyObject *self, PyObject *args) +{ + PyObject *seq; + Py_ssize_t count; + if (!PyArg_ParseTuple(args, "On", &seq, &count)) { + return NULL; + } + NULLABLE(seq); + + return PySequence_InPlaceRepeat(seq, count); +} + +static PyObject * +sequence_getitem(PyObject *self, PyObject *args) +{ + PyObject *seq; + Py_ssize_t i; + if (!PyArg_ParseTuple(args, "On", &seq, &i)) { + return NULL; + } + NULLABLE(seq); + + return PySequence_GetItem(seq, i); +} + +static PyObject * +sequence_setitem(PyObject *self, PyObject *args) +{ + Py_ssize_t i; + PyObject *seq, *val; + if (!PyArg_ParseTuple(args, "OnO", &seq, &i, &val)) { + return NULL; + } + NULLABLE(seq); + NULLABLE(val); + + RETURN_INT(PySequence_SetItem(seq, i, val)); +} + + +static PyObject * +sequence_delitem(PyObject *self, PyObject *args) +{ + Py_ssize_t i; + PyObject *seq; + if (!PyArg_ParseTuple(args, "On", &seq, &i)) { + return NULL; + } + NULLABLE(seq); + + RETURN_INT(PySequence_DelItem(seq, i)); +} + +static PyObject * +sequence_setslice(PyObject* self, PyObject *args) +{ + PyObject *sequence, *obj; + Py_ssize_t i1, i2; + if (!PyArg_ParseTuple(args, "OnnO", &sequence, &i1, &i2, &obj)) { + return NULL; + } + NULLABLE(sequence); + NULLABLE(obj); + + RETURN_INT(PySequence_SetSlice(sequence, i1, i2, obj)); +} + +static PyObject * +sequence_delslice(PyObject *self, PyObject *args) +{ + PyObject *sequence; + Py_ssize_t i1, i2; + if (!PyArg_ParseTuple(args, "Onn", &sequence, &i1, &i2)) { + return NULL; + } + NULLABLE(sequence); + + RETURN_INT(PySequence_DelSlice(sequence, i1, i2)); +} + +static PyObject * +sequence_count(PyObject *self, PyObject *args) +{ + PyObject *seq, *value; + if (!PyArg_ParseTuple(args, "OO", &seq, &value)) { + return NULL; + } + NULLABLE(seq); + NULLABLE(value); + + RETURN_SIZE(PySequence_Count(seq, value)); +} + +static PyObject * +sequence_contains(PyObject *self, PyObject *args) +{ + PyObject *seq, *value; + if (!PyArg_ParseTuple(args, "OO", &seq, &value)) { + return NULL; + } + NULLABLE(seq); + NULLABLE(value); + + RETURN_INT(PySequence_Contains(seq, value)); +} + +static PyObject * +sequence_index(PyObject *self, PyObject *args) +{ + PyObject *seq, *value; + if (!PyArg_ParseTuple(args, "OO", &seq, &value)) { + return NULL; + } + NULLABLE(seq); + NULLABLE(value); + + RETURN_SIZE(PySequence_Index(seq, value)); +} + +static PyObject * +sequence_list(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PySequence_List(obj); +} + +static PyObject * +sequence_tuple(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PySequence_Tuple(obj); +} + + +static PyMethodDef test_methods[] = { + {"object_repr", object_repr, METH_O}, + {"object_ascii", object_ascii, METH_O}, + {"object_str", object_str, METH_O}, + {"object_bytes", object_bytes, METH_O}, + + {"object_getattr", object_getattr, METH_VARARGS}, + {"object_getattrstring", object_getattrstring, METH_VARARGS}, + {"object_hasattr", object_hasattr, METH_VARARGS}, + {"object_hasattrstring", object_hasattrstring, METH_VARARGS}, + {"object_setattr", object_setattr, METH_VARARGS}, + {"object_setattrstring", object_setattrstring, METH_VARARGS}, + {"object_delattr", object_delattr, METH_VARARGS}, + {"object_delattrstring", 
object_delattrstring, METH_VARARGS}, + + {"number_check", number_check, METH_O}, + {"mapping_check", mapping_check, METH_O}, + {"mapping_size", mapping_size, METH_O}, + {"mapping_length", mapping_length, METH_O}, + {"object_getitem", object_getitem, METH_VARARGS}, + {"mapping_getitemstring", mapping_getitemstring, METH_VARARGS}, + {"mapping_haskey", mapping_haskey, METH_VARARGS}, + {"mapping_haskeystring", mapping_haskeystring, METH_VARARGS}, + {"mapping_haskeywitherror", mapping_haskeywitherror, METH_VARARGS}, + {"mapping_haskeystringwitherror", mapping_haskeystringwitherror, METH_VARARGS}, + {"object_setitem", object_setitem, METH_VARARGS}, + {"mapping_setitemstring", mapping_setitemstring, METH_VARARGS}, + {"object_delitem", object_delitem, METH_VARARGS}, + {"mapping_delitem", mapping_delitem, METH_VARARGS}, + {"mapping_delitemstring", mapping_delitemstring, METH_VARARGS}, + {"mapping_keys", mapping_keys, METH_O}, + {"mapping_values", mapping_values, METH_O}, + {"mapping_items", mapping_items, METH_O}, + + {"sequence_check", sequence_check, METH_O}, + {"sequence_size", sequence_size, METH_O}, + {"sequence_length", sequence_length, METH_O}, + {"sequence_concat", sequence_concat, METH_VARARGS}, + {"sequence_repeat", sequence_repeat, METH_VARARGS}, + {"sequence_inplaceconcat", sequence_inplaceconcat, METH_VARARGS}, + {"sequence_inplacerepeat", sequence_inplacerepeat, METH_VARARGS}, + {"sequence_getitem", sequence_getitem, METH_VARARGS}, + {"sequence_setitem", sequence_setitem, METH_VARARGS}, + {"sequence_delitem", sequence_delitem, METH_VARARGS}, + {"sequence_setslice", sequence_setslice, METH_VARARGS}, + {"sequence_delslice", sequence_delslice, METH_VARARGS}, + {"sequence_count", sequence_count, METH_VARARGS}, + {"sequence_contains", sequence_contains, METH_VARARGS}, + {"sequence_index", sequence_index, METH_VARARGS}, + {"sequence_list", sequence_list, METH_O}, + {"sequence_tuple", sequence_tuple, METH_O}, + + {NULL}, +}; + +int +_PyTestLimitedCAPI_Init_Abstract(PyObject *m) +{ + if (PyModule_AddFunctions(m, test_methods) < 0) { + return -1; + } + + return 0; +} diff --git a/Modules/_testlimitedcapi/bytearray.c b/Modules/_testlimitedcapi/bytearray.c index dc47ed2c306f40..68b029e25af677 100644 --- a/Modules/_testlimitedcapi/bytearray.c +++ b/Modules/_testlimitedcapi/bytearray.c @@ -113,7 +113,7 @@ static PyMethodDef test_methods[] = { }; int -_PyTestCapi_Init_ByteArray(PyObject *m) +_PyTestLimitedCAPI_Init_ByteArray(PyObject *m) { if (PyModule_AddFunctions(m, test_methods) < 0) { return -1; diff --git a/Modules/_testlimitedcapi/bytes.c b/Modules/_testlimitedcapi/bytes.c index a14c4f9d4d30a8..157d4089954af5 100644 --- a/Modules/_testlimitedcapi/bytes.c +++ b/Modules/_testlimitedcapi/bytes.c @@ -245,7 +245,7 @@ static PyMethodDef test_methods[] = { }; int -_PyTestCapi_Init_Bytes(PyObject *m) +_PyTestLimitedCAPI_Init_Bytes(PyObject *m) { if (PyModule_AddFunctions(m, test_methods) < 0) { return -1; diff --git a/Modules/_testlimitedcapi/clinic/long.c.h b/Modules/_testlimitedcapi/clinic/long.c.h new file mode 100644 index 00000000000000..ebaeb53921a82f --- /dev/null +++ b/Modules/_testlimitedcapi/clinic/long.c.h @@ -0,0 +1,143 @@ +/*[clinic input] +preserve +[clinic start generated code]*/ + +PyDoc_STRVAR(_testlimitedcapi_test_long_api__doc__, +"test_long_api($module, /)\n" +"--\n" +"\n"); + +#define _TESTLIMITEDCAPI_TEST_LONG_API_METHODDEF \ + {"test_long_api", (PyCFunction)_testlimitedcapi_test_long_api, METH_NOARGS, _testlimitedcapi_test_long_api__doc__}, + +static PyObject * 
+_testlimitedcapi_test_long_api_impl(PyObject *module); + +static PyObject * +_testlimitedcapi_test_long_api(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + return _testlimitedcapi_test_long_api_impl(module); +} + +PyDoc_STRVAR(_testlimitedcapi_test_longlong_api__doc__, +"test_longlong_api($module, /)\n" +"--\n" +"\n"); + +#define _TESTLIMITEDCAPI_TEST_LONGLONG_API_METHODDEF \ + {"test_longlong_api", (PyCFunction)_testlimitedcapi_test_longlong_api, METH_NOARGS, _testlimitedcapi_test_longlong_api__doc__}, + +static PyObject * +_testlimitedcapi_test_longlong_api_impl(PyObject *module); + +static PyObject * +_testlimitedcapi_test_longlong_api(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + return _testlimitedcapi_test_longlong_api_impl(module); +} + +PyDoc_STRVAR(_testlimitedcapi_test_long_and_overflow__doc__, +"test_long_and_overflow($module, /)\n" +"--\n" +"\n" +"Test the PyLong_AsLongAndOverflow API.\n" +"\n" +"General conversion to PY_LONG is tested by test_long_api_inner.\n" +"This test will concentrate on proper handling of overflow."); + +#define _TESTLIMITEDCAPI_TEST_LONG_AND_OVERFLOW_METHODDEF \ + {"test_long_and_overflow", (PyCFunction)_testlimitedcapi_test_long_and_overflow, METH_NOARGS, _testlimitedcapi_test_long_and_overflow__doc__}, + +static PyObject * +_testlimitedcapi_test_long_and_overflow_impl(PyObject *module); + +static PyObject * +_testlimitedcapi_test_long_and_overflow(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + return _testlimitedcapi_test_long_and_overflow_impl(module); +} + +PyDoc_STRVAR(_testlimitedcapi_test_long_long_and_overflow__doc__, +"test_long_long_and_overflow($module, /)\n" +"--\n" +"\n" +"Test the PyLong_AsLongLongAndOverflow API.\n" +"\n" +"General conversion to long long is tested by test_long_api_inner.\n" +"This test will concentrate on proper handling of overflow."); + +#define _TESTLIMITEDCAPI_TEST_LONG_LONG_AND_OVERFLOW_METHODDEF \ + {"test_long_long_and_overflow", (PyCFunction)_testlimitedcapi_test_long_long_and_overflow, METH_NOARGS, _testlimitedcapi_test_long_long_and_overflow__doc__}, + +static PyObject * +_testlimitedcapi_test_long_long_and_overflow_impl(PyObject *module); + +static PyObject * +_testlimitedcapi_test_long_long_and_overflow(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + return _testlimitedcapi_test_long_long_and_overflow_impl(module); +} + +PyDoc_STRVAR(_testlimitedcapi_test_long_as_size_t__doc__, +"test_long_as_size_t($module, /)\n" +"--\n" +"\n" +"Test the PyLong_As{Size,Ssize}_t API.\n" +"\n" +"At present this just tests that non-integer arguments are handled correctly.\n" +"It should be extended to test overflow handling."); + +#define _TESTLIMITEDCAPI_TEST_LONG_AS_SIZE_T_METHODDEF \ + {"test_long_as_size_t", (PyCFunction)_testlimitedcapi_test_long_as_size_t, METH_NOARGS, _testlimitedcapi_test_long_as_size_t__doc__}, + +static PyObject * +_testlimitedcapi_test_long_as_size_t_impl(PyObject *module); + +static PyObject * +_testlimitedcapi_test_long_as_size_t(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + return _testlimitedcapi_test_long_as_size_t_impl(module); +} + +PyDoc_STRVAR(_testlimitedcapi_test_long_as_unsigned_long_long_mask__doc__, +"test_long_as_unsigned_long_long_mask($module, /)\n" +"--\n" +"\n"); + +#define _TESTLIMITEDCAPI_TEST_LONG_AS_UNSIGNED_LONG_LONG_MASK_METHODDEF \ + {"test_long_as_unsigned_long_long_mask", (PyCFunction)_testlimitedcapi_test_long_as_unsigned_long_long_mask, METH_NOARGS, _testlimitedcapi_test_long_as_unsigned_long_long_mask__doc__}, + +static PyObject * 
+_testlimitedcapi_test_long_as_unsigned_long_long_mask_impl(PyObject *module); + +static PyObject * +_testlimitedcapi_test_long_as_unsigned_long_long_mask(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + return _testlimitedcapi_test_long_as_unsigned_long_long_mask_impl(module); +} + +PyDoc_STRVAR(_testlimitedcapi_test_long_as_double__doc__, +"test_long_as_double($module, /)\n" +"--\n" +"\n"); + +#define _TESTLIMITEDCAPI_TEST_LONG_AS_DOUBLE_METHODDEF \ + {"test_long_as_double", (PyCFunction)_testlimitedcapi_test_long_as_double, METH_NOARGS, _testlimitedcapi_test_long_as_double__doc__}, + +static PyObject * +_testlimitedcapi_test_long_as_double_impl(PyObject *module); + +static PyObject * +_testlimitedcapi_test_long_as_double(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + return _testlimitedcapi_test_long_as_double_impl(module); +} + +PyDoc_STRVAR(_testlimitedcapi_PyLong_AsInt__doc__, +"PyLong_AsInt($module, arg, /)\n" +"--\n" +"\n"); + +#define _TESTLIMITEDCAPI_PYLONG_ASINT_METHODDEF \ + {"PyLong_AsInt", (PyCFunction)_testlimitedcapi_PyLong_AsInt, METH_O, _testlimitedcapi_PyLong_AsInt__doc__}, +/*[clinic end generated code: output=bc52b73c599f96c2 input=a9049054013a1b77]*/ diff --git a/Modules/_testlimitedcapi/complex.c b/Modules/_testlimitedcapi/complex.c new file mode 100644 index 00000000000000..e4c244e5c88d06 --- /dev/null +++ b/Modules/_testlimitedcapi/complex.c @@ -0,0 +1,79 @@ +#include "parts.h" +#include "util.h" + + +static PyObject * +complex_check(PyObject *Py_UNUSED(module), PyObject *obj) +{ + NULLABLE(obj); + return PyLong_FromLong(PyComplex_Check(obj)); +} + +static PyObject * +complex_checkexact(PyObject *Py_UNUSED(module), PyObject *obj) +{ + NULLABLE(obj); + return PyLong_FromLong(PyComplex_CheckExact(obj)); +} + +static PyObject * +complex_fromdoubles(PyObject *Py_UNUSED(module), PyObject *args) +{ + double real, imag; + + if (!PyArg_ParseTuple(args, "dd", &real, &imag)) { + return NULL; + } + + return PyComplex_FromDoubles(real, imag); +} + +static PyObject * +complex_realasdouble(PyObject *Py_UNUSED(module), PyObject *obj) +{ + double real; + + NULLABLE(obj); + real = PyComplex_RealAsDouble(obj); + + if (real == -1. && PyErr_Occurred()) { + return NULL; + } + + return PyFloat_FromDouble(real); +} + +static PyObject * +complex_imagasdouble(PyObject *Py_UNUSED(module), PyObject *obj) +{ + double imag; + + NULLABLE(obj); + imag = PyComplex_ImagAsDouble(obj); + + if (imag == -1. 
&& PyErr_Occurred()) { + return NULL; + } + + return PyFloat_FromDouble(imag); +} + + +static PyMethodDef test_methods[] = { + {"complex_check", complex_check, METH_O}, + {"complex_checkexact", complex_checkexact, METH_O}, + {"complex_fromdoubles", complex_fromdoubles, METH_VARARGS}, + {"complex_realasdouble", complex_realasdouble, METH_O}, + {"complex_imagasdouble", complex_imagasdouble, METH_O}, + {NULL}, +}; + +int +_PyTestLimitedCAPI_Init_Complex(PyObject *mod) +{ + if (PyModule_AddFunctions(mod, test_methods) < 0) { + return -1; + } + + return 0; +} diff --git a/Modules/_testlimitedcapi/dict.c b/Modules/_testlimitedcapi/dict.c new file mode 100644 index 00000000000000..ec32712eef6434 --- /dev/null +++ b/Modules/_testlimitedcapi/dict.c @@ -0,0 +1,291 @@ +#include "parts.h" +#include "util.h" + + +static PyObject * +dict_check(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PyLong_FromLong(PyDict_Check(obj)); +} + +static PyObject * +dict_checkexact(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PyLong_FromLong(PyDict_CheckExact(obj)); +} + +static PyObject * +dict_new(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + return PyDict_New(); +} + +static PyObject * +dictproxy_new(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PyDictProxy_New(obj); +} + +static PyObject * +dict_clear(PyObject *self, PyObject *obj) +{ + PyDict_Clear(obj); + Py_RETURN_NONE; +} + +static PyObject * +dict_copy(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PyDict_Copy(obj); +} + +static PyObject * +dict_contains(PyObject *self, PyObject *args) +{ + PyObject *obj, *key; + if (!PyArg_ParseTuple(args, "OO", &obj, &key)) { + return NULL; + } + NULLABLE(obj); + NULLABLE(key); + RETURN_INT(PyDict_Contains(obj, key)); +} + +static PyObject * +dict_size(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + RETURN_SIZE(PyDict_Size(obj)); +} + +static PyObject * +dict_getitem(PyObject *self, PyObject *args) +{ + PyObject *mapping, *key; + if (!PyArg_ParseTuple(args, "OO", &mapping, &key)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(key); + PyObject *value = PyDict_GetItem(mapping, key); + if (value == NULL) { + if (PyErr_Occurred()) { + return NULL; + } + return Py_NewRef(PyExc_KeyError); + } + return Py_NewRef(value); +} + +static PyObject * +dict_getitemstring(PyObject *self, PyObject *args) +{ + PyObject *mapping; + const char *key; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "Oz#", &mapping, &key, &size)) { + return NULL; + } + NULLABLE(mapping); + PyObject *value = PyDict_GetItemString(mapping, key); + if (value == NULL) { + if (PyErr_Occurred()) { + return NULL; + } + return Py_NewRef(PyExc_KeyError); + } + return Py_NewRef(value); +} + +static PyObject * +dict_getitemwitherror(PyObject *self, PyObject *args) +{ + PyObject *mapping, *key; + if (!PyArg_ParseTuple(args, "OO", &mapping, &key)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(key); + PyObject *value = PyDict_GetItemWithError(mapping, key); + if (value == NULL) { + if (PyErr_Occurred()) { + return NULL; + } + return Py_NewRef(PyExc_KeyError); + } + return Py_NewRef(value); +} + + +static PyObject * +dict_setitem(PyObject *self, PyObject *args) +{ + PyObject *mapping, *key, *value; + if (!PyArg_ParseTuple(args, "OOO", &mapping, &key, &value)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(key); + NULLABLE(value); + RETURN_INT(PyDict_SetItem(mapping, key, value)); +} + +static PyObject * +dict_setitemstring(PyObject *self, PyObject *args) +{ + PyObject *mapping, *value; + const 
char *key; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "Oz#O", &mapping, &key, &size, &value)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(value); + RETURN_INT(PyDict_SetItemString(mapping, key, value)); +} + +static PyObject * +dict_delitem(PyObject *self, PyObject *args) +{ + PyObject *mapping, *key; + if (!PyArg_ParseTuple(args, "OO", &mapping, &key)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(key); + RETURN_INT(PyDict_DelItem(mapping, key)); +} + +static PyObject * +dict_delitemstring(PyObject *self, PyObject *args) +{ + PyObject *mapping; + const char *key; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "Oz#", &mapping, &key, &size)) { + return NULL; + } + NULLABLE(mapping); + RETURN_INT(PyDict_DelItemString(mapping, key)); +} + +static PyObject * +dict_keys(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PyDict_Keys(obj); +} + +static PyObject * +dict_values(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PyDict_Values(obj); +} + +static PyObject * +dict_items(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PyDict_Items(obj); +} + +static PyObject * +dict_next(PyObject *self, PyObject *args) +{ + PyObject *mapping, *key = UNINITIALIZED_PTR, *value = UNINITIALIZED_PTR; + Py_ssize_t pos; + if (!PyArg_ParseTuple(args, "On", &mapping, &pos)) { + return NULL; + } + NULLABLE(mapping); + int rc = PyDict_Next(mapping, &pos, &key, &value); + if (rc != 0) { + return Py_BuildValue("inOO", rc, pos, key, value); + } + assert(key == UNINITIALIZED_PTR); + assert(value == UNINITIALIZED_PTR); + if (PyErr_Occurred()) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject * +dict_merge(PyObject *self, PyObject *args) +{ + PyObject *mapping, *mapping2; + int override; + if (!PyArg_ParseTuple(args, "OOi", &mapping, &mapping2, &override)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(mapping2); + RETURN_INT(PyDict_Merge(mapping, mapping2, override)); +} + +static PyObject * +dict_update(PyObject *self, PyObject *args) +{ + PyObject *mapping, *mapping2; + if (!PyArg_ParseTuple(args, "OO", &mapping, &mapping2)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(mapping2); + RETURN_INT(PyDict_Update(mapping, mapping2)); +} + +static PyObject * +dict_mergefromseq2(PyObject *self, PyObject *args) +{ + PyObject *mapping, *seq; + int override; + if (!PyArg_ParseTuple(args, "OOi", &mapping, &seq, &override)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(seq); + RETURN_INT(PyDict_MergeFromSeq2(mapping, seq, override)); +} + + +static PyMethodDef test_methods[] = { + {"dict_check", dict_check, METH_O}, + {"dict_checkexact", dict_checkexact, METH_O}, + {"dict_new", dict_new, METH_NOARGS}, + {"dictproxy_new", dictproxy_new, METH_O}, + {"dict_clear", dict_clear, METH_O}, + {"dict_copy", dict_copy, METH_O}, + {"dict_size", dict_size, METH_O}, + {"dict_getitem", dict_getitem, METH_VARARGS}, + {"dict_getitemwitherror", dict_getitemwitherror, METH_VARARGS}, + {"dict_getitemstring", dict_getitemstring, METH_VARARGS}, + {"dict_contains", dict_contains, METH_VARARGS}, + {"dict_setitem", dict_setitem, METH_VARARGS}, + {"dict_setitemstring", dict_setitemstring, METH_VARARGS}, + {"dict_delitem", dict_delitem, METH_VARARGS}, + {"dict_delitemstring", dict_delitemstring, METH_VARARGS}, + {"dict_keys", dict_keys, METH_O}, + {"dict_values", dict_values, METH_O}, + {"dict_items", dict_items, METH_O}, + {"dict_next", dict_next, METH_VARARGS}, + {"dict_merge", dict_merge, METH_VARARGS}, + {"dict_update", dict_update, METH_VARARGS}, + 
{"dict_mergefromseq2", dict_mergefromseq2, METH_VARARGS}, + {NULL}, +}; + +int +_PyTestLimitedCAPI_Init_Dict(PyObject *m) +{ + if (PyModule_AddFunctions(m, test_methods) < 0) { + return -1; + } + + return 0; +} diff --git a/Modules/_testlimitedcapi/float.c b/Modules/_testlimitedcapi/float.c new file mode 100644 index 00000000000000..88dc91f682ff39 --- /dev/null +++ b/Modules/_testlimitedcapi/float.c @@ -0,0 +1,91 @@ +#include "parts.h" +#include "util.h" + + +static PyObject * +float_check(PyObject *Py_UNUSED(module), PyObject *obj) +{ + NULLABLE(obj); + return PyLong_FromLong(PyFloat_Check(obj)); +} + +static PyObject * +float_checkexact(PyObject *Py_UNUSED(module), PyObject *obj) +{ + NULLABLE(obj); + return PyLong_FromLong(PyFloat_CheckExact(obj)); +} + +static PyObject * +float_fromstring(PyObject *Py_UNUSED(module), PyObject *obj) +{ + NULLABLE(obj); + return PyFloat_FromString(obj); +} + +static PyObject * +float_fromdouble(PyObject *Py_UNUSED(module), PyObject *obj) +{ + double d; + + if (!PyArg_Parse(obj, "d", &d)) { + return NULL; + } + + return PyFloat_FromDouble(d); +} + +static PyObject * +float_asdouble(PyObject *Py_UNUSED(module), PyObject *obj) +{ + double d; + + NULLABLE(obj); + d = PyFloat_AsDouble(obj); + if (d == -1. && PyErr_Occurred()) { + return NULL; + } + + return PyFloat_FromDouble(d); +} + +static PyObject * +float_getinfo(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(arg)) +{ + return PyFloat_GetInfo(); +} + +static PyObject * +float_getmax(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(arg)) +{ + return PyFloat_FromDouble(PyFloat_GetMax()); +} + +static PyObject * +float_getmin(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(arg)) +{ + return PyFloat_FromDouble(PyFloat_GetMin()); +} + + +static PyMethodDef test_methods[] = { + {"float_check", float_check, METH_O}, + {"float_checkexact", float_checkexact, METH_O}, + {"float_fromstring", float_fromstring, METH_O}, + {"float_fromdouble", float_fromdouble, METH_O}, + {"float_asdouble", float_asdouble, METH_O}, + {"float_getinfo", float_getinfo, METH_NOARGS}, + {"float_getmax", float_getmax, METH_NOARGS}, + {"float_getmin", float_getmin, METH_NOARGS}, + {NULL}, +}; + +int +_PyTestLimitedCAPI_Init_Float(PyObject *mod) +{ + if (PyModule_AddFunctions(mod, test_methods) < 0) { + return -1; + } + + return 0; +} diff --git a/Modules/_testlimitedcapi/heaptype_relative.c b/Modules/_testlimitedcapi/heaptype_relative.c index d0316dd4fc63b4..7c508c6182bc8a 100644 --- a/Modules/_testlimitedcapi/heaptype_relative.c +++ b/Modules/_testlimitedcapi/heaptype_relative.c @@ -331,7 +331,8 @@ static PyMethodDef TestMethods[] = { }; int -_PyTestCapi_Init_HeaptypeRelative(PyObject *m) { +_PyTestLimitedCAPI_Init_HeaptypeRelative(PyObject *m) +{ if (PyModule_AddFunctions(m, TestMethods) < 0) { return -1; } diff --git a/Modules/_testlimitedcapi/list.c b/Modules/_testlimitedcapi/list.c new file mode 100644 index 00000000000000..3022cbf9191b2e --- /dev/null +++ b/Modules/_testlimitedcapi/list.c @@ -0,0 +1,169 @@ +#include "parts.h" +#include "util.h" + +static PyObject * +list_check(PyObject* Py_UNUSED(module), PyObject *obj) +{ + NULLABLE(obj); + return PyLong_FromLong(PyList_Check(obj)); +} + +static PyObject * +list_check_exact(PyObject* Py_UNUSED(module), PyObject *obj) +{ + NULLABLE(obj); + return PyLong_FromLong(PyList_CheckExact(obj)); +} + +static PyObject * +list_new(PyObject* Py_UNUSED(module), PyObject *obj) +{ + return PyList_New(PyLong_AsSsize_t(obj)); +} + +static PyObject * +list_size(PyObject *Py_UNUSED(module), PyObject 
*obj) +{ + NULLABLE(obj); + RETURN_SIZE(PyList_Size(obj)); +} + +static PyObject * +list_getitem(PyObject *Py_UNUSED(module), PyObject *args) +{ + PyObject *obj; + Py_ssize_t i; + if (!PyArg_ParseTuple(args, "On", &obj, &i)) { + return NULL; + } + NULLABLE(obj); + return Py_XNewRef(PyList_GetItem(obj, i)); +} + +static PyObject * +list_get_item_ref(PyObject *Py_UNUSED(module), PyObject *args) +{ + PyObject *obj; + Py_ssize_t i; + if (!PyArg_ParseTuple(args, "On", &obj, &i)) { + return NULL; + } + NULLABLE(obj); + return PyList_GetItemRef(obj, i); +} + +static PyObject * +list_setitem(PyObject *Py_UNUSED(module), PyObject *args) +{ + PyObject *obj, *value; + Py_ssize_t i; + if (!PyArg_ParseTuple(args, "OnO", &obj, &i, &value)) { + return NULL; + } + NULLABLE(obj); + NULLABLE(value); + RETURN_INT(PyList_SetItem(obj, i, Py_XNewRef(value))); + +} + +static PyObject * +list_insert(PyObject *Py_UNUSED(module), PyObject *args) +{ + PyObject *obj, *value; + Py_ssize_t where; + if (!PyArg_ParseTuple(args, "OnO", &obj, &where, &value)) { + return NULL; + } + NULLABLE(obj); + NULLABLE(value); + RETURN_INT(PyList_Insert(obj, where, Py_XNewRef(value))); + +} + +static PyObject * +list_append(PyObject *Py_UNUSED(module), PyObject *args) +{ + PyObject *obj, *value; + if (!PyArg_ParseTuple(args, "OO", &obj, &value)) { + return NULL; + } + NULLABLE(obj); + NULLABLE(value); + RETURN_INT(PyList_Append(obj, value)); +} + +static PyObject * +list_getslice(PyObject *Py_UNUSED(module), PyObject *args) +{ + PyObject *obj; + Py_ssize_t ilow, ihigh; + if (!PyArg_ParseTuple(args, "Onn", &obj, &ilow, &ihigh)) { + return NULL; + } + NULLABLE(obj); + return PyList_GetSlice(obj, ilow, ihigh); + +} + +static PyObject * +list_setslice(PyObject *Py_UNUSED(module), PyObject *args) +{ + PyObject *obj, *value; + Py_ssize_t ilow, ihigh; + if (!PyArg_ParseTuple(args, "OnnO", &obj, &ilow, &ihigh, &value)) { + return NULL; + } + NULLABLE(obj); + NULLABLE(value); + RETURN_INT(PyList_SetSlice(obj, ilow, ihigh, value)); +} + +static PyObject * +list_sort(PyObject* Py_UNUSED(module), PyObject *obj) +{ + NULLABLE(obj); + RETURN_INT(PyList_Sort(obj)); +} + +static PyObject * +list_reverse(PyObject* Py_UNUSED(module), PyObject *obj) +{ + NULLABLE(obj); + RETURN_INT(PyList_Reverse(obj)); +} + +static PyObject * +list_astuple(PyObject* Py_UNUSED(module), PyObject *obj) +{ + NULLABLE(obj); + return PyList_AsTuple(obj); +} + + +static PyMethodDef test_methods[] = { + {"list_check", list_check, METH_O}, + {"list_check_exact", list_check_exact, METH_O}, + {"list_new", list_new, METH_O}, + {"list_size", list_size, METH_O}, + {"list_getitem", list_getitem, METH_VARARGS}, + {"list_get_item_ref", list_get_item_ref, METH_VARARGS}, + {"list_setitem", list_setitem, METH_VARARGS}, + {"list_insert", list_insert, METH_VARARGS}, + {"list_append", list_append, METH_VARARGS}, + {"list_getslice", list_getslice, METH_VARARGS}, + {"list_setslice", list_setslice, METH_VARARGS}, + {"list_sort", list_sort, METH_O}, + {"list_reverse", list_reverse, METH_O}, + {"list_astuple", list_astuple, METH_O}, + {NULL}, +}; + +int +_PyTestLimitedCAPI_Init_List(PyObject *m) +{ + if (PyModule_AddFunctions(m, test_methods) < 0) { + return -1; + } + + return 0; +} diff --git a/Modules/_testlimitedcapi/long.c b/Modules/_testlimitedcapi/long.c new file mode 100644 index 00000000000000..5953009b6ef9b7 --- /dev/null +++ b/Modules/_testlimitedcapi/long.c @@ -0,0 +1,798 @@ +#include "pyconfig.h" // Py_GIL_DISABLED +#ifndef Py_GIL_DISABLED + // Need limited C API 3.13 to test 
PyLong_AsInt() +# define Py_LIMITED_API 0x030d0000 +#endif + +#include "parts.h" +#include "util.h" +#include "clinic/long.c.h" + +/*[clinic input] +module _testlimitedcapi +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=2700057f9c1135ba]*/ + + +static PyObject * +raiseTestError(const char* test_name, const char* msg) +{ + PyErr_Format(PyExc_AssertionError, "%s: %s", test_name, msg); + return NULL; +} + +/* Tests of PyLong_{As, From}{Unsigned,}Long(), and + PyLong_{As, From}{Unsigned,}LongLong(). + + Note that the meat of the test is contained in testcapi_long.h. + This is revolting, but delicate code duplication is worse: "almost + exactly the same" code is needed to test long long, but the ubiquitous + dependence on type names makes it impossible to use a parameterized + function. A giant macro would be even worse than this. A C++ template + would be perfect. + + The "report an error" functions are deliberately not part of the #include + file: if the test fails, you can set a breakpoint in the appropriate + error function directly, and crawl back from there in the debugger. +*/ + +#define UNBIND(X) Py_DECREF(X); (X) = NULL + +static PyObject * +raise_test_long_error(const char* msg) +{ + return raiseTestError("test_long_api", msg); +} + +// Test PyLong_FromLong()/PyLong_AsLong() +// and PyLong_FromUnsignedLong()/PyLong_AsUnsignedLong(). + +#define TESTNAME test_long_api_inner +#define TYPENAME long +#define F_S_TO_PY PyLong_FromLong +#define F_PY_TO_S PyLong_AsLong +#define F_U_TO_PY PyLong_FromUnsignedLong +#define F_PY_TO_U PyLong_AsUnsignedLong + +#include "testcapi_long.h" + +/*[clinic input] +_testlimitedcapi.test_long_api +[clinic start generated code]*/ + +static PyObject * +_testlimitedcapi_test_long_api_impl(PyObject *module) +/*[clinic end generated code: output=06a2c02366d1853a input=9012b3d6a483df63]*/ +{ + return TESTNAME(raise_test_long_error); +} + +#undef TESTNAME +#undef TYPENAME +#undef F_S_TO_PY +#undef F_PY_TO_S +#undef F_U_TO_PY +#undef F_PY_TO_U + +// Test PyLong_FromLongLong()/PyLong_AsLongLong() +// and PyLong_FromUnsignedLongLong()/PyLong_AsUnsignedLongLong(). + +static PyObject * +raise_test_longlong_error(const char* msg) +{ + return raiseTestError("test_longlong_api", msg); +} + +#define TESTNAME test_longlong_api_inner +#define TYPENAME long long +#define F_S_TO_PY PyLong_FromLongLong +#define F_PY_TO_S PyLong_AsLongLong +#define F_U_TO_PY PyLong_FromUnsignedLongLong +#define F_PY_TO_U PyLong_AsUnsignedLongLong + +#include "testcapi_long.h" + +/*[clinic input] +_testlimitedcapi.test_longlong_api +[clinic start generated code]*/ + +static PyObject * +_testlimitedcapi_test_longlong_api_impl(PyObject *module) +/*[clinic end generated code: output=8faa10e1c35214bf input=2b582a9d25bd68e7]*/ +{ + return TESTNAME(raise_test_longlong_error); +} + +#undef TESTNAME +#undef TYPENAME +#undef F_S_TO_PY +#undef F_PY_TO_S +#undef F_U_TO_PY +#undef F_PY_TO_U + + +/*[clinic input] +_testlimitedcapi.test_long_and_overflow + +Test the PyLong_AsLongAndOverflow API. + +General conversion to PY_LONG is tested by test_long_api_inner. +This test will concentrate on proper handling of overflow. +[clinic start generated code]*/ + +static PyObject * +_testlimitedcapi_test_long_and_overflow_impl(PyObject *module) +/*[clinic end generated code: output=fdfd3c1eeabb6d14 input=e3a18791de6519fe]*/ +{ + PyObject *num, *one, *temp; + long value; + int overflow; + + /* Test that overflow is set properly for a large value. 
*/ + /* num is a number larger than LONG_MAX even on 64-bit platforms */ + num = PyLong_FromString("FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_and_overflow", + "return value was not set to -1"); + if (overflow != 1) + return raiseTestError("test_long_and_overflow", + "overflow was not set to 1"); + + /* Same again, with num = LONG_MAX + 1 */ + num = PyLong_FromLong(LONG_MAX); + if (num == NULL) + return NULL; + one = PyLong_FromLong(1L); + if (one == NULL) { + Py_DECREF(num); + return NULL; + } + temp = PyNumber_Add(num, one); + Py_DECREF(one); + Py_DECREF(num); + num = temp; + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_and_overflow", + "return value was not set to -1"); + if (overflow != 1) + return raiseTestError("test_long_and_overflow", + "overflow was not set to 1"); + + /* Test that overflow is set properly for a large negative value. */ + /* num is a number smaller than LONG_MIN even on 64-bit platforms */ + num = PyLong_FromString("-FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_and_overflow", + "return value was not set to -1"); + if (overflow != -1) + return raiseTestError("test_long_and_overflow", + "overflow was not set to -1"); + + /* Same again, with num = LONG_MIN - 1 */ + num = PyLong_FromLong(LONG_MIN); + if (num == NULL) + return NULL; + one = PyLong_FromLong(1L); + if (one == NULL) { + Py_DECREF(num); + return NULL; + } + temp = PyNumber_Subtract(num, one); + Py_DECREF(one); + Py_DECREF(num); num = temp; + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_and_overflow", + "return value was not set to -1"); + if (overflow != -1) + return raiseTestError("test_long_and_overflow", + "overflow was not set to -1"); + + /* Test that overflow is cleared properly for small values. 
*/ + num = PyLong_FromString("FF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != 0xFF) + return raiseTestError("test_long_and_overflow", + "expected return value 0xFF"); + if (overflow != 0) + return raiseTestError("test_long_and_overflow", + "overflow was not cleared"); + + num = PyLong_FromString("-FF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -0xFF) + return raiseTestError("test_long_and_overflow", + "expected return value 0xFF"); + if (overflow != 0) + return raiseTestError("test_long_and_overflow", + "overflow was set incorrectly"); + + num = PyLong_FromLong(LONG_MAX); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != LONG_MAX) + return raiseTestError("test_long_and_overflow", + "expected return value LONG_MAX"); + if (overflow != 0) + return raiseTestError("test_long_and_overflow", + "overflow was not cleared"); + + num = PyLong_FromLong(LONG_MIN); + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != LONG_MIN) + return raiseTestError("test_long_and_overflow", + "expected return value LONG_MIN"); + if (overflow != 0) + return raiseTestError("test_long_and_overflow", + "overflow was not cleared"); + + Py_RETURN_NONE; +} + +/*[clinic input] +_testlimitedcapi.test_long_long_and_overflow + +Test the PyLong_AsLongLongAndOverflow API. + +General conversion to long long is tested by test_long_api_inner. +This test will concentrate on proper handling of overflow. +[clinic start generated code]*/ + +static PyObject * +_testlimitedcapi_test_long_long_and_overflow_impl(PyObject *module) +/*[clinic end generated code: output=3d2721a49c09a307 input=741c593b606cc6b3]*/ +{ + PyObject *num, *one, *temp; + long long value; + int overflow; + + /* Test that overflow is set properly for a large value. */ + /* num is a number larger than LLONG_MAX on a typical machine. 
*/ + num = PyLong_FromString("FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_long_and_overflow", + "return value was not set to -1"); + if (overflow != 1) + return raiseTestError("test_long_long_and_overflow", + "overflow was not set to 1"); + + /* Same again, with num = LLONG_MAX + 1 */ + num = PyLong_FromLongLong(LLONG_MAX); + if (num == NULL) + return NULL; + one = PyLong_FromLong(1L); + if (one == NULL) { + Py_DECREF(num); + return NULL; + } + temp = PyNumber_Add(num, one); + Py_DECREF(one); + Py_DECREF(num); num = temp; + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_long_and_overflow", + "return value was not set to -1"); + if (overflow != 1) + return raiseTestError("test_long_long_and_overflow", + "overflow was not set to 1"); + + /* Test that overflow is set properly for a large negative value. */ + /* num is a number smaller than LLONG_MIN on a typical platform */ + num = PyLong_FromString("-FFFFFFFFFFFFFFFFFFFFFFFF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_long_and_overflow", + "return value was not set to -1"); + if (overflow != -1) + return raiseTestError("test_long_long_and_overflow", + "overflow was not set to -1"); + + /* Same again, with num = LLONG_MIN - 1 */ + num = PyLong_FromLongLong(LLONG_MIN); + if (num == NULL) + return NULL; + one = PyLong_FromLong(1L); + if (one == NULL) { + Py_DECREF(num); + return NULL; + } + temp = PyNumber_Subtract(num, one); + Py_DECREF(one); + Py_DECREF(num); num = temp; + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -1) + return raiseTestError("test_long_long_and_overflow", + "return value was not set to -1"); + if (overflow != -1) + return raiseTestError("test_long_long_and_overflow", + "overflow was not set to -1"); + + /* Test that overflow is cleared properly for small values. 
*/ + num = PyLong_FromString("FF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != 0xFF) + return raiseTestError("test_long_long_and_overflow", + "expected return value 0xFF"); + if (overflow != 0) + return raiseTestError("test_long_long_and_overflow", + "overflow was not cleared"); + + num = PyLong_FromString("-FF", NULL, 16); + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != -0xFF) + return raiseTestError("test_long_long_and_overflow", + "expected return value 0xFF"); + if (overflow != 0) + return raiseTestError("test_long_long_and_overflow", + "overflow was set incorrectly"); + + num = PyLong_FromLongLong(LLONG_MAX); + if (num == NULL) + return NULL; + overflow = 1234; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != LLONG_MAX) + return raiseTestError("test_long_long_and_overflow", + "expected return value LLONG_MAX"); + if (overflow != 0) + return raiseTestError("test_long_long_and_overflow", + "overflow was not cleared"); + + num = PyLong_FromLongLong(LLONG_MIN); + if (num == NULL) + return NULL; + overflow = 0; + value = PyLong_AsLongLongAndOverflow(num, &overflow); + Py_DECREF(num); + if (value == -1 && PyErr_Occurred()) + return NULL; + if (value != LLONG_MIN) + return raiseTestError("test_long_long_and_overflow", + "expected return value LLONG_MIN"); + if (overflow != 0) + return raiseTestError("test_long_long_and_overflow", + "overflow was not cleared"); + + Py_RETURN_NONE; +} + +/*[clinic input] +_testlimitedcapi.test_long_as_size_t + +Test the PyLong_As{Size,Ssize}_t API. + +At present this just tests that non-integer arguments are handled correctly. +It should be extended to test overflow handling. +[clinic start generated code]*/ + +static PyObject * +_testlimitedcapi_test_long_as_size_t_impl(PyObject *module) +/*[clinic end generated code: output=297a9f14a42f55af input=8923d8f2038c46f4]*/ +{ + size_t out_u; + Py_ssize_t out_s; + + Py_INCREF(Py_None); + + out_u = PyLong_AsSize_t(Py_None); + if (out_u != (size_t)-1 || !PyErr_Occurred()) + return raiseTestError("test_long_as_size_t", + "PyLong_AsSize_t(None) didn't complain"); + if (!PyErr_ExceptionMatches(PyExc_TypeError)) + return raiseTestError("test_long_as_size_t", + "PyLong_AsSize_t(None) raised " + "something other than TypeError"); + PyErr_Clear(); + + out_s = PyLong_AsSsize_t(Py_None); + if (out_s != (Py_ssize_t)-1 || !PyErr_Occurred()) + return raiseTestError("test_long_as_size_t", + "PyLong_AsSsize_t(None) didn't complain"); + if (!PyErr_ExceptionMatches(PyExc_TypeError)) + return raiseTestError("test_long_as_size_t", + "PyLong_AsSsize_t(None) raised " + "something other than TypeError"); + PyErr_Clear(); + + /* Py_INCREF(Py_None) omitted - we already have a reference to it. 
*/ + return Py_None; +} + +/*[clinic input] +_testlimitedcapi.test_long_as_unsigned_long_long_mask +[clinic start generated code]*/ + +static PyObject * +_testlimitedcapi_test_long_as_unsigned_long_long_mask_impl(PyObject *module) +/*[clinic end generated code: output=90be09ffeec8ecab input=17c660bd58becad5]*/ +{ + unsigned long long res = PyLong_AsUnsignedLongLongMask(NULL); + + if (res != (unsigned long long)-1 || !PyErr_Occurred()) { + return raiseTestError("test_long_as_unsigned_long_long_mask", + "PyLong_AsUnsignedLongLongMask(NULL) didn't " + "complain"); + } + if (!PyErr_ExceptionMatches(PyExc_SystemError)) { + return raiseTestError("test_long_as_unsigned_long_long_mask", + "PyLong_AsUnsignedLongLongMask(NULL) raised " + "something other than SystemError"); + } + PyErr_Clear(); + Py_RETURN_NONE; +} + +/*[clinic input] +_testlimitedcapi.test_long_as_double +[clinic start generated code]*/ + +static PyObject * +_testlimitedcapi_test_long_as_double_impl(PyObject *module) +/*[clinic end generated code: output=0e688c2acf224f88 input=e7b5712385064a48]*/ +{ + double out; + + Py_INCREF(Py_None); + + out = PyLong_AsDouble(Py_None); + if (out != -1.0 || !PyErr_Occurred()) + return raiseTestError("test_long_as_double", + "PyLong_AsDouble(None) didn't complain"); + if (!PyErr_ExceptionMatches(PyExc_TypeError)) + return raiseTestError("test_long_as_double", + "PyLong_AsDouble(None) raised " + "something other than TypeError"); + PyErr_Clear(); + + /* Py_INCREF(Py_None) omitted - we already have a reference to it. */ + return Py_None; +} + +static PyObject * +pylong_check(PyObject *module, PyObject *obj) +{ + NULLABLE(obj); + return PyLong_FromLong(PyLong_Check(obj)); +} + +static PyObject * +pylong_checkexact(PyObject *module, PyObject *obj) +{ + NULLABLE(obj); + return PyLong_FromLong(PyLong_CheckExact(obj)); +} + +static PyObject * +pylong_fromdouble(PyObject *module, PyObject *arg) +{ + double value; + if (!PyArg_Parse(arg, "d", &value)) { + return NULL; + } + return PyLong_FromDouble(value); +} + +static PyObject * +pylong_fromstring(PyObject *module, PyObject *args) +{ + const char *str; + Py_ssize_t len; + int base; + char *end = UNINITIALIZED_PTR; + if (!PyArg_ParseTuple(args, "z#i", &str, &len, &base)) { + return NULL; + } + + PyObject *result = PyLong_FromString(str, &end, base); + if (result == NULL) { + // XXX 'end' is not always set. 
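        // Annotation (added commentary, not part of the original diff):
        // PyLong_FromString() is only guaranteed to update *pend when it
        // actually produces a result, so on this error path 'end' may still
        // hold the UNINITIALIZED_PTR sentinel (presumably defined in this
        // module's util.h). That is why no (end - str) offset is reported
        // for the failure case below.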
+ return NULL; + } + return Py_BuildValue("Nn", result, (Py_ssize_t)(end - str)); +} + +static PyObject * +pylong_fromvoidptr(PyObject *module, PyObject *arg) +{ + NULLABLE(arg); + return PyLong_FromVoidPtr((void *)arg); +} + +/*[clinic input] +_testlimitedcapi.PyLong_AsInt + arg: object + / +[clinic start generated code]*/ + +static PyObject * +_testlimitedcapi_PyLong_AsInt(PyObject *module, PyObject *arg) +/*[clinic end generated code: output=d91db4c1287f85fa input=32c66be86f3265a1]*/ +{ + NULLABLE(arg); + assert(!PyErr_Occurred()); + int value = PyLong_AsInt(arg); + if (value == -1 && PyErr_Occurred()) { + return NULL; + } + return PyLong_FromLong(value); +} + +static PyObject * +pylong_aslong(PyObject *module, PyObject *arg) +{ + NULLABLE(arg); + long value = PyLong_AsLong(arg); + if (value == -1 && PyErr_Occurred()) { + return NULL; + } + return PyLong_FromLong(value); +} + +static PyObject * +pylong_aslongandoverflow(PyObject *module, PyObject *arg) +{ + NULLABLE(arg); + int overflow = UNINITIALIZED_INT; + long value = PyLong_AsLongAndOverflow(arg, &overflow); + if (value == -1 && PyErr_Occurred()) { + assert(overflow == -1); + return NULL; + } + return Py_BuildValue("li", value, overflow); +} + +static PyObject * +pylong_asunsignedlong(PyObject *module, PyObject *arg) +{ + NULLABLE(arg); + unsigned long value = PyLong_AsUnsignedLong(arg); + if (value == (unsigned long)-1 && PyErr_Occurred()) { + return NULL; + } + return PyLong_FromUnsignedLong(value); +} + +static PyObject * +pylong_asunsignedlongmask(PyObject *module, PyObject *arg) +{ + NULLABLE(arg); + unsigned long value = PyLong_AsUnsignedLongMask(arg); + if (value == (unsigned long)-1 && PyErr_Occurred()) { + return NULL; + } + return PyLong_FromUnsignedLong(value); +} + +static PyObject * +pylong_aslonglong(PyObject *module, PyObject *arg) +{ + NULLABLE(arg); + long long value = PyLong_AsLongLong(arg); + if (value == -1 && PyErr_Occurred()) { + return NULL; + } + return PyLong_FromLongLong(value); +} + +static PyObject * +pylong_aslonglongandoverflow(PyObject *module, PyObject *arg) +{ + NULLABLE(arg); + int overflow = UNINITIALIZED_INT; + long long value = PyLong_AsLongLongAndOverflow(arg, &overflow); + if (value == -1 && PyErr_Occurred()) { + assert(overflow == -1); + return NULL; + } + return Py_BuildValue("Li", value, overflow); +} + +static PyObject * +pylong_asunsignedlonglong(PyObject *module, PyObject *arg) +{ + NULLABLE(arg); + unsigned long long value = PyLong_AsUnsignedLongLong(arg); + if (value == (unsigned long long)-1 && PyErr_Occurred()) { + return NULL; + } + return PyLong_FromUnsignedLongLong(value); +} + +static PyObject * +pylong_asunsignedlonglongmask(PyObject *module, PyObject *arg) +{ + NULLABLE(arg); + unsigned long long value = PyLong_AsUnsignedLongLongMask(arg); + if (value == (unsigned long long)-1 && PyErr_Occurred()) { + return NULL; + } + return PyLong_FromUnsignedLongLong(value); +} + +static PyObject * +pylong_as_ssize_t(PyObject *module, PyObject *arg) +{ + NULLABLE(arg); + Py_ssize_t value = PyLong_AsSsize_t(arg); + if (value == -1 && PyErr_Occurred()) { + return NULL; + } + return PyLong_FromSsize_t(value); +} + +static PyObject * +pylong_as_size_t(PyObject *module, PyObject *arg) +{ + NULLABLE(arg); + size_t value = PyLong_AsSize_t(arg); + if (value == (size_t)-1 && PyErr_Occurred()) { + return NULL; + } + return PyLong_FromSize_t(value); +} + +static PyObject * +pylong_asdouble(PyObject *module, PyObject *arg) +{ + NULLABLE(arg); + double value = PyLong_AsDouble(arg); + if (value == -1.0 
&& PyErr_Occurred()) { + return NULL; + } + return PyFloat_FromDouble(value); +} + +static PyObject * +pylong_asvoidptr(PyObject *module, PyObject *arg) +{ + NULLABLE(arg); + void *value = PyLong_AsVoidPtr(arg); + if (value == NULL) { + if (PyErr_Occurred()) { + return NULL; + } + Py_RETURN_NONE; + } + return Py_NewRef((PyObject *)value); +} + +static PyObject * +pylong_aspid(PyObject *module, PyObject *arg) +{ + NULLABLE(arg); + pid_t value = PyLong_AsPid(arg); + if (value == -1 && PyErr_Occurred()) { + return NULL; + } + return PyLong_FromPid(value); +} + + +static PyMethodDef test_methods[] = { + _TESTLIMITEDCAPI_TEST_LONG_AND_OVERFLOW_METHODDEF + _TESTLIMITEDCAPI_TEST_LONG_API_METHODDEF + _TESTLIMITEDCAPI_TEST_LONG_AS_DOUBLE_METHODDEF + _TESTLIMITEDCAPI_TEST_LONG_AS_SIZE_T_METHODDEF + _TESTLIMITEDCAPI_TEST_LONG_AS_UNSIGNED_LONG_LONG_MASK_METHODDEF + _TESTLIMITEDCAPI_TEST_LONG_LONG_AND_OVERFLOW_METHODDEF + _TESTLIMITEDCAPI_TEST_LONGLONG_API_METHODDEF + {"pylong_check", pylong_check, METH_O}, + {"pylong_checkexact", pylong_checkexact, METH_O}, + {"pylong_fromdouble", pylong_fromdouble, METH_O}, + {"pylong_fromstring", pylong_fromstring, METH_VARARGS}, + {"pylong_fromvoidptr", pylong_fromvoidptr, METH_O}, + _TESTLIMITEDCAPI_PYLONG_ASINT_METHODDEF + {"pylong_aslong", pylong_aslong, METH_O}, + {"pylong_aslongandoverflow", pylong_aslongandoverflow, METH_O}, + {"pylong_asunsignedlong", pylong_asunsignedlong, METH_O}, + {"pylong_asunsignedlongmask", pylong_asunsignedlongmask, METH_O}, + {"pylong_aslonglong", pylong_aslonglong, METH_O}, + {"pylong_aslonglongandoverflow", pylong_aslonglongandoverflow, METH_O}, + {"pylong_asunsignedlonglong", pylong_asunsignedlonglong, METH_O}, + {"pylong_asunsignedlonglongmask", pylong_asunsignedlonglongmask, METH_O}, + {"pylong_as_ssize_t", pylong_as_ssize_t, METH_O}, + {"pylong_as_size_t", pylong_as_size_t, METH_O}, + {"pylong_asdouble", pylong_asdouble, METH_O}, + {"pylong_asvoidptr", pylong_asvoidptr, METH_O}, + {"pylong_aspid", pylong_aspid, METH_O}, + {NULL}, +}; + +int +_PyTestLimitedCAPI_Init_Long(PyObject *mod) +{ + if (PyModule_AddFunctions(mod, test_methods) < 0) { + return -1; + } + return 0; +} diff --git a/Modules/_testlimitedcapi/object.c b/Modules/_testlimitedcapi/object.c new file mode 100644 index 00000000000000..6e438c811d6e98 --- /dev/null +++ b/Modules/_testlimitedcapi/object.c @@ -0,0 +1,80 @@ +// Need limited C API version 3.13 for Py_GetConstant() +#include "pyconfig.h" // Py_GIL_DISABLED +#if !defined(Py_GIL_DISABLED) && !defined(Py_LIMITED_API ) +# define Py_LIMITED_API 0x030d0000 +#endif + +#include "parts.h" +#include "util.h" + + +/* Test Py_GetConstant() */ +static PyObject * +get_constant(PyObject *Py_UNUSED(module), PyObject *args) +{ + int constant_id; + if (!PyArg_ParseTuple(args, "i", &constant_id)) { + return NULL; + } + + PyObject *obj = Py_GetConstant(constant_id); + if (obj == NULL) { + assert(PyErr_Occurred()); + return NULL; + } + return obj; +} + + +/* Test Py_GetConstantBorrowed() */ +static PyObject * +get_constant_borrowed(PyObject *Py_UNUSED(module), PyObject *args) +{ + int constant_id; + if (!PyArg_ParseTuple(args, "i", &constant_id)) { + return NULL; + } + + PyObject *obj = Py_GetConstantBorrowed(constant_id); + if (obj == NULL) { + assert(PyErr_Occurred()); + return NULL; + } + return Py_NewRef(obj); +} + + +/* Test constants */ +static PyObject * +test_constants(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(args)) +{ + // Test that implementation of constants in the limited C API: + // check that the C code 
compiles. + // + // Test also that constants and Py_GetConstant() return the same + // objects. + assert(Py_None == Py_GetConstant(Py_CONSTANT_NONE)); + assert(Py_False == Py_GetConstant(Py_CONSTANT_FALSE)); + assert(Py_True == Py_GetConstant(Py_CONSTANT_TRUE)); + assert(Py_Ellipsis == Py_GetConstant(Py_CONSTANT_ELLIPSIS)); + assert(Py_NotImplemented == Py_GetConstant(Py_CONSTANT_NOT_IMPLEMENTED)); + // Other constants are tested in test_capi.test_object + Py_RETURN_NONE; +} + +static PyMethodDef test_methods[] = { + {"get_constant", get_constant, METH_VARARGS}, + {"get_constant_borrowed", get_constant_borrowed, METH_VARARGS}, + {"test_constants", test_constants, METH_NOARGS}, + {NULL}, +}; + +int +_PyTestLimitedCAPI_Init_Object(PyObject *m) +{ + if (PyModule_AddFunctions(m, test_methods) < 0) { + return -1; + } + + return 0; +} diff --git a/Modules/_testlimitedcapi/parts.h b/Modules/_testlimitedcapi/parts.h index 9bc52413382eb5..d91f174cd31eed 100644 --- a/Modules/_testlimitedcapi/parts.h +++ b/Modules/_testlimitedcapi/parts.h @@ -22,11 +22,20 @@ # error "Py_BUILD_CORE macro must not be defined" #endif -int _PyTestCapi_Init_ByteArray(PyObject *module); -int _PyTestCapi_Init_Bytes(PyObject *module); -int _PyTestCapi_Init_HeaptypeRelative(PyObject *module); -int _PyTestCapi_Init_PyOS(PyObject *module); -int _PyTestCapi_Init_Sys(PyObject *module); -int _PyTestCapi_Init_VectorcallLimited(PyObject *module); +int _PyTestLimitedCAPI_Init_Abstract(PyObject *module); +int _PyTestLimitedCAPI_Init_ByteArray(PyObject *module); +int _PyTestLimitedCAPI_Init_Bytes(PyObject *module); +int _PyTestLimitedCAPI_Init_Complex(PyObject *module); +int _PyTestLimitedCAPI_Init_Dict(PyObject *module); +int _PyTestLimitedCAPI_Init_Float(PyObject *module); +int _PyTestLimitedCAPI_Init_HeaptypeRelative(PyObject *module); +int _PyTestLimitedCAPI_Init_Object(PyObject *module); +int _PyTestLimitedCAPI_Init_List(PyObject *module); +int _PyTestLimitedCAPI_Init_Long(PyObject *module); +int _PyTestLimitedCAPI_Init_PyOS(PyObject *module); +int _PyTestLimitedCAPI_Init_Set(PyObject *module); +int _PyTestLimitedCAPI_Init_Sys(PyObject *module); +int _PyTestLimitedCAPI_Init_Unicode(PyObject *module); +int _PyTestLimitedCAPI_Init_VectorcallLimited(PyObject *module); #endif // Py_TESTLIMITEDCAPI_PARTS_H diff --git a/Modules/_testlimitedcapi/pyos.c b/Modules/_testlimitedcapi/pyos.c index 63140e914875db..0f61801eae322a 100644 --- a/Modules/_testlimitedcapi/pyos.c +++ b/Modules/_testlimitedcapi/pyos.c @@ -50,7 +50,7 @@ static PyMethodDef test_methods[] = { }; int -_PyTestCapi_Init_PyOS(PyObject *mod) +_PyTestLimitedCAPI_Init_PyOS(PyObject *mod) { if (PyModule_AddFunctions(mod, test_methods) < 0) { return -1; diff --git a/Modules/_testlimitedcapi/set.c b/Modules/_testlimitedcapi/set.c new file mode 100644 index 00000000000000..35da5fa5f008e1 --- /dev/null +++ b/Modules/_testlimitedcapi/set.c @@ -0,0 +1,189 @@ +#include "parts.h" +#include "util.h" + +static PyObject * +set_check(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + RETURN_INT(PySet_Check(obj)); +} + +static PyObject * +set_checkexact(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + RETURN_INT(PySet_CheckExact(obj)); +} + +static PyObject * +frozenset_check(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + RETURN_INT(PyFrozenSet_Check(obj)); +} + +static PyObject * +frozenset_checkexact(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + RETURN_INT(PyFrozenSet_CheckExact(obj)); +} + +static PyObject * +anyset_check(PyObject *self, PyObject *obj) +{ + 
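    // Annotation (assumption, not part of the original diff): NULLABLE() is
    // presumably the util.h helper that swaps a Py_None argument for NULL,
    // so the Python-level tests can probe how each limited-API function
    // reacts to a NULL pointer.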
NULLABLE(obj); + RETURN_INT(PyAnySet_Check(obj)); +} + +static PyObject * +anyset_checkexact(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + RETURN_INT(PyAnySet_CheckExact(obj)); +} + +static PyObject * +set_new(PyObject *self, PyObject *args) +{ + PyObject *iterable = NULL; + if (!PyArg_ParseTuple(args, "|O", &iterable)) { + return NULL; + } + return PySet_New(iterable); +} + +static PyObject * +frozenset_new(PyObject *self, PyObject *args) +{ + PyObject *iterable = NULL; + if (!PyArg_ParseTuple(args, "|O", &iterable)) { + return NULL; + } + return PyFrozenSet_New(iterable); +} + +static PyObject * +set_size(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + RETURN_SIZE(PySet_Size(obj)); +} + +static PyObject * +set_contains(PyObject *self, PyObject *args) +{ + PyObject *obj, *item; + if (!PyArg_ParseTuple(args, "OO", &obj, &item)) { + return NULL; + } + NULLABLE(obj); + NULLABLE(item); + RETURN_INT(PySet_Contains(obj, item)); +} + +static PyObject * +set_add(PyObject *self, PyObject *args) +{ + PyObject *obj, *item; + if (!PyArg_ParseTuple(args, "OO", &obj, &item)) { + return NULL; + } + NULLABLE(obj); + NULLABLE(item); + RETURN_INT(PySet_Add(obj, item)); +} + +static PyObject * +set_discard(PyObject *self, PyObject *args) +{ + PyObject *obj, *item; + if (!PyArg_ParseTuple(args, "OO", &obj, &item)) { + return NULL; + } + NULLABLE(obj); + NULLABLE(item); + RETURN_INT(PySet_Discard(obj, item)); +} + +static PyObject * +set_pop(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PySet_Pop(obj); +} + +static PyObject * +set_clear(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + RETURN_INT(PySet_Clear(obj)); +} + +static PyObject * +test_frozenset_add_in_capi(PyObject *self, PyObject *Py_UNUSED(obj)) +{ + // Test that `frozenset` can be used with `PySet_Add`, + // when frozenset is just created in CAPI. 
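    // Annotation (added commentary, not part of the original diff):
    // PySet_Add() is documented to accept a frozenset only while the object
    // is brand new and not yet visible to other code, much like
    // PyTuple_SetItem() on a freshly created tuple; the code below relies on
    // exactly that window before the frozenset is exposed.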
+ PyObject *fs = PyFrozenSet_New(NULL); + if (fs == NULL) { + return NULL; + } + PyObject *num = PyLong_FromLong(1); + if (num == NULL) { + goto error; + } + if (PySet_Add(fs, num) < 0) { + goto error; + } + int contains = PySet_Contains(fs, num); + if (contains < 0) { + goto error; + } + else if (contains == 0) { + goto unexpected; + } + Py_DECREF(fs); + Py_DECREF(num); + Py_RETURN_NONE; + +unexpected: + PyErr_SetString(PyExc_ValueError, "set does not contain expected value"); +error: + Py_DECREF(fs); + Py_XDECREF(num); + return NULL; +} + +static PyMethodDef test_methods[] = { + {"set_check", set_check, METH_O}, + {"set_checkexact", set_checkexact, METH_O}, + {"frozenset_check", frozenset_check, METH_O}, + {"frozenset_checkexact", frozenset_checkexact, METH_O}, + {"anyset_check", anyset_check, METH_O}, + {"anyset_checkexact", anyset_checkexact, METH_O}, + + {"set_new", set_new, METH_VARARGS}, + {"frozenset_new", frozenset_new, METH_VARARGS}, + + {"set_size", set_size, METH_O}, + {"set_contains", set_contains, METH_VARARGS}, + {"set_add", set_add, METH_VARARGS}, + {"set_discard", set_discard, METH_VARARGS}, + {"set_pop", set_pop, METH_O}, + {"set_clear", set_clear, METH_O}, + + {"test_frozenset_add_in_capi", test_frozenset_add_in_capi, METH_NOARGS}, + + {NULL}, +}; + +int +_PyTestLimitedCAPI_Init_Set(PyObject *m) +{ + if (PyModule_AddFunctions(m, test_methods) < 0) { + return -1; + } + + return 0; +} diff --git a/Modules/_testlimitedcapi/sys.c b/Modules/_testlimitedcapi/sys.c index aa40e3cd5b9b29..7d8b7a8569e515 100644 --- a/Modules/_testlimitedcapi/sys.c +++ b/Modules/_testlimitedcapi/sys.c @@ -46,7 +46,7 @@ static PyMethodDef test_methods[] = { }; int -_PyTestCapi_Init_Sys(PyObject *m) +_PyTestLimitedCAPI_Init_Sys(PyObject *m) { if (PyModule_AddFunctions(m, test_methods) < 0) { return -1; diff --git a/Modules/_testcapi/testcapi_long.h b/Modules/_testlimitedcapi/testcapi_long.h similarity index 100% rename from Modules/_testcapi/testcapi_long.h rename to Modules/_testlimitedcapi/testcapi_long.h diff --git a/Modules/_testlimitedcapi/unicode.c b/Modules/_testlimitedcapi/unicode.c new file mode 100644 index 00000000000000..2b70d09108a333 --- /dev/null +++ b/Modules/_testlimitedcapi/unicode.c @@ -0,0 +1,1938 @@ +#include "pyconfig.h" // Py_GIL_DISABLED +#ifndef Py_GIL_DISABLED + // Need limited C API 3.13 to test PyUnicode_EqualToUTF8() +# define Py_LIMITED_API 0x030d0000 +#endif + +#include "parts.h" +#include "util.h" + +#include <stddef.h> // ptrdiff_t +#include <string.h> // memset() + + +static PyObject * +codec_incrementalencoder(PyObject *self, PyObject *args) +{ + const char *encoding, *errors = NULL; + if (!PyArg_ParseTuple(args, "s|s:test_incrementalencoder", + &encoding, &errors)) + return NULL; + return PyCodec_IncrementalEncoder(encoding, errors); +} + +static PyObject * +codec_incrementaldecoder(PyObject *self, PyObject *args) +{ + const char *encoding, *errors = NULL; + if (!PyArg_ParseTuple(args, "s|s:test_incrementaldecoder", + &encoding, &errors)) + return NULL; + return PyCodec_IncrementalDecoder(encoding, errors); +} + +static PyObject * +test_unicode_compare_with_ascii(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + PyObject *py_s = PyUnicode_FromStringAndSize("str\0", 4); + int result; + if (py_s == NULL) + return NULL; + result = PyUnicode_CompareWithASCIIString(py_s, "str"); + Py_DECREF(py_s); + if (!result) { + PyErr_SetString(PyExc_AssertionError, "Python string ending in NULL " + "should not compare equal to c string."); + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject *
+test_widechar(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ +#if defined(SIZEOF_WCHAR_T) && (SIZEOF_WCHAR_T == 4) + const wchar_t wtext[2] = {(wchar_t)0x10ABCDu}; + size_t wtextlen = 1; + const wchar_t invalid[1] = {(wchar_t)0x110000u}; +#else + const wchar_t wtext[3] = {(wchar_t)0xDBEAu, (wchar_t)0xDFCDu}; + size_t wtextlen = 2; +#endif + PyObject *wide, *utf8; + + wide = PyUnicode_FromWideChar(wtext, wtextlen); + if (wide == NULL) + return NULL; + + utf8 = PyUnicode_FromString("\xf4\x8a\xaf\x8d"); + if (utf8 == NULL) { + Py_DECREF(wide); + return NULL; + } + + if (PyUnicode_GetLength(wide) != PyUnicode_GetLength(utf8)) { + Py_DECREF(wide); + Py_DECREF(utf8); + PyErr_SetString(PyExc_AssertionError, + "test_widechar: " + "wide string and utf8 string " + "have different length"); + return NULL; + } + if (PyUnicode_Compare(wide, utf8)) { + Py_DECREF(wide); + Py_DECREF(utf8); + if (PyErr_Occurred()) + return NULL; + PyErr_SetString(PyExc_AssertionError, + "test_widechar: " + "wide string and utf8 string " + "are different"); + return NULL; + } + + Py_DECREF(wide); + Py_DECREF(utf8); + +#if defined(SIZEOF_WCHAR_T) && (SIZEOF_WCHAR_T == 4) + wide = PyUnicode_FromWideChar(invalid, 1); + if (wide == NULL) + PyErr_Clear(); + else { + PyErr_SetString(PyExc_AssertionError, + "test_widechar: " + "PyUnicode_FromWideChar(L\"\\U00110000\", 1) didn't fail"); + return NULL; + } +#endif + Py_RETURN_NONE; +} + + +static PyObject * +unicode_copy(PyObject *unicode) +{ + if (!unicode) { + return NULL; + } + if (!PyUnicode_Check(unicode)) { + Py_INCREF(unicode); + return unicode; + } + + // Create a new string by encoding to UTF-8 and then decode from UTF-8 + PyObject *utf8 = PyUnicode_AsUTF8String(unicode); + if (!utf8) { + return NULL; + } + + PyObject *copy = PyUnicode_DecodeUTF8( + PyBytes_AsString(utf8), + PyBytes_Size(utf8), + NULL); + Py_DECREF(utf8); + + return copy; +} + +/* Test PyUnicode_WriteChar() */ +static PyObject * +unicode_writechar(PyObject *self, PyObject *args) +{ + PyObject *to, *to_copy; + Py_ssize_t index; + unsigned int character; + int result; + + if (!PyArg_ParseTuple(args, "OnI", &to, &index, &character)) { + return NULL; + } + + NULLABLE(to); + if (!(to_copy = unicode_copy(to)) && to) { + return NULL; + } + + result = PyUnicode_WriteChar(to_copy, index, (Py_UCS4)character); + if (result == -1 && PyErr_Occurred()) { + Py_DECREF(to_copy); + return NULL; + } + return Py_BuildValue("(Ni)", to_copy, result); +} + +static void +unicode_fill(PyObject *str, Py_ssize_t start, Py_ssize_t end, Py_UCS4 ch) +{ + assert(0 <= start); + assert(end <= PyUnicode_GetLength(str)); + for (Py_ssize_t i = start; i < end; i++) { + int res = PyUnicode_WriteChar(str, i, ch); + assert(res == 0); + } +} + + +/* Test PyUnicode_Resize() */ +static PyObject * +unicode_resize(PyObject *self, PyObject *args) +{ + PyObject *obj, *copy; + Py_ssize_t length; + int result; + + if (!PyArg_ParseTuple(args, "On", &obj, &length)) { + return NULL; + } + + NULLABLE(obj); + if (!(copy = unicode_copy(obj)) && obj) { + return NULL; + } + result = PyUnicode_Resize(&copy, length); + if (result == -1 && PyErr_Occurred()) { + Py_XDECREF(copy); + return NULL; + } + if (obj && PyUnicode_Check(obj) && length > PyUnicode_GetLength(obj)) { + unicode_fill(copy, PyUnicode_GetLength(obj), length, 0U); + } + return Py_BuildValue("(Ni)", copy, result); +} + +/* Test PyUnicode_Append() */ +static PyObject * +unicode_append(PyObject *self, PyObject *args) +{ + PyObject *left, *right, *left_copy; + + if (!PyArg_ParseTuple(args, "OO", &left,
&right)) + return NULL; + + NULLABLE(left); + NULLABLE(right); + if (!(left_copy = unicode_copy(left)) && left) { + return NULL; + } + PyUnicode_Append(&left_copy, right); + return left_copy; +} + +/* Test PyUnicode_AppendAndDel() */ +static PyObject * +unicode_appendanddel(PyObject *self, PyObject *args) +{ + PyObject *left, *right, *left_copy; + + if (!PyArg_ParseTuple(args, "OO", &left, &right)) + return NULL; + + NULLABLE(left); + NULLABLE(right); + if (!(left_copy = unicode_copy(left)) && left) { + return NULL; + } + Py_XINCREF(right); + PyUnicode_AppendAndDel(&left_copy, right); + return left_copy; +} + +/* Test PyUnicode_FromStringAndSize() */ +static PyObject * +unicode_fromstringandsize(PyObject *self, PyObject *args) +{ + const char *s; + Py_ssize_t bsize; + Py_ssize_t size = -100; + + if (!PyArg_ParseTuple(args, "z#|n", &s, &bsize, &size)) { + return NULL; + } + + if (size == -100) { + size = bsize; + } + return PyUnicode_FromStringAndSize(s, size); +} + +/* Test PyUnicode_FromString() */ +static PyObject * +unicode_fromstring(PyObject *self, PyObject *arg) +{ + const char *s; + Py_ssize_t size; + + if (!PyArg_Parse(arg, "z#", &s, &size)) { + return NULL; + } + return PyUnicode_FromString(s); +} + +/* Test PyUnicode_Substring() */ +static PyObject * +unicode_substring(PyObject *self, PyObject *args) +{ + PyObject *str; + Py_ssize_t start, end; + + if (!PyArg_ParseTuple(args, "Onn", &str, &start, &end)) { + return NULL; + } + + NULLABLE(str); + return PyUnicode_Substring(str, start, end); +} + +/* Test PyUnicode_GetLength() */ +static PyObject * +unicode_getlength(PyObject *self, PyObject *arg) +{ + NULLABLE(arg); + RETURN_SIZE(PyUnicode_GetLength(arg)); +} + +/* Test PyUnicode_ReadChar() */ +static PyObject * +unicode_readchar(PyObject *self, PyObject *args) +{ + PyObject *unicode; + Py_ssize_t index; + Py_UCS4 result; + + if (!PyArg_ParseTuple(args, "On", &unicode, &index)) { + return NULL; + } + + NULLABLE(unicode); + result = PyUnicode_ReadChar(unicode, index); + if (result == (Py_UCS4)-1) + return NULL; + return PyLong_FromUnsignedLong(result); +} + +/* Test PyUnicode_FromEncodedObject() */ +static PyObject * +unicode_fromencodedobject(PyObject *self, PyObject *args) +{ + PyObject *obj; + const char *encoding; + const char *errors = NULL; + + if (!PyArg_ParseTuple(args, "Oz|z", &obj, &encoding, &errors)) { + return NULL; + } + + NULLABLE(obj); + return PyUnicode_FromEncodedObject(obj, encoding, errors); +} + +/* Test PyUnicode_FromObject() */ +static PyObject * +unicode_fromobject(PyObject *self, PyObject *arg) +{ + NULLABLE(arg); + return PyUnicode_FromObject(arg); +} + +/* Test PyUnicode_InternInPlace() */ +static PyObject * +unicode_interninplace(PyObject *self, PyObject *arg) +{ + NULLABLE(arg); + Py_XINCREF(arg); + PyUnicode_InternInPlace(&arg); + return arg; +} + +/* Test PyUnicode_InternFromString() */ +static PyObject * +unicode_internfromstring(PyObject *self, PyObject *arg) +{ + const char *s; + Py_ssize_t size; + + if (!PyArg_Parse(arg, "z#", &s, &size)) { + return NULL; + } + return PyUnicode_InternFromString(s); +} + +/* Test PyUnicode_FromWideChar() */ +static PyObject * +unicode_fromwidechar(PyObject *self, PyObject *args) +{ + const char *s; + Py_ssize_t bsize; + Py_ssize_t size = -100; + + if (!PyArg_ParseTuple(args, "z#|n", &s, &bsize, &size)) { + return NULL; + } + if (size == -100) { + if (bsize % SIZEOF_WCHAR_T) { + PyErr_SetString(PyExc_AssertionError, + "invalid size in unicode_fromwidechar()"); + return NULL; + } + size = bsize / SIZEOF_WCHAR_T; + } + 
return PyUnicode_FromWideChar((const wchar_t *)s, size); +} + +/* Test PyUnicode_AsWideChar() */ +static PyObject * +unicode_aswidechar(PyObject *self, PyObject *args) +{ + PyObject *unicode, *result; + Py_ssize_t buflen, size; + wchar_t *buffer; + + if (!PyArg_ParseTuple(args, "On", &unicode, &buflen)) + return NULL; + NULLABLE(unicode); + buffer = PyMem_New(wchar_t, buflen); + if (buffer == NULL) + return PyErr_NoMemory(); + + size = PyUnicode_AsWideChar(unicode, buffer, buflen); + if (size == -1) { + PyMem_Free(buffer); + return NULL; + } + + if (size < buflen) + buflen = size + 1; + else + buflen = size; + result = PyUnicode_FromWideChar(buffer, buflen); + PyMem_Free(buffer); + if (result == NULL) + return NULL; + + return Py_BuildValue("(Nn)", result, size); +} + +/* Test PyUnicode_AsWideCharString() with NULL as buffer */ +static PyObject * +unicode_aswidechar_null(PyObject *self, PyObject *args) +{ + PyObject *unicode; + Py_ssize_t buflen; + + if (!PyArg_ParseTuple(args, "On", &unicode, &buflen)) + return NULL; + NULLABLE(unicode); + RETURN_SIZE(PyUnicode_AsWideChar(unicode, NULL, buflen)); +} + +/* Test PyUnicode_AsWideCharString() */ +static PyObject * +unicode_aswidecharstring(PyObject *self, PyObject *args) +{ + PyObject *unicode, *result; + Py_ssize_t size = UNINITIALIZED_SIZE; + wchar_t *buffer; + + if (!PyArg_ParseTuple(args, "O", &unicode)) + return NULL; + + NULLABLE(unicode); + buffer = PyUnicode_AsWideCharString(unicode, &size); + if (buffer == NULL) { + assert(size == UNINITIALIZED_SIZE); + return NULL; + } + + result = PyUnicode_FromWideChar(buffer, size + 1); + PyMem_Free(buffer); + if (result == NULL) + return NULL; + return Py_BuildValue("(Nn)", result, size); +} + +/* Test PyUnicode_AsWideCharString() with NULL as the size address */ +static PyObject * +unicode_aswidecharstring_null(PyObject *self, PyObject *args) +{ + PyObject *unicode, *result; + wchar_t *buffer; + + if (!PyArg_ParseTuple(args, "O", &unicode)) + return NULL; + + NULLABLE(unicode); + buffer = PyUnicode_AsWideCharString(unicode, NULL); + if (buffer == NULL) + return NULL; + + result = PyUnicode_FromWideChar(buffer, -1); + PyMem_Free(buffer); + if (result == NULL) + return NULL; + return result; +} + + +/* Test PyUnicode_FromOrdinal() */ +static PyObject * +unicode_fromordinal(PyObject *self, PyObject *args) +{ + int ordinal; + + if (!PyArg_ParseTuple(args, "i", &ordinal)) + return NULL; + + return PyUnicode_FromOrdinal(ordinal); +} + +/* Test PyUnicode_AsUTF8AndSize() */ +static PyObject * +unicode_asutf8andsize(PyObject *self, PyObject *args) +{ + PyObject *unicode; + Py_ssize_t buflen; + const char *s; + Py_ssize_t size = UNINITIALIZED_SIZE; + + if (!PyArg_ParseTuple(args, "On", &unicode, &buflen)) + return NULL; + + NULLABLE(unicode); + s = PyUnicode_AsUTF8AndSize(unicode, &size); + if (s == NULL) { + assert(size == -1); + return NULL; + } + + return Py_BuildValue("(y#n)", s, buflen, size); +} + +/* Test PyUnicode_AsUTF8AndSize() with NULL as the size address */ +static PyObject * +unicode_asutf8andsize_null(PyObject *self, PyObject *args) +{ + PyObject *unicode; + Py_ssize_t buflen; + const char *s; + + if (!PyArg_ParseTuple(args, "On", &unicode, &buflen)) + return NULL; + + NULLABLE(unicode); + s = PyUnicode_AsUTF8AndSize(unicode, NULL); + if (s == NULL) + return NULL; + + return PyBytes_FromStringAndSize(s, buflen); +} + +/* Test PyUnicode_GetDefaultEncoding() */ +static PyObject * +unicode_getdefaultencoding(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + const char *s = 
PyUnicode_GetDefaultEncoding(); + if (s == NULL) + return NULL; + + return PyBytes_FromString(s); +} + +/* Test PyUnicode_Decode() */ +static PyObject * +unicode_decode(PyObject *self, PyObject *args) +{ + const char *s; + Py_ssize_t size; + const char *encoding; + const char *errors = NULL; + + if (!PyArg_ParseTuple(args, "y#z|z", &s, &size, &encoding, &errors)) + return NULL; + + return PyUnicode_Decode(s, size, encoding, errors); +} + +/* Test PyUnicode_AsEncodedString() */ +static PyObject * +unicode_asencodedstring(PyObject *self, PyObject *args) +{ + PyObject *unicode; + const char *encoding; + const char *errors = NULL; + + if (!PyArg_ParseTuple(args, "Oz|z", &unicode, &encoding, &errors)) + return NULL; + + NULLABLE(unicode); + return PyUnicode_AsEncodedString(unicode, encoding, errors); +} + +/* Test PyUnicode_BuildEncodingMap() */ +static PyObject * +unicode_buildencodingmap(PyObject *self, PyObject *arg) +{ + NULLABLE(arg); + return PyUnicode_BuildEncodingMap(arg); +} + +/* Test PyUnicode_DecodeUTF7() */ +static PyObject * +unicode_decodeutf7(PyObject *self, PyObject *args) +{ + const char *data; + Py_ssize_t size; + const char *errors = NULL; + + if (!PyArg_ParseTuple(args, "y#|z", &data, &size, &errors)) + return NULL; + + return PyUnicode_DecodeUTF7(data, size, errors); +} + +/* Test PyUnicode_DecodeUTF7Stateful() */ +static PyObject * +unicode_decodeutf7stateful(PyObject *self, PyObject *args) +{ + const char *data; + Py_ssize_t size; + const char *errors = NULL; + Py_ssize_t consumed = UNINITIALIZED_SIZE; + PyObject *result; + + if (!PyArg_ParseTuple(args, "y#|z", &data, &size, &errors)) + return NULL; + + result = PyUnicode_DecodeUTF7Stateful(data, size, errors, &consumed); + if (!result) { + assert(consumed == UNINITIALIZED_SIZE); + return NULL; + } + return Py_BuildValue("(Nn)", result, consumed); +} + +/* Test PyUnicode_DecodeUTF8() */ +static PyObject * +unicode_decodeutf8(PyObject *self, PyObject *args) +{ + const char *data; + Py_ssize_t size; + const char *errors = NULL; + + if (!PyArg_ParseTuple(args, "y#|z", &data, &size, &errors)) + return NULL; + + return PyUnicode_DecodeUTF8(data, size, errors); +} + +/* Test PyUnicode_DecodeUTF8Stateful() */ +static PyObject * +unicode_decodeutf8stateful(PyObject *self, PyObject *args) +{ + const char *data; + Py_ssize_t size; + const char *errors = NULL; + Py_ssize_t consumed = UNINITIALIZED_SIZE; + PyObject *result; + + if (!PyArg_ParseTuple(args, "y#|z", &data, &size, &errors)) + return NULL; + + result = PyUnicode_DecodeUTF8Stateful(data, size, errors, &consumed); + if (!result) { + assert(consumed == UNINITIALIZED_SIZE); + return NULL; + } + return Py_BuildValue("(Nn)", result, consumed); +} + +/* Test PyUnicode_AsUTF8String() */ +static PyObject * +unicode_asutf8string(PyObject *self, PyObject *arg) +{ + NULLABLE(arg); + return PyUnicode_AsUTF8String(arg); +} + +/* Test PyUnicode_DecodeUTF32() */ +static PyObject * +unicode_decodeutf32(PyObject *self, PyObject *args) +{ + const char *data; + Py_ssize_t size; + const char *errors = NULL; + int byteorder = UNINITIALIZED_INT; + PyObject *result; + + if (!PyArg_ParseTuple(args, "iy#|z", &byteorder, &data, &size, &errors)) + return NULL; + + result = PyUnicode_DecodeUTF32(data, size, errors, &byteorder); + if (!result) { + return NULL; + } + return Py_BuildValue("(iN)", byteorder, result); +} + +/* Test PyUnicode_DecodeUTF32Stateful() */ +static PyObject * +unicode_decodeutf32stateful(PyObject *self, PyObject *args) +{ + const char *data; + Py_ssize_t size; + const char 
*errors = NULL; + int byteorder = UNINITIALIZED_INT; + Py_ssize_t consumed = UNINITIALIZED_SIZE; + PyObject *result; + + if (!PyArg_ParseTuple(args, "iy#|z", &byteorder, &data, &size, &errors)) + return NULL; + + result = PyUnicode_DecodeUTF32Stateful(data, size, errors, &byteorder, &consumed); + if (!result) { + assert(consumed == UNINITIALIZED_SIZE); + return NULL; + } + return Py_BuildValue("(iNn)", byteorder, result, consumed); +} + +/* Test PyUnicode_AsUTF32String() */ +static PyObject * +unicode_asutf32string(PyObject *self, PyObject *arg) +{ + NULLABLE(arg); + return PyUnicode_AsUTF32String(arg); +} + +/* Test PyUnicode_DecodeUTF16() */ +static PyObject * +unicode_decodeutf16(PyObject *self, PyObject *args) +{ + const char *data; + Py_ssize_t size; + const char *errors = NULL; + int byteorder = UNINITIALIZED_INT; + PyObject *result; + + if (!PyArg_ParseTuple(args, "iy#|z", &byteorder, &data, &size, &errors)) + return NULL; + + result = PyUnicode_DecodeUTF16(data, size, errors, &byteorder); + if (!result) { + return NULL; + } + return Py_BuildValue("(iN)", byteorder, result); +} + +/* Test PyUnicode_DecodeUTF16Stateful() */ +static PyObject * +unicode_decodeutf16stateful(PyObject *self, PyObject *args) +{ + const char *data; + Py_ssize_t size; + const char *errors = NULL; + int byteorder = UNINITIALIZED_INT; + Py_ssize_t consumed = UNINITIALIZED_SIZE; + PyObject *result; + + if (!PyArg_ParseTuple(args, "iy#|z", &byteorder, &data, &size, &errors)) + return NULL; + + result = PyUnicode_DecodeUTF16Stateful(data, size, errors, &byteorder, &consumed); + if (!result) { + assert(consumed == UNINITIALIZED_SIZE); + return NULL; + } + return Py_BuildValue("(iNn)", byteorder, result, consumed); +} + +/* Test PyUnicode_AsUTF16String() */ +static PyObject * +unicode_asutf16string(PyObject *self, PyObject *arg) +{ + NULLABLE(arg); + return PyUnicode_AsUTF16String(arg); +} + +/* Test PyUnicode_DecodeUnicodeEscape() */ +static PyObject * +unicode_decodeunicodeescape(PyObject *self, PyObject *args) +{ + const char *data; + Py_ssize_t size; + const char *errors = NULL; + + if (!PyArg_ParseTuple(args, "y#|z", &data, &size, &errors)) + return NULL; + + return PyUnicode_DecodeUnicodeEscape(data, size, errors); +} + +/* Test PyUnicode_AsUnicodeEscapeString() */ +static PyObject * +unicode_asunicodeescapestring(PyObject *self, PyObject *arg) +{ + NULLABLE(arg); + return PyUnicode_AsUnicodeEscapeString(arg); +} + +static PyObject * +unicode_decoderawunicodeescape(PyObject *self, PyObject *args) +{ + const char *data; + Py_ssize_t size; + const char *errors = NULL; + + if (!PyArg_ParseTuple(args, "y#|z", &data, &size, &errors)) + return NULL; + + return PyUnicode_DecodeRawUnicodeEscape(data, size, errors); +} + +/* Test PyUnicode_AsRawUnicodeEscapeString() */ +static PyObject * +unicode_asrawunicodeescapestring(PyObject *self, PyObject *arg) +{ + NULLABLE(arg); + return PyUnicode_AsRawUnicodeEscapeString(arg); +} + +static PyObject * +unicode_decodelatin1(PyObject *self, PyObject *args) +{ + const char *data; + Py_ssize_t size; + const char *errors = NULL; + + if (!PyArg_ParseTuple(args, "y#|z", &data, &size, &errors)) + return NULL; + + return PyUnicode_DecodeLatin1(data, size, errors); +} + +/* Test PyUnicode_AsLatin1String() */ +static PyObject * +unicode_aslatin1string(PyObject *self, PyObject *arg) +{ + NULLABLE(arg); + return PyUnicode_AsLatin1String(arg); +} + +/* Test PyUnicode_DecodeASCII() */ +static PyObject * +unicode_decodeascii(PyObject *self, PyObject *args) +{ + const char *data; + Py_ssize_t 
size; + const char *errors = NULL; + + if (!PyArg_ParseTuple(args, "y#|z", &data, &size, &errors)) + return NULL; + + return PyUnicode_DecodeASCII(data, size, errors); +} + +/* Test PyUnicode_AsASCIIString() */ +static PyObject * +unicode_asasciistring(PyObject *self, PyObject *arg) +{ + NULLABLE(arg); + return PyUnicode_AsASCIIString(arg); +} + +/* Test PyUnicode_DecodeCharmap() */ +static PyObject * +unicode_decodecharmap(PyObject *self, PyObject *args) +{ + const char *data; + Py_ssize_t size; + PyObject *mapping; + const char *errors = NULL; + + if (!PyArg_ParseTuple(args, "y#O|z", &data, &size, &mapping, &errors)) + return NULL; + + NULLABLE(mapping); + return PyUnicode_DecodeCharmap(data, size, mapping, errors); +} + +/* Test PyUnicode_AsCharmapString() */ +static PyObject * +unicode_ascharmapstring(PyObject *self, PyObject *args) +{ + PyObject *unicode; + PyObject *mapping; + + if (!PyArg_ParseTuple(args, "OO", &unicode, &mapping)) + return NULL; + + NULLABLE(unicode); + NULLABLE(mapping); + return PyUnicode_AsCharmapString(unicode, mapping); +} + +#ifdef MS_WINDOWS + +/* Test PyUnicode_DecodeMBCS() */ +static PyObject * +unicode_decodembcs(PyObject *self, PyObject *args) +{ + const char *data; + Py_ssize_t size; + const char *errors = NULL; + + if (!PyArg_ParseTuple(args, "y#|z", &data, &size, &errors)) + return NULL; + + return PyUnicode_DecodeMBCS(data, size, errors); +} + +/* Test PyUnicode_DecodeMBCSStateful() */ +static PyObject * +unicode_decodembcsstateful(PyObject *self, PyObject *args) +{ + const char *data; + Py_ssize_t size; + const char *errors = NULL; + Py_ssize_t consumed = UNINITIALIZED_SIZE; + PyObject *result; + + if (!PyArg_ParseTuple(args, "y#|z", &data, &size, &errors)) + return NULL; + + result = PyUnicode_DecodeMBCSStateful(data, size, errors, &consumed); + if (!result) { + assert(consumed == UNINITIALIZED_SIZE); + return NULL; + } + return Py_BuildValue("(Nn)", result, consumed); +} + +/* Test PyUnicode_DecodeCodePageStateful() */ +static PyObject * +unicode_decodecodepagestateful(PyObject *self, PyObject *args) +{ + int code_page; + const char *data; + Py_ssize_t size; + const char *errors = NULL; + Py_ssize_t consumed = UNINITIALIZED_SIZE; + PyObject *result; + + if (!PyArg_ParseTuple(args, "iy#|z", &code_page, &data, &size, &errors)) + return NULL; + + result = PyUnicode_DecodeCodePageStateful(code_page, data, size, errors, &consumed); + if (!result) { + assert(consumed == UNINITIALIZED_SIZE); + return NULL; + } + return Py_BuildValue("(Nn)", result, consumed); +} + +/* Test PyUnicode_AsMBCSString() */ +static PyObject * +unicode_asmbcsstring(PyObject *self, PyObject *arg) +{ + NULLABLE(arg); + return PyUnicode_AsMBCSString(arg); +} + +/* Test PyUnicode_EncodeCodePage() */ +static PyObject * +unicode_encodecodepage(PyObject *self, PyObject *args) +{ + int code_page; + PyObject *unicode; + const char *errors; + + if (!PyArg_ParseTuple(args, "iO|z", &code_page, &unicode, &errors)) + return NULL; + + NULLABLE(unicode); + return PyUnicode_EncodeCodePage(code_page, unicode, errors); +} + +#endif /* MS_WINDOWS */ + +/* Test PyUnicode_DecodeLocaleAndSize() */ +static PyObject * +unicode_decodelocaleandsize(PyObject *self, PyObject *args) +{ + const char *data; + Py_ssize_t size; + const char *errors; + + if (!PyArg_ParseTuple(args, "y#|z", &data, &size, &errors)) + return NULL; + + return PyUnicode_DecodeLocaleAndSize(data, size, errors); +} + +/* Test PyUnicode_DecodeLocale() */ +static PyObject * +unicode_decodelocale(PyObject *self, PyObject *args) +{ + const 
char *data; + Py_ssize_t size; + const char *errors; + + if (!PyArg_ParseTuple(args, "y#|z", &data, &size, &errors)) + return NULL; + + return PyUnicode_DecodeLocale(data, errors); +} + +/* Test PyUnicode_EncodeLocale() */ +static PyObject * +unicode_encodelocale(PyObject *self, PyObject *args) +{ + PyObject *unicode; + const char *errors; + + if (!PyArg_ParseTuple(args, "O|z", &unicode, &errors)) + return NULL; + + NULLABLE(unicode); + return PyUnicode_EncodeLocale(unicode, errors); +} + +/* Test PyUnicode_DecodeFSDefault() */ +static PyObject * +unicode_decodefsdefault(PyObject *self, PyObject *args) +{ + const char *data; + Py_ssize_t size; + + if (!PyArg_ParseTuple(args, "y#", &data, &size)) + return NULL; + + return PyUnicode_DecodeFSDefault(data); +} + +/* Test PyUnicode_DecodeFSDefaultAndSize() */ +static PyObject * +unicode_decodefsdefaultandsize(PyObject *self, PyObject *args) +{ + const char *data; + Py_ssize_t size; + + if (!PyArg_ParseTuple(args, "y#|n", &data, &size, &size)) + return NULL; + + return PyUnicode_DecodeFSDefaultAndSize(data, size); +} + +/* Test PyUnicode_EncodeFSDefault() */ +static PyObject * +unicode_encodefsdefault(PyObject *self, PyObject *arg) +{ + NULLABLE(arg); + return PyUnicode_EncodeFSDefault(arg); +} + +/* Test PyUnicode_Concat() */ +static PyObject * +unicode_concat(PyObject *self, PyObject *args) +{ + PyObject *left; + PyObject *right; + + if (!PyArg_ParseTuple(args, "OO", &left, &right)) + return NULL; + + NULLABLE(left); + NULLABLE(right); + return PyUnicode_Concat(left, right); +} + +/* Test PyUnicode_Split() */ +static PyObject * +unicode_split(PyObject *self, PyObject *args) +{ + PyObject *s; + PyObject *sep; + Py_ssize_t maxsplit = -1; + + if (!PyArg_ParseTuple(args, "OO|n", &s, &sep, &maxsplit)) + return NULL; + + NULLABLE(s); + NULLABLE(sep); + return PyUnicode_Split(s, sep, maxsplit); +} + +/* Test PyUnicode_RSplit() */ +static PyObject * +unicode_rsplit(PyObject *self, PyObject *args) +{ + PyObject *s; + PyObject *sep; + Py_ssize_t maxsplit = -1; + + if (!PyArg_ParseTuple(args, "OO|n", &s, &sep, &maxsplit)) + return NULL; + + NULLABLE(s); + NULLABLE(sep); + return PyUnicode_RSplit(s, sep, maxsplit); +} + +/* Test PyUnicode_Splitlines() */ +static PyObject * +unicode_splitlines(PyObject *self, PyObject *args) +{ + PyObject *s; + int keepends = 0; + + if (!PyArg_ParseTuple(args, "O|i", &s, &keepends)) + return NULL; + + NULLABLE(s); + return PyUnicode_Splitlines(s, keepends); +} + +/* Test PyUnicode_Partition() */ +static PyObject * +unicode_partition(PyObject *self, PyObject *args) +{ + PyObject *s; + PyObject *sep; + + if (!PyArg_ParseTuple(args, "OO", &s, &sep)) + return NULL; + + NULLABLE(s); + NULLABLE(sep); + return PyUnicode_Partition(s, sep); +} + +/* Test PyUnicode_RPartition() */ +static PyObject * +unicode_rpartition(PyObject *self, PyObject *args) +{ + PyObject *s; + PyObject *sep; + + if (!PyArg_ParseTuple(args, "OO", &s, &sep)) + return NULL; + + NULLABLE(s); + NULLABLE(sep); + return PyUnicode_RPartition(s, sep); +} + +/* Test PyUnicode_Translate() */ +static PyObject * +unicode_translate(PyObject *self, PyObject *args) +{ + PyObject *obj; + PyObject *table; + const char *errors = NULL; + + if (!PyArg_ParseTuple(args, "OO|z", &obj, &table, &errors)) + return NULL; + + NULLABLE(obj); + NULLABLE(table); + return PyUnicode_Translate(obj, table, errors); +} + +/* Test PyUnicode_Join() */ +static PyObject * +unicode_join(PyObject *self, PyObject *args) +{ + PyObject *sep; + PyObject *seq; + + if (!PyArg_ParseTuple(args, "OO", &sep, 
&seq)) + return NULL; + + NULLABLE(sep); + NULLABLE(seq); + return PyUnicode_Join(sep, seq); +} + +/* Test PyUnicode_Count() */ +static PyObject * +unicode_count(PyObject *self, PyObject *args) +{ + PyObject *str; + PyObject *substr; + Py_ssize_t start; + Py_ssize_t end; + + if (!PyArg_ParseTuple(args, "OOnn", &str, &substr, &start, &end)) + return NULL; + + NULLABLE(str); + NULLABLE(substr); + RETURN_SIZE(PyUnicode_Count(str, substr, start, end)); +} + +/* Test PyUnicode_Find() */ +static PyObject * +unicode_find(PyObject *self, PyObject *args) +{ + PyObject *str; + PyObject *substr; + Py_ssize_t start; + Py_ssize_t end; + int direction; + Py_ssize_t result; + + if (!PyArg_ParseTuple(args, "OOnni", &str, &substr, &start, &end, &direction)) + return NULL; + + NULLABLE(str); + NULLABLE(substr); + result = PyUnicode_Find(str, substr, start, end, direction); + if (result == -2) { + assert(PyErr_Occurred()); + return NULL; + } + assert(!PyErr_Occurred()); + return PyLong_FromSsize_t(result); +} + +/* Test PyUnicode_Tailmatch() */ +static PyObject * +unicode_tailmatch(PyObject *self, PyObject *args) +{ + PyObject *str; + PyObject *substr; + Py_ssize_t start; + Py_ssize_t end; + int direction; + + if (!PyArg_ParseTuple(args, "OOnni", &str, &substr, &start, &end, &direction)) + return NULL; + + NULLABLE(str); + NULLABLE(substr); + RETURN_SIZE(PyUnicode_Tailmatch(str, substr, start, end, direction)); +} + +/* Test PyUnicode_FindChar() */ +static PyObject * +unicode_findchar(PyObject *self, PyObject *args) +{ + PyObject *str; + int direction; + unsigned int ch; + Py_ssize_t result; + Py_ssize_t start, end; + + if (!PyArg_ParseTuple(args, "OInni:unicode_findchar", &str, &ch, + &start, &end, &direction)) { + return NULL; + } + NULLABLE(str); + result = PyUnicode_FindChar(str, (Py_UCS4)ch, start, end, direction); + if (result == -2) { + assert(PyErr_Occurred()); + return NULL; + } + assert(!PyErr_Occurred()); + return PyLong_FromSsize_t(result); +} + +/* Test PyUnicode_Replace() */ +static PyObject * +unicode_replace(PyObject *self, PyObject *args) +{ + PyObject *str; + PyObject *substr; + PyObject *replstr; + Py_ssize_t maxcount = -1; + + if (!PyArg_ParseTuple(args, "OOO|n", &str, &substr, &replstr, &maxcount)) + return NULL; + + NULLABLE(str); + NULLABLE(substr); + NULLABLE(replstr); + return PyUnicode_Replace(str, substr, replstr, maxcount); +} + +/* Test PyUnicode_Compare() */ +static PyObject * +unicode_compare(PyObject *self, PyObject *args) +{ + PyObject *left; + PyObject *right; + int result; + + if (!PyArg_ParseTuple(args, "OO", &left, &right)) + return NULL; + + NULLABLE(left); + NULLABLE(right); + result = PyUnicode_Compare(left, right); + if (result == -1 && PyErr_Occurred()) { + return NULL; + } + assert(!PyErr_Occurred()); + return PyLong_FromLong(result); +} + +/* Test PyUnicode_CompareWithASCIIString() */ +static PyObject * +unicode_comparewithasciistring(PyObject *self, PyObject *args) +{ + PyObject *left; + const char *right = NULL; + Py_ssize_t right_len; + int result; + + if (!PyArg_ParseTuple(args, "O|y#", &left, &right, &right_len)) + return NULL; + + NULLABLE(left); + result = PyUnicode_CompareWithASCIIString(left, right); + if (result == -1 && PyErr_Occurred()) { + return NULL; + } + return PyLong_FromLong(result); +} + +/* Test PyUnicode_EqualToUTF8() */ +static PyObject * +unicode_equaltoutf8(PyObject *self, PyObject *args) +{ + PyObject *left; + const char *right = NULL; + Py_ssize_t right_len; + int result; + + if (!PyArg_ParseTuple(args, "Oz#", &left, &right, &right_len)) 
{ + return NULL; + } + + NULLABLE(left); + result = PyUnicode_EqualToUTF8(left, right); + assert(!PyErr_Occurred()); + return PyLong_FromLong(result); +} + +/* Test PyUnicode_EqualToUTF8AndSize() */ +static PyObject * +unicode_equaltoutf8andsize(PyObject *self, PyObject *args) +{ + PyObject *left; + const char *right = NULL; + Py_ssize_t right_len; + Py_ssize_t size = -100; + int result; + + if (!PyArg_ParseTuple(args, "Oz#|n", &left, &right, &right_len, &size)) { + return NULL; + } + + NULLABLE(left); + if (size == -100) { + size = right_len; + } + result = PyUnicode_EqualToUTF8AndSize(left, right, size); + assert(!PyErr_Occurred()); + return PyLong_FromLong(result); +} + +/* Test PyUnicode_RichCompare() */ +static PyObject * +unicode_richcompare(PyObject *self, PyObject *args) +{ + PyObject *left; + PyObject *right; + int op; + + if (!PyArg_ParseTuple(args, "OOi", &left, &right, &op)) + return NULL; + + NULLABLE(left); + NULLABLE(right); + return PyUnicode_RichCompare(left, right, op); +} + +/* Test PyUnicode_Format() */ +static PyObject * +unicode_format(PyObject *self, PyObject *args) +{ + PyObject *format; + PyObject *fargs; + + if (!PyArg_ParseTuple(args, "OO", &format, &fargs)) + return NULL; + + NULLABLE(format); + NULLABLE(fargs); + return PyUnicode_Format(format, fargs); +} + +/* Test PyUnicode_Contains() */ +static PyObject * +unicode_contains(PyObject *self, PyObject *args) +{ + PyObject *container; + PyObject *element; + + if (!PyArg_ParseTuple(args, "OO", &container, &element)) + return NULL; + + NULLABLE(container); + NULLABLE(element); + RETURN_INT(PyUnicode_Contains(container, element)); +} + +/* Test PyUnicode_IsIdentifier() */ +static PyObject * +unicode_isidentifier(PyObject *self, PyObject *arg) +{ + NULLABLE(arg); + RETURN_INT(PyUnicode_IsIdentifier(arg)); +} + + +static int +check_raised_systemerror(PyObject *result, char* msg) +{ + if (result) { + // no exception + PyErr_Format(PyExc_AssertionError, + "SystemError not raised: %s", + msg); + return 0; + } + if (PyErr_ExceptionMatches(PyExc_SystemError)) { + // expected exception + PyErr_Clear(); + return 1; + } + // unexpected exception + return 0; +} + +static PyObject * +test_string_from_format(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + PyObject *result; + PyObject *unicode = PyUnicode_FromString("None"); + +#define CHECK_FORMAT_2(FORMAT, EXPECTED, ARG1, ARG2) \ + result = PyUnicode_FromFormat(FORMAT, ARG1, ARG2); \ + if (EXPECTED == NULL) { \ + if (!check_raised_systemerror(result, FORMAT)) { \ + goto Fail; \ + } \ + } \ + else if (result == NULL) \ + return NULL; \ + else if (PyUnicode_CompareWithASCIIString(result, EXPECTED) != 0) { \ + PyObject *utf8 = PyUnicode_AsUTF8String(result); \ + PyErr_Format(PyExc_AssertionError, \ + "test_string_from_format: failed at \"%s\" " \ + "expected \"%s\" got \"%s\"", \ + FORMAT, EXPECTED, utf8); \ + Py_XDECREF(utf8); \ + goto Fail; \ + } \ + Py_XDECREF(result) + +#define CHECK_FORMAT_1(FORMAT, EXPECTED, ARG) \ + CHECK_FORMAT_2(FORMAT, EXPECTED, ARG, 0) + +#define CHECK_FORMAT_0(FORMAT, EXPECTED) \ + CHECK_FORMAT_2(FORMAT, EXPECTED, 0, 0) + + // Unrecognized + CHECK_FORMAT_2("%u %? 
%u", NULL, 1, 2); + + // "%%" (options are rejected) + CHECK_FORMAT_0( "%%", "%"); + CHECK_FORMAT_0( "%0%", NULL); + CHECK_FORMAT_0("%00%", NULL); + CHECK_FORMAT_0( "%2%", NULL); + CHECK_FORMAT_0("%02%", NULL); + CHECK_FORMAT_0("%.0%", NULL); + CHECK_FORMAT_0("%.2%", NULL); + + // "%c" + CHECK_FORMAT_1( "%c", "c", 'c'); + CHECK_FORMAT_1( "%0c", "c", 'c'); + CHECK_FORMAT_1("%00c", "c", 'c'); + CHECK_FORMAT_1( "%2c", NULL, 'c'); + CHECK_FORMAT_1("%02c", NULL, 'c'); + CHECK_FORMAT_1("%.0c", NULL, 'c'); + CHECK_FORMAT_1("%.2c", NULL, 'c'); + + // Integers + CHECK_FORMAT_1("%d", "123", (int)123); + CHECK_FORMAT_1("%i", "123", (int)123); + CHECK_FORMAT_1("%u", "123", (unsigned int)123); + CHECK_FORMAT_1("%x", "7b", (unsigned int)123); + CHECK_FORMAT_1("%X", "7B", (unsigned int)123); + CHECK_FORMAT_1("%o", "173", (unsigned int)123); + CHECK_FORMAT_1("%ld", "123", (long)123); + CHECK_FORMAT_1("%li", "123", (long)123); + CHECK_FORMAT_1("%lu", "123", (unsigned long)123); + CHECK_FORMAT_1("%lx", "7b", (unsigned long)123); + CHECK_FORMAT_1("%lX", "7B", (unsigned long)123); + CHECK_FORMAT_1("%lo", "173", (unsigned long)123); + CHECK_FORMAT_1("%lld", "123", (long long)123); + CHECK_FORMAT_1("%lli", "123", (long long)123); + CHECK_FORMAT_1("%llu", "123", (unsigned long long)123); + CHECK_FORMAT_1("%llx", "7b", (unsigned long long)123); + CHECK_FORMAT_1("%llX", "7B", (unsigned long long)123); + CHECK_FORMAT_1("%llo", "173", (unsigned long long)123); + CHECK_FORMAT_1("%zd", "123", (Py_ssize_t)123); + CHECK_FORMAT_1("%zi", "123", (Py_ssize_t)123); + CHECK_FORMAT_1("%zu", "123", (size_t)123); + CHECK_FORMAT_1("%zx", "7b", (size_t)123); + CHECK_FORMAT_1("%zX", "7B", (size_t)123); + CHECK_FORMAT_1("%zo", "173", (size_t)123); + CHECK_FORMAT_1("%td", "123", (ptrdiff_t)123); + CHECK_FORMAT_1("%ti", "123", (ptrdiff_t)123); + CHECK_FORMAT_1("%tu", "123", (ptrdiff_t)123); + CHECK_FORMAT_1("%tx", "7b", (ptrdiff_t)123); + CHECK_FORMAT_1("%tX", "7B", (ptrdiff_t)123); + CHECK_FORMAT_1("%to", "173", (ptrdiff_t)123); + CHECK_FORMAT_1("%jd", "123", (intmax_t)123); + CHECK_FORMAT_1("%ji", "123", (intmax_t)123); + CHECK_FORMAT_1("%ju", "123", (uintmax_t)123); + CHECK_FORMAT_1("%jx", "7b", (uintmax_t)123); + CHECK_FORMAT_1("%jX", "7B", (uintmax_t)123); + CHECK_FORMAT_1("%jo", "173", (uintmax_t)123); + + CHECK_FORMAT_1("%d", "-123", (int)-123); + CHECK_FORMAT_1("%i", "-123", (int)-123); + CHECK_FORMAT_1("%ld", "-123", (long)-123); + CHECK_FORMAT_1("%li", "-123", (long)-123); + CHECK_FORMAT_1("%lld", "-123", (long long)-123); + CHECK_FORMAT_1("%lli", "-123", (long long)-123); + CHECK_FORMAT_1("%zd", "-123", (Py_ssize_t)-123); + CHECK_FORMAT_1("%zi", "-123", (Py_ssize_t)-123); + CHECK_FORMAT_1("%td", "-123", (ptrdiff_t)-123); + CHECK_FORMAT_1("%ti", "-123", (ptrdiff_t)-123); + CHECK_FORMAT_1("%jd", "-123", (intmax_t)-123); + CHECK_FORMAT_1("%ji", "-123", (intmax_t)-123); + + // Integers: width < length + CHECK_FORMAT_1("%1d", "123", (int)123); + CHECK_FORMAT_1("%1i", "123", (int)123); + CHECK_FORMAT_1("%1u", "123", (unsigned int)123); + CHECK_FORMAT_1("%1ld", "123", (long)123); + CHECK_FORMAT_1("%1li", "123", (long)123); + CHECK_FORMAT_1("%1lu", "123", (unsigned long)123); + CHECK_FORMAT_1("%1lld", "123", (long long)123); + CHECK_FORMAT_1("%1lli", "123", (long long)123); + CHECK_FORMAT_1("%1llu", "123", (unsigned long long)123); + CHECK_FORMAT_1("%1zd", "123", (Py_ssize_t)123); + CHECK_FORMAT_1("%1zi", "123", (Py_ssize_t)123); + CHECK_FORMAT_1("%1zu", "123", (size_t)123); + CHECK_FORMAT_1("%1x", "7b", (int)123); + + 
CHECK_FORMAT_1("%1d", "-123", (int)-123); + CHECK_FORMAT_1("%1i", "-123", (int)-123); + CHECK_FORMAT_1("%1ld", "-123", (long)-123); + CHECK_FORMAT_1("%1li", "-123", (long)-123); + CHECK_FORMAT_1("%1lld", "-123", (long long)-123); + CHECK_FORMAT_1("%1lli", "-123", (long long)-123); + CHECK_FORMAT_1("%1zd", "-123", (Py_ssize_t)-123); + CHECK_FORMAT_1("%1zi", "-123", (Py_ssize_t)-123); + + // Integers: width > length + CHECK_FORMAT_1("%5d", " 123", (int)123); + CHECK_FORMAT_1("%5i", " 123", (int)123); + CHECK_FORMAT_1("%5u", " 123", (unsigned int)123); + CHECK_FORMAT_1("%5ld", " 123", (long)123); + CHECK_FORMAT_1("%5li", " 123", (long)123); + CHECK_FORMAT_1("%5lu", " 123", (unsigned long)123); + CHECK_FORMAT_1("%5lld", " 123", (long long)123); + CHECK_FORMAT_1("%5lli", " 123", (long long)123); + CHECK_FORMAT_1("%5llu", " 123", (unsigned long long)123); + CHECK_FORMAT_1("%5zd", " 123", (Py_ssize_t)123); + CHECK_FORMAT_1("%5zi", " 123", (Py_ssize_t)123); + CHECK_FORMAT_1("%5zu", " 123", (size_t)123); + CHECK_FORMAT_1("%5x", " 7b", (int)123); + + CHECK_FORMAT_1("%5d", " -123", (int)-123); + CHECK_FORMAT_1("%5i", " -123", (int)-123); + CHECK_FORMAT_1("%5ld", " -123", (long)-123); + CHECK_FORMAT_1("%5li", " -123", (long)-123); + CHECK_FORMAT_1("%5lld", " -123", (long long)-123); + CHECK_FORMAT_1("%5lli", " -123", (long long)-123); + CHECK_FORMAT_1("%5zd", " -123", (Py_ssize_t)-123); + CHECK_FORMAT_1("%5zi", " -123", (Py_ssize_t)-123); + + // Integers: width > length, 0-flag + CHECK_FORMAT_1("%05d", "00123", (int)123); + CHECK_FORMAT_1("%05i", "00123", (int)123); + CHECK_FORMAT_1("%05u", "00123", (unsigned int)123); + CHECK_FORMAT_1("%05ld", "00123", (long)123); + CHECK_FORMAT_1("%05li", "00123", (long)123); + CHECK_FORMAT_1("%05lu", "00123", (unsigned long)123); + CHECK_FORMAT_1("%05lld", "00123", (long long)123); + CHECK_FORMAT_1("%05lli", "00123", (long long)123); + CHECK_FORMAT_1("%05llu", "00123", (unsigned long long)123); + CHECK_FORMAT_1("%05zd", "00123", (Py_ssize_t)123); + CHECK_FORMAT_1("%05zi", "00123", (Py_ssize_t)123); + CHECK_FORMAT_1("%05zu", "00123", (size_t)123); + CHECK_FORMAT_1("%05x", "0007b", (int)123); + + CHECK_FORMAT_1("%05d", "-0123", (int)-123); + CHECK_FORMAT_1("%05i", "-0123", (int)-123); + CHECK_FORMAT_1("%05ld", "-0123", (long)-123); + CHECK_FORMAT_1("%05li", "-0123", (long)-123); + CHECK_FORMAT_1("%05lld", "-0123", (long long)-123); + CHECK_FORMAT_1("%05lli", "-0123", (long long)-123); + CHECK_FORMAT_1("%05zd", "-0123", (Py_ssize_t)-123); + CHECK_FORMAT_1("%05zi", "-0123", (Py_ssize_t)-123); + + // Integers: precision < length + CHECK_FORMAT_1("%.1d", "123", (int)123); + CHECK_FORMAT_1("%.1i", "123", (int)123); + CHECK_FORMAT_1("%.1u", "123", (unsigned int)123); + CHECK_FORMAT_1("%.1ld", "123", (long)123); + CHECK_FORMAT_1("%.1li", "123", (long)123); + CHECK_FORMAT_1("%.1lu", "123", (unsigned long)123); + CHECK_FORMAT_1("%.1lld", "123", (long long)123); + CHECK_FORMAT_1("%.1lli", "123", (long long)123); + CHECK_FORMAT_1("%.1llu", "123", (unsigned long long)123); + CHECK_FORMAT_1("%.1zd", "123", (Py_ssize_t)123); + CHECK_FORMAT_1("%.1zi", "123", (Py_ssize_t)123); + CHECK_FORMAT_1("%.1zu", "123", (size_t)123); + CHECK_FORMAT_1("%.1x", "7b", (int)123); + + CHECK_FORMAT_1("%.1d", "-123", (int)-123); + CHECK_FORMAT_1("%.1i", "-123", (int)-123); + CHECK_FORMAT_1("%.1ld", "-123", (long)-123); + CHECK_FORMAT_1("%.1li", "-123", (long)-123); + CHECK_FORMAT_1("%.1lld", "-123", (long long)-123); + CHECK_FORMAT_1("%.1lli", "-123", (long long)-123); + CHECK_FORMAT_1("%.1zd", 
"-123", (Py_ssize_t)-123); + CHECK_FORMAT_1("%.1zi", "-123", (Py_ssize_t)-123); + + // Integers: precision > length + CHECK_FORMAT_1("%.5d", "00123", (int)123); + CHECK_FORMAT_1("%.5i", "00123", (int)123); + CHECK_FORMAT_1("%.5u", "00123", (unsigned int)123); + CHECK_FORMAT_1("%.5ld", "00123", (long)123); + CHECK_FORMAT_1("%.5li", "00123", (long)123); + CHECK_FORMAT_1("%.5lu", "00123", (unsigned long)123); + CHECK_FORMAT_1("%.5lld", "00123", (long long)123); + CHECK_FORMAT_1("%.5lli", "00123", (long long)123); + CHECK_FORMAT_1("%.5llu", "00123", (unsigned long long)123); + CHECK_FORMAT_1("%.5zd", "00123", (Py_ssize_t)123); + CHECK_FORMAT_1("%.5zi", "00123", (Py_ssize_t)123); + CHECK_FORMAT_1("%.5zu", "00123", (size_t)123); + CHECK_FORMAT_1("%.5x", "0007b", (int)123); + + CHECK_FORMAT_1("%.5d", "-00123", (int)-123); + CHECK_FORMAT_1("%.5i", "-00123", (int)-123); + CHECK_FORMAT_1("%.5ld", "-00123", (long)-123); + CHECK_FORMAT_1("%.5li", "-00123", (long)-123); + CHECK_FORMAT_1("%.5lld", "-00123", (long long)-123); + CHECK_FORMAT_1("%.5lli", "-00123", (long long)-123); + CHECK_FORMAT_1("%.5zd", "-00123", (Py_ssize_t)-123); + CHECK_FORMAT_1("%.5zi", "-00123", (Py_ssize_t)-123); + + // Integers: width > precision > length + CHECK_FORMAT_1("%7.5d", " 00123", (int)123); + CHECK_FORMAT_1("%7.5i", " 00123", (int)123); + CHECK_FORMAT_1("%7.5u", " 00123", (unsigned int)123); + CHECK_FORMAT_1("%7.5ld", " 00123", (long)123); + CHECK_FORMAT_1("%7.5li", " 00123", (long)123); + CHECK_FORMAT_1("%7.5lu", " 00123", (unsigned long)123); + CHECK_FORMAT_1("%7.5lld", " 00123", (long long)123); + CHECK_FORMAT_1("%7.5lli", " 00123", (long long)123); + CHECK_FORMAT_1("%7.5llu", " 00123", (unsigned long long)123); + CHECK_FORMAT_1("%7.5zd", " 00123", (Py_ssize_t)123); + CHECK_FORMAT_1("%7.5zi", " 00123", (Py_ssize_t)123); + CHECK_FORMAT_1("%7.5zu", " 00123", (size_t)123); + CHECK_FORMAT_1("%7.5x", " 0007b", (int)123); + + CHECK_FORMAT_1("%7.5d", " -00123", (int)-123); + CHECK_FORMAT_1("%7.5i", " -00123", (int)-123); + CHECK_FORMAT_1("%7.5ld", " -00123", (long)-123); + CHECK_FORMAT_1("%7.5li", " -00123", (long)-123); + CHECK_FORMAT_1("%7.5lld", " -00123", (long long)-123); + CHECK_FORMAT_1("%7.5lli", " -00123", (long long)-123); + CHECK_FORMAT_1("%7.5zd", " -00123", (Py_ssize_t)-123); + CHECK_FORMAT_1("%7.5zi", " -00123", (Py_ssize_t)-123); + + // Integers: width > precision > length, 0-flag + CHECK_FORMAT_1("%07.5d", "0000123", (int)123); + CHECK_FORMAT_1("%07.5i", "0000123", (int)123); + CHECK_FORMAT_1("%07.5u", "0000123", (unsigned int)123); + CHECK_FORMAT_1("%07.5ld", "0000123", (long)123); + CHECK_FORMAT_1("%07.5li", "0000123", (long)123); + CHECK_FORMAT_1("%07.5lu", "0000123", (unsigned long)123); + CHECK_FORMAT_1("%07.5lld", "0000123", (long long)123); + CHECK_FORMAT_1("%07.5lli", "0000123", (long long)123); + CHECK_FORMAT_1("%07.5llu", "0000123", (unsigned long long)123); + CHECK_FORMAT_1("%07.5zd", "0000123", (Py_ssize_t)123); + CHECK_FORMAT_1("%07.5zi", "0000123", (Py_ssize_t)123); + CHECK_FORMAT_1("%07.5zu", "0000123", (size_t)123); + CHECK_FORMAT_1("%07.5x", "000007b", (int)123); + + CHECK_FORMAT_1("%07.5d", "-000123", (int)-123); + CHECK_FORMAT_1("%07.5i", "-000123", (int)-123); + CHECK_FORMAT_1("%07.5ld", "-000123", (long)-123); + CHECK_FORMAT_1("%07.5li", "-000123", (long)-123); + CHECK_FORMAT_1("%07.5lld", "-000123", (long long)-123); + CHECK_FORMAT_1("%07.5lli", "-000123", (long long)-123); + CHECK_FORMAT_1("%07.5zd", "-000123", (Py_ssize_t)-123); + CHECK_FORMAT_1("%07.5zi", "-000123", 
(Py_ssize_t)-123); + + // Integers: precision > width > length + CHECK_FORMAT_1("%5.7d", "0000123", (int)123); + CHECK_FORMAT_1("%5.7i", "0000123", (int)123); + CHECK_FORMAT_1("%5.7u", "0000123", (unsigned int)123); + CHECK_FORMAT_1("%5.7ld", "0000123", (long)123); + CHECK_FORMAT_1("%5.7li", "0000123", (long)123); + CHECK_FORMAT_1("%5.7lu", "0000123", (unsigned long)123); + CHECK_FORMAT_1("%5.7lld", "0000123", (long long)123); + CHECK_FORMAT_1("%5.7lli", "0000123", (long long)123); + CHECK_FORMAT_1("%5.7llu", "0000123", (unsigned long long)123); + CHECK_FORMAT_1("%5.7zd", "0000123", (Py_ssize_t)123); + CHECK_FORMAT_1("%5.7zi", "0000123", (Py_ssize_t)123); + CHECK_FORMAT_1("%5.7zu", "0000123", (size_t)123); + CHECK_FORMAT_1("%5.7x", "000007b", (int)123); + + CHECK_FORMAT_1("%5.7d", "-0000123", (int)-123); + CHECK_FORMAT_1("%5.7i", "-0000123", (int)-123); + CHECK_FORMAT_1("%5.7ld", "-0000123", (long)-123); + CHECK_FORMAT_1("%5.7li", "-0000123", (long)-123); + CHECK_FORMAT_1("%5.7lld", "-0000123", (long long)-123); + CHECK_FORMAT_1("%5.7lli", "-0000123", (long long)-123); + CHECK_FORMAT_1("%5.7zd", "-0000123", (Py_ssize_t)-123); + CHECK_FORMAT_1("%5.7zi", "-0000123", (Py_ssize_t)-123); + + // Integers: precision > width > length, 0-flag + CHECK_FORMAT_1("%05.7d", "0000123", (int)123); + CHECK_FORMAT_1("%05.7i", "0000123", (int)123); + CHECK_FORMAT_1("%05.7u", "0000123", (unsigned int)123); + CHECK_FORMAT_1("%05.7ld", "0000123", (long)123); + CHECK_FORMAT_1("%05.7li", "0000123", (long)123); + CHECK_FORMAT_1("%05.7lu", "0000123", (unsigned long)123); + CHECK_FORMAT_1("%05.7lld", "0000123", (long long)123); + CHECK_FORMAT_1("%05.7lli", "0000123", (long long)123); + CHECK_FORMAT_1("%05.7llu", "0000123", (unsigned long long)123); + CHECK_FORMAT_1("%05.7zd", "0000123", (Py_ssize_t)123); + CHECK_FORMAT_1("%05.7zi", "0000123", (Py_ssize_t)123); + CHECK_FORMAT_1("%05.7zu", "0000123", (size_t)123); + CHECK_FORMAT_1("%05.7x", "000007b", (int)123); + + CHECK_FORMAT_1("%05.7d", "-0000123", (int)-123); + CHECK_FORMAT_1("%05.7i", "-0000123", (int)-123); + CHECK_FORMAT_1("%05.7ld", "-0000123", (long)-123); + CHECK_FORMAT_1("%05.7li", "-0000123", (long)-123); + CHECK_FORMAT_1("%05.7lld", "-0000123", (long long)-123); + CHECK_FORMAT_1("%05.7lli", "-0000123", (long long)-123); + CHECK_FORMAT_1("%05.7zd", "-0000123", (Py_ssize_t)-123); + CHECK_FORMAT_1("%05.7zi", "-0000123", (Py_ssize_t)-123); + + // Integers: precision = 0, arg = 0 (empty string in C) + CHECK_FORMAT_1("%.0d", "0", (int)0); + CHECK_FORMAT_1("%.0i", "0", (int)0); + CHECK_FORMAT_1("%.0u", "0", (unsigned int)0); + CHECK_FORMAT_1("%.0ld", "0", (long)0); + CHECK_FORMAT_1("%.0li", "0", (long)0); + CHECK_FORMAT_1("%.0lu", "0", (unsigned long)0); + CHECK_FORMAT_1("%.0lld", "0", (long long)0); + CHECK_FORMAT_1("%.0lli", "0", (long long)0); + CHECK_FORMAT_1("%.0llu", "0", (unsigned long long)0); + CHECK_FORMAT_1("%.0zd", "0", (Py_ssize_t)0); + CHECK_FORMAT_1("%.0zi", "0", (Py_ssize_t)0); + CHECK_FORMAT_1("%.0zu", "0", (size_t)0); + CHECK_FORMAT_1("%.0x", "0", (int)0); + + // Strings + CHECK_FORMAT_1("%s", "None", "None"); + CHECK_FORMAT_1("%ls", "None", L"None"); + CHECK_FORMAT_1("%U", "None", unicode); + CHECK_FORMAT_1("%A", "None", Py_None); + CHECK_FORMAT_1("%S", "None", Py_None); + CHECK_FORMAT_1("%R", "None", Py_None); + CHECK_FORMAT_2("%V", "None", unicode, "ignored"); + CHECK_FORMAT_2("%V", "None", NULL, "None"); + CHECK_FORMAT_2("%lV", "None", NULL, L"None"); + + // Strings: width < length + CHECK_FORMAT_1("%1s", "None", "None"); + 
CHECK_FORMAT_1("%1ls", "None", L"None"); + CHECK_FORMAT_1("%1U", "None", unicode); + CHECK_FORMAT_1("%1A", "None", Py_None); + CHECK_FORMAT_1("%1S", "None", Py_None); + CHECK_FORMAT_1("%1R", "None", Py_None); + CHECK_FORMAT_2("%1V", "None", unicode, "ignored"); + CHECK_FORMAT_2("%1V", "None", NULL, "None"); + CHECK_FORMAT_2("%1lV", "None", NULL, L"None"); + + // Strings: width > length + CHECK_FORMAT_1("%5s", " None", "None"); + CHECK_FORMAT_1("%5ls", " None", L"None"); + CHECK_FORMAT_1("%5U", " None", unicode); + CHECK_FORMAT_1("%5A", " None", Py_None); + CHECK_FORMAT_1("%5S", " None", Py_None); + CHECK_FORMAT_1("%5R", " None", Py_None); + CHECK_FORMAT_2("%5V", " None", unicode, "ignored"); + CHECK_FORMAT_2("%5V", " None", NULL, "None"); + CHECK_FORMAT_2("%5lV", " None", NULL, L"None"); + + // Strings: precision < length + CHECK_FORMAT_1("%.1s", "N", "None"); + CHECK_FORMAT_1("%.1ls", "N", L"None"); + CHECK_FORMAT_1("%.1U", "N", unicode); + CHECK_FORMAT_1("%.1A", "N", Py_None); + CHECK_FORMAT_1("%.1S", "N", Py_None); + CHECK_FORMAT_1("%.1R", "N", Py_None); + CHECK_FORMAT_2("%.1V", "N", unicode, "ignored"); + CHECK_FORMAT_2("%.1V", "N", NULL, "None"); + CHECK_FORMAT_2("%.1lV", "N", NULL, L"None"); + + // Strings: precision > length + CHECK_FORMAT_1("%.5s", "None", "None"); + CHECK_FORMAT_1("%.5ls", "None", L"None"); + CHECK_FORMAT_1("%.5U", "None", unicode); + CHECK_FORMAT_1("%.5A", "None", Py_None); + CHECK_FORMAT_1("%.5S", "None", Py_None); + CHECK_FORMAT_1("%.5R", "None", Py_None); + CHECK_FORMAT_2("%.5V", "None", unicode, "ignored"); + CHECK_FORMAT_2("%.5V", "None", NULL, "None"); + CHECK_FORMAT_2("%.5lV", "None", NULL, L"None"); + + // Strings: precision < length, width > length + CHECK_FORMAT_1("%5.1s", " N", "None"); + CHECK_FORMAT_1("%5.1ls"," N", L"None"); + CHECK_FORMAT_1("%5.1U", " N", unicode); + CHECK_FORMAT_1("%5.1A", " N", Py_None); + CHECK_FORMAT_1("%5.1S", " N", Py_None); + CHECK_FORMAT_1("%5.1R", " N", Py_None); + CHECK_FORMAT_2("%5.1V", " N", unicode, "ignored"); + CHECK_FORMAT_2("%5.1V", " N", NULL, "None"); + CHECK_FORMAT_2("%5.1lV"," N", NULL, L"None"); + + // Strings: width < length, precision > length + CHECK_FORMAT_1("%1.5s", "None", "None"); + CHECK_FORMAT_1("%1.5ls", "None", L"None"); + CHECK_FORMAT_1("%1.5U", "None", unicode); + CHECK_FORMAT_1("%1.5A", "None", Py_None); + CHECK_FORMAT_1("%1.5S", "None", Py_None); + CHECK_FORMAT_1("%1.5R", "None", Py_None); + CHECK_FORMAT_2("%1.5V", "None", unicode, "ignored"); + CHECK_FORMAT_2("%1.5V", "None", NULL, "None"); + CHECK_FORMAT_2("%1.5lV", "None", NULL, L"None"); + + Py_XDECREF(unicode); + Py_RETURN_NONE; + + Fail: + Py_XDECREF(result); + Py_XDECREF(unicode); + return NULL; + +#undef CHECK_FORMAT_2 +#undef CHECK_FORMAT_1 +#undef CHECK_FORMAT_0 +} + +static PyMethodDef TestMethods[] = { + {"codec_incrementalencoder", codec_incrementalencoder, METH_VARARGS}, + {"codec_incrementaldecoder", codec_incrementaldecoder, METH_VARARGS}, + {"test_unicode_compare_with_ascii", + test_unicode_compare_with_ascii, METH_NOARGS}, + {"test_string_from_format", test_string_from_format, METH_NOARGS}, + {"test_widechar", test_widechar, METH_NOARGS}, + {"unicode_writechar", unicode_writechar, METH_VARARGS}, + {"unicode_resize", unicode_resize, METH_VARARGS}, + {"unicode_append", unicode_append, METH_VARARGS}, + {"unicode_appendanddel", unicode_appendanddel, METH_VARARGS}, + {"unicode_fromstringandsize",unicode_fromstringandsize, METH_VARARGS}, + {"unicode_fromstring", unicode_fromstring, METH_O}, + {"unicode_substring", unicode_substring, 
METH_VARARGS}, + {"unicode_getlength", unicode_getlength, METH_O}, + {"unicode_readchar", unicode_readchar, METH_VARARGS}, + {"unicode_fromencodedobject",unicode_fromencodedobject, METH_VARARGS}, + {"unicode_fromobject", unicode_fromobject, METH_O}, + {"unicode_interninplace", unicode_interninplace, METH_O}, + {"unicode_internfromstring", unicode_internfromstring, METH_O}, + {"unicode_fromwidechar", unicode_fromwidechar, METH_VARARGS}, + {"unicode_aswidechar", unicode_aswidechar, METH_VARARGS}, + {"unicode_aswidechar_null", unicode_aswidechar_null, METH_VARARGS}, + {"unicode_aswidecharstring", unicode_aswidecharstring, METH_VARARGS}, + {"unicode_aswidecharstring_null",unicode_aswidecharstring_null,METH_VARARGS}, + {"unicode_fromordinal", unicode_fromordinal, METH_VARARGS}, + {"unicode_asutf8andsize", unicode_asutf8andsize, METH_VARARGS}, + {"unicode_asutf8andsize_null",unicode_asutf8andsize_null, METH_VARARGS}, + {"unicode_getdefaultencoding",unicode_getdefaultencoding, METH_NOARGS}, + {"unicode_decode", unicode_decode, METH_VARARGS}, + {"unicode_asencodedstring", unicode_asencodedstring, METH_VARARGS}, + {"unicode_buildencodingmap", unicode_buildencodingmap, METH_O}, + {"unicode_decodeutf7", unicode_decodeutf7, METH_VARARGS}, + {"unicode_decodeutf7stateful",unicode_decodeutf7stateful, METH_VARARGS}, + {"unicode_decodeutf8", unicode_decodeutf8, METH_VARARGS}, + {"unicode_decodeutf8stateful",unicode_decodeutf8stateful, METH_VARARGS}, + {"unicode_asutf8string", unicode_asutf8string, METH_O}, + {"unicode_decodeutf16", unicode_decodeutf16, METH_VARARGS}, + {"unicode_decodeutf16stateful",unicode_decodeutf16stateful, METH_VARARGS}, + {"unicode_asutf16string", unicode_asutf16string, METH_O}, + {"unicode_decodeutf32", unicode_decodeutf32, METH_VARARGS}, + {"unicode_decodeutf32stateful",unicode_decodeutf32stateful, METH_VARARGS}, + {"unicode_asutf32string", unicode_asutf32string, METH_O}, + {"unicode_decodeunicodeescape",unicode_decodeunicodeescape, METH_VARARGS}, + {"unicode_asunicodeescapestring",unicode_asunicodeescapestring,METH_O}, + {"unicode_decoderawunicodeescape",unicode_decoderawunicodeescape,METH_VARARGS}, + {"unicode_asrawunicodeescapestring",unicode_asrawunicodeescapestring,METH_O}, + {"unicode_decodelatin1", unicode_decodelatin1, METH_VARARGS}, + {"unicode_aslatin1string", unicode_aslatin1string, METH_O}, + {"unicode_decodeascii", unicode_decodeascii, METH_VARARGS}, + {"unicode_asasciistring", unicode_asasciistring, METH_O}, + {"unicode_decodecharmap", unicode_decodecharmap, METH_VARARGS}, + {"unicode_ascharmapstring", unicode_ascharmapstring, METH_VARARGS}, +#ifdef MS_WINDOWS + {"unicode_decodembcs", unicode_decodembcs, METH_VARARGS}, + {"unicode_decodembcsstateful",unicode_decodembcsstateful, METH_VARARGS}, + {"unicode_decodecodepagestateful",unicode_decodecodepagestateful,METH_VARARGS}, + {"unicode_asmbcsstring", unicode_asmbcsstring, METH_O}, + {"unicode_encodecodepage", unicode_encodecodepage, METH_VARARGS}, +#endif /* MS_WINDOWS */ + {"unicode_decodelocaleandsize",unicode_decodelocaleandsize, METH_VARARGS}, + {"unicode_decodelocale", unicode_decodelocale, METH_VARARGS}, + {"unicode_encodelocale", unicode_encodelocale, METH_VARARGS}, + {"unicode_decodefsdefault", unicode_decodefsdefault, METH_VARARGS}, + {"unicode_decodefsdefaultandsize",unicode_decodefsdefaultandsize,METH_VARARGS}, + {"unicode_encodefsdefault", unicode_encodefsdefault, METH_O}, + {"unicode_concat", unicode_concat, METH_VARARGS}, + {"unicode_splitlines", unicode_splitlines, METH_VARARGS}, + {"unicode_split", 
unicode_split, METH_VARARGS}, + {"unicode_rsplit", unicode_rsplit, METH_VARARGS}, + {"unicode_partition", unicode_partition, METH_VARARGS}, + {"unicode_rpartition", unicode_rpartition, METH_VARARGS}, + {"unicode_translate", unicode_translate, METH_VARARGS}, + {"unicode_join", unicode_join, METH_VARARGS}, + {"unicode_count", unicode_count, METH_VARARGS}, + {"unicode_tailmatch", unicode_tailmatch, METH_VARARGS}, + {"unicode_find", unicode_find, METH_VARARGS}, + {"unicode_findchar", unicode_findchar, METH_VARARGS}, + {"unicode_replace", unicode_replace, METH_VARARGS}, + {"unicode_compare", unicode_compare, METH_VARARGS}, + {"unicode_comparewithasciistring",unicode_comparewithasciistring,METH_VARARGS}, + {"unicode_equaltoutf8", unicode_equaltoutf8, METH_VARARGS}, + {"unicode_equaltoutf8andsize",unicode_equaltoutf8andsize, METH_VARARGS}, + {"unicode_richcompare", unicode_richcompare, METH_VARARGS}, + {"unicode_format", unicode_format, METH_VARARGS}, + {"unicode_contains", unicode_contains, METH_VARARGS}, + {"unicode_isidentifier", unicode_isidentifier, METH_O}, + {NULL}, +}; + +int +_PyTestLimitedCAPI_Init_Unicode(PyObject *m) +{ + if (PyModule_AddFunctions(m, TestMethods) < 0) { + return -1; + } + + return 0; +} diff --git a/Modules/_testlimitedcapi/vectorcall_limited.c b/Modules/_testlimitedcapi/vectorcall_limited.c index fc1a89c9098e1b..784126c17fccc1 100644 --- a/Modules/_testlimitedcapi/vectorcall_limited.c +++ b/Modules/_testlimitedcapi/vectorcall_limited.c @@ -182,7 +182,8 @@ static PyMethodDef TestMethods[] = { }; int -_PyTestCapi_Init_VectorcallLimited(PyObject *m) { +_PyTestLimitedCAPI_Init_VectorcallLimited(PyObject *m) +{ if (PyModule_AddFunctions(m, TestMethods) < 0) { return -1; } diff --git a/Modules/_threadmodule.c b/Modules/_threadmodule.c index 6889e8f6e12126..4912cd776ef5ae 100644 --- a/Modules/_threadmodule.c +++ b/Modules/_threadmodule.c @@ -1729,7 +1729,7 @@ do_start_new_thread(thread_module_state *state, PyObject *func, PyObject *args, "thread is not supported for isolated subinterpreters"); return -1; } - if (interp->finalizing) { + if (_PyInterpreterState_GetFinalizing(interp) != NULL) { PyErr_SetString(PyExc_PythonFinalizationError, "can't create new thread at interpreter shutdown"); return -1; diff --git a/Modules/_xxinterpchannelsmodule.c b/Modules/_xxinterpchannelsmodule.c index 28ec00a159d6cd..b63a3aab8263bc 100644 --- a/Modules/_xxinterpchannelsmodule.c +++ b/Modules/_xxinterpchannelsmodule.c @@ -6,7 +6,6 @@ #endif #include "Python.h" -#include "interpreteridobject.h" #include "pycore_crossinterp.h" // struct _xid #include "pycore_interp.h" // _PyInterpreterState_LookUpID() @@ -18,7 +17,9 @@ #endif #define REGISTERS_HEAP_TYPES +#define RETURNS_INTERPID_OBJECT #include "_interpreters_common.h" +#undef RETURNS_INTERPID_OBJECT #undef REGISTERS_HEAP_TYPES @@ -2908,7 +2909,7 @@ channelsmod_list_interpreters(PyObject *self, PyObject *args, PyObject *kwds) goto except; } if (res) { - interpid_obj = PyInterpreterState_GetIDObject(interp); + interpid_obj = get_interpid_obj(interp); if (interpid_obj == NULL) { goto except; } diff --git a/Modules/_xxsubinterpretersmodule.c b/Modules/_xxsubinterpretersmodule.c index 28c2f9c08bc0da..5e5b3c10201867 100644 --- a/Modules/_xxsubinterpretersmodule.c +++ b/Modules/_xxsubinterpretersmodule.c @@ -16,10 +16,11 @@ #include "pycore_pyerrors.h" // _Py_excinfo #include "pycore_pystate.h" // _PyInterpreterState_SetRunningMain() -#include "interpreteridobject.h" #include "marshal.h" // PyMarshal_ReadObjectFromString() +#define 
RETURNS_INTERPID_OBJECT #include "_interpreters_common.h" +#undef RETURNS_INTERPID_OBJECT #define MODULE_NAME _xxsubinterpreters @@ -35,96 +36,8 @@ _get_current_interp(void) return PyInterpreterState_Get(); } -static int64_t -pylong_to_interpid(PyObject *idobj) -{ - assert(PyLong_CheckExact(idobj)); - - if (_PyLong_IsNegative((PyLongObject *)idobj)) { - PyErr_Format(PyExc_ValueError, - "interpreter ID must be a non-negative int, got %R", - idobj); - return -1; - } - - int overflow; - long long id = PyLong_AsLongLongAndOverflow(idobj, &overflow); - if (id == -1) { - if (!overflow) { - assert(PyErr_Occurred()); - return -1; - } - assert(!PyErr_Occurred()); - // For now, we don't worry about if LLONG_MAX < INT64_MAX. - goto bad_id; - } -#if LLONG_MAX > INT64_MAX - if (id > INT64_MAX) { - goto bad_id; - } -#endif - return (int64_t)id; - -bad_id: - PyErr_Format(PyExc_RuntimeError, - "unrecognized interpreter ID %O", idobj); - return -1; -} - -static int64_t -convert_interpid_obj(PyObject *arg) -{ - int64_t id = -1; - if (_PyIndex_Check(arg)) { - PyObject *idobj = PyNumber_Long(arg); - if (idobj == NULL) { - return -1; - } - id = pylong_to_interpid(idobj); - Py_DECREF(idobj); - if (id < 0) { - return -1; - } - } - else { - PyErr_Format(PyExc_TypeError, - "interpreter ID must be an int, got %.100s", - Py_TYPE(arg)->tp_name); - return -1; - } - return id; -} - -static PyInterpreterState * -look_up_interp(PyObject *arg) -{ - int64_t id = convert_interpid_obj(arg); - if (id < 0) { - return NULL; - } - return _PyInterpreterState_LookUpID(id); -} - +#define look_up_interp _PyInterpreterState_LookUpIDObject -static PyObject * -interpid_to_pylong(int64_t id) -{ - assert(id < LLONG_MAX); - return PyLong_FromLongLong(id); -} - -static PyObject * -get_interpid_obj(PyInterpreterState *interp) -{ - if (_PyInterpreterState_IDInitref(interp) != 0) { - return NULL; - }; - int64_t id = PyInterpreterState_GetID(interp); - if (id < 0) { - return NULL; - } - return interpid_to_pylong(id); -} static PyObject * _get_current_module(void) @@ -143,6 +56,24 @@ _get_current_module(void) } +static int +is_running_main(PyInterpreterState *interp) +{ + if (_PyInterpreterState_IsRunningMain(interp)) { + return 1; + } + // Unlike with the general C-API, we can be confident that someone + // using this module for the main interpreter is doing so through + // the main program. Thus we can make this extra check. This benefits + // applications that embed Python but haven't been updated yet + // to call_PyInterpreterState_SetRunningMain(). + if (_Py_IsMainInterpreter(interp)) { + return 1; + } + return 0; +} + + /* Cross-interpreter Buffer Views *******************************************/ // XXX Release when the original interpreter is destroyed. @@ -596,7 +527,7 @@ interp_destroy(PyObject *self, PyObject *args, PyObject *kwds) // Ensure the interpreter isn't running. /* XXX We *could* support destroying a running interpreter but aren't going to worry about it for now. */ - if (_PyInterpreterState_IsRunningMain(interp)) { + if (is_running_main(interp)) { PyErr_Format(PyExc_RuntimeError, "interpreter running"); return NULL; } @@ -699,7 +630,7 @@ interp_set___main___attrs(PyObject *self, PyObject *args) } // Look up the interpreter. 
- PyInterpreterState *interp = PyInterpreterID_LookUp(id); + PyInterpreterState *interp = look_up_interp(id); if (interp == NULL) { return NULL; } @@ -1064,7 +995,7 @@ interp_is_running(PyObject *self, PyObject *args, PyObject *kwds) if (interp == NULL) { return NULL; } - if (_PyInterpreterState_IsRunningMain(interp)) { + if (is_running_main(interp)) { Py_RETURN_TRUE; } Py_RETURN_FALSE; diff --git a/Modules/clinic/_opcode.c.h b/Modules/clinic/_opcode.c.h index c7fd0f9f8a7420..fb90fb8e32f918 100644 --- a/Modules/clinic/_opcode.c.h +++ b/Modules/clinic/_opcode.c.h @@ -668,4 +668,64 @@ _opcode_get_intrinsic2_descs(PyObject *module, PyObject *Py_UNUSED(ignored)) { return _opcode_get_intrinsic2_descs_impl(module); } -/*[clinic end generated code: output=a1052bb1deffb7f2 input=a9049054013a1b77]*/ + +PyDoc_STRVAR(_opcode_get_executor__doc__, +"get_executor($module, /, code, offset)\n" +"--\n" +"\n" +"Return the executor object at offset in code if exists, None otherwise."); + +#define _OPCODE_GET_EXECUTOR_METHODDEF \ + {"get_executor", _PyCFunction_CAST(_opcode_get_executor), METH_FASTCALL|METH_KEYWORDS, _opcode_get_executor__doc__}, + +static PyObject * +_opcode_get_executor_impl(PyObject *module, PyObject *code, int offset); + +static PyObject * +_opcode_get_executor(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 2 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(code), &_Py_ID(offset), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"code", "offset", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "get_executor", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + PyObject *code; + int offset; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 2, 0, argsbuf); + if (!args) { + goto exit; + } + code = args[0]; + offset = PyLong_AsInt(args[1]); + if (offset == -1 && PyErr_Occurred()) { + goto exit; + } + return_value = _opcode_get_executor_impl(module, code, offset); + +exit: + return return_value; +} +/*[clinic end generated code: output=2dbb31b041b49c8f input=a9049054013a1b77]*/ diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c index 9807d2e7d48a36..8a1b483eddae35 100644 --- a/Modules/gcmodule.c +++ b/Modules/gcmodule.c @@ -158,17 +158,12 @@ gc_set_threshold_impl(PyObject *module, int threshold0, int group_right_1, { GCState *gcstate = get_gc_state(); - gcstate->generations[0].threshold = threshold0; + gcstate->young.threshold = threshold0; if (group_right_1) { - gcstate->generations[1].threshold = threshold1; + gcstate->old[0].threshold = threshold1; } if (group_right_2) { - gcstate->generations[2].threshold = threshold2; - - /* generations higher than 2 get the same threshold */ - for (int i = 3; i < NUM_GENERATIONS; i++) { - gcstate->generations[i].threshold = gcstate->generations[2].threshold; - } + gcstate->old[1].threshold = threshold2; } Py_RETURN_NONE; } @@ -185,9 +180,9 @@ gc_get_threshold_impl(PyObject *module) { GCState *gcstate = get_gc_state(); return Py_BuildValue("(iii)", - gcstate->generations[0].threshold, - gcstate->generations[1].threshold, - 
gcstate->generations[2].threshold); + gcstate->young.threshold, + gcstate->old[0].threshold, + 0); } /*[clinic input] @@ -207,14 +202,14 @@ gc_get_count_impl(PyObject *module) struct _gc_thread_state *gc = &tstate->gc; // Flush the local allocation count to the global count - _Py_atomic_add_int(&gcstate->generations[0].count, (int)gc->alloc_count); + _Py_atomic_add_int(&gcstate->young.count, (int)gc->alloc_count); gc->alloc_count = 0; #endif return Py_BuildValue("(iii)", - gcstate->generations[0].count, - gcstate->generations[1].count, - gcstate->generations[2].count); + gcstate->young.count, + gcstate->old[gcstate->visited_space].count, + gcstate->old[gcstate->visited_space^1].count); } /*[clinic input] @@ -331,7 +326,7 @@ gc_get_objects_impl(PyObject *module, Py_ssize_t generation) } PyInterpreterState *interp = _PyInterpreterState_GET(); - return _PyGC_GetObjects(interp, generation); + return _PyGC_GetObjects(interp, (int)generation); } /*[clinic input] diff --git a/Modules/md5module.c b/Modules/md5module.c index 7d2b3275f213fd..9cbf11feaa9c32 100644 --- a/Modules/md5module.c +++ b/Modules/md5module.c @@ -51,7 +51,7 @@ typedef struct { // Prevents undefined behavior via multiple threads entering the C API. bool use_mutex; PyMutex mutex; - Hacl_Streaming_MD5_state *hash_state; + Hacl_Hash_MD5_state_t *hash_state; } MD5object; #include "clinic/md5module.c.h" @@ -93,7 +93,7 @@ MD5_traverse(PyObject *ptr, visitproc visit, void *arg) static void MD5_dealloc(MD5object *ptr) { - Hacl_Streaming_MD5_legacy_free(ptr->hash_state); + Hacl_Hash_MD5_free(ptr->hash_state); PyTypeObject *tp = Py_TYPE((PyObject*)ptr); PyObject_GC_UnTrack(ptr); PyObject_GC_Del(ptr); @@ -122,7 +122,7 @@ MD5Type_copy_impl(MD5object *self, PyTypeObject *cls) return NULL; ENTER_HASHLIB(self); - newobj->hash_state = Hacl_Streaming_MD5_legacy_copy(self->hash_state); + newobj->hash_state = Hacl_Hash_MD5_copy(self->hash_state); LEAVE_HASHLIB(self); return (PyObject *)newobj; } @@ -139,7 +139,7 @@ MD5Type_digest_impl(MD5object *self) { unsigned char digest[MD5_DIGESTSIZE]; ENTER_HASHLIB(self); - Hacl_Streaming_MD5_legacy_finish(self->hash_state, digest); + Hacl_Hash_MD5_digest(self->hash_state, digest); LEAVE_HASHLIB(self); return PyBytes_FromStringAndSize((const char *)digest, MD5_DIGESTSIZE); } @@ -156,7 +156,7 @@ MD5Type_hexdigest_impl(MD5object *self) { unsigned char digest[MD5_DIGESTSIZE]; ENTER_HASHLIB(self); - Hacl_Streaming_MD5_legacy_finish(self->hash_state, digest); + Hacl_Hash_MD5_digest(self->hash_state, digest); LEAVE_HASHLIB(self); const char *hexdigits = "0123456789abcdef"; @@ -170,15 +170,15 @@ MD5Type_hexdigest_impl(MD5object *self) return PyUnicode_FromStringAndSize(digest_hex, sizeof(digest_hex)); } -static void update(Hacl_Streaming_MD5_state *state, uint8_t *buf, Py_ssize_t len) { +static void update(Hacl_Hash_MD5_state_t *state, uint8_t *buf, Py_ssize_t len) { #if PY_SSIZE_T_MAX > UINT32_MAX while (len > UINT32_MAX) { - Hacl_Streaming_MD5_legacy_update(state, buf, UINT32_MAX); + Hacl_Hash_MD5_update(state, buf, UINT32_MAX); len -= UINT32_MAX; buf += UINT32_MAX; } #endif - Hacl_Streaming_MD5_legacy_update(state, buf, (uint32_t) len); + Hacl_Hash_MD5_update(state, buf, (uint32_t) len); } /*[clinic input] @@ -302,7 +302,7 @@ _md5_md5_impl(PyObject *module, PyObject *string, int usedforsecurity) return NULL; } - new->hash_state = Hacl_Streaming_MD5_legacy_create_in(); + new->hash_state = Hacl_Hash_MD5_malloc(); if (PyErr_Occurred()) { Py_DECREF(new); diff --git a/Modules/overlapped.c b/Modules/overlapped.c index 
fd40e91d0f50c4..b9881d91ded244 100644 --- a/Modules/overlapped.c +++ b/Modules/overlapped.c @@ -723,6 +723,24 @@ Overlapped_dealloc(OverlappedObject *self) if (!HasOverlappedIoCompleted(&self->overlapped) && self->type != TYPE_NOT_STARTED) { + // NOTE: We should not get here, if we do then something is wrong in + // the IocpProactor or ProactorEventLoop. Since everything uses IOCP if + // the overlapped IO hasn't completed yet then we should not be + // deallocating! + // + // The problem is likely that this OverlappedObject was removed from + // the IocpProactor._cache before it was complete. The _cache holds a + // reference while IO is pending so that it does not get deallocated + // while the kernel has retained the OVERLAPPED structure. + // + // CancelIoEx (likely called from self.cancel()) may have successfully + // completed, but the OVERLAPPED is still in use until either + // HasOverlappedIoCompleted() is true or GetQueuedCompletionStatus has + // returned this OVERLAPPED object. + // + // NOTE: Waiting when IOCP is in use can hang indefinitely, but this + // CancelIoEx is superfluous in that self.cancel() was already called, + // so I've only ever seen this return FALSE with GLE=ERROR_NOT_FOUND Py_BEGIN_ALLOW_THREADS if (CancelIoEx(self->handle, &self->overlapped)) wait = TRUE; @@ -2038,6 +2056,7 @@ overlapped_exec(PyObject *module) WINAPI_CONSTANT(F_DWORD, ERROR_OPERATION_ABORTED); WINAPI_CONSTANT(F_DWORD, ERROR_SEM_TIMEOUT); WINAPI_CONSTANT(F_DWORD, ERROR_PIPE_BUSY); + WINAPI_CONSTANT(F_DWORD, ERROR_PORT_UNREACHABLE); WINAPI_CONSTANT(F_DWORD, INFINITE); WINAPI_CONSTANT(F_HANDLE, INVALID_HANDLE_VALUE); WINAPI_CONSTANT(F_HANDLE, NULL); diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 7b2d3661ee5546..a4b635ef5bf629 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -613,11 +613,16 @@ PyOS_BeforeFork(void) run_at_forkers(interp->before_forkers, 1); _PyImport_AcquireLock(interp); + _PyEval_StopTheWorldAll(&_PyRuntime); + HEAD_LOCK(&_PyRuntime); } void PyOS_AfterFork_Parent(void) { + HEAD_UNLOCK(&_PyRuntime); + _PyEval_StartTheWorldAll(&_PyRuntime); + PyInterpreterState *interp = _PyInterpreterState_GET(); if (_PyImport_ReleaseLock(interp) <= 0) { Py_FatalError("failed releasing import lock after fork"); @@ -632,6 +637,7 @@ PyOS_AfterFork_Child(void) PyStatus status; _PyRuntimeState *runtime = &_PyRuntime; + // re-creates runtime->interpreters.mutex (HEAD_UNLOCK) status = _PyRuntimeState_ReInitThreads(runtime); if (_PyStatus_EXCEPTION(status)) { goto fatal_error; @@ -640,6 +646,7 @@ PyOS_AfterFork_Child(void) PyThreadState *tstate = _PyThreadState_GET(); _Py_EnsureTstateNotNULL(tstate); + assert(tstate->thread_id == PyThread_get_thread_ident()); #ifdef PY_HAVE_THREAD_NATIVE_ID tstate->native_thread_id = PyThread_get_thread_native_id(); #endif @@ -649,11 +656,22 @@ PyOS_AfterFork_Child(void) _Py_qsbr_after_fork((_PyThreadStateImpl *)tstate); #endif + // Ideally we could guarantee tstate is running main. + _PyInterpreterState_ReinitRunningMain(tstate); + status = _PyEval_ReInitThreads(tstate); if (_PyStatus_EXCEPTION(status)) { goto fatal_error; } + // Remove the dead thread states. We "start the world" once we are the only + // thread state left to undo the stop the world call in `PyOS_BeforeFork`. + // That needs to happen before `_PyThreadState_DeleteList`, because that + // may call destructors. 
+ PyThreadState *list = _PyThreadState_RemoveExcept(tstate); + _PyEval_StartTheWorldAll(&_PyRuntime); + _PyThreadState_DeleteList(list); + status = _PyImport_ReInitLock(tstate->interp); if (_PyStatus_EXCEPTION(status)) { goto fatal_error; @@ -7731,10 +7749,15 @@ os_register_at_fork_impl(PyObject *module, PyObject *before, // running in the process. Best effort, silent if unable to count threads. // Constraint: Quick. Never overcounts. Never leaves an error set. // -// This code might do an import, thus acquiring the import lock, which -// PyOS_BeforeFork() also does. As this should only be called from -// the parent process, it is in the same thread so that works. -static void warn_about_fork_with_threads(const char* name) { +// This should only be called from the parent process after +// PyOS_AfterFork_Parent(). +static void +warn_about_fork_with_threads(const char* name) +{ + // It's not safe to issue the warning while the world is stopped, because + // other threads might be holding locks that we need, which would deadlock. + assert(!_PyRuntime.stoptheworld.world_stopped); + // TODO: Consider making an `os` module API to return the current number // of threads in the process. That'd presumably use this platform code but // raise an error rather than using the inaccurate fallback. @@ -7842,7 +7865,7 @@ os_fork1_impl(PyObject *module) pid_t pid; PyInterpreterState *interp = _PyInterpreterState_GET(); - if (interp->finalizing) { + if (_PyInterpreterState_GetFinalizing(interp) != NULL) { PyErr_SetString(PyExc_PythonFinalizationError, "can't fork at interpreter shutdown"); return NULL; @@ -7858,9 +7881,10 @@ os_fork1_impl(PyObject *module) /* child: this clobbers and resets the import lock. */ PyOS_AfterFork_Child(); } else { - warn_about_fork_with_threads("fork1"); /* parent: release the import lock. */ PyOS_AfterFork_Parent(); + // After PyOS_AfterFork_Parent() starts the world to avoid deadlock. + warn_about_fork_with_threads("fork1"); } if (pid == -1) { errno = saved_errno; @@ -7886,7 +7910,7 @@ os_fork_impl(PyObject *module) { pid_t pid; PyInterpreterState *interp = _PyInterpreterState_GET(); - if (interp->finalizing) { + if (_PyInterpreterState_GetFinalizing(interp) != NULL) { PyErr_SetString(PyExc_PythonFinalizationError, "can't fork at interpreter shutdown"); return NULL; @@ -7906,9 +7930,10 @@ os_fork_impl(PyObject *module) /* child: this clobbers and resets the import lock. */ PyOS_AfterFork_Child(); } else { - warn_about_fork_with_threads("fork"); /* parent: release the import lock. */ PyOS_AfterFork_Parent(); + // After PyOS_AfterFork_Parent() starts the world to avoid deadlock. + warn_about_fork_with_threads("fork"); } if (pid == -1) { errno = saved_errno; @@ -8719,7 +8744,7 @@ os_forkpty_impl(PyObject *module) pid_t pid; PyInterpreterState *interp = _PyInterpreterState_GET(); - if (interp->finalizing) { + if (_PyInterpreterState_GetFinalizing(interp) != NULL) { PyErr_SetString(PyExc_PythonFinalizationError, "can't fork at interpreter shutdown"); return NULL; @@ -8737,9 +8762,10 @@ os_forkpty_impl(PyObject *module) /* child: this clobbers and resets the import lock. */ PyOS_AfterFork_Child(); } else { - warn_about_fork_with_threads("forkpty"); /* parent: release the import lock. */ PyOS_AfterFork_Parent(); + // After PyOS_AfterFork_Parent() starts the world to avoid deadlock. 
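
For context on the ordering these hunks establish (stop the world in `PyOS_BeforeFork()`, restart it in `PyOS_AfterFork_Parent()` before any warning is issued), here is a hedged sketch of how an embedding application that calls `fork()` itself is expected to use the public fork hooks. It is illustrative only and not taken from the patch.

```c
/* Sketch, not from the patch: bracketing a direct fork() with the public
 * C-API hooks. With this change, PyOS_BeforeFork() also stops all other
 * Python threads, and PyOS_AfterFork_Parent() restarts them. */
#include <Python.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>
#include <stdio.h>

int
main(void)
{
    Py_Initialize();

    PyOS_BeforeFork();               /* import lock + stop-the-world */
    pid_t pid = fork();
    if (pid == 0) {
        PyOS_AfterFork_Child();      /* re-init runtime state in the child */
        PyRun_SimpleString("print('child is still able to run Python')");
        _exit(0);
    }
    PyOS_AfterFork_Parent();         /* restart the world before doing anything else */
    if (pid > 0) {
        waitpid(pid, NULL, 0);
    }
    else {
        perror("fork");
    }

    return Py_FinalizeEx() < 0 ? 1 : 0;
}
```
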
+ warn_about_fork_with_threads("forkpty"); } if (pid == -1) { return posix_error(); diff --git a/Modules/sha1module.c b/Modules/sha1module.c index eda6b5608d52f7..345a6c215eb167 100644 --- a/Modules/sha1module.c +++ b/Modules/sha1module.c @@ -52,7 +52,7 @@ typedef struct { bool use_mutex; PyMutex mutex; PyThread_type_lock lock; - Hacl_Streaming_SHA1_state *hash_state; + Hacl_Hash_SHA1_state_t *hash_state; } SHA1object; #include "clinic/sha1module.c.h" @@ -95,7 +95,7 @@ SHA1_traverse(PyObject *ptr, visitproc visit, void *arg) static void SHA1_dealloc(SHA1object *ptr) { - Hacl_Streaming_SHA1_legacy_free(ptr->hash_state); + Hacl_Hash_SHA1_free(ptr->hash_state); PyTypeObject *tp = Py_TYPE(ptr); PyObject_GC_UnTrack(ptr); PyObject_GC_Del(ptr); @@ -124,7 +124,7 @@ SHA1Type_copy_impl(SHA1object *self, PyTypeObject *cls) return NULL; ENTER_HASHLIB(self); - newobj->hash_state = Hacl_Streaming_SHA1_legacy_copy(self->hash_state); + newobj->hash_state = Hacl_Hash_SHA1_copy(self->hash_state); LEAVE_HASHLIB(self); return (PyObject *)newobj; } @@ -141,7 +141,7 @@ SHA1Type_digest_impl(SHA1object *self) { unsigned char digest[SHA1_DIGESTSIZE]; ENTER_HASHLIB(self); - Hacl_Streaming_SHA1_legacy_finish(self->hash_state, digest); + Hacl_Hash_SHA1_digest(self->hash_state, digest); LEAVE_HASHLIB(self); return PyBytes_FromStringAndSize((const char *)digest, SHA1_DIGESTSIZE); } @@ -158,20 +158,20 @@ SHA1Type_hexdigest_impl(SHA1object *self) { unsigned char digest[SHA1_DIGESTSIZE]; ENTER_HASHLIB(self); - Hacl_Streaming_SHA1_legacy_finish(self->hash_state, digest); + Hacl_Hash_SHA1_digest(self->hash_state, digest); LEAVE_HASHLIB(self); return _Py_strhex((const char *)digest, SHA1_DIGESTSIZE); } -static void update(Hacl_Streaming_SHA1_state *state, uint8_t *buf, Py_ssize_t len) { +static void update(Hacl_Hash_SHA1_state_t *state, uint8_t *buf, Py_ssize_t len) { #if PY_SSIZE_T_MAX > UINT32_MAX while (len > UINT32_MAX) { - Hacl_Streaming_SHA1_legacy_update(state, buf, UINT32_MAX); + Hacl_Hash_SHA1_update(state, buf, UINT32_MAX); len -= UINT32_MAX; buf += UINT32_MAX; } #endif - Hacl_Streaming_SHA1_legacy_update(state, buf, (uint32_t) len); + Hacl_Hash_SHA1_update(state, buf, (uint32_t) len); } /*[clinic input] @@ -295,7 +295,7 @@ _sha1_sha1_impl(PyObject *module, PyObject *string, int usedforsecurity) return NULL; } - new->hash_state = Hacl_Streaming_SHA1_legacy_create_in(); + new->hash_state = Hacl_Hash_SHA1_malloc(); if (PyErr_Occurred()) { Py_DECREF(new); diff --git a/Modules/sha2module.c b/Modules/sha2module.c index 968493ba51b50d..60be4228a00a03 100644 --- a/Modules/sha2module.c +++ b/Modules/sha2module.c @@ -55,7 +55,7 @@ typedef struct { // Prevents undefined behavior via multiple threads entering the C API. bool use_mutex; PyMutex mutex; - Hacl_Streaming_SHA2_state_sha2_256 *state; + Hacl_Hash_SHA2_state_t_256 *state; } SHA256object; typedef struct { @@ -64,7 +64,7 @@ typedef struct { // Prevents undefined behavior via multiple threads entering the C API. 
bool use_mutex; PyMutex mutex; - Hacl_Streaming_SHA2_state_sha2_512 *state; + Hacl_Hash_SHA2_state_t_512 *state; } SHA512object; #include "clinic/sha2module.c.h" @@ -89,13 +89,13 @@ sha2_get_state(PyObject *module) static void SHA256copy(SHA256object *src, SHA256object *dest) { dest->digestsize = src->digestsize; - dest->state = Hacl_Streaming_SHA2_copy_256(src->state); + dest->state = Hacl_Hash_SHA2_copy_256(src->state); } static void SHA512copy(SHA512object *src, SHA512object *dest) { dest->digestsize = src->digestsize; - dest->state = Hacl_Streaming_SHA2_copy_512(src->state); + dest->state = Hacl_Hash_SHA2_copy_512(src->state); } static SHA256object * @@ -166,7 +166,7 @@ SHA2_traverse(PyObject *ptr, visitproc visit, void *arg) static void SHA256_dealloc(SHA256object *ptr) { - Hacl_Streaming_SHA2_free_256(ptr->state); + Hacl_Hash_SHA2_free_256(ptr->state); PyTypeObject *tp = Py_TYPE(ptr); PyObject_GC_UnTrack(ptr); PyObject_GC_Del(ptr); @@ -176,7 +176,7 @@ SHA256_dealloc(SHA256object *ptr) static void SHA512_dealloc(SHA512object *ptr) { - Hacl_Streaming_SHA2_free_512(ptr->state); + Hacl_Hash_SHA2_free_512(ptr->state); PyTypeObject *tp = Py_TYPE(ptr); PyObject_GC_UnTrack(ptr); PyObject_GC_Del(ptr); @@ -186,34 +186,34 @@ SHA512_dealloc(SHA512object *ptr) /* HACL* takes a uint32_t for the length of its parameter, but Py_ssize_t can be * 64 bits so we loop in <4gig chunks when needed. */ -static void update_256(Hacl_Streaming_SHA2_state_sha2_256 *state, uint8_t *buf, Py_ssize_t len) { +static void update_256(Hacl_Hash_SHA2_state_t_256 *state, uint8_t *buf, Py_ssize_t len) { /* Note: we explicitly ignore the error code on the basis that it would take > * 1 billion years to overflow the maximum admissible length for SHA2-256 * (namely, 2^61-1 bytes). */ #if PY_SSIZE_T_MAX > UINT32_MAX while (len > UINT32_MAX) { - Hacl_Streaming_SHA2_update_256(state, buf, UINT32_MAX); + Hacl_Hash_SHA2_update_256(state, buf, UINT32_MAX); len -= UINT32_MAX; buf += UINT32_MAX; } #endif /* Cast to uint32_t is safe: len <= UINT32_MAX at this point. */ - Hacl_Streaming_SHA2_update_256(state, buf, (uint32_t) len); + Hacl_Hash_SHA2_update_256(state, buf, (uint32_t) len); } -static void update_512(Hacl_Streaming_SHA2_state_sha2_512 *state, uint8_t *buf, Py_ssize_t len) { +static void update_512(Hacl_Hash_SHA2_state_t_512 *state, uint8_t *buf, Py_ssize_t len) { /* Note: we explicitly ignore the error code on the basis that it would take > * 1 billion years to overflow the maximum admissible length for this API * (namely, 2^64-1 bytes). */ #if PY_SSIZE_T_MAX > UINT32_MAX while (len > UINT32_MAX) { - Hacl_Streaming_SHA2_update_512(state, buf, UINT32_MAX); + Hacl_Hash_SHA2_update_512(state, buf, UINT32_MAX); len -= UINT32_MAX; buf += UINT32_MAX; } #endif /* Cast to uint32_t is safe: len <= UINT32_MAX at this point. */ - Hacl_Streaming_SHA2_update_512(state, buf, (uint32_t) len); + Hacl_Hash_SHA2_update_512(state, buf, (uint32_t) len); } @@ -296,7 +296,7 @@ SHA256Type_digest_impl(SHA256object *self) ENTER_HASHLIB(self); // HACL* performs copies under the hood so that self->state remains valid // after this call. - Hacl_Streaming_SHA2_finish_256(self->state, digest); + Hacl_Hash_SHA2_digest_256(self->state, digest); LEAVE_HASHLIB(self); return PyBytes_FromStringAndSize((const char *)digest, self->digestsize); } @@ -316,7 +316,7 @@ SHA512Type_digest_impl(SHA512object *self) ENTER_HASHLIB(self); // HACL* performs copies under the hood so that self->state remains valid // after this call. 
- Hacl_Streaming_SHA2_finish_512(self->state, digest); + Hacl_Hash_SHA2_digest_512(self->state, digest); LEAVE_HASHLIB(self); return PyBytes_FromStringAndSize((const char *)digest, self->digestsize); } @@ -334,7 +334,7 @@ SHA256Type_hexdigest_impl(SHA256object *self) uint8_t digest[SHA256_DIGESTSIZE]; assert(self->digestsize <= SHA256_DIGESTSIZE); ENTER_HASHLIB(self); - Hacl_Streaming_SHA2_finish_256(self->state, digest); + Hacl_Hash_SHA2_digest_256(self->state, digest); LEAVE_HASHLIB(self); return _Py_strhex((const char *)digest, self->digestsize); } @@ -352,7 +352,7 @@ SHA512Type_hexdigest_impl(SHA512object *self) uint8_t digest[SHA512_DIGESTSIZE]; assert(self->digestsize <= SHA512_DIGESTSIZE); ENTER_HASHLIB(self); - Hacl_Streaming_SHA2_finish_512(self->state, digest); + Hacl_Hash_SHA2_digest_512(self->state, digest); LEAVE_HASHLIB(self); return _Py_strhex((const char *)digest, self->digestsize); } @@ -597,7 +597,7 @@ _sha2_sha256_impl(PyObject *module, PyObject *string, int usedforsecurity) return NULL; } - new->state = Hacl_Streaming_SHA2_create_in_256(); + new->state = Hacl_Hash_SHA2_malloc_256(); new->digestsize = 32; if (PyErr_Occurred()) { @@ -651,7 +651,7 @@ _sha2_sha224_impl(PyObject *module, PyObject *string, int usedforsecurity) return NULL; } - new->state = Hacl_Streaming_SHA2_create_in_224(); + new->state = Hacl_Hash_SHA2_malloc_224(); new->digestsize = 28; if (PyErr_Occurred()) { @@ -705,7 +705,7 @@ _sha2_sha512_impl(PyObject *module, PyObject *string, int usedforsecurity) return NULL; } - new->state = Hacl_Streaming_SHA2_create_in_512(); + new->state = Hacl_Hash_SHA2_malloc_512(); new->digestsize = 64; if (PyErr_Occurred()) { @@ -758,7 +758,7 @@ _sha2_sha384_impl(PyObject *module, PyObject *string, int usedforsecurity) return NULL; } - new->state = Hacl_Streaming_SHA2_create_in_384(); + new->state = Hacl_Hash_SHA2_malloc_384(); new->digestsize = 48; if (PyErr_Occurred()) { diff --git a/Modules/sha3module.c b/Modules/sha3module.c index d9d2f6c385a68b..c30e924a7072f7 100644 --- a/Modules/sha3module.c +++ b/Modules/sha3module.c @@ -63,7 +63,7 @@ typedef struct { // Prevents undefined behavior via multiple threads entering the C API. bool use_mutex; PyMutex mutex; - Hacl_Streaming_Keccak_state *hash_state; + Hacl_Hash_SHA3_state_t *hash_state; } SHA3object; #include "clinic/sha3module.c.h" @@ -81,18 +81,18 @@ newSHA3object(PyTypeObject *type) return newobj; } -static void sha3_update(Hacl_Streaming_Keccak_state *state, uint8_t *buf, Py_ssize_t len) { +static void sha3_update(Hacl_Hash_SHA3_state_t *state, uint8_t *buf, Py_ssize_t len) { /* Note: we explicitly ignore the error code on the basis that it would take > * 1 billion years to hash more than 2^64 bytes. */ #if PY_SSIZE_T_MAX > UINT32_MAX while (len > UINT32_MAX) { - Hacl_Streaming_Keccak_update(state, buf, UINT32_MAX); + Hacl_Hash_SHA3_update(state, buf, UINT32_MAX); len -= UINT32_MAX; buf += UINT32_MAX; } #endif /* Cast to uint32_t is safe: len <= UINT32_MAX at this point. 
*/ - Hacl_Streaming_Keccak_update(state, buf, (uint32_t) len); + Hacl_Hash_SHA3_update(state, buf, (uint32_t) len); } /*[clinic input] @@ -120,17 +120,17 @@ py_sha3_new_impl(PyTypeObject *type, PyObject *data, int usedforsecurity) assert(state != NULL); if (type == state->sha3_224_type) { - self->hash_state = Hacl_Streaming_Keccak_malloc(Spec_Hash_Definitions_SHA3_224); + self->hash_state = Hacl_Hash_SHA3_malloc(Spec_Hash_Definitions_SHA3_224); } else if (type == state->sha3_256_type) { - self->hash_state = Hacl_Streaming_Keccak_malloc(Spec_Hash_Definitions_SHA3_256); + self->hash_state = Hacl_Hash_SHA3_malloc(Spec_Hash_Definitions_SHA3_256); } else if (type == state->sha3_384_type) { - self->hash_state = Hacl_Streaming_Keccak_malloc(Spec_Hash_Definitions_SHA3_384); + self->hash_state = Hacl_Hash_SHA3_malloc(Spec_Hash_Definitions_SHA3_384); } else if (type == state->sha3_512_type) { - self->hash_state = Hacl_Streaming_Keccak_malloc(Spec_Hash_Definitions_SHA3_512); + self->hash_state = Hacl_Hash_SHA3_malloc(Spec_Hash_Definitions_SHA3_512); } else if (type == state->shake_128_type) { - self->hash_state = Hacl_Streaming_Keccak_malloc(Spec_Hash_Definitions_Shake128); + self->hash_state = Hacl_Hash_SHA3_malloc(Spec_Hash_Definitions_Shake128); } else if (type == state->shake_256_type) { - self->hash_state = Hacl_Streaming_Keccak_malloc(Spec_Hash_Definitions_Shake256); + self->hash_state = Hacl_Hash_SHA3_malloc(Spec_Hash_Definitions_Shake256); } else { PyErr_BadInternalCall(); goto error; @@ -169,7 +169,7 @@ py_sha3_new_impl(PyTypeObject *type, PyObject *data, int usedforsecurity) static void SHA3_dealloc(SHA3object *self) { - Hacl_Streaming_Keccak_free(self->hash_state); + Hacl_Hash_SHA3_free(self->hash_state); PyTypeObject *tp = Py_TYPE(self); PyObject_Free(self); Py_DECREF(tp); @@ -195,7 +195,7 @@ _sha3_sha3_224_copy_impl(SHA3object *self) return NULL; } ENTER_HASHLIB(self); - newobj->hash_state = Hacl_Streaming_Keccak_copy(self->hash_state); + newobj->hash_state = Hacl_Hash_SHA3_copy(self->hash_state); LEAVE_HASHLIB(self); return (PyObject *)newobj; } @@ -215,10 +215,10 @@ _sha3_sha3_224_digest_impl(SHA3object *self) // This function errors out if the algorithm is Shake. Here, we know this // not to be the case, and therefore do not perform error checking. 
ENTER_HASHLIB(self); - Hacl_Streaming_Keccak_finish(self->hash_state, digest); + Hacl_Hash_SHA3_digest(self->hash_state, digest); LEAVE_HASHLIB(self); return PyBytes_FromStringAndSize((const char *)digest, - Hacl_Streaming_Keccak_hash_len(self->hash_state)); + Hacl_Hash_SHA3_hash_len(self->hash_state)); } @@ -234,10 +234,10 @@ _sha3_sha3_224_hexdigest_impl(SHA3object *self) { unsigned char digest[SHA3_MAX_DIGESTSIZE]; ENTER_HASHLIB(self); - Hacl_Streaming_Keccak_finish(self->hash_state, digest); + Hacl_Hash_SHA3_digest(self->hash_state, digest); LEAVE_HASHLIB(self); return _Py_strhex((const char *)digest, - Hacl_Streaming_Keccak_hash_len(self->hash_state)); + Hacl_Hash_SHA3_hash_len(self->hash_state)); } @@ -288,7 +288,7 @@ static PyMethodDef SHA3_methods[] = { static PyObject * SHA3_get_block_size(SHA3object *self, void *closure) { - uint32_t rate = Hacl_Streaming_Keccak_block_len(self->hash_state); + uint32_t rate = Hacl_Hash_SHA3_block_len(self->hash_state); return PyLong_FromLong(rate); } @@ -324,17 +324,17 @@ static PyObject * SHA3_get_digest_size(SHA3object *self, void *closure) { // Preserving previous behavior: variable-length algorithms return 0 - if (Hacl_Streaming_Keccak_is_shake(self->hash_state)) + if (Hacl_Hash_SHA3_is_shake(self->hash_state)) return PyLong_FromLong(0); else - return PyLong_FromLong(Hacl_Streaming_Keccak_hash_len(self->hash_state)); + return PyLong_FromLong(Hacl_Hash_SHA3_hash_len(self->hash_state)); } static PyObject * SHA3_get_capacity_bits(SHA3object *self, void *closure) { - uint32_t rate = Hacl_Streaming_Keccak_block_len(self->hash_state) * 8; + uint32_t rate = Hacl_Hash_SHA3_block_len(self->hash_state) * 8; int capacity = 1600 - rate; return PyLong_FromLong(capacity); } @@ -343,7 +343,7 @@ SHA3_get_capacity_bits(SHA3object *self, void *closure) static PyObject * SHA3_get_rate_bits(SHA3object *self, void *closure) { - uint32_t rate = Hacl_Streaming_Keccak_block_len(self->hash_state) * 8; + uint32_t rate = Hacl_Hash_SHA3_block_len(self->hash_state) * 8; return PyLong_FromLong(rate); } @@ -436,7 +436,7 @@ _SHAKE_digest(SHA3object *self, unsigned long digestlen, int hex) * - the output length is zero -- we follow the existing behavior and return * an empty digest, without raising an error */ if (digestlen > 0) { - Hacl_Streaming_Keccak_squeeze(self->hash_state, digest, digestlen); + Hacl_Hash_SHA3_squeeze(self->hash_state, digest, digestlen); } if (hex) { result = _Py_strhex((const char *)digest, digestlen); diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c index 26227dd251122d..256e01f54f0782 100644 --- a/Objects/bytesobject.c +++ b/Objects/bytesobject.c @@ -3025,11 +3025,9 @@ PyBytes_ConcatAndDel(PyObject **pv, PyObject *w) /* The following function breaks the notion that bytes are immutable: - it changes the size of a bytes object. We get away with this only if there - is only one module referencing the object. You can also think of it + it changes the size of a bytes object. You can think of it as creating a new bytes object and destroying the old one, only - more efficiently. In any case, don't use this if the bytes object may - already be known to some other part of the code... + more efficiently. Note that if there's not enough memory to resize the bytes object, the original bytes object at *pv is deallocated, *pv is set to NULL, an "out of memory" exception is set, and -1 is returned. 
Else (on success) 0 is @@ -3045,28 +3043,40 @@ _PyBytes_Resize(PyObject **pv, Py_ssize_t newsize) PyBytesObject *sv; v = *pv; if (!PyBytes_Check(v) || newsize < 0) { - goto error; + *pv = 0; + Py_DECREF(v); + PyErr_BadInternalCall(); + return -1; } - if (Py_SIZE(v) == newsize) { + Py_ssize_t oldsize = PyBytes_GET_SIZE(v); + if (oldsize == newsize) { /* return early if newsize equals to v->ob_size */ return 0; } - if (Py_SIZE(v) == 0) { - if (newsize == 0) { - return 0; - } + if (oldsize == 0) { *pv = _PyBytes_FromSize(newsize, 0); Py_DECREF(v); return (*pv == NULL) ? -1 : 0; } - if (Py_REFCNT(v) != 1) { - goto error; - } if (newsize == 0) { *pv = bytes_get_empty(); Py_DECREF(v); return 0; } + if (Py_REFCNT(v) != 1) { + if (oldsize < newsize) { + *pv = _PyBytes_FromSize(newsize, 0); + if (*pv) { + memcpy(PyBytes_AS_STRING(*pv), PyBytes_AS_STRING(v), oldsize); + } + } + else { + *pv = PyBytes_FromStringAndSize(PyBytes_AS_STRING(v), newsize); + } + Py_DECREF(v); + return (*pv == NULL) ? -1 : 0; + } + #ifdef Py_TRACE_REFS _Py_ForgetReference(v); #endif @@ -3089,11 +3099,6 @@ _Py_COMP_DIAG_IGNORE_DEPR_DECLS sv->ob_shash = -1; /* invalidate cached hash value */ _Py_COMP_DIAG_POP return 0; -error: - *pv = 0; - Py_DECREF(v); - PyErr_BadInternalCall(); - return -1; } diff --git a/Objects/classobject.c b/Objects/classobject.c index d7e520f556d9a0..9cbb9442c6059c 100644 --- a/Objects/classobject.c +++ b/Objects/classobject.c @@ -301,7 +301,7 @@ static Py_hash_t method_hash(PyMethodObject *a) { Py_hash_t x, y; - x = _Py_HashPointer(a->im_self); + x = PyObject_GenericHash(a->im_self); y = PyObject_Hash(a->im_func); if (y == -1) return -1; diff --git a/Objects/codeobject.c b/Objects/codeobject.c index 3df733eb4ee578..f14ff73394b168 100644 --- a/Objects/codeobject.c +++ b/Objects/codeobject.c @@ -1710,6 +1710,7 @@ code_dealloc(PyCodeObject *co) } Py_SET_REFCNT(co, 0); + _PyFunction_ClearCodeByVersion(co->co_version); if (co->co_extra != NULL) { PyInterpreterState *interp = _PyInterpreterState_GET(); _PyCodeObjectExtra *co_extra = co->co_extra; @@ -2348,49 +2349,3 @@ _PyCode_ConstantKey(PyObject *op) } return key; } - -void -_PyStaticCode_Fini(PyCodeObject *co) -{ - if (co->co_executors != NULL) { - clear_executors(co); - } - deopt_code(co, _PyCode_CODE(co)); - PyMem_Free(co->co_extra); - if (co->_co_cached != NULL) { - Py_CLEAR(co->_co_cached->_co_code); - Py_CLEAR(co->_co_cached->_co_cellvars); - Py_CLEAR(co->_co_cached->_co_freevars); - Py_CLEAR(co->_co_cached->_co_varnames); - PyMem_Free(co->_co_cached); - co->_co_cached = NULL; - } - co->co_extra = NULL; - if (co->co_weakreflist != NULL) { - PyObject_ClearWeakRefs((PyObject *)co); - co->co_weakreflist = NULL; - } - free_monitoring_data(co->_co_monitoring); - co->_co_monitoring = NULL; -} - -int -_PyStaticCode_Init(PyCodeObject *co) -{ - int res = intern_strings(co->co_names); - if (res < 0) { - return -1; - } - res = intern_string_constants(co->co_consts, NULL); - if (res < 0) { - return -1; - } - res = intern_strings(co->co_localsplusnames); - if (res < 0) { - return -1; - } - _PyCode_Quicken(co); - return 0; -} - -#define MAX_CODE_UNITS_PER_LOC_ENTRY 8 diff --git a/Objects/descrobject.c b/Objects/descrobject.c index df546a090c28e4..3423f152ce862d 100644 --- a/Objects/descrobject.c +++ b/Objects/descrobject.c @@ -1346,7 +1346,7 @@ wrapper_hash(PyObject *self) { wrapperobject *wp = (wrapperobject *)self; Py_hash_t x, y; - x = _Py_HashPointer(wp->self); + x = PyObject_GenericHash(wp->self); y = _Py_HashPointer(wp->descr); x = x ^ y; if (x == -1) diff 
--git a/Objects/fileobject.c b/Objects/fileobject.c index e30ab952dff571..bae49d367b65ee 100644 --- a/Objects/fileobject.c +++ b/Objects/fileobject.c @@ -80,13 +80,7 @@ PyFile_GetLine(PyObject *f, int n) "EOF when reading a line"); } else if (s[len-1] == '\n') { - if (Py_REFCNT(result) == 1) - _PyBytes_Resize(&result, len-1); - else { - PyObject *v; - v = PyBytes_FromStringAndSize(s, len-1); - Py_SETREF(result, v); - } + (void) _PyBytes_Resize(&result, len-1); } } if (n < 0 && result != NULL && PyUnicode_Check(result)) { diff --git a/Objects/frameobject.c b/Objects/frameobject.c index a914c61aac2fd5..d55c246d80dd6a 100644 --- a/Objects/frameobject.c +++ b/Objects/frameobject.c @@ -1140,7 +1140,7 @@ frame_init_get_vars(_PyInterpreterFrame *frame) /* Free vars have not been initialized -- Do that */ PyObject *closure = ((PyFunctionObject *)frame->f_funcobj)->func_closure; - int offset = PyCode_GetFirstFree(co); + int offset = PyUnstable_Code_GetFirstFree(co); for (int i = 0; i < co->co_nfreevars; ++i) { PyObject *o = PyTuple_GET_ITEM(closure, i); frame->localsplus[offset + i] = Py_NewRef(o); diff --git a/Objects/funcobject.c b/Objects/funcobject.c index a506166916de48..a3c0800e7891d3 100644 --- a/Objects/funcobject.c +++ b/Objects/funcobject.c @@ -218,43 +218,61 @@ PyFunction_NewWithQualName(PyObject *code, PyObject *globals, PyObject *qualname } /* -Function versions ------------------ +(This is purely internal documentation. There are no public APIs here.) -Function versions are used to detect when a function object has been -updated, invalidating inline cache data used by the `CALL` bytecode -(notably `CALL_PY_EXACT_ARGS` and a few other `CALL` specializations). +Function (and code) versions +---------------------------- -They are also used by the Tier 2 superblock creation code to find -the function being called (and from there the code object). +The Tier 1 specializer generates CALL variants that can be invalidated +by changes to critical function attributes: -How does a function's `func_version` field get initialized? +- __code__ +- __defaults__ +- __kwdefaults__ +- __closure__ -- `PyFunction_New` and friends initialize it to 0. -- The `MAKE_FUNCTION` instruction sets it from the code's `co_version`. -- It is reset to 0 when various attributes like `__code__` are set. -- A new version is allocated by `_PyFunction_GetVersionForCurrentState` - when the specializer needs a version and the version is 0. +For this purpose function objects have a 32-bit func_version member +that the specializer writes to the specialized instruction's inline +cache and which is checked by a guard on the specialized instructions. -The latter allocates versions using a counter in the interpreter state, -`interp->func_state.next_version`. -When the counter wraps around to 0, no more versions are allocated. -There is one other special case: functions with a non-standard -`vectorcall` field are not given a version. +The MAKE_FUNCTION bytecode sets func_version from the code object's +co_version field. The latter is initialized from a counter in the +interpreter state (interp->func_state.next_version) and never changes. +When this counter overflows, it remains zero and the specializer loses +the ability to specialize calls to new functions. -When the function version is 0, the `CALL` bytecode is not specialized. +The func_version is reset to zero when any of the critical attributes +is modified; after this point the specializer will no longer specialize +calls to this function, and the guard will always fail. 
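To make the guard described above concrete: the specializer writes a nonzero `func_version` into the inline cache at specialization time, and the specialized instruction compares it against the live function before using the cached data. This is only an illustrative sketch of that comparison, not the generated interpreter code; the helper name is invented:

```c
#include <stdint.h>
#include "Python.h"

/* Sketch: the version guard performed by a specialized CALL. The cached
 * version is always nonzero when written by the specializer, so a function
 * whose __code__, __defaults__, __kwdefaults__ or __closure__ has been
 * replaced (func_version reset to 0) can never pass the guard, and the
 * instruction deoptimizes to the generic CALL path. */
static inline int
call_guard_passes(PyFunctionObject *func, uint32_t cached_version)
{
    return func->func_version == cached_version;
}
```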
-Code object versions --------------------- +The function and code version cache +----------------------------------- -So where to code objects get their `co_version`? -They share the same counter, `interp->func_state.next_version`. +The Tier 2 optimizer now has a problem, since it needs to find the +function and code objects given only the version number from the inline +cache. Our solution is to maintain a cache mapping version numbers to +function and code objects. To limit the cache size we could hash +the version number, but for now we simply use it modulo the table size. + +There are some corner cases (e.g. generator expressions) where we will +be unable to find the function object in the cache but we can still +find the code object. For this reason the cache stores both the +function object and the code object. + +The cache doesn't contain strong references; cache entries are +invalidated whenever the function or code object is deallocated. + +Invariants +---------- + +These should hold at any time except when one of the cache-mutating +functions is running. + +- For any slot s at index i: + - s->func == NULL or s->func->func_version % FUNC_VERSION_CACHE_SIZE == i + - s->code == NULL or s->code->co_version % FUNC_VERSION_CACHE_SIZE == i + if s->func != NULL, then s->func->func_code == s->code -Code objects get a new `co_version` allocated from this counter upon -creation. Since code objects are nominally immutable, `co_version` can -not be invalidated. The only way it can be 0 is when 2**32 or more -code objects have been created during the process's lifetime. -(The counter isn't reset by `fork()`, extending the lifetime.) */ void @@ -262,28 +280,61 @@ _PyFunction_SetVersion(PyFunctionObject *func, uint32_t version) { PyInterpreterState *interp = _PyInterpreterState_GET(); if (func->func_version != 0) { - PyFunctionObject **slot = + struct _func_version_cache_item *slot = interp->func_state.func_version_cache + (func->func_version % FUNC_VERSION_CACHE_SIZE); - if (*slot == func) { - *slot = NULL; + if (slot->func == func) { + slot->func = NULL; + // Leave slot->code alone, there may be use for it. 
} } func->func_version = version; if (version != 0) { - interp->func_state.func_version_cache[ - version % FUNC_VERSION_CACHE_SIZE] = func; + struct _func_version_cache_item *slot = + interp->func_state.func_version_cache + + (version % FUNC_VERSION_CACHE_SIZE); + slot->func = func; + slot->code = func->func_code; + } +} + +void +_PyFunction_ClearCodeByVersion(uint32_t version) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _func_version_cache_item *slot = + interp->func_state.func_version_cache + + (version % FUNC_VERSION_CACHE_SIZE); + if (slot->code) { + assert(PyCode_Check(slot->code)); + PyCodeObject *code = (PyCodeObject *)slot->code; + if (code->co_version == version) { + slot->code = NULL; + slot->func = NULL; + } } } PyFunctionObject * -_PyFunction_LookupByVersion(uint32_t version) +_PyFunction_LookupByVersion(uint32_t version, PyObject **p_code) { PyInterpreterState *interp = _PyInterpreterState_GET(); - PyFunctionObject *func = interp->func_state.func_version_cache[ - version % FUNC_VERSION_CACHE_SIZE]; - if (func != NULL && func->func_version == version) { - return func; + struct _func_version_cache_item *slot = + interp->func_state.func_version_cache + + (version % FUNC_VERSION_CACHE_SIZE); + if (slot->code) { + assert(PyCode_Check(slot->code)); + PyCodeObject *code = (PyCodeObject *)slot->code; + if (code->co_version == version) { + *p_code = slot->code; + } + } + else { + *p_code = NULL; + } + if (slot->func && slot->func->func_version == version) { + assert(slot->func->func_code == slot->code); + return slot->func; } return NULL; } @@ -291,19 +342,7 @@ _PyFunction_LookupByVersion(uint32_t version) uint32_t _PyFunction_GetVersionForCurrentState(PyFunctionObject *func) { - if (func->func_version != 0) { - return func->func_version; - } - if (func->vectorcall != _PyFunction_Vectorcall) { - return 0; - } - PyInterpreterState *interp = _PyInterpreterState_GET(); - if (interp->func_state.next_version == 0) { - return 0; - } - uint32_t v = interp->func_state.next_version++; - _PyFunction_SetVersion(func, v); - return v; + return func->func_version; } PyObject * @@ -507,7 +546,6 @@ PyFunction_SetAnnotations(PyObject *op, PyObject *annotations) "non-dict annotations"); return -1; } - _PyFunction_SetVersion((PyFunctionObject *)op, 0); Py_XSETREF(((PyFunctionObject *)op)->func_annotations, annotations); return 0; } @@ -731,7 +769,6 @@ func_set_annotations(PyFunctionObject *op, PyObject *value, void *Py_UNUSED(igno "__annotations__ must be set to a dict object"); return -1; } - _PyFunction_SetVersion(op, 0); Py_XSETREF(op->func_annotations, Py_XNewRef(value)); return 0; } diff --git a/Objects/interpreteridobject.c b/Objects/interpreteridobject.c deleted file mode 100644 index 16e27b64c0c9c2..00000000000000 --- a/Objects/interpreteridobject.c +++ /dev/null @@ -1,294 +0,0 @@ -/* InterpreterID object */ - -#include "Python.h" -#include "pycore_abstract.h" // _PyIndex_Check() -#include "pycore_interp.h" // _PyInterpreterState_LookUpID() -#include "interpreteridobject.h" - - -typedef struct interpid { - PyObject_HEAD - int64_t id; -} interpid; - -static interpid * -newinterpid(PyTypeObject *cls, int64_t id, int force) -{ - PyInterpreterState *interp = _PyInterpreterState_LookUpID(id); - if (interp == NULL) { - if (force) { - PyErr_Clear(); - } - else { - return NULL; - } - } - - if (interp != NULL) { - if (_PyInterpreterState_IDIncref(interp) < 0) { - return NULL; - } - } - - interpid *self = PyObject_New(interpid, cls); - if (self == NULL) { - if (interp != NULL) { - 
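The reworked `_PyFunction_LookupByVersion` above reports the code object separately through `p_code`, so the Tier 2 optimizer can still recover the code object when only the function has dropped out of the cache (the generator-expression case mentioned in the comments). A hedged consumer-side sketch, assuming the internal declaration from `pycore_function.h`; the branch bodies are placeholders:

```c
#include "Python.h"
#include "pycore_function.h"   // _PyFunction_LookupByVersion()

/* Sketch: resolving an inline-cache version number back to objects.
 * The cache holds no strong references, so both results are borrowed. */
PyObject *code = NULL;
PyFunctionObject *func = _PyFunction_LookupByVersion(version, &code);
if (func != NULL) {
    /* Full information: the function and, per the invariant, its code. */
    assert((PyObject *)PyFunction_GET_CODE(func) == code);
}
else if (code != NULL) {
    /* Only the code object survived in the slot -- still usable. */
}
else {
    /* Version not cached (or stale): give up on this call site. */
}
```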
_PyInterpreterState_IDDecref(interp); - } - return NULL; - } - self->id = id; - - return self; -} - -static int -interp_id_converter(PyObject *arg, void *ptr) -{ - int64_t id; - if (PyObject_TypeCheck(arg, &PyInterpreterID_Type)) { - id = ((interpid *)arg)->id; - } - else if (_PyIndex_Check(arg)) { - id = PyLong_AsLongLong(arg); - if (id == -1 && PyErr_Occurred()) { - return 0; - } - if (id < 0) { - PyErr_Format(PyExc_ValueError, - "interpreter ID must be a non-negative int, got %R", arg); - return 0; - } - } - else { - PyErr_Format(PyExc_TypeError, - "interpreter ID must be an int, got %.100s", - Py_TYPE(arg)->tp_name); - return 0; - } - *(int64_t *)ptr = id; - return 1; -} - -static PyObject * -interpid_new(PyTypeObject *cls, PyObject *args, PyObject *kwds) -{ - static char *kwlist[] = {"id", "force", NULL}; - int64_t id; - int force = 0; - if (!PyArg_ParseTupleAndKeywords(args, kwds, - "O&|$p:InterpreterID.__init__", kwlist, - interp_id_converter, &id, &force)) { - return NULL; - } - - return (PyObject *)newinterpid(cls, id, force); -} - -static void -interpid_dealloc(PyObject *v) -{ - int64_t id = ((interpid *)v)->id; - PyInterpreterState *interp = _PyInterpreterState_LookUpID(id); - if (interp != NULL) { - _PyInterpreterState_IDDecref(interp); - } - else { - // already deleted - PyErr_Clear(); - } - Py_TYPE(v)->tp_free(v); -} - -static PyObject * -interpid_repr(PyObject *self) -{ - PyTypeObject *type = Py_TYPE(self); - const char *name = _PyType_Name(type); - interpid *id = (interpid *)self; - return PyUnicode_FromFormat("%s(%" PRId64 ")", name, id->id); -} - -static PyObject * -interpid_str(PyObject *self) -{ - interpid *id = (interpid *)self; - return PyUnicode_FromFormat("%" PRId64 "", id->id); -} - -static PyObject * -interpid_int(PyObject *self) -{ - interpid *id = (interpid *)self; - return PyLong_FromLongLong(id->id); -} - -static PyNumberMethods interpid_as_number = { - 0, /* nb_add */ - 0, /* nb_subtract */ - 0, /* nb_multiply */ - 0, /* nb_remainder */ - 0, /* nb_divmod */ - 0, /* nb_power */ - 0, /* nb_negative */ - 0, /* nb_positive */ - 0, /* nb_absolute */ - 0, /* nb_bool */ - 0, /* nb_invert */ - 0, /* nb_lshift */ - 0, /* nb_rshift */ - 0, /* nb_and */ - 0, /* nb_xor */ - 0, /* nb_or */ - (unaryfunc)interpid_int, /* nb_int */ - 0, /* nb_reserved */ - 0, /* nb_float */ - - 0, /* nb_inplace_add */ - 0, /* nb_inplace_subtract */ - 0, /* nb_inplace_multiply */ - 0, /* nb_inplace_remainder */ - 0, /* nb_inplace_power */ - 0, /* nb_inplace_lshift */ - 0, /* nb_inplace_rshift */ - 0, /* nb_inplace_and */ - 0, /* nb_inplace_xor */ - 0, /* nb_inplace_or */ - - 0, /* nb_floor_divide */ - 0, /* nb_true_divide */ - 0, /* nb_inplace_floor_divide */ - 0, /* nb_inplace_true_divide */ - - (unaryfunc)interpid_int, /* nb_index */ -}; - -static Py_hash_t -interpid_hash(PyObject *self) -{ - interpid *id = (interpid *)self; - PyObject *obj = PyLong_FromLongLong(id->id); - if (obj == NULL) { - return -1; - } - Py_hash_t hash = PyObject_Hash(obj); - Py_DECREF(obj); - return hash; -} - -static PyObject * -interpid_richcompare(PyObject *self, PyObject *other, int op) -{ - if (op != Py_EQ && op != Py_NE) { - Py_RETURN_NOTIMPLEMENTED; - } - - if (!PyObject_TypeCheck(self, &PyInterpreterID_Type)) { - Py_RETURN_NOTIMPLEMENTED; - } - - interpid *id = (interpid *)self; - int equal; - if (PyObject_TypeCheck(other, &PyInterpreterID_Type)) { - interpid *otherid = (interpid *)other; - equal = (id->id == otherid->id); - } - else if (PyLong_CheckExact(other)) { - /* Fast path */ - int overflow; - long 
long otherid = PyLong_AsLongLongAndOverflow(other, &overflow); - if (otherid == -1 && PyErr_Occurred()) { - return NULL; - } - equal = !overflow && (otherid >= 0) && (id->id == otherid); - } - else if (PyNumber_Check(other)) { - PyObject *pyid = PyLong_FromLongLong(id->id); - if (pyid == NULL) { - return NULL; - } - PyObject *res = PyObject_RichCompare(pyid, other, op); - Py_DECREF(pyid); - return res; - } - else { - Py_RETURN_NOTIMPLEMENTED; - } - - if ((op == Py_EQ && equal) || (op == Py_NE && !equal)) { - Py_RETURN_TRUE; - } - Py_RETURN_FALSE; -} - -PyDoc_STRVAR(interpid_doc, -"A interpreter ID identifies a interpreter and may be used as an int."); - -PyTypeObject PyInterpreterID_Type = { - PyVarObject_HEAD_INIT(&PyType_Type, 0) - "InterpreterID", /* tp_name */ - sizeof(interpid), /* tp_basicsize */ - 0, /* tp_itemsize */ - (destructor)interpid_dealloc, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - (reprfunc)interpid_repr, /* tp_repr */ - &interpid_as_number, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - interpid_hash, /* tp_hash */ - 0, /* tp_call */ - (reprfunc)interpid_str, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - interpid_doc, /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - interpid_richcompare, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - 0, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - 0, /* tp_init */ - 0, /* tp_alloc */ - interpid_new, /* tp_new */ -}; - -PyObject *PyInterpreterID_New(int64_t id) -{ - return (PyObject *)newinterpid(&PyInterpreterID_Type, id, 0); -} - -PyObject * -PyInterpreterState_GetIDObject(PyInterpreterState *interp) -{ - if (_PyInterpreterState_IDInitref(interp) != 0) { - return NULL; - }; - int64_t id = PyInterpreterState_GetID(interp); - if (id < 0) { - return NULL; - } - return (PyObject *)newinterpid(&PyInterpreterID_Type, id, 0); -} - -PyInterpreterState * -PyInterpreterID_LookUp(PyObject *requested_id) -{ - int64_t id; - if (!interp_id_converter(requested_id, &id)) { - return NULL; - } - return _PyInterpreterState_LookUpID(id); -} diff --git a/Objects/listobject.c b/Objects/listobject.c index 096043bb3d3c51..470ad8eb8135db 100644 --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -1289,7 +1289,7 @@ list_extend_set(PyListObject *self, PySetObject *other) PyObject **dest = self->ob_item + m; while (_PySet_NextEntry((PyObject *)other, &setpos, &key, &hash)) { Py_INCREF(key); - *dest = key; + FT_ATOMIC_STORE_PTR_RELEASE(*dest, key); dest++; } Py_SET_SIZE(self, m + n); @@ -1312,7 +1312,7 @@ list_extend_dict(PyListObject *self, PyDictObject *dict, int which_item) while (_PyDict_Next((PyObject *)dict, &pos, &keyvalue[0], &keyvalue[1], NULL)) { PyObject *obj = keyvalue[which_item]; Py_INCREF(obj); - *dest = obj; + FT_ATOMIC_STORE_PTR_RELEASE(*dest, obj); dest++; } @@ -1320,12 +1320,39 @@ list_extend_dict(PyListObject *self, PyDictObject *dict, int which_item) return 0; } +static int +list_extend_dictitems(PyListObject *self, PyDictObject *dict) +{ + Py_ssize_t m = Py_SIZE(self); + Py_ssize_t n = PyDict_GET_SIZE(dict); + if (list_resize(self, m + n) < 0) { + return -1; + } + + PyObject **dest = self->ob_item + m; + Py_ssize_t pos = 0; + Py_ssize_t i = 0; + PyObject *key, 
*value; + while (_PyDict_Next((PyObject *)dict, &pos, &key, &value, NULL)) { + PyObject *item = PyTuple_Pack(2, key, value); + if (item == NULL) { + Py_SET_SIZE(self, m + i); + return -1; + } + FT_ATOMIC_STORE_PTR_RELEASE(*dest, item); + dest++; + i++; + } + + Py_SET_SIZE(self, m + n); + return 0; +} + static int _list_extend(PyListObject *self, PyObject *iterable) { // Special case: // lists and tuples which can use PySequence_Fast ops - // TODO(@corona10): Add more special cases for other types. int res = -1; if ((PyObject *)self == iterable) { Py_BEGIN_CRITICAL_SECTION(self); @@ -1359,6 +1386,12 @@ _list_extend(PyListObject *self, PyObject *iterable) res = list_extend_dict(self, dict, 1 /*values*/); Py_END_CRITICAL_SECTION2(); } + else if (Py_IS_TYPE(iterable, &PyDictItems_Type)) { + PyDictObject *dict = ((_PyDictViewObject *)iterable)->dv_dict; + Py_BEGIN_CRITICAL_SECTION2(self, dict); + res = list_extend_dictitems(self, dict); + Py_END_CRITICAL_SECTION2(); + } else { Py_BEGIN_CRITICAL_SECTION(self); res = list_extend_iter_lock_held(self, iterable); @@ -1595,6 +1628,15 @@ sortslice_advance(sortslice *slice, Py_ssize_t n) /* Avoid malloc for small temp arrays. */ #define MERGESTATE_TEMP_SIZE 256 +/* The largest value of minrun. This must be a power of 2, and >= 1, so that + * the compute_minrun() algorithm guarantees to return a result no larger than + * this, + */ +#define MAX_MINRUN 64 +#if ((MAX_MINRUN) < 1) || ((MAX_MINRUN) & ((MAX_MINRUN) - 1)) +#error "MAX_MINRUN must be a power of 2, and >= 1" +#endif + /* One MergeState exists on the stack per invocation of mergesort. It's just * a convenient way to pass state around among the helper functions. */ @@ -1652,68 +1694,133 @@ struct s_MergeState { int (*tuple_elem_compare)(PyObject *, PyObject *, MergeState *); }; -/* binarysort is the best method for sorting small arrays: it does - few compares, but can do data movement quadratic in the number of - elements. - [lo.keys, hi) is a contiguous slice of a list of keys, and is sorted via - binary insertion. This sort is stable. - On entry, must have lo.keys <= start <= hi, and that - [lo.keys, start) is already sorted (pass start == lo.keys if you don't - know!). - If islt() complains return -1, else 0. +/* binarysort is the best method for sorting small arrays: it does few + compares, but can do data movement quadratic in the number of elements. + ss->keys is viewed as an array of n kays, a[:n]. a[:ok] is already sorted. + Pass ok = 0 (or 1) if you don't know. + It's sorted in-place, by a stable binary insertion sort. If ss->values + isn't NULL, it's permuted in lockstap with ss->keys. + On entry, must have n >= 1, and 0 <= ok <= n <= MAX_MINRUN. + Return -1 if comparison raises an exception, else 0. Even in case of error, the output slice will be some permutation of the input (nothing is lost or duplicated). */ static int -binarysort(MergeState *ms, sortslice lo, PyObject **hi, PyObject **start) +binarysort(MergeState *ms, const sortslice *ss, Py_ssize_t n, Py_ssize_t ok) { - Py_ssize_t k; - PyObject **l, **p, **r; + Py_ssize_t k; /* for IFLT macro expansion */ + PyObject ** const a = ss->keys; + PyObject ** const v = ss->values; + const bool has_values = v != NULL; PyObject *pivot; - - assert(lo.keys <= start && start <= hi); - /* assert [lo.keys, start) is sorted */ - if (lo.keys == start) - ++start; - for (; start < hi; ++start) { - /* set l to where *start belongs */ - l = lo.keys; - r = start; - pivot = *r; - /* Invariants: - * pivot >= all in [lo.keys, l). 
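The `MAX_MINRUN` block above guards the constant with a preprocessor power-of-two test. The `x & (x - 1)` idiom clears the lowest set bit, so the expression is zero exactly when at most one bit is set; a tiny standalone check spelling the idiom out (names are illustrative only):

```c
#include <assert.h>
#include <stdbool.h>

/* True iff n is a power of two and n >= 1: clearing the lowest set bit of a
 * power of two leaves zero; any other positive value keeps at least one bit. */
static bool
is_power_of_two(unsigned long n)
{
    return n >= 1 && (n & (n - 1)) == 0;
}

int
main(void)
{
    assert(is_power_of_two(64));     /* the default MAX_MINRUN */
    assert(!is_power_of_two(96));
    return 0;
}
```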
- * pivot < all in [r, start). - * These are vacuously true at the start. + Py_ssize_t M; + + assert(0 <= ok && ok <= n && 1 <= n && n <= MAX_MINRUN); + /* assert a[:ok] is sorted */ + if (! ok) + ++ok; + /* Regular insertion sort has average- and worst-case O(n**2) cost + for both # of comparisons and number of bytes moved. But its branches + are highly predictable, and it loves sorted input (n-1 compares and no + data movement). This is significant in cases like sortperf.py's %sort, + where an out-of-order element near the start of a run is moved into + place slowly but then the remaining elements up to length minrun are + generally at worst one slot away from their correct position (so only + need 1 or 2 commpares to resolve). If comparisons are very fast (such + as for a list of Python floats), the simple inner loop leaves it + very competitive with binary insertion, despite that it does + significantly more compares overall on random data. + + Binary insertion sort has worst, average, and best case O(n log n) + cost for # of comparisons, but worst and average case O(n**2) cost + for data movement. The more expensive comparisons, the more important + the comparison advantage. But its branches are less predictable the + more "randomish" the data, and that's so significant its worst case + in real life is random input rather than reverse-ordered (which does + about twice the data movement than random input does). + + Note that the number of bytes moved doesn't seem to matter. MAX_MINRUN + of 64 is so small that the key and value pointers all fit in a corner + of L1 cache, and moving things around in that is very fast. */ +#if 0 // ordinary insertion sort. + PyObject * vpivot = NULL; + for (; ok < n; ++ok) { + pivot = a[ok]; + if (has_values) + vpivot = v[ok]; + for (M = ok - 1; M >= 0; --M) { + k = ISLT(pivot, a[M]); + if (k < 0) { + a[M + 1] = pivot; + if (has_values) + v[M + 1] = vpivot; + goto fail; + } + else if (k) { + a[M + 1] = a[M]; + if (has_values) + v[M + 1] = v[M]; + } + else + break; + } + a[M + 1] = pivot; + if (has_values) + v[M + 1] = vpivot; + } +#else // binary insertion sort + Py_ssize_t L, R; + for (; ok < n; ++ok) { + /* set L to where a[ok] belongs */ + L = 0; + R = ok; + pivot = a[ok]; + /* Slice invariants. vacuously true at the start: + * all a[0:L] <= pivot + * all a[L:R] unknown + * all a[R:ok] > pivot */ - assert(l < r); + assert(L < R); do { - p = l + ((r - l) >> 1); - IFLT(pivot, *p) - r = p; + /* don't do silly ;-) things to prevent overflow when finding + the midpoint; L and R are very far from filling a Py_ssize_t */ + M = (L + R) >> 1; +#if 1 // straightforward, but highly unpredictable branch on random data + IFLT(pivot, a[M]) + R = M; else - l = p+1; - } while (l < r); - assert(l == r); - /* The invariants still hold, so pivot >= all in [lo.keys, l) and - pivot < all in [l, start), so pivot belongs at l. Note - that if there are elements equal to pivot, l points to the - first slot after them -- that's why this sort is stable. - Slide over to make room. - Caution: using memmove is much slower under MSVC 5; - we're not usually moving many slots. */ - for (p = start; p > l; --p) - *p = *(p-1); - *l = pivot; - if (lo.values != NULL) { - Py_ssize_t offset = lo.values - lo.keys; - p = start + offset; - pivot = *p; - l += offset; - for ( ; p > l; --p) - *p = *(p-1); - *l = pivot; + L = M + 1; +#else + /* Try to get compiler to generate conditional move instructions + instead. 
Works fine, but leaving it disabled for now because + it's not yielding consistently faster sorts. Needs more + investigation. More computation in the inner loop adds its own + costs, which can be significant when compares are fast. */ + k = ISLT(pivot, a[M]); + if (k < 0) + goto fail; + Py_ssize_t Mp1 = M + 1; + R = k ? M : R; + L = k ? L : Mp1; +#endif + } while (L < R); + assert(L == R); + /* a[:L] holds all elements from a[:ok] <= pivot now, so pivot belongs + at index L. Slide a[L:ok] to the right a slot to make room for it. + Caution: using memmove is much slower under MSVC 5; we're not + usually moving many slots. Years later: under Visual Studio 2022, + memmove seems just slightly slower than doing it "by hand". */ + for (M = ok; M > L; --M) + a[M] = a[M - 1]; + a[L] = pivot; + if (has_values) { + pivot = v[ok]; + for (M = ok; M > L; --M) + v[M] = v[M - 1]; + v[L] = pivot; } } +#endif // pick binary or regular insertion sort return 0; fail: @@ -2526,10 +2633,10 @@ merge_force_collapse(MergeState *ms) /* Compute a good value for the minimum run length; natural runs shorter * than this are boosted artificially via binary insertion. * - * If n < 64, return n (it's too small to bother with fancy stuff). - * Else if n is an exact power of 2, return 32. - * Else return an int k, 32 <= k <= 64, such that n/k is close to, but - * strictly less than, an exact power of 2. + * If n < MAX_MINRUN return n (it's too small to bother with fancy stuff). + * Else if n is an exact power of 2, return MAX_MINRUN / 2. + * Else return an int k, MAX_MINRUN / 2 <= k <= MAX_MINRUN, such that n/k is + * close to, but strictly less than, an exact power of 2. * * See listsort.txt for more info. */ @@ -2539,7 +2646,7 @@ merge_compute_minrun(Py_ssize_t n) Py_ssize_t r = 0; /* becomes 1 if any 1 bits are shifted off */ assert(n >= 0); - while (n >= 64) { + while (n >= MAX_MINRUN) { r |= n & 1; n >>= 1; } @@ -2923,7 +3030,7 @@ list_sort_impl(PyListObject *self, PyObject *keyfunc, int reverse) if (n < minrun) { const Py_ssize_t force = nremaining <= minrun ? nremaining : minrun; - if (binarysort(&ms, lo, lo.keys + force, lo.keys + n) < 0) + if (binarysort(&ms, &lo, force, n) < 0) goto fail; n = force; } diff --git a/Objects/listsort.txt b/Objects/listsort.txt index 4f84e2c87da7f1..f387d9c116e502 100644 --- a/Objects/listsort.txt +++ b/Objects/listsort.txt @@ -270,9 +270,9 @@ result. This has two primary good effects: Computing minrun ---------------- -If N < 64, minrun is N. IOW, binary insertion sort is used for the whole -array then; it's hard to beat that given the overheads of trying something -fancier (see note BINSORT). +If N < MAX_MINRUN, minrun is N. IOW, binary insertion sort is used for the +whole array then; it's hard to beat that given the overheads of trying +something fancier (see note BINSORT). When N is a power of 2, testing on random data showed that minrun values of 16, 32, 64 and 128 worked about equally well. At 256 the data-movement cost @@ -310,12 +310,13 @@ place, and r < minrun is small compared to N), or q a little larger than a power of 2 regardless of r (then we've got a case similar to "2112", again leaving too little work for the last merge to do). -Instead we pick a minrun in range(32, 65) such that N/minrun is exactly a -power of 2, or if that isn't possible, is close to, but strictly less than, -a power of 2. This is easier to do than it may sound: take the first 6 -bits of N, and add 1 if any of the remaining bits are set. 
In fact, that -rule covers every case in this section, including small N and exact powers -of 2; merge_compute_minrun() is a deceptively simple function. +Instead we pick a minrun in range(MAX_MINRUN / 2, MAX_MINRUN + 1) such that +N/minrun is exactly a power of 2, or if that isn't possible, is close to, but +strictly less than, a power of 2. This is easier to do than it may sound: +take the first log2(MAX_MINRUN) bits of N, and add 1 if any of the remaining +bits are set. In fact, that rule covers every case in this section, including +small N and exact powers of 2; merge_compute_minrun() is a deceptively simple +function. The Merge Pattern diff --git a/Objects/methodobject.c b/Objects/methodobject.c index 599fb05cb5874f..d6773a264101dc 100644 --- a/Objects/methodobject.c +++ b/Objects/methodobject.c @@ -320,7 +320,7 @@ static Py_hash_t meth_hash(PyCFunctionObject *a) { Py_hash_t x, y; - x = _Py_HashPointer(a->m_self); + x = PyObject_GenericHash(a->m_self); y = _Py_HashPointer((void*)(a->m_ml->ml_meth)); x ^= y; if (x == -1) diff --git a/Objects/object.c b/Objects/object.c index df14fe0c6fbfec..b4f0fd4d7db941 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -14,6 +14,7 @@ #include "pycore_memoryobject.h" // _PyManagedBuffer_Type #include "pycore_namespace.h" // _PyNamespace_Type #include "pycore_object.h" // PyAPI_DATA() _Py_SwappedOp definition +#include "pycore_long.h" // _PyLong_GetZero() #include "pycore_optimizer.h" // _PyUOpExecutor_Type, _PyUOpOptimizer_Type, ... #include "pycore_pyerrors.h" // _PyErr_Occurred() #include "pycore_pymem.h" // _PyMem_IsPtrFreed() @@ -23,8 +24,6 @@ #include "pycore_typevarobject.h" // _PyTypeAlias_Type, _Py_initialize_generic #include "pycore_unionobject.h" // _PyUnion_Type -#include "interpreteridobject.h" // _PyInterpreterID_Type - #ifdef Py_LIMITED_API // Prevent recursive call _Py_IncRef() <=> Py_INCREF() # error "Py_LIMITED_API macro must not be defined" @@ -2239,7 +2238,6 @@ static PyTypeObject* static_types[] = { &PyGen_Type, &PyGetSetDescr_Type, &PyInstanceMethod_Type, - &PyInterpreterID_Type, &PyListIter_Type, &PyListRevIter_Type, &PyList_Type, @@ -2401,6 +2399,27 @@ _Py_NewReferenceNoTotal(PyObject *op) new_reference(op); } +void +_Py_SetImmortalUntracked(PyObject *op) +{ +#ifdef Py_GIL_DISABLED + op->ob_tid = _Py_UNOWNED_TID; + op->ob_ref_local = _Py_IMMORTAL_REFCNT_LOCAL; + op->ob_ref_shared = 0; +#else + op->ob_refcnt = _Py_IMMORTAL_REFCNT; +#endif +} + +void +_Py_SetImmortal(PyObject *op) +{ + if (PyObject_IS_GC(op) && _PyObject_GC_IS_TRACKED(op)) { + _PyObject_GC_UNTRACK(op); + } + _Py_SetImmortalUntracked(op); +} + void _Py_ResurrectReference(PyObject *op) { @@ -2970,3 +2989,53 @@ _Py_SetRefcnt(PyObject *ob, Py_ssize_t refcnt) { Py_SET_REFCNT(ob, refcnt); } + + +static PyObject* constants[] = { + &_Py_NoneStruct, // Py_CONSTANT_NONE + (PyObject*)(&_Py_FalseStruct), // Py_CONSTANT_FALSE + (PyObject*)(&_Py_TrueStruct), // Py_CONSTANT_TRUE + &_Py_EllipsisObject, // Py_CONSTANT_ELLIPSIS + &_Py_NotImplementedStruct, // Py_CONSTANT_NOT_IMPLEMENTED + NULL, // Py_CONSTANT_ZERO + NULL, // Py_CONSTANT_ONE + NULL, // Py_CONSTANT_EMPTY_STR + NULL, // Py_CONSTANT_EMPTY_BYTES + NULL, // Py_CONSTANT_EMPTY_TUPLE +}; + +void +_Py_GetConstant_Init(void) +{ + constants[Py_CONSTANT_ZERO] = _PyLong_GetZero(); + constants[Py_CONSTANT_ONE] = _PyLong_GetOne(); + constants[Py_CONSTANT_EMPTY_STR] = PyUnicode_New(0, 0); + constants[Py_CONSTANT_EMPTY_BYTES] = PyBytes_FromStringAndSize(NULL, 0); + constants[Py_CONSTANT_EMPTY_TUPLE] = PyTuple_New(0); +#ifndef 
NDEBUG + for (size_t i=0; i < Py_ARRAY_LENGTH(constants); i++) { + assert(constants[i] != NULL); + assert(_Py_IsImmortal(constants[i])); + } +#endif +} + +PyObject* +Py_GetConstant(unsigned int constant_id) +{ + if (constant_id < Py_ARRAY_LENGTH(constants)) { + return constants[constant_id]; + } + else { + PyErr_BadInternalCall(); + return NULL; + } +} + + +PyObject* +Py_GetConstantBorrowed(unsigned int constant_id) +{ + // All constants are immortal + return Py_GetConstant(constant_id); +} diff --git a/Objects/structseq.c b/Objects/structseq.c index 581d6ad240885a..661d96a968fb80 100644 --- a/Objects/structseq.c +++ b/Objects/structseq.c @@ -603,6 +603,9 @@ _PyStructSequence_InitBuiltinWithFlags(PyInterpreterState *interp, PyStructSequence_Desc *desc, unsigned long tp_flags) { + if (Py_TYPE(type) == NULL) { + Py_SET_TYPE(type, &PyType_Type); + } Py_ssize_t n_unnamed_members; Py_ssize_t n_members = count_members(desc, &n_unnamed_members); PyMemberDef *members = NULL; @@ -618,7 +621,7 @@ _PyStructSequence_InitBuiltinWithFlags(PyInterpreterState *interp, } initialize_static_fields(type, desc, members, tp_flags); - _Py_SetImmortal(type); + _Py_SetImmortal((PyObject *)type); } #ifndef NDEBUG else { diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 24f31492985164..82822784aaf407 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -6891,12 +6891,6 @@ PyDoc_STRVAR(object_doc, "When called, it accepts no arguments and returns a new featureless\n" "instance that has no instance attributes and cannot be given any.\n"); -static Py_hash_t -object_hash(PyObject *obj) -{ - return _Py_HashPointer(obj); -} - PyTypeObject PyBaseObject_Type = { PyVarObject_HEAD_INIT(&PyType_Type, 0) "object", /* tp_name */ @@ -6911,7 +6905,7 @@ PyTypeObject PyBaseObject_Type = { 0, /* tp_as_number */ 0, /* tp_as_sequence */ 0, /* tp_as_mapping */ - object_hash, /* tp_hash */ + PyObject_GenericHash, /* tp_hash */ 0, /* tp_call */ object_str, /* tp_str */ PyObject_GenericGetAttr, /* tp_getattro */ @@ -10884,7 +10878,7 @@ super_init_without_args(_PyInterpreterFrame *cframe, PyCodeObject *co, // Look for __class__ in the free vars. PyTypeObject *type = NULL; - int i = PyCode_GetFirstFree(co); + int i = PyUnstable_Code_GetFirstFree(co); for (; i < co->co_nlocalsplus; i++) { assert((_PyLocals_GetKind(co->co_localspluskinds, i) & CO_FAST_FREE) != 0); PyObject *name = PyTuple_GET_ITEM(co->co_localsplusnames, i); diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index c8f647a7a71135..e412af5f797e7a 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -505,7 +505,7 @@ unicode_check_encoding_errors(const char *encoding, const char *errors) /* Disable checks during Python finalization. For example, it allows to call _PyObject_Dump() during finalization for debugging purpose. 
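The `Py_GetConstant()` / `Py_GetConstantBorrowed()` pair added in `Objects/object.c` above exposes well-known singletons through stable IDs. Since every entry in the table is immortal, the strong and borrowed variants behave the same in practice. A short usage sketch in extension-module style; the function name is invented and error handling is kept minimal:

```c
#include "Python.h"

/* Sketch: fetching constants by ID instead of touching C globals directly.
 * NULL is only possible for an out-of-range ID. */
static PyObject *
empty_pair(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(args))
{
    PyObject *empty_str = Py_GetConstant(Py_CONSTANT_EMPTY_STR);
    PyObject *zero = Py_GetConstantBorrowed(Py_CONSTANT_ZERO);
    if (empty_str == NULL || zero == NULL) {
        return NULL;
    }
    return PyTuple_Pack(2, empty_str, zero);
}
```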
*/ - if (interp->finalizing) { + if (_PyInterpreterState_GetFinalizing(interp) != NULL) { return 0; } diff --git a/PC/_testconsole.c b/PC/_testconsole.c index 1dc0d230c4d7c3..f1ace003df483b 100644 --- a/PC/_testconsole.c +++ b/PC/_testconsole.c @@ -1,17 +1,16 @@ /* Testing module for multi-phase initialization of extension modules (PEP 489) */ -#ifndef Py_BUILD_CORE_BUILTIN -# define Py_BUILD_CORE_MODULE 1 +// Need limited C API version 3.12 for Py_MOD_PER_INTERPRETER_GIL_SUPPORTED +#include "pyconfig.h" // Py_GIL_DISABLED +#ifndef Py_GIL_DISABLED +# define Py_LIMITED_API 0x030c0000 #endif #include "Python.h" #ifdef MS_WINDOWS -#include "pycore_fileutils.h" // _Py_get_osfhandle() -#include "pycore_runtime.h" // _Py_ID() - #define WIN32_LEAN_AND_MEAN #include #include @@ -57,20 +56,24 @@ module _testconsole _testconsole.write_input file: object - s: PyBytesObject + s: Py_buffer Writes UTF-16-LE encoded bytes to the console as if typed by a user. [clinic start generated code]*/ static PyObject * -_testconsole_write_input_impl(PyObject *module, PyObject *file, - PyBytesObject *s) -/*[clinic end generated code: output=48f9563db34aedb3 input=4c774f2d05770bc6]*/ +_testconsole_write_input_impl(PyObject *module, PyObject *file, Py_buffer *s) +/*[clinic end generated code: output=58631a8985426ad3 input=68062f1bb2e52206]*/ { INPUT_RECORD *rec = NULL; - PyTypeObject *winconsoleio_type = (PyTypeObject *)_PyImport_GetModuleAttr( - &_Py_ID(_io), &_Py_ID(_WindowsConsoleIO)); + PyObject *mod = PyImport_ImportModule("_io"); + if (mod == NULL) { + return NULL; + } + + PyTypeObject *winconsoleio_type = (PyTypeObject *)PyObject_GetAttrString(mod, "_WindowsConsoleIO"); + Py_DECREF(mod); if (winconsoleio_type == NULL) { return NULL; } @@ -81,8 +84,8 @@ _testconsole_write_input_impl(PyObject *module, PyObject *file, return NULL; } - const wchar_t *p = (const wchar_t *)PyBytes_AS_STRING(s); - DWORD size = (DWORD)PyBytes_GET_SIZE(s) / sizeof(wchar_t); + const wchar_t *p = (const wchar_t *)s->buf; + DWORD size = (DWORD)s->len / sizeof(wchar_t); rec = (INPUT_RECORD*)PyMem_Calloc(size, sizeof(INPUT_RECORD)); if (!rec) @@ -96,9 +99,11 @@ _testconsole_write_input_impl(PyObject *module, PyObject *file, prec->Event.KeyEvent.uChar.UnicodeChar = *p; } - HANDLE hInput = _Py_get_osfhandle(((winconsoleio*)file)->fd); - if (hInput == INVALID_HANDLE_VALUE) + HANDLE hInput = (HANDLE)_get_osfhandle(((winconsoleio*)file)->fd); + if (hInput == INVALID_HANDLE_VALUE) { + PyErr_SetFromErrno(PyExc_OSError); goto error; + } DWORD total = 0; while (total < size) { diff --git a/PC/clinic/_testconsole.c.h b/PC/clinic/_testconsole.c.h index 2c71c11c438b5b..4c11e545499ac5 100644 --- a/PC/clinic/_testconsole.c.h +++ b/PC/clinic/_testconsole.c.h @@ -2,12 +2,6 @@ preserve [clinic start generated code]*/ -#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) -# include "pycore_gc.h" // PyGC_Head -# include "pycore_runtime.h" // _Py_ID() -#endif -#include "pycore_modsupport.h" // _PyArg_UnpackKeywords() - #if defined(MS_WINDOWS) PyDoc_STRVAR(_testconsole_write_input__doc__, @@ -17,58 +11,30 @@ PyDoc_STRVAR(_testconsole_write_input__doc__, "Writes UTF-16-LE encoded bytes to the console as if typed by a user."); #define _TESTCONSOLE_WRITE_INPUT_METHODDEF \ - {"write_input", _PyCFunction_CAST(_testconsole_write_input), METH_FASTCALL|METH_KEYWORDS, _testconsole_write_input__doc__}, + {"write_input", (PyCFunction)(void(*)(void))_testconsole_write_input, METH_VARARGS|METH_KEYWORDS, _testconsole_write_input__doc__}, static PyObject * 
-_testconsole_write_input_impl(PyObject *module, PyObject *file, - PyBytesObject *s); +_testconsole_write_input_impl(PyObject *module, PyObject *file, Py_buffer *s); static PyObject * -_testconsole_write_input(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_testconsole_write_input(PyObject *module, PyObject *args, PyObject *kwargs) { PyObject *return_value = NULL; - #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) - - #define NUM_KEYWORDS 2 - static struct { - PyGC_Head _this_is_not_used; - PyObject_VAR_HEAD - PyObject *ob_item[NUM_KEYWORDS]; - } _kwtuple = { - .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) - .ob_item = { &_Py_ID(file), &_Py_ID(s), }, - }; - #undef NUM_KEYWORDS - #define KWTUPLE (&_kwtuple.ob_base.ob_base) - - #else // !Py_BUILD_CORE - # define KWTUPLE NULL - #endif // !Py_BUILD_CORE - - static const char * const _keywords[] = {"file", "s", NULL}; - static _PyArg_Parser _parser = { - .keywords = _keywords, - .fname = "write_input", - .kwtuple = KWTUPLE, - }; - #undef KWTUPLE - PyObject *argsbuf[2]; + static char *_keywords[] = {"file", "s", NULL}; PyObject *file; - PyBytesObject *s; + Py_buffer s = {NULL, NULL}; - args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 2, 0, argsbuf); - if (!args) { - goto exit; - } - file = args[0]; - if (!PyBytes_Check(args[1])) { - _PyArg_BadArgument("write_input", "argument 's'", "bytes", args[1]); + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "Oy*:write_input", _keywords, + &file, &s)) goto exit; - } - s = (PyBytesObject *)args[1]; - return_value = _testconsole_write_input_impl(module, file, s); + return_value = _testconsole_write_input_impl(module, file, &s); exit: + /* Cleanup for s */ + if (s.obj) { + PyBuffer_Release(&s); + } + return return_value; } @@ -83,48 +49,21 @@ PyDoc_STRVAR(_testconsole_read_output__doc__, "Reads a str from the console as written to stdout."); #define _TESTCONSOLE_READ_OUTPUT_METHODDEF \ - {"read_output", _PyCFunction_CAST(_testconsole_read_output), METH_FASTCALL|METH_KEYWORDS, _testconsole_read_output__doc__}, + {"read_output", (PyCFunction)(void(*)(void))_testconsole_read_output, METH_VARARGS|METH_KEYWORDS, _testconsole_read_output__doc__}, static PyObject * _testconsole_read_output_impl(PyObject *module, PyObject *file); static PyObject * -_testconsole_read_output(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_testconsole_read_output(PyObject *module, PyObject *args, PyObject *kwargs) { PyObject *return_value = NULL; - #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) - - #define NUM_KEYWORDS 1 - static struct { - PyGC_Head _this_is_not_used; - PyObject_VAR_HEAD - PyObject *ob_item[NUM_KEYWORDS]; - } _kwtuple = { - .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) - .ob_item = { &_Py_ID(file), }, - }; - #undef NUM_KEYWORDS - #define KWTUPLE (&_kwtuple.ob_base.ob_base) - - #else // !Py_BUILD_CORE - # define KWTUPLE NULL - #endif // !Py_BUILD_CORE - - static const char * const _keywords[] = {"file", NULL}; - static _PyArg_Parser _parser = { - .keywords = _keywords, - .fname = "read_output", - .kwtuple = KWTUPLE, - }; - #undef KWTUPLE - PyObject *argsbuf[1]; + static char *_keywords[] = {"file", NULL}; PyObject *file; - args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); - if (!args) { + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O:read_output", _keywords, + &file)) goto exit; - } - file = args[0]; return_value = 
_testconsole_read_output_impl(module, file); exit: @@ -140,4 +79,4 @@ _testconsole_read_output(PyObject *module, PyObject *const *args, Py_ssize_t nar #ifndef _TESTCONSOLE_READ_OUTPUT_METHODDEF #define _TESTCONSOLE_READ_OUTPUT_METHODDEF #endif /* !defined(_TESTCONSOLE_READ_OUTPUT_METHODDEF) */ -/*[clinic end generated code: output=08a1c844b3657272 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=d60ce07157e3741a input=a9049054013a1b77]*/ diff --git a/PC/layout/support/appxmanifest.py b/PC/layout/support/appxmanifest.py index 1fb03380278f43..53977beb8af834 100644 --- a/PC/layout/support/appxmanifest.py +++ b/PC/layout/support/appxmanifest.py @@ -209,7 +209,7 @@ class PACKAGE_ID(ctypes.Structure): result = ctypes.create_unicode_buffer(256) result_len = ctypes.c_uint32(256) r = ctypes.windll.kernel32.PackageFamilyNameFromId( - pid, ctypes.byref(result_len), result + ctypes.byref(pid), ctypes.byref(result_len), result ) if r: raise OSError(r, "failed to get package family name") diff --git a/PC/python3dll.c b/PC/python3dll.c index dbfa3f23bb586d..c6fdc0bd73b9fe 100755 --- a/PC/python3dll.c +++ b/PC/python3dll.c @@ -55,6 +55,8 @@ EXPORT_FUNC(Py_GenericAlias) EXPORT_FUNC(Py_GetArgcArgv) EXPORT_FUNC(Py_GetBuildInfo) EXPORT_FUNC(Py_GetCompiler) +EXPORT_FUNC(Py_GetConstant) +EXPORT_FUNC(Py_GetConstantBorrowed) EXPORT_FUNC(Py_GetCopyright) EXPORT_FUNC(Py_GetExecPrefix) EXPORT_FUNC(Py_GetPath) @@ -639,6 +641,7 @@ EXPORT_FUNC(PyType_GenericNew) EXPORT_FUNC(PyType_GetFlags) EXPORT_FUNC(PyType_GetFullyQualifiedName) EXPORT_FUNC(PyType_GetModule) +EXPORT_FUNC(PyType_GetModuleByDef) EXPORT_FUNC(PyType_GetModuleName) EXPORT_FUNC(PyType_GetModuleState) EXPORT_FUNC(PyType_GetName) diff --git a/PC/winreg.c b/PC/winreg.c index 77b80217ac0ab1..8096d17e43b7bc 100644 --- a/PC/winreg.c +++ b/PC/winreg.c @@ -200,7 +200,7 @@ PyHKEY_hashFunc(PyObject *ob) /* Just use the address. XXX - should we use the handle value? 
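Among the stable-ABI symbols newly exported in `PC/python3dll.c` above are `Py_GetConstant`, `Py_GetConstantBorrowed`, and `PyType_GetModuleByDef`. The latter lets a method on a heap type locate its defining module (and hence its per-module state) without a defining-class argument. A hedged sketch; the module definition and method name are invented for illustration, and the returned module reference is borrowed:

```c
#include "Python.h"

/* Hypothetical module definition, only here to give the lookup a target. */
static struct PyModuleDef demo_module_def = {
    PyModuleDef_HEAD_INIT, "demo", NULL, 0,
};

/* Sketch: a heap-type method retrieving its module state via
 * PyType_GetModuleByDef. */
static PyObject *
demo_method(PyObject *self, PyObject *Py_UNUSED(args))
{
    PyObject *module = PyType_GetModuleByDef(Py_TYPE(self), &demo_module_def);
    if (module == NULL) {
        return NULL;
    }
    void *state = PyModule_GetState(module);
    (void)state;  /* use the per-module state here */
    Py_RETURN_NONE;
}
```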
*/ - return _Py_HashPointer(ob); + return PyObject_GenericHash(ob); } diff --git a/PCbuild/_freeze_module.vcxproj b/PCbuild/_freeze_module.vcxproj index bce92c91f1ca0d..82471e0f140ec3 100644 --- a/PCbuild/_freeze_module.vcxproj +++ b/PCbuild/_freeze_module.vcxproj @@ -142,7 +142,6 @@ - diff --git a/PCbuild/_freeze_module.vcxproj.filters b/PCbuild/_freeze_module.vcxproj.filters index 5b34440af9322b..97c52fdadf7c05 100644 --- a/PCbuild/_freeze_module.vcxproj.filters +++ b/PCbuild/_freeze_module.vcxproj.filters @@ -241,9 +241,6 @@ Source Files - - Source Files - Source Files diff --git a/PCbuild/_testcapi.vcxproj b/PCbuild/_testcapi.vcxproj index 6522cb1fcf5c63..615d73d5e003b4 100644 --- a/PCbuild/_testcapi.vcxproj +++ b/PCbuild/_testcapi.vcxproj @@ -98,6 +98,7 @@ + diff --git a/PCbuild/_testcapi.vcxproj.filters b/PCbuild/_testcapi.vcxproj.filters index 772a9a861517ec..0c11e918556ff5 100644 --- a/PCbuild/_testcapi.vcxproj.filters +++ b/PCbuild/_testcapi.vcxproj.filters @@ -30,6 +30,9 @@ Source Files + + Source Files + Source Files diff --git a/PCbuild/_testlimitedcapi.vcxproj b/PCbuild/_testlimitedcapi.vcxproj index 1afeacaa93396e..252039d93103bd 100644 --- a/PCbuild/_testlimitedcapi.vcxproj +++ b/PCbuild/_testlimitedcapi.vcxproj @@ -94,11 +94,20 @@ + + + + + + + + + diff --git a/PCbuild/_testlimitedcapi.vcxproj.filters b/PCbuild/_testlimitedcapi.vcxproj.filters index b3eeb86185c372..7efbb0acf8f960 100644 --- a/PCbuild/_testlimitedcapi.vcxproj.filters +++ b/PCbuild/_testlimitedcapi.vcxproj.filters @@ -9,11 +9,21 @@ + + + + + + + + + + diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index 9131ce87db6c84..c944bbafdba7e5 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -154,7 +154,6 @@ - @@ -303,7 +302,6 @@ - @@ -504,7 +502,6 @@ - diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index 27bd1121663398..0afad125ce1e97 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -330,9 +330,6 @@ Include - - Include - Modules @@ -492,9 +489,6 @@ Include - - Include\cpython - Include\cpython @@ -1475,9 +1469,6 @@ Objects - - Objects - Modules diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py index 59cc391881ab86..c4df2c52c032bc 100755 --- a/Parser/asdl_c.py +++ b/Parser/asdl_c.py @@ -973,11 +973,22 @@ def visitModule(self, mod): Py_ssize_t size = PySet_Size(remaining_fields); PyObject *field_types = NULL, *remaining_list = NULL; if (size > 0) { - if (!PyObject_GetOptionalAttr((PyObject*)Py_TYPE(self), &_Py_ID(_field_types), - &field_types)) { + if (PyObject_GetOptionalAttr((PyObject*)Py_TYPE(self), &_Py_ID(_field_types), + &field_types) < 0) { res = -1; goto cleanup; } + if (field_types == NULL) { + if (PyErr_WarnFormat( + PyExc_DeprecationWarning, 1, + "%.400s provides _fields but not _field_types. 
" + "This will become an error in Python 3.15.", + Py_TYPE(self)->tp_name + ) < 0) { + res = -1; + } + goto cleanup; + } remaining_list = PySequence_List(remaining_fields); if (!remaining_list) { goto set_remaining_cleanup; diff --git a/Parser/parser.c b/Parser/parser.c index f1170c26197452..6817bd10d3cd7f 100644 --- a/Parser/parser.c +++ b/Parser/parser.c @@ -34,7 +34,7 @@ static KeywordToken *reserved_keywords[] = { {"for", 671}, {"try", 643}, {"and", 582}, - {"not", 588}, + {"not", 678}, {NULL, -1}, }, (KeywordToken[]) { @@ -329,290 +329,294 @@ static char *soft_keywords[] = { #define invalid_starred_expression_type 1242 #define invalid_replacement_field_type 1243 #define invalid_conversion_character_type 1244 -#define _loop0_1_type 1245 -#define _loop0_2_type 1246 -#define _loop1_3_type 1247 -#define _loop0_5_type 1248 -#define _gather_4_type 1249 -#define _tmp_6_type 1250 -#define _tmp_7_type 1251 -#define _tmp_8_type 1252 -#define _tmp_9_type 1253 -#define _tmp_10_type 1254 -#define _tmp_11_type 1255 -#define _tmp_12_type 1256 -#define _tmp_13_type 1257 -#define _loop1_14_type 1258 -#define _tmp_15_type 1259 -#define _tmp_16_type 1260 -#define _tmp_17_type 1261 -#define _loop0_19_type 1262 -#define _gather_18_type 1263 -#define _loop0_21_type 1264 -#define _gather_20_type 1265 -#define _tmp_22_type 1266 -#define _tmp_23_type 1267 -#define _loop0_24_type 1268 -#define _loop1_25_type 1269 -#define _loop0_27_type 1270 -#define _gather_26_type 1271 -#define _tmp_28_type 1272 -#define _loop0_30_type 1273 -#define _gather_29_type 1274 -#define _tmp_31_type 1275 -#define _loop1_32_type 1276 -#define _tmp_33_type 1277 -#define _tmp_34_type 1278 -#define _tmp_35_type 1279 -#define _loop0_36_type 1280 -#define _loop0_37_type 1281 -#define _loop0_38_type 1282 -#define _loop1_39_type 1283 -#define _loop0_40_type 1284 -#define _loop1_41_type 1285 -#define _loop1_42_type 1286 -#define _loop1_43_type 1287 -#define _loop0_44_type 1288 -#define _loop1_45_type 1289 -#define _loop0_46_type 1290 -#define _loop1_47_type 1291 -#define _loop0_48_type 1292 -#define _loop0_49_type 1293 -#define _loop1_50_type 1294 -#define _loop0_52_type 1295 -#define _gather_51_type 1296 -#define _loop0_54_type 1297 -#define _gather_53_type 1298 -#define _loop0_56_type 1299 -#define _gather_55_type 1300 -#define _loop0_58_type 1301 -#define _gather_57_type 1302 -#define _tmp_59_type 1303 -#define _loop1_60_type 1304 -#define _loop1_61_type 1305 -#define _tmp_62_type 1306 -#define _tmp_63_type 1307 -#define _loop1_64_type 1308 -#define _loop0_66_type 1309 -#define _gather_65_type 1310 -#define _tmp_67_type 1311 -#define _tmp_68_type 1312 -#define _tmp_69_type 1313 -#define _tmp_70_type 1314 -#define _loop0_72_type 1315 -#define _gather_71_type 1316 -#define _loop0_74_type 1317 -#define _gather_73_type 1318 -#define _tmp_75_type 1319 -#define _loop0_77_type 1320 -#define _gather_76_type 1321 -#define _loop0_79_type 1322 -#define _gather_78_type 1323 -#define _loop0_81_type 1324 -#define _gather_80_type 1325 -#define _loop1_82_type 1326 -#define _loop1_83_type 1327 -#define _loop0_85_type 1328 -#define _gather_84_type 1329 -#define _loop1_86_type 1330 -#define _loop1_87_type 1331 -#define _loop1_88_type 1332 -#define _tmp_89_type 1333 -#define _loop0_91_type 1334 -#define _gather_90_type 1335 -#define _tmp_92_type 1336 -#define _tmp_93_type 1337 -#define _tmp_94_type 1338 -#define _tmp_95_type 1339 -#define _tmp_96_type 1340 -#define _tmp_97_type 1341 -#define _loop0_98_type 1342 -#define _loop0_99_type 1343 -#define 
_loop0_100_type 1344 -#define _loop1_101_type 1345 -#define _loop0_102_type 1346 -#define _loop1_103_type 1347 -#define _loop1_104_type 1348 -#define _loop1_105_type 1349 -#define _loop0_106_type 1350 -#define _loop1_107_type 1351 -#define _loop0_108_type 1352 -#define _loop1_109_type 1353 -#define _loop0_110_type 1354 -#define _loop1_111_type 1355 -#define _tmp_112_type 1356 -#define _loop0_113_type 1357 -#define _loop0_114_type 1358 -#define _loop1_115_type 1359 -#define _tmp_116_type 1360 -#define _loop0_118_type 1361 -#define _gather_117_type 1362 -#define _loop1_119_type 1363 -#define _loop0_120_type 1364 -#define _loop0_121_type 1365 -#define _tmp_122_type 1366 -#define _tmp_123_type 1367 -#define _loop0_125_type 1368 -#define _gather_124_type 1369 -#define _tmp_126_type 1370 -#define _loop0_128_type 1371 -#define _gather_127_type 1372 -#define _loop0_130_type 1373 -#define _gather_129_type 1374 -#define _loop0_132_type 1375 -#define _gather_131_type 1376 -#define _loop0_134_type 1377 -#define _gather_133_type 1378 -#define _loop0_135_type 1379 -#define _loop0_137_type 1380 -#define _gather_136_type 1381 -#define _loop1_138_type 1382 -#define _tmp_139_type 1383 -#define _loop0_141_type 1384 -#define _gather_140_type 1385 -#define _loop0_143_type 1386 -#define _gather_142_type 1387 -#define _loop0_145_type 1388 -#define _gather_144_type 1389 -#define _loop0_147_type 1390 -#define _gather_146_type 1391 -#define _loop0_149_type 1392 -#define _gather_148_type 1393 -#define _tmp_150_type 1394 -#define _tmp_151_type 1395 -#define _tmp_152_type 1396 -#define _tmp_153_type 1397 -#define _tmp_154_type 1398 -#define _tmp_155_type 1399 -#define _tmp_156_type 1400 -#define _tmp_157_type 1401 -#define _tmp_158_type 1402 -#define _tmp_159_type 1403 -#define _tmp_160_type 1404 -#define _tmp_161_type 1405 -#define _loop0_162_type 1406 -#define _loop0_163_type 1407 -#define _loop0_164_type 1408 -#define _tmp_165_type 1409 -#define _tmp_166_type 1410 -#define _tmp_167_type 1411 -#define _tmp_168_type 1412 -#define _tmp_169_type 1413 -#define _loop0_170_type 1414 -#define _loop0_171_type 1415 -#define _loop0_172_type 1416 -#define _loop1_173_type 1417 -#define _tmp_174_type 1418 -#define _loop0_175_type 1419 -#define _tmp_176_type 1420 -#define _loop0_177_type 1421 -#define _loop1_178_type 1422 -#define _tmp_179_type 1423 -#define _tmp_180_type 1424 -#define _tmp_181_type 1425 -#define _loop0_182_type 1426 -#define _tmp_183_type 1427 -#define _tmp_184_type 1428 -#define _loop1_185_type 1429 -#define _tmp_186_type 1430 -#define _loop0_187_type 1431 -#define _loop0_188_type 1432 -#define _loop0_189_type 1433 -#define _loop0_191_type 1434 -#define _gather_190_type 1435 -#define _tmp_192_type 1436 -#define _loop0_193_type 1437 -#define _tmp_194_type 1438 -#define _loop0_195_type 1439 -#define _loop1_196_type 1440 -#define _loop1_197_type 1441 -#define _tmp_198_type 1442 -#define _tmp_199_type 1443 -#define _loop0_200_type 1444 -#define _tmp_201_type 1445 -#define _tmp_202_type 1446 -#define _tmp_203_type 1447 -#define _loop0_205_type 1448 -#define _gather_204_type 1449 -#define _loop0_207_type 1450 -#define _gather_206_type 1451 -#define _loop0_209_type 1452 -#define _gather_208_type 1453 -#define _loop0_211_type 1454 -#define _gather_210_type 1455 -#define _loop0_213_type 1456 -#define _gather_212_type 1457 -#define _tmp_214_type 1458 -#define _loop0_215_type 1459 -#define _loop1_216_type 1460 -#define _tmp_217_type 1461 -#define _loop0_218_type 1462 -#define _loop1_219_type 1463 -#define _tmp_220_type 
1464 -#define _tmp_221_type 1465 -#define _tmp_222_type 1466 -#define _tmp_223_type 1467 -#define _tmp_224_type 1468 -#define _tmp_225_type 1469 -#define _tmp_226_type 1470 -#define _tmp_227_type 1471 -#define _tmp_228_type 1472 -#define _tmp_229_type 1473 -#define _loop0_231_type 1474 -#define _gather_230_type 1475 -#define _tmp_232_type 1476 -#define _tmp_233_type 1477 -#define _tmp_234_type 1478 -#define _tmp_235_type 1479 -#define _tmp_236_type 1480 -#define _tmp_237_type 1481 -#define _tmp_238_type 1482 -#define _tmp_239_type 1483 -#define _tmp_240_type 1484 -#define _tmp_241_type 1485 -#define _tmp_242_type 1486 -#define _tmp_243_type 1487 -#define _tmp_244_type 1488 -#define _loop0_245_type 1489 -#define _tmp_246_type 1490 -#define _tmp_247_type 1491 -#define _tmp_248_type 1492 -#define _tmp_249_type 1493 -#define _tmp_250_type 1494 -#define _tmp_251_type 1495 -#define _tmp_252_type 1496 -#define _tmp_253_type 1497 -#define _tmp_254_type 1498 -#define _tmp_255_type 1499 -#define _tmp_256_type 1500 -#define _tmp_257_type 1501 -#define _tmp_258_type 1502 -#define _tmp_259_type 1503 -#define _tmp_260_type 1504 -#define _loop0_261_type 1505 -#define _tmp_262_type 1506 -#define _tmp_263_type 1507 -#define _tmp_264_type 1508 -#define _tmp_265_type 1509 -#define _tmp_266_type 1510 -#define _tmp_267_type 1511 -#define _tmp_268_type 1512 -#define _tmp_269_type 1513 -#define _tmp_270_type 1514 -#define _tmp_271_type 1515 -#define _tmp_272_type 1516 -#define _tmp_273_type 1517 -#define _tmp_274_type 1518 -#define _tmp_275_type 1519 -#define _tmp_276_type 1520 -#define _loop0_278_type 1521 -#define _gather_277_type 1522 -#define _tmp_279_type 1523 -#define _tmp_280_type 1524 -#define _tmp_281_type 1525 -#define _tmp_282_type 1526 -#define _tmp_283_type 1527 -#define _tmp_284_type 1528 +#define invalid_arithmetic_type 1245 +#define invalid_factor_type 1246 +#define _loop0_1_type 1247 +#define _loop0_2_type 1248 +#define _loop1_3_type 1249 +#define _loop0_5_type 1250 +#define _gather_4_type 1251 +#define _tmp_6_type 1252 +#define _tmp_7_type 1253 +#define _tmp_8_type 1254 +#define _tmp_9_type 1255 +#define _tmp_10_type 1256 +#define _tmp_11_type 1257 +#define _tmp_12_type 1258 +#define _tmp_13_type 1259 +#define _loop1_14_type 1260 +#define _tmp_15_type 1261 +#define _tmp_16_type 1262 +#define _tmp_17_type 1263 +#define _loop0_19_type 1264 +#define _gather_18_type 1265 +#define _loop0_21_type 1266 +#define _gather_20_type 1267 +#define _tmp_22_type 1268 +#define _tmp_23_type 1269 +#define _loop0_24_type 1270 +#define _loop1_25_type 1271 +#define _loop0_27_type 1272 +#define _gather_26_type 1273 +#define _tmp_28_type 1274 +#define _loop0_30_type 1275 +#define _gather_29_type 1276 +#define _tmp_31_type 1277 +#define _loop1_32_type 1278 +#define _tmp_33_type 1279 +#define _tmp_34_type 1280 +#define _tmp_35_type 1281 +#define _loop0_36_type 1282 +#define _loop0_37_type 1283 +#define _loop0_38_type 1284 +#define _loop1_39_type 1285 +#define _loop0_40_type 1286 +#define _loop1_41_type 1287 +#define _loop1_42_type 1288 +#define _loop1_43_type 1289 +#define _loop0_44_type 1290 +#define _loop1_45_type 1291 +#define _loop0_46_type 1292 +#define _loop1_47_type 1293 +#define _loop0_48_type 1294 +#define _loop0_49_type 1295 +#define _loop1_50_type 1296 +#define _loop0_52_type 1297 +#define _gather_51_type 1298 +#define _loop0_54_type 1299 +#define _gather_53_type 1300 +#define _loop0_56_type 1301 +#define _gather_55_type 1302 +#define _loop0_58_type 1303 +#define _gather_57_type 1304 +#define _tmp_59_type 
1305 +#define _loop1_60_type 1306 +#define _loop1_61_type 1307 +#define _tmp_62_type 1308 +#define _tmp_63_type 1309 +#define _loop1_64_type 1310 +#define _loop0_66_type 1311 +#define _gather_65_type 1312 +#define _tmp_67_type 1313 +#define _tmp_68_type 1314 +#define _tmp_69_type 1315 +#define _tmp_70_type 1316 +#define _loop0_72_type 1317 +#define _gather_71_type 1318 +#define _loop0_74_type 1319 +#define _gather_73_type 1320 +#define _tmp_75_type 1321 +#define _loop0_77_type 1322 +#define _gather_76_type 1323 +#define _loop0_79_type 1324 +#define _gather_78_type 1325 +#define _loop0_81_type 1326 +#define _gather_80_type 1327 +#define _loop1_82_type 1328 +#define _loop1_83_type 1329 +#define _loop0_85_type 1330 +#define _gather_84_type 1331 +#define _loop1_86_type 1332 +#define _loop1_87_type 1333 +#define _loop1_88_type 1334 +#define _tmp_89_type 1335 +#define _loop0_91_type 1336 +#define _gather_90_type 1337 +#define _tmp_92_type 1338 +#define _tmp_93_type 1339 +#define _tmp_94_type 1340 +#define _tmp_95_type 1341 +#define _tmp_96_type 1342 +#define _tmp_97_type 1343 +#define _loop0_98_type 1344 +#define _loop0_99_type 1345 +#define _loop0_100_type 1346 +#define _loop1_101_type 1347 +#define _loop0_102_type 1348 +#define _loop1_103_type 1349 +#define _loop1_104_type 1350 +#define _loop1_105_type 1351 +#define _loop0_106_type 1352 +#define _loop1_107_type 1353 +#define _loop0_108_type 1354 +#define _loop1_109_type 1355 +#define _loop0_110_type 1356 +#define _loop1_111_type 1357 +#define _tmp_112_type 1358 +#define _loop0_113_type 1359 +#define _loop0_114_type 1360 +#define _loop1_115_type 1361 +#define _tmp_116_type 1362 +#define _loop0_118_type 1363 +#define _gather_117_type 1364 +#define _loop1_119_type 1365 +#define _loop0_120_type 1366 +#define _loop0_121_type 1367 +#define _tmp_122_type 1368 +#define _tmp_123_type 1369 +#define _loop0_125_type 1370 +#define _gather_124_type 1371 +#define _tmp_126_type 1372 +#define _loop0_128_type 1373 +#define _gather_127_type 1374 +#define _loop0_130_type 1375 +#define _gather_129_type 1376 +#define _loop0_132_type 1377 +#define _gather_131_type 1378 +#define _loop0_134_type 1379 +#define _gather_133_type 1380 +#define _loop0_135_type 1381 +#define _loop0_137_type 1382 +#define _gather_136_type 1383 +#define _loop1_138_type 1384 +#define _tmp_139_type 1385 +#define _loop0_141_type 1386 +#define _gather_140_type 1387 +#define _loop0_143_type 1388 +#define _gather_142_type 1389 +#define _loop0_145_type 1390 +#define _gather_144_type 1391 +#define _loop0_147_type 1392 +#define _gather_146_type 1393 +#define _loop0_149_type 1394 +#define _gather_148_type 1395 +#define _tmp_150_type 1396 +#define _tmp_151_type 1397 +#define _tmp_152_type 1398 +#define _tmp_153_type 1399 +#define _tmp_154_type 1400 +#define _tmp_155_type 1401 +#define _tmp_156_type 1402 +#define _tmp_157_type 1403 +#define _tmp_158_type 1404 +#define _tmp_159_type 1405 +#define _tmp_160_type 1406 +#define _tmp_161_type 1407 +#define _loop0_162_type 1408 +#define _loop0_163_type 1409 +#define _loop0_164_type 1410 +#define _tmp_165_type 1411 +#define _tmp_166_type 1412 +#define _tmp_167_type 1413 +#define _tmp_168_type 1414 +#define _tmp_169_type 1415 +#define _loop0_170_type 1416 +#define _loop0_171_type 1417 +#define _loop0_172_type 1418 +#define _loop1_173_type 1419 +#define _tmp_174_type 1420 +#define _loop0_175_type 1421 +#define _tmp_176_type 1422 +#define _loop0_177_type 1423 +#define _loop1_178_type 1424 +#define _tmp_179_type 1425 +#define _tmp_180_type 1426 +#define 
_tmp_181_type 1427 +#define _loop0_182_type 1428 +#define _tmp_183_type 1429 +#define _tmp_184_type 1430 +#define _loop1_185_type 1431 +#define _tmp_186_type 1432 +#define _loop0_187_type 1433 +#define _loop0_188_type 1434 +#define _loop0_189_type 1435 +#define _loop0_191_type 1436 +#define _gather_190_type 1437 +#define _tmp_192_type 1438 +#define _loop0_193_type 1439 +#define _tmp_194_type 1440 +#define _loop0_195_type 1441 +#define _loop1_196_type 1442 +#define _loop1_197_type 1443 +#define _tmp_198_type 1444 +#define _tmp_199_type 1445 +#define _loop0_200_type 1446 +#define _tmp_201_type 1447 +#define _tmp_202_type 1448 +#define _tmp_203_type 1449 +#define _loop0_205_type 1450 +#define _gather_204_type 1451 +#define _loop0_207_type 1452 +#define _gather_206_type 1453 +#define _loop0_209_type 1454 +#define _gather_208_type 1455 +#define _loop0_211_type 1456 +#define _gather_210_type 1457 +#define _loop0_213_type 1458 +#define _gather_212_type 1459 +#define _tmp_214_type 1460 +#define _loop0_215_type 1461 +#define _loop1_216_type 1462 +#define _tmp_217_type 1463 +#define _loop0_218_type 1464 +#define _loop1_219_type 1465 +#define _tmp_220_type 1466 +#define _tmp_221_type 1467 +#define _tmp_222_type 1468 +#define _tmp_223_type 1469 +#define _tmp_224_type 1470 +#define _tmp_225_type 1471 +#define _tmp_226_type 1472 +#define _tmp_227_type 1473 +#define _tmp_228_type 1474 +#define _tmp_229_type 1475 +#define _loop0_231_type 1476 +#define _gather_230_type 1477 +#define _tmp_232_type 1478 +#define _tmp_233_type 1479 +#define _tmp_234_type 1480 +#define _tmp_235_type 1481 +#define _tmp_236_type 1482 +#define _tmp_237_type 1483 +#define _tmp_238_type 1484 +#define _tmp_239_type 1485 +#define _tmp_240_type 1486 +#define _tmp_241_type 1487 +#define _tmp_242_type 1488 +#define _tmp_243_type 1489 +#define _tmp_244_type 1490 +#define _loop0_245_type 1491 +#define _tmp_246_type 1492 +#define _tmp_247_type 1493 +#define _tmp_248_type 1494 +#define _tmp_249_type 1495 +#define _tmp_250_type 1496 +#define _tmp_251_type 1497 +#define _tmp_252_type 1498 +#define _tmp_253_type 1499 +#define _tmp_254_type 1500 +#define _tmp_255_type 1501 +#define _tmp_256_type 1502 +#define _tmp_257_type 1503 +#define _tmp_258_type 1504 +#define _tmp_259_type 1505 +#define _tmp_260_type 1506 +#define _tmp_261_type 1507 +#define _tmp_262_type 1508 +#define _loop0_263_type 1509 +#define _tmp_264_type 1510 +#define _tmp_265_type 1511 +#define _tmp_266_type 1512 +#define _tmp_267_type 1513 +#define _tmp_268_type 1514 +#define _tmp_269_type 1515 +#define _tmp_270_type 1516 +#define _tmp_271_type 1517 +#define _tmp_272_type 1518 +#define _tmp_273_type 1519 +#define _tmp_274_type 1520 +#define _tmp_275_type 1521 +#define _tmp_276_type 1522 +#define _tmp_277_type 1523 +#define _tmp_278_type 1524 +#define _loop0_280_type 1525 +#define _gather_279_type 1526 +#define _tmp_281_type 1527 +#define _tmp_282_type 1528 +#define _tmp_283_type 1529 +#define _tmp_284_type 1530 +#define _tmp_285_type 1531 +#define _tmp_286_type 1532 static mod_ty file_rule(Parser *p); static mod_ty interactive_rule(Parser *p); @@ -859,6 +863,8 @@ static void *invalid_kvpair_rule(Parser *p); static void *invalid_starred_expression_rule(Parser *p); static void *invalid_replacement_field_rule(Parser *p); static void *invalid_conversion_character_rule(Parser *p); +static void *invalid_arithmetic_rule(Parser *p); +static void *invalid_factor_rule(Parser *p); static asdl_seq *_loop0_1_rule(Parser *p); static asdl_seq *_loop0_2_rule(Parser *p); static asdl_seq 
*_loop1_3_rule(Parser *p); @@ -1119,9 +1125,9 @@ static void *_tmp_257_rule(Parser *p); static void *_tmp_258_rule(Parser *p); static void *_tmp_259_rule(Parser *p); static void *_tmp_260_rule(Parser *p); -static asdl_seq *_loop0_261_rule(Parser *p); +static void *_tmp_261_rule(Parser *p); static void *_tmp_262_rule(Parser *p); -static void *_tmp_263_rule(Parser *p); +static asdl_seq *_loop0_263_rule(Parser *p); static void *_tmp_264_rule(Parser *p); static void *_tmp_265_rule(Parser *p); static void *_tmp_266_rule(Parser *p); @@ -1135,14 +1141,16 @@ static void *_tmp_273_rule(Parser *p); static void *_tmp_274_rule(Parser *p); static void *_tmp_275_rule(Parser *p); static void *_tmp_276_rule(Parser *p); -static asdl_seq *_loop0_278_rule(Parser *p); -static asdl_seq *_gather_277_rule(Parser *p); -static void *_tmp_279_rule(Parser *p); -static void *_tmp_280_rule(Parser *p); +static void *_tmp_277_rule(Parser *p); +static void *_tmp_278_rule(Parser *p); +static asdl_seq *_loop0_280_rule(Parser *p); +static asdl_seq *_gather_279_rule(Parser *p); static void *_tmp_281_rule(Parser *p); static void *_tmp_282_rule(Parser *p); static void *_tmp_283_rule(Parser *p); static void *_tmp_284_rule(Parser *p); +static void *_tmp_285_rule(Parser *p); +static void *_tmp_286_rule(Parser *p); // file: statements? $ @@ -11999,7 +12007,7 @@ inversion_rule(Parser *p) Token * _keyword; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 588)) // token='not' + (_keyword = _PyPegen_expect_token(p, 678)) // token='not' && (a = inversion_rule(p)) // inversion ) @@ -12653,7 +12661,7 @@ notin_bitwise_or_rule(Parser *p) Token * _keyword_1; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 588)) // token='not' + (_keyword = _PyPegen_expect_token(p, 678)) // token='not' && (_keyword_1 = _PyPegen_expect_token(p, 672)) // token='in' && @@ -12750,7 +12758,7 @@ isnot_bitwise_or_rule(Parser *p) if ( (_keyword = _PyPegen_expect_token(p, 589)) // token='is' && - (_keyword_1 = _PyPegen_expect_token(p, 588)) // token='not' + (_keyword_1 = _PyPegen_expect_token(p, 678)) // token='not' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -13187,7 +13195,7 @@ bitwise_and_raw(Parser *p) } // Left-recursive -// shift_expr: shift_expr '<<' sum | shift_expr '>>' sum | sum +// shift_expr: shift_expr '<<' sum | shift_expr '>>' sum | invalid_arithmetic | sum static expr_ty shift_expr_raw(Parser *); static expr_ty shift_expr_rule(Parser *p) @@ -13322,6 +13330,25 @@ shift_expr_raw(Parser *p) D(fprintf(stderr, "%*c%s shift_expr[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "shift_expr '>>' sum")); } + if (p->call_invalid_rules) { // invalid_arithmetic + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> shift_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_arithmetic")); + void *invalid_arithmetic_var; + if ( + (invalid_arithmetic_var = invalid_arithmetic_rule(p)) // invalid_arithmetic + ) + { + D(fprintf(stderr, "%*c+ shift_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_arithmetic")); + _res = invalid_arithmetic_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s shift_expr[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "invalid_arithmetic")); + } { // sum if (p->error_indicator) { p->level--; @@ -13515,6 +13542,7 @@ sum_raw(Parser *p) // | term '//' factor // | term '%' factor // | term '@' factor +// | invalid_factor // | factor static expr_ty term_raw(Parser *); static expr_ty @@ -13767,6 +13795,25 @@ term_raw(Parser *p) D(fprintf(stderr, "%*c%s term[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "term '@' factor")); } + if (p->call_invalid_rules) { // invalid_factor + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> term[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_factor")); + void *invalid_factor_var; + if ( + (invalid_factor_var = invalid_factor_rule(p)) // invalid_factor + ) + { + D(fprintf(stderr, "%*c+ term[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_factor")); + _res = invalid_factor_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s term[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_factor")); + } { // factor if (p->error_indicator) { p->level--; @@ -25107,6 +25154,107 @@ invalid_conversion_character_rule(Parser *p) return _res; } +// invalid_arithmetic: sum ('+' | '-' | '*' | '/' | '%' | '//' | '@') 'not' inversion +static void * +invalid_arithmetic_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + _Pypegen_stack_overflow(p); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // sum ('+' | '-' | '*' | '/' | '%' | '//' | '@') 'not' inversion + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_arithmetic[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "sum ('+' | '-' | '*' | '/' | '%' | '//' | '@') 'not' inversion")); + void *_tmp_249_var; + Token * a; + expr_ty b; + expr_ty sum_var; + if ( + (sum_var = sum_rule(p)) // sum + && + (_tmp_249_var = _tmp_249_rule(p)) // '+' | '-' | '*' | '/' | '%' | '//' | '@' + && + (a = _PyPegen_expect_token(p, 678)) // token='not' + && + (b = inversion_rule(p)) // inversion + ) + { + D(fprintf(stderr, "%*c+ invalid_arithmetic[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "sum ('+' | '-' | '*' | '/' | '%' | '//' | '@') 'not' inversion")); + _res = RAISE_SYNTAX_ERROR_KNOWN_RANGE ( a , b , "'not' after an operator must be parenthesized" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_arithmetic[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "sum ('+' | '-' | '*' | '/' | '%' | '//' | '@') 'not' inversion")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// invalid_factor: ('+' | '-' | '~') 'not' factor +static void * +invalid_factor_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + _Pypegen_stack_overflow(p); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // ('+' | '-' | '~') 'not' factor + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_factor[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('+' | '-' | '~') 'not' factor")); + void *_tmp_250_var; + Token * a; + expr_ty b; + if ( + (_tmp_250_var = _tmp_250_rule(p)) // '+' | '-' | '~' + && + (a = _PyPegen_expect_token(p, 678)) // token='not' + && + (b = factor_rule(p)) // factor + ) + { + D(fprintf(stderr, "%*c+ invalid_factor[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "('+' | '-' | '~') 'not' factor")); + _res = RAISE_SYNTAX_ERROR_KNOWN_RANGE ( a , b , "'not' after an operator must be parenthesized" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_factor[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('+' | '-' | '~') 'not' factor")); + } + _res = NULL; + done: + p->level--; + return _res; +} + // _loop0_1: NEWLINE static asdl_seq * _loop0_1_rule(Parser *p) @@ -25922,12 +26070,12 @@ _loop1_14_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_14[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_249_var; + void *_tmp_251_var; while ( - (_tmp_249_var = _tmp_249_rule(p)) // star_targets '=' + (_tmp_251_var = _tmp_251_rule(p)) // star_targets '=' ) { - _res = _tmp_249_var; + _res = _tmp_251_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -26491,12 +26639,12 @@ _loop0_24_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_24[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); - void *_tmp_250_var; + void *_tmp_252_var; while ( - (_tmp_250_var = _tmp_250_rule(p)) // '.' | '...' + (_tmp_252_var = _tmp_252_rule(p)) // '.' | '...' ) { - _res = _tmp_250_var; + _res = _tmp_252_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -26558,12 +26706,12 @@ _loop1_25_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_25[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); - void *_tmp_251_var; + void *_tmp_253_var; while ( - (_tmp_251_var = _tmp_251_rule(p)) // '.' | '...' + (_tmp_253_var = _tmp_253_rule(p)) // '.' | '...' 
) { - _res = _tmp_251_var; + _res = _tmp_253_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -26956,12 +27104,12 @@ _loop1_32_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_32[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('@' named_expression NEWLINE)")); - void *_tmp_252_var; + void *_tmp_254_var; while ( - (_tmp_252_var = _tmp_252_rule(p)) // '@' named_expression NEWLINE + (_tmp_254_var = _tmp_254_rule(p)) // '@' named_expression NEWLINE ) { - _res = _tmp_252_var; + _res = _tmp_254_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -30086,12 +30234,12 @@ _loop1_82_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_82[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' expression)")); - void *_tmp_253_var; + void *_tmp_255_var; while ( - (_tmp_253_var = _tmp_253_rule(p)) // ',' expression + (_tmp_255_var = _tmp_255_rule(p)) // ',' expression ) { - _res = _tmp_253_var; + _res = _tmp_255_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -30158,12 +30306,12 @@ _loop1_83_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_83[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_expression)")); - void *_tmp_254_var; + void *_tmp_256_var; while ( - (_tmp_254_var = _tmp_254_rule(p)) // ',' star_expression + (_tmp_256_var = _tmp_256_rule(p)) // ',' star_expression ) { - _res = _tmp_254_var; + _res = _tmp_256_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -30347,12 +30495,12 @@ _loop1_86_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_86[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('or' conjunction)")); - void *_tmp_255_var; + void *_tmp_257_var; while ( - (_tmp_255_var = _tmp_255_rule(p)) // 'or' conjunction + (_tmp_257_var = _tmp_257_rule(p)) // 'or' conjunction ) { - _res = _tmp_255_var; + _res = _tmp_257_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -30419,12 +30567,12 @@ _loop1_87_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_87[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('and' inversion)")); - void *_tmp_256_var; + void *_tmp_258_var; while ( - (_tmp_256_var = _tmp_256_rule(p)) // 'and' inversion + (_tmp_258_var = _tmp_258_rule(p)) // 'and' inversion ) { - _res = _tmp_256_var; + _res = _tmp_258_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -30611,7 +30759,7 @@ _loop0_91_rule(Parser *p) while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_257_rule(p)) // slice | starred_expression + (elem = _tmp_259_rule(p)) // slice | starred_expression ) { _res = elem; @@ -30676,7 +30824,7 @@ _gather_90_rule(Parser *p) void *elem; asdl_seq * seq; if ( - (elem = _tmp_257_rule(p)) // slice | starred_expression + (elem = _tmp_259_rule(p)) // slice | starred_expression && (seq = _loop0_91_rule(p)) // _loop0_91 ) @@ -32275,12 +32423,12 @@ _loop1_115_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_115[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(fstring | string)")); - void *_tmp_258_var; + void 
*_tmp_260_var; while ( - (_tmp_258_var = _tmp_258_rule(p)) // fstring | string + (_tmp_260_var = _tmp_260_rule(p)) // fstring | string ) { - _res = _tmp_258_var; + _res = _tmp_260_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -32585,12 +32733,12 @@ _loop0_120_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_120[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)")); - void *_tmp_259_var; + void *_tmp_261_var; while ( - (_tmp_259_var = _tmp_259_rule(p)) // 'if' disjunction + (_tmp_261_var = _tmp_261_rule(p)) // 'if' disjunction ) { - _res = _tmp_259_var; + _res = _tmp_261_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -32652,12 +32800,12 @@ _loop0_121_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_121[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)")); - void *_tmp_260_var; + void *_tmp_262_var; while ( - (_tmp_260_var = _tmp_260_rule(p)) // 'if' disjunction + (_tmp_262_var = _tmp_262_rule(p)) // 'if' disjunction ) { - _res = _tmp_260_var; + _res = _tmp_262_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -32710,20 +32858,20 @@ _tmp_122_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _tmp_122[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "bitwise_or ((',' bitwise_or))* ','?")); - asdl_seq * _loop0_261_var; + asdl_seq * _loop0_263_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty bitwise_or_var; if ( (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or && - (_loop0_261_var = _loop0_261_rule(p)) // ((',' bitwise_or))* + (_loop0_263_var = _loop0_263_rule(p)) // ((',' bitwise_or))* && (_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? 
) { D(fprintf(stderr, "%*c+ _tmp_122[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_or ((',' bitwise_or))* ','?")); - _res = _PyPegen_dummy_name(p, bitwise_or_var, _loop0_261_var, _opt_var); + _res = _PyPegen_dummy_name(p, bitwise_or_var, _loop0_263_var, _opt_var); goto done; } p->mark = _mark; @@ -32828,7 +32976,7 @@ _loop0_125_rule(Parser *p) while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_262_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' + (elem = _tmp_264_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' ) { _res = elem; @@ -32894,7 +33042,7 @@ _gather_124_rule(Parser *p) void *elem; asdl_seq * seq; if ( - (elem = _tmp_262_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' + (elem = _tmp_264_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' && (seq = _loop0_125_rule(p)) // _loop0_125 ) @@ -33455,12 +33603,12 @@ _loop0_135_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)")); - void *_tmp_263_var; + void *_tmp_265_var; while ( - (_tmp_263_var = _tmp_263_rule(p)) // ',' star_target + (_tmp_265_var = _tmp_265_rule(p)) // ',' star_target ) { - _res = _tmp_263_var; + _res = _tmp_265_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -33639,12 +33787,12 @@ _loop1_138_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_138[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)")); - void *_tmp_264_var; + void *_tmp_266_var; while ( - (_tmp_264_var = _tmp_264_rule(p)) // ',' star_target + (_tmp_266_var = _tmp_266_rule(p)) // ',' star_target ) { - _res = _tmp_264_var; + _res = _tmp_266_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -34370,13 +34518,13 @@ _tmp_151_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs)")); - void *_tmp_265_var; + void *_tmp_267_var; if ( - (_tmp_265_var = _tmp_265_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs + (_tmp_267_var = _tmp_267_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs ) { D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs)")); - _res = _tmp_265_var; + _res = _tmp_267_var; goto done; } p->mark = _mark; @@ -35142,12 +35290,12 @@ _loop0_163_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_163[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_266_var; + void *_tmp_268_var; while ( - (_tmp_266_var = _tmp_266_rule(p)) // star_targets '=' + (_tmp_268_var = _tmp_268_rule(p)) // star_targets '=' ) { - _res = _tmp_266_var; + _res = _tmp_268_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -35209,12 +35357,12 @@ _loop0_164_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_164[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void 
*_tmp_267_var; + void *_tmp_269_var; while ( - (_tmp_267_var = _tmp_267_rule(p)) // star_targets '=' + (_tmp_269_var = _tmp_269_rule(p)) // star_targets '=' ) { - _res = _tmp_267_var; + _res = _tmp_269_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -36241,15 +36389,15 @@ _tmp_180_rule(Parser *p) } D(fprintf(stderr, "%*c> _tmp_180[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); Token * _literal; - void *_tmp_268_var; + void *_tmp_270_var; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_268_var = _tmp_268_rule(p)) // ')' | '**' + (_tmp_270_var = _tmp_270_rule(p)) // ')' | '**' ) { D(fprintf(stderr, "%*c+ _tmp_180[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); - _res = _PyPegen_dummy_name(p, _literal, _tmp_268_var); + _res = _PyPegen_dummy_name(p, _literal, _tmp_270_var); goto done; } p->mark = _mark; @@ -37397,15 +37545,15 @@ _tmp_198_rule(Parser *p) } D(fprintf(stderr, "%*c> _tmp_198[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); Token * _literal; - void *_tmp_269_var; + void *_tmp_271_var; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_269_var = _tmp_269_rule(p)) // ':' | '**' + (_tmp_271_var = _tmp_271_rule(p)) // ':' | '**' ) { D(fprintf(stderr, "%*c+ _tmp_198[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); - _res = _PyPegen_dummy_name(p, _literal, _tmp_269_var); + _res = _PyPegen_dummy_name(p, _literal, _tmp_271_var); goto done; } p->mark = _mark; @@ -37901,7 +38049,7 @@ _loop0_207_rule(Parser *p) while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_270_rule(p)) // expression ['as' star_target] + (elem = _tmp_272_rule(p)) // expression ['as' star_target] ) { _res = elem; @@ -37966,7 +38114,7 @@ _gather_206_rule(Parser *p) void *elem; asdl_seq * seq; if ( - (elem = _tmp_270_rule(p)) // expression ['as' star_target] + (elem = _tmp_272_rule(p)) // expression ['as' star_target] && (seq = _loop0_207_rule(p)) // _loop0_207 ) @@ -38018,7 +38166,7 @@ _loop0_209_rule(Parser *p) while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_271_rule(p)) // expressions ['as' star_target] + (elem = _tmp_273_rule(p)) // expressions ['as' star_target] ) { _res = elem; @@ -38083,7 +38231,7 @@ _gather_208_rule(Parser *p) void *elem; asdl_seq * seq; if ( - (elem = _tmp_271_rule(p)) // expressions ['as' star_target] + (elem = _tmp_273_rule(p)) // expressions ['as' star_target] && (seq = _loop0_209_rule(p)) // _loop0_209 ) @@ -38135,7 +38283,7 @@ _loop0_211_rule(Parser *p) while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_272_rule(p)) // expression ['as' star_target] + (elem = _tmp_274_rule(p)) // expression ['as' star_target] ) { _res = elem; @@ -38200,7 +38348,7 @@ _gather_210_rule(Parser *p) void *elem; asdl_seq * seq; if ( - (elem = _tmp_272_rule(p)) // expression ['as' star_target] + (elem = _tmp_274_rule(p)) // expression ['as' star_target] && (seq = _loop0_211_rule(p)) // _loop0_211 ) @@ -38252,7 +38400,7 @@ _loop0_213_rule(Parser *p) while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_273_rule(p)) // expressions ['as' star_target] + (elem = _tmp_275_rule(p)) // expressions ['as' star_target] ) { _res = elem; @@ -38317,7 +38465,7 @@ _gather_212_rule(Parser *p) void *elem; asdl_seq * seq; if ( - (elem = _tmp_273_rule(p)) // expressions ['as' 
star_target] + (elem = _tmp_275_rule(p)) // expressions ['as' star_target] && (seq = _loop0_213_rule(p)) // _loop0_213 ) @@ -38737,7 +38885,7 @@ _tmp_220_rule(Parser *p) if ( (expression_var = expression_rule(p)) // expression && - (_opt_var = _tmp_274_rule(p), !p->error_indicator) // ['as' NAME] + (_opt_var = _tmp_276_rule(p), !p->error_indicator) // ['as' NAME] ) { D(fprintf(stderr, "%*c+ _tmp_220[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' NAME]")); @@ -40252,9 +40400,237 @@ _tmp_248_rule(Parser *p) return _res; } -// _tmp_249: star_targets '=' +// _tmp_249: '+' | '-' | '*' | '/' | '%' | '//' | '@' static void * _tmp_249_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + _Pypegen_stack_overflow(p); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '+' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_249[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'+'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 14)) // token='+' + ) + { + D(fprintf(stderr, "%*c+ _tmp_249[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'+'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_249[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'+'")); + } + { // '-' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_249[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'-'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 15)) // token='-' + ) + { + D(fprintf(stderr, "%*c+ _tmp_249[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'-'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_249[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'-'")); + } + { // '*' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_249[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 16)) // token='*' + ) + { + D(fprintf(stderr, "%*c+ _tmp_249[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_249[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*'")); + } + { // '/' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_249[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 17)) // token='/' + ) + { + D(fprintf(stderr, "%*c+ _tmp_249[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_249[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'/'")); + } + { // '%' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_249[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'%'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 24)) // token='%' + ) + { + D(fprintf(stderr, "%*c+ _tmp_249[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'%'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_249[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'%'")); + } + { // '//' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_249[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'//'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 47)) // token='//' + ) + { + D(fprintf(stderr, "%*c+ _tmp_249[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'//'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_249[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'//'")); + } + { // '@' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_249[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 49)) // token='@' + ) + { + D(fprintf(stderr, "%*c+ _tmp_249[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_249[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'@'")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_250: '+' | '-' | '~' +static void * +_tmp_250_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + _Pypegen_stack_overflow(p); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '+' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_250[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'+'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 14)) // token='+' + ) + { + D(fprintf(stderr, "%*c+ _tmp_250[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'+'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_250[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'+'")); + } + { // '-' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_250[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'-'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 15)) // token='-' + ) + { + D(fprintf(stderr, "%*c+ _tmp_250[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'-'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_250[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'-'")); + } + { // '~' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_250[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'~'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 31)) // token='~' + ) + { + D(fprintf(stderr, "%*c+ _tmp_250[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'~'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_250[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'~'")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_251: star_targets '=' +static void * +_tmp_251_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40270,7 +40646,7 @@ _tmp_249_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_249[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c> _tmp_251[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); Token * _literal; expr_ty z; if ( @@ -40279,7 +40655,7 @@ _tmp_249_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_249[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c+ _tmp_251[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -40289,7 +40665,7 @@ _tmp_249_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_249[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_251[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); } _res = NULL; @@ -40298,9 +40674,9 @@ _tmp_249_rule(Parser *p) return _res; } -// _tmp_250: '.' | '...' +// _tmp_252: '.' | '...' static void * -_tmp_250_rule(Parser *p) +_tmp_252_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40316,18 +40692,18 @@ _tmp_250_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_250[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c> _tmp_252[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - D(fprintf(stderr, "%*c+ _tmp_250[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c+ _tmp_252[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_250[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_252[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'")); } { // '...' @@ -40335,18 +40711,18 @@ _tmp_250_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_250[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c> _tmp_252[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 52)) // token='...' 
) { - D(fprintf(stderr, "%*c+ _tmp_250[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c+ _tmp_252[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_250[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_252[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'...'")); } _res = NULL; @@ -40355,9 +40731,9 @@ _tmp_250_rule(Parser *p) return _res; } -// _tmp_251: '.' | '...' +// _tmp_253: '.' | '...' static void * -_tmp_251_rule(Parser *p) +_tmp_253_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40373,18 +40749,18 @@ _tmp_251_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_251[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c> _tmp_253[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - D(fprintf(stderr, "%*c+ _tmp_251[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c+ _tmp_253[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_251[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_253[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'")); } { // '...' @@ -40392,18 +40768,18 @@ _tmp_251_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_251[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c> _tmp_253[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 52)) // token='...' ) { - D(fprintf(stderr, "%*c+ _tmp_251[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c+ _tmp_253[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_251[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_253[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'...'")); } _res = NULL; @@ -40412,9 +40788,9 @@ _tmp_251_rule(Parser *p) return _res; } -// _tmp_252: '@' named_expression NEWLINE +// _tmp_254: '@' named_expression NEWLINE static void * -_tmp_252_rule(Parser *p) +_tmp_254_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40430,7 +40806,7 @@ _tmp_252_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_252[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); + D(fprintf(stderr, "%*c> _tmp_254[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); Token * _literal; expr_ty f; Token * newline_var; @@ -40442,7 +40818,7 @@ _tmp_252_rule(Parser *p) (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - D(fprintf(stderr, "%*c+ _tmp_252[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); + D(fprintf(stderr, "%*c+ _tmp_254[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); _res = f; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -40452,7 +40828,7 @@ _tmp_252_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_252[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_254[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'@' named_expression NEWLINE")); } _res = NULL; @@ -40461,9 +40837,9 @@ _tmp_252_rule(Parser *p) return _res; } -// _tmp_253: ',' expression +// _tmp_255: ',' expression static void * -_tmp_253_rule(Parser *p) +_tmp_255_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40479,7 +40855,7 @@ _tmp_253_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_253[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c> _tmp_255[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); Token * _literal; expr_ty c; if ( @@ -40488,7 +40864,7 @@ _tmp_253_rule(Parser *p) (c = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ _tmp_253[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c+ _tmp_255[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -40498,7 +40874,7 @@ _tmp_253_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_253[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_255[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' expression")); } _res = NULL; @@ -40507,9 +40883,9 @@ _tmp_253_rule(Parser *p) return _res; } -// _tmp_254: ',' star_expression +// _tmp_256: ',' star_expression static void * -_tmp_254_rule(Parser *p) +_tmp_256_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40525,7 +40901,7 @@ _tmp_254_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_254[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression")); + D(fprintf(stderr, "%*c> _tmp_256[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression")); Token * _literal; expr_ty c; if ( @@ -40534,7 +40910,7 @@ _tmp_254_rule(Parser *p) (c = star_expression_rule(p)) // star_expression ) { - D(fprintf(stderr, "%*c+ _tmp_254[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression")); + D(fprintf(stderr, "%*c+ _tmp_256[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -40544,7 +40920,7 @@ _tmp_254_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_254[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_256[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_expression")); } _res = NULL; @@ -40553,9 +40929,9 @@ _tmp_254_rule(Parser *p) return _res; } -// _tmp_255: 'or' conjunction +// _tmp_257: 'or' conjunction static void * -_tmp_255_rule(Parser *p) +_tmp_257_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40571,7 +40947,7 @@ _tmp_255_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_255[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); + D(fprintf(stderr, "%*c> _tmp_257[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); Token * _keyword; expr_ty c; if ( @@ -40580,7 +40956,7 @@ _tmp_255_rule(Parser *p) (c = conjunction_rule(p)) // conjunction ) { - D(fprintf(stderr, "%*c+ _tmp_255[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); + D(fprintf(stderr, "%*c+ _tmp_257[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -40590,7 +40966,7 @@ _tmp_255_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_255[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_257[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'or' conjunction")); } _res = NULL; @@ -40599,9 +40975,9 @@ _tmp_255_rule(Parser *p) return _res; } -// _tmp_256: 'and' inversion +// _tmp_258: 'and' inversion static void * -_tmp_256_rule(Parser *p) +_tmp_258_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40617,7 +40993,7 @@ _tmp_256_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_256[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion")); + D(fprintf(stderr, "%*c> _tmp_258[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion")); Token * _keyword; expr_ty c; if ( @@ -40626,7 +41002,7 @@ _tmp_256_rule(Parser *p) (c = inversion_rule(p)) // inversion ) { - D(fprintf(stderr, "%*c+ _tmp_256[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion")); + D(fprintf(stderr, "%*c+ _tmp_258[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -40636,7 +41012,7 @@ _tmp_256_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_256[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_258[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'and' inversion")); } _res = NULL; @@ -40645,9 +41021,9 @@ _tmp_256_rule(Parser *p) return _res; } -// _tmp_257: slice | starred_expression +// _tmp_259: slice | starred_expression static void * -_tmp_257_rule(Parser *p) +_tmp_259_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40663,18 +41039,18 @@ _tmp_257_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_257[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slice")); + D(fprintf(stderr, "%*c> _tmp_259[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slice")); expr_ty slice_var; if ( (slice_var = slice_rule(p)) // slice ) { - D(fprintf(stderr, "%*c+ _tmp_257[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slice")); + D(fprintf(stderr, "%*c+ _tmp_259[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slice")); _res = slice_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_257[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_259[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slice")); } { // starred_expression @@ -40682,18 +41058,18 @@ _tmp_257_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_257[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); + D(fprintf(stderr, "%*c> _tmp_259[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); expr_ty starred_expression_var; if ( (starred_expression_var = starred_expression_rule(p)) // starred_expression ) { - D(fprintf(stderr, "%*c+ _tmp_257[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); + D(fprintf(stderr, "%*c+ _tmp_259[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); _res = starred_expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_257[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_259[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "starred_expression")); } _res = NULL; @@ -40702,9 +41078,9 @@ _tmp_257_rule(Parser *p) return _res; } -// _tmp_258: fstring | string +// _tmp_260: fstring | string static void * -_tmp_258_rule(Parser *p) +_tmp_260_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40720,18 +41096,18 @@ _tmp_258_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_258[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring")); + D(fprintf(stderr, "%*c> _tmp_260[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring")); expr_ty fstring_var; if ( (fstring_var = fstring_rule(p)) // fstring ) { - D(fprintf(stderr, "%*c+ _tmp_258[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "fstring")); + D(fprintf(stderr, "%*c+ _tmp_260[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "fstring")); _res = fstring_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_258[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_260[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "fstring")); } { // string @@ -40739,18 +41115,18 @@ _tmp_258_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_258[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "string")); + D(fprintf(stderr, "%*c> _tmp_260[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "string")); expr_ty string_var; if ( (string_var = string_rule(p)) // string ) { - D(fprintf(stderr, "%*c+ _tmp_258[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "string")); + D(fprintf(stderr, "%*c+ _tmp_260[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "string")); _res = string_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_258[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_260[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "string")); } _res = NULL; @@ -40759,9 +41135,9 @@ _tmp_258_rule(Parser *p) return _res; } -// _tmp_259: 'if' disjunction +// _tmp_261: 'if' disjunction static void * -_tmp_259_rule(Parser *p) +_tmp_261_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40777,7 +41153,7 @@ _tmp_259_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_259[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c> _tmp_261[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); Token * _keyword; expr_ty z; if ( @@ -40786,7 +41162,7 @@ _tmp_259_rule(Parser *p) (z = disjunction_rule(p)) // disjunction ) { - D(fprintf(stderr, "%*c+ _tmp_259[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c+ _tmp_261[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -40796,7 +41172,7 @@ _tmp_259_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_259[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_261[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'if' disjunction")); } _res = NULL; @@ -40805,9 +41181,9 @@ _tmp_259_rule(Parser *p) return _res; } -// _tmp_260: 'if' disjunction +// _tmp_262: 'if' disjunction static void * -_tmp_260_rule(Parser *p) +_tmp_262_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40823,7 +41199,7 @@ _tmp_260_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_260[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c> _tmp_262[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); Token * _keyword; expr_ty z; if ( @@ -40832,7 +41208,7 @@ _tmp_260_rule(Parser *p) (z = disjunction_rule(p)) // disjunction ) { - D(fprintf(stderr, "%*c+ _tmp_260[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c+ _tmp_262[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -40842,7 +41218,7 @@ _tmp_260_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_260[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_262[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'if' disjunction")); } _res = NULL; @@ -40851,9 +41227,9 @@ _tmp_260_rule(Parser *p) return _res; } -// _loop0_261: (',' bitwise_or) +// _loop0_263: (',' bitwise_or) static asdl_seq * -_loop0_261_rule(Parser *p) +_loop0_263_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40878,13 +41254,13 @@ _loop0_261_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_261[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' bitwise_or)")); - void *_tmp_275_var; + D(fprintf(stderr, "%*c> _loop0_263[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' bitwise_or)")); + void *_tmp_277_var; while ( - (_tmp_275_var = _tmp_275_rule(p)) // ',' bitwise_or + (_tmp_277_var = _tmp_277_rule(p)) // ',' bitwise_or ) { - _res = _tmp_275_var; + _res = _tmp_277_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -40901,7 +41277,7 @@ _loop0_261_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_261[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_263[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(',' bitwise_or)")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -40918,9 +41294,9 @@ _loop0_261_rule(Parser *p) return _seq; } -// _tmp_262: starred_expression | (assignment_expression | expression !':=') !'=' +// _tmp_264: starred_expression | (assignment_expression | expression !':=') !'=' static void * -_tmp_262_rule(Parser *p) +_tmp_264_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40936,18 +41312,18 @@ _tmp_262_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_262[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); + D(fprintf(stderr, "%*c> _tmp_264[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); expr_ty starred_expression_var; if ( (starred_expression_var = starred_expression_rule(p)) // starred_expression ) { - D(fprintf(stderr, "%*c+ _tmp_262[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); + D(fprintf(stderr, "%*c+ _tmp_264[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); _res = starred_expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_262[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_264[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "starred_expression")); } { // (assignment_expression | expression !':=') !'=' @@ -40955,20 +41331,20 @@ _tmp_262_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_262[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); - void *_tmp_276_var; + D(fprintf(stderr, "%*c> _tmp_264[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); + void *_tmp_278_var; if ( - (_tmp_276_var = _tmp_276_rule(p)) // assignment_expression | expression !':=' + (_tmp_278_var = _tmp_278_rule(p)) // assignment_expression | expression !':=' && _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 22) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_262[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); - _res = _tmp_276_var; + D(fprintf(stderr, "%*c+ _tmp_264[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); + _res = _tmp_278_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_262[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_264[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(assignment_expression | expression !':=') !'='")); } _res = NULL; @@ -40977,9 +41353,9 @@ _tmp_262_rule(Parser *p) return _res; } -// _tmp_263: ',' star_target +// _tmp_265: ',' star_target static void * -_tmp_263_rule(Parser *p) +_tmp_265_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40995,7 +41371,7 @@ _tmp_263_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_263[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c> _tmp_265[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); Token * _literal; expr_ty c; if ( @@ -41004,7 +41380,7 @@ _tmp_263_rule(Parser *p) (c = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_263[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c+ _tmp_265[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -41014,7 +41390,7 @@ _tmp_263_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_263[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_265[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_target")); } _res = NULL; @@ -41023,9 +41399,9 @@ _tmp_263_rule(Parser *p) return _res; } -// _tmp_264: ',' star_target +// _tmp_266: ',' star_target static void * -_tmp_264_rule(Parser *p) +_tmp_266_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41041,7 +41417,7 @@ _tmp_264_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_264[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c> _tmp_266[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); Token * _literal; expr_ty c; if ( @@ -41050,7 +41426,7 @@ _tmp_264_rule(Parser *p) (c = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_264[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c+ _tmp_266[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -41060,7 +41436,7 @@ _tmp_264_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_264[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_266[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' star_target")); } _res = NULL; @@ -41069,10 +41445,10 @@ _tmp_264_rule(Parser *p) return _res; } -// _tmp_265: +// _tmp_267: // | ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs static void * -_tmp_265_rule(Parser *p) +_tmp_267_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41088,24 +41464,24 @@ _tmp_265_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_265[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs")); - asdl_seq * _gather_277_var; + D(fprintf(stderr, "%*c> _tmp_267[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs")); + asdl_seq * _gather_279_var; Token * _literal; asdl_seq* kwargs_var; if ( - (_gather_277_var = _gather_277_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ + (_gather_279_var = _gather_279_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ && (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (kwargs_var = kwargs_rule(p)) // kwargs ) { - D(fprintf(stderr, "%*c+ _tmp_265[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs")); - _res = _PyPegen_dummy_name(p, _gather_277_var, _literal, kwargs_var); + D(fprintf(stderr, "%*c+ _tmp_267[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs")); + _res = _PyPegen_dummy_name(p, _gather_279_var, _literal, kwargs_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_265[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_267[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs")); } _res = NULL; @@ -41114,9 +41490,9 @@ _tmp_265_rule(Parser *p) return _res; } -// _tmp_266: star_targets '=' +// _tmp_268: star_targets '=' static void * -_tmp_266_rule(Parser *p) +_tmp_268_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41132,7 +41508,7 @@ _tmp_266_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_266[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c> _tmp_268[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); Token * _literal; expr_ty star_targets_var; if ( @@ -41141,12 +41517,12 @@ _tmp_266_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_266[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c+ _tmp_268[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); _res = _PyPegen_dummy_name(p, star_targets_var, _literal); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_266[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_268[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_targets '='")); } _res = NULL; @@ -41155,9 +41531,9 @@ _tmp_266_rule(Parser *p) return _res; } -// _tmp_267: star_targets '=' +// _tmp_269: star_targets '=' static void * -_tmp_267_rule(Parser *p) +_tmp_269_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41173,7 +41549,7 @@ _tmp_267_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_267[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c> _tmp_269[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); Token * _literal; expr_ty star_targets_var; if ( @@ -41182,12 +41558,12 @@ _tmp_267_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_267[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c+ _tmp_269[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); _res = _PyPegen_dummy_name(p, star_targets_var, _literal); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_267[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_269[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); } _res = NULL; @@ -41196,9 +41572,9 @@ _tmp_267_rule(Parser *p) return _res; } -// _tmp_268: ')' | '**' +// _tmp_270: ')' | '**' static void * -_tmp_268_rule(Parser *p) +_tmp_270_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41214,18 +41590,18 @@ _tmp_268_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_268[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_270[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_268[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_270[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_268[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_270[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); } { // '**' @@ -41233,18 +41609,18 @@ _tmp_268_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_268[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c> _tmp_270[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - D(fprintf(stderr, "%*c+ _tmp_268[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c+ _tmp_270[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_268[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_270[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'**'")); } _res = NULL; @@ -41253,9 +41629,9 @@ _tmp_268_rule(Parser *p) return _res; } -// _tmp_269: ':' | '**' +// _tmp_271: ':' | '**' static void * -_tmp_269_rule(Parser *p) +_tmp_271_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41271,18 +41647,18 @@ _tmp_269_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_269[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_271[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_269[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_271[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_269[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_271[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); } { // '**' @@ -41290,18 +41666,18 @@ _tmp_269_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_269[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c> _tmp_271[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - D(fprintf(stderr, "%*c+ _tmp_269[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c+ _tmp_271[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_269[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_271[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'")); } _res = NULL; @@ -41310,9 +41686,9 @@ _tmp_269_rule(Parser *p) return _res; } -// _tmp_270: expression ['as' star_target] +// _tmp_272: expression ['as' star_target] static void * -_tmp_270_rule(Parser *p) +_tmp_272_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41328,22 +41704,22 @@ _tmp_270_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_270[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); + D(fprintf(stderr, "%*c> _tmp_272[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty expression_var; if ( (expression_var = expression_rule(p)) // expression && - (_opt_var = _tmp_279_rule(p), !p->error_indicator) // ['as' star_target] + (_opt_var = _tmp_281_rule(p), !p->error_indicator) // ['as' star_target] ) { - D(fprintf(stderr, "%*c+ _tmp_270[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); + D(fprintf(stderr, "%*c+ _tmp_272[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); _res = _PyPegen_dummy_name(p, expression_var, _opt_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_270[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_272[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression ['as' star_target]")); } _res = NULL; @@ -41352,9 +41728,9 @@ _tmp_270_rule(Parser *p) return _res; } -// _tmp_271: expressions ['as' star_target] +// _tmp_273: expressions ['as' star_target] static void * -_tmp_271_rule(Parser *p) +_tmp_273_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41370,22 +41746,22 @@ _tmp_271_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_271[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); + D(fprintf(stderr, "%*c> _tmp_273[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty expressions_var; if ( (expressions_var = expressions_rule(p)) // expressions && - (_opt_var = _tmp_280_rule(p), !p->error_indicator) // ['as' star_target] + (_opt_var = _tmp_282_rule(p), !p->error_indicator) // ['as' star_target] ) { - D(fprintf(stderr, "%*c+ _tmp_271[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); + D(fprintf(stderr, "%*c+ _tmp_273[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); _res = _PyPegen_dummy_name(p, expressions_var, _opt_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_271[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_273[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expressions ['as' star_target]")); } _res = NULL; @@ -41394,9 +41770,9 @@ _tmp_271_rule(Parser *p) return _res; } -// _tmp_272: expression ['as' star_target] +// _tmp_274: expression ['as' star_target] static void * -_tmp_272_rule(Parser *p) +_tmp_274_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41412,22 +41788,22 @@ _tmp_272_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_272[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); + D(fprintf(stderr, "%*c> _tmp_274[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty expression_var; if ( (expression_var = expression_rule(p)) // expression && - (_opt_var = _tmp_281_rule(p), !p->error_indicator) // ['as' star_target] + (_opt_var = _tmp_283_rule(p), !p->error_indicator) // ['as' star_target] ) { - D(fprintf(stderr, "%*c+ _tmp_272[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); + D(fprintf(stderr, "%*c+ _tmp_274[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); _res = _PyPegen_dummy_name(p, expression_var, _opt_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_272[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_274[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression ['as' star_target]")); } _res = NULL; @@ -41436,9 +41812,9 @@ _tmp_272_rule(Parser *p) return _res; } -// _tmp_273: expressions ['as' star_target] +// _tmp_275: expressions ['as' star_target] static void * -_tmp_273_rule(Parser *p) +_tmp_275_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41454,22 +41830,22 @@ _tmp_273_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_273[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); + D(fprintf(stderr, "%*c> _tmp_275[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty expressions_var; if ( (expressions_var = expressions_rule(p)) // expressions && - (_opt_var = _tmp_282_rule(p), !p->error_indicator) // ['as' star_target] + (_opt_var = _tmp_284_rule(p), !p->error_indicator) // ['as' star_target] ) { - D(fprintf(stderr, "%*c+ _tmp_273[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); + D(fprintf(stderr, "%*c+ _tmp_275[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); _res = _PyPegen_dummy_name(p, expressions_var, _opt_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_273[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_275[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expressions ['as' star_target]")); } _res = NULL; @@ -41478,9 +41854,9 @@ _tmp_273_rule(Parser *p) return _res; } -// _tmp_274: 'as' NAME +// _tmp_276: 'as' NAME static void * -_tmp_274_rule(Parser *p) +_tmp_276_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41496,7 +41872,7 @@ _tmp_274_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_274[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_276[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty name_var; if ( @@ -41505,12 +41881,12 @@ _tmp_274_rule(Parser *p) (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_274[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_276[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = _PyPegen_dummy_name(p, _keyword, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_274[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_276[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -41519,9 +41895,9 @@ _tmp_274_rule(Parser *p) return _res; } -// _tmp_275: ',' bitwise_or +// _tmp_277: ',' bitwise_or static void * -_tmp_275_rule(Parser *p) +_tmp_277_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41537,7 +41913,7 @@ _tmp_275_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_275[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' bitwise_or")); + D(fprintf(stderr, "%*c> _tmp_277[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' bitwise_or")); Token * _literal; expr_ty bitwise_or_var; if ( @@ -41546,12 +41922,12 @@ _tmp_275_rule(Parser *p) (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or ) { - D(fprintf(stderr, "%*c+ _tmp_275[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' bitwise_or")); + D(fprintf(stderr, "%*c+ _tmp_277[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' bitwise_or")); _res = _PyPegen_dummy_name(p, _literal, bitwise_or_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_275[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_277[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' bitwise_or")); } _res = NULL; @@ -41560,9 +41936,9 @@ _tmp_275_rule(Parser *p) return _res; } -// _tmp_276: assignment_expression | expression !':=' +// _tmp_278: assignment_expression | expression !':=' static void * -_tmp_276_rule(Parser *p) +_tmp_278_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41578,18 +41954,18 @@ _tmp_276_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_276[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression")); + D(fprintf(stderr, "%*c> _tmp_278[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression")); expr_ty assignment_expression_var; if ( (assignment_expression_var = assignment_expression_rule(p)) // assignment_expression ) { - D(fprintf(stderr, "%*c+ _tmp_276[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression")); + D(fprintf(stderr, "%*c+ _tmp_278[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression")); _res = assignment_expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_276[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_278[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "assignment_expression")); } { // expression !':=' @@ -41597,7 +41973,7 @@ _tmp_276_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_276[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='")); + D(fprintf(stderr, "%*c> _tmp_278[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='")); expr_ty expression_var; if ( (expression_var = expression_rule(p)) // expression @@ -41605,12 +41981,12 @@ _tmp_276_rule(Parser *p) _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) // token=':=' ) { - D(fprintf(stderr, "%*c+ _tmp_276[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='")); + D(fprintf(stderr, "%*c+ _tmp_278[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='")); _res = expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_276[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_278[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression !':='")); } _res = NULL; @@ -41619,9 +41995,9 @@ _tmp_276_rule(Parser *p) return _res; } -// _loop0_278: ',' (starred_expression | (assignment_expression | expression !':=') !'=') +// _loop0_280: ',' (starred_expression | (assignment_expression | expression !':=') !'=') static asdl_seq * -_loop0_278_rule(Parser *p) +_loop0_280_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41646,13 +42022,13 @@ _loop0_278_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_278[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (starred_expression | (assignment_expression | expression !':=') !'=')")); + D(fprintf(stderr, "%*c> _loop0_280[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (starred_expression | (assignment_expression | expression !':=') !'=')")); Token * _literal; void *elem; while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_283_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' + (elem = _tmp_285_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' ) { _res = elem; @@ -41678,7 +42054,7 @@ _loop0_278_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_278[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_280[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (starred_expression | (assignment_expression | expression !':=') !'=')")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -41695,10 +42071,10 @@ _loop0_278_rule(Parser *p) return _seq; } -// _gather_277: -// | (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_278 +// _gather_279: +// | (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_280 static asdl_seq * -_gather_277_rule(Parser *p) +_gather_279_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41709,27 +42085,27 @@ _gather_277_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_278 + { // (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_280 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_277[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_278")); + D(fprintf(stderr, "%*c> _gather_279[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_280")); void *elem; asdl_seq * seq; if ( - (elem = _tmp_283_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' + (elem = _tmp_285_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' && - (seq = _loop0_278_rule(p)) // _loop0_278 + (seq = _loop0_280_rule(p)) // _loop0_280 ) { - D(fprintf(stderr, "%*c+ _gather_277[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_278")); + D(fprintf(stderr, "%*c+ _gather_279[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_280")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_277[%d-%d]: %s failed!\n", p->level, ' ', - 
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_278")); + D(fprintf(stderr, "%*c%s _gather_279[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_280")); } _res = NULL; done: @@ -41737,9 +42113,9 @@ _gather_277_rule(Parser *p) return _res; } -// _tmp_279: 'as' star_target +// _tmp_281: 'as' star_target static void * -_tmp_279_rule(Parser *p) +_tmp_281_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41755,7 +42131,7 @@ _tmp_279_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_279[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c> _tmp_281[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); Token * _keyword; expr_ty star_target_var; if ( @@ -41764,12 +42140,12 @@ _tmp_279_rule(Parser *p) (star_target_var = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_279[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c+ _tmp_281[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); _res = _PyPegen_dummy_name(p, _keyword, star_target_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_279[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_281[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' star_target")); } _res = NULL; @@ -41778,9 +42154,9 @@ _tmp_279_rule(Parser *p) return _res; } -// _tmp_280: 'as' star_target +// _tmp_282: 'as' star_target static void * -_tmp_280_rule(Parser *p) +_tmp_282_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41796,7 +42172,7 @@ _tmp_280_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_280[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c> _tmp_282[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); Token * _keyword; expr_ty star_target_var; if ( @@ -41805,12 +42181,12 @@ _tmp_280_rule(Parser *p) (star_target_var = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_280[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c+ _tmp_282[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); _res = _PyPegen_dummy_name(p, _keyword, star_target_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_280[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_282[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'as' star_target")); } _res = NULL; @@ -41819,9 +42195,9 @@ _tmp_280_rule(Parser *p) return _res; } -// _tmp_281: 'as' star_target +// _tmp_283: 'as' star_target static void * -_tmp_281_rule(Parser *p) +_tmp_283_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41837,7 +42213,7 @@ _tmp_281_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_281[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c> _tmp_283[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); Token * _keyword; expr_ty star_target_var; if ( @@ -41846,12 +42222,12 @@ _tmp_281_rule(Parser *p) (star_target_var = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_281[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c+ _tmp_283[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); _res = _PyPegen_dummy_name(p, _keyword, star_target_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_281[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_283[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' star_target")); } _res = NULL; @@ -41860,9 +42236,9 @@ _tmp_281_rule(Parser *p) return _res; } -// _tmp_282: 'as' star_target +// _tmp_284: 'as' star_target static void * -_tmp_282_rule(Parser *p) +_tmp_284_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41878,7 +42254,7 @@ _tmp_282_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_282[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c> _tmp_284[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); Token * _keyword; expr_ty star_target_var; if ( @@ -41887,12 +42263,12 @@ _tmp_282_rule(Parser *p) (star_target_var = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_282[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c+ _tmp_284[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); _res = _PyPegen_dummy_name(p, _keyword, star_target_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_282[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_284[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'as' star_target")); } _res = NULL; @@ -41901,9 +42277,9 @@ _tmp_282_rule(Parser *p) return _res; } -// _tmp_283: starred_expression | (assignment_expression | expression !':=') !'=' +// _tmp_285: starred_expression | (assignment_expression | expression !':=') !'=' static void * -_tmp_283_rule(Parser *p) +_tmp_285_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41919,18 +42295,18 @@ _tmp_283_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_283[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); + D(fprintf(stderr, "%*c> _tmp_285[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); expr_ty starred_expression_var; if ( (starred_expression_var = starred_expression_rule(p)) // starred_expression ) { - D(fprintf(stderr, "%*c+ _tmp_283[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); + D(fprintf(stderr, "%*c+ _tmp_285[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); _res = starred_expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_283[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_285[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "starred_expression")); } { // (assignment_expression | expression !':=') !'=' @@ -41938,20 +42314,20 @@ _tmp_283_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_283[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); - void *_tmp_284_var; + D(fprintf(stderr, "%*c> _tmp_285[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); + void *_tmp_286_var; if ( - (_tmp_284_var = _tmp_284_rule(p)) // assignment_expression | expression !':=' + (_tmp_286_var = _tmp_286_rule(p)) // assignment_expression | expression !':=' && _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 22) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_283[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); - _res = _tmp_284_var; + D(fprintf(stderr, "%*c+ _tmp_285[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); + _res = _tmp_286_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_283[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_285[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(assignment_expression | expression !':=') !'='")); } _res = NULL; @@ -41960,9 +42336,9 @@ _tmp_283_rule(Parser *p) return _res; } -// _tmp_284: assignment_expression | expression !':=' +// _tmp_286: assignment_expression | expression !':=' static void * -_tmp_284_rule(Parser *p) +_tmp_286_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41978,18 +42354,18 @@ _tmp_284_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_284[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression")); + D(fprintf(stderr, "%*c> _tmp_286[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression")); expr_ty assignment_expression_var; if ( (assignment_expression_var = assignment_expression_rule(p)) // assignment_expression ) { - D(fprintf(stderr, "%*c+ _tmp_284[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression")); + D(fprintf(stderr, "%*c+ _tmp_286[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression")); _res = assignment_expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_284[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_286[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "assignment_expression")); } { // expression !':=' @@ -41997,7 +42373,7 @@ _tmp_284_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_284[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='")); + D(fprintf(stderr, "%*c> _tmp_286[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='")); expr_ty expression_var; if ( (expression_var = expression_rule(p)) // expression @@ -42005,12 +42381,12 @@ _tmp_284_rule(Parser *p) _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) // token=':=' ) { - D(fprintf(stderr, "%*c+ _tmp_284[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='")); + D(fprintf(stderr, "%*c+ _tmp_286[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='")); _res = expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_284[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_286[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression !':='")); } _res = NULL; diff --git a/Python/Python-ast.c b/Python/Python-ast.c index 7b591ddaa29869..60b46263a0d329 100644 --- a/Python/Python-ast.c +++ b/Python/Python-ast.c @@ -5119,11 +5119,22 @@ ast_type_init(PyObject *self, PyObject *args, PyObject *kw) Py_ssize_t size = PySet_Size(remaining_fields); PyObject *field_types = NULL, *remaining_list = NULL; if (size > 0) { - if (!PyObject_GetOptionalAttr((PyObject*)Py_TYPE(self), &_Py_ID(_field_types), - &field_types)) { + if (PyObject_GetOptionalAttr((PyObject*)Py_TYPE(self), &_Py_ID(_field_types), + &field_types) < 0) { res = -1; goto cleanup; } + if (field_types == NULL) { + if (PyErr_WarnFormat( + PyExc_DeprecationWarning, 1, + "%.400s provides _fields but not _field_types. 
" + "This will become an error in Python 3.15.", + Py_TYPE(self)->tp_name + ) < 0) { + res = -1; + } + goto cleanup; + } remaining_list = PySequence_List(remaining_fields); if (!remaining_list) { goto set_remaining_cleanup; diff --git a/Python/_warnings.c b/Python/_warnings.c index dfa82c569e1383..66a460e2a2c509 100644 --- a/Python/_warnings.c +++ b/Python/_warnings.c @@ -1,4 +1,5 @@ #include "Python.h" +#include "pycore_critical_section.h" // Py_BEGIN_CRITICAL_SECTION_MUT() #include "pycore_interp.h" // PyInterpreterState.warnings #include "pycore_long.h" // _PyLong_GetZero() #include "pycore_pyerrors.h" // _PyErr_Occurred() @@ -235,14 +236,12 @@ get_warnings_attr(PyInterpreterState *interp, PyObject *attr, int try_import) static PyObject * get_once_registry(PyInterpreterState *interp) { - PyObject *registry; - WarningsState *st = warnings_get_state(interp); - if (st == NULL) { - return NULL; - } + assert(st != NULL); + + _Py_CRITICAL_SECTION_ASSERT_MUTEX_LOCKED(&st->mutex); - registry = GET_WARNINGS_ATTR(interp, onceregistry, 0); + PyObject *registry = GET_WARNINGS_ATTR(interp, onceregistry, 0); if (registry == NULL) { if (PyErr_Occurred()) return NULL; @@ -265,14 +264,12 @@ get_once_registry(PyInterpreterState *interp) static PyObject * get_default_action(PyInterpreterState *interp) { - PyObject *default_action; - WarningsState *st = warnings_get_state(interp); - if (st == NULL) { - return NULL; - } + assert(st != NULL); - default_action = GET_WARNINGS_ATTR(interp, defaultaction, 0); + _Py_CRITICAL_SECTION_ASSERT_MUTEX_LOCKED(&st->mutex); + + PyObject *default_action = GET_WARNINGS_ATTR(interp, defaultaction, 0); if (default_action == NULL) { if (PyErr_Occurred()) { return NULL; @@ -299,15 +296,12 @@ get_filter(PyInterpreterState *interp, PyObject *category, PyObject *text, Py_ssize_t lineno, PyObject *module, PyObject **item) { - PyObject *action; - Py_ssize_t i; - PyObject *warnings_filters; WarningsState *st = warnings_get_state(interp); - if (st == NULL) { - return NULL; - } + assert(st != NULL); - warnings_filters = GET_WARNINGS_ATTR(interp, filters, 0); + _Py_CRITICAL_SECTION_ASSERT_MUTEX_LOCKED(&st->mutex); + + PyObject *warnings_filters = GET_WARNINGS_ATTR(interp, filters, 0); if (warnings_filters == NULL) { if (PyErr_Occurred()) return NULL; @@ -324,7 +318,7 @@ get_filter(PyInterpreterState *interp, PyObject *category, } /* WarningsState.filters could change while we are iterating over it. 
*/ - for (i = 0; i < PyList_GET_SIZE(filters); i++) { + for (Py_ssize_t i = 0; i < PyList_GET_SIZE(filters); i++) { PyObject *tmp_item, *action, *msg, *cat, *mod, *ln_obj; Py_ssize_t ln; int is_subclass, good_msg, good_mod; @@ -384,7 +378,7 @@ get_filter(PyInterpreterState *interp, PyObject *category, Py_DECREF(tmp_item); } - action = get_default_action(interp); + PyObject *action = get_default_action(interp); if (action != NULL) { *item = Py_NewRef(Py_None); return action; @@ -1000,8 +994,13 @@ do_warn(PyObject *message, PyObject *category, Py_ssize_t stack_level, &filename, &lineno, &module, ®istry)) return NULL; + WarningsState *st = warnings_get_state(tstate->interp); + assert(st != NULL); + + Py_BEGIN_CRITICAL_SECTION_MUT(&st->mutex); res = warn_explicit(tstate, category, message, filename, lineno, module, registry, NULL, source); + Py_END_CRITICAL_SECTION(); Py_DECREF(filename); Py_DECREF(registry); Py_DECREF(module); @@ -1149,8 +1148,14 @@ warnings_warn_explicit_impl(PyObject *module, PyObject *message, return NULL; } } + + WarningsState *st = warnings_get_state(tstate->interp); + assert(st != NULL); + + Py_BEGIN_CRITICAL_SECTION_MUT(&st->mutex); returned = warn_explicit(tstate, category, message, filename, lineno, mod, registry, source_line, sourceobj); + Py_END_CRITICAL_SECTION(); Py_XDECREF(source_line); return returned; } @@ -1290,8 +1295,14 @@ PyErr_WarnExplicitObject(PyObject *category, PyObject *message, if (tstate == NULL) { return -1; } + + WarningsState *st = warnings_get_state(tstate->interp); + assert(st != NULL); + + Py_BEGIN_CRITICAL_SECTION_MUT(&st->mutex); res = warn_explicit(tstate, category, message, filename, lineno, module, registry, NULL, NULL); + Py_END_CRITICAL_SECTION(); if (res == NULL) return -1; Py_DECREF(res); @@ -1356,8 +1367,13 @@ PyErr_WarnExplicitFormat(PyObject *category, PyObject *res; PyThreadState *tstate = get_current_tstate(); if (tstate != NULL) { + WarningsState *st = warnings_get_state(tstate->interp); + assert(st != NULL); + + Py_BEGIN_CRITICAL_SECTION_MUT(&st->mutex); res = warn_explicit(tstate, category, message, filename, lineno, module, registry, NULL, NULL); + Py_END_CRITICAL_SECTION(); Py_DECREF(message); if (res != NULL) { Py_DECREF(res); diff --git a/Python/assemble.c b/Python/assemble.c index 569454ebf3b9cb..09db2fab48d95c 100644 --- a/Python/assemble.c +++ b/Python/assemble.c @@ -736,6 +736,9 @@ _PyAssemble_MakeCodeObject(_PyCompile_CodeUnitMetadata *umd, PyObject *const_cac int nlocalsplus, int code_flags, PyObject *filename) { + if (_PyCompile_InstructionSequence_ApplyLabelMap(instrs) < 0) { + return NULL; + } if (resolve_unconditional_jumps(instrs) < 0) { return NULL; } diff --git a/Python/brc.c b/Python/brc.c index b73c721e71aef6..8f87bc33007bcf 100644 --- a/Python/brc.c +++ b/Python/brc.c @@ -119,6 +119,8 @@ _Py_brc_merge_refcounts(PyThreadState *tstate) struct _brc_thread_state *brc = &((_PyThreadStateImpl *)tstate)->brc; struct _brc_bucket *bucket = get_bucket(tstate->interp, brc->tid); + assert(brc->tid == _Py_ThreadId()); + // Append all objects into a local stack. We don't want to hold the lock // while calling destructors. 
PyMutex_Lock(&bucket->mutex); @@ -142,11 +144,12 @@ void _Py_brc_init_thread(PyThreadState *tstate) { struct _brc_thread_state *brc = &((_PyThreadStateImpl *)tstate)->brc; - brc->tid = _Py_ThreadId(); + uintptr_t tid = _Py_ThreadId(); // Add ourself to the hashtable - struct _brc_bucket *bucket = get_bucket(tstate->interp, brc->tid); + struct _brc_bucket *bucket = get_bucket(tstate->interp, tid); PyMutex_Lock(&bucket->mutex); + brc->tid = tid; llist_insert_tail(&bucket->root, &brc->bucket_node); PyMutex_Unlock(&bucket->mutex); } @@ -155,6 +158,13 @@ void _Py_brc_remove_thread(PyThreadState *tstate) { struct _brc_thread_state *brc = &((_PyThreadStateImpl *)tstate)->brc; + if (brc->tid == 0) { + // The thread state may have been created, but never bound to a native + // thread and therefore never added to the hashtable. + assert(tstate->_status.bound == 0); + return; + } + struct _brc_bucket *bucket = get_bucket(tstate->interp, brc->tid); // We need to fully process any objects to merge before removing ourself diff --git a/Python/bytecodes.c b/Python/bytecodes.c index 476975d2fbc3c2..5cd9db97c71e37 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -179,7 +179,7 @@ dummy_func( uintptr_t code_version = _PyFrame_GetCode(frame)->_co_instrumentation_version; if (code_version != global_version) { if (_Py_Instrument(_PyFrame_GetCode(frame), tstate->interp)) { - GOTO_ERROR(error); + ERROR_NO_POP(); } next_instr = this_instr; } @@ -206,7 +206,13 @@ dummy_func( inst(LOAD_FAST_CHECK, (-- value)) { value = GETLOCAL(oparg); - ERROR_IF(value == NULL, unbound_local_error); + if (value == NULL) { + _PyEval_FormatExcCheckArg(tstate, PyExc_UnboundLocalError, + UNBOUNDLOCAL_ERROR_MSG, + PyTuple_GetItem(_PyFrame_GetCode(frame)->co_localsplusnames, oparg) + ); + ERROR_IF(1, error); + } Py_INCREF(value); } @@ -275,7 +281,7 @@ dummy_func( if (PyGen_Check(receiver)) { PyErr_SetObject(PyExc_StopIteration, value); if (monitor_stop_iteration(tstate, frame, this_instr)) { - GOTO_ERROR(error); + ERROR_NO_POP(); } PyErr_SetRaisedException(NULL); } @@ -290,7 +296,7 @@ dummy_func( if (PyGen_Check(receiver) || PyCoro_CheckExact(receiver)) { PyErr_SetObject(PyExc_StopIteration, value); if (monitor_stop_iteration(tstate, frame, this_instr)) { - GOTO_ERROR(error); + ERROR_NO_POP(); } PyErr_SetRaisedException(NULL); } @@ -826,7 +832,7 @@ dummy_func( int err = _Py_call_instrumentation_arg( tstate, PY_MONITORING_EVENT_PY_RETURN, frame, this_instr, retval); - if (err) GOTO_ERROR(error); + if (err) ERROR_NO_POP(); STACK_SHRINK(1); assert(EMPTY()); _PyFrame_SetStackPointer(frame, stack_pointer); @@ -850,7 +856,7 @@ dummy_func( int err = _Py_call_instrumentation_arg( tstate, PY_MONITORING_EVENT_PY_RETURN, frame, this_instr, retval); - if (err) GOTO_ERROR(error); + if (err) ERROR_NO_POP(); Py_INCREF(retval); assert(EMPTY()); _PyFrame_SetStackPointer(frame, stack_pointer); @@ -906,7 +912,7 @@ dummy_func( if (PyAsyncGen_CheckExact(aiter)) { awaitable = type->tp_as_async->am_anext(aiter); if (awaitable == NULL) { - GOTO_ERROR(error); + ERROR_NO_POP(); } } else { if (type->tp_as_async != NULL){ @@ -916,7 +922,7 @@ dummy_func( if (getter != NULL) { next_iter = (*getter)(aiter); if (next_iter == NULL) { - GOTO_ERROR(error); + ERROR_NO_POP(); } } else { @@ -924,7 +930,7 @@ dummy_func( "'async for' requires an iterator with " "__anext__ method, got %.100s", type->tp_name); - GOTO_ERROR(error); + ERROR_NO_POP(); } awaitable = _PyCoro_GetAwaitableIter(next_iter); @@ -936,7 +942,7 @@ dummy_func( Py_TYPE(next_iter)->tp_name); 
Py_DECREF(next_iter); - GOTO_ERROR(error); + ERROR_NO_POP(); } else { Py_DECREF(next_iter); } @@ -1018,7 +1024,7 @@ dummy_func( JUMPBY(oparg); } else { - GOTO_ERROR(error); + ERROR_NO_POP(); } } Py_DECREF(v); @@ -1054,7 +1060,7 @@ dummy_func( int err = _Py_call_instrumentation_arg( tstate, PY_MONITORING_EVENT_PY_YIELD, frame, this_instr, retval); - if (err) GOTO_ERROR(error); + if (err) ERROR_NO_POP(); tstate->exc_info = gen->gi_exc_state.previous_item; gen->gi_exc_state.previous_item = NULL; _Py_LeaveRecursiveCallPy(tstate); @@ -1108,7 +1114,7 @@ dummy_func( else { assert(PyLong_Check(lasti)); _PyErr_SetString(tstate, PyExc_SystemError, "lasti is not an int"); - GOTO_ERROR(error); + ERROR_NO_POP(); } } assert(exc && PyExceptionInstance_Check(exc)); @@ -1184,7 +1190,7 @@ dummy_func( if (ns == NULL) { _PyErr_Format(tstate, PyExc_SystemError, "no locals when deleting %R", name); - GOTO_ERROR(error); + ERROR_NO_POP(); } err = PyObject_DelItem(ns, name); // Can't use ERROR_IF here. @@ -1192,7 +1198,7 @@ dummy_func( _PyEval_FormatExcCheckArg(tstate, PyExc_NameError, NAME_ERROR_MSG, name); - GOTO_ERROR(error); + ERROR_NO_POP(); } } @@ -1312,12 +1318,12 @@ dummy_func( int err = PyDict_Pop(GLOBALS(), name, NULL); // Can't use ERROR_IF here. if (err < 0) { - GOTO_ERROR(error); + ERROR_NO_POP(); } if (err == 0) { _PyEval_FormatExcCheckArg(tstate, PyExc_NameError, NAME_ERROR_MSG, name); - GOTO_ERROR(error); + ERROR_NO_POP(); } } @@ -1334,21 +1340,21 @@ dummy_func( inst(LOAD_FROM_DICT_OR_GLOBALS, (mod_or_class_dict -- v)) { PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); if (PyMapping_GetOptionalItem(mod_or_class_dict, name, &v) < 0) { - GOTO_ERROR(error); + ERROR_NO_POP(); } if (v == NULL) { if (PyDict_GetItemRef(GLOBALS(), name, &v) < 0) { - GOTO_ERROR(error); + ERROR_NO_POP(); } if (v == NULL) { if (PyMapping_GetOptionalItem(BUILTINS(), name, &v) < 0) { - GOTO_ERROR(error); + ERROR_NO_POP(); } if (v == NULL) { _PyEval_FormatExcCheckArg( tstate, PyExc_NameError, NAME_ERROR_MSG, name); - GOTO_ERROR(error); + ERROR_NO_POP(); } } } @@ -1364,21 +1370,21 @@ dummy_func( } PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); if (PyMapping_GetOptionalItem(mod_or_class_dict, name, &v) < 0) { - GOTO_ERROR(error); + ERROR_NO_POP(); } if (v == NULL) { if (PyDict_GetItemRef(GLOBALS(), name, &v) < 0) { - GOTO_ERROR(error); + ERROR_NO_POP(); } if (v == NULL) { if (PyMapping_GetOptionalItem(BUILTINS(), name, &v) < 0) { - GOTO_ERROR(error); + ERROR_NO_POP(); } if (v == NULL) { _PyEval_FormatExcCheckArg( tstate, PyExc_NameError, NAME_ERROR_MSG, name); - GOTO_ERROR(error); + ERROR_NO_POP(); } } } @@ -1494,7 +1500,13 @@ dummy_func( inst(DELETE_FAST, (--)) { PyObject *v = GETLOCAL(oparg); - ERROR_IF(v == NULL, unbound_local_error); + if (v == NULL) { + _PyEval_FormatExcCheckArg(tstate, PyExc_UnboundLocalError, + UNBOUNDLOCAL_ERROR_MSG, + PyTuple_GetItem(_PyFrame_GetCode(frame)->co_localsplusnames, oparg) + ); + ERROR_IF(1, error); + } SETLOCAL(oparg, NULL); } @@ -1504,7 +1516,7 @@ dummy_func( PyObject *initial = GETLOCAL(oparg); PyObject *cell = PyCell_New(initial); if (cell == NULL) { - GOTO_ERROR(error); + ERROR_NO_POP(); } SETLOCAL(oparg, cell); } @@ -1516,7 +1528,7 @@ dummy_func( // Fortunately we don't need its superpower. 
if (oldobj == NULL) { _PyEval_FormatExcUnbound(tstate, _PyFrame_GetCode(frame), oparg); - GOTO_ERROR(error); + ERROR_NO_POP(); } PyCell_SET(cell, NULL); Py_DECREF(oldobj); @@ -1528,14 +1540,14 @@ dummy_func( assert(oparg >= 0 && oparg < _PyFrame_GetCode(frame)->co_nlocalsplus); name = PyTuple_GET_ITEM(_PyFrame_GetCode(frame)->co_localsplusnames, oparg); if (PyMapping_GetOptionalItem(class_dict, name, &value) < 0) { - GOTO_ERROR(error); + ERROR_NO_POP(); } if (!value) { PyObject *cell = GETLOCAL(oparg); value = PyCell_GET(cell); if (value == NULL) { _PyEval_FormatExcUnbound(tstate, _PyFrame_GetCode(frame), oparg); - GOTO_ERROR(error); + ERROR_NO_POP(); } Py_INCREF(value); } @@ -1615,7 +1627,7 @@ dummy_func( inst(BUILD_SET, (values[oparg] -- set)) { set = PySet_New(NULL); if (set == NULL) - GOTO_ERROR(error); + ERROR_NO_POP(); int err = 0; for (int i = 0; i < oparg; i++) { PyObject *item = values[i]; @@ -1662,12 +1674,8 @@ dummy_func( } inst(BUILD_CONST_KEY_MAP, (values[oparg], keys -- map)) { - if (!PyTuple_CheckExact(keys) || - PyTuple_GET_SIZE(keys) != (Py_ssize_t)oparg) { - _PyErr_SetString(tstate, PyExc_SystemError, - "bad BUILD_CONST_KEY_MAP keys argument"); - GOTO_ERROR(error); // Pop the keys and values. - } + assert(PyTuple_CheckExact(keys)); + assert(PyTuple_GET_SIZE(keys) == (Py_ssize_t)oparg); map = _PyDict_FromItems( &PyTuple_GET_ITEM(keys, 0), 1, values, 1, oparg); @@ -2502,7 +2510,7 @@ dummy_func( _PyErr_SetString(tstate, PyExc_TypeError, "cannot 'yield from' a coroutine object " "in a non-coroutine generator"); - GOTO_ERROR(error); + ERROR_NO_POP(); } iter = iterable; } @@ -2513,7 +2521,7 @@ dummy_func( /* `iterable` is not a generator. */ iter = PyObject_GetIter(iterable); if (iter == NULL) { - GOTO_ERROR(error); + ERROR_NO_POP(); } DECREF_INPUTS(); } @@ -2550,7 +2558,7 @@ dummy_func( if (next == NULL) { if (_PyErr_Occurred(tstate)) { if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { - GOTO_ERROR(error); + ERROR_NO_POP(); } monitor_raise(tstate, frame, this_instr); _PyErr_Clear(tstate); @@ -2573,7 +2581,7 @@ dummy_func( if (next == NULL) { if (_PyErr_Occurred(tstate)) { if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { - GOTO_ERROR(error); + ERROR_NO_POP(); } _PyErr_Clear(tstate); } @@ -2599,7 +2607,7 @@ dummy_func( else { if (_PyErr_Occurred(tstate)) { if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { - GOTO_ERROR(error); + ERROR_NO_POP(); } monitor_raise(tstate, frame, this_instr); _PyErr_Clear(tstate); @@ -2779,7 +2787,7 @@ dummy_func( "asynchronous context manager protocol", Py_TYPE(mgr)->tp_name); } - GOTO_ERROR(error); + ERROR_NO_POP(); } exit = _PyObject_LookupSpecial(mgr, &_Py_ID(__aexit__)); if (exit == NULL) { @@ -2791,7 +2799,7 @@ dummy_func( Py_TYPE(mgr)->tp_name); } Py_DECREF(enter); - GOTO_ERROR(error); + ERROR_NO_POP(); } DECREF_INPUTS(); res = PyObject_CallNoArgs(enter); @@ -2814,7 +2822,7 @@ dummy_func( "context manager protocol", Py_TYPE(mgr)->tp_name); } - GOTO_ERROR(error); + ERROR_NO_POP(); } exit = _PyObject_LookupSpecial(mgr, &_Py_ID(__exit__)); if (exit == NULL) { @@ -2826,7 +2834,7 @@ dummy_func( Py_TYPE(mgr)->tp_name); } Py_DECREF(enter); - GOTO_ERROR(error); + ERROR_NO_POP(); } DECREF_INPUTS(); res = PyObject_CallNoArgs(enter); @@ -3075,7 +3083,7 @@ dummy_func( // The frame has stolen all the arguments from the stack, // so there is no need to clean them up. 
if (new_frame == NULL) { - GOTO_ERROR(error); + ERROR_NO_POP(); } frame->return_offset = (uint16_t)(next_instr - this_instr); DISPATCH_INLINED(new_frame); @@ -3298,7 +3306,7 @@ dummy_func( STAT_INC(CALL, hit); PyObject *self = _PyType_NewManagedObject(tp); if (self == NULL) { - GOTO_ERROR(error); + ERROR_NO_POP(); } Py_DECREF(tp); _PyInterpreterFrame *shim = _PyFrame_PushTrampolineUnchecked( @@ -3335,7 +3343,7 @@ dummy_func( PyErr_Format(PyExc_TypeError, "__init__() should return None, not '%.200s'", Py_TYPE(should_be_none)->tp_name); - GOTO_ERROR(error); + ERROR_NO_POP(); } } @@ -3472,7 +3480,7 @@ dummy_func( PyObject *arg = args[0]; Py_ssize_t len_i = PyObject_Length(arg); if (len_i < 0) { - GOTO_ERROR(error); + ERROR_NO_POP(); } res = PyLong_FromSsize_t(len_i); assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); @@ -3498,7 +3506,7 @@ dummy_func( PyObject *inst = args[0]; int retval = PyObject_IsInstance(inst, cls); if (retval < 0) { - GOTO_ERROR(error); + ERROR_NO_POP(); } res = PyBool_FromLong(retval); assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); @@ -3712,7 +3720,7 @@ dummy_func( // The frame has stolen all the arguments from the stack, // so there is no need to clean them up. if (new_frame == NULL) { - GOTO_ERROR(error); + ERROR_NO_POP(); } assert(next_instr - this_instr == 1); frame->return_offset = 1; @@ -3760,11 +3768,11 @@ dummy_func( assert(kwargs == NULL || PyDict_CheckExact(kwargs)); if (!PyTuple_CheckExact(callargs)) { if (check_args_iterable(tstate, func, callargs) < 0) { - GOTO_ERROR(error); + ERROR_NO_POP(); } PyObject *tuple = PySequence_Tuple(callargs); if (tuple == NULL) { - GOTO_ERROR(error); + ERROR_NO_POP(); } Py_SETREF(callargs, tuple); } @@ -3776,7 +3784,7 @@ dummy_func( int err = _Py_call_instrumentation_2args( tstate, PY_MONITORING_EVENT_CALL, frame, this_instr, func, arg); - if (err) GOTO_ERROR(error); + if (err) ERROR_NO_POP(); result = PyObject_Call(func, callargs, kwargs); if (!PyFunction_Check(func) && !PyMethod_Check(func)) { @@ -3810,7 +3818,7 @@ dummy_func( // Need to manually shrink the stack since we exit with DISPATCH_INLINED. 
STACK_SHRINK(oparg + 3); if (new_frame == NULL) { - GOTO_ERROR(error); + ERROR_NO_POP(); } assert(next_instr - this_instr == 1); frame->return_offset = 1; @@ -3831,7 +3839,7 @@ dummy_func( Py_DECREF(codeobj); if (func_obj == NULL) { - GOTO_ERROR(error); + ERROR_NO_POP(); } _PyFunction_SetVersion( @@ -3871,7 +3879,7 @@ dummy_func( PyFunctionObject *func = (PyFunctionObject *)frame->f_funcobj; PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(func); if (gen == NULL) { - GOTO_ERROR(error); + ERROR_NO_POP(); } assert(EMPTY()); _PyFrame_SetStackPointer(frame, stack_pointer); @@ -4169,7 +4177,7 @@ dummy_func( if (optimized < 0) { Py_DECREF(previous); tstate->previous_executor = Py_None; - ERROR_IF(1, error); + GOTO_UNWIND(); } GOTO_TIER_ONE(target); } @@ -4199,6 +4207,19 @@ dummy_func( frame->instr_ptr = (_Py_CODEUNIT *)instr_ptr; } + tier2 op(_DEOPT, (--)) { + EXIT_TO_TIER1(); + } + + tier2 op(_SIDE_EXIT, (--)) { + EXIT_TO_TRACE(); + } + + tier2 op(_ERROR_POP_N, (unused[oparg] --)) { + SYNC_SP(); + GOTO_UNWIND(); + } + // END BYTECODES // } diff --git a/Python/ceval.c b/Python/ceval.c index 9dbcd3da3fec27..cd51011450c3d5 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -642,7 +642,6 @@ int _Py_CheckRecursiveCallPy( return 0; } - static const _Py_CODEUNIT _Py_INTERPRETER_TRAMPOLINE_INSTRUCTIONS[] = { /* Put a NOP at the start, so that the IP points into * the code, rather than before it */ @@ -850,15 +849,6 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int or goto error. */ Py_UNREACHABLE(); -unbound_local_error: - { - _PyEval_FormatExcCheckArg(tstate, PyExc_UnboundLocalError, - UNBOUNDLOCAL_ERROR_MSG, - PyTuple_GetItem(_PyFrame_GetCode(frame)->co_localsplusnames, oparg) - ); - goto error; - } - pop_4_error: STACK_SHRINK(1); pop_3_error: @@ -980,12 +970,6 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int #undef GOTO_ERROR #define GOTO_ERROR(LABEL) goto LABEL ## _tier_two -#undef DEOPT_IF -#define DEOPT_IF(COND, INSTNAME) \ - if ((COND)) { \ - goto deoptimize;\ - } - #ifdef Py_STATS // Disable these macros that apply to Tier 1 stats when we are in Tier 2 #undef STAT_INC @@ -1013,6 +997,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int #endif assert(next_uop->opcode == _START_EXECUTOR || next_uop->opcode == _COLD_EXIT); +tier2_dispatch: for (;;) { uopcode = next_uop->opcode; #ifdef Py_DEBUG @@ -1054,24 +1039,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int } } -// Jump here from ERROR_IF(..., unbound_local_error) -unbound_local_error_tier_two: - _PyEval_FormatExcCheckArg(tstate, PyExc_UnboundLocalError, - UNBOUNDLOCAL_ERROR_MSG, - PyTuple_GetItem(_PyFrame_GetCode(frame)->co_localsplusnames, oparg) - ); - goto error_tier_two; - -// JUMP to any of these from ERROR_IF(..., error) -pop_4_error_tier_two: - STACK_SHRINK(1); -pop_3_error_tier_two: - STACK_SHRINK(1); -pop_2_error_tier_two: - STACK_SHRINK(1); -pop_1_error_tier_two: - STACK_SHRINK(1); -error_tier_two: +jump_to_error_target: #ifdef Py_DEBUG if (lltrace >= 2) { printf("Error: [UOp "); @@ -1081,15 +1049,28 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int _PyOpcode_OpName[frame->instr_ptr->op.code]); } #endif + assert (next_uop[-1].format == UOP_FORMAT_JUMP); + uint16_t target = uop_get_error_target(&next_uop[-1]); + next_uop = current_executor->trace + target; + goto tier2_dispatch; + +error_tier_two: OPT_HIST(trace_uop_execution_counter, trace_run_length_hist); + 
assert(next_uop[-1].format == UOP_FORMAT_TARGET); frame->return_offset = 0; // Don't leave this random _PyFrame_SetStackPointer(frame, stack_pointer); Py_DECREF(current_executor); tstate->previous_executor = NULL; goto resume_with_error; -// Jump here from DEOPT_IF() -deoptimize: +jump_to_jump_target: + assert(next_uop[-1].format == UOP_FORMAT_JUMP); + target = uop_get_jump_target(&next_uop[-1]); + next_uop = current_executor->trace + target; + goto tier2_dispatch; + +exit_to_tier1: + assert(next_uop[-1].format == UOP_FORMAT_TARGET); next_instr = next_uop[-1].target + _PyCode_CODE(_PyFrame_GetCode(frame)); #ifdef Py_DEBUG if (lltrace >= 2) { @@ -1105,8 +1086,8 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int tstate->previous_executor = NULL; DISPATCH(); -// Jump here from EXIT_IF() -side_exit: +exit_to_trace: + assert(next_uop[-1].format == UOP_FORMAT_EXIT); OPT_HIST(trace_uop_execution_counter, trace_run_length_hist); UOP_STAT_INC(uopcode, miss); uint32_t exit_index = next_uop[-1].exit_index; @@ -2903,7 +2884,7 @@ _PyEval_FormatExcUnbound(PyThreadState *tstate, PyCodeObject *co, int oparg) if (_PyErr_Occurred(tstate)) return; name = PyTuple_GET_ITEM(co->co_localsplusnames, oparg); - if (oparg < PyCode_GetFirstFree(co)) { + if (oparg < PyUnstable_Code_GetFirstFree(co)) { _PyEval_FormatExcCheckArg(tstate, PyExc_UnboundLocalError, UNBOUNDLOCAL_ERROR_MSG, name); } else { diff --git a/Python/ceval_gil.c b/Python/ceval_gil.c index d2cd35dfa86833..d88ac65c5cf300 100644 --- a/Python/ceval_gil.c +++ b/Python/ceval_gil.c @@ -579,9 +579,8 @@ PyEval_ReleaseThread(PyThreadState *tstate) } #ifdef HAVE_FORK -/* This function is called from PyOS_AfterFork_Child to destroy all threads - which are not running in the child process, and clear internal locks - which might be held by those threads. */ +/* This function is called from PyOS_AfterFork_Child to re-initialize the + GIL and pending calls lock. 
*/ PyStatus _PyEval_ReInitThreads(PyThreadState *tstate) { @@ -598,8 +597,6 @@ _PyEval_ReInitThreads(PyThreadState *tstate) struct _pending_calls *pending = &tstate->interp->ceval.pending; _PyMutex_at_fork_reinit(&pending->mutex); - /* Destroy all threads except the current one */ - _PyThreadState_DeleteExcept(tstate); return _PyStatus_OK(); } #endif @@ -671,7 +668,7 @@ _push_pending_call(struct _pending_calls *pending, pending->calls[i].flags = flags; pending->last = j; assert(pending->calls_to_do < NPENDINGCALLS); - pending->calls_to_do++; + _Py_atomic_add_int32(&pending->calls_to_do, 1); return 0; } @@ -701,7 +698,7 @@ _pop_pending_call(struct _pending_calls *pending, pending->calls[i] = (struct _pending_call){0}; pending->first = (i + 1) % NPENDINGCALLS; assert(pending->calls_to_do > 0); - pending->calls_to_do--; + _Py_atomic_add_int32(&pending->calls_to_do, -1); } } diff --git a/Python/ceval_macros.h b/Python/ceval_macros.h index 22992aa09e1f38..f2536ed3602c69 100644 --- a/Python/ceval_macros.h +++ b/Python/ceval_macros.h @@ -423,3 +423,9 @@ do { \ #define CURRENT_OPARG() (next_uop[-1].oparg) #define CURRENT_OPERAND() (next_uop[-1].operand) + +#define JUMP_TO_JUMP_TARGET() goto jump_to_jump_target +#define JUMP_TO_ERROR() goto jump_to_error_target +#define GOTO_UNWIND() goto error_tier_two +#define EXIT_TO_TRACE() goto exit_to_trace +#define EXIT_TO_TIER1() goto exit_to_tier1 diff --git a/Python/compile.c b/Python/compile.c index 6b17f3bcaf2264..43b3cbd4e1894c 100644 --- a/Python/compile.c +++ b/Python/compile.c @@ -235,6 +235,28 @@ _PyCompile_InstructionSequence_UseLabel(instr_sequence *seq, int lbl) return SUCCESS; } +int +_PyCompile_InstructionSequence_ApplyLabelMap(instr_sequence *instrs) +{ + /* Replace labels by offsets in the code */ + for (int i=0; i < instrs->s_used; i++) { + instruction *instr = &instrs->s_instrs[i]; + if (HAS_TARGET(instr->i_opcode)) { + assert(instr->i_oparg < instrs->s_labelmap_size); + instr->i_oparg = instrs->s_labelmap[instr->i_oparg]; + } + _PyCompile_ExceptHandlerInfo *hi = &instr->i_except_handler_info; + if (hi->h_label >= 0) { + assert(hi->h_label < instrs->s_labelmap_size); + hi->h_label = instrs->s_labelmap[hi->h_label]; + } + } + /* Clear label map so it's never used again */ + PyMem_Free(instrs->s_labelmap); + instrs->s_labelmap = NULL; + instrs->s_labelmap_size = 0; + return SUCCESS; +} #define MAX_OPCODE 511 @@ -358,7 +380,8 @@ struct compiler_unit { int u_scope_type; - PyObject *u_private; /* for private name mangling */ + PyObject *u_private; /* for private name mangling */ + PyObject *u_static_attributes; /* for class: attributes accessed via self.X */ instr_sequence u_instr_sequence; /* codegen output */ @@ -690,9 +713,26 @@ compiler_unit_free(struct compiler_unit *u) Py_CLEAR(u->u_metadata.u_cellvars); Py_CLEAR(u->u_metadata.u_fasthidden); Py_CLEAR(u->u_private); + Py_CLEAR(u->u_static_attributes); PyMem_Free(u); } +static struct compiler_unit * +get_class_compiler_unit(struct compiler *c) +{ + Py_ssize_t stack_size = PyList_GET_SIZE(c->c_stack); + for (Py_ssize_t i = stack_size - 1; i >= 0; i--) { + PyObject *capsule = PyList_GET_ITEM(c->c_stack, i); + struct compiler_unit *u = (struct compiler_unit *)PyCapsule_GetPointer( + capsule, CAPSULE_NAME); + assert(u); + if (u->u_scope_type == COMPILER_SCOPE_CLASS) { + return u; + } + } + return NULL; +} + static int compiler_set_qualname(struct compiler *c) { @@ -1336,6 +1376,16 @@ compiler_enter_scope(struct compiler *c, identifier name, } u->u_private = NULL; + if (scope_type == 
COMPILER_SCOPE_CLASS) { + u->u_static_attributes = PySet_New(0); + if (!u->u_static_attributes) { + compiler_unit_free(u); + return ERROR; + } + } + else { + u->u_static_attributes = NULL; + } /* Push the old compiler_unit on the stack. */ if (c->u) { @@ -1830,7 +1880,7 @@ compiler_make_closure(struct compiler *c, location loc, PyCodeObject *co, Py_ssize_t flags) { if (co->co_nfreevars) { - int i = PyCode_GetFirstFree(co); + int i = PyUnstable_Code_GetFirstFree(co); for (; i < co->co_nlocalsplus; ++i) { /* Bypass com_addop_varname because it will generate LOAD_DEREF but LOAD_CLOSURE is needed. @@ -2517,6 +2567,18 @@ compiler_class_body(struct compiler *c, stmt_ty s, int firstlineno) compiler_exit_scope(c); return ERROR; } + assert(c->u->u_static_attributes); + PyObject *static_attributes = PySequence_Tuple(c->u->u_static_attributes); + if (static_attributes == NULL) { + compiler_exit_scope(c); + return ERROR; + } + ADDOP_LOAD_CONST(c, NO_LOCATION, static_attributes); + Py_CLEAR(static_attributes); + if (compiler_nameop(c, NO_LOCATION, &_Py_ID(__static_attributes__), Store) < 0) { + compiler_exit_scope(c); + return ERROR; + } /* The following code is artificial */ /* Set __classdictcell__ if necessary */ if (c->u->u_ste->ste_needs_classdict) { @@ -2657,6 +2719,7 @@ compiler_class(struct compiler *c, stmt_ty s) s->v.ClassDef.keywords)); PyCodeObject *co = optimize_and_assemble(c, 0); + compiler_exit_scope(c); if (co == NULL) { return ERROR; @@ -6246,6 +6309,17 @@ compiler_visit_expr1(struct compiler *c, expr_ty e) ADDOP(c, loc, NOP); return SUCCESS; } + if (e->v.Attribute.value->kind == Name_kind && + _PyUnicode_EqualToASCIIString(e->v.Attribute.value->v.Name.id, "self")) + { + struct compiler_unit *class_u = get_class_compiler_unit(c); + if (class_u != NULL) { + assert(class_u->u_scope_type == COMPILER_SCOPE_CLASS); + assert(class_u->u_static_attributes); + RETURN_IF_ERROR( + PySet_Add(class_u->u_static_attributes, e->v.Attribute.attr)); + } + } VISIT(c, expr, e->v.Attribute.value); loc = LOC(e); loc = update_start_location_to_match_attr(c, loc, e); @@ -7772,11 +7846,8 @@ instr_sequence_to_instructions(instr_sequence *seq) for (int i = 0; i < seq->s_used; i++) { instruction *instr = &seq->s_instrs[i]; location loc = instr->i_loc; - int arg = HAS_TARGET(instr->i_opcode) ? 
- seq->s_labelmap[instr->i_oparg] : instr->i_oparg; - PyObject *inst_tuple = Py_BuildValue( - "(iiiiii)", instr->i_opcode, arg, + "(iiiiii)", instr->i_opcode, instr->i_oparg, loc.lineno, loc.end_lineno, loc.col_offset, loc.end_col_offset); if (inst_tuple == NULL) { @@ -7803,6 +7874,9 @@ cfg_to_instructions(cfg_builder *g) if (_PyCfg_ToInstructionSequence(g, &seq) < 0) { return NULL; } + if (_PyCompile_InstructionSequence_ApplyLabelMap(&seq) < 0) { + return NULL; + } PyObject *res = instr_sequence_to_instructions(&seq); instr_sequence_fini(&seq); return res; @@ -7974,6 +8048,10 @@ _PyCompile_CodeGen(PyObject *ast, PyObject *filename, PyCompilerFlags *pflags, goto finally; } + if (_PyCompile_InstructionSequence_ApplyLabelMap(INSTR_SEQUENCE(c)) < 0) { + return NULL; + } + PyObject *insts = instr_sequence_to_instructions(INSTR_SEQUENCE(c)); if (insts == NULL) { goto finally; diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h index a55daa2c344944..224b600b8f6a4a 100644 --- a/Python/executor_cases.c.h +++ b/Python/executor_cases.c.h @@ -14,23 +14,29 @@ case _RESUME_CHECK: { #if defined(__EMSCRIPTEN__) - if (_Py_emscripten_signal_clock == 0) goto deoptimize; + if (_Py_emscripten_signal_clock == 0) JUMP_TO_JUMP_TARGET(); _Py_emscripten_signal_clock -= Py_EMSCRIPTEN_SIGNAL_HANDLING; #endif uintptr_t eval_breaker = _Py_atomic_load_uintptr_relaxed(&tstate->eval_breaker); uintptr_t version = _PyFrame_GetCode(frame)->_co_instrumentation_version; assert((version & _PY_EVAL_EVENTS_MASK) == 0); - if (eval_breaker != version) goto deoptimize; + if (eval_breaker != version) JUMP_TO_JUMP_TARGET(); break; } - /* _INSTRUMENTED_RESUME is not a viable micro-op for tier 2 */ + /* _INSTRUMENTED_RESUME is not a viable micro-op for tier 2 because it is instrumented */ case _LOAD_FAST_CHECK: { PyObject *value; oparg = CURRENT_OPARG(); value = GETLOCAL(oparg); - if (value == NULL) goto unbound_local_error_tier_two; + if (value == NULL) { + _PyEval_FormatExcCheckArg(tstate, PyExc_UnboundLocalError, + UNBOUNDLOCAL_ERROR_MSG, + PyTuple_GetItem(_PyFrame_GetCode(frame)->co_localsplusnames, oparg) + ); + if (1) JUMP_TO_ERROR(); + } Py_INCREF(value); stack_pointer[0] = value; stack_pointer += 1; @@ -287,7 +293,7 @@ value = stack_pointer[-1]; res = PyNumber_Negative(value); Py_DECREF(value); - if (res == NULL) goto pop_1_error_tier_two; + if (res == NULL) JUMP_TO_ERROR(); stack_pointer[-1] = res; break; } @@ -308,7 +314,7 @@ value = stack_pointer[-1]; int err = PyObject_IsTrue(value); Py_DECREF(value); - if (err < 0) goto pop_1_error_tier_two; + if (err < 0) JUMP_TO_ERROR(); res = err ? Py_True : Py_False; stack_pointer[-1] = res; break; @@ -317,7 +323,7 @@ case _TO_BOOL_BOOL: { PyObject *value; value = stack_pointer[-1]; - if (!PyBool_Check(value)) goto side_exit; + if (!PyBool_Check(value)) JUMP_TO_JUMP_TARGET(); STAT_INC(TO_BOOL, hit); break; } @@ -326,7 +332,7 @@ PyObject *value; PyObject *res; value = stack_pointer[-1]; - if (!PyLong_CheckExact(value)) goto side_exit; + if (!PyLong_CheckExact(value)) JUMP_TO_JUMP_TARGET(); STAT_INC(TO_BOOL, hit); if (_PyLong_IsZero((PyLongObject *)value)) { assert(_Py_IsImmortal(value)); @@ -344,7 +350,7 @@ PyObject *value; PyObject *res; value = stack_pointer[-1]; - if (!PyList_CheckExact(value)) goto side_exit; + if (!PyList_CheckExact(value)) JUMP_TO_JUMP_TARGET(); STAT_INC(TO_BOOL, hit); res = Py_SIZE(value) ? 
Py_True : Py_False; Py_DECREF(value); @@ -357,7 +363,7 @@ PyObject *res; value = stack_pointer[-1]; // This one is a bit weird, because we expect *some* failures: - if (!Py_IsNone(value)) goto side_exit; + if (!Py_IsNone(value)) JUMP_TO_JUMP_TARGET(); STAT_INC(TO_BOOL, hit); res = Py_False; stack_pointer[-1] = res; @@ -368,7 +374,7 @@ PyObject *value; PyObject *res; value = stack_pointer[-1]; - if (!PyUnicode_CheckExact(value)) goto side_exit; + if (!PyUnicode_CheckExact(value)) JUMP_TO_JUMP_TARGET(); STAT_INC(TO_BOOL, hit); if (value == &_Py_STR(empty)) { assert(_Py_IsImmortal(value)); @@ -399,7 +405,7 @@ value = stack_pointer[-1]; res = PyNumber_Invert(value); Py_DECREF(value); - if (res == NULL) goto pop_1_error_tier_two; + if (res == NULL) JUMP_TO_ERROR(); stack_pointer[-1] = res; break; } @@ -409,8 +415,8 @@ PyObject *left; right = stack_pointer[-1]; left = stack_pointer[-2]; - if (!PyLong_CheckExact(left)) goto side_exit; - if (!PyLong_CheckExact(right)) goto side_exit; + if (!PyLong_CheckExact(left)) JUMP_TO_JUMP_TARGET(); + if (!PyLong_CheckExact(right)) JUMP_TO_JUMP_TARGET(); break; } @@ -424,7 +430,7 @@ res = _PyLong_Multiply((PyLongObject *)left, (PyLongObject *)right); _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); - if (res == NULL) goto pop_2_error_tier_two; + if (res == NULL) JUMP_TO_ERROR(); stack_pointer[-2] = res; stack_pointer += -1; break; @@ -440,7 +446,7 @@ res = _PyLong_Add((PyLongObject *)left, (PyLongObject *)right); _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); - if (res == NULL) goto pop_2_error_tier_two; + if (res == NULL) JUMP_TO_ERROR(); stack_pointer[-2] = res; stack_pointer += -1; break; @@ -456,7 +462,7 @@ res = _PyLong_Subtract((PyLongObject *)left, (PyLongObject *)right); _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); - if (res == NULL) goto pop_2_error_tier_two; + if (res == NULL) JUMP_TO_ERROR(); stack_pointer[-2] = res; stack_pointer += -1; break; @@ -467,8 +473,8 @@ PyObject *left; right = stack_pointer[-1]; left = stack_pointer[-2]; - if (!PyFloat_CheckExact(left)) goto side_exit; - if (!PyFloat_CheckExact(right)) goto side_exit; + if (!PyFloat_CheckExact(left)) JUMP_TO_JUMP_TARGET(); + if (!PyFloat_CheckExact(right)) JUMP_TO_JUMP_TARGET(); break; } @@ -525,8 +531,8 @@ PyObject *left; right = stack_pointer[-1]; left = stack_pointer[-2]; - if (!PyUnicode_CheckExact(left)) goto side_exit; - if (!PyUnicode_CheckExact(right)) goto side_exit; + if (!PyUnicode_CheckExact(left)) JUMP_TO_JUMP_TARGET(); + if (!PyUnicode_CheckExact(right)) JUMP_TO_JUMP_TARGET(); break; } @@ -540,7 +546,7 @@ res = PyUnicode_Concat(left, right); _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc); _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc); - if (res == NULL) goto pop_2_error_tier_two; + if (res == NULL) JUMP_TO_ERROR(); stack_pointer[-2] = res; stack_pointer += -1; break; @@ -555,7 +561,7 @@ res = PyObject_GetItem(container, sub); Py_DECREF(container); Py_DECREF(sub); - if (res == NULL) goto pop_2_error_tier_two; + if (res == NULL) JUMP_TO_ERROR(); stack_pointer[-2] = res; stack_pointer += -1; break; @@ -580,7 +586,7 @@ Py_DECREF(slice); } Py_DECREF(container); - if (res == NULL) goto pop_3_error_tier_two; + if (res == NULL) JUMP_TO_ERROR(); stack_pointer[-3] = res; stack_pointer += -2; break; @@ -606,7 +612,7 @@ } Py_DECREF(v); Py_DECREF(container); - if (err) 
goto pop_4_error_tier_two; + if (err) JUMP_TO_ERROR(); stack_pointer += -4; break; } @@ -617,12 +623,12 @@ PyObject *res; sub = stack_pointer[-1]; list = stack_pointer[-2]; - if (!PyLong_CheckExact(sub)) goto deoptimize; - if (!PyList_CheckExact(list)) goto deoptimize; + if (!PyLong_CheckExact(sub)) JUMP_TO_JUMP_TARGET(); + if (!PyList_CheckExact(list)) JUMP_TO_JUMP_TARGET(); // Deopt unless 0 <= sub < PyList_Size(list) - if (!_PyLong_IsNonNegativeCompact((PyLongObject *)sub)) goto deoptimize; + if (!_PyLong_IsNonNegativeCompact((PyLongObject *)sub)) JUMP_TO_JUMP_TARGET(); Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0]; - if (index >= PyList_GET_SIZE(list)) goto deoptimize; + if (index >= PyList_GET_SIZE(list)) JUMP_TO_JUMP_TARGET(); STAT_INC(BINARY_SUBSCR, hit); res = PyList_GET_ITEM(list, index); assert(res != NULL); @@ -640,14 +646,14 @@ PyObject *res; sub = stack_pointer[-1]; str = stack_pointer[-2]; - if (!PyLong_CheckExact(sub)) goto deoptimize; - if (!PyUnicode_CheckExact(str)) goto deoptimize; - if (!_PyLong_IsNonNegativeCompact((PyLongObject *)sub)) goto deoptimize; + if (!PyLong_CheckExact(sub)) JUMP_TO_JUMP_TARGET(); + if (!PyUnicode_CheckExact(str)) JUMP_TO_JUMP_TARGET(); + if (!_PyLong_IsNonNegativeCompact((PyLongObject *)sub)) JUMP_TO_JUMP_TARGET(); Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0]; - if (PyUnicode_GET_LENGTH(str) <= index) goto deoptimize; + if (PyUnicode_GET_LENGTH(str) <= index) JUMP_TO_JUMP_TARGET(); // Specialize for reading an ASCII character from any string: Py_UCS4 c = PyUnicode_READ_CHAR(str, index); - if (Py_ARRAY_LENGTH(_Py_SINGLETON(strings).ascii) <= c) goto deoptimize; + if (Py_ARRAY_LENGTH(_Py_SINGLETON(strings).ascii) <= c) JUMP_TO_JUMP_TARGET(); STAT_INC(BINARY_SUBSCR, hit); res = (PyObject*)&_Py_SINGLETON(strings).ascii[c]; _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); @@ -663,12 +669,12 @@ PyObject *res; sub = stack_pointer[-1]; tuple = stack_pointer[-2]; - if (!PyLong_CheckExact(sub)) goto deoptimize; - if (!PyTuple_CheckExact(tuple)) goto deoptimize; + if (!PyLong_CheckExact(sub)) JUMP_TO_JUMP_TARGET(); + if (!PyTuple_CheckExact(tuple)) JUMP_TO_JUMP_TARGET(); // Deopt unless 0 <= sub < PyTuple_Size(list) - if (!_PyLong_IsNonNegativeCompact((PyLongObject *)sub)) goto deoptimize; + if (!_PyLong_IsNonNegativeCompact((PyLongObject *)sub)) JUMP_TO_JUMP_TARGET(); Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0]; - if (index >= PyTuple_GET_SIZE(tuple)) goto deoptimize; + if (index >= PyTuple_GET_SIZE(tuple)) JUMP_TO_JUMP_TARGET(); STAT_INC(BINARY_SUBSCR, hit); res = PyTuple_GET_ITEM(tuple, index); assert(res != NULL); @@ -686,7 +692,7 @@ PyObject *res; sub = stack_pointer[-1]; dict = stack_pointer[-2]; - if (!PyDict_CheckExact(dict)) goto deoptimize; + if (!PyDict_CheckExact(dict)) JUMP_TO_JUMP_TARGET(); STAT_INC(BINARY_SUBSCR, hit); int rc = PyDict_GetItemRef(dict, sub, &res); if (rc == 0) { @@ -694,14 +700,14 @@ } Py_DECREF(dict); Py_DECREF(sub); - if (rc <= 0) goto pop_2_error_tier_two; + if (rc <= 0) JUMP_TO_ERROR(); // not found or error stack_pointer[-2] = res; stack_pointer += -1; break; } - /* _BINARY_SUBSCR_GETITEM is not a viable micro-op for tier 2 */ + /* _BINARY_SUBSCR_GETITEM is not a viable micro-op for tier 2 because it uses the 'this_instr' variable */ case _LIST_APPEND: { PyObject *v; @@ -709,7 +715,7 @@ oparg = CURRENT_OPARG(); v = stack_pointer[-1]; list = stack_pointer[-2 - (oparg-1)]; - if (_PyList_AppendTakeRef((PyListObject *)list, v) < 0) goto pop_1_error_tier_two; 
+ if (_PyList_AppendTakeRef((PyListObject *)list, v) < 0) JUMP_TO_ERROR(); stack_pointer += -1; break; } @@ -722,7 +728,7 @@ set = stack_pointer[-2 - (oparg-1)]; int err = PySet_Add(set, v); Py_DECREF(v); - if (err) goto pop_1_error_tier_two; + if (err) JUMP_TO_ERROR(); stack_pointer += -1; break; } @@ -739,7 +745,7 @@ Py_DECREF(v); Py_DECREF(container); Py_DECREF(sub); - if (err) goto pop_3_error_tier_two; + if (err) JUMP_TO_ERROR(); stack_pointer += -3; break; } @@ -751,13 +757,13 @@ sub = stack_pointer[-1]; list = stack_pointer[-2]; value = stack_pointer[-3]; - if (!PyLong_CheckExact(sub)) goto deoptimize; - if (!PyList_CheckExact(list)) goto deoptimize; + if (!PyLong_CheckExact(sub)) JUMP_TO_JUMP_TARGET(); + if (!PyList_CheckExact(list)) JUMP_TO_JUMP_TARGET(); // Ensure nonnegative, zero-or-one-digit ints. - if (!_PyLong_IsNonNegativeCompact((PyLongObject *)sub)) goto deoptimize; + if (!_PyLong_IsNonNegativeCompact((PyLongObject *)sub)) JUMP_TO_JUMP_TARGET(); Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0]; // Ensure index < len(list) - if (index >= PyList_GET_SIZE(list)) goto deoptimize; + if (index >= PyList_GET_SIZE(list)) JUMP_TO_JUMP_TARGET(); STAT_INC(STORE_SUBSCR, hit); PyObject *old_value = PyList_GET_ITEM(list, index); PyList_SET_ITEM(list, index, value); @@ -776,11 +782,11 @@ sub = stack_pointer[-1]; dict = stack_pointer[-2]; value = stack_pointer[-3]; - if (!PyDict_CheckExact(dict)) goto deoptimize; + if (!PyDict_CheckExact(dict)) JUMP_TO_JUMP_TARGET(); STAT_INC(STORE_SUBSCR, hit); int err = _PyDict_SetItem_Take2((PyDictObject *)dict, sub, value); Py_DECREF(dict); - if (err) goto pop_3_error_tier_two; + if (err) JUMP_TO_ERROR(); stack_pointer += -3; break; } @@ -794,7 +800,7 @@ int err = PyObject_DelItem(container, sub); Py_DECREF(container); Py_DECREF(sub); - if (err) goto pop_2_error_tier_two; + if (err) JUMP_TO_ERROR(); stack_pointer += -2; break; } @@ -807,7 +813,7 @@ assert(oparg <= MAX_INTRINSIC_1); res = _PyIntrinsics_UnaryFunctions[oparg].func(tstate, value); Py_DECREF(value); - if (res == NULL) goto pop_1_error_tier_two; + if (res == NULL) JUMP_TO_ERROR(); stack_pointer[-1] = res; break; } @@ -823,7 +829,7 @@ res = _PyIntrinsics_BinaryFunctions[oparg].func(tstate, value2, value1); Py_DECREF(value2); Py_DECREF(value1); - if (res == NULL) goto pop_2_error_tier_two; + if (res == NULL) JUMP_TO_ERROR(); stack_pointer[-2] = res; stack_pointer += -1; break; @@ -855,9 +861,9 @@ break; } - /* _INSTRUMENTED_RETURN_VALUE is not a viable micro-op for tier 2 */ + /* _INSTRUMENTED_RETURN_VALUE is not a viable micro-op for tier 2 because it is instrumented */ - /* _INSTRUMENTED_RETURN_CONST is not a viable micro-op for tier 2 */ + /* _INSTRUMENTED_RETURN_CONST is not a viable micro-op for tier 2 because it is instrumented */ case _GET_AITER: { PyObject *obj; @@ -874,11 +880,11 @@ "__aiter__ method, got %.100s", type->tp_name); Py_DECREF(obj); - if (true) goto pop_1_error_tier_two; + if (true) JUMP_TO_ERROR(); } iter = (*getter)(obj); Py_DECREF(obj); - if (iter == NULL) goto pop_1_error_tier_two; + if (iter == NULL) JUMP_TO_ERROR(); if (Py_TYPE(iter)->tp_as_async == NULL || Py_TYPE(iter)->tp_as_async->am_anext == NULL) { _PyErr_Format(tstate, PyExc_TypeError, @@ -886,7 +892,7 @@ "that does not implement __anext__: %.100s", Py_TYPE(iter)->tp_name); Py_DECREF(iter); - if (true) goto pop_1_error_tier_two; + if (true) JUMP_TO_ERROR(); } stack_pointer[-1] = iter; break; @@ -902,7 +908,7 @@ if (PyAsyncGen_CheckExact(aiter)) { awaitable = 
type->tp_as_async->am_anext(aiter); if (awaitable == NULL) { - GOTO_ERROR(error); + JUMP_TO_ERROR(); } } else { if (type->tp_as_async != NULL){ @@ -911,7 +917,7 @@ if (getter != NULL) { next_iter = (*getter)(aiter); if (next_iter == NULL) { - GOTO_ERROR(error); + JUMP_TO_ERROR(); } } else { @@ -919,7 +925,7 @@ "'async for' requires an iterator with " "__anext__ method, got %.100s", type->tp_name); - GOTO_ERROR(error); + JUMP_TO_ERROR(); } awaitable = _PyCoro_GetAwaitableIter(next_iter); if (awaitable == NULL) { @@ -929,7 +935,7 @@ "from __anext__: %.100s", Py_TYPE(next_iter)->tp_name); Py_DECREF(next_iter); - GOTO_ERROR(error); + JUMP_TO_ERROR(); } else { Py_DECREF(next_iter); } @@ -962,16 +968,16 @@ /* The code below jumps to `error` if `iter` is NULL. */ } } - if (iter == NULL) goto pop_1_error_tier_two; + if (iter == NULL) JUMP_TO_ERROR(); stack_pointer[-1] = iter; break; } - /* _SEND is not a viable micro-op for tier 2 */ + /* _SEND is not a viable micro-op for tier 2 because it uses the 'this_instr' variable */ - /* _SEND_GEN is not a viable micro-op for tier 2 */ + /* _SEND_GEN is not a viable micro-op for tier 2 because it uses the 'this_instr' variable */ - /* _INSTRUMENTED_YIELD_VALUE is not a viable micro-op for tier 2 */ + /* _INSTRUMENTED_YIELD_VALUE is not a viable micro-op for tier 2 because it is instrumented */ case _POP_EXCEPT: { PyObject *exc_value; @@ -992,11 +998,11 @@ case _LOAD_BUILD_CLASS: { PyObject *bc; - if (PyMapping_GetOptionalItem(BUILTINS(), &_Py_ID(__build_class__), &bc) < 0) goto error_tier_two; + if (PyMapping_GetOptionalItem(BUILTINS(), &_Py_ID(__build_class__), &bc) < 0) JUMP_TO_ERROR(); if (bc == NULL) { _PyErr_SetString(tstate, PyExc_NameError, "__build_class__ not found"); - if (true) goto error_tier_two; + if (true) JUMP_TO_ERROR(); } stack_pointer[0] = bc; stack_pointer += 1; @@ -1014,14 +1020,14 @@ _PyErr_Format(tstate, PyExc_SystemError, "no locals found when storing %R", name); Py_DECREF(v); - if (true) goto pop_1_error_tier_two; + if (true) JUMP_TO_ERROR(); } if (PyDict_CheckExact(ns)) err = PyDict_SetItem(ns, name, v); else err = PyObject_SetItem(ns, name, v); Py_DECREF(v); - if (err) goto pop_1_error_tier_two; + if (err) JUMP_TO_ERROR(); stack_pointer += -1; break; } @@ -1034,7 +1040,7 @@ if (ns == NULL) { _PyErr_Format(tstate, PyExc_SystemError, "no locals when deleting %R", name); - GOTO_ERROR(error); + JUMP_TO_ERROR(); } err = PyObject_DelItem(ns, name); // Can't use ERROR_IF here. 
@@ -1042,7 +1048,7 @@ _PyEval_FormatExcCheckArg(tstate, PyExc_NameError, NAME_ERROR_MSG, name); - GOTO_ERROR(error); + JUMP_TO_ERROR(); } break; } @@ -1054,7 +1060,7 @@ PyObject **top = stack_pointer + oparg - 1; int res = _PyEval_UnpackIterable(tstate, seq, oparg, -1, top); Py_DECREF(seq); - if (res == 0) goto pop_1_error_tier_two; + if (res == 0) JUMP_TO_ERROR(); stack_pointer += -1 + oparg; break; } @@ -1066,8 +1072,8 @@ oparg = CURRENT_OPARG(); seq = stack_pointer[-1]; assert(oparg == 2); - if (!PyTuple_CheckExact(seq)) goto deoptimize; - if (PyTuple_GET_SIZE(seq) != 2) goto deoptimize; + if (!PyTuple_CheckExact(seq)) JUMP_TO_JUMP_TARGET(); + if (PyTuple_GET_SIZE(seq) != 2) JUMP_TO_JUMP_TARGET(); STAT_INC(UNPACK_SEQUENCE, hit); val0 = Py_NewRef(PyTuple_GET_ITEM(seq, 0)); val1 = Py_NewRef(PyTuple_GET_ITEM(seq, 1)); @@ -1084,8 +1090,8 @@ oparg = CURRENT_OPARG(); seq = stack_pointer[-1]; values = &stack_pointer[-1]; - if (!PyTuple_CheckExact(seq)) goto deoptimize; - if (PyTuple_GET_SIZE(seq) != oparg) goto deoptimize; + if (!PyTuple_CheckExact(seq)) JUMP_TO_JUMP_TARGET(); + if (PyTuple_GET_SIZE(seq) != oparg) JUMP_TO_JUMP_TARGET(); STAT_INC(UNPACK_SEQUENCE, hit); PyObject **items = _PyTuple_ITEMS(seq); for (int i = oparg; --i >= 0; ) { @@ -1102,8 +1108,8 @@ oparg = CURRENT_OPARG(); seq = stack_pointer[-1]; values = &stack_pointer[-1]; - if (!PyList_CheckExact(seq)) goto deoptimize; - if (PyList_GET_SIZE(seq) != oparg) goto deoptimize; + if (!PyList_CheckExact(seq)) JUMP_TO_JUMP_TARGET(); + if (PyList_GET_SIZE(seq) != oparg) JUMP_TO_JUMP_TARGET(); STAT_INC(UNPACK_SEQUENCE, hit); PyObject **items = _PyList_ITEMS(seq); for (int i = oparg; --i >= 0; ) { @@ -1122,7 +1128,7 @@ PyObject **top = stack_pointer + totalargs - 1; int res = _PyEval_UnpackIterable(tstate, seq, oparg & 0xFF, oparg >> 8, top); Py_DECREF(seq); - if (res == 0) goto pop_1_error_tier_two; + if (res == 0) JUMP_TO_ERROR(); stack_pointer += (oparg >> 8) + (oparg & 0xFF); break; } @@ -1137,7 +1143,7 @@ int err = PyObject_SetAttr(owner, name, v); Py_DECREF(v); Py_DECREF(owner); - if (err) goto pop_2_error_tier_two; + if (err) JUMP_TO_ERROR(); stack_pointer += -2; break; } @@ -1149,7 +1155,7 @@ PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); int err = PyObject_DelAttr(owner, name); Py_DECREF(owner); - if (err) goto pop_1_error_tier_two; + if (err) JUMP_TO_ERROR(); stack_pointer += -1; break; } @@ -1161,7 +1167,7 @@ PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); int err = PyDict_SetItem(GLOBALS(), name, v); Py_DECREF(v); - if (err) goto pop_1_error_tier_two; + if (err) JUMP_TO_ERROR(); stack_pointer += -1; break; } @@ -1172,12 +1178,12 @@ int err = PyDict_Pop(GLOBALS(), name, NULL); // Can't use ERROR_IF here. 
if (err < 0) { - GOTO_ERROR(error); + JUMP_TO_ERROR(); } if (err == 0) { _PyEval_FormatExcCheckArg(tstate, PyExc_NameError, NAME_ERROR_MSG, name); - GOTO_ERROR(error); + JUMP_TO_ERROR(); } break; } @@ -1188,7 +1194,7 @@ if (locals == NULL) { _PyErr_SetString(tstate, PyExc_SystemError, "no locals found"); - if (true) goto error_tier_two; + if (true) JUMP_TO_ERROR(); } Py_INCREF(locals); stack_pointer[0] = locals; @@ -1203,21 +1209,21 @@ mod_or_class_dict = stack_pointer[-1]; PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); if (PyMapping_GetOptionalItem(mod_or_class_dict, name, &v) < 0) { - GOTO_ERROR(error); + JUMP_TO_ERROR(); } if (v == NULL) { if (PyDict_GetItemRef(GLOBALS(), name, &v) < 0) { - GOTO_ERROR(error); + JUMP_TO_ERROR(); } if (v == NULL) { if (PyMapping_GetOptionalItem(BUILTINS(), name, &v) < 0) { - GOTO_ERROR(error); + JUMP_TO_ERROR(); } if (v == NULL) { _PyEval_FormatExcCheckArg( tstate, PyExc_NameError, NAME_ERROR_MSG, name); - GOTO_ERROR(error); + JUMP_TO_ERROR(); } } } @@ -1226,39 +1232,7 @@ break; } - case _LOAD_NAME: { - PyObject *v; - oparg = CURRENT_OPARG(); - PyObject *mod_or_class_dict = LOCALS(); - if (mod_or_class_dict == NULL) { - _PyErr_SetString(tstate, PyExc_SystemError, - "no locals found"); - if (true) goto error_tier_two; - } - PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); - if (PyMapping_GetOptionalItem(mod_or_class_dict, name, &v) < 0) { - GOTO_ERROR(error); - } - if (v == NULL) { - if (PyDict_GetItemRef(GLOBALS(), name, &v) < 0) { - GOTO_ERROR(error); - } - if (v == NULL) { - if (PyMapping_GetOptionalItem(BUILTINS(), name, &v) < 0) { - GOTO_ERROR(error); - } - if (v == NULL) { - _PyEval_FormatExcCheckArg( - tstate, PyExc_NameError, - NAME_ERROR_MSG, name); - GOTO_ERROR(error); - } - } - } - stack_pointer[0] = v; - stack_pointer += 1; - break; - } + /* _LOAD_NAME is not a viable micro-op for tier 2 because it has both popping and not-popping errors */ case _LOAD_GLOBAL: { PyObject *res; @@ -1278,22 +1252,22 @@ _PyEval_FormatExcCheckArg(tstate, PyExc_NameError, NAME_ERROR_MSG, name); } - if (true) goto error_tier_two; + if (true) JUMP_TO_ERROR(); } Py_INCREF(res); } else { /* Slow-path if globals or builtins is not a dict */ /* namespace 1: globals */ - if (PyMapping_GetOptionalItem(GLOBALS(), name, &res) < 0) goto error_tier_two; + if (PyMapping_GetOptionalItem(GLOBALS(), name, &res) < 0) JUMP_TO_ERROR(); if (res == NULL) { /* namespace 2: builtins */ - if (PyMapping_GetOptionalItem(BUILTINS(), name, &res) < 0) goto error_tier_two; + if (PyMapping_GetOptionalItem(BUILTINS(), name, &res) < 0) JUMP_TO_ERROR(); if (res == NULL) { _PyEval_FormatExcCheckArg( tstate, PyExc_NameError, NAME_ERROR_MSG, name); - if (true) goto error_tier_two; + if (true) JUMP_TO_ERROR(); } } } @@ -1307,8 +1281,8 @@ case _GUARD_GLOBALS_VERSION: { uint16_t version = (uint16_t)CURRENT_OPERAND(); PyDictObject *dict = (PyDictObject *)GLOBALS(); - if (!PyDict_CheckExact(dict)) goto deoptimize; - if (dict->ma_keys->dk_version != version) goto deoptimize; + if (!PyDict_CheckExact(dict)) JUMP_TO_JUMP_TARGET(); + if (dict->ma_keys->dk_version != version) JUMP_TO_JUMP_TARGET(); assert(DK_IS_UNICODE(dict->ma_keys)); break; } @@ -1316,8 +1290,8 @@ case _GUARD_BUILTINS_VERSION: { uint16_t version = (uint16_t)CURRENT_OPERAND(); PyDictObject *dict = (PyDictObject *)BUILTINS(); - if (!PyDict_CheckExact(dict)) goto deoptimize; - if (dict->ma_keys->dk_version != version) goto deoptimize; + if (!PyDict_CheckExact(dict)) JUMP_TO_JUMP_TARGET(); + if (dict->ma_keys->dk_version != version) 
JUMP_TO_JUMP_TARGET(); assert(DK_IS_UNICODE(dict->ma_keys)); break; } @@ -1330,7 +1304,7 @@ PyDictObject *dict = (PyDictObject *)GLOBALS(); PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(dict->ma_keys); res = entries[index].me_value; - if (res == NULL) goto deoptimize; + if (res == NULL) JUMP_TO_JUMP_TARGET(); Py_INCREF(res); STAT_INC(LOAD_GLOBAL, hit); null = NULL; @@ -1348,7 +1322,7 @@ PyDictObject *bdict = (PyDictObject *)BUILTINS(); PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(bdict->ma_keys); res = entries[index].me_value; - if (res == NULL) goto deoptimize; + if (res == NULL) JUMP_TO_JUMP_TARGET(); Py_INCREF(res); STAT_INC(LOAD_GLOBAL, hit); null = NULL; @@ -1361,7 +1335,13 @@ case _DELETE_FAST: { oparg = CURRENT_OPARG(); PyObject *v = GETLOCAL(oparg); - if (v == NULL) goto unbound_local_error_tier_two; + if (v == NULL) { + _PyEval_FormatExcCheckArg(tstate, PyExc_UnboundLocalError, + UNBOUNDLOCAL_ERROR_MSG, + PyTuple_GetItem(_PyFrame_GetCode(frame)->co_localsplusnames, oparg) + ); + if (1) JUMP_TO_ERROR(); + } SETLOCAL(oparg, NULL); break; } @@ -1373,7 +1353,7 @@ PyObject *initial = GETLOCAL(oparg); PyObject *cell = PyCell_New(initial); if (cell == NULL) { - GOTO_ERROR(error); + JUMP_TO_ERROR(); } SETLOCAL(oparg, cell); break; @@ -1387,7 +1367,7 @@ // Fortunately we don't need its superpower. if (oldobj == NULL) { _PyEval_FormatExcUnbound(tstate, _PyFrame_GetCode(frame), oparg); - GOTO_ERROR(error); + JUMP_TO_ERROR(); } PyCell_SET(cell, NULL); Py_DECREF(oldobj); @@ -1404,14 +1384,14 @@ assert(oparg >= 0 && oparg < _PyFrame_GetCode(frame)->co_nlocalsplus); name = PyTuple_GET_ITEM(_PyFrame_GetCode(frame)->co_localsplusnames, oparg); if (PyMapping_GetOptionalItem(class_dict, name, &value) < 0) { - GOTO_ERROR(error); + JUMP_TO_ERROR(); } if (!value) { PyObject *cell = GETLOCAL(oparg); value = PyCell_GET(cell); if (value == NULL) { _PyEval_FormatExcUnbound(tstate, _PyFrame_GetCode(frame), oparg); - GOTO_ERROR(error); + JUMP_TO_ERROR(); } Py_INCREF(value); } @@ -1427,7 +1407,7 @@ value = PyCell_GET(cell); if (value == NULL) { _PyEval_FormatExcUnbound(tstate, _PyFrame_GetCode(frame), oparg); - if (true) goto error_tier_two; + if (true) JUMP_TO_ERROR(); } Py_INCREF(value); stack_pointer[0] = value; @@ -1471,7 +1451,7 @@ for (int _i = oparg; --_i >= 0;) { Py_DECREF(pieces[_i]); } - if (str == NULL) { stack_pointer += -oparg; goto error_tier_two; } + if (str == NULL) JUMP_TO_ERROR(); stack_pointer[-oparg] = str; stack_pointer += 1 - oparg; break; @@ -1483,7 +1463,7 @@ oparg = CURRENT_OPARG(); values = &stack_pointer[-oparg]; tup = _PyTuple_FromArraySteal(values, oparg); - if (tup == NULL) { stack_pointer += -oparg; goto error_tier_two; } + if (tup == NULL) JUMP_TO_ERROR(); stack_pointer[-oparg] = tup; stack_pointer += 1 - oparg; break; @@ -1495,7 +1475,7 @@ oparg = CURRENT_OPARG(); values = &stack_pointer[-oparg]; list = _PyList_FromArraySteal(values, oparg); - if (list == NULL) { stack_pointer += -oparg; goto error_tier_two; } + if (list == NULL) JUMP_TO_ERROR(); stack_pointer[-oparg] = list; stack_pointer += 1 - oparg; break; @@ -1518,7 +1498,7 @@ Py_TYPE(iterable)->tp_name); } Py_DECREF(iterable); - if (true) goto pop_1_error_tier_two; + if (true) JUMP_TO_ERROR(); } assert(Py_IsNone(none_val)); Py_DECREF(iterable); @@ -1534,34 +1514,12 @@ set = stack_pointer[-2 - (oparg-1)]; int err = _PySet_Update(set, iterable); Py_DECREF(iterable); - if (err < 0) goto pop_1_error_tier_two; + if (err < 0) JUMP_TO_ERROR(); stack_pointer += -1; break; } - case _BUILD_SET: { - PyObject **values; - 
PyObject *set; - oparg = CURRENT_OPARG(); - values = &stack_pointer[-oparg]; - set = PySet_New(NULL); - if (set == NULL) - GOTO_ERROR(error); - int err = 0; - for (int i = 0; i < oparg; i++) { - PyObject *item = values[i]; - if (err == 0) - err = PySet_Add(set, item); - Py_DECREF(item); - } - if (err != 0) { - Py_DECREF(set); - if (true) { stack_pointer += -oparg; goto error_tier_two; } - } - stack_pointer[-oparg] = set; - stack_pointer += 1 - oparg; - break; - } + /* _BUILD_SET is not a viable micro-op for tier 2 because it has both popping and not-popping errors */ case _BUILD_MAP: { PyObject **values; @@ -1575,7 +1533,7 @@ for (int _i = oparg*2; --_i >= 0;) { Py_DECREF(values[_i]); } - if (map == NULL) { stack_pointer += -oparg*2; goto error_tier_two; } + if (map == NULL) JUMP_TO_ERROR(); stack_pointer[-oparg*2] = map; stack_pointer += 1 - oparg*2; break; @@ -1587,17 +1545,17 @@ if (LOCALS() == NULL) { _PyErr_Format(tstate, PyExc_SystemError, "no locals found when setting up annotations"); - if (true) goto error_tier_two; + if (true) JUMP_TO_ERROR(); } /* check if __annotations__ in locals()... */ - if (PyMapping_GetOptionalItem(LOCALS(), &_Py_ID(__annotations__), &ann_dict) < 0) goto error_tier_two; + if (PyMapping_GetOptionalItem(LOCALS(), &_Py_ID(__annotations__), &ann_dict) < 0) JUMP_TO_ERROR(); if (ann_dict == NULL) { ann_dict = PyDict_New(); - if (ann_dict == NULL) goto error_tier_two; + if (ann_dict == NULL) JUMP_TO_ERROR(); err = PyObject_SetItem(LOCALS(), &_Py_ID(__annotations__), ann_dict); Py_DECREF(ann_dict); - if (err) goto error_tier_two; + if (err) JUMP_TO_ERROR(); } else { Py_DECREF(ann_dict); @@ -1612,12 +1570,8 @@ oparg = CURRENT_OPARG(); keys = stack_pointer[-1]; values = &stack_pointer[-1 - oparg]; - if (!PyTuple_CheckExact(keys) || - PyTuple_GET_SIZE(keys) != (Py_ssize_t)oparg) { - _PyErr_SetString(tstate, PyExc_SystemError, - "bad BUILD_CONST_KEY_MAP keys argument"); - GOTO_ERROR(error); // Pop the keys and values. 
- } + assert(PyTuple_CheckExact(keys)); + assert(PyTuple_GET_SIZE(keys) == (Py_ssize_t)oparg); map = _PyDict_FromItems( &PyTuple_GET_ITEM(keys, 0), 1, values, 1, oparg); @@ -1625,7 +1579,7 @@ Py_DECREF(values[_i]); } Py_DECREF(keys); - if (map == NULL) { stack_pointer += -1 - oparg; goto error_tier_two; } + if (map == NULL) JUMP_TO_ERROR(); stack_pointer[-1 - oparg] = map; stack_pointer += -oparg; break; @@ -1644,7 +1598,7 @@ Py_TYPE(update)->tp_name); } Py_DECREF(update); - if (true) goto pop_1_error_tier_two; + if (true) JUMP_TO_ERROR(); } Py_DECREF(update); stack_pointer += -1; @@ -1662,7 +1616,7 @@ if (_PyDict_MergeEx(dict, update, 2) < 0) { _PyEval_FormatKwargsError(tstate, callable, update); Py_DECREF(update); - if (true) goto pop_1_error_tier_two; + if (true) JUMP_TO_ERROR(); } Py_DECREF(update); stack_pointer += -1; @@ -1680,12 +1634,12 @@ assert(PyDict_CheckExact(dict)); /* dict[key] = value */ // Do not DECREF INPUTS because the function steals the references - if (_PyDict_SetItem_Take2((PyDictObject *)dict, key, value) != 0) goto pop_2_error_tier_two; + if (_PyDict_SetItem_Take2((PyDictObject *)dict, key, value) != 0) JUMP_TO_ERROR(); stack_pointer += -2; break; } - /* _INSTRUMENTED_LOAD_SUPER_ATTR is not a viable micro-op for tier 2 */ + /* _INSTRUMENTED_LOAD_SUPER_ATTR is not a viable micro-op for tier 2 because it is instrumented */ case _LOAD_SUPER_ATTR_ATTR: { PyObject *self; @@ -1697,15 +1651,15 @@ class = stack_pointer[-2]; global_super = stack_pointer[-3]; assert(!(oparg & 1)); - if (global_super != (PyObject *)&PySuper_Type) goto deoptimize; - if (!PyType_Check(class)) goto deoptimize; + if (global_super != (PyObject *)&PySuper_Type) JUMP_TO_JUMP_TARGET(); + if (!PyType_Check(class)) JUMP_TO_JUMP_TARGET(); STAT_INC(LOAD_SUPER_ATTR, hit); PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2); attr = _PySuper_Lookup((PyTypeObject *)class, self, name, NULL); Py_DECREF(global_super); Py_DECREF(class); Py_DECREF(self); - if (attr == NULL) goto pop_3_error_tier_two; + if (attr == NULL) JUMP_TO_ERROR(); stack_pointer[-3] = attr; stack_pointer += -2; break; @@ -1722,8 +1676,8 @@ class = stack_pointer[-2]; global_super = stack_pointer[-3]; assert(oparg & 1); - if (global_super != (PyObject *)&PySuper_Type) goto deoptimize; - if (!PyType_Check(class)) goto deoptimize; + if (global_super != (PyObject *)&PySuper_Type) JUMP_TO_JUMP_TARGET(); + if (!PyType_Check(class)) JUMP_TO_JUMP_TARGET(); STAT_INC(LOAD_SUPER_ATTR, hit); PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2); PyTypeObject *cls = (PyTypeObject *)class; @@ -1734,7 +1688,7 @@ Py_DECREF(class); if (attr == NULL) { Py_DECREF(self); - if (true) goto pop_3_error_tier_two; + if (true) JUMP_TO_ERROR(); } if (method_found) { self_or_null = self; // transfer ownership @@ -1774,7 +1728,7 @@ meth | NULL | arg1 | ... | argN */ Py_DECREF(owner); - if (attr == NULL) goto pop_1_error_tier_two; + if (attr == NULL) JUMP_TO_ERROR(); self_or_null = NULL; } } @@ -1782,7 +1736,7 @@ /* Classic, pushes one value. 
*/ attr = PyObject_GetAttr(owner, name); Py_DECREF(owner); - if (attr == NULL) goto pop_1_error_tier_two; + if (attr == NULL) JUMP_TO_ERROR(); } stack_pointer[-1] = attr; if (oparg & 1) stack_pointer[0] = self_or_null; @@ -1796,7 +1750,7 @@ uint32_t type_version = (uint32_t)CURRENT_OPERAND(); PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); - if (tp->tp_version_tag != type_version) goto side_exit; + if (tp->tp_version_tag != type_version) JUMP_TO_JUMP_TARGET(); break; } @@ -1806,7 +1760,7 @@ assert(Py_TYPE(owner)->tp_dictoffset < 0); assert(Py_TYPE(owner)->tp_flags & Py_TPFLAGS_MANAGED_DICT); PyDictOrValues *dorv = _PyObject_DictOrValuesPointer(owner); - if (!_PyDictOrValues_IsValues(*dorv) && !_PyObject_MakeInstanceAttributesFromDict(owner, dorv)) goto deoptimize; + if (!_PyDictOrValues_IsValues(*dorv) && !_PyObject_MakeInstanceAttributesFromDict(owner, dorv)) JUMP_TO_JUMP_TARGET(); break; } @@ -1819,7 +1773,7 @@ uint16_t index = (uint16_t)CURRENT_OPERAND(); PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); attr = _PyDictOrValues_GetValues(dorv)->values[index]; - if (attr == NULL) goto deoptimize; + if (attr == NULL) JUMP_TO_JUMP_TARGET(); STAT_INC(LOAD_ATTR, hit); Py_INCREF(attr); null = NULL; @@ -1837,7 +1791,7 @@ uint16_t index = (uint16_t)CURRENT_OPERAND(); PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); attr = _PyDictOrValues_GetValues(dorv)->values[index]; - if (attr == NULL) goto deoptimize; + if (attr == NULL) JUMP_TO_JUMP_TARGET(); STAT_INC(LOAD_ATTR, hit); Py_INCREF(attr); null = NULL; @@ -1854,10 +1808,10 @@ PyObject *owner; owner = stack_pointer[-1]; uint32_t dict_version = (uint32_t)CURRENT_OPERAND(); - if (!PyModule_CheckExact(owner)) goto deoptimize; + if (!PyModule_CheckExact(owner)) JUMP_TO_JUMP_TARGET(); PyDictObject *dict = (PyDictObject *)((PyModuleObject *)owner)->md_dict; assert(dict != NULL); - if (dict->ma_keys->dk_version != dict_version) goto deoptimize; + if (dict->ma_keys->dk_version != dict_version) JUMP_TO_JUMP_TARGET(); break; } @@ -1873,7 +1827,7 @@ assert(index < dict->ma_keys->dk_nentries); PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + index; attr = ep->me_value; - if (attr == NULL) goto deoptimize; + if (attr == NULL) JUMP_TO_JUMP_TARGET(); STAT_INC(LOAD_ATTR, hit); Py_INCREF(attr); null = NULL; @@ -1889,9 +1843,9 @@ owner = stack_pointer[-1]; assert(Py_TYPE(owner)->tp_flags & Py_TPFLAGS_MANAGED_DICT); PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); - if (_PyDictOrValues_IsValues(dorv)) goto deoptimize; + if (_PyDictOrValues_IsValues(dorv)) JUMP_TO_JUMP_TARGET(); PyDictObject *dict = (PyDictObject *)_PyDictOrValues_GetDict(dorv); - if (dict == NULL) goto deoptimize; + if (dict == NULL) JUMP_TO_JUMP_TARGET(); assert(PyDict_CheckExact((PyObject *)dict)); break; } @@ -1905,19 +1859,19 @@ uint16_t hint = (uint16_t)CURRENT_OPERAND(); PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); PyDictObject *dict = (PyDictObject *)_PyDictOrValues_GetDict(dorv); - if (hint >= (size_t)dict->ma_keys->dk_nentries) goto deoptimize; + if (hint >= (size_t)dict->ma_keys->dk_nentries) JUMP_TO_JUMP_TARGET(); PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1); if (DK_IS_UNICODE(dict->ma_keys)) { PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint; - if (ep->me_key != name) goto deoptimize; + if (ep->me_key != name) JUMP_TO_JUMP_TARGET(); attr = ep->me_value; } else { PyDictKeyEntry *ep = DK_ENTRIES(dict->ma_keys) + hint; - if (ep->me_key != name) goto deoptimize; + if (ep->me_key != name) 
JUMP_TO_JUMP_TARGET(); attr = ep->me_value; } - if (attr == NULL) goto deoptimize; + if (attr == NULL) JUMP_TO_JUMP_TARGET(); STAT_INC(LOAD_ATTR, hit); Py_INCREF(attr); null = NULL; @@ -1937,7 +1891,7 @@ uint16_t index = (uint16_t)CURRENT_OPERAND(); char *addr = (char *)owner + index; attr = *(PyObject **)addr; - if (attr == NULL) goto deoptimize; + if (attr == NULL) JUMP_TO_JUMP_TARGET(); STAT_INC(LOAD_ATTR, hit); Py_INCREF(attr); null = NULL; @@ -1955,7 +1909,7 @@ uint16_t index = (uint16_t)CURRENT_OPERAND(); char *addr = (char *)owner + index; attr = *(PyObject **)addr; - if (attr == NULL) goto deoptimize; + if (attr == NULL) JUMP_TO_JUMP_TARGET(); STAT_INC(LOAD_ATTR, hit); Py_INCREF(attr); null = NULL; @@ -1972,9 +1926,9 @@ PyObject *owner; owner = stack_pointer[-1]; uint32_t type_version = (uint32_t)CURRENT_OPERAND(); - if (!PyType_Check(owner)) goto deoptimize; + if (!PyType_Check(owner)) JUMP_TO_JUMP_TARGET(); assert(type_version != 0); - if (((PyTypeObject *)owner)->tp_version_tag != type_version) goto deoptimize; + if (((PyTypeObject *)owner)->tp_version_tag != type_version) JUMP_TO_JUMP_TARGET(); break; } @@ -2014,16 +1968,16 @@ /* _LOAD_ATTR_CLASS is split on (oparg & 1) */ - /* _LOAD_ATTR_PROPERTY is not a viable micro-op for tier 2 */ + /* _LOAD_ATTR_PROPERTY is not a viable micro-op for tier 2 because it uses the 'this_instr' variable */ - /* _LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN is not a viable micro-op for tier 2 */ + /* _LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN is not a viable micro-op for tier 2 because it uses the 'this_instr' variable */ case _GUARD_DORV_VALUES: { PyObject *owner; owner = stack_pointer[-1]; assert(Py_TYPE(owner)->tp_flags & Py_TPFLAGS_MANAGED_DICT); PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); - if (!_PyDictOrValues_IsValues(dorv)) goto deoptimize; + if (!_PyDictOrValues_IsValues(dorv)) JUMP_TO_JUMP_TARGET(); break; } @@ -2049,7 +2003,7 @@ break; } - /* _STORE_ATTR_WITH_HINT is not a viable micro-op for tier 2 */ + /* _STORE_ATTR_WITH_HINT is not a viable micro-op for tier 2 because it has unused cache entries */ case _STORE_ATTR_SLOT: { PyObject *owner; @@ -2078,11 +2032,11 @@ res = PyObject_RichCompare(left, right, oparg >> 5); Py_DECREF(left); Py_DECREF(right); - if (res == NULL) goto pop_2_error_tier_two; + if (res == NULL) JUMP_TO_ERROR(); if (oparg & 16) { int res_bool = PyObject_IsTrue(res); Py_DECREF(res); - if (res_bool < 0) goto pop_2_error_tier_two; + if (res_bool < 0) JUMP_TO_ERROR(); res = res_bool ? Py_True : Py_False; } stack_pointer[-2] = res; @@ -2118,8 +2072,8 @@ oparg = CURRENT_OPARG(); right = stack_pointer[-1]; left = stack_pointer[-2]; - if (!_PyLong_IsCompact((PyLongObject *)left)) goto deoptimize; - if (!_PyLong_IsCompact((PyLongObject *)right)) goto deoptimize; + if (!_PyLong_IsCompact((PyLongObject *)left)) JUMP_TO_JUMP_TARGET(); + if (!_PyLong_IsCompact((PyLongObject *)right)) JUMP_TO_JUMP_TARGET(); STAT_INC(COMPARE_OP, hit); assert(_PyLong_DigitCount((PyLongObject *)left) <= 1 && _PyLong_DigitCount((PyLongObject *)right) <= 1); @@ -2184,7 +2138,7 @@ int res = PySequence_Contains(right, left); Py_DECREF(left); Py_DECREF(right); - if (res < 0) goto pop_2_error_tier_two; + if (res < 0) JUMP_TO_ERROR(); b = (res ^ oparg) ? 
Py_True : Py_False; stack_pointer[-2] = b; stack_pointer += -1; @@ -2198,13 +2152,13 @@ oparg = CURRENT_OPARG(); right = stack_pointer[-1]; left = stack_pointer[-2]; - if (!(PySet_CheckExact(right) || PyFrozenSet_CheckExact(right))) goto deoptimize; + if (!(PySet_CheckExact(right) || PyFrozenSet_CheckExact(right))) JUMP_TO_JUMP_TARGET(); STAT_INC(CONTAINS_OP, hit); // Note: both set and frozenset use the same seq_contains method! int res = _PySet_Contains((PySetObject *)right, left); Py_DECREF(left); Py_DECREF(right); - if (res < 0) goto pop_2_error_tier_two; + if (res < 0) JUMP_TO_ERROR(); b = (res ^ oparg) ? Py_True : Py_False; stack_pointer[-2] = b; stack_pointer += -1; @@ -2218,12 +2172,12 @@ oparg = CURRENT_OPARG(); right = stack_pointer[-1]; left = stack_pointer[-2]; - if (!PyDict_CheckExact(right)) goto deoptimize; + if (!PyDict_CheckExact(right)) JUMP_TO_JUMP_TARGET(); STAT_INC(CONTAINS_OP, hit); int res = PyDict_Contains(right, left); Py_DECREF(left); Py_DECREF(right); - if (res < 0) goto pop_2_error_tier_two; + if (res < 0) JUMP_TO_ERROR(); b = (res ^ oparg) ? Py_True : Py_False; stack_pointer[-2] = b; stack_pointer += -1; @@ -2240,7 +2194,7 @@ if (_PyEval_CheckExceptStarTypeValid(tstate, match_type) < 0) { Py_DECREF(exc_value); Py_DECREF(match_type); - if (true) goto pop_2_error_tier_two; + if (true) JUMP_TO_ERROR(); } match = NULL; rest = NULL; @@ -2248,9 +2202,9 @@ &match, &rest); Py_DECREF(exc_value); Py_DECREF(match_type); - if (res < 0) goto pop_2_error_tier_two; + if (res < 0) JUMP_TO_ERROR(); assert((match == NULL) == (rest == NULL)); - if (match == NULL) goto pop_2_error_tier_two; + if (match == NULL) JUMP_TO_ERROR(); if (!Py_IsNone(match)) { PyErr_SetHandledException(match); } @@ -2268,7 +2222,7 @@ assert(PyExceptionInstance_Check(left)); if (_PyEval_CheckExceptTypeValid(tstate, right) < 0) { Py_DECREF(right); - if (true) goto pop_1_error_tier_two; + if (true) JUMP_TO_ERROR(); } int res = PyErr_GivenExceptionMatches(left, right); Py_DECREF(right); @@ -2277,9 +2231,9 @@ break; } - /* _POP_JUMP_IF_FALSE is not a viable micro-op for tier 2 */ + /* _POP_JUMP_IF_FALSE is not a viable micro-op for tier 2 because it is replaced */ - /* _POP_JUMP_IF_TRUE is not a viable micro-op for tier 2 */ + /* _POP_JUMP_IF_TRUE is not a viable micro-op for tier 2 because it is replaced */ case _IS_NONE: { PyObject *value; @@ -2302,9 +2256,9 @@ obj = stack_pointer[-1]; // PUSH(len(TOS)) Py_ssize_t len_i = PyObject_Length(obj); - if (len_i < 0) goto error_tier_two; + if (len_i < 0) JUMP_TO_ERROR(); len_o = PyLong_FromSsize_t(len_i); - if (len_o == NULL) goto error_tier_two; + if (len_o == NULL) JUMP_TO_ERROR(); stack_pointer[0] = len_o; stack_pointer += 1; break; @@ -2330,7 +2284,7 @@ assert(PyTuple_CheckExact(attrs)); // Success! } else { - if (_PyErr_Occurred(tstate)) goto pop_3_error_tier_two; + if (_PyErr_Occurred(tstate)) JUMP_TO_ERROR(); // Error! attrs = Py_None; // Failure! } @@ -2369,7 +2323,7 @@ subject = stack_pointer[-2]; // On successful match, PUSH(values). Otherwise, PUSH(None). 
values_or_none = _PyEval_MatchKeys(tstate, subject, keys); - if (values_or_none == NULL) goto error_tier_two; + if (values_or_none == NULL) JUMP_TO_ERROR(); stack_pointer[0] = values_or_none; stack_pointer += 1; break; @@ -2382,7 +2336,7 @@ /* before: [obj]; after [getiter(obj)] */ iter = PyObject_GetIter(iterable); Py_DECREF(iterable); - if (iter == NULL) goto pop_1_error_tier_two; + if (iter == NULL) JUMP_TO_ERROR(); stack_pointer[-1] = iter; break; } @@ -2400,7 +2354,7 @@ _PyErr_SetString(tstate, PyExc_TypeError, "cannot 'yield from' a coroutine object " "in a non-coroutine generator"); - GOTO_ERROR(error); + JUMP_TO_ERROR(); } iter = iterable; } @@ -2411,7 +2365,7 @@ /* `iterable` is not a generator. */ iter = PyObject_GetIter(iterable); if (iter == NULL) { - GOTO_ERROR(error); + JUMP_TO_ERROR(); } Py_DECREF(iterable); } @@ -2419,7 +2373,7 @@ break; } - /* _FOR_ITER is not a viable micro-op for tier 2 */ + /* _FOR_ITER is not a viable micro-op for tier 2 because it is replaced */ case _FOR_ITER_TIER_TWO: { PyObject *iter; @@ -2430,7 +2384,7 @@ if (next == NULL) { if (_PyErr_Occurred(tstate)) { if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { - GOTO_ERROR(error); + JUMP_TO_ERROR(); } _PyErr_Clear(tstate); } @@ -2438,7 +2392,7 @@ Py_DECREF(iter); STACK_SHRINK(1); /* The translator sets the deopt target just past END_FOR */ - if (true) goto deoptimize; + if (true) JUMP_TO_JUMP_TARGET(); } // Common case: no jump, leave it to the code generator stack_pointer[0] = next; @@ -2446,16 +2400,16 @@ break; } - /* _INSTRUMENTED_FOR_ITER is not a viable micro-op for tier 2 */ + /* _INSTRUMENTED_FOR_ITER is not a viable micro-op for tier 2 because it is instrumented */ case _ITER_CHECK_LIST: { PyObject *iter; iter = stack_pointer[-1]; - if (Py_TYPE(iter) != &PyListIter_Type) goto deoptimize; + if (Py_TYPE(iter) != &PyListIter_Type) JUMP_TO_JUMP_TARGET(); break; } - /* _ITER_JUMP_LIST is not a viable micro-op for tier 2 */ + /* _ITER_JUMP_LIST is not a viable micro-op for tier 2 because it is replaced */ case _GUARD_NOT_EXHAUSTED_LIST: { PyObject *iter; @@ -2463,8 +2417,8 @@ _PyListIterObject *it = (_PyListIterObject *)iter; assert(Py_TYPE(iter) == &PyListIter_Type); PyListObject *seq = it->it_seq; - if (seq == NULL) goto deoptimize; - if ((size_t)it->it_index >= (size_t)PyList_GET_SIZE(seq)) goto deoptimize; + if (seq == NULL) JUMP_TO_JUMP_TARGET(); + if ((size_t)it->it_index >= (size_t)PyList_GET_SIZE(seq)) JUMP_TO_JUMP_TARGET(); break; } @@ -2486,11 +2440,11 @@ case _ITER_CHECK_TUPLE: { PyObject *iter; iter = stack_pointer[-1]; - if (Py_TYPE(iter) != &PyTupleIter_Type) goto deoptimize; + if (Py_TYPE(iter) != &PyTupleIter_Type) JUMP_TO_JUMP_TARGET(); break; } - /* _ITER_JUMP_TUPLE is not a viable micro-op for tier 2 */ + /* _ITER_JUMP_TUPLE is not a viable micro-op for tier 2 because it is replaced */ case _GUARD_NOT_EXHAUSTED_TUPLE: { PyObject *iter; @@ -2498,8 +2452,8 @@ _PyTupleIterObject *it = (_PyTupleIterObject *)iter; assert(Py_TYPE(iter) == &PyTupleIter_Type); PyTupleObject *seq = it->it_seq; - if (seq == NULL) goto deoptimize; - if (it->it_index >= PyTuple_GET_SIZE(seq)) goto deoptimize; + if (seq == NULL) JUMP_TO_JUMP_TARGET(); + if (it->it_index >= PyTuple_GET_SIZE(seq)) JUMP_TO_JUMP_TARGET(); break; } @@ -2522,18 +2476,18 @@ PyObject *iter; iter = stack_pointer[-1]; _PyRangeIterObject *r = (_PyRangeIterObject *)iter; - if (Py_TYPE(r) != &PyRangeIter_Type) goto deoptimize; + if (Py_TYPE(r) != &PyRangeIter_Type) JUMP_TO_JUMP_TARGET(); break; } - /* _ITER_JUMP_RANGE is not a 
viable micro-op for tier 2 */ + /* _ITER_JUMP_RANGE is not a viable micro-op for tier 2 because it is replaced */ case _GUARD_NOT_EXHAUSTED_RANGE: { PyObject *iter; iter = stack_pointer[-1]; _PyRangeIterObject *r = (_PyRangeIterObject *)iter; assert(Py_TYPE(r) == &PyRangeIter_Type); - if (r->len <= 0) goto deoptimize; + if (r->len <= 0) JUMP_TO_JUMP_TARGET(); break; } @@ -2548,96 +2502,17 @@ r->start = value + r->step; r->len--; next = PyLong_FromLong(value); - if (next == NULL) goto error_tier_two; + if (next == NULL) JUMP_TO_ERROR(); stack_pointer[0] = next; stack_pointer += 1; break; } - /* _FOR_ITER_GEN is not a viable micro-op for tier 2 */ + /* _FOR_ITER_GEN is not a viable micro-op for tier 2 because it uses the 'this_instr' variable */ - case _BEFORE_ASYNC_WITH: { - PyObject *mgr; - PyObject *exit; - PyObject *res; - mgr = stack_pointer[-1]; - PyObject *enter = _PyObject_LookupSpecial(mgr, &_Py_ID(__aenter__)); - if (enter == NULL) { - if (!_PyErr_Occurred(tstate)) { - _PyErr_Format(tstate, PyExc_TypeError, - "'%.200s' object does not support the " - "asynchronous context manager protocol", - Py_TYPE(mgr)->tp_name); - } - GOTO_ERROR(error); - } - exit = _PyObject_LookupSpecial(mgr, &_Py_ID(__aexit__)); - if (exit == NULL) { - if (!_PyErr_Occurred(tstate)) { - _PyErr_Format(tstate, PyExc_TypeError, - "'%.200s' object does not support the " - "asynchronous context manager protocol " - "(missed __aexit__ method)", - Py_TYPE(mgr)->tp_name); - } - Py_DECREF(enter); - GOTO_ERROR(error); - } - Py_DECREF(mgr); - res = PyObject_CallNoArgs(enter); - Py_DECREF(enter); - if (res == NULL) { - Py_DECREF(exit); - if (true) goto pop_1_error_tier_two; - } - stack_pointer[-1] = exit; - stack_pointer[0] = res; - stack_pointer += 1; - break; - } + /* _BEFORE_ASYNC_WITH is not a viable micro-op for tier 2 because it has both popping and not-popping errors */ - case _BEFORE_WITH: { - PyObject *mgr; - PyObject *exit; - PyObject *res; - mgr = stack_pointer[-1]; - /* pop the context manager, push its __exit__ and the - * value returned from calling its __enter__ - */ - PyObject *enter = _PyObject_LookupSpecial(mgr, &_Py_ID(__enter__)); - if (enter == NULL) { - if (!_PyErr_Occurred(tstate)) { - _PyErr_Format(tstate, PyExc_TypeError, - "'%.200s' object does not support the " - "context manager protocol", - Py_TYPE(mgr)->tp_name); - } - GOTO_ERROR(error); - } - exit = _PyObject_LookupSpecial(mgr, &_Py_ID(__exit__)); - if (exit == NULL) { - if (!_PyErr_Occurred(tstate)) { - _PyErr_Format(tstate, PyExc_TypeError, - "'%.200s' object does not support the " - "context manager protocol " - "(missed __exit__ method)", - Py_TYPE(mgr)->tp_name); - } - Py_DECREF(enter); - GOTO_ERROR(error); - } - Py_DECREF(mgr); - res = PyObject_CallNoArgs(enter); - Py_DECREF(enter); - if (res == NULL) { - Py_DECREF(exit); - if (true) goto pop_1_error_tier_two; - } - stack_pointer[-1] = exit; - stack_pointer[0] = res; - stack_pointer += 1; - break; - } + /* _BEFORE_WITH is not a viable micro-op for tier 2 because it has both popping and not-popping errors */ case _WITH_EXCEPT_START: { PyObject *val; @@ -2670,7 +2545,7 @@ PyObject *stack[4] = {NULL, exc, val, tb}; res = PyObject_Vectorcall(exit_func, stack + 1, 3 | PY_VECTORCALL_ARGUMENTS_OFFSET, NULL); - if (res == NULL) goto error_tier_two; + if (res == NULL) JUMP_TO_ERROR(); stack_pointer[0] = res; stack_pointer += 1; break; @@ -2700,7 +2575,7 @@ owner = stack_pointer[-1]; assert(Py_TYPE(owner)->tp_flags & Py_TPFLAGS_MANAGED_DICT); PyDictOrValues *dorv = 
_PyObject_DictOrValuesPointer(owner); - if (!_PyDictOrValues_IsValues(*dorv) && !_PyObject_MakeInstanceAttributesFromDict(owner, dorv)) goto deoptimize; + if (!_PyDictOrValues_IsValues(*dorv) && !_PyObject_MakeInstanceAttributesFromDict(owner, dorv)) JUMP_TO_JUMP_TARGET(); break; } @@ -2710,7 +2585,7 @@ uint32_t keys_version = (uint32_t)CURRENT_OPERAND(); PyTypeObject *owner_cls = Py_TYPE(owner); PyHeapTypeObject *owner_heap_type = (PyHeapTypeObject *)owner_cls; - if (owner_heap_type->ht_cached_keys->dk_version != keys_version) goto deoptimize; + if (owner_heap_type->ht_cached_keys->dk_version != keys_version) JUMP_TO_JUMP_TARGET(); break; } @@ -2792,7 +2667,7 @@ assert(dictoffset > 0); PyObject *dict = *(PyObject **)((char *)owner + dictoffset); /* This object has a __dict__, just not yet created */ - if (dict != NULL) goto deoptimize; + if (dict != NULL) JUMP_TO_JUMP_TARGET(); break; } @@ -2815,9 +2690,9 @@ break; } - /* _INSTRUMENTED_CALL is not a viable micro-op for tier 2 */ + /* _INSTRUMENTED_CALL is not a viable micro-op for tier 2 because it is instrumented */ - /* _CALL is not a viable micro-op for tier 2 */ + /* _CALL is not a viable micro-op for tier 2 because it uses the 'this_instr' variable */ case _CHECK_PERIODIC: { CHECK_EVAL_BREAKER(); @@ -2830,8 +2705,8 @@ oparg = CURRENT_OPARG(); null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; - if (null != NULL) goto deoptimize; - if (Py_TYPE(callable) != &PyMethod_Type) goto deoptimize; + if (null != NULL) JUMP_TO_JUMP_TARGET(); + if (Py_TYPE(callable) != &PyMethod_Type) JUMP_TO_JUMP_TARGET(); break; } @@ -2853,7 +2728,7 @@ } case _CHECK_PEP_523: { - if (tstate->interp->eval_frame) goto deoptimize; + if (tstate->interp->eval_frame) JUMP_TO_JUMP_TARGET(); break; } @@ -2864,11 +2739,11 @@ self_or_null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; uint32_t func_version = (uint32_t)CURRENT_OPERAND(); - if (!PyFunction_Check(callable)) goto deoptimize; + if (!PyFunction_Check(callable)) JUMP_TO_JUMP_TARGET(); PyFunctionObject *func = (PyFunctionObject *)callable; - if (func->func_version != func_version) goto deoptimize; + if (func->func_version != func_version) JUMP_TO_JUMP_TARGET(); PyCodeObject *code = (PyCodeObject *)func->func_code; - if (code->co_argcount != oparg + (self_or_null != NULL)) goto deoptimize; + if (code->co_argcount != oparg + (self_or_null != NULL)) JUMP_TO_JUMP_TARGET(); break; } @@ -2878,8 +2753,8 @@ callable = stack_pointer[-2 - oparg]; PyFunctionObject *func = (PyFunctionObject *)callable; PyCodeObject *code = (PyCodeObject *)func->func_code; - if (!_PyThreadState_HasStackSpace(tstate, code->co_framesize)) goto deoptimize; - if (tstate->py_recursion_remaining <= 1) goto deoptimize; + if (!_PyThreadState_HasStackSpace(tstate, code->co_framesize)) JUMP_TO_JUMP_TARGET(); + if (tstate->py_recursion_remaining <= 1) JUMP_TO_JUMP_TARGET(); break; } @@ -3049,7 +2924,7 @@ break; } - /* _CALL_PY_WITH_DEFAULTS is not a viable micro-op for tier 2 */ + /* _CALL_PY_WITH_DEFAULTS is not a viable micro-op for tier 2 because it uses the 'this_instr' variable */ case _CALL_TYPE_1: { PyObject *arg; @@ -3061,8 +2936,8 @@ null = stack_pointer[-2]; callable = stack_pointer[-3]; assert(oparg == 1); - if (null != NULL) goto deoptimize; - if (callable != (PyObject *)&PyType_Type) goto deoptimize; + if (null != NULL) JUMP_TO_JUMP_TARGET(); + if (callable != (PyObject *)&PyType_Type) JUMP_TO_JUMP_TARGET(); STAT_INC(CALL, hit); res = Py_NewRef(Py_TYPE(arg)); Py_DECREF(arg); @@ -3081,12 +2956,12 @@ 
null = stack_pointer[-2]; callable = stack_pointer[-3]; assert(oparg == 1); - if (null != NULL) goto deoptimize; - if (callable != (PyObject *)&PyUnicode_Type) goto deoptimize; + if (null != NULL) JUMP_TO_JUMP_TARGET(); + if (callable != (PyObject *)&PyUnicode_Type) JUMP_TO_JUMP_TARGET(); STAT_INC(CALL, hit); res = PyObject_Str(arg); Py_DECREF(arg); - if (res == NULL) goto pop_3_error_tier_two; + if (res == NULL) JUMP_TO_ERROR(); stack_pointer[-3] = res; stack_pointer += -2; break; @@ -3102,18 +2977,18 @@ null = stack_pointer[-2]; callable = stack_pointer[-3]; assert(oparg == 1); - if (null != NULL) goto deoptimize; - if (callable != (PyObject *)&PyTuple_Type) goto deoptimize; + if (null != NULL) JUMP_TO_JUMP_TARGET(); + if (callable != (PyObject *)&PyTuple_Type) JUMP_TO_JUMP_TARGET(); STAT_INC(CALL, hit); res = PySequence_Tuple(arg); Py_DECREF(arg); - if (res == NULL) goto pop_3_error_tier_two; + if (res == NULL) JUMP_TO_ERROR(); stack_pointer[-3] = res; stack_pointer += -2; break; } - /* _CALL_ALLOC_AND_ENTER_INIT is not a viable micro-op for tier 2 */ + /* _CALL_ALLOC_AND_ENTER_INIT is not a viable micro-op for tier 2 because it uses the 'this_instr' variable */ case _EXIT_INIT_CHECK: { PyObject *should_be_none; @@ -3123,7 +2998,7 @@ PyErr_Format(PyExc_TypeError, "__init__() should return None, not '%.200s'", Py_TYPE(should_be_none)->tp_name); - GOTO_ERROR(error); + JUMP_TO_ERROR(); } stack_pointer += -1; break; @@ -3143,9 +3018,9 @@ args--; total_args++; } - if (!PyType_Check(callable)) goto deoptimize; + if (!PyType_Check(callable)) JUMP_TO_JUMP_TARGET(); PyTypeObject *tp = (PyTypeObject *)callable; - if (tp->tp_vectorcall == NULL) goto deoptimize; + if (tp->tp_vectorcall == NULL) JUMP_TO_JUMP_TARGET(); STAT_INC(CALL, hit); res = tp->tp_vectorcall((PyObject *)tp, args, total_args, NULL); /* Free the arguments. */ @@ -3153,7 +3028,7 @@ Py_DECREF(args[i]); } Py_DECREF(tp); - if (res == NULL) { stack_pointer += -2 - oparg; goto error_tier_two; } + if (res == NULL) JUMP_TO_ERROR(); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; break; @@ -3174,11 +3049,11 @@ args--; total_args++; } - if (total_args != 1) goto deoptimize; - if (!PyCFunction_CheckExact(callable)) goto deoptimize; - if (PyCFunction_GET_FLAGS(callable) != METH_O) goto deoptimize; + if (total_args != 1) JUMP_TO_JUMP_TARGET(); + if (!PyCFunction_CheckExact(callable)) JUMP_TO_JUMP_TARGET(); + if (PyCFunction_GET_FLAGS(callable) != METH_O) JUMP_TO_JUMP_TARGET(); // CPython promises to check all non-vectorcall function calls. 
- if (tstate->c_recursion_remaining <= 0) goto deoptimize; + if (tstate->c_recursion_remaining <= 0) JUMP_TO_JUMP_TARGET(); STAT_INC(CALL, hit); PyCFunction cfunc = PyCFunction_GET_FUNCTION(callable); PyObject *arg = args[0]; @@ -3188,7 +3063,7 @@ assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); Py_DECREF(arg); Py_DECREF(callable); - if (res == NULL) { stack_pointer += -2 - oparg; goto error_tier_two; } + if (res == NULL) JUMP_TO_ERROR(); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; break; @@ -3209,8 +3084,8 @@ args--; total_args++; } - if (!PyCFunction_CheckExact(callable)) goto deoptimize; - if (PyCFunction_GET_FLAGS(callable) != METH_FASTCALL) goto deoptimize; + if (!PyCFunction_CheckExact(callable)) JUMP_TO_JUMP_TARGET(); + if (PyCFunction_GET_FLAGS(callable) != METH_FASTCALL) JUMP_TO_JUMP_TARGET(); STAT_INC(CALL, hit); PyCFunction cfunc = PyCFunction_GET_FUNCTION(callable); /* res = func(self, args, nargs) */ @@ -3224,7 +3099,7 @@ Py_DECREF(args[i]); } Py_DECREF(callable); - if (res == NULL) { stack_pointer += -2 - oparg; goto error_tier_two; } + if (res == NULL) JUMP_TO_ERROR(); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; break; @@ -3245,8 +3120,8 @@ args--; total_args++; } - if (!PyCFunction_CheckExact(callable)) goto deoptimize; - if (PyCFunction_GET_FLAGS(callable) != (METH_FASTCALL | METH_KEYWORDS)) goto deoptimize; + if (!PyCFunction_CheckExact(callable)) JUMP_TO_JUMP_TARGET(); + if (PyCFunction_GET_FLAGS(callable) != (METH_FASTCALL | METH_KEYWORDS)) JUMP_TO_JUMP_TARGET(); STAT_INC(CALL, hit); /* res = func(self, args, nargs, kwnames) */ PyCFunctionFastWithKeywords cfunc = @@ -3259,7 +3134,7 @@ Py_DECREF(args[i]); } Py_DECREF(callable); - if (res == NULL) { stack_pointer += -2 - oparg; goto error_tier_two; } + if (res == NULL) JUMP_TO_ERROR(); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; break; @@ -3280,14 +3155,14 @@ args--; total_args++; } - if (total_args != 1) goto deoptimize; + if (total_args != 1) JUMP_TO_JUMP_TARGET(); PyInterpreterState *interp = tstate->interp; - if (callable != interp->callable_cache.len) goto deoptimize; + if (callable != interp->callable_cache.len) JUMP_TO_JUMP_TARGET(); STAT_INC(CALL, hit); PyObject *arg = args[0]; Py_ssize_t len_i = PyObject_Length(arg); if (len_i < 0) { - GOTO_ERROR(error); + JUMP_TO_ERROR(); } res = PyLong_FromSsize_t(len_i); assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); @@ -3316,15 +3191,15 @@ args--; total_args++; } - if (total_args != 2) goto deoptimize; + if (total_args != 2) JUMP_TO_JUMP_TARGET(); PyInterpreterState *interp = tstate->interp; - if (callable != interp->callable_cache.isinstance) goto deoptimize; + if (callable != interp->callable_cache.isinstance) JUMP_TO_JUMP_TARGET(); STAT_INC(CALL, hit); PyObject *cls = args[1]; PyObject *inst = args[0]; int retval = PyObject_IsInstance(inst, cls); if (retval < 0) { - GOTO_ERROR(error); + JUMP_TO_ERROR(); } res = PyBool_FromLong(retval); assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); @@ -3354,15 +3229,15 @@ total_args++; } PyMethodDescrObject *method = (PyMethodDescrObject *)callable; - if (total_args != 2) goto deoptimize; - if (!Py_IS_TYPE(method, &PyMethodDescr_Type)) goto deoptimize; + if (total_args != 2) JUMP_TO_JUMP_TARGET(); + if (!Py_IS_TYPE(method, &PyMethodDescr_Type)) JUMP_TO_JUMP_TARGET(); PyMethodDef *meth = method->d_method; - if (meth->ml_flags != METH_O) goto deoptimize; + if (meth->ml_flags != METH_O) JUMP_TO_JUMP_TARGET(); // CPython promises to check all non-vectorcall 
function calls. - if (tstate->c_recursion_remaining <= 0) goto deoptimize; + if (tstate->c_recursion_remaining <= 0) JUMP_TO_JUMP_TARGET(); PyObject *arg = args[1]; PyObject *self = args[0]; - if (!Py_IS_TYPE(self, method->d_common.d_type)) goto deoptimize; + if (!Py_IS_TYPE(self, method->d_common.d_type)) JUMP_TO_JUMP_TARGET(); STAT_INC(CALL, hit); PyCFunction cfunc = meth->ml_meth; _Py_EnterRecursiveCallTstateUnchecked(tstate); @@ -3372,7 +3247,7 @@ Py_DECREF(self); Py_DECREF(arg); Py_DECREF(callable); - if (res == NULL) { stack_pointer += -2 - oparg; goto error_tier_two; } + if (res == NULL) JUMP_TO_ERROR(); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; break; @@ -3393,12 +3268,12 @@ total_args++; } PyMethodDescrObject *method = (PyMethodDescrObject *)callable; - if (!Py_IS_TYPE(method, &PyMethodDescr_Type)) goto deoptimize; + if (!Py_IS_TYPE(method, &PyMethodDescr_Type)) JUMP_TO_JUMP_TARGET(); PyMethodDef *meth = method->d_method; - if (meth->ml_flags != (METH_FASTCALL|METH_KEYWORDS)) goto deoptimize; + if (meth->ml_flags != (METH_FASTCALL|METH_KEYWORDS)) JUMP_TO_JUMP_TARGET(); PyTypeObject *d_type = method->d_common.d_type; PyObject *self = args[0]; - if (!Py_IS_TYPE(self, d_type)) goto deoptimize; + if (!Py_IS_TYPE(self, d_type)) JUMP_TO_JUMP_TARGET(); STAT_INC(CALL, hit); int nargs = total_args - 1; PyCFunctionFastWithKeywords cfunc = @@ -3410,7 +3285,7 @@ Py_DECREF(args[i]); } Py_DECREF(callable); - if (res == NULL) { stack_pointer += -2 - oparg; goto error_tier_two; } + if (res == NULL) JUMP_TO_ERROR(); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; break; @@ -3431,15 +3306,15 @@ args--; total_args++; } - if (total_args != 1) goto deoptimize; + if (total_args != 1) JUMP_TO_JUMP_TARGET(); PyMethodDescrObject *method = (PyMethodDescrObject *)callable; - if (!Py_IS_TYPE(method, &PyMethodDescr_Type)) goto deoptimize; + if (!Py_IS_TYPE(method, &PyMethodDescr_Type)) JUMP_TO_JUMP_TARGET(); PyMethodDef *meth = method->d_method; PyObject *self = args[0]; - if (!Py_IS_TYPE(self, method->d_common.d_type)) goto deoptimize; - if (meth->ml_flags != METH_NOARGS) goto deoptimize; + if (!Py_IS_TYPE(self, method->d_common.d_type)) JUMP_TO_JUMP_TARGET(); + if (meth->ml_flags != METH_NOARGS) JUMP_TO_JUMP_TARGET(); // CPython promises to check all non-vectorcall function calls. 
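The executor hunks above swap the per-site `goto deoptimize` / `goto error_tier_two` exits (each with its own inline `stack_pointer` adjustment) for the `JUMP_TO_JUMP_TARGET()` and `JUMP_TO_ERROR()` macros, with cleanup handled by dedicated exit micro-ops added further down. The toy below only sketches that shape; the `toy_*` names are hypothetical stand-ins, not CPython's real macros or executor layout.

```c
#include <stdio.h>

enum { EXIT_OK, EXIT_DEOPT, EXIT_ERROR };

typedef struct {
    int deopt_target;   /* exit stub to run when a guard fails */
    int error_target;   /* exit stub to run when the op raises */
} toy_uop;

/* Hypothetical stand-ins for JUMP_TO_JUMP_TARGET / JUMP_TO_ERROR. */
#define TOY_JUMP_TO_JUMP_TARGET() return uop->deopt_target
#define TOY_JUMP_TO_ERROR()       return uop->error_target

static int run_guard(const toy_uop *uop, int guard_holds, int raised)
{
    if (!guard_holds) {
        TOY_JUMP_TO_JUMP_TARGET();  /* side exit: one stub handles cleanup */
    }
    if (raised) {
        TOY_JUMP_TO_ERROR();        /* error path, also centralized */
    }
    return EXIT_OK;
}

int main(void)
{
    toy_uop guard = { .deopt_target = EXIT_DEOPT, .error_target = EXIT_ERROR };
    printf("ok=%d deopt=%d error=%d\n",
           run_guard(&guard, 1, 0),
           run_guard(&guard, 0, 0),
           run_guard(&guard, 1, 1));
    return 0;
}
```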
- if (tstate->c_recursion_remaining <= 0) goto deoptimize; + if (tstate->c_recursion_remaining <= 0) JUMP_TO_JUMP_TARGET(); STAT_INC(CALL, hit); PyCFunction cfunc = meth->ml_meth; _Py_EnterRecursiveCallTstateUnchecked(tstate); @@ -3448,7 +3323,7 @@ assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); Py_DECREF(self); Py_DECREF(callable); - if (res == NULL) { stack_pointer += -2 - oparg; goto error_tier_two; } + if (res == NULL) JUMP_TO_ERROR(); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; break; @@ -3470,11 +3345,11 @@ } PyMethodDescrObject *method = (PyMethodDescrObject *)callable; /* Builtin METH_FASTCALL methods, without keywords */ - if (!Py_IS_TYPE(method, &PyMethodDescr_Type)) goto deoptimize; + if (!Py_IS_TYPE(method, &PyMethodDescr_Type)) JUMP_TO_JUMP_TARGET(); PyMethodDef *meth = method->d_method; - if (meth->ml_flags != METH_FASTCALL) goto deoptimize; + if (meth->ml_flags != METH_FASTCALL) JUMP_TO_JUMP_TARGET(); PyObject *self = args[0]; - if (!Py_IS_TYPE(self, method->d_common.d_type)) goto deoptimize; + if (!Py_IS_TYPE(self, method->d_common.d_type)) JUMP_TO_JUMP_TARGET(); STAT_INC(CALL, hit); PyCFunctionFast cfunc = (PyCFunctionFast)(void(*)(void))meth->ml_meth; @@ -3486,19 +3361,19 @@ Py_DECREF(args[i]); } Py_DECREF(callable); - if (res == NULL) { stack_pointer += -2 - oparg; goto error_tier_two; } + if (res == NULL) JUMP_TO_ERROR(); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; break; } - /* _INSTRUMENTED_CALL_KW is not a viable micro-op for tier 2 */ + /* _INSTRUMENTED_CALL_KW is not a viable micro-op for tier 2 because it is instrumented */ - /* _CALL_KW is not a viable micro-op for tier 2 */ + /* _CALL_KW is not a viable micro-op for tier 2 because it uses the 'this_instr' variable */ - /* _INSTRUMENTED_CALL_FUNCTION_EX is not a viable micro-op for tier 2 */ + /* _INSTRUMENTED_CALL_FUNCTION_EX is not a viable micro-op for tier 2 because it is instrumented */ - /* _CALL_FUNCTION_EX is not a viable micro-op for tier 2 */ + /* _CALL_FUNCTION_EX is not a viable micro-op for tier 2 because it uses the 'this_instr' variable */ case _MAKE_FUNCTION: { PyObject *codeobj; @@ -3508,7 +3383,7 @@ PyFunction_New(codeobj, GLOBALS()); Py_DECREF(codeobj); if (func_obj == NULL) { - GOTO_ERROR(error); + JUMP_TO_ERROR(); } _PyFunction_SetVersion( func_obj, ((PyCodeObject *)codeobj)->co_version); @@ -3565,7 +3440,7 @@ Py_DECREF(start); Py_DECREF(stop); Py_XDECREF(step); - if (slice == NULL) { stack_pointer += -2 - ((oparg == 3) ? 1 : 0); goto error_tier_two; } + if (slice == NULL) JUMP_TO_ERROR(); stack_pointer[-2 - ((oparg == 3) ? 1 : 0)] = slice; stack_pointer += -1 - ((oparg == 3) ? 
1 : 0); break; @@ -3581,7 +3456,7 @@ conv_fn = _PyEval_ConversionFuncs[oparg]; result = conv_fn(value); Py_DECREF(value); - if (result == NULL) goto pop_1_error_tier_two; + if (result == NULL) JUMP_TO_ERROR(); stack_pointer[-1] = result; break; } @@ -3595,7 +3470,7 @@ if (!PyUnicode_CheckExact(value)) { res = PyObject_Format(value, NULL); Py_DECREF(value); - if (res == NULL) goto pop_1_error_tier_two; + if (res == NULL) JUMP_TO_ERROR(); } else { res = value; @@ -3613,7 +3488,7 @@ res = PyObject_Format(value, fmt_spec); Py_DECREF(value); Py_DECREF(fmt_spec); - if (res == NULL) goto pop_2_error_tier_two; + if (res == NULL) JUMP_TO_ERROR(); stack_pointer[-2] = res; stack_pointer += -1; break; @@ -3642,7 +3517,7 @@ res = _PyEval_BinaryOps[oparg](lhs, rhs); Py_DECREF(lhs); Py_DECREF(rhs); - if (res == NULL) goto pop_2_error_tier_two; + if (res == NULL) JUMP_TO_ERROR(); stack_pointer[-2] = res; stack_pointer += -1; break; @@ -3660,25 +3535,25 @@ break; } - /* _INSTRUMENTED_INSTRUCTION is not a viable micro-op for tier 2 */ + /* _INSTRUMENTED_INSTRUCTION is not a viable micro-op for tier 2 because it is instrumented */ - /* _INSTRUMENTED_JUMP_FORWARD is not a viable micro-op for tier 2 */ + /* _INSTRUMENTED_JUMP_FORWARD is not a viable micro-op for tier 2 because it is instrumented */ - /* _INSTRUMENTED_JUMP_BACKWARD is not a viable micro-op for tier 2 */ + /* _INSTRUMENTED_JUMP_BACKWARD is not a viable micro-op for tier 2 because it is instrumented */ - /* _INSTRUMENTED_POP_JUMP_IF_TRUE is not a viable micro-op for tier 2 */ + /* _INSTRUMENTED_POP_JUMP_IF_TRUE is not a viable micro-op for tier 2 because it is instrumented */ - /* _INSTRUMENTED_POP_JUMP_IF_FALSE is not a viable micro-op for tier 2 */ + /* _INSTRUMENTED_POP_JUMP_IF_FALSE is not a viable micro-op for tier 2 because it is instrumented */ - /* _INSTRUMENTED_POP_JUMP_IF_NONE is not a viable micro-op for tier 2 */ + /* _INSTRUMENTED_POP_JUMP_IF_NONE is not a viable micro-op for tier 2 because it is instrumented */ - /* _INSTRUMENTED_POP_JUMP_IF_NOT_NONE is not a viable micro-op for tier 2 */ + /* _INSTRUMENTED_POP_JUMP_IF_NOT_NONE is not a viable micro-op for tier 2 because it is instrumented */ case _GUARD_IS_TRUE_POP: { PyObject *flag; flag = stack_pointer[-1]; stack_pointer += -1; - if (!Py_IsTrue(flag)) goto side_exit; + if (!Py_IsTrue(flag)) JUMP_TO_JUMP_TARGET(); assert(Py_IsTrue(flag)); break; } @@ -3687,7 +3562,7 @@ PyObject *flag; flag = stack_pointer[-1]; stack_pointer += -1; - if (!Py_IsFalse(flag)) goto side_exit; + if (!Py_IsFalse(flag)) JUMP_TO_JUMP_TARGET(); assert(Py_IsFalse(flag)); break; } @@ -3698,7 +3573,7 @@ stack_pointer += -1; if (!Py_IsNone(val)) { Py_DECREF(val); - if (1) goto side_exit; + if (1) JUMP_TO_JUMP_TARGET(); } break; } @@ -3707,7 +3582,7 @@ PyObject *val; val = stack_pointer[-1]; stack_pointer += -1; - if (Py_IsNone(val)) goto side_exit; + if (Py_IsNone(val)) JUMP_TO_JUMP_TARGET(); Py_DECREF(val); break; } @@ -3738,12 +3613,12 @@ } case _EXIT_TRACE: { - if (1) goto side_exit; + if (1) JUMP_TO_JUMP_TARGET(); break; } case _CHECK_VALIDITY: { - if (!current_executor->vm_data.valid) goto deoptimize; + if (!current_executor->vm_data.valid) JUMP_TO_JUMP_TARGET(); break; } @@ -3803,7 +3678,7 @@ case _CHECK_FUNCTION: { uint32_t func_version = (uint32_t)CURRENT_OPERAND(); assert(PyFunction_Check(frame->f_funcobj)); - if (((PyFunctionObject *)frame->f_funcobj)->func_version != func_version) goto deoptimize; + if (((PyFunctionObject *)frame->f_funcobj)->func_version != func_version) JUMP_TO_JUMP_TARGET(); 
break; } @@ -3838,7 +3713,7 @@ if (optimized < 0) { Py_DECREF(previous); tstate->previous_executor = Py_None; - if (1) goto error_tier_two; + GOTO_UNWIND(); } GOTO_TIER_ONE(target); } @@ -3869,9 +3744,26 @@ case _CHECK_VALIDITY_AND_SET_IP: { PyObject *instr_ptr = (PyObject *)CURRENT_OPERAND(); - if (!current_executor->vm_data.valid) goto deoptimize; + if (!current_executor->vm_data.valid) JUMP_TO_JUMP_TARGET(); frame->instr_ptr = (_Py_CODEUNIT *)instr_ptr; break; } + case _DEOPT: { + EXIT_TO_TIER1(); + break; + } + + case _SIDE_EXIT: { + EXIT_TO_TRACE(); + break; + } + + case _ERROR_POP_N: { + oparg = CURRENT_OPARG(); + stack_pointer += -oparg; + GOTO_UNWIND(); + break; + } + #undef TIER_TWO diff --git a/Python/flowgraph.c b/Python/flowgraph.c index 2f47e47bf9d29d..5437c3875ff7b0 100644 --- a/Python/flowgraph.c +++ b/Python/flowgraph.c @@ -2717,13 +2717,14 @@ _PyCfg_ToInstructionSequence(cfg_builder *g, _PyCompile_InstructionSequence *seq int lbl = 0; for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { b->b_label = (jump_target_label){lbl}; - lbl += b->b_iused; + lbl += 1; } for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { RETURN_IF_ERROR(_PyCompile_InstructionSequence_UseLabel(seq, b->b_label.id)); for (int i = 0; i < b->b_iused; i++) { cfg_instr *instr = &b->b_instr[i]; - if (OPCODE_HAS_JUMP(instr->i_opcode) || is_block_push(instr)) { + if (HAS_TARGET(instr->i_opcode)) { + /* Set oparg to the label id (it will later be mapped to an offset) */ instr->i_oparg = instr->i_target->b_label.id; } RETURN_IF_ERROR( diff --git a/Python/frozenmain.c b/Python/frozenmain.c index 3ce9476c9ad46c..ec4566bd4f84bc 100644 --- a/Python/frozenmain.c +++ b/Python/frozenmain.c @@ -54,6 +54,12 @@ Py_FrozenMain(int argc, char **argv) Py_ExitStatusException(status); } + PyInterpreterState *interp = PyInterpreterState_Get(); + if (_PyInterpreterState_SetRunningMain(interp) < 0) { + PyErr_Print(); + exit(1); + } + #ifdef MS_WINDOWS PyWinFreeze_ExeInit(); #endif @@ -83,6 +89,9 @@ Py_FrozenMain(int argc, char **argv) #ifdef MS_WINDOWS PyWinFreeze_ExeTerm(); #endif + + _PyInterpreterState_SetNotRunningMain(interp); + if (Py_FinalizeEx() < 0) { sts = 120; } diff --git a/Python/gc.c b/Python/gc.c index 6b3316b642ea9e..36e20d05c205a5 100644 --- a/Python/gc.c +++ b/Python/gc.c @@ -46,7 +46,7 @@ typedef struct _gc_runtime_state GCState; // move_legacy_finalizers() removes this flag instead. // Between them, unreachable list is not normal list and we can not use // most gc_list_* functions for it. 
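For the `Python/flowgraph.c` hunk above: block labels are now assigned one id per basic block (`lbl += 1`) rather than advancing by the block's instruction count (`lbl += b->b_iused`), and the oparg temporarily holds that label id until it is later mapped to an offset. A standalone illustration with made-up block sizes, just to show how the two numbering schemes differ:

```c
#include <stdio.h>

int main(void)
{
    /* Hypothetical basic-block instruction counts (b_iused). */
    int b_iused[] = {3, 2, 4};
    int nblocks = 3;

    int old_lbl = 0, new_lbl = 0;
    for (int i = 0; i < nblocks; i++) {
        printf("block %d: old label id %d, new label id %d\n",
               i, old_lbl, new_lbl);
        old_lbl += b_iused[i];  /* previous scheme: ids grow by block size */
        new_lbl += 1;           /* patched scheme: dense ids, one per block */
    }
    return 0;
}
```

Either way each block gets a unique label; the new scheme just keeps the ids dense, which fits the added comment about mapping label ids to offsets later.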
-#define NEXT_MASK_UNREACHABLE (1) +#define NEXT_MASK_UNREACHABLE 2 #define AS_GC(op) _Py_AS_GC(op) #define FROM_GC(gc) _Py_FROM_GC(gc) @@ -96,9 +96,48 @@ gc_decref(PyGC_Head *g) g->_gc_prev -= 1 << _PyGC_PREV_SHIFT; } +static inline int +gc_old_space(PyGC_Head *g) +{ + return g->_gc_next & _PyGC_NEXT_MASK_OLD_SPACE_1; +} + +static inline int +flip_old_space(int space) +{ + assert(space == 0 || space == 1); + return space ^ _PyGC_NEXT_MASK_OLD_SPACE_1; +} + +static inline void +gc_flip_old_space(PyGC_Head *g) +{ + g->_gc_next ^= _PyGC_NEXT_MASK_OLD_SPACE_1; +} -#define GEN_HEAD(gcstate, n) (&(gcstate)->generations[n].head) +static inline void +gc_set_old_space(PyGC_Head *g, int space) +{ + assert(space == 0 || space == _PyGC_NEXT_MASK_OLD_SPACE_1); + g->_gc_next &= ~_PyGC_NEXT_MASK_OLD_SPACE_1; + g->_gc_next |= space; +} +static PyGC_Head * +GEN_HEAD(GCState *gcstate, int n) +{ + assert((gcstate->visited_space & (~1)) == 0); + switch(n) { + case 0: + return &gcstate->young.head; + case 1: + return &gcstate->old[gcstate->visited_space].head; + case 2: + return &gcstate->old[gcstate->visited_space^1].head; + default: + Py_UNREACHABLE(); + } +} static GCState * get_gc_state(void) @@ -117,11 +156,12 @@ _PyGC_InitState(GCState *gcstate) GEN.head._gc_prev = (uintptr_t)&GEN.head; \ } while (0) - for (int i = 0; i < NUM_GENERATIONS; i++) { - assert(gcstate->generations[i].count == 0); - INIT_HEAD(gcstate->generations[i]); - }; - gcstate->generation0 = GEN_HEAD(gcstate, 0); + assert(gcstate->young.count == 0); + assert(gcstate->old[0].count == 0); + assert(gcstate->old[1].count == 0); + INIT_HEAD(gcstate->young); + INIT_HEAD(gcstate->old[0]); + INIT_HEAD(gcstate->old[1]); INIT_HEAD(gcstate->permanent_generation); #undef INIT_HEAD @@ -142,6 +182,7 @@ _PyGC_Init(PyInterpreterState *interp) if (gcstate->callbacks == NULL) { return _PyStatus_NO_MEMORY(); } + gcstate->heap_size = 0; return _PyStatus_OK(); } @@ -219,6 +260,7 @@ gc_list_is_empty(PyGC_Head *list) static inline void gc_list_append(PyGC_Head *node, PyGC_Head *list) { + assert((list->_gc_prev & ~_PyGC_PREV_MASK) == 0); PyGC_Head *last = (PyGC_Head *)list->_gc_prev; // last <-> node @@ -276,6 +318,8 @@ gc_list_merge(PyGC_Head *from, PyGC_Head *to) PyGC_Head *from_tail = GC_PREV(from); assert(from_head != from); assert(from_tail != from); + assert(gc_list_is_empty(to) || + gc_old_space(to_tail) == gc_old_space(from_tail)); _PyGCHead_SET_NEXT(to_tail, from_head); _PyGCHead_SET_PREV(from_head, to_tail); @@ -344,8 +388,8 @@ enum flagstates {collecting_clear_unreachable_clear, static void validate_list(PyGC_Head *head, enum flagstates flags) { - assert((head->_gc_prev & PREV_MASK_COLLECTING) == 0); - assert((head->_gc_next & NEXT_MASK_UNREACHABLE) == 0); + assert((head->_gc_prev & ~_PyGC_PREV_MASK) == 0); + assert((head->_gc_next & ~_PyGC_PREV_MASK) == 0); uintptr_t prev_value = 0, next_value = 0; switch (flags) { case collecting_clear_unreachable_clear: @@ -367,7 +411,7 @@ validate_list(PyGC_Head *head, enum flagstates flags) PyGC_Head *gc = GC_NEXT(head); while (gc != head) { PyGC_Head *trueprev = GC_PREV(gc); - PyGC_Head *truenext = (PyGC_Head *)(gc->_gc_next & ~NEXT_MASK_UNREACHABLE); + PyGC_Head *truenext = GC_NEXT(gc); assert(truenext != NULL); assert(trueprev == prev); assert((gc->_gc_prev & PREV_MASK_COLLECTING) == prev_value); @@ -377,8 +421,54 @@ validate_list(PyGC_Head *head, enum flagstates flags) } assert(prev == GC_PREV(head)); } + +static void +validate_old(GCState *gcstate) +{ + for (int space = 0; space < 2; space++) { + PyGC_Head 
*head = &gcstate->old[space].head; + PyGC_Head *gc = GC_NEXT(head); + while (gc != head) { + PyGC_Head *next = GC_NEXT(gc); + assert(gc_old_space(gc) == space); + gc = next; + } + } +} + +static void +validate_consistent_old_space(PyGC_Head *head) +{ + PyGC_Head *prev = head; + PyGC_Head *gc = GC_NEXT(head); + if (gc == head) { + return; + } + int old_space = gc_old_space(gc); + while (gc != head) { + PyGC_Head *truenext = GC_NEXT(gc); + assert(truenext != NULL); + assert(gc_old_space(gc) == old_space); + prev = gc; + gc = truenext; + } + assert(prev == GC_PREV(head)); +} + +static void +gc_list_validate_space(PyGC_Head *head, int space) { + PyGC_Head *gc = GC_NEXT(head); + while (gc != head) { + assert(gc_old_space(gc) == space); + gc = GC_NEXT(gc); + } +} + #else #define validate_list(x, y) do{}while(0) +#define validate_old(g) do{}while(0) +#define validate_consistent_old_space(l) do{}while(0) +#define gc_list_validate_space(l, s) do{}while(0) #endif /*** end of list stuff ***/ @@ -396,10 +486,6 @@ update_refs(PyGC_Head *containers) while (gc != containers) { next = GC_NEXT(gc); PyObject *op = FROM_GC(gc); - /* Move any object that might have become immortal to the - * permanent generation as the reference count is not accurately - * reflecting the actual number of live references to this object - */ if (_Py_IsImmortal(op)) { gc_list_move(gc, &get_gc_state()->permanent_generation.head); gc = next; @@ -502,12 +588,13 @@ visit_reachable(PyObject *op, void *arg) // Manually unlink gc from unreachable list because the list functions // don't work right in the presence of NEXT_MASK_UNREACHABLE flags. PyGC_Head *prev = GC_PREV(gc); - PyGC_Head *next = (PyGC_Head*)(gc->_gc_next & ~NEXT_MASK_UNREACHABLE); + PyGC_Head *next = GC_NEXT(gc); _PyObject_ASSERT(FROM_GC(prev), prev->_gc_next & NEXT_MASK_UNREACHABLE); _PyObject_ASSERT(FROM_GC(next), next->_gc_next & NEXT_MASK_UNREACHABLE); - prev->_gc_next = gc->_gc_next; // copy NEXT_MASK_UNREACHABLE + prev->_gc_next = gc->_gc_next; // copy flag bits + gc->_gc_next &= ~NEXT_MASK_UNREACHABLE; _PyGCHead_SET_PREV(next, prev); gc_list_append(gc, reachable); @@ -559,6 +646,9 @@ move_unreachable(PyGC_Head *young, PyGC_Head *unreachable) * or to the right have been scanned yet. */ + validate_consistent_old_space(young); + /* Record which old space we are in, and set NEXT_MASK_UNREACHABLE bit for convenience */ + uintptr_t flags = NEXT_MASK_UNREACHABLE | (gc->_gc_next & _PyGC_NEXT_MASK_OLD_SPACE_1); while (gc != young) { if (gc_get_refs(gc)) { /* gc is definitely reachable from outside the @@ -604,17 +694,18 @@ move_unreachable(PyGC_Head *young, PyGC_Head *unreachable) // But this may pollute the unreachable list head's 'next' pointer // too. That's semantically senseless but expedient here - the // damage is repaired when this function ends. - last->_gc_next = (NEXT_MASK_UNREACHABLE | (uintptr_t)gc); + last->_gc_next = flags | (uintptr_t)gc; _PyGCHead_SET_PREV(gc, last); - gc->_gc_next = (NEXT_MASK_UNREACHABLE | (uintptr_t)unreachable); + gc->_gc_next = flags | (uintptr_t)unreachable; unreachable->_gc_prev = (uintptr_t)gc; } - gc = (PyGC_Head*)prev->_gc_next; + gc = _PyGCHead_NEXT(prev); } // young->_gc_prev must be last element remained in the list. 
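The `gc_old_space()`, `gc_flip_old_space()` and `gc_set_old_space()` helpers added above record which of the two old spaces an object belongs to by tagging its `_gc_next` word with `_PyGC_NEXT_MASK_OLD_SPACE_1`. Below is a self-contained sketch of that tagging scheme, using hypothetical `toy_*` names and assuming the lowest bit as the tag (the real mask is defined outside this diff):

```c
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Assumed low tag bit; the patch uses _PyGC_NEXT_MASK_OLD_SPACE_1. */
#define TOY_MASK_OLD_SPACE_1 ((uintptr_t)1)

typedef struct { uintptr_t next_with_tag; } toy_gc_head;

static int toy_old_space(const toy_gc_head *g)
{
    return (int)(g->next_with_tag & TOY_MASK_OLD_SPACE_1);
}

static void toy_flip_old_space(toy_gc_head *g)
{
    g->next_with_tag ^= TOY_MASK_OLD_SPACE_1;
}

static void toy_set_old_space(toy_gc_head *g, int space)
{
    assert(space == 0 || space == 1);
    g->next_with_tag &= ~TOY_MASK_OLD_SPACE_1;  /* clear the tag bit ... */
    g->next_with_tag |= (uintptr_t)space;       /* ... then set it       */
}

int main(void)
{
    toy_gc_head h = { .next_with_tag = 0 };
    toy_set_old_space(&h, 1);
    printf("space=%d\n", toy_old_space(&h));  /* 1 */
    toy_flip_old_space(&h);
    printf("space=%d\n", toy_old_space(&h));  /* 0 */
    return 0;
}
```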
young->_gc_prev = (uintptr_t)prev; + young->_gc_next &= _PyGC_PREV_MASK; // don't let the pollution of the list head's next pointer leak - unreachable->_gc_next &= ~NEXT_MASK_UNREACHABLE; + unreachable->_gc_next &= _PyGC_PREV_MASK; } static void @@ -673,8 +764,8 @@ move_legacy_finalizers(PyGC_Head *unreachable, PyGC_Head *finalizers) PyObject *op = FROM_GC(gc); _PyObject_ASSERT(op, gc->_gc_next & NEXT_MASK_UNREACHABLE); + next = GC_NEXT(gc); gc->_gc_next &= ~NEXT_MASK_UNREACHABLE; - next = (PyGC_Head*)gc->_gc_next; if (has_legacy_finalizer(op)) { gc_clear_collecting(gc); @@ -697,8 +788,8 @@ clear_unreachable_mask(PyGC_Head *unreachable) PyGC_Head *gc, *next; for (gc = GC_NEXT(unreachable); gc != unreachable; gc = next) { _PyObject_ASSERT((PyObject*)FROM_GC(gc), gc->_gc_next & NEXT_MASK_UNREACHABLE); + next = GC_NEXT(gc); gc->_gc_next &= ~NEXT_MASK_UNREACHABLE; - next = (PyGC_Head*)gc->_gc_next; } validate_list(unreachable, collecting_set_unreachable_clear); } @@ -868,6 +959,7 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old) /* Invoke the callbacks we decided to honor. It's safe to invoke them * because they can't reference unreachable objects. */ + int visited_space = get_gc_state()->visited_space; while (! gc_list_is_empty(&wrcb_to_call)) { PyObject *temp; PyObject *callback; @@ -902,6 +994,7 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old) Py_DECREF(op); if (wrcb_to_call._gc_next == (uintptr_t)gc) { /* object is still alive -- move it */ + gc_set_old_space(gc, visited_space); gc_list_move(gc, old); } else { @@ -1030,25 +1123,6 @@ delete_garbage(PyThreadState *tstate, GCState *gcstate, } -// Show stats for objects in each generations -static void -show_stats_each_generations(GCState *gcstate) -{ - char buf[100]; - size_t pos = 0; - - for (int i = 0; i < NUM_GENERATIONS && pos < sizeof(buf); i++) { - pos += PyOS_snprintf(buf+pos, sizeof(buf)-pos, - " %zd", - gc_list_size(GEN_HEAD(gcstate, i))); - } - - PySys_FormatStderr( - "gc: objects in each generation:%s\n" - "gc: objects in permanent generation: %zd\n", - buf, gc_list_size(&gcstate->permanent_generation.head)); -} - /* Deduce which objects among "base" are unreachable from outside the list and move them to 'unreachable'. The process consist in the following steps: @@ -1122,7 +1196,6 @@ deduce_unreachable(PyGC_Head *base, PyGC_Head *unreachable) { * the reachable objects instead. But this is a one-time cost, probably not * worth complicating the code to speed just a little. 
*/ - gc_list_init(unreachable); move_unreachable(base, unreachable); // gc_prev is pointer again validate_list(base, collecting_clear_unreachable_clear); validate_list(unreachable, collecting_set_unreachable_set); @@ -1161,219 +1234,289 @@ handle_resurrected_objects(PyGC_Head *unreachable, PyGC_Head* still_unreachable, } -/* Invoke progress callbacks to notify clients that garbage collection - * is starting or stopping +#define UNTRACK_TUPLES 1 +#define UNTRACK_DICTS 2 + +static void +gc_collect_region(PyThreadState *tstate, + PyGC_Head *from, + PyGC_Head *to, + int untrack, + struct gc_collection_stats *stats); + +static inline Py_ssize_t +gc_list_set_space(PyGC_Head *list, int space) +{ + Py_ssize_t size = 0; + PyGC_Head *gc; + for (gc = GC_NEXT(list); gc != list; gc = GC_NEXT(gc)) { + gc_set_old_space(gc, space); + size++; + } + return size; +} + +/* Making progress in the incremental collector + * In order to eventually collect all cycles + * the incremental collector must progress through the old + * space faster than objects are added to the old space. + * + * Each young or incremental collection adds a numebr of + * objects, S (for survivors) to the old space, and + * incremental collectors scan I objects from the old space. + * I > S must be true. We also want I > S * N to be where + * N > 1. Higher values of N mean that the old space is + * scanned more rapidly. + * The default incremental threshold of 10 translates to + * N == 1.4 (1 + 4/threshold) */ + +/* Divide by 10, so that the default incremental threshold of 10 + * scans objects at 1% of the heap size */ +#define SCAN_RATE_DIVISOR 10 + static void -invoke_gc_callback(PyThreadState *tstate, const char *phase, - int generation, Py_ssize_t collected, - Py_ssize_t uncollectable) +add_stats(GCState *gcstate, int gen, struct gc_collection_stats *stats) { - assert(!_PyErr_Occurred(tstate)); + gcstate->generation_stats[gen].collected += stats->collected; + gcstate->generation_stats[gen].uncollectable += stats->uncollectable; + gcstate->generation_stats[gen].collections += 1; +} - /* we may get called very early */ +static void +gc_collect_young(PyThreadState *tstate, + struct gc_collection_stats *stats) +{ GCState *gcstate = &tstate->interp->gc; - if (gcstate->callbacks == NULL) { - return; - } - - /* The local variable cannot be rebound, check it for sanity */ - assert(PyList_CheckExact(gcstate->callbacks)); - PyObject *info = NULL; - if (PyList_GET_SIZE(gcstate->callbacks) != 0) { - info = Py_BuildValue("{sisnsn}", - "generation", generation, - "collected", collected, - "uncollectable", uncollectable); - if (info == NULL) { - PyErr_FormatUnraisable("Exception ignored on invoking gc callbacks"); - return; + PyGC_Head *young = &gcstate->young.head; + PyGC_Head *visited = &gcstate->old[gcstate->visited_space].head; +#ifdef Py_STATS + { + Py_ssize_t count = 0; + PyGC_Head *gc; + for (gc = GC_NEXT(young); gc != young; gc = GC_NEXT(gc)) { + count++; } } +#endif - PyObject *phase_obj = PyUnicode_FromString(phase); - if (phase_obj == NULL) { - Py_XDECREF(info); - PyErr_FormatUnraisable("Exception ignored on invoking gc callbacks"); - return; + PyGC_Head survivors; + gc_list_init(&survivors); + gc_collect_region(tstate, young, &survivors, UNTRACK_TUPLES, stats); + Py_ssize_t survivor_count = 0; + if (gcstate->visited_space) { + /* objects in visited space have bit set, so we set it here */ + survivor_count = gc_list_set_space(&survivors, 1); } - - PyObject *stack[] = {phase_obj, info}; - for (Py_ssize_t i=0; icallbacks); i++) { - 
PyObject *r, *cb = PyList_GET_ITEM(gcstate->callbacks, i); - Py_INCREF(cb); /* make sure cb doesn't go away */ - r = PyObject_Vectorcall(cb, stack, 2, NULL); - if (r == NULL) { - PyErr_WriteUnraisable(cb); - } - else { - Py_DECREF(r); + else { + PyGC_Head *gc; + for (gc = GC_NEXT(&survivors); gc != &survivors; gc = GC_NEXT(gc)) { +#ifdef GC_DEBUG + assert(gc_old_space(gc) == 0); +#endif + survivor_count++; } - Py_DECREF(cb); } - Py_DECREF(phase_obj); - Py_XDECREF(info); - assert(!_PyErr_Occurred(tstate)); + gc_list_merge(&survivors, visited); + validate_old(gcstate); + gcstate->young.count = 0; + gcstate->old[gcstate->visited_space].count++; + Py_ssize_t scale_factor = gcstate->old[0].threshold; + if (scale_factor < 1) { + scale_factor = 1; + } + gcstate->work_to_do += gcstate->heap_size / SCAN_RATE_DIVISOR / scale_factor; + add_stats(gcstate, 0, stats); } +static inline int +IS_IN_VISITED(PyGC_Head *gc, int visited_space) +{ + assert(visited_space == 0 || flip_old_space(visited_space) == 0); + return gc_old_space(gc) == visited_space; +} + +struct container_and_flag { + PyGC_Head *container; + int visited_space; + uintptr_t size; +}; -/* Find the oldest generation (highest numbered) where the count - * exceeds the threshold. Objects in the that generation and - * generations younger than it will be collected. */ +/* A traversal callback for adding to container) */ static int -gc_select_generation(GCState *gcstate) -{ - for (int i = NUM_GENERATIONS-1; i >= 0; i--) { - if (gcstate->generations[i].count > gcstate->generations[i].threshold) { - /* Avoid quadratic performance degradation in number - of tracked objects (see also issue #4074): - - To limit the cost of garbage collection, there are two strategies; - - make each collection faster, e.g. by scanning fewer objects - - do less collections - This heuristic is about the latter strategy. - - In addition to the various configurable thresholds, we only trigger a - full collection if the ratio - - long_lived_pending / long_lived_total - - is above a given value (hardwired to 25%). - - The reason is that, while "non-full" collections (i.e., collections of - the young and middle generations) will always examine roughly the same - number of objects -- determined by the aforementioned thresholds --, - the cost of a full collection is proportional to the total number of - long-lived objects, which is virtually unbounded. - - Indeed, it has been remarked that doing a full collection every - of object creations entails a dramatic performance - degradation in workloads which consist in creating and storing lots of - long-lived objects (e.g. building a large list of GC-tracked objects would - show quadratic performance, instead of linear as expected: see issue #4074). - - Using the above ratio, instead, yields amortized linear performance in - the total number of objects (the effect of which can be summarized - thusly: "each full garbage collection is more and more costly as the - number of objects grows, but we do fewer and fewer of them"). - - This heuristic was suggested by Martin von Löwis on python-dev in - June 2008. 
His original analysis and proposal can be found at: - http://mail.python.org/pipermail/python-dev/2008-June/080579.html - */ - if (i == NUM_GENERATIONS - 1 - && gcstate->long_lived_pending < gcstate->long_lived_total / 4) - { - continue; - } - return i; +visit_add_to_container(PyObject *op, void *arg) +{ + OBJECT_STAT_INC(object_visits); + struct container_and_flag *cf = (struct container_and_flag *)arg; + int visited = cf->visited_space; + assert(visited == get_gc_state()->visited_space); + if (!_Py_IsImmortal(op) && _PyObject_IS_GC(op)) { + PyGC_Head *gc = AS_GC(op); + if (_PyObject_GC_IS_TRACKED(op) && + gc_old_space(gc) != visited) { + gc_flip_old_space(gc); + gc_list_move(gc, cf->container); + cf->size++; } } - return -1; + return 0; } - -/* This is the main function. Read this to understand how the - * collection process works. */ -static Py_ssize_t -gc_collect_main(PyThreadState *tstate, int generation, _PyGC_Reason reason) +static uintptr_t +expand_region_transitively_reachable(PyGC_Head *container, PyGC_Head *gc, GCState *gcstate) { - int i; - Py_ssize_t m = 0; /* # objects collected */ - Py_ssize_t n = 0; /* # unreachable objects that couldn't be collected */ - PyGC_Head *young; /* the generation we are examining */ - PyGC_Head *old; /* next older generation */ - PyGC_Head unreachable; /* non-problematic unreachable trash */ - PyGC_Head finalizers; /* objects with, & reachable from, __del__ */ - PyGC_Head *gc; - PyTime_t t1 = 0; /* initialize to prevent a compiler warning */ - GCState *gcstate = &tstate->interp->gc; - - // gc_collect_main() must not be called before _PyGC_Init - // or after _PyGC_Fini() - assert(gcstate->garbage != NULL); - assert(!_PyErr_Occurred(tstate)); - - int expected = 0; - if (!_Py_atomic_compare_exchange_int(&gcstate->collecting, &expected, 1)) { - // Don't start a garbage collection if one is already in progress. - return 0; - } - - if (generation == GENERATION_AUTO) { - // Select the oldest generation that needs collecting. We will collect - // objects from that generation and all generations younger than it. - generation = gc_select_generation(gcstate); - if (generation < 0) { - // No generation needs to be collected. 
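The "Making progress in the incremental collector" comment earlier in this `Python/gc.c` hunk budgets each increment at `heap_size / SCAN_RATE_DIVISOR / scale_factor` objects, which with the defaults it mentions (divisor 10, incremental threshold 10) works out to roughly 1% of the heap per young collection. A throwaway check of that arithmetic with assumed numbers:

```c
#include <stdio.h>

int main(void)
{
    long heap_size = 1000000;     /* assumed number of GC-tracked objects */
    long scale_factor = 10;       /* assumed old[0].threshold (default 10) */
    long scan_rate_divisor = 10;  /* SCAN_RATE_DIVISOR from the patch */

    long budget = heap_size / scan_rate_divisor / scale_factor;
    printf("increment budget: %ld objects (%.2f%% of the heap)\n",
           budget, 100.0 * (double)budget / (double)heap_size);
    return 0;
}
```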
- _Py_atomic_store_int(&gcstate->collecting, 0); - return 0; + validate_list(container, collecting_clear_unreachable_clear); + struct container_and_flag arg = { + .container = container, + .visited_space = gcstate->visited_space, + .size = 0 + }; + assert(GC_NEXT(gc) == container); + while (gc != container) { + /* Survivors will be moved to visited space, so they should + * have been marked as visited */ + assert(IS_IN_VISITED(gc, gcstate->visited_space)); + PyObject *op = FROM_GC(gc); + if (_Py_IsImmortal(op)) { + PyGC_Head *next = GC_NEXT(gc); + gc_list_move(gc, &get_gc_state()->permanent_generation.head); + gc = next; + continue; } + traverseproc traverse = Py_TYPE(op)->tp_traverse; + (void) traverse(op, + visit_add_to_container, + &arg); + gc = GC_NEXT(gc); } + return arg.size; +} - assert(generation >= 0 && generation < NUM_GENERATIONS); - -#ifdef Py_STATS - if (_Py_stats) { - _Py_stats->object_stats.object_visits = 0; - } +/* Do bookkeeping for a completed GC cycle */ +static void +completed_cycle(GCState *gcstate) +{ +#ifdef Py_DEBUG + PyGC_Head *not_visited = &gcstate->old[gcstate->visited_space^1].head; + assert(gc_list_is_empty(not_visited)); #endif - GC_STAT_ADD(generation, collections, 1); - - if (reason != _Py_GC_REASON_SHUTDOWN) { - invoke_gc_callback(tstate, "start", generation, 0, 0); - } - - if (gcstate->debug & _PyGC_DEBUG_STATS) { - PySys_WriteStderr("gc: collecting generation %d...\n", generation); - show_stats_each_generations(gcstate); - t1 = _PyTime_PerfCounterUnchecked(); + gcstate->visited_space = flip_old_space(gcstate->visited_space); + /* Make sure all young objects have old space bit set correctly */ + PyGC_Head *young = &gcstate->young.head; + PyGC_Head *gc = GC_NEXT(young); + while (gc != young) { + PyGC_Head *next = GC_NEXT(gc); + gc_set_old_space(gc, gcstate->visited_space); + gc = next; } + gcstate->work_to_do = 0; +} - if (PyDTrace_GC_START_ENABLED()) { - PyDTrace_GC_START(generation); +static void +gc_collect_increment(PyThreadState *tstate, struct gc_collection_stats *stats) +{ + GCState *gcstate = &tstate->interp->gc; + PyGC_Head *not_visited = &gcstate->old[gcstate->visited_space^1].head; + PyGC_Head *visited = &gcstate->old[gcstate->visited_space].head; + PyGC_Head increment; + gc_list_init(&increment); + Py_ssize_t scale_factor = gcstate->old[0].threshold; + if (scale_factor < 1) { + scale_factor = 1; + } + gc_list_merge(&gcstate->young.head, &increment); + gcstate->young.count = 0; + gc_list_validate_space(&increment, gcstate->visited_space); + Py_ssize_t increment_size = 0; + while (increment_size < gcstate->work_to_do) { + if (gc_list_is_empty(not_visited)) { + break; + } + PyGC_Head *gc = _PyGCHead_NEXT(not_visited); + gc_list_move(gc, &increment); + increment_size++; + gc_set_old_space(gc, gcstate->visited_space); + increment_size += expand_region_transitively_reachable(&increment, gc, gcstate); } + gc_list_validate_space(&increment, gcstate->visited_space); + PyGC_Head survivors; + gc_list_init(&survivors); + gc_collect_region(tstate, &increment, &survivors, UNTRACK_TUPLES, stats); + gc_list_validate_space(&survivors, gcstate->visited_space); + gc_list_merge(&survivors, visited); + assert(gc_list_is_empty(&increment)); + gcstate->work_to_do += gcstate->heap_size / SCAN_RATE_DIVISOR / scale_factor; + gcstate->work_to_do -= increment_size; - /* update collection and allocation counters */ - if (generation+1 < NUM_GENERATIONS) { - gcstate->generations[generation+1].count += 1; - } - for (i = 0; i <= generation; i++) { - 
gcstate->generations[i].count = 0; + validate_old(gcstate); + add_stats(gcstate, 1, stats); + if (gc_list_is_empty(not_visited)) { + completed_cycle(gcstate); } +} - /* merge younger generations with one we are currently collecting */ - for (i = 0; i < generation; i++) { - gc_list_merge(GEN_HEAD(gcstate, i), GEN_HEAD(gcstate, generation)); - } - /* handy references */ - young = GEN_HEAD(gcstate, generation); - if (generation < NUM_GENERATIONS-1) { - old = GEN_HEAD(gcstate, generation+1); - } - else { - old = young; - } - validate_list(old, collecting_clear_unreachable_clear); +static void +gc_collect_full(PyThreadState *tstate, + struct gc_collection_stats *stats) +{ + GCState *gcstate = &tstate->interp->gc; + validate_old(gcstate); + PyGC_Head *young = &gcstate->young.head; + PyGC_Head *pending = &gcstate->old[gcstate->visited_space^1].head; + PyGC_Head *visited = &gcstate->old[gcstate->visited_space].head; + /* merge all generations into visited */ + gc_list_validate_space(young, gcstate->visited_space); + gc_list_set_space(pending, gcstate->visited_space); + gc_list_merge(young, pending); + gcstate->young.count = 0; + gc_list_merge(pending, visited); + + gc_collect_region(tstate, visited, visited, + UNTRACK_TUPLES | UNTRACK_DICTS, + stats); + gcstate->young.count = 0; + gcstate->old[0].count = 0; + gcstate->old[1].count = 0; + + gcstate->work_to_do = - gcstate->young.threshold * 2; + _PyGC_ClearAllFreeLists(tstate->interp); + validate_old(gcstate); + add_stats(gcstate, 2, stats); +} + +/* This is the main function. Read this to understand how the + * collection process works. */ +static void +gc_collect_region(PyThreadState *tstate, + PyGC_Head *from, + PyGC_Head *to, + int untrack, + struct gc_collection_stats *stats) +{ + PyGC_Head unreachable; /* non-problematic unreachable trash */ + PyGC_Head finalizers; /* objects with, & reachable from, __del__ */ + PyGC_Head *gc; /* initialize to prevent a compiler warning */ + GCState *gcstate = &tstate->interp->gc; - deduce_unreachable(young, &unreachable); + assert(gcstate->garbage != NULL); + assert(!_PyErr_Occurred(tstate)); - untrack_tuples(young); - /* Move reachable objects to next generation. */ - if (young != old) { - if (generation == NUM_GENERATIONS - 2) { - gcstate->long_lived_pending += gc_list_size(young); - } - gc_list_merge(young, old); + gc_list_init(&unreachable); + deduce_unreachable(from, &unreachable); + validate_consistent_old_space(from); + if (untrack & UNTRACK_TUPLES) { + untrack_tuples(from); } - else { - /* We only un-track dicts in full collections, to avoid quadratic - dict build-up. See issue #14775. */ - untrack_dicts(young); - gcstate->long_lived_pending = 0; - gcstate->long_lived_total = gc_list_size(young); + if (untrack & UNTRACK_DICTS) { + untrack_dicts(from); } + validate_consistent_old_space(to); + if (from != to) { + gc_list_merge(from, to); + } + validate_consistent_old_space(to); + /* Move reachable objects to next generation. */ /* All objects in unreachable are trash, but objects reachable from * legacy finalizers (e.g. tp_del) can't safely be deleted. @@ -1387,10 +1530,8 @@ gc_collect_main(PyThreadState *tstate, int generation, _PyGC_Reason reason) * and we move those into the finalizers list too. */ move_legacy_finalizer_reachable(&finalizers); - validate_list(&finalizers, collecting_clear_unreachable_clear); validate_list(&unreachable, collecting_set_unreachable_clear); - /* Print debugging information. 
*/ if (gcstate->debug & _PyGC_DEBUG_COLLECTABLE) { for (gc = GC_NEXT(&unreachable); gc != &unreachable; gc = GC_NEXT(gc)) { @@ -1399,89 +1540,101 @@ gc_collect_main(PyThreadState *tstate, int generation, _PyGC_Reason reason) } /* Clear weakrefs and invoke callbacks as necessary. */ - m += handle_weakrefs(&unreachable, old); - - validate_list(old, collecting_clear_unreachable_clear); + stats->collected += handle_weakrefs(&unreachable, to); + gc_list_validate_space(to, gcstate->visited_space); + validate_list(to, collecting_clear_unreachable_clear); validate_list(&unreachable, collecting_set_unreachable_clear); /* Call tp_finalize on objects which have one. */ finalize_garbage(tstate, &unreachable); - /* Handle any objects that may have resurrected after the call * to 'finalize_garbage' and continue the collection with the * objects that are still unreachable */ PyGC_Head final_unreachable; - handle_resurrected_objects(&unreachable, &final_unreachable, old); + gc_list_init(&final_unreachable); + handle_resurrected_objects(&unreachable, &final_unreachable, to); /* Call tp_clear on objects in the final_unreachable set. This will cause * the reference cycles to be broken. It may also cause some objects * in finalizers to be freed. */ - m += gc_list_size(&final_unreachable); - delete_garbage(tstate, gcstate, &final_unreachable, old); + stats->collected += gc_list_size(&final_unreachable); + delete_garbage(tstate, gcstate, &final_unreachable, to); /* Collect statistics on uncollectable objects found and print * debugging information. */ + Py_ssize_t n = 0; for (gc = GC_NEXT(&finalizers); gc != &finalizers; gc = GC_NEXT(gc)) { n++; - if (gcstate->debug & _PyGC_DEBUG_UNCOLLECTABLE) + if (gcstate->debug & _PyGC_DEBUG_COLLECTABLE) debug_cycle("uncollectable", FROM_GC(gc)); } - if (gcstate->debug & _PyGC_DEBUG_STATS) { - double d = PyTime_AsSecondsDouble(_PyTime_PerfCounterUnchecked() - t1); - PySys_WriteStderr( - "gc: done, %zd unreachable, %zd uncollectable, %.4fs elapsed\n", - n+m, n, d); - } - + stats->uncollectable = n; /* Append instances in the uncollectable set to a Python * reachable list of garbage. The programmer has to deal with * this if they insist on creating this type of structure. 
*/ - handle_legacy_finalizers(tstate, gcstate, &finalizers, old); - validate_list(old, collecting_clear_unreachable_clear); + handle_legacy_finalizers(tstate, gcstate, &finalizers, to); + gc_list_validate_space(to, gcstate->visited_space); + validate_list(to, collecting_clear_unreachable_clear); +} - /* Clear free list only during the collection of the highest - * generation */ - if (generation == NUM_GENERATIONS-1) { - _PyGC_ClearAllFreeLists(tstate->interp); - } +/* Invoke progress callbacks to notify clients that garbage collection + * is starting or stopping + */ +static void +do_gc_callback(GCState *gcstate, const char *phase, + int generation, struct gc_collection_stats *stats) +{ + assert(!PyErr_Occurred()); - if (_PyErr_Occurred(tstate)) { - if (reason == _Py_GC_REASON_SHUTDOWN) { - _PyErr_Clear(tstate); - } - else { - PyErr_FormatUnraisable("Exception ignored in garbage collection"); + /* The local variable cannot be rebound, check it for sanity */ + assert(PyList_CheckExact(gcstate->callbacks)); + PyObject *info = NULL; + if (PyList_GET_SIZE(gcstate->callbacks) != 0) { + info = Py_BuildValue("{sisnsn}", + "generation", generation, + "collected", stats->collected, + "uncollectable", stats->uncollectable); + if (info == NULL) { + PyErr_FormatUnraisable("Exception ignored on invoking gc callbacks"); + return; } } - /* Update stats */ - struct gc_generation_stats *stats = &gcstate->generation_stats[generation]; - stats->collections++; - stats->collected += m; - stats->uncollectable += n; - - GC_STAT_ADD(generation, objects_collected, m); -#ifdef Py_STATS - if (_Py_stats) { - GC_STAT_ADD(generation, object_visits, - _Py_stats->object_stats.object_visits); - _Py_stats->object_stats.object_visits = 0; + PyObject *phase_obj = PyUnicode_FromString(phase); + if (phase_obj == NULL) { + Py_XDECREF(info); + PyErr_FormatUnraisable("Exception ignored on invoking gc callbacks"); + return; } -#endif - if (PyDTrace_GC_DONE_ENABLED()) { - PyDTrace_GC_DONE(n + m); + PyObject *stack[] = {phase_obj, info}; + for (Py_ssize_t i=0; icallbacks); i++) { + PyObject *r, *cb = PyList_GET_ITEM(gcstate->callbacks, i); + Py_INCREF(cb); /* make sure cb doesn't go away */ + r = PyObject_Vectorcall(cb, stack, 2, NULL); + if (r == NULL) { + PyErr_WriteUnraisable(cb); + } + else { + Py_DECREF(r); + } + Py_DECREF(cb); } + Py_DECREF(phase_obj); + Py_XDECREF(info); + assert(!PyErr_Occurred()); +} - if (reason != _Py_GC_REASON_SHUTDOWN) { - invoke_gc_callback(tstate, "stop", generation, m, n); +static void +invoke_gc_callback(GCState *gcstate, const char *phase, + int generation, struct gc_collection_stats *stats) +{ + if (gcstate->callbacks == NULL) { + return; } - - assert(!_PyErr_Occurred(tstate)); - _Py_atomic_store_int(&gcstate->collecting, 0); - return n + m; + do_gc_callback(gcstate, phase, generation, stats); } static int @@ -1537,7 +1690,7 @@ _PyGC_GetReferrers(PyInterpreterState *interp, PyObject *objs) } PyObject * -_PyGC_GetObjects(PyInterpreterState *interp, Py_ssize_t generation) +_PyGC_GetObjects(PyInterpreterState *interp, int generation) { assert(generation >= -1 && generation < NUM_GENERATIONS); GCState *gcstate = &interp->gc; @@ -1571,10 +1724,20 @@ void _PyGC_Freeze(PyInterpreterState *interp) { GCState *gcstate = &interp->gc; - for (int i = 0; i < NUM_GENERATIONS; ++i) { - gc_list_merge(GEN_HEAD(gcstate, i), &gcstate->permanent_generation.head); - gcstate->generations[i].count = 0; - } + /* The permanent_generation has its old space bit set to zero */ + if (gcstate->visited_space) { + 
gc_list_set_space(&gcstate->young.head, 0); + } + gc_list_merge(&gcstate->young.head, &gcstate->permanent_generation.head); + gcstate->young.count = 0; + PyGC_Head*old0 = &gcstate->old[0].head; + PyGC_Head*old1 = &gcstate->old[1].head; + gc_list_merge(old0, &gcstate->permanent_generation.head); + gcstate->old[0].count = 0; + gc_list_set_space(old1, 0); + gc_list_merge(old1, &gcstate->permanent_generation.head); + gcstate->old[1].count = 0; + validate_old(gcstate); } void @@ -1582,7 +1745,8 @@ _PyGC_Unfreeze(PyInterpreterState *interp) { GCState *gcstate = &interp->gc; gc_list_merge(&gcstate->permanent_generation.head, - GEN_HEAD(gcstate, NUM_GENERATIONS-1)); + &gcstate->old[0].head); + validate_old(gcstate); } Py_ssize_t @@ -1618,32 +1782,66 @@ PyGC_IsEnabled(void) return gcstate->enabled; } -/* Public API to invoke gc.collect() from C */ Py_ssize_t -PyGC_Collect(void) +_PyGC_Collect(PyThreadState *tstate, int generation, _PyGC_Reason reason) { - PyThreadState *tstate = _PyThreadState_GET(); GCState *gcstate = &tstate->interp->gc; - if (!gcstate->enabled) { + int expected = 0; + if (!_Py_atomic_compare_exchange_int(&gcstate->collecting, &expected, 1)) { + // Don't start a garbage collection if one is already in progress. return 0; } - Py_ssize_t n; + struct gc_collection_stats stats = { 0 }; + if (reason != _Py_GC_REASON_SHUTDOWN) { + invoke_gc_callback(gcstate, "start", generation, &stats); + } + if (PyDTrace_GC_START_ENABLED()) { + PyDTrace_GC_START(generation); + } PyObject *exc = _PyErr_GetRaisedException(tstate); - n = gc_collect_main(tstate, NUM_GENERATIONS - 1, _Py_GC_REASON_MANUAL); + switch(generation) { + case 0: + gc_collect_young(tstate, &stats); + break; + case 1: + gc_collect_increment(tstate, &stats); + break; + case 2: + gc_collect_full(tstate, &stats); + break; + default: + Py_UNREACHABLE(); + } + if (PyDTrace_GC_DONE_ENABLED()) { + PyDTrace_GC_DONE(stats.uncollectable + stats.collected); + } + if (reason != _Py_GC_REASON_SHUTDOWN) { + invoke_gc_callback(gcstate, "stop", generation, &stats); + } _PyErr_SetRaisedException(tstate, exc); - - return n; + GC_STAT_ADD(generation, objects_collected, stats.collected); +#ifdef Py_STATS + if (_Py_stats) { + GC_STAT_ADD(generation, object_visits, + _Py_stats->object_stats.object_visits); + _Py_stats->object_stats.object_visits = 0; + } +#endif + validate_old(gcstate); + _Py_atomic_store_int(&gcstate->collecting, 0); + return stats.uncollectable + stats.collected; } +/* Public API to invoke gc.collect() from C */ Py_ssize_t -_PyGC_Collect(PyThreadState *tstate, int generation, _PyGC_Reason reason) +PyGC_Collect(void) { - return gc_collect_main(tstate, generation, reason); + return _PyGC_Collect(_PyThreadState_GET(), 2, _Py_GC_REASON_MANUAL); } -Py_ssize_t +void _PyGC_CollectNoFail(PyThreadState *tstate) { /* Ideally, this function is only called on interpreter shutdown, @@ -1652,7 +1850,7 @@ _PyGC_CollectNoFail(PyThreadState *tstate) during interpreter shutdown (and then never finish it). See http://bugs.python.org/issue8713#msg195178 for an example. 
*/ - return gc_collect_main(tstate, NUM_GENERATIONS - 1, _Py_GC_REASON_SHUTDOWN); + _PyGC_Collect(_PyThreadState_GET(), 2, _Py_GC_REASON_SHUTDOWN); } void @@ -1791,10 +1989,11 @@ _PyObject_GC_Link(PyObject *op) GCState *gcstate = &tstate->interp->gc; gc->_gc_next = 0; gc->_gc_prev = 0; - gcstate->generations[0].count++; /* number of allocated GC objects */ - if (gcstate->generations[0].count > gcstate->generations[0].threshold && + gcstate->young.count++; /* number of allocated GC objects */ + gcstate->heap_size++; + if (gcstate->young.count > gcstate->young.threshold && gcstate->enabled && - gcstate->generations[0].threshold && + gcstate->young.threshold && !_Py_atomic_load_int_relaxed(&gcstate->collecting) && !_PyErr_Occurred(tstate)) { @@ -1805,11 +2004,9 @@ _PyObject_GC_Link(PyObject *op) void _Py_RunGC(PyThreadState *tstate) { - GCState *gcstate = get_gc_state(); - if (!gcstate->enabled) { - return; + if (tstate->interp->gc.enabled) { + _PyGC_Collect(tstate, 1, _Py_GC_REASON_HEAP); } - gc_collect_main(tstate, GENERATION_AUTO, _Py_GC_REASON_HEAP); } static PyObject * @@ -1912,9 +2109,10 @@ PyObject_GC_Del(void *op) #endif } GCState *gcstate = get_gc_state(); - if (gcstate->generations[0].count > 0) { - gcstate->generations[0].count--; + if (gcstate->young.count > 0) { + gcstate->young.count--; } + gcstate->heap_size--; PyObject_Free(((char *)op)-presize); } @@ -1936,26 +2134,36 @@ PyObject_GC_IsFinalized(PyObject *obj) return 0; } +static int +visit_generation(gcvisitobjects_t callback, void *arg, struct gc_generation *gen) +{ + PyGC_Head *gc_list, *gc; + gc_list = &gen->head; + for (gc = GC_NEXT(gc_list); gc != gc_list; gc = GC_NEXT(gc)) { + PyObject *op = FROM_GC(gc); + Py_INCREF(op); + int res = callback(op, arg); + Py_DECREF(op); + if (!res) { + return -1; + } + } + return 0; +} + void PyUnstable_GC_VisitObjects(gcvisitobjects_t callback, void *arg) { - size_t i; GCState *gcstate = get_gc_state(); int origenstate = gcstate->enabled; gcstate->enabled = 0; - for (i = 0; i < NUM_GENERATIONS; i++) { - PyGC_Head *gc_list, *gc; - gc_list = GEN_HEAD(gcstate, i); - for (gc = GC_NEXT(gc_list); gc != gc_list; gc = GC_NEXT(gc)) { - PyObject *op = FROM_GC(gc); - Py_INCREF(op); - int res = callback(op, arg); - Py_DECREF(op); - if (!res) { - goto done; - } - } + if (visit_generation(callback, arg, &gcstate->young)) { + goto done; + } + if (visit_generation(callback, arg, &gcstate->old[0])) { + goto done; } + visit_generation(callback, arg, &gcstate->old[1]); done: gcstate->enabled = origenstate; } diff --git a/Python/gc_free_threading.c b/Python/gc_free_threading.c index 2b13d1f005dd97..69ce22a1e83b62 100644 --- a/Python/gc_free_threading.c +++ b/Python/gc_free_threading.c @@ -675,7 +675,7 @@ void _PyGC_InitState(GCState *gcstate) { // TODO: move to pycore_runtime_init.h once the incremental GC lands. - gcstate->generations[0].threshold = 2000; + gcstate->young.threshold = 2000; } @@ -970,8 +970,8 @@ cleanup_worklist(struct worklist *worklist) static bool gc_should_collect(GCState *gcstate) { - int count = _Py_atomic_load_int_relaxed(&gcstate->generations[0].count); - int threshold = gcstate->generations[0].threshold; + int count = _Py_atomic_load_int_relaxed(&gcstate->young.count); + int threshold = gcstate->young.threshold; if (count <= threshold || threshold == 0 || !gcstate->enabled) { return false; } @@ -979,7 +979,7 @@ gc_should_collect(GCState *gcstate) // objects. 
A few tests rely on immediate scheduling of the GC so we ignore // the scaled threshold if generations[1].threshold is set to zero. return (count > gcstate->long_lived_total / 4 || - gcstate->generations[1].threshold == 0); + gcstate->old[0].threshold == 0); } static void @@ -993,7 +993,7 @@ record_allocation(PyThreadState *tstate) if (gc->alloc_count >= LOCAL_ALLOC_COUNT_THRESHOLD) { // TODO: Use Py_ssize_t for the generation count. GCState *gcstate = &tstate->interp->gc; - _Py_atomic_add_int(&gcstate->generations[0].count, (int)gc->alloc_count); + _Py_atomic_add_int(&gcstate->young.count, (int)gc->alloc_count); gc->alloc_count = 0; if (gc_should_collect(gcstate) && @@ -1012,7 +1012,7 @@ record_deallocation(PyThreadState *tstate) gc->alloc_count--; if (gc->alloc_count <= -LOCAL_ALLOC_COUNT_THRESHOLD) { GCState *gcstate = &tstate->interp->gc; - _Py_atomic_add_int(&gcstate->generations[0].count, (int)gc->alloc_count); + _Py_atomic_add_int(&gcstate->young.count, (int)gc->alloc_count); gc->alloc_count = 0; } } @@ -1137,10 +1137,11 @@ gc_collect_main(PyThreadState *tstate, int generation, _PyGC_Reason reason) /* update collection and allocation counters */ if (generation+1 < NUM_GENERATIONS) { - gcstate->generations[generation+1].count += 1; + gcstate->old[generation].count += 1; } - for (i = 0; i <= generation; i++) { - gcstate->generations[i].count = 0; + gcstate->young.count = 0; + for (i = 1; i <= generation; i++) { + gcstate->old[i-1].count = 0; } PyInterpreterState *interp = tstate->interp; @@ -1304,7 +1305,7 @@ visit_get_objects(const mi_heap_t *heap, const mi_heap_area_t *area, } PyObject * -_PyGC_GetObjects(PyInterpreterState *interp, Py_ssize_t generation) +_PyGC_GetObjects(PyInterpreterState *interp, int generation) { PyObject *result = PyList_New(0); if (!result) { @@ -1463,7 +1464,7 @@ _PyGC_Collect(PyThreadState *tstate, int generation, _PyGC_Reason reason) return gc_collect_main(tstate, generation, reason); } -Py_ssize_t +void _PyGC_CollectNoFail(PyThreadState *tstate) { /* Ideally, this function is only called on interpreter shutdown, @@ -1472,7 +1473,7 @@ _PyGC_CollectNoFail(PyThreadState *tstate) during interpreter shutdown (and then never finish it). See http://bugs.python.org/issue8713#msg195178 for an example. 
*/ - return gc_collect_main(tstate, NUM_GENERATIONS - 1, _Py_GC_REASON_SHUTDOWN); + gc_collect_main(tstate, NUM_GENERATIONS - 1, _Py_GC_REASON_SHUTDOWN); } void diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index 2996ee72e7f2c6..c66eb678d38475 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -25,7 +25,7 @@ "asynchronous context manager protocol", Py_TYPE(mgr)->tp_name); } - GOTO_ERROR(error); + goto error; } exit = _PyObject_LookupSpecial(mgr, &_Py_ID(__aexit__)); if (exit == NULL) { @@ -37,7 +37,7 @@ Py_TYPE(mgr)->tp_name); } Py_DECREF(enter); - GOTO_ERROR(error); + goto error; } Py_DECREF(mgr); res = PyObject_CallNoArgs(enter); @@ -71,7 +71,7 @@ "context manager protocol", Py_TYPE(mgr)->tp_name); } - GOTO_ERROR(error); + goto error; } exit = _PyObject_LookupSpecial(mgr, &_Py_ID(__exit__)); if (exit == NULL) { @@ -83,7 +83,7 @@ Py_TYPE(mgr)->tp_name); } Py_DECREF(enter); - GOTO_ERROR(error); + goto error; } Py_DECREF(mgr); res = PyObject_CallNoArgs(enter); @@ -605,12 +605,8 @@ PyObject *map; keys = stack_pointer[-1]; values = &stack_pointer[-1 - oparg]; - if (!PyTuple_CheckExact(keys) || - PyTuple_GET_SIZE(keys) != (Py_ssize_t)oparg) { - _PyErr_SetString(tstate, PyExc_SystemError, - "bad BUILD_CONST_KEY_MAP keys argument"); - GOTO_ERROR(error); // Pop the keys and values. - } + assert(PyTuple_CheckExact(keys)); + assert(PyTuple_GET_SIZE(keys) == (Py_ssize_t)oparg); map = _PyDict_FromItems( &PyTuple_GET_ITEM(keys, 0), 1, values, 1, oparg); @@ -667,7 +663,7 @@ values = &stack_pointer[-oparg]; set = PySet_New(NULL); if (set == NULL) - GOTO_ERROR(error); + goto error; int err = 0; for (int i = 0; i < oparg; i++) { PyObject *item = values[i]; @@ -808,7 +804,7 @@ // The frame has stolen all the arguments from the stack, // so there is no need to clean them up. if (new_frame == NULL) { - GOTO_ERROR(error); + goto error; } frame->return_offset = (uint16_t)(next_instr - this_instr); DISPATCH_INLINED(new_frame); @@ -882,7 +878,7 @@ STAT_INC(CALL, hit); PyObject *self = _PyType_NewManagedObject(tp); if (self == NULL) { - GOTO_ERROR(error); + goto error; } Py_DECREF(tp); _PyInterpreterFrame *shim = _PyFrame_PushTrampolineUnchecked( @@ -1213,11 +1209,11 @@ assert(kwargs == NULL || PyDict_CheckExact(kwargs)); if (!PyTuple_CheckExact(callargs)) { if (check_args_iterable(tstate, func, callargs) < 0) { - GOTO_ERROR(error); + goto error; } PyObject *tuple = PySequence_Tuple(callargs); if (tuple == NULL) { - GOTO_ERROR(error); + goto error; } Py_SETREF(callargs, tuple); } @@ -1229,7 +1225,7 @@ int err = _Py_call_instrumentation_2args( tstate, PY_MONITORING_EVENT_CALL, frame, this_instr, func, arg); - if (err) GOTO_ERROR(error); + if (err) goto error; result = PyObject_Call(func, callargs, kwargs); if (!PyFunction_Check(func) && !PyMethod_Check(func)) { if (result == NULL) { @@ -1261,7 +1257,7 @@ // Need to manually shrink the stack since we exit with DISPATCH_INLINED. STACK_SHRINK(oparg + 3); if (new_frame == NULL) { - GOTO_ERROR(error); + goto error; } assert(next_instr - this_instr == 1); frame->return_offset = 1; @@ -1342,7 +1338,7 @@ PyObject *inst = args[0]; int retval = PyObject_IsInstance(inst, cls); if (retval < 0) { - GOTO_ERROR(error); + goto error; } res = PyBool_FromLong(retval); assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); @@ -1407,7 +1403,7 @@ // The frame has stolen all the arguments from the stack, // so there is no need to clean them up. 
if (new_frame == NULL) { - GOTO_ERROR(error); + goto error; } assert(next_instr - this_instr == 1); frame->return_offset = 1; @@ -1475,7 +1471,7 @@ PyObject *arg = args[0]; Py_ssize_t len_i = PyObject_Length(arg); if (len_i < 0) { - GOTO_ERROR(error); + goto error; } res = PyLong_FromSsize_t(len_i); assert((res != NULL) ^ (_PyErr_Occurred(tstate) != NULL)); @@ -2329,7 +2325,7 @@ // Fortunately we don't need its superpower. if (oldobj == NULL) { _PyEval_FormatExcUnbound(tstate, _PyFrame_GetCode(frame), oparg); - GOTO_ERROR(error); + goto error; } PyCell_SET(cell, NULL); Py_DECREF(oldobj); @@ -2341,7 +2337,13 @@ next_instr += 1; INSTRUCTION_STATS(DELETE_FAST); PyObject *v = GETLOCAL(oparg); - if (v == NULL) goto unbound_local_error; + if (v == NULL) { + _PyEval_FormatExcCheckArg(tstate, PyExc_UnboundLocalError, + UNBOUNDLOCAL_ERROR_MSG, + PyTuple_GetItem(_PyFrame_GetCode(frame)->co_localsplusnames, oparg) + ); + if (1) goto error; + } SETLOCAL(oparg, NULL); DISPATCH(); } @@ -2354,12 +2356,12 @@ int err = PyDict_Pop(GLOBALS(), name, NULL); // Can't use ERROR_IF here. if (err < 0) { - GOTO_ERROR(error); + goto error; } if (err == 0) { _PyEval_FormatExcCheckArg(tstate, PyExc_NameError, NAME_ERROR_MSG, name); - GOTO_ERROR(error); + goto error; } DISPATCH(); } @@ -2374,7 +2376,7 @@ if (ns == NULL) { _PyErr_Format(tstate, PyExc_SystemError, "no locals when deleting %R", name); - GOTO_ERROR(error); + goto error; } err = PyObject_DelItem(ns, name); // Can't use ERROR_IF here. @@ -2382,7 +2384,7 @@ _PyEval_FormatExcCheckArg(tstate, PyExc_NameError, NAME_ERROR_MSG, name); - GOTO_ERROR(error); + goto error; } DISPATCH(); } @@ -2523,7 +2525,7 @@ PyErr_Format(PyExc_TypeError, "__init__() should return None, not '%.200s'", Py_TYPE(should_be_none)->tp_name); - GOTO_ERROR(error); + goto error; } stack_pointer += -1; DISPATCH(); @@ -2610,7 +2612,7 @@ if (next == NULL) { if (_PyErr_Occurred(tstate)) { if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { - GOTO_ERROR(error); + goto error; } monitor_raise(tstate, frame, this_instr); _PyErr_Clear(tstate); @@ -2841,7 +2843,7 @@ if (PyAsyncGen_CheckExact(aiter)) { awaitable = type->tp_as_async->am_anext(aiter); if (awaitable == NULL) { - GOTO_ERROR(error); + goto error; } } else { if (type->tp_as_async != NULL){ @@ -2850,7 +2852,7 @@ if (getter != NULL) { next_iter = (*getter)(aiter); if (next_iter == NULL) { - GOTO_ERROR(error); + goto error; } } else { @@ -2858,7 +2860,7 @@ "'async for' requires an iterator with " "__anext__ method, got %.100s", type->tp_name); - GOTO_ERROR(error); + goto error; } awaitable = _PyCoro_GetAwaitableIter(next_iter); if (awaitable == NULL) { @@ -2868,7 +2870,7 @@ "from __anext__: %.100s", Py_TYPE(next_iter)->tp_name); Py_DECREF(next_iter); - GOTO_ERROR(error); + goto error; } else { Py_DECREF(next_iter); } @@ -2956,7 +2958,7 @@ _PyErr_SetString(tstate, PyExc_TypeError, "cannot 'yield from' a coroutine object " "in a non-coroutine generator"); - GOTO_ERROR(error); + goto error; } iter = iterable; } @@ -2967,7 +2969,7 @@ /* `iterable` is not a generator. 
*/ iter = PyObject_GetIter(iterable); if (iter == NULL) { - GOTO_ERROR(error); + goto error; } Py_DECREF(iterable); } @@ -3066,7 +3068,7 @@ if (PyGen_Check(receiver)) { PyErr_SetObject(PyExc_StopIteration, value); if (monitor_stop_iteration(tstate, frame, this_instr)) { - GOTO_ERROR(error); + goto error; } PyErr_SetRaisedException(NULL); } @@ -3087,7 +3089,7 @@ if (PyGen_Check(receiver) || PyCoro_CheckExact(receiver)) { PyErr_SetObject(PyExc_StopIteration, value); if (monitor_stop_iteration(tstate, frame, this_instr)) { - GOTO_ERROR(error); + goto error; } PyErr_SetRaisedException(NULL); } @@ -3113,7 +3115,7 @@ else { if (_PyErr_Occurred(tstate)) { if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { - GOTO_ERROR(error); + goto error; } monitor_raise(tstate, frame, this_instr); _PyErr_Clear(tstate); @@ -3268,7 +3270,7 @@ uintptr_t code_version = _PyFrame_GetCode(frame)->_co_instrumentation_version; if (code_version != global_version) { if (_Py_Instrument(_PyFrame_GetCode(frame), tstate->interp)) { - GOTO_ERROR(error); + goto error; } next_instr = this_instr; } @@ -3299,7 +3301,7 @@ int err = _Py_call_instrumentation_arg( tstate, PY_MONITORING_EVENT_PY_RETURN, frame, this_instr, retval); - if (err) GOTO_ERROR(error); + if (err) goto error; Py_INCREF(retval); assert(EMPTY()); _PyFrame_SetStackPointer(frame, stack_pointer); @@ -3324,7 +3326,7 @@ int err = _Py_call_instrumentation_arg( tstate, PY_MONITORING_EVENT_PY_RETURN, frame, this_instr, retval); - if (err) GOTO_ERROR(error); + if (err) goto error; STACK_SHRINK(1); assert(EMPTY()); _PyFrame_SetStackPointer(frame, stack_pointer); @@ -3356,7 +3358,7 @@ int err = _Py_call_instrumentation_arg( tstate, PY_MONITORING_EVENT_PY_YIELD, frame, this_instr, retval); - if (err) GOTO_ERROR(error); + if (err) goto error; tstate->exc_info = gen->gi_exc_state.previous_item; gen->gi_exc_state.previous_item = NULL; _Py_LeaveRecursiveCallPy(tstate); @@ -4138,7 +4140,13 @@ INSTRUCTION_STATS(LOAD_FAST_CHECK); PyObject *value; value = GETLOCAL(oparg); - if (value == NULL) goto unbound_local_error; + if (value == NULL) { + _PyEval_FormatExcCheckArg(tstate, PyExc_UnboundLocalError, + UNBOUNDLOCAL_ERROR_MSG, + PyTuple_GetItem(_PyFrame_GetCode(frame)->co_localsplusnames, oparg) + ); + if (1) goto error; + } Py_INCREF(value); stack_pointer[0] = value; stack_pointer += 1; @@ -4175,14 +4183,14 @@ assert(oparg >= 0 && oparg < _PyFrame_GetCode(frame)->co_nlocalsplus); name = PyTuple_GET_ITEM(_PyFrame_GetCode(frame)->co_localsplusnames, oparg); if (PyMapping_GetOptionalItem(class_dict, name, &value) < 0) { - GOTO_ERROR(error); + goto error; } if (!value) { PyObject *cell = GETLOCAL(oparg); value = PyCell_GET(cell); if (value == NULL) { _PyEval_FormatExcUnbound(tstate, _PyFrame_GetCode(frame), oparg); - GOTO_ERROR(error); + goto error; } Py_INCREF(value); } @@ -4200,21 +4208,21 @@ mod_or_class_dict = stack_pointer[-1]; PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); if (PyMapping_GetOptionalItem(mod_or_class_dict, name, &v) < 0) { - GOTO_ERROR(error); + goto error; } if (v == NULL) { if (PyDict_GetItemRef(GLOBALS(), name, &v) < 0) { - GOTO_ERROR(error); + goto error; } if (v == NULL) { if (PyMapping_GetOptionalItem(BUILTINS(), name, &v) < 0) { - GOTO_ERROR(error); + goto error; } if (v == NULL) { _PyEval_FormatExcCheckArg( tstate, PyExc_NameError, NAME_ERROR_MSG, name); - GOTO_ERROR(error); + goto error; } } } @@ -4398,21 +4406,21 @@ } PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); if (PyMapping_GetOptionalItem(mod_or_class_dict, name, &v) < 0) { - 
GOTO_ERROR(error); + goto error; } if (v == NULL) { if (PyDict_GetItemRef(GLOBALS(), name, &v) < 0) { - GOTO_ERROR(error); + goto error; } if (v == NULL) { if (PyMapping_GetOptionalItem(BUILTINS(), name, &v) < 0) { - GOTO_ERROR(error); + goto error; } if (v == NULL) { _PyEval_FormatExcCheckArg( tstate, PyExc_NameError, NAME_ERROR_MSG, name); - GOTO_ERROR(error); + goto error; } } } @@ -4574,7 +4582,7 @@ PyObject *initial = GETLOCAL(oparg); PyObject *cell = PyCell_New(initial); if (cell == NULL) { - GOTO_ERROR(error); + goto error; } SETLOCAL(oparg, cell); DISPATCH(); @@ -4591,7 +4599,7 @@ PyFunction_New(codeobj, GLOBALS()); Py_DECREF(codeobj); if (func_obj == NULL) { - GOTO_ERROR(error); + goto error; } _PyFunction_SetVersion( func_obj, ((PyCodeObject *)codeobj)->co_version); @@ -4910,7 +4918,7 @@ else { assert(PyLong_Check(lasti)); _PyErr_SetString(tstate, PyExc_SystemError, "lasti is not an int"); - GOTO_ERROR(error); + goto error; } } assert(exc && PyExceptionInstance_Check(exc)); @@ -5017,7 +5025,7 @@ PyFunctionObject *func = (PyFunctionObject *)frame->f_funcobj; PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(func); if (gen == NULL) { - GOTO_ERROR(error); + goto error; } assert(EMPTY()); _PyFrame_SetStackPointer(frame, stack_pointer); @@ -5126,7 +5134,7 @@ JUMPBY(oparg); } else { - GOTO_ERROR(error); + goto error; } } Py_DECREF(v); diff --git a/Python/import.c b/Python/import.c index dc92708c8b6ea0..6544a84d895d4a 100644 --- a/Python/import.c +++ b/Python/import.c @@ -1031,7 +1031,7 @@ _extensions_cache_set(PyObject *filename, PyObject *name, PyModuleDef *def) if (!already_set) { /* We assume that all module defs are statically allocated and will never be freed. Otherwise, we would incref here. */ - _Py_SetImmortal(def); + _Py_SetImmortal((PyObject *)def); } res = 0; diff --git a/Python/initconfig.c b/Python/initconfig.c index bbd611f7f7a48c..215d6a1d4e0dba 100644 --- a/Python/initconfig.c +++ b/Python/initconfig.c @@ -161,7 +161,7 @@ Options (and corresponding environment variables):\n\ -P : don't prepend a potentially unsafe path to sys.path; also\n\ PYTHONSAFEPATH\n\ -q : don't print version and copyright messages on interactive startup\n\ --s : don't add user site directory to sys.path; also PYTHONNOUSERSITE\n\ +-s : don't add user site directory to sys.path; also PYTHONNOUSERSITE=x\n\ -S : don't imply 'import site' on initialization\n\ -u : force the stdout and stderr streams to be unbuffered;\n\ this option has no effect on stdin; also PYTHONUNBUFFERED=x\n\ @@ -187,67 +187,46 @@ arg ...: arguments passed to program in sys.argv[1:]\n\ static const char usage_xoptions[] = "\ The following implementation-specific options are available:\n\ --X cpu_count=[n|default]: Override the return value of os.cpu_count(),\n\ - os.process_cpu_count(), and multiprocessing.cpu_count(). This can\n\ - help users who need to limit resources in a container.\n\ --X dev : enable CPython's \"development mode\", introducing additional runtime\n\ - checks which are too expensive to be enabled by default. 
Effect of\n\ - the developer mode:\n\ - * Add default warning filter, as -W default\n\ - * Install debug hooks on memory allocators: see the\n\ - PyMem_SetupDebugHooks() C function\n\ - * Enable the faulthandler module to dump the Python traceback on\n\ - a crash\n\ - * Enable asyncio debug mode\n\ - * Set the dev_mode attribute of sys.flags to True\n\ - * io.IOBase destructor logs close() exceptions\n\ --X faulthandler: enable faulthandler\n\ --X frozen_modules=[on|off]: whether or not frozen modules should be used.\n\ - The default is \"on\" (or \"off\" if you are running a local build).\n\ --X importtime: show how long each import takes. It shows module name,\n\ - cumulative time (including nested imports) and self time (excluding\n\ - nested imports). Note that its output may be broken in\n\ - multi-threaded application.\n\ - Typical usage is python3 -X importtime -c 'import asyncio'\n\ --X int_max_str_digits=number: limit the size of int<->str conversions.\n\ - This helps avoid denial of service attacks when parsing untrusted\n\ - data. The default is sys.int_info.default_max_str_digits.\n\ - 0 disables.\n\ --X no_debug_ranges: disable the inclusion of the tables mapping extra location\n\ - information (end line, start column offset and end column offset) to\n\ - every instruction in code objects. This is useful when smaller code\n\ - objects and pyc files are desired as well as suppressing the extra\n\ - visual location indicators when the interpreter displays tracebacks.\n\ --X perf: activate support for the Linux \"perf\" profiler by activating the\n\ - \"perf\" trampoline. When this option is activated, the Linux \"perf\"\n\ - profiler will be able to report Python calls. This option is only\n\ - available on some platforms and will do nothing if is not supported\n\ - on the current system. 
The default value is \"off\".\n\ +-X cpu_count=N: override the return value of os.cpu_count();\n\ + -X cpu_count=default cancels overriding; also PYTHON_CPU_COUNT\n\ +-X dev : enable Python Development Mode; also PYTHONDEVMODE\n\ +-X faulthandler: dump the Python traceback on fatal errors;\n\ + also PYTHONFAULTHANDLER\n\ +-X frozen_modules=[on|off]: whether to use frozen modules; the default is \"on\"\n\ + for installed Python and \"off\" for a local build;\n\ + also PYTHON_FROZEN_MODULES\n\ +" +#ifdef Py_GIL_DISABLED +"-X gil=[0|1]: enable (1) or disable (0) the GIL; also PYTHON_GIL\n" +#endif +"\ +-X importtime: show how long each import takes; also PYTHONPROFILEIMPORTTIME\n\ +-X int_max_str_digits=N: limit the size of int<->str conversions;\n\ + 0 disables the limit; also PYTHONINTMAXSTRDIGITS\n\ +-X no_debug_ranges: don't include extra location information in code objects;\n\ + also PYTHONNODEBUGRANGES\n\ +-X perf: support the Linux \"perf\" profiler; also PYTHONPERFSUPPORT=1\n\ " #ifdef Py_DEBUG -"-X presite=package.module: import this module before site.py is run.\n" +"-X presite=MOD: import this module before site; also PYTHON_PRESITE\n" #endif "\ --X pycache_prefix=PATH: enable writing .pyc files to a parallel tree rooted\n\ - at the given directory instead of to the code tree\n\ +-X pycache_prefix=PATH: write .pyc files to a parallel tree instead of to the\n\ + code tree; also PYTHONPYCACHEPREFIX\n\ " #ifdef Py_STATS -"-X pystats: Enable pystats collection at startup.\n" +"-X pystats: enable pystats collection at startup; also PYTHONSTATS\n" #endif "\ -X showrefcount: output the total reference count and number of used\n\ memory blocks when the program finishes or after each statement in\n\ - the interactive interpreter. This only works on debug builds\n\ --X tracemalloc: start tracing Python memory allocations using the\n\ - tracemalloc module. By default, only the most recent frame is stored\n\ - in a traceback of a trace. Use -X tracemalloc=NFRAME to start\n\ - tracing with a traceback limit of NFRAME frames\n\ --X utf8: enable UTF-8 mode for operating system interfaces, overriding the\n\ - default locale-aware mode. -X utf8=0 explicitly disables UTF-8 mode\n\ - (even when it would otherwise activate automatically)\n\ --X warn_default_encoding: enable opt-in EncodingWarning for 'encoding=None'\ -" -; + the interactive interpreter; only works on debug builds\n\ +-X tracemalloc[=N]: trace Python memory allocations; N sets a traceback limit\n\ + of N frames (default: 1); also PYTHONTRACEMALLOC=N\n\ +-X utf8[=0|1]: enable (1) or disable (0) UTF-8 mode; also PYTHONUTF8\n\ +-X warn_default_encoding: enable opt-in EncodingWarning for 'encoding=None';\n\ + also PYTHONWARNDEFAULTENCODING\ +"; /* Envvars that don't have equivalent command-line options are listed first */ static const char usage_envvars[] = @@ -257,9 +236,9 @@ static const char usage_envvars[] = " default module search path. 
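The rewritten usage_xoptions text above lists the -X options; at the C level these options surface in sys._xoptions, which extension or embedding code can read through the public PySys_GetXOptions() API. A hedged usage sketch (illustrative helper, not part of this patch; requires an initialized interpreter and the GIL held):

#include <Python.h>

/* Return 1 if "-X <name>" was passed, 0 if not, -1 on error. */
static int x_option_present(const char *name)
{
    PyObject *xoptions = PySys_GetXOptions();   /* borrowed reference */
    if (xoptions == NULL) {
        return -1;
    }
    PyObject *key = PyUnicode_FromString(name);
    if (key == NULL) {
        return -1;
    }
    int found = PyDict_Contains(xoptions, key);
    Py_DECREF(key);
    return found;
}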
The result is sys.path.\n" "PYTHONHOME : alternate directory (or %lc).\n" " The default module search path uses %s.\n" -"PYTHONPLATLIBDIR: override sys.platlibdir.\n" -"PYTHONCASEOK : ignore case in 'import' statements (Windows).\n" -"PYTHONIOENCODING: Encoding[:errors] used for stdin/stdout/stderr.\n" +"PYTHONPLATLIBDIR: override sys.platlibdir\n" +"PYTHONCASEOK : ignore case in 'import' statements (Windows)\n" +"PYTHONIOENCODING: encoding[:errors] used for stdin/stdout/stderr\n" "PYTHONHASHSEED : if this variable is set to 'random', a random value is used\n" " to seed the hashes of str and bytes objects. It can also be\n" " set to an integer in the range [0,4294967295] to get hash\n" @@ -280,39 +259,27 @@ static const char usage_envvars[] = "PYTHON_HISTORY : the location of a .python_history file.\n" "\n" "These variables have equivalent command-line options (see --help for details):\n" -"PYTHON_CPU_COUNT: Overrides the return value of os.process_cpu_count(),\n" -" os.cpu_count(), and multiprocessing.cpu_count() if set to\n" -" a positive integer. (-X cpu_count)\n" +"PYTHON_CPU_COUNT: override the return value of os.cpu_count() (-X cpu_count)\n" "PYTHONDEBUG : enable parser debug mode (-d)\n" -"PYTHONDEVMODE : enable the development mode (-X dev)\n" +"PYTHONDEVMODE : enable Python Development Mode (-X dev)\n" "PYTHONDONTWRITEBYTECODE: don't write .pyc files (-B)\n" "PYTHONFAULTHANDLER: dump the Python traceback on fatal errors (-X faulthandler)\n" -"PYTHON_FROZEN_MODULES: if this variable is set, it determines whether or not\n" -" frozen modules should be used. The default is \"on\" (or\n" -" \"off\" if you are running a local build).\n" +"PYTHON_FROZEN_MODULES: whether to use frozen modules; the default is \"on\"\n" +" for installed Python and \"off\" for a local build\n" " (-X frozen_modules)\n" #ifdef Py_GIL_DISABLED "PYTHON_GIL : when set to 0, disables the GIL (-X gil)\n" #endif "PYTHONINSPECT : inspect interactively after running script (-i)\n" -"PYTHONINTMAXSTRDIGITS: limits the maximum digit characters in an int value\n" -" when converting from a string and when converting an int\n" -" back to a str. A value of 0 disables the limit.\n" -" Conversions to or from bases 2, 4, 8, 16, and 32 are never\n" -" limited.\n" -" (-X int_max_str_digits=number)\n" -"PYTHONNODEBUGRANGES: if this variable is set, it disables the inclusion of\n" -" the tables mapping extra location information (end line,\n" -" start column offset and end column offset) to every\n" -" instruction in code objects. This is useful when smaller\n" -" code objects and pyc files are desired as well as\n" -" suppressing the extra visual location indicators when the\n" -" interpreter displays tracebacks. 
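The PYTHONHASHSEED entry above also has an embedding-side equivalent: the PyConfig initialization API exposes use_hash_seed and hash_seed fields. A hedged sketch of fixing the hash seed from C (illustrative embedding code, not part of this patch):

#include <Python.h>

int main(void)
{
    PyConfig config;
    PyConfig_InitPythonConfig(&config);
    config.use_hash_seed = 1;       /* like PYTHONHASHSEED=12345 */
    config.hash_seed = 12345;
    PyStatus status = Py_InitializeFromConfig(&config);
    PyConfig_Clear(&config);
    if (PyStatus_Exception(status)) {
        Py_ExitStatusException(status);
    }
    /* ... run application code ... */
    return Py_FinalizeEx() < 0 ? 120 : 0;
}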
(-X no_debug_ranges)\n" +"PYTHONINTMAXSTRDIGITS: limit the size of int<->str conversions;\n" +" 0 disables the limit (-X int_max_str_digits=N)\n" +"PYTHONNODEBUGRANGES: don't include extra location information in code objects\n" +" (-X no_debug_ranges)\n" "PYTHONNOUSERSITE: disable user site directory (-s)\n" "PYTHONOPTIMIZE : enable level 1 optimizations (-O)\n" "PYTHONPERFSUPPORT: support the Linux \"perf\" profiler (-X perf)\n" #ifdef Py_DEBUG -"PYTHON_PRESITE=pkg.mod: import this module before site.py is run (-X presite)\n" +"PYTHON_PRESITE: import this module before site (-X presite)\n" #endif "PYTHONPROFILEIMPORTTIME: show how long each import takes (-X importtime)\n" "PYTHONPYCACHEPREFIX: root directory for bytecode cache (pyc) files\n" @@ -323,11 +290,11 @@ static const char usage_envvars[] = #endif "PYTHONTRACEMALLOC: trace Python memory allocations (-X tracemalloc)\n" "PYTHONUNBUFFERED: disable stdout/stderr buffering (-u)\n" -"PYTHONUTF8 : if set to 1, enable the UTF-8 mode (-X utf8)\n" +"PYTHONUTF8 : control the UTF-8 mode (-X utf8)\n" "PYTHONVERBOSE : trace import statements (-v)\n" "PYTHONWARNDEFAULTENCODING: enable opt-in EncodingWarning for 'encoding=None'\n" " (-X warn_default_encoding)\n" -"PYTHONWARNINGS=arg: warning control (-W arg)\n" +"PYTHONWARNINGS : warning control (-W)\n" ; #if defined(MS_WINDOWS) diff --git a/Python/jit.c b/Python/jit.c index dae25166b1f106..03bcf1142715f3 100644 --- a/Python/jit.c +++ b/Python/jit.c @@ -112,26 +112,6 @@ mark_executable(unsigned char *memory, size_t size) return 0; } -static int -mark_readable(unsigned char *memory, size_t size) -{ - if (size == 0) { - return 0; - } - assert(size % get_page_size() == 0); -#ifdef MS_WINDOWS - DWORD old; - int failed = !VirtualProtect(memory, size, PAGE_READONLY, &old); -#else - int failed = mprotect(memory, size, PROT_READ); -#endif - if (failed) { - jit_error("unable to protect readable memory"); - return -1; - } - return 0; -} - // JIT compiler stuff: ///////////////////////////////////////////////////////// // Warning! AArch64 requires you to get your hands dirty. 
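The jit.c hunk that follows drops mark_readable() and instead protects the whole allocation in one call to mark_executable(). As a reference for the underlying system calls, here is a generic sketch of flipping a page-aligned region to read+execute (an illustrative analogue, not the patch's exact code):

#include <stddef.h>
#ifdef _WIN32
#  include <windows.h>
#else
#  include <sys/mman.h>
#endif

/* memory must be page-aligned; returns 0 on success, -1 on failure. */
static int make_region_executable(unsigned char *memory, size_t size)
{
#ifdef _WIN32
    DWORD old;
    return VirtualProtect(memory, size, PAGE_EXECUTE_READ, &old) ? 0 : -1;
#else
    return mprotect(memory, size, PROT_READ | PROT_EXEC);
#endif
}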
These are your gloves: @@ -401,31 +381,31 @@ int _PyJIT_Compile(_PyExecutorObject *executor, const _PyUOpInstruction *trace, size_t length) { // Loop once to find the total compiled size: - size_t code_size = 0; - size_t data_size = 0; + uint32_t instruction_starts[UOP_MAX_TRACE_LENGTH]; + uint32_t code_size = 0; + uint32_t data_size = 0; for (size_t i = 0; i < length; i++) { _PyUOpInstruction *instruction = (_PyUOpInstruction *)&trace[i]; const StencilGroup *group = &stencil_groups[instruction->opcode]; + instruction_starts[i] = code_size; code_size += group->code.body_size; data_size += group->data.body_size; } - // Round up to the nearest page (code and data need separate pages): + code_size += stencil_groups[_FATAL_ERROR].code.body_size; + data_size += stencil_groups[_FATAL_ERROR].data.body_size; + // Round up to the nearest page: size_t page_size = get_page_size(); assert((page_size & (page_size - 1)) == 0); - code_size += page_size - (code_size & (page_size - 1)); - data_size += page_size - (data_size & (page_size - 1)); - unsigned char *memory = jit_alloc(code_size + data_size); + size_t padding = page_size - ((code_size + data_size) & (page_size - 1)); + size_t total_size = code_size + data_size + padding; + unsigned char *memory = jit_alloc(total_size); if (memory == NULL) { return -1; } // Loop again to emit the code: unsigned char *code = memory; unsigned char *data = memory + code_size; - unsigned char *top = code; - if (trace[0].opcode == _START_EXECUTOR) { - // Don't want to execute this more than once: - top += stencil_groups[_START_EXECUTOR].code.body_size; - } + assert(trace[0].opcode == _START_EXECUTOR || trace[0].opcode == _COLD_EXIT); for (size_t i = 0; i < length; i++) { _PyUOpInstruction *instruction = (_PyUOpInstruction *)&trace[i]; const StencilGroup *group = &stencil_groups[instruction->opcode]; @@ -437,21 +417,54 @@ _PyJIT_Compile(_PyExecutorObject *executor, const _PyUOpInstruction *trace, size patches[HoleValue_EXECUTOR] = (uint64_t)executor; patches[HoleValue_OPARG] = instruction->oparg; patches[HoleValue_OPERAND] = instruction->operand; - patches[HoleValue_TARGET] = instruction->target; - patches[HoleValue_TOP] = (uint64_t)top; + switch (instruction->format) { + case UOP_FORMAT_TARGET: + patches[HoleValue_TARGET] = instruction->target; + break; + case UOP_FORMAT_EXIT: + assert(instruction->exit_index < executor->exit_count); + patches[HoleValue_EXIT_INDEX] = instruction->exit_index; + if (instruction->error_target < length) { + patches[HoleValue_ERROR_TARGET] = (uint64_t)memory + instruction_starts[instruction->error_target]; + } + break; + case UOP_FORMAT_JUMP: + assert(instruction->jump_target < length); + patches[HoleValue_JUMP_TARGET] = (uint64_t)memory + instruction_starts[instruction->jump_target]; + if (instruction->error_target < length) { + patches[HoleValue_ERROR_TARGET] = (uint64_t)memory + instruction_starts[instruction->error_target]; + } + break; + default: + assert(0); + Py_FatalError("Illegal instruction format"); + } + patches[HoleValue_TOP] = (uint64_t)memory + instruction_starts[1]; patches[HoleValue_ZERO] = 0; emit(group, patches); code += group->code.body_size; data += group->data.body_size; } - if (mark_executable(memory, code_size) || - mark_readable(memory + code_size, data_size)) - { - jit_free(memory, code_size + data_size); + // Protect against accidental buffer overrun into data: + const StencilGroup *group = &stencil_groups[_FATAL_ERROR]; + uint64_t patches[] = GET_PATCHES(); + patches[HoleValue_CODE] = (uint64_t)code; + 
patches[HoleValue_CONTINUE] = (uint64_t)code; + patches[HoleValue_DATA] = (uint64_t)data; + patches[HoleValue_EXECUTOR] = (uint64_t)executor; + patches[HoleValue_TOP] = (uint64_t)code; + patches[HoleValue_ZERO] = 0; + emit(group, patches); + code += group->code.body_size; + data += group->data.body_size; + assert(code == memory + code_size); + assert(data == memory + code_size + data_size); + if (mark_executable(memory, total_size)) { + jit_free(memory, total_size); return -1; } executor->jit_code = memory; - executor->jit_size = code_size + data_size; + executor->jit_size = total_size; return 0; } diff --git a/Python/marshal.c b/Python/marshal.c index daec7415b3fc7e..21d242bbb9757e 100644 --- a/Python/marshal.c +++ b/Python/marshal.c @@ -14,6 +14,10 @@ #include "pycore_setobject.h" // _PySet_NextEntry() #include "marshal.h" // Py_MARSHAL_VERSION +#ifdef __APPLE__ +# include "TargetConditionals.h" +#endif /* __APPLE__ */ + /*[clinic input] module marshal [clinic start generated code]*/ @@ -33,11 +37,14 @@ module marshal * #if defined(MS_WINDOWS) && defined(_DEBUG) */ #if defined(MS_WINDOWS) -#define MAX_MARSHAL_STACK_DEPTH 1000 +# define MAX_MARSHAL_STACK_DEPTH 1000 #elif defined(__wasi__) -#define MAX_MARSHAL_STACK_DEPTH 1500 +# define MAX_MARSHAL_STACK_DEPTH 1500 +// TARGET_OS_IPHONE covers any non-macOS Apple platform. +#elif defined(__APPLE__) && TARGET_OS_IPHONE +# define MAX_MARSHAL_STACK_DEPTH 1500 #else -#define MAX_MARSHAL_STACK_DEPTH 2000 +# define MAX_MARSHAL_STACK_DEPTH 2000 #endif #define TYPE_NULL '0' diff --git a/Python/optimizer.c b/Python/optimizer.c index a6d6ffe5378636..38ab6d3cf61c72 100644 --- a/Python/optimizer.c +++ b/Python/optimizer.c @@ -154,13 +154,19 @@ PyUnstable_GetOptimizer(void) } static _PyExecutorObject * -make_executor_from_uops(_PyUOpInstruction *buffer, const _PyBloomFilter *dependencies); +make_executor_from_uops(_PyUOpInstruction *buffer, int length, const _PyBloomFilter *dependencies); static int init_cold_exit_executor(_PyExecutorObject *executor, int oparg); +/* It is impossible for the number of exits to reach 1/4 of the total length, + * as the number of exits cannot reach 1/3 of the number of non-exits, due to + * the presence of CHECK_VALIDITY checks and instructions to produce the values + * being checked in exits. 
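Earlier in the _PyJIT_Compile hunk, code_size + data_size is padded out to a whole number of pages before jit_alloc() is called, relying on page_size being a power of two. An equivalent closed-form helper (a sketch, not the patch's code) looks like this; note that the patch computes an explicit padding value instead, which reserves one extra page when the combined size is already page-aligned:

#include <assert.h>
#include <stddef.h>

/* Round n up to the next multiple of page_size (page_size must be a power of two). */
static size_t round_up_to_page(size_t n, size_t page_size)
{
    assert((page_size & (page_size - 1)) == 0);
    return (n + page_size - 1) & ~(page_size - 1);
}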
*/ +#define COLD_EXIT_COUNT (UOP_MAX_TRACE_LENGTH/4) + static int cold_exits_initialized = 0; -static _PyExecutorObject COLD_EXITS[UOP_MAX_TRACE_LENGTH] = { 0 }; +static _PyExecutorObject COLD_EXITS[COLD_EXIT_COUNT] = { 0 }; static const _PyBloomFilter EMPTY_FILTER = { 0 }; @@ -172,7 +178,7 @@ _Py_SetOptimizer(PyInterpreterState *interp, _PyOptimizerObject *optimizer) } else if (cold_exits_initialized == 0) { cold_exits_initialized = 1; - for (int i = 0; i < UOP_MAX_TRACE_LENGTH; i++) { + for (int i = 0; i < COLD_EXIT_COUNT; i++) { if (init_cold_exit_executor(&COLD_EXITS[i], i)) { return NULL; } @@ -211,7 +217,7 @@ _PyOptimizer_Optimize( _PyInterpreterFrame *frame, _Py_CODEUNIT *start, PyObject **stack_pointer, _PyExecutorObject **executor_ptr) { - PyCodeObject *code = (PyCodeObject *)frame->f_executable; + PyCodeObject *code = _PyFrame_GetCode(frame); assert(PyCode_Check(code)); PyInterpreterState *interp = _PyInterpreterState_GET(); if (!has_space_for_executor(code, start)) { @@ -313,10 +319,33 @@ _PyUOpPrint(const _PyUOpInstruction *uop) else { printf("%s", name); } - printf(" (%d, target=%d, operand=%#" PRIx64 ")", - uop->oparg, - uop->target, - (uint64_t)uop->operand); + switch(uop->format) { + case UOP_FORMAT_TARGET: + printf(" (%d, target=%d, operand=%#" PRIx64, + uop->oparg, + uop->target, + (uint64_t)uop->operand); + break; + case UOP_FORMAT_JUMP: + printf(" (%d, jump_target=%d, operand=%#" PRIx64, + uop->oparg, + uop->jump_target, + (uint64_t)uop->operand); + break; + case UOP_FORMAT_EXIT: + printf(" (%d, exit_index=%d, operand=%#" PRIx64, + uop->oparg, + uop->exit_index, + (uint64_t)uop->operand); + break; + default: + printf(" (%d, Unknown format)", uop->oparg); + } + if (_PyUop_Flags[uop->opcode] & HAS_ERROR_FLAG) { + printf(", error_target=%d", uop->error_target); + } + + printf(")"); } #endif @@ -432,28 +461,36 @@ BRANCH_TO_GUARD[4][2] = { #endif -// Beware: Macro arg order differs from struct member order +static inline int +add_to_trace( + _PyUOpInstruction *trace, + int trace_length, + uint16_t opcode, + uint16_t oparg, + uint64_t operand, + uint32_t target) +{ + trace[trace_length].opcode = opcode; + trace[trace_length].format = UOP_FORMAT_TARGET; + trace[trace_length].target = target; + trace[trace_length].oparg = oparg; + trace[trace_length].operand = operand; + return trace_length + 1; +} + #ifdef Py_DEBUG #define ADD_TO_TRACE(OPCODE, OPARG, OPERAND, TARGET) \ assert(trace_length < max_length); \ - trace[trace_length].opcode = (OPCODE); \ - trace[trace_length].oparg = (OPARG); \ - trace[trace_length].target = (TARGET); \ - trace[trace_length].operand = (OPERAND); \ + trace_length = add_to_trace(trace, trace_length, (OPCODE), (OPARG), (OPERAND), (TARGET)); \ if (lltrace >= 2) { \ printf("%4d ADD_TO_TRACE: ", trace_length); \ - _PyUOpPrint(&trace[trace_length]); \ + _PyUOpPrint(&trace[trace_length-1]); \ printf("\n"); \ - } \ - trace_length++; + } #else #define ADD_TO_TRACE(OPCODE, OPARG, OPERAND, TARGET) \ assert(trace_length < max_length); \ - trace[trace_length].opcode = (OPCODE); \ - trace[trace_length].oparg = (OPARG); \ - trace[trace_length].target = (TARGET); \ - trace[trace_length].operand = (OPERAND); \ - trace_length++; + trace_length = add_to_trace(trace, trace_length, (OPCODE), (OPARG), (OPERAND), (TARGET)); #endif #define INSTR_IP(INSTR, CODE) \ @@ -476,11 +513,12 @@ BRANCH_TO_GUARD[4][2] = { if (trace_stack_depth >= TRACE_STACK_SIZE) { \ DPRINTF(2, "Trace stack overflow\n"); \ OPT_STAT_INC(trace_stack_overflow); \ - ADD_TO_TRACE(_EXIT_TRACE, 0, 0, 0); \ + 
trace_length = 0; \ goto done; \ } \ - assert(func->func_code == (PyObject *)code); \ + assert(func == NULL || func->func_code == (PyObject *)code); \ trace_stack[trace_stack_depth].func = func; \ + trace_stack[trace_stack_depth].code = code; \ trace_stack[trace_stack_depth].instr = instr; \ trace_stack_depth++; #define TRACE_STACK_POP() \ @@ -489,10 +527,11 @@ BRANCH_TO_GUARD[4][2] = { } \ trace_stack_depth--; \ func = trace_stack[trace_stack_depth].func; \ - code = (PyCodeObject *)trace_stack[trace_stack_depth].func->func_code; \ + code = trace_stack[trace_stack_depth].code; \ + assert(func == NULL || func->func_code == (PyObject *)code); \ instr = trace_stack[trace_stack_depth].instr; -/* Returns 1 on success, +/* Returns the length of the trace on success, * 0 if it failed to produce a worthwhile trace, * and -1 on an error. */ @@ -505,16 +544,18 @@ translate_bytecode_to_trace( _PyBloomFilter *dependencies) { bool progress_needed = true; - PyCodeObject *code = (PyCodeObject *)frame->f_executable; + PyCodeObject *code = _PyFrame_GetCode(frame); PyFunctionObject *func = (PyFunctionObject *)frame->f_funcobj; assert(PyFunction_Check(func)); PyCodeObject *initial_code = code; _Py_BloomFilter_Add(dependencies, initial_code); _Py_CODEUNIT *initial_instr = instr; int trace_length = 0; - int max_length = buffer_size; + // Leave space for possible trailing _EXIT_TRACE + int max_length = buffer_size-2; struct { PyFunctionObject *func; + PyCodeObject *code; _Py_CODEUNIT *instr; } trace_stack[TRACE_STACK_SIZE]; int trace_stack_depth = 0; @@ -534,13 +575,16 @@ translate_bytecode_to_trace( PyUnicode_AsUTF8(code->co_filename), code->co_firstlineno, 2 * INSTR_IP(initial_instr, code)); + ADD_TO_TRACE(_START_EXECUTOR, 0, (uintptr_t)instr, INSTR_IP(instr, code)); uint32_t target = 0; top: // Jump here after _PUSH_FRAME or likely branches for (;;) { target = INSTR_IP(instr, code); - RESERVE_RAW(2, "epilogue"); // Always need space for _SET_IP, _CHECK_VALIDITY and _EXIT_TRACE + RESERVE_RAW(2, "_CHECK_VALIDITY_AND_SET_IP"); ADD_TO_TRACE(_CHECK_VALIDITY_AND_SET_IP, 0, (uintptr_t)instr, target); + // Need space for _DEOPT + max_length--; uint32_t opcode = instr->op.code; uint32_t oparg = instr->op.arg; @@ -578,13 +622,22 @@ translate_bytecode_to_trace( continue; } else { - if (OPCODE_HAS_DEOPT(opcode)) { + if (OPCODE_HAS_EXIT(opcode) || OPCODE_HAS_DEOPT(opcode)) { opcode = _PyOpcode_Deopt[opcode]; } + assert(!OPCODE_HAS_EXIT(opcode)); assert(!OPCODE_HAS_DEOPT(opcode)); } } + if (OPCODE_HAS_EXIT(opcode)) { + // Make space for exit code + max_length--; + } + if (OPCODE_HAS_ERROR(opcode)) { + // Make space for error code + max_length--; + } switch (opcode) { case POP_JUMP_IF_NONE: case POP_JUMP_IF_NOT_NONE: @@ -620,10 +673,10 @@ translate_bytecode_to_trace( DPRINTF(2, "Jump likely (%04x = %d bits), continue at byte offset %d\n", instr[1].cache, bitcount, 2 * INSTR_IP(target_instr, code)); instr = target_instr; - ADD_TO_TRACE(uopcode, max_length, 0, INSTR_IP(next_instr, code)); + ADD_TO_TRACE(uopcode, 0, 0, INSTR_IP(next_instr, code)); goto top; } - ADD_TO_TRACE(uopcode, max_length, 0, INSTR_IP(target_instr, code)); + ADD_TO_TRACE(uopcode, 0, 0, INSTR_IP(target_instr, code)); break; } @@ -719,9 +772,19 @@ translate_bytecode_to_trace( if (uop == _POP_FRAME) { TRACE_STACK_POP(); - /* Set the operand to the function object returned to, - * to assist optimization passes */ - ADD_TO_TRACE(uop, oparg, (uintptr_t)func, target); + /* Set the operand to the function or code object returned to, + * to assist optimization 
passes. (See _PUSH_FRAME below.) + */ + if (func != NULL) { + operand = (uintptr_t)func; + } + else if (code != NULL) { + operand = (uintptr_t)code | 1; + } + else { + operand = 0; + } + ADD_TO_TRACE(uop, oparg, operand, target); DPRINTF(2, "Returning to %s (%s:%d) at byte offset %d\n", PyUnicode_AsUTF8(code->co_qualname), @@ -738,10 +801,12 @@ translate_bytecode_to_trace( // Add one to account for the actual opcode/oparg pair: + 1; uint32_t func_version = read_u32(&instr[func_version_offset].cache); - PyFunctionObject *new_func = _PyFunction_LookupByVersion(func_version); - DPRINTF(2, "Function: version=%#x; object=%p\n", (int)func_version, new_func); - if (new_func != NULL) { - PyCodeObject *new_code = (PyCodeObject *)PyFunction_GET_CODE(new_func); + PyCodeObject *new_code = NULL; + PyFunctionObject *new_func = + _PyFunction_LookupByVersion(func_version, (PyObject **) &new_code); + DPRINTF(2, "Function: version=%#x; new_func=%p, new_code=%p\n", + (int)func_version, new_func, new_code); + if (new_code != NULL) { if (new_code == code) { // Recursive call, bail (we could be here forever). DPRINTF(2, "Bailing on recursive call to %s (%s:%d)\n", @@ -766,9 +831,22 @@ translate_bytecode_to_trace( instr += _PyOpcode_Caches[_PyOpcode_Deopt[opcode]] + 1; TRACE_STACK_PUSH(); _Py_BloomFilter_Add(dependencies, new_code); - /* Set the operand to the callee's function object, - * to assist optimization passes */ - ADD_TO_TRACE(uop, oparg, (uintptr_t)new_func, target); + /* Set the operand to the callee's function or code object, + * to assist optimization passes. + * We prefer setting it to the function (for remove_globals()) + * but if that's not available but the code is available, + * use the code, setting the low bit so the optimizer knows. + */ + if (new_func != NULL) { + operand = (uintptr_t)new_func; + } + else if (new_code != NULL) { + operand = (uintptr_t)new_code | 1; + } + else { + operand = 0; + } + ADD_TO_TRACE(uop, oparg, operand, target); code = new_code; func = new_func; instr = _PyCode_CODE(code); @@ -780,8 +858,8 @@ translate_bytecode_to_trace( 2 * INSTR_IP(instr, code)); goto top; } - DPRINTF(2, "Bail, new_func == NULL\n"); - ADD_TO_TRACE(uop, oparg, operand, target); + DPRINTF(2, "Bail, new_code == NULL\n"); + ADD_TO_TRACE(uop, oparg, 0, target); ADD_TO_TRACE(_EXIT_TRACE, 0, 0, 0); goto done; } @@ -820,7 +898,9 @@ translate_bytecode_to_trace( progress_needed ? "no progress" : "too short"); return 0; } - ADD_TO_TRACE(_EXIT_TRACE, 0, 0, target); + if (trace[trace_length-1].opcode != _JUMP_TO_TOP) { + ADD_TO_TRACE(_EXIT_TRACE, 0, 0, target); + } DPRINTF(1, "Created a proto-trace for %s (%s:%d) at byte offset %d -- length %d\n", PyUnicode_AsUTF8(code->co_qualname), @@ -828,8 +908,8 @@ translate_bytecode_to_trace( code->co_firstlineno, 2 * INSTR_IP(initial_instr, code), trace_length); - OPT_HIST(trace_length + buffer_size - max_length, trace_length_hist); - return 1; + OPT_HIST(trace_length, trace_length_hist); + return trace_length; } #undef RESERVE @@ -842,43 +922,86 @@ translate_bytecode_to_trace( #define SET_BIT(array, bit) (array[(bit)>>5] |= (1<<((bit)&31))) #define BIT_IS_SET(array, bit) (array[(bit)>>5] & (1<<((bit)&31))) -/* Count the number of used uops, and mark them in the bit vector `used`. - * This can be done in a single pass using simple reachability analysis, - * as there are no backward jumps. - * NOPs are excluded from the count. 
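The _POP_FRAME/_PUSH_FRAME operand handling above stores either a function pointer or a code-object pointer with the low bit set as a marker for the optimizer. This works because object pointers are at least 2-byte aligned, so bit 0 is always free. A minimal sketch of the tagging scheme (illustrative helper names, not CPython API):

#include <stdint.h>

static inline uint64_t tag_func(void *func) { return (uint64_t)(uintptr_t)func; }
static inline uint64_t tag_code(void *code) { return (uint64_t)(uintptr_t)code | 1; }
static inline int operand_is_code(uint64_t operand) { return operand != 0 && (operand & 1) != 0; }
static inline void *operand_untag(uint64_t operand) { return (void *)(uintptr_t)(operand & ~(uint64_t)1); }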
+/* Count the number of unused uops and exits */ static int -compute_used(_PyUOpInstruction *buffer, uint32_t *used, int *exit_count_ptr) +count_exits(_PyUOpInstruction *buffer, int length) { - int count = 0; int exit_count = 0; - SET_BIT(used, 0); - for (int i = 0; i < UOP_MAX_TRACE_LENGTH; i++) { - if (!BIT_IS_SET(used, i)) { - continue; - } - count++; + for (int i = 0; i < length; i++) { int opcode = buffer[i].opcode; - if (_PyUop_Flags[opcode] & HAS_EXIT_FLAG) { + if (opcode == _SIDE_EXIT) { exit_count++; } - if (opcode == _JUMP_TO_TOP || opcode == _EXIT_TRACE) { - continue; + } + return exit_count; +} + +static void make_exit(_PyUOpInstruction *inst, int opcode, int target) +{ + inst->opcode = opcode; + inst->oparg = 0; + inst->format = UOP_FORMAT_TARGET; + inst->target = target; +} + +/* Convert implicit exits, errors and deopts + * into explicit ones. */ +static int +prepare_for_execution(_PyUOpInstruction *buffer, int length) +{ + int32_t current_jump = -1; + int32_t current_jump_target = -1; + int32_t current_error = -1; + int32_t current_error_target = -1; + int32_t current_popped = -1; + /* Leaving in NOPs slows down the interpreter and messes up the stats */ + _PyUOpInstruction *copy_to = &buffer[0]; + for (int i = 0; i < length; i++) { + _PyUOpInstruction *inst = &buffer[i]; + if (inst->opcode != _NOP) { + if (copy_to != inst) { + *copy_to = *inst; + } + copy_to++; } - /* All other micro-ops fall through, so i+1 is reachable */ - SET_BIT(used, i+1); - assert(opcode <= MAX_UOP_ID); - if (_PyUop_Flags[opcode] & HAS_JUMP_FLAG) { - /* Mark target as reachable */ - SET_BIT(used, buffer[i].oparg); + } + length = (int)(copy_to - buffer); + int next_spare = length; + for (int i = 0; i < length; i++) { + _PyUOpInstruction *inst = &buffer[i]; + int opcode = inst->opcode; + int32_t target = (int32_t)uop_get_target(inst); + if (_PyUop_Flags[opcode] & (HAS_EXIT_FLAG | HAS_DEOPT_FLAG)) { + if (target != current_jump_target) { + uint16_t exit_op = (_PyUop_Flags[opcode] & HAS_EXIT_FLAG) ? _SIDE_EXIT : _DEOPT; + make_exit(&buffer[next_spare], exit_op, target); + current_jump_target = target; + current_jump = next_spare; + next_spare++; + } + buffer[i].jump_target = current_jump; + buffer[i].format = UOP_FORMAT_JUMP; } - if (opcode == NOP) { - count--; - UNSET_BIT(used, i); + if (_PyUop_Flags[opcode] & HAS_ERROR_FLAG) { + int popped = (_PyUop_Flags[opcode] & HAS_ERROR_NO_POP_FLAG) ? 
+ 0 : _PyUop_num_popped(opcode, inst->oparg); + if (target != current_error_target || popped != current_popped) { + current_popped = popped; + current_error = next_spare; + current_error_target = target; + make_exit(&buffer[next_spare], _ERROR_POP_N, 0); + buffer[next_spare].oparg = popped; + next_spare++; + } + buffer[i].error_target = current_error; + if (buffer[i].format == UOP_FORMAT_TARGET) { + buffer[i].format = UOP_FORMAT_JUMP; + buffer[i].jump_target = 0; + } } } - *exit_count_ptr = exit_count; - return count; + return next_spare; } /* Executor side exits */ @@ -897,59 +1020,118 @@ allocate_executor(int exit_count, int length) return res; } +#ifdef Py_DEBUG + +#define CHECK(PRED) \ +if (!(PRED)) { \ + printf(#PRED " at %d\n", i); \ + assert(0); \ +} + +static int +target_unused(int opcode) +{ + return (_PyUop_Flags[opcode] & (HAS_ERROR_FLAG | HAS_EXIT_FLAG | HAS_DEOPT_FLAG)) == 0; +} + +static void +sanity_check(_PyExecutorObject *executor) +{ + for (uint32_t i = 0; i < executor->exit_count; i++) { + _PyExitData *exit = &executor->exits[i]; + CHECK(exit->target < (1 << 25)); + } + bool ended = false; + uint32_t i = 0; + CHECK(executor->trace[0].opcode == _START_EXECUTOR || executor->trace[0].opcode == _COLD_EXIT); + for (; i < executor->code_size; i++) { + const _PyUOpInstruction *inst = &executor->trace[i]; + uint16_t opcode = inst->opcode; + CHECK(opcode <= MAX_UOP_ID); + CHECK(_PyOpcode_uop_name[opcode] != NULL); + switch(inst->format) { + case UOP_FORMAT_TARGET: + CHECK(target_unused(opcode)); + break; + case UOP_FORMAT_EXIT: + CHECK(opcode == _SIDE_EXIT); + CHECK(inst->exit_index < executor->exit_count); + break; + case UOP_FORMAT_JUMP: + CHECK(inst->jump_target < executor->code_size); + break; + case UOP_FORMAT_UNUSED: + CHECK(0); + break; + } + if (_PyUop_Flags[opcode] & HAS_ERROR_FLAG) { + CHECK(inst->format == UOP_FORMAT_JUMP); + CHECK(inst->error_target < executor->code_size); + } + if (opcode == _JUMP_TO_TOP || opcode == _EXIT_TRACE || opcode == _COLD_EXIT) { + ended = true; + i++; + break; + } + } + CHECK(ended); + for (; i < executor->code_size; i++) { + const _PyUOpInstruction *inst = &executor->trace[i]; + uint16_t opcode = inst->opcode; + CHECK( + opcode == _DEOPT || + opcode == _SIDE_EXIT || + opcode == _ERROR_POP_N); + if (opcode == _SIDE_EXIT) { + CHECK(inst->format == UOP_FORMAT_EXIT); + } + } +} + +#undef CHECK +#endif + /* Makes an executor from a buffer of uops. * Account for the buffer having gaps and NOPs by computing a "used" * bit vector and only copying the used uops. Here "used" means reachable * and not a NOP. */ static _PyExecutorObject * -make_executor_from_uops(_PyUOpInstruction *buffer, const _PyBloomFilter *dependencies) +make_executor_from_uops(_PyUOpInstruction *buffer, int length, const _PyBloomFilter *dependencies) { - uint32_t used[(UOP_MAX_TRACE_LENGTH + 31)/32] = { 0 }; - int exit_count; - int length = compute_used(buffer, used, &exit_count); - length += 1; // For _START_EXECUTOR + int exit_count = count_exits(buffer, length); _PyExecutorObject *executor = allocate_executor(exit_count, length); if (executor == NULL) { return NULL; } + /* Initialize exits */ + assert(exit_count < COLD_EXIT_COUNT); for (int i = 0; i < exit_count; i++) { executor->exits[i].executor = &COLD_EXITS[i]; executor->exits[i].temperature = 0; } int next_exit = exit_count-1; - _PyUOpInstruction *dest = (_PyUOpInstruction *)&executor->trace[length-1]; - /* Scan backwards, so that we see the destinations of jumps before the jumps themselves. 
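The new prepare_for_execution() above starts by squeezing _NOP instructions out of the buffer with a trailing write cursor (copy_to). That is the standard single-pass, in-place compaction idiom; a stand-alone sketch of the same technique on a plain int array (illustrative stand-in for the _NOP filtering):

#include <stddef.h>

/* Keep only the non-zero entries of items[0..length), preserving order;
 * returns the new length. */
static size_t compact_nonzero(int *items, size_t length)
{
    size_t copy_to = 0;
    for (size_t i = 0; i < length; i++) {
        if (items[i] != 0) {
            items[copy_to++] = items[i];
        }
    }
    return copy_to;
}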
*/ - for (int i = UOP_MAX_TRACE_LENGTH-1; i >= 0; i--) { - if (!BIT_IS_SET(used, i)) { - continue; - } - *dest = buffer[i]; + _PyUOpInstruction *dest = (_PyUOpInstruction *)&executor->trace[length]; + assert(buffer[0].opcode == _START_EXECUTOR); + buffer[0].operand = (uint64_t)executor; + for (int i = length-1; i >= 0; i--) { int opcode = buffer[i].opcode; - if (opcode == _POP_JUMP_IF_FALSE || - opcode == _POP_JUMP_IF_TRUE) - { - /* The oparg of the target will already have been set to its new offset */ - int oparg = dest->oparg; - dest->oparg = buffer[oparg].oparg; - } - if (_PyUop_Flags[opcode] & HAS_EXIT_FLAG) { + dest--; + *dest = buffer[i]; + assert(opcode != _POP_JUMP_IF_FALSE && opcode != _POP_JUMP_IF_TRUE); + if (opcode == _SIDE_EXIT) { executor->exits[next_exit].target = buffer[i].target; dest->exit_index = next_exit; + dest->format = UOP_FORMAT_EXIT; next_exit--; } - /* Set the oparg to be the destination offset, - * so that we can set the oparg of earlier jumps correctly. */ - buffer[i].oparg = (uint16_t)(dest - executor->trace); - dest--; } assert(next_exit == -1); assert(dest == executor->trace); - dest->opcode = _START_EXECUTOR; + assert(dest->opcode == _START_EXECUTOR); dest->oparg = 0; dest->target = 0; - dest->operand = (uintptr_t)executor; _Py_ExecutorInit(executor, dependencies); #ifdef Py_DEBUG char *python_lltrace = Py_GETENV("PYTHON_LLTRACE"); @@ -965,6 +1147,7 @@ make_executor_from_uops(_PyUOpInstruction *buffer, const _PyBloomFilter *depende printf("\n"); } } + sanity_check(executor); #endif #ifdef _Py_JIT executor->jit_code = NULL; @@ -981,7 +1164,7 @@ make_executor_from_uops(_PyUOpInstruction *buffer, const _PyBloomFilter *depende static int init_cold_exit_executor(_PyExecutorObject *executor, int oparg) { - _Py_SetImmortal(executor); + _Py_SetImmortalUntracked((PyObject *)executor); Py_SET_TYPE(executor, &_PyUOpExecutor_Type); executor->trace = (_PyUOpInstruction *)executor->exits; executor->code_size = 1; @@ -993,6 +1176,9 @@ init_cold_exit_executor(_PyExecutorObject *executor, int oparg) for (int i = 0; i < BLOOM_FILTER_WORDS; i++) { assert(executor->vm_data.bloom.bits[i] == 0); } +#ifdef Py_DEBUG + sanity_check(executor); +#endif #ifdef _Py_JIT executor->jit_code = NULL; executor->jit_size = 0; @@ -1003,6 +1189,28 @@ init_cold_exit_executor(_PyExecutorObject *executor, int oparg) return 0; } +#ifdef Py_STATS +/* Returns the effective trace length. 
+ * Ignores NOPs and trailing exit and error handling.*/ +int effective_trace_length(_PyUOpInstruction *buffer, int length) +{ + int nop_count = 0; + for (int i = 0; i < length; i++) { + int opcode = buffer[i].opcode; + if (opcode == _NOP) { + nop_count++; + } + if (opcode == _EXIT_TRACE || + opcode == _JUMP_TO_TOP || + opcode == _COLD_EXIT) { + return i+1-nop_count; + } + } + Py_FatalError("No terminating instruction"); + Py_UNREACHABLE(); +} +#endif + static int uop_optimize( _PyOptimizerObject *self, @@ -1015,24 +1223,26 @@ uop_optimize( _Py_BloomFilter_Init(&dependencies); _PyUOpInstruction buffer[UOP_MAX_TRACE_LENGTH]; OPT_STAT_INC(attempts); - int err = translate_bytecode_to_trace(frame, instr, buffer, UOP_MAX_TRACE_LENGTH, &dependencies); - if (err <= 0) { + int length = translate_bytecode_to_trace(frame, instr, buffer, UOP_MAX_TRACE_LENGTH, &dependencies); + if (length <= 0) { // Error or nothing translated - return err; + return length; } + assert(length < UOP_MAX_TRACE_LENGTH); OPT_STAT_INC(traces_created); char *env_var = Py_GETENV("PYTHON_UOPS_OPTIMIZE"); if (env_var == NULL || *env_var == '\0' || *env_var > '0') { - err = _Py_uop_analyze_and_optimize(frame, buffer, - UOP_MAX_TRACE_LENGTH, + length = _Py_uop_analyze_and_optimize(frame, buffer, + length, curr_stackentries, &dependencies); - if (err <= 0) { - return err; + if (length <= 0) { + return length; } } - assert(err == 1); + assert(length < UOP_MAX_TRACE_LENGTH); + assert(length >= 1); /* Fix up */ - for (int pc = 0; pc < UOP_MAX_TRACE_LENGTH; pc++) { + for (int pc = 0; pc < length; pc++) { int opcode = buffer[pc].opcode; int oparg = buffer[pc].oparg; if (_PyUop_Flags[opcode] & HAS_OPARG_AND_1_FLAG) { @@ -1047,11 +1257,14 @@ uop_optimize( assert(_PyOpcode_uop_name[buffer[pc].opcode]); assert(strncmp(_PyOpcode_uop_name[buffer[pc].opcode], _PyOpcode_uop_name[opcode], strlen(_PyOpcode_uop_name[opcode])) == 0); } - _PyExecutorObject *executor = make_executor_from_uops(buffer, &dependencies); + OPT_HIST(effective_trace_length(buffer, length), optimized_trace_length_hist); + length = prepare_for_execution(buffer, length); + assert(length <= UOP_MAX_TRACE_LENGTH); + _PyExecutorObject *executor = make_executor_from_uops(buffer, length, &dependencies); if (executor == NULL) { return -1; } - OPT_HIST(Py_SIZE(executor), optimized_trace_length_hist); + assert(length <= UOP_MAX_TRACE_LENGTH); *exec_ptr = executor; return 1; } @@ -1115,7 +1328,7 @@ counter_optimize( int Py_UNUSED(curr_stackentries) ) { - PyCodeObject *code = (PyCodeObject *)frame->f_executable; + PyCodeObject *code = _PyFrame_GetCode(frame); int oparg = instr->op.arg; while (instr->op.code == EXTENDED_ARG) { instr++; @@ -1126,12 +1339,14 @@ counter_optimize( return 0; } _Py_CODEUNIT *target = instr + 1 + _PyOpcode_Caches[JUMP_BACKWARD] - oparg; - _PyUOpInstruction buffer[3] = { + _PyUOpInstruction buffer[5] = { + { .opcode = _START_EXECUTOR }, { .opcode = _LOAD_CONST_INLINE_BORROW, .operand = (uintptr_t)self }, { .opcode = _INTERNAL_INCREMENT_OPT_COUNTER }, - { .opcode = _EXIT_TRACE, .target = (uint32_t)(target - _PyCode_CODE(code)) } + { .opcode = _EXIT_TRACE, .jump_target = 4, .format=UOP_FORMAT_JUMP }, + { .opcode = _SIDE_EXIT, .target = (uint32_t)(target - _PyCode_CODE(code)), .format=UOP_FORMAT_TARGET } }; - _PyExecutorObject *executor = make_executor_from_uops(buffer, &EMPTY_FILTER); + _PyExecutorObject *executor = make_executor_from_uops(buffer, 5, &EMPTY_FILTER); if (executor == NULL) { return -1; } diff --git a/Python/optimizer_analysis.c 
b/Python/optimizer_analysis.c index 0c95616848a85b..6f553f8ab8ad2e 100644 --- a/Python/optimizer_analysis.c +++ b/Python/optimizer_analysis.c @@ -139,6 +139,7 @@ remove_globals(_PyInterpreterFrame *frame, _PyUOpInstruction *buffer, PyInterpreterState *interp = _PyInterpreterState_GET(); PyObject *builtins = frame->f_builtins; if (builtins != interp->builtins) { + OPT_STAT_INC(remove_globals_builtins_changed); return 1; } PyObject *globals = frame->f_globals; @@ -170,6 +171,7 @@ remove_globals(_PyInterpreterFrame *frame, _PyUOpInstruction *buffer, switch(opcode) { case _GUARD_BUILTINS_VERSION: if (incorrect_keys(inst, builtins)) { + OPT_STAT_INC(remove_globals_incorrect_keys); return 0; } if (interp->rare_events.builtin_dict >= _Py_MAX_ALLOWED_BUILTINS_MODIFICATIONS) { @@ -190,6 +192,7 @@ remove_globals(_PyInterpreterFrame *frame, _PyUOpInstruction *buffer, break; case _GUARD_GLOBALS_VERSION: if (incorrect_keys(inst, globals)) { + OPT_STAT_INC(remove_globals_incorrect_keys); return 0; } uint64_t watched_mutations = get_mutations(globals); @@ -225,7 +228,12 @@ remove_globals(_PyInterpreterFrame *frame, _PyUOpInstruction *buffer, builtins_watched <<= 1; globals_watched <<= 1; function_checked <<= 1; - PyFunctionObject *func = (PyFunctionObject *)buffer[pc].operand; + uint64_t operand = buffer[pc].operand; + if (operand == 0 || (operand & 1)) { + // It's either a code object or NULL, so bail + return 1; + } + PyFunctionObject *func = (PyFunctionObject *)operand; if (func == NULL) { return 1; } @@ -238,6 +246,7 @@ remove_globals(_PyInterpreterFrame *frame, _PyUOpInstruction *buffer, globals = func->func_globals; builtins = func->func_builtins; if (builtins != interp->builtins) { + OPT_STAT_INC(remove_globals_builtins_changed); return 1; } break; @@ -247,7 +256,15 @@ remove_globals(_PyInterpreterFrame *frame, _PyUOpInstruction *buffer, builtins_watched >>= 1; globals_watched >>= 1; function_checked >>= 1; - PyFunctionObject *func = (PyFunctionObject *)buffer[pc].operand; + uint64_t operand = buffer[pc].operand; + if (operand == 0 || (operand & 1)) { + // It's either a code object or NULL, so bail + return 1; + } + PyFunctionObject *func = (PyFunctionObject *)operand; + if (func == NULL) { + return 1; + } assert(PyFunction_Check(func)); function_version = func->func_version; globals = func->func_globals; @@ -358,6 +375,7 @@ optimize_uops( _Py_UOpsContext context; _Py_UOpsContext *ctx = &context; + uint32_t opcode = UINT16_MAX; if (_Py_uop_abstractcontext_init(ctx) < 0) { goto out_of_space; @@ -369,13 +387,12 @@ optimize_uops( ctx->curr_frame_depth++; ctx->frame = frame; - for (_PyUOpInstruction *this_instr = trace; - this_instr < trace + trace_len && !op_is_end(this_instr->opcode); - this_instr++) { + _PyUOpInstruction *this_instr = NULL; + for (int i = 0; i < trace_len; i++) { + this_instr = &trace[i]; int oparg = this_instr->oparg; - uint32_t opcode = this_instr->opcode; - + opcode = this_instr->opcode; _Py_UopsSymbol **stack_pointer = ctx->frame->stack_pointer; #ifdef Py_DEBUG @@ -399,35 +416,41 @@ optimize_uops( ctx->frame->stack_pointer = stack_pointer; assert(STACK_LEVEL() >= 0); } - _Py_uop_abstractcontext_fini(ctx); - return 1; + return trace_len; out_of_space: DPRINTF(3, "\n"); DPRINTF(1, "Out of space in abstract interpreter\n"); - _Py_uop_abstractcontext_fini(ctx); - return 0; - + goto done; error: DPRINTF(3, "\n"); DPRINTF(1, "Encountered error in abstract interpreter\n"); + if (opcode <= MAX_UOP_ID) { + OPT_ERROR_IN_OPCODE(opcode); + } _Py_uop_abstractcontext_fini(ctx); - return 0; + 
return -1; hit_bottom: // Attempted to push a "bottom" (contradition) symbol onto the stack. // This means that the abstract interpreter has hit unreachable code. - // We *could* generate an _EXIT_TRACE or _FATAL_ERROR here, but it's - // simpler to just admit failure and not create the executor. + // We *could* generate an _EXIT_TRACE or _FATAL_ERROR here, but hitting + // bottom indicates type instability, so we are probably better off + // retrying later. DPRINTF(3, "\n"); DPRINTF(1, "Hit bottom in abstract interpreter\n"); _Py_uop_abstractcontext_fini(ctx); return 0; +done: + /* Cannot optimize further, but there would be no benefit + * in retrying later */ + _Py_uop_abstractcontext_fini(ctx); + return trace_len; } -static void +static int remove_unneeded_uops(_PyUOpInstruction *buffer, int buffer_size) { /* Remove _SET_IP and _CHECK_VALIDITY where possible. @@ -482,7 +505,7 @@ remove_unneeded_uops(_PyUOpInstruction *buffer, int buffer_size) } case _JUMP_TO_TOP: case _EXIT_TRACE: - return; + return pc + 1; default: { bool needs_ip = false; @@ -506,12 +529,14 @@ remove_unneeded_uops(_PyUOpInstruction *buffer, int buffer_size) } } } + Py_FatalError("No terminating instruction"); + Py_UNREACHABLE(); } static void peephole_opt(_PyInterpreterFrame *frame, _PyUOpInstruction *buffer, int buffer_size) { - PyCodeObject *co = (PyCodeObject *)frame->f_executable; + PyCodeObject *co = _PyFrame_GetCode(frame); for (int pc = 0; pc < buffer_size; pc++) { int opcode = buffer[pc].opcode; switch(opcode) { @@ -534,11 +559,16 @@ peephole_opt(_PyInterpreterFrame *frame, _PyUOpInstruction *buffer, int buffer_s case _PUSH_FRAME: case _POP_FRAME: { - PyFunctionObject *func = (PyFunctionObject *)buffer[pc].operand; - if (func == NULL) { + uint64_t operand = buffer[pc].operand; + if (operand & 1) { + co = (PyCodeObject *)(operand & ~1); + assert(PyCode_Check(co)); + } + else if (operand == 0) { co = NULL; } else { + PyFunctionObject *func = (PyFunctionObject *)operand; assert(PyFunction_Check(func)); co = (PyCodeObject *)func->func_code; } @@ -553,43 +583,36 @@ peephole_opt(_PyInterpreterFrame *frame, _PyUOpInstruction *buffer, int buffer_s // 0 - failure, no error raised, just fall back to Tier 1 // -1 - failure, and raise error -// 1 - optimizer success +// > 0 - length of optimized trace int _Py_uop_analyze_and_optimize( _PyInterpreterFrame *frame, _PyUOpInstruction *buffer, - int buffer_size, + int length, int curr_stacklen, _PyBloomFilter *dependencies ) { OPT_STAT_INC(optimizer_attempts); - int err = remove_globals(frame, buffer, buffer_size, dependencies); - if (err == 0) { - goto not_ready; - } - if (err < 0) { - goto error; + int err = remove_globals(frame, buffer, length, dependencies); + if (err <= 0) { + return err; } - peephole_opt(frame, buffer, buffer_size); + peephole_opt(frame, buffer, length); - err = optimize_uops( - (PyCodeObject *)frame->f_executable, buffer, - buffer_size, curr_stacklen, dependencies); + length = optimize_uops( + _PyFrame_GetCode(frame), buffer, + length, curr_stacklen, dependencies); - if (err == 0) { - goto not_ready; + if (length <= 0) { + return length; } - assert(err == 1); - remove_unneeded_uops(buffer, buffer_size); + length = remove_unneeded_uops(buffer, length); + assert(length > 0); OPT_STAT_INC(optimizer_successes); - return 1; -not_ready: - return 0; -error: - return -1; + return length; } diff --git a/Python/optimizer_bytecodes.c b/Python/optimizer_bytecodes.c index ef08c0d8897c9f..e38428af108893 100644 --- a/Python/optimizer_bytecodes.c +++ 
b/Python/optimizer_bytecodes.c @@ -543,12 +543,25 @@ dummy_func(void) { (void)callable; - PyFunctionObject *func = (PyFunctionObject *)(this_instr + 2)->operand; - DPRINTF(3, "func: %p ", func); - if (func == NULL) { - goto error; + PyCodeObject *co = NULL; + assert((this_instr + 2)->opcode == _PUSH_FRAME); + uint64_t push_operand = (this_instr + 2)->operand; + if (push_operand & 1) { + co = (PyCodeObject *)(push_operand & ~1); + DPRINTF(3, "code=%p ", co); + assert(PyCode_Check(co)); + } + else { + PyFunctionObject *func = (PyFunctionObject *)push_operand; + DPRINTF(3, "func=%p ", func); + if (func == NULL) { + DPRINTF(3, "\n"); + DPRINTF(1, "Missing function\n"); + goto done; + } + co = (PyCodeObject *)func->func_code; + DPRINTF(3, "code=%p ", co); } - PyCodeObject *co = (PyCodeObject *)func->func_code; assert(self_or_null != NULL); assert(args != NULL); diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h index 610d1b1aede9cc..df73cc091dea26 100644 --- a/Python/optimizer_cases.c.h +++ b/Python/optimizer_cases.c.h @@ -769,14 +769,7 @@ break; } - case _LOAD_NAME: { - _Py_UopsSymbol *v; - v = sym_new_not_null(ctx); - if (v == NULL) goto out_of_space; - stack_pointer[0] = v; - stack_pointer += 1; - break; - } + /* _LOAD_NAME is not a viable micro-op for tier 2 */ case _LOAD_GLOBAL: { _Py_UopsSymbol *res; @@ -900,14 +893,7 @@ break; } - case _BUILD_SET: { - _Py_UopsSymbol *set; - set = sym_new_not_null(ctx); - if (set == NULL) goto out_of_space; - stack_pointer[-oparg] = set; - stack_pointer += 1 - oparg; - break; - } + /* _BUILD_SET is not a viable micro-op for tier 2 */ case _BUILD_MAP: { _Py_UopsSymbol *map; @@ -1408,31 +1394,9 @@ /* _FOR_ITER_GEN is not a viable micro-op for tier 2 */ - case _BEFORE_ASYNC_WITH: { - _Py_UopsSymbol *exit; - _Py_UopsSymbol *res; - exit = sym_new_not_null(ctx); - if (exit == NULL) goto out_of_space; - res = sym_new_not_null(ctx); - if (res == NULL) goto out_of_space; - stack_pointer[-1] = exit; - stack_pointer[0] = res; - stack_pointer += 1; - break; - } + /* _BEFORE_ASYNC_WITH is not a viable micro-op for tier 2 */ - case _BEFORE_WITH: { - _Py_UopsSymbol *exit; - _Py_UopsSymbol *res; - exit = sym_new_not_null(ctx); - if (exit == NULL) goto out_of_space; - res = sym_new_not_null(ctx); - if (res == NULL) goto out_of_space; - stack_pointer[-1] = exit; - stack_pointer[0] = res; - stack_pointer += 1; - break; - } + /* _BEFORE_WITH is not a viable micro-op for tier 2 */ case _WITH_EXCEPT_START: { _Py_UopsSymbol *res; @@ -1596,12 +1560,25 @@ callable = stack_pointer[-2 - oparg]; int argcount = oparg; (void)callable; - PyFunctionObject *func = (PyFunctionObject *)(this_instr + 2)->operand; - DPRINTF(3, "func: %p ", func); - if (func == NULL) { - goto error; + PyCodeObject *co = NULL; + assert((this_instr + 2)->opcode == _PUSH_FRAME); + uint64_t push_operand = (this_instr + 2)->operand; + if (push_operand & 1) { + co = (PyCodeObject *)(push_operand & ~1); + DPRINTF(3, "code=%p ", co); + assert(PyCode_Check(co)); + } + else { + PyFunctionObject *func = (PyFunctionObject *)push_operand; + DPRINTF(3, "func=%p ", func); + if (func == NULL) { + DPRINTF(3, "\n"); + DPRINTF(1, "Missing function\n"); + goto done; + } + co = (PyCodeObject *)func->func_code; + DPRINTF(3, "code=%p ", co); } - PyCodeObject *co = (PyCodeObject *)func->func_code; assert(self_or_null != NULL); assert(args != NULL); if (sym_is_not_null(self_or_null)) { @@ -2016,3 +1993,16 @@ break; } + case _DEOPT: { + break; + } + + case _SIDE_EXIT: { + break; + } + + case _ERROR_POP_N: { + 
stack_pointer += -oparg; + break; + } + diff --git a/Python/pyhash.c b/Python/pyhash.c index 141407c265677a..d508d78092a9e7 100644 --- a/Python/pyhash.c +++ b/Python/pyhash.c @@ -94,7 +94,7 @@ _Py_HashDouble(PyObject *inst, double v) if (Py_IS_INFINITY(v)) return v > 0 ? _PyHASH_INF : -_PyHASH_INF; else - return _Py_HashPointer(inst); + return PyObject_GenericHash(inst); } m = frexp(v, &e); @@ -139,6 +139,12 @@ Py_HashPointer(const void *ptr) return hash; } +Py_hash_t +PyObject_GenericHash(PyObject *obj) +{ + return Py_HashPointer(obj); +} + Py_hash_t _Py_HashBytes(const void *src, Py_ssize_t len) { diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 3a2c0a450ac9d9..1d315b80d88ce0 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -687,6 +687,10 @@ pycore_init_global_objects(PyInterpreterState *interp) _PyUnicode_InitState(interp); + if (_Py_IsMainInterpreter(interp)) { + _Py_GetConstant_Init(); + } + return _PyStatus_OK(); } @@ -1911,6 +1915,9 @@ Py_FinalizeEx(void) int malloc_stats = tstate->interp->config.malloc_stats; #endif + /* Ensure that remaining threads are detached */ + _PyEval_StopTheWorldAll(runtime); + /* Remaining daemon threads will automatically exit when they attempt to take the GIL (ex: PyEval_RestoreThread()). */ _PyInterpreterState_SetFinalizing(tstate->interp, tstate); @@ -1927,8 +1934,11 @@ Py_FinalizeEx(void) will be called in the current Python thread. Since _PyRuntimeState_SetFinalizing() has been called, no other Python thread can take the GIL at this point: if they try, they will exit - immediately. */ - _PyThreadState_DeleteExcept(tstate); + immediately. We start the world once we are the only thread state left, + before we call destructors. */ + PyThreadState *list = _PyThreadState_RemoveExcept(tstate); + _PyEval_StartTheWorldAll(runtime); + _PyThreadState_DeleteList(list); /* At this point no Python code should be running at all. The only thread state left should be the main thread of the main @@ -3135,6 +3145,10 @@ call_ll_exitfuncs(_PyRuntimeState *runtime) void _Py_NO_RETURN Py_Exit(int sts) { + PyThreadState *tstate = _PyThreadState_GET(); + if (tstate != NULL && _PyThreadState_IsRunningMain(tstate)) { + _PyInterpreterState_SetNotRunningMain(tstate->interp); + } if (Py_FinalizeEx() < 0) { sts = 120; } diff --git a/Python/pystate.c b/Python/pystate.c index eedcb920cd1cf2..8489f53c6e3e34 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -2,6 +2,7 @@ /* Thread and interpreter state structures and their interfaces */ #include "Python.h" +#include "pycore_abstract.h" // _PyIndex_Check() #include "pycore_ceval.h" #include "pycore_code.h" // stats #include "pycore_critical_section.h" // _PyCriticalSection_Resume() @@ -260,6 +261,12 @@ bind_tstate(PyThreadState *tstate) tstate->native_thread_id = PyThread_get_thread_native_id(); #endif +#ifdef Py_GIL_DISABLED + // Initialize biased reference counting inter-thread queue. Note that this + // needs to be initialized from the active thread. + _Py_brc_init_thread(tstate); +#endif + // mimalloc state needs to be initialized from the active thread. tstate_mimalloc_bind(tstate); @@ -1041,10 +1048,24 @@ _PyInterpreterState_IsRunningMain(PyInterpreterState *interp) if (interp->threads.main != NULL) { return 1; } - // For now, we assume the main interpreter is always running. - if (_Py_IsMainInterpreter(interp)) { - return 1; + // Embedders might not know to call _PyInterpreterState_SetRunningMain(), + // so their main thread wouldn't show it is running the main interpreter's + // program. 
(Py_Main() doesn't have this problem.) For now this isn't + // critical. If it were, we would need to infer "running main" from other + // information, like if it's the main interpreter. We used to do that + // but the naive approach led to some inconsistencies that caused problems. + return 0; +} + +int +_PyThreadState_IsRunningMain(PyThreadState *tstate) +{ + PyInterpreterState *interp = tstate->interp; + if (interp->threads.main != NULL) { + return tstate == interp->threads.main; } + // See the note in _PyInterpreterState_IsRunningMain() about + // possible false negatives here for embedders. return 0; } @@ -1059,11 +1080,83 @@ _PyInterpreterState_FailIfRunningMain(PyInterpreterState *interp) return 0; } +void +_PyInterpreterState_ReinitRunningMain(PyThreadState *tstate) +{ + PyInterpreterState *interp = tstate->interp; + if (interp->threads.main != tstate) { + interp->threads.main = NULL; + } +} + //---------- // accessors //---------- +PyObject * +PyUnstable_InterpreterState_GetMainModule(PyInterpreterState *interp) +{ + PyObject *modules = _PyImport_GetModules(interp); + if (modules == NULL) { + PyErr_SetString(PyExc_RuntimeError, "interpreter not initialized"); + return NULL; + } + return PyMapping_GetItemString(modules, "__main__"); +} + +PyObject * +PyInterpreterState_GetDict(PyInterpreterState *interp) +{ + if (interp->dict == NULL) { + interp->dict = PyDict_New(); + if (interp->dict == NULL) { + PyErr_Clear(); + } + } + /* Returning NULL means no per-interpreter dict is available. */ + return interp->dict; +} + + +//---------- +// interp ID +//---------- + +int64_t +_PyInterpreterState_ObjectToID(PyObject *idobj) +{ + if (!_PyIndex_Check(idobj)) { + PyErr_Format(PyExc_TypeError, + "interpreter ID must be an int, got %.100s", + Py_TYPE(idobj)->tp_name); + return -1; + } + + // This may raise OverflowError. + // For now, we don't worry about if LLONG_MAX < INT64_MAX. + long long id = PyLong_AsLongLong(idobj); + if (id == -1 && PyErr_Occurred()) { + return -1; + } + + if (id < 0) { + PyErr_Format(PyExc_ValueError, + "interpreter ID must be a non-negative int, got %R", + idobj); + return -1; + } +#if LLONG_MAX > INT64_MAX + else if (id > INT64_MAX) { + PyErr_SetString(PyExc_OverflowError, "int too big to convert"); + return -1; + } +#endif + else { + return (int64_t)id; + } +} + int64_t PyInterpreterState_GetID(PyInterpreterState *interp) { @@ -1142,30 +1235,6 @@ _PyInterpreterState_RequireIDRef(PyInterpreterState *interp, int required) interp->requires_idref = required ? 1 : 0; } -PyObject * -PyUnstable_InterpreterState_GetMainModule(PyInterpreterState *interp) -{ - PyObject *modules = _PyImport_GetModules(interp); - if (modules == NULL) { - PyErr_SetString(PyExc_RuntimeError, "interpreter not initialized"); - return NULL; - } - return PyMapping_GetItemString(modules, "__main__"); -} - -PyObject * -PyInterpreterState_GetDict(PyInterpreterState *interp) -{ - if (interp->dict == NULL) { - interp->dict = PyDict_New(); - if (interp->dict == NULL) { - PyErr_Clear(); - } - } - /* Returning NULL means no per-interpreter dict is available. 
*/ - return interp->dict; -} - //----------------------------- // look up an interpreter state @@ -1227,6 +1296,16 @@ _PyInterpreterState_LookUpID(int64_t requested_id) return interp; } +PyInterpreterState * +_PyInterpreterState_LookUpIDObject(PyObject *requested_id) +{ + int64_t id = _PyInterpreterState_ObjectToID(requested_id); + if (id < 0) { + return NULL; + } + return _PyInterpreterState_LookUpID(id); +} + /********************************/ /* the per-thread runtime state */ @@ -1339,10 +1418,6 @@ init_threadstate(_PyThreadStateImpl *_tstate, tstate->what_event = -1; tstate->previous_executor = NULL; -#ifdef Py_GIL_DISABLED - // Initialize biased reference counting inter-thread queue - _Py_brc_init_thread(tstate); -#endif llist_init(&_tstate->mem_free_queue); if (interp->stoptheworld.requested || _PyRuntime.stoptheworld.requested) { @@ -1488,6 +1563,7 @@ PyThreadState_Clear(PyThreadState *tstate) { assert(tstate->_status.initialized && !tstate->_status.cleared); assert(current_fast_get()->interp == tstate->interp); + assert(!_PyThreadState_IsRunningMain(tstate)); // XXX assert(!tstate->_status.bound || tstate->_status.unbound); tstate->_status.finalizing = 1; // just in case @@ -1586,6 +1662,7 @@ tstate_delete_common(PyThreadState *tstate) assert(tstate->_status.cleared && !tstate->_status.finalized); assert(tstate->state != _Py_THREAD_ATTACHED); tstate_verify_not_active(tstate); + assert(!_PyThreadState_IsRunningMain(tstate)); PyInterpreterState *interp = tstate->interp; if (interp == NULL) { @@ -1678,24 +1755,29 @@ PyThreadState_DeleteCurrent(void) } -/* - * Delete all thread states except the one passed as argument. - * Note that, if there is a current thread state, it *must* be the one - * passed as argument. Also, this won't touch any other interpreters - * than the current one, since we don't know which thread state should - * be kept in those other interpreters. - */ -void -_PyThreadState_DeleteExcept(PyThreadState *tstate) +// Unlinks and removes all thread states from `tstate->interp`, with the +// exception of the one passed as an argument. However, it does not delete +// these thread states. Instead, it returns the removed thread states as a +// linked list. +// +// Note that if there is a current thread state, it *must* be the one +// passed as argument. Also, this won't touch any interpreters other +// than the current one, since we don't know which thread state should +// be kept in those other interpreters. +PyThreadState * +_PyThreadState_RemoveExcept(PyThreadState *tstate) { assert(tstate != NULL); PyInterpreterState *interp = tstate->interp; _PyRuntimeState *runtime = interp->runtime; +#ifdef Py_GIL_DISABLED + assert(runtime->stoptheworld.world_stopped); +#endif + HEAD_LOCK(runtime); /* Remove all thread states, except tstate, from the linked list of - thread states. This will allow calling PyThreadState_Clear() - without holding the lock. */ + thread states. */ PyThreadState *list = interp->threads.head; if (list == tstate) { list = tstate->next; @@ -1710,9 +1792,19 @@ _PyThreadState_DeleteExcept(PyThreadState *tstate) interp->threads.head = tstate; HEAD_UNLOCK(runtime); - /* Clear and deallocate all stale thread states. Even if this - executes Python code, we should be safe since it executes - in the current thread, not one of the stale threads. */ + return list; +} + +// Deletes the thread states in the linked list `list`. +// +// This is intended to be used in conjunction with _PyThreadState_RemoveExcept. 
+void +_PyThreadState_DeleteList(PyThreadState *list) +{ + // The world can't be stopped because we PyThreadState_Clear() can + // call destructors. + assert(!_PyRuntime.stoptheworld.world_stopped); + PyThreadState *p, *next; for (p = list; p; p = next) { next = p->next; diff --git a/Python/specialize.c b/Python/specialize.c index b1f9eb756c3665..c1edf8842faf68 100644 --- a/Python/specialize.c +++ b/Python/specialize.c @@ -247,8 +247,9 @@ print_optimization_stats(FILE *out, OptimizationStats *stats) fprintf(out, "Optimization optimizer successes: %" PRIu64 "\n", stats->optimizer_successes); fprintf(out, "Optimization optimizer failure no memory: %" PRIu64 "\n", stats->optimizer_failure_reason_no_memory); + fprintf(out, "Optimizer remove globals builtins changed: %" PRIu64 "\n", stats->remove_globals_builtins_changed); + fprintf(out, "Optimizer remove globals incorrect keys: %" PRIu64 "\n", stats->remove_globals_incorrect_keys); - const char* const* names; for (int i = 0; i <= MAX_UOP_ID; i++) { if (stats->opcode[i].execution_count) { fprintf(out, "uops[%s].execution_count : %" PRIu64 "\n", _PyUOpName(i), stats->opcode[i].execution_count); @@ -268,6 +269,17 @@ print_optimization_stats(FILE *out, OptimizationStats *stats) ); } } + + for (int i = 0; i < MAX_UOP_ID; i++) { + if (stats->error_in_opcode[i]) { + fprintf( + out, + "error_in_opcode[%s].count : %" PRIu64 "\n", + _PyUOpName(i), + stats->error_in_opcode[i] + ); + } + } } static void diff --git a/Python/stdlib_module_names.h b/Python/stdlib_module_names.h index 2445a5c838a7d7..ac9d91b5e12885 100644 --- a/Python/stdlib_module_names.h +++ b/Python/stdlib_module_names.h @@ -38,6 +38,7 @@ static const char* _Py_stdlib_module_names[] = { "_heapq", "_imp", "_io", +"_ios_support", "_json", "_locale", "_lsprof", diff --git a/Tools/build/generate_sbom.py b/Tools/build/generate_sbom.py index 6aa4946ee227e7..5c1851f09338a0 100644 --- a/Tools/build/generate_sbom.py +++ b/Tools/build/generate_sbom.py @@ -183,6 +183,20 @@ def check_sbom_packages(sbom_data: dict[str, typing.Any]) -> None: ), ) + # HACL* specifies its expected rev in a refresh script. + if package["name"] == "hacl-star": + hacl_refresh_sh = (CPYTHON_ROOT_DIR / "Modules/_hacl/refresh.sh").read_text() + hacl_expected_rev_match = re.search( + r"expected_hacl_star_rev=([0-9a-f]{40})", + hacl_refresh_sh + ) + hacl_expected_rev = hacl_expected_rev_match and hacl_expected_rev_match.group(1) + + error_if( + hacl_expected_rev != version, + "HACL* SBOM version doesn't match value in 'Modules/_hacl/refresh.sh'" + ) + # License must be on the approved list for SPDX. 
license_concluded = package["licenseConcluded"] error_if( diff --git a/Tools/c-analyzer/cpython/globals-to-fix.tsv b/Tools/c-analyzer/cpython/globals-to-fix.tsv index 686a3d3160cc90..92fab9b3998636 100644 --- a/Tools/c-analyzer/cpython/globals-to-fix.tsv +++ b/Tools/c-analyzer/cpython/globals-to-fix.tsv @@ -54,7 +54,6 @@ Objects/genobject.c - _PyAsyncGenASend_Type - Objects/genobject.c - _PyAsyncGenAThrow_Type - Objects/genobject.c - _PyAsyncGenWrappedValue_Type - Objects/genobject.c - _PyCoroWrapper_Type - -Objects/interpreteridobject.c - PyInterpreterID_Type - Objects/iterobject.c - PyCallIter_Type - Objects/iterobject.c - PySeqIter_Type - Objects/iterobject.c - _PyAnextAwaitable_Type - diff --git a/Tools/c-analyzer/cpython/ignored.tsv b/Tools/c-analyzer/cpython/ignored.tsv index 0024e2683052c8..965346b9b04a32 100644 --- a/Tools/c-analyzer/cpython/ignored.tsv +++ b/Tools/c-analyzer/cpython/ignored.tsv @@ -742,3 +742,4 @@ Modules/_sqlite/module.c - _sqlite3module - Modules/clinic/md5module.c.h _md5_md5 _keywords - Modules/clinic/grpmodule.c.h grp_getgrgid _keywords - Modules/clinic/grpmodule.c.h grp_getgrnam _keywords - +Objects/object.c - constants static PyObject*[] diff --git a/Tools/cases_generator/analyzer.py b/Tools/cases_generator/analyzer.py index 27e6ba2b3fdedf..2329205ad31d09 100644 --- a/Tools/cases_generator/analyzer.py +++ b/Tools/cases_generator/analyzer.py @@ -8,7 +8,8 @@ @dataclass class Properties: escapes: bool - infallible: bool + error_with_pop: bool + error_without_pop: bool deopts: bool oparg: bool jumps: bool @@ -37,7 +38,8 @@ def dump(self, indent: str) -> None: def from_list(properties: list["Properties"]) -> "Properties": return Properties( escapes=any(p.escapes for p in properties), - infallible=all(p.infallible for p in properties), + error_with_pop=any(p.error_with_pop for p in properties), + error_without_pop=any(p.error_without_pop for p in properties), deopts=any(p.deopts for p in properties), oparg=any(p.oparg for p in properties), jumps=any(p.jumps for p in properties), @@ -55,10 +57,16 @@ def from_list(properties: list["Properties"]) -> "Properties": passthrough=all(p.passthrough for p in properties), ) + @property + def infallible(self) -> bool: + return not self.error_with_pop and not self.error_without_pop + + SKIP_PROPERTIES = Properties( escapes=False, - infallible=True, + error_with_pop=False, + error_without_pop=False, deopts=False, oparg=False, jumps=False, @@ -157,20 +165,32 @@ def size(self) -> int: self._size = sum(c.size for c in self.caches) return self._size - def is_viable(self) -> bool: + def why_not_viable(self) -> str | None: if self.name == "_SAVE_RETURN_OFFSET": - return True # Adjusts next_instr, but only in tier 1 code - if self.properties.needs_this: - return False + return None # Adjusts next_instr, but only in tier 1 code if "INSTRUMENTED" in self.name: - return False + return "is instrumented" if "replaced" in self.annotations: - return False + return "is replaced" if self.name in ("INTERPRETER_EXIT", "JUMP_BACKWARD"): - return False + return "has tier 1 control flow" + if self.properties.needs_this: + return "uses the 'this_instr' variable" if len([c for c in self.caches if c.name != "unused"]) > 1: - return False - return True + return "has unused cache entries" + if self.properties.error_with_pop and self.properties.error_without_pop: + return "has both popping and not-popping errors" + if self.properties.eval_breaker: + if self.properties.error_with_pop or self.properties.error_without_pop: + return "has error handling and 
eval-breaker check" + if self.properties.side_exit: + return "exits and eval-breaker check" + if self.properties.deopts: + return "deopts and eval-breaker check" + return None + + def is_viable(self) -> bool: + return self.why_not_viable() is None def is_super(self) -> bool: for tkn in self.body: @@ -320,10 +340,17 @@ def tier_variable(node: parser.InstDef) -> int | None: return int(token.text[-1]) return None -def is_infallible(op: parser.InstDef) -> bool: - return not ( +def has_error_with_pop(op: parser.InstDef) -> bool: + return ( variable_used(op, "ERROR_IF") - or variable_used(op, "error") + or variable_used(op, "pop_1_error") + or variable_used(op, "exception_unwind") + or variable_used(op, "resume_with_error") + ) + +def has_error_without_pop(op: parser.InstDef) -> bool: + return ( + variable_used(op, "ERROR_NO_POP") or variable_used(op, "pop_1_error") or variable_used(op, "exception_unwind") or variable_used(op, "resume_with_error") @@ -507,12 +534,15 @@ def compute_properties(op: parser.InstDef) -> Properties: tkn.column, op.name, ) - infallible = is_infallible(op) + error_with_pop = has_error_with_pop(op) + error_without_pop = has_error_without_pop(op) + infallible = not error_with_pop and not error_without_pop passthrough = stack_effect_only_peeks(op) and infallible return Properties( escapes=makes_escaping_api_call(op), - infallible=infallible, - deopts=deopts_if or exits_if, + error_with_pop=error_with_pop, + error_without_pop=error_without_pop, + deopts=deopts_if, side_exit=exits_if, oparg=variable_used(op, "oparg"), jumps=variable_used(op, "JUMPBY"), diff --git a/Tools/cases_generator/generators_common.py b/Tools/cases_generator/generators_common.py index 0b4b99c60768b5..0addcf0ab570f6 100644 --- a/Tools/cases_generator/generators_common.py +++ b/Tools/cases_generator/generators_common.py @@ -99,6 +99,20 @@ def replace_error( out.emit(close) +def replace_error_no_pop( + out: CWriter, + tkn: Token, + tkn_iter: Iterator[Token], + uop: Uop, + stack: Stack, + inst: Instruction | None, +) -> None: + next(tkn_iter) # LPAREN + next(tkn_iter) # RPAREN + next(tkn_iter) # Semi colon + out.emit_at("goto error;", tkn) + + def replace_decrefs( out: CWriter, tkn: Token, @@ -160,6 +174,7 @@ def replace_check_eval_breaker( "EXIT_IF": replace_deopt, "DEOPT_IF": replace_deopt, "ERROR_IF": replace_error, + "ERROR_NO_POP": replace_error_no_pop, "DECREF_INPUTS": replace_decrefs, "CHECK_EVAL_BREAKER": replace_check_eval_breaker, "SYNC_SP": replace_sync_sp, @@ -213,6 +228,8 @@ def cflags(p: Properties) -> str: flags.append("HAS_EXIT_FLAG") if not p.infallible: flags.append("HAS_ERROR_FLAG") + if p.error_without_pop: + flags.append("HAS_ERROR_NO_POP_FLAG") if p.escapes: flags.append("HAS_ESCAPES_FLAG") if p.pure: diff --git a/Tools/cases_generator/opcode_metadata_generator.py b/Tools/cases_generator/opcode_metadata_generator.py index ab597834a8892f..04fecb235f18cd 100644 --- a/Tools/cases_generator/opcode_metadata_generator.py +++ b/Tools/cases_generator/opcode_metadata_generator.py @@ -54,6 +54,7 @@ "PURE", "PASSTHROUGH", "OPARG_AND_1", + "ERROR_NO_POP", ] diff --git a/Tools/cases_generator/tier2_generator.py b/Tools/cases_generator/tier2_generator.py index d8eed1078b0914..114d28ee745632 100644 --- a/Tools/cases_generator/tier2_generator.py +++ b/Tools/cases_generator/tier2_generator.py @@ -72,21 +72,21 @@ def tier2_replace_error( label = next(tkn_iter).text next(tkn_iter) # RPAREN next(tkn_iter) # Semi colon - out.emit(") ") - c_offset = stack.peek_offset.to_c() - try: - offset = -int(c_offset) 
- close = ";\n" - except ValueError: - offset = None - out.emit(f"{{ stack_pointer += {c_offset}; ") - close = "; }\n" - out.emit("goto ") - if offset: - out.emit(f"pop_{offset}_") - out.emit(label + "_tier_two") - out.emit(close) + out.emit(") JUMP_TO_ERROR();\n") + +def tier2_replace_error_no_pop( + out: CWriter, + tkn: Token, + tkn_iter: Iterator[Token], + uop: Uop, + stack: Stack, + inst: Instruction | None, +) -> None: + next(tkn_iter) # LPAREN + next(tkn_iter) # RPAREN + next(tkn_iter) # Semi colon + out.emit_at("JUMP_TO_ERROR();", tkn) def tier2_replace_deopt( out: CWriter, @@ -100,7 +100,7 @@ def tier2_replace_deopt( out.emit(next(tkn_iter)) emit_to(out, tkn_iter, "RPAREN") next(tkn_iter) # Semi colon - out.emit(") goto deoptimize;\n") + out.emit(") JUMP_TO_JUMP_TARGET();\n") def tier2_replace_exit_if( @@ -115,7 +115,7 @@ def tier2_replace_exit_if( out.emit(next(tkn_iter)) emit_to(out, tkn_iter, "RPAREN") next(tkn_iter) # Semi colon - out.emit(") goto side_exit;\n") + out.emit(") JUMP_TO_JUMP_TARGET();\n") def tier2_replace_oparg( @@ -141,6 +141,7 @@ def tier2_replace_oparg( TIER2_REPLACEMENT_FUNCTIONS = REPLACEMENT_FUNCTIONS.copy() TIER2_REPLACEMENT_FUNCTIONS["ERROR_IF"] = tier2_replace_error +TIER2_REPLACEMENT_FUNCTIONS["ERROR_NO_POP"] = tier2_replace_error_no_pop TIER2_REPLACEMENT_FUNCTIONS["DEOPT_IF"] = tier2_replace_deopt TIER2_REPLACEMENT_FUNCTIONS["oparg"] = tier2_replace_oparg TIER2_REPLACEMENT_FUNCTIONS["EXIT_IF"] = tier2_replace_exit_if @@ -201,8 +202,9 @@ def generate_tier2( continue if uop.is_super(): continue - if not uop.is_viable(): - out.emit(f"/* {uop.name} is not a viable micro-op for tier 2 */\n\n") + why_not_viable = uop.why_not_viable() + if why_not_viable is not None: + out.emit(f"/* {uop.name} is not a viable micro-op for tier 2 because it {why_not_viable} */\n\n") continue out.emit(f"case {uop.name}: {{\n") declare_variables(uop, out) diff --git a/Tools/cases_generator/uop_metadata_generator.py b/Tools/cases_generator/uop_metadata_generator.py index 72eed3041c55c9..7b3325ada4a49f 100644 --- a/Tools/cases_generator/uop_metadata_generator.py +++ b/Tools/cases_generator/uop_metadata_generator.py @@ -15,10 +15,10 @@ write_header, cflags, ) +from stack import Stack from cwriter import CWriter from typing import TextIO - DEFAULT_OUTPUT = ROOT / "Include/internal/pycore_uop_metadata.h" @@ -26,6 +26,7 @@ def generate_names_and_flags(analysis: Analysis, out: CWriter) -> None: out.emit("extern const uint16_t _PyUop_Flags[MAX_UOP_ID+1];\n") out.emit("extern const uint8_t _PyUop_Replication[MAX_UOP_ID+1];\n") out.emit("extern const char * const _PyOpcode_uop_name[MAX_UOP_ID+1];\n\n") + out.emit("extern int _PyUop_num_popped(int opcode, int oparg);\n\n") out.emit("#ifdef NEED_OPCODE_METADATA\n") out.emit("const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = {\n") for uop in analysis.uops.values(): @@ -44,6 +45,20 @@ def generate_names_and_flags(analysis: Analysis, out: CWriter) -> None: if uop.is_viable() and uop.properties.tier != 1: out.emit(f'[{uop.name}] = "{uop.name}",\n') out.emit("};\n") + out.emit("int _PyUop_num_popped(int opcode, int oparg)\n{\n") + out.emit("switch(opcode) {\n") + for uop in analysis.uops.values(): + if uop.is_viable() and uop.properties.tier != 1: + stack = Stack() + for var in reversed(uop.stack.inputs): + stack.pop(var) + popped = (-stack.base_offset).to_c() + out.emit(f"case {uop.name}:\n") + out.emit(f" return {popped};\n") + out.emit("default:\n") + out.emit(" return -1;\n") + out.emit("}\n") + out.emit("}\n\n") out.emit("#endif // 
NEED_OPCODE_METADATA\n\n") diff --git a/Tools/clinic/clinic.py b/Tools/clinic/clinic.py index c9641cb9c82bf7..ea480e61ba9a2b 100755 --- a/Tools/clinic/clinic.py +++ b/Tools/clinic/clinic.py @@ -56,7 +56,7 @@ from libclinic.block_parser import Block, BlockParser from libclinic.crenderdata import CRenderData, Include, TemplateDict from libclinic.converter import ( - CConverter, CConverterClassT, + CConverter, CConverterClassT, ConverterType, converters, legacy_converters) @@ -1988,13 +1988,38 @@ def parse_file( libclinic.write_file(output, cooked) +@functools.cache +def _create_parser_base_namespace() -> dict[str, Any]: + ns = dict( + CConverter=CConverter, + CReturnConverter=CReturnConverter, + buffer=buffer, + robuffer=robuffer, + rwbuffer=rwbuffer, + unspecified=unspecified, + NoneType=NoneType, + ) + for name, converter in converters.items(): + ns[f'{name}_converter'] = converter + for name, return_converter in return_converters.items(): + ns[f'{name}_return_converter'] = return_converter + return ns + + +def create_parser_namespace() -> dict[str, Any]: + base_namespace = _create_parser_base_namespace() + return base_namespace.copy() + + + class PythonParser: def __init__(self, clinic: Clinic) -> None: pass def parse(self, block: Block) -> None: + namespace = create_parser_namespace() with contextlib.redirect_stdout(io.StringIO()) as s: - exec(block.input) + exec(block.input, namespace) block.output = s.getvalue() @@ -3443,7 +3468,6 @@ class float_return_converter(double_return_converter): def eval_ast_expr( node: ast.expr, - globals: dict[str, Any], *, filename: str = '-' ) -> Any: @@ -3460,8 +3484,9 @@ def eval_ast_expr( node = node.value expr = ast.Expression(node) + namespace = create_parser_namespace() co = compile(expr, filename, 'eval') - fn = FunctionType(co, globals) + fn = FunctionType(co, namespace) return fn() @@ -4463,12 +4488,11 @@ def parse_converter( case ast.Name(name): return name, False, {} case ast.Call(func=ast.Name(name)): - symbols = globals() kwargs: ConverterArgs = {} for node in annotation.keywords: if not isinstance(node.arg, str): fail("Cannot use a kwarg splat in a function-call annotation") - kwargs[node.arg] = eval_ast_expr(node.value, symbols) + kwargs[node.arg] = eval_ast_expr(node.value) return name, False, kwargs case _: fail( @@ -4984,25 +5008,21 @@ def run_clinic(parser: argparse.ArgumentParser, ns: argparse.Namespace) -> None: parser.error( "can't specify --converters and a filename at the same time" ) - converters: list[tuple[str, str]] = [] - return_converters: list[tuple[str, str]] = [] - ignored = set(""" - add_c_converter - add_c_return_converter - add_default_legacy_c_converter - add_legacy_c_converter - """.strip().split()) - module = globals() - for name in module: - for suffix, ids in ( - ("_return_converter", return_converters), - ("_converter", converters), - ): - if name in ignored: - continue - if name.endswith(suffix): - ids.append((name, name.removesuffix(suffix))) - break + AnyConverterType = ConverterType | ReturnConverterType + converter_list: list[tuple[str, AnyConverterType]] = [] + return_converter_list: list[tuple[str, AnyConverterType]] = [] + + for name, converter in converters.items(): + converter_list.append(( + name, + converter, + )) + for name, return_converter in return_converters.items(): + return_converter_list.append(( + name, + return_converter + )) + print() print("Legacy converters:") @@ -5012,15 +5032,17 @@ def run_clinic(parser: argparse.ArgumentParser, ns: argparse.Namespace) -> None: print() for title, 
attribute, ids in ( - ("Converters", 'converter_init', converters), - ("Return converters", 'return_converter_init', return_converters), + ("Converters", 'converter_init', converter_list), + ("Return converters", 'return_converter_init', return_converter_list), ): print(title + ":") + + ids.sort(key=lambda item: item[0].lower()) longest = -1 - for name, short_name in ids: - longest = max(longest, len(short_name)) - for name, short_name in sorted(ids, key=lambda x: x[1].lower()): - cls = module[name] + for name, _ in ids: + longest = max(longest, len(name)) + + for name, cls in ids: callable = getattr(cls, attribute, None) if not callable: continue @@ -5033,7 +5055,7 @@ def run_clinic(parser: argparse.ArgumentParser, ns: argparse.Namespace) -> None: else: s = parameter_name parameters.append(s) - print(' {}({})'.format(short_name, ', '.join(parameters))) + print(' {}({})'.format(name, ', '.join(parameters))) print() print("All converters also accept (c_default=None, py_default=None, annotation=None).") print("All return converters also accept (py_default=None).") diff --git a/Tools/gdb/libpython.py b/Tools/gdb/libpython.py index 483f28b46dfec7..656667ac93970c 100755 --- a/Tools/gdb/libpython.py +++ b/Tools/gdb/libpython.py @@ -1753,8 +1753,11 @@ def is_waiting_for_gil(self): return (name == 'take_gil') def is_gc_collect(self): - '''Is this frame gc_collect_main() within the garbage-collector?''' - return self._gdbframe.name() in ('collect', 'gc_collect_main') + '''Is this frame a collector within the garbage-collector?''' + return self._gdbframe.name() in ( + 'collect', 'gc_collect_full', 'gc_collect_main', + 'gc_collect_young', 'gc_collect_increment', + ) def get_pyop(self): try: diff --git a/Tools/jit/_stencils.py b/Tools/jit/_stencils.py index 78c566d9c8a7ef..601ea0b70701a5 100644 --- a/Tools/jit/_stencils.py +++ b/Tools/jit/_stencils.py @@ -31,6 +31,12 @@ class HoleValue(enum.Enum): OPERAND = enum.auto() # The current uop's target (exposed as _JIT_TARGET): TARGET = enum.auto() + # The base address of the machine code for the jump target (exposed as _JIT_JUMP_TARGET): + JUMP_TARGET = enum.auto() + # The base address of the machine code for the error jump target (exposed as _JIT_ERROR_TARGET): + ERROR_TARGET = enum.auto() + # The index of the exit to be jumped through (exposed as _JIT_EXIT_INDEX): + EXIT_INDEX = enum.auto() # The base address of the machine code for the first uop (exposed as _JIT_TOP): TOP = enum.auto() # A hardcoded value of zero (used for symbol lookups): @@ -124,7 +130,7 @@ def emit_aarch64_trampoline(self, hole: Hole) -> None: ): self.holes.append(hole.replace(offset=base + 4 * i, kind=kind)) - def remove_jump(self) -> None: + def remove_jump(self, *, alignment: int = 1) -> None: """Remove a zero-length continuation jump, if it exists.""" hole = max(self.holes, key=lambda hole: hole.offset) match hole: @@ -170,7 +176,7 @@ def remove_jump(self) -> None: offset -= 2 case _: return - if self.body[offset:] == jump: + if self.body[offset:] == jump and offset % alignment == 0: self.body = self.body[:offset] self.holes.remove(hole) @@ -199,9 +205,8 @@ def process_relocations(self, *, alignment: int = 1) -> None: ): self.code.pad(alignment) self.code.emit_aarch64_trampoline(hole) - self.code.pad(alignment) self.code.holes.remove(hole) - self.code.remove_jump() + self.code.remove_jump(alignment=alignment) self.code.pad(alignment) self.data.pad(8) for stencil in [self.code, self.data]: diff --git a/Tools/jit/_targets.py b/Tools/jit/_targets.py index 
417fdb56ccf7a1..66db358679239e 100644 --- a/Tools/jit/_targets.py +++ b/Tools/jit/_targets.py @@ -89,7 +89,7 @@ async def _parse(self, path: pathlib.Path) -> _stencils.StencilGroup: if group.data.body: line = f"0: {str(bytes(group.data.body)).removeprefix('b')}" group.data.disassembly.append(line) - group.process_relocations() + group.process_relocations(alignment=self.alignment) return group def _handle_section(self, section: _S, group: _stencils.StencilGroup) -> None: diff --git a/Tools/jit/template.c b/Tools/jit/template.c index 504e6c875525ae..9b4fc2af9671eb 100644 --- a/Tools/jit/template.c +++ b/Tools/jit/template.c @@ -64,9 +64,17 @@ do { \ TYPE NAME = (TYPE)(uint64_t)&ALIAS; #define PATCH_JUMP(ALIAS) \ +do { \ PyAPI_DATA(void) ALIAS; \ __attribute__((musttail)) \ - return ((jit_func)&ALIAS)(frame, stack_pointer, tstate); + return ((jit_func)&ALIAS)(frame, stack_pointer, tstate); \ +} while (0) + +#undef JUMP_TO_JUMP_TARGET +#define JUMP_TO_JUMP_TARGET() PATCH_JUMP(_JIT_JUMP_TARGET) + +#undef JUMP_TO_ERROR +#define JUMP_TO_ERROR() PATCH_JUMP(_JIT_ERROR_TARGET) _Py_CODEUNIT * _JIT_ENTRY(_PyInterpreterFrame *frame, PyObject **stack_pointer, PyThreadState *tstate) @@ -79,6 +87,7 @@ _JIT_ENTRY(_PyInterpreterFrame *frame, PyObject **stack_pointer, PyThreadState * PATCH_VALUE(uint16_t, _oparg, _JIT_OPARG) PATCH_VALUE(uint64_t, _operand, _JIT_OPERAND) PATCH_VALUE(uint32_t, _target, _JIT_TARGET) + PATCH_VALUE(uint16_t, _exit_index, _JIT_EXIT_INDEX) // The actual instruction definitions (only one will be used): if (opcode == _JUMP_TO_TOP) { CHECK_EVAL_BREAKER(); @@ -91,28 +100,16 @@ _JIT_ENTRY(_PyInterpreterFrame *frame, PyObject **stack_pointer, PyThreadState * } PATCH_JUMP(_JIT_CONTINUE); // Labels that the instruction implementations expect to exist: -unbound_local_error_tier_two: - _PyEval_FormatExcCheckArg( - tstate, PyExc_UnboundLocalError, UNBOUNDLOCAL_ERROR_MSG, - PyTuple_GetItem(_PyFrame_GetCode(frame)->co_localsplusnames, oparg)); - goto error_tier_two; -pop_4_error_tier_two: - STACK_SHRINK(1); -pop_3_error_tier_two: - STACK_SHRINK(1); -pop_2_error_tier_two: - STACK_SHRINK(1); -pop_1_error_tier_two: - STACK_SHRINK(1); + error_tier_two: tstate->previous_executor = (PyObject *)current_executor; GOTO_TIER_ONE(NULL); -deoptimize: +exit_to_tier1: tstate->previous_executor = (PyObject *)current_executor; GOTO_TIER_ONE(_PyCode_CODE(_PyFrame_GetCode(frame)) + _target); -side_exit: +exit_to_trace: { - _PyExitData *exit = ¤t_executor->exits[_target]; + _PyExitData *exit = ¤t_executor->exits[_exit_index]; Py_INCREF(exit->executor); tstate->previous_executor = (PyObject *)current_executor; GOTO_TIER_TWO(exit->executor); diff --git a/Tools/scripts/summarize_stats.py b/Tools/scripts/summarize_stats.py index 6af14e1b769b80..d40106b8682388 100644 --- a/Tools/scripts/summarize_stats.py +++ b/Tools/scripts/summarize_stats.py @@ -513,6 +513,8 @@ def get_optimizer_stats(self) -> dict[str, tuple[int, int | None]]: attempts = self._data["Optimization optimizer attempts"] successes = self._data["Optimization optimizer successes"] no_memory = self._data["Optimization optimizer failure no memory"] + builtins_changed = self._data["Optimizer remove globals builtins changed"] + incorrect_keys = self._data["Optimizer remove globals incorrect keys"] return { Doc( @@ -527,6 +529,14 @@ def get_optimizer_stats(self) -> dict[str, tuple[int, int | None]]: "Optimizer no memory", "The number of optimizations that failed due to no memory.", ): (no_memory, attempts), + Doc( + "Remove globals builtins changed", + "The 
builtins changed during optimization", + ): (builtins_changed, attempts), + Doc( + "Remove globals incorrect keys", + "The keys in the globals dictionary aren't what was expected", + ): (incorrect_keys, attempts), } def get_histogram(self, prefix: str) -> list[tuple[int, int]]: @@ -1177,6 +1187,17 @@ def calc_unsupported_opcodes_table(stats: Stats) -> Rows: reverse=True, ) + def calc_error_in_opcodes_table(stats: Stats) -> Rows: + error_in_opcodes = stats.get_opcode_stats("error_in_opcode") + return sorted( + [ + (opcode, Count(count)) + for opcode, count in error_in_opcodes.get_opcode_counts().items() + ], + key=itemgetter(1), + reverse=True, + ) + def iter_optimization_tables(base_stats: Stats, head_stats: Stats | None = None): if not base_stats.get_optimization_stats() or ( head_stats is not None and not head_stats.get_optimization_stats() @@ -1223,6 +1244,11 @@ def iter_optimization_tables(base_stats: Stats, head_stats: Stats | None = None) ) ], ) + yield Section( + "Optimizer errored out with opcode", + "Optimization stopped after encountering this opcode", + [Table(("Opcode", "Count:"), calc_error_in_opcodes_table, JoinMode.CHANGE)], + ) return Section( "Optimization (Tier 2) stats", diff --git a/configure b/configure index 07dce38c92724f..542783e723d934 100755 --- a/configure +++ b/configure @@ -976,7 +976,7 @@ LDFLAGS CFLAGS CC HAS_XCRUN -IOS_DEPLOYMENT_TARGET +IPHONEOS_DEPLOYMENT_TARGET EXPORT_MACOSX_DEPLOYMENT_TARGET CONFIGURE_MACOSX_DEPLOYMENT_TARGET _PYTHON_HOST_PLATFORM @@ -4442,15 +4442,16 @@ if test "$cross_compiling" = yes; then _host_device=`echo $host | cut -d '-' -f4` _host_device=${_host_device:=os} - IOS_DEPLOYMENT_TARGET=${_host_os:3} - IOS_DEPLOYMENT_TARGET=${IOS_DEPLOYMENT_TARGET:=12.0} + # IPHONEOS_DEPLOYMENT_TARGET is the minimum supported iOS version + IPHONEOS_DEPLOYMENT_TARGET=${_host_os:3} + IPHONEOS_DEPLOYMENT_TARGET=${IPHONEOS_DEPLOYMENT_TARGET:=12.0} case "$host_cpu" in aarch64) - _host_ident=${IOS_DEPLOYMENT_TARGET}-arm64-iphone${_host_device} + _host_ident=${IPHONEOS_DEPLOYMENT_TARGET}-arm64-iphone${_host_device} ;; *) - _host_ident=${IOS_DEPLOYMENT_TARGET}-$host_cpu-iphone${_host_device} + _host_ident=${IPHONEOS_DEPLOYMENT_TARGET}-$host_cpu-iphone${_host_device} ;; esac ;; @@ -4597,6 +4598,9 @@ fi CONFIGURE_MACOSX_DEPLOYMENT_TARGET= EXPORT_MACOSX_DEPLOYMENT_TARGET='#' +# Record the value of IPHONEOS_DEPLOYMENT_TARGET enforced by the selected host triple. + + # checks for alternative programs # compiler flags are generated in two sets, BASECFLAGS and OPT. OPT is just @@ -4632,9 +4636,8 @@ esac case $ac_sys_system in #( iOS) : - as_fn_append CFLAGS " -mios-version-min=${IOS_DEPLOYMENT_TARGET}" - as_fn_append LDFLAGS " -mios-version-min=${IOS_DEPLOYMENT_TARGET}" - + as_fn_append CFLAGS " -mios-version-min=${IPHONEOS_DEPLOYMENT_TARGET}" + as_fn_append LDFLAGS " -mios-version-min=${IPHONEOS_DEPLOYMENT_TARGET}" ;; #( *) : ;; @@ -12740,7 +12743,6 @@ if test -z "$SHLIB_SUFFIX"; then esac ;; CYGWIN*) SHLIB_SUFFIX=.dll;; - iOS) SHLIB_SUFFIX=.dylib;; *) SHLIB_SUFFIX=.so;; esac fi @@ -17596,13 +17598,21 @@ fi if test "$ac_sys_system" = "Linux-android"; then # When these functions are used in an unprivileged process, they crash rather # than returning an error. - privileged_funcs="chroot initgroups setegid seteuid setgid setregid setresgid - setresuid setreuid setuid" - - # These functions are unimplemented and always return an error. 
- unimplemented_funcs="sem_open sem_unlink" + blocked_funcs="chroot initgroups setegid seteuid setgid sethostname + setregid setresgid setresuid setreuid setuid" + + # These functions are unimplemented and always return an error + # (https://android.googlesource.com/platform/system/sepolicy/+/refs/heads/android13-release/public/domain.te#1044) + blocked_funcs="$blocked_funcs sem_open sem_unlink" + + # Before API level 23, when fchmodat is called with the unimplemented flag + # AT_SYMLINK_NOFOLLOW, instead of returning ENOTSUP as it should, it actually + # follows the symlink. + if test "$ANDROID_API_LEVEL" -lt 23; then + blocked_funcs="$blocked_funcs fchmodat" + fi - for name in $privileged_funcs $unimplemented_funcs; do + for name in $blocked_funcs; do as_func_var=`printf "%s\n" "ac_cv_func_$name" | $as_tr_sh` eval "$as_func_var=no" @@ -22157,6 +22167,10 @@ fi done fi +# On Android before API level 23, clock_nanosleep returns the wrong value when +# interrupted by a signal (https://issuetracker.google.com/issues/216495770). +if ! { test "$ac_sys_system" = "Linux-android" && + test "$ANDROID_API_LEVEL" -lt 23; }; then for ac_func in clock_nanosleep do : @@ -22167,7 +22181,7 @@ then : else $as_nop - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for clock_nanosleep in -lrt" >&5 + { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for clock_nanosleep in -lrt" >&5 printf %s "checking for clock_nanosleep in -lrt... " >&6; } if test ${ac_cv_lib_rt_clock_nanosleep+y} then : @@ -22205,7 +22219,7 @@ printf "%s\n" "$ac_cv_lib_rt_clock_nanosleep" >&6; } if test "x$ac_cv_lib_rt_clock_nanosleep" = xyes then : - printf "%s\n" "#define HAVE_CLOCK_NANOSLEEP 1" >>confdefs.h + printf "%s\n" "#define HAVE_CLOCK_NANOSLEEP 1" >>confdefs.h fi @@ -22214,6 +22228,7 @@ fi fi done +fi for ac_func in nanosleep @@ -27485,6 +27500,8 @@ else $as_nop with_ensurepip=no ;; #( WASI) : with_ensurepip=no ;; #( + iOS) : + with_ensurepip=no ;; #( *) : with_ensurepip=upgrade ;; diff --git a/configure.ac b/configure.ac index 3e676c56693a3c..fc62bfe5a1d4c4 100644 --- a/configure.ac +++ b/configure.ac @@ -715,16 +715,16 @@ if test "$cross_compiling" = yes; then _host_device=`echo $host | cut -d '-' -f4` _host_device=${_host_device:=os} - dnl IOS_DEPLOYMENT_TARGET is the minimum supported iOS version - IOS_DEPLOYMENT_TARGET=${_host_os:3} - IOS_DEPLOYMENT_TARGET=${IOS_DEPLOYMENT_TARGET:=12.0} + # IPHONEOS_DEPLOYMENT_TARGET is the minimum supported iOS version + IPHONEOS_DEPLOYMENT_TARGET=${_host_os:3} + IPHONEOS_DEPLOYMENT_TARGET=${IPHONEOS_DEPLOYMENT_TARGET:=12.0} case "$host_cpu" in aarch64) - _host_ident=${IOS_DEPLOYMENT_TARGET}-arm64-iphone${_host_device} + _host_ident=${IPHONEOS_DEPLOYMENT_TARGET}-arm64-iphone${_host_device} ;; *) - _host_ident=${IOS_DEPLOYMENT_TARGET}-$host_cpu-iphone${_host_device} + _host_ident=${IPHONEOS_DEPLOYMENT_TARGET}-$host_cpu-iphone${_host_device} ;; esac ;; @@ -866,6 +866,9 @@ AC_SUBST([EXPORT_MACOSX_DEPLOYMENT_TARGET]) CONFIGURE_MACOSX_DEPLOYMENT_TARGET= EXPORT_MACOSX_DEPLOYMENT_TARGET='#' +# Record the value of IPHONEOS_DEPLOYMENT_TARGET enforced by the selected host triple. +AC_SUBST([IPHONEOS_DEPLOYMENT_TARGET]) + # checks for alternative programs # compiler flags are generated in two sets, BASECFLAGS and OPT. OPT is just @@ -901,9 +904,8 @@ AS_CASE([$host], dnl Add the compiler flag for the iOS minimum supported OS version. 
AS_CASE([$ac_sys_system], [iOS], [ - AS_VAR_APPEND([CFLAGS], [" -mios-version-min=${IOS_DEPLOYMENT_TARGET}"]) - AS_VAR_APPEND([LDFLAGS], [" -mios-version-min=${IOS_DEPLOYMENT_TARGET}"]) - AC_SUBST([IOS_DEPLOYMENT_TARGET]) + AS_VAR_APPEND([CFLAGS], [" -mios-version-min=${IPHONEOS_DEPLOYMENT_TARGET}"]) + AS_VAR_APPEND([LDFLAGS], [" -mios-version-min=${IPHONEOS_DEPLOYMENT_TARGET}"]) ], ) @@ -3285,7 +3287,6 @@ if test -z "$SHLIB_SUFFIX"; then esac ;; CYGWIN*) SHLIB_SUFFIX=.dll;; - iOS) SHLIB_SUFFIX=.dylib;; *) SHLIB_SUFFIX=.so;; esac fi @@ -4935,13 +4936,21 @@ fi if test "$ac_sys_system" = "Linux-android"; then # When these functions are used in an unprivileged process, they crash rather # than returning an error. - privileged_funcs="chroot initgroups setegid seteuid setgid setregid setresgid - setresuid setreuid setuid" - - # These functions are unimplemented and always return an error. - unimplemented_funcs="sem_open sem_unlink" + blocked_funcs="chroot initgroups setegid seteuid setgid sethostname + setregid setresgid setresuid setreuid setuid" + + # These functions are unimplemented and always return an error + # (https://android.googlesource.com/platform/system/sepolicy/+/refs/heads/android13-release/public/domain.te#1044) + blocked_funcs="$blocked_funcs sem_open sem_unlink" + + # Before API level 23, when fchmodat is called with the unimplemented flag + # AT_SYMLINK_NOFOLLOW, instead of returning ENOTSUP as it should, it actually + # follows the symlink. + if test "$ANDROID_API_LEVEL" -lt 23; then + blocked_funcs="$blocked_funcs fchmodat" + fi - for name in $privileged_funcs $unimplemented_funcs; do + for name in $blocked_funcs; do AS_VAR_PUSHDEF([func_var], [ac_cv_func_$name]) AS_VAR_SET([func_var], [no]) AS_VAR_POPDEF([func_var]) @@ -5304,11 +5313,16 @@ then ]) fi -AC_CHECK_FUNCS([clock_nanosleep], [], [ - AC_CHECK_LIB([rt], [clock_nanosleep], [ - AC_DEFINE([HAVE_CLOCK_NANOSLEEP], [1]) - ]) -]) +# On Android before API level 23, clock_nanosleep returns the wrong value when +# interrupted by a signal (https://issuetracker.google.com/issues/216495770). +if ! { test "$ac_sys_system" = "Linux-android" && + test "$ANDROID_API_LEVEL" -lt 23; }; then + AC_CHECK_FUNCS([clock_nanosleep], [], [ + AC_CHECK_LIB([rt], [clock_nanosleep], [ + AC_DEFINE([HAVE_CLOCK_NANOSLEEP], [1]) + ]) + ]) +fi AC_CHECK_FUNCS([nanosleep], [], [ AC_CHECK_LIB([rt], [nanosleep], [ @@ -6927,6 +6941,7 @@ AC_ARG_WITH([ensurepip], AS_CASE([$ac_sys_system], [Emscripten], [with_ensurepip=no], [WASI], [with_ensurepip=no], + [iOS], [with_ensurepip=no], [with_ensurepip=upgrade] ) ]) diff --git a/iOS/README.rst b/iOS/README.rst index b67199e66f95b3..df429b64cec77f 100644 --- a/iOS/README.rst +++ b/iOS/README.rst @@ -182,7 +182,10 @@ This can be done by defining the ``LIBLZMA_CFLAGS``, ``LIBLZMA_LIBS``, ``BZIP2_CFLAGS``, ``BZIP2_LIBS``, ``LIBFFI_CFLAGS``, and ``LIBFFI_LIBS`` environment variables, and the ``--with-openssl`` configure option. Versions of these libraries pre-compiled for iOS can be found in `this repository -`__. +`__. LibFFI is +especially important, as many parts of the standard library (including the +``platform``, ``sysconfig`` and ``webbrowser`` modules) require the use of the +``ctypes`` module at runtime. By default, Python will be compiled with an iOS deployment target (i.e., the minimum supported iOS version) of 12.0. 
To specify a different deployment @@ -248,16 +251,11 @@ the XCframework:: cp path/to/iphoneos/bin Python.xcframework/ios-arm64 cp path/to/iphoneos/lib Python.xcframework/ios-arm64 - cp path/to/iphonesimulator/bin Python.xcframework/ios-arm64_x86-64-simulator - cp path/to/iphonesimulator/lib Python.xcframework/ios-arm64_x86-64-simulator + cp path/to/iphonesimulator/bin Python.xcframework/ios-arm64_x86_64-simulator + cp path/to/iphonesimulator/lib Python.xcframework/ios-arm64_x86_64-simulator Note that the name of the architecture-specific slice for the simulator will -depend on the CPU architecture that you build. - -Then, add symbolic links to "common" platform names for each slice:: - - ln -si ios-arm64 Python.xcframework/iphoneos - ln -si ios-arm64_x86-64-simulator Python.xcframework/iphonesimulator +depend on the CPU architecture(s) that you build. You now have a Python.xcframework that can be used in a project. @@ -306,6 +304,49 @@ Debugging test failures The easiest way to diagnose a single test failure is to open the testbed project in Xcode and run the tests from there using the "Product > Test" menu item. +To test in Xcode, you must ensure the testbed project has a copy of a compiled +framework. If you've configured your build with the default install location of +``iOS/Frameworks``, you can copy from that location into the test project. To +test on an ARM64 simulator, run:: + + $ rm -rf iOS/testbed/Python.xcframework/ios-arm64_x86_64-simulator/* + $ cp -r iOS/Frameworks/arm64-iphonesimulator/* iOS/testbed/Python.xcframework/ios-arm64_x86_64-simulator + +To test on an x86-64 simulator, run:: + + $ rm -rf iOS/testbed/Python.xcframework/ios-arm64_x86_64-simulator/* + $ cp -r iOS/Frameworks/x86_64-iphonesimulator/* iOS/testbed/Python.xcframework/ios-arm64_x86_64-simulator + +To test on a physical device:: + + $ rm -rf iOS/testbed/Python.xcframework/ios-arm64/* + $ cp -r iOS/Frameworks/arm64-iphoneos/* iOS/testbed/Python.xcframework/ios-arm64 + +Alternatively, you can configure your build to install directly into the +testbed project. For a simulator, use:: + + --enable-framework=$(pwd)/iOS/testbed/Python.xcframework/ios-arm64_x86_64-simulator + +For a physical device, use:: + + --enable-framework=$(pwd)/iOS/testbed/Python.xcframework/ios-arm64 + + +Testing on an iOS device +^^^^^^^^^^^^^^^^^^^^^^^^ + +To test on an iOS device, the app needs to be signed with known developer +credentials. To obtain these credentials, you must have an iOS Developer +account, and your Xcode install will need to be logged into your account (see +the Accounts tab of the Preferences dialog). + +Once the project is open, and you're signed into your Apple Developer account, +select the root node of the project tree (labeled "iOSTestbed"), then the +"Signing & Capabilities" tab in the details page. Select a development team +(this will likely be your own name), and plug in a physical device to your +macOS machine with a USB cable. You should then be able to select your physical +device from the list of targets in the pulldown in the Xcode titlebar. + Running specific tests ^^^^^^^^^^^^^^^^^^^^^^ diff --git a/iOS/Resources/Info.plist.in b/iOS/Resources/Info.plist.in index 3ecdc894f0a285..c3e261ecd9eff7 100644 --- a/iOS/Resources/Info.plist.in +++ b/iOS/Resources/Info.plist.in @@ -17,7 +17,7 @@ CFBundlePackageType FMWK CFBundleShortVersionString - %VERSION% + @VERSION@ CFBundleLongVersionString %VERSION%, (c) 2001-2024 Python Software Foundation. 
CFBundleSignature @@ -29,6 +29,6 @@ iPhoneOS MinimumOSVersion - @IOS_DEPLOYMENT_TARGET@ + @IPHONEOS_DEPLOYMENT_TARGET@ diff --git a/iOS/testbed/iOSTestbed.xcodeproj/project.pbxproj b/iOS/testbed/iOSTestbed.xcodeproj/project.pbxproj index 4f138a4e7ccefd..d57cfc3dbe0304 100644 --- a/iOS/testbed/iOSTestbed.xcodeproj/project.pbxproj +++ b/iOS/testbed/iOSTestbed.xcodeproj/project.pbxproj @@ -273,7 +273,7 @@ ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; - shellScript = "set -e\n\ninstall_dylib () {\n INSTALL_BASE=$1\n FULL_DYLIB=$2\n\n # The name of the .dylib file\n DYLIB=$(basename \"$FULL_DYLIB\")\n # The name of the .dylib file, relative to the install base\n RELATIVE_DYLIB=${FULL_DYLIB#$CODESIGNING_FOLDER_PATH/$INSTALL_BASE/}\n # The full dotted name of the binary module, constructed from the file path.\n FULL_MODULE_NAME=$(echo $RELATIVE_DYLIB | cut -d \".\" -f 1 | tr \"/\" \".\"); \n # A bundle identifier; not actually used, but required by Xcode framework packaging\n FRAMEWORK_BUNDLE_ID=$(echo $PRODUCT_BUNDLE_IDENTIFIER.$FULL_MODULE_NAME | tr \"_\" \"-\")\n # The name of the framework folder.\n FRAMEWORK_FOLDER=\"Frameworks/$FULL_MODULE_NAME.framework\"\n\n # If the framework folder doesn't exist, create it.\n if [ ! -d \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER\" ]; then\n echo \"Creating framework for $RELATIVE_DYLIB\" \n mkdir -p \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER\"\n cp \"$CODESIGNING_FOLDER_PATH/dylib-Info-template.plist\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist\"\n plutil -replace CFBundleExecutable -string \"$DYLIB\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist\"\n plutil -replace CFBundleIdentifier -string \"$FRAMEWORK_BUNDLE_ID\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist\" \n fi\n \n echo \"Installing binary for $RELATIVE_DYLIB\" \n mv \"$FULL_DYLIB\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER\"\n}\n\nPYTHON_VER=$(ls -1 \"$CODESIGNING_FOLDER_PATH/python/lib\")\necho \"Install Python $PYTHON_VER standard library dylibs...\"\nfind \"$CODESIGNING_FOLDER_PATH/python/lib/$PYTHON_VER/lib-dynload\" -name \"*.dylib\" | while read FULL_DYLIB; do\n install_dylib python/lib/$PYTHON_VER/lib-dynload \"$FULL_DYLIB\"\ndone\n\n# Clean up dylib template \nrm -f \"$CODESIGNING_FOLDER_PATH/dylib-Info-template.plist\"\necho \"Signing frameworks as $EXPANDED_CODE_SIGN_IDENTITY_NAME ($EXPANDED_CODE_SIGN_IDENTITY)...\"\nfind \"$CODESIGNING_FOLDER_PATH/Frameworks\" -name \"*.framework\" -exec /usr/bin/codesign --force --sign \"$EXPANDED_CODE_SIGN_IDENTITY\" ${OTHER_CODE_SIGN_FLAGS:-} -o runtime --timestamp=none --preserve-metadata=identifier,entitlements,flags --generate-entitlement-der \"{}\" \\; \n"; + shellScript = "set -e\n\ninstall_dylib () {\n INSTALL_BASE=$1\n FULL_EXT=$2\n\n # The name of the extension file\n EXT=$(basename \"$FULL_EXT\")\n # The location of the extension file, relative to the bundle\n RELATIVE_EXT=${FULL_EXT#$CODESIGNING_FOLDER_PATH/} \n # The path to the extension file, relative to the install base\n PYTHON_EXT=${RELATIVE_EXT/$INSTALL_BASE/}\n # The full dotted name of the extension module, constructed from the file path.\n FULL_MODULE_NAME=$(echo $PYTHON_EXT | cut -d \".\" -f 1 | tr \"/\" \".\"); \n # A bundle identifier; not actually used, but required by Xcode framework packaging\n FRAMEWORK_BUNDLE_ID=$(echo $PRODUCT_BUNDLE_IDENTIFIER.$FULL_MODULE_NAME | tr \"_\" \"-\")\n # The name of the framework folder.\n FRAMEWORK_FOLDER=\"Frameworks/$FULL_MODULE_NAME.framework\"\n\n # If the framework folder 
doesn't exist, create it.\n if [ ! -d \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER\" ]; then\n echo \"Creating framework for $RELATIVE_EXT\" \n mkdir -p \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER\"\n cp \"$CODESIGNING_FOLDER_PATH/dylib-Info-template.plist\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist\"\n plutil -replace CFBundleExecutable -string \"$FULL_MODULE_NAME\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist\"\n plutil -replace CFBundleIdentifier -string \"$FRAMEWORK_BUNDLE_ID\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/Info.plist\"\n fi\n \n echo \"Installing binary for $FRAMEWORK_FOLDER/$FULL_MODULE_NAME\" \n mv \"$FULL_EXT\" \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/$FULL_MODULE_NAME\"\n # Create a placeholder .fwork file where the .so was\n echo \"$FRAMEWORK_FOLDER/$FULL_MODULE_NAME\" > ${FULL_EXT%.so}.fwork\n # Create a back reference to the .so file location in the framework\n echo \"${RELATIVE_EXT%.so}.fwork\" > \"$CODESIGNING_FOLDER_PATH/$FRAMEWORK_FOLDER/$FULL_MODULE_NAME.origin\" \n}\n\nPYTHON_VER=$(ls -1 \"$CODESIGNING_FOLDER_PATH/python/lib\")\necho \"Install Python $PYTHON_VER standard library extension modules...\"\nfind \"$CODESIGNING_FOLDER_PATH/python/lib/$PYTHON_VER/lib-dynload\" -name \"*.so\" | while read FULL_EXT; do\n install_dylib python/lib/$PYTHON_VER/lib-dynload/ \"$FULL_EXT\"\ndone\n\n# Clean up dylib template \nrm -f \"$CODESIGNING_FOLDER_PATH/dylib-Info-template.plist\"\necho \"Signing frameworks as $EXPANDED_CODE_SIGN_IDENTITY_NAME ($EXPANDED_CODE_SIGN_IDENTITY)...\"\nfind \"$CODESIGNING_FOLDER_PATH/Frameworks\" -name \"*.framework\" -exec /usr/bin/codesign --force --sign \"$EXPANDED_CODE_SIGN_IDENTITY\" ${OTHER_CODE_SIGN_FLAGS:-} -o runtime --timestamp=none --preserve-metadata=identifier,entitlements,flags --generate-entitlement-der \"{}\" \\; \n"; }; /* End PBXShellScriptBuildPhase section */ @@ -441,7 +441,7 @@ CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = NO; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_TEAM = 3HEZE76D99; + DEVELOPMENT_TEAM = ""; ENABLE_USER_SCRIPT_SANDBOXING = NO; HEADER_SEARCH_PATHS = "\"$(BUILT_PRODUCTS_DIR)/Python.framework/Headers\""; INFOPLIST_FILE = "iOSTestbed/iOSTestbed-Info.plist"; @@ -471,7 +471,7 @@ CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = NO; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_TEAM = 3HEZE76D99; + DEVELOPMENT_TEAM = ""; ENABLE_TESTABILITY = YES; ENABLE_USER_SCRIPT_SANDBOXING = NO; HEADER_SEARCH_PATHS = "\"$(BUILT_PRODUCTS_DIR)/Python.framework/Headers\""; diff --git a/iOS/testbed/iOSTestbedTests/iOSTestbedTests.m b/iOS/testbed/iOSTestbedTests/iOSTestbedTests.m index 53ea107db4a2de..e6a919c304ec8d 100644 --- a/iOS/testbed/iOSTestbedTests/iOSTestbedTests.m +++ b/iOS/testbed/iOSTestbedTests/iOSTestbedTests.m @@ -15,7 +15,7 @@ - (void)testPython { const char *argv[] = { "iOSTestbed", // argv[0] is the process that is running. "-uall", // Enable all resources - "-v", // run in verbose mode so we get test failure information + "-W", // Display test output on failure // To run a subset of tests, add the test names below; e.g., // "test_os", // "test_sys",