diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 9e236534ae3770..e1a2a62c60c6de 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -492,6 +492,7 @@ jobs:
with:
config_hash: ${{ needs.check_source.outputs.config_hash }}
options: ./configure --config-cache --with-thread-sanitizer --with-pydebug
+ suppressions_path: Tools/tsan/supressions.txt
build_tsan_free_threading:
name: 'Thread sanitizer (free-threading)'
@@ -501,6 +502,7 @@ jobs:
with:
config_hash: ${{ needs.check_source.outputs.config_hash }}
options: ./configure --config-cache --disable-gil --with-thread-sanitizer --with-pydebug
+ suppressions_path: Tools/tsan/suppressions_free_threading.txt
# CIFuzz job based on https://google.github.io/oss-fuzz/getting-started/continuous-integration/
cifuzz:
diff --git a/.github/workflows/reusable-tsan.yml b/.github/workflows/reusable-tsan.yml
index 96a9c1b0cda3c3..8ddb3b3ada32c2 100644
--- a/.github/workflows/reusable-tsan.yml
+++ b/.github/workflows/reusable-tsan.yml
@@ -7,6 +7,10 @@ on:
options:
required: true
type: string
+ suppressions_path:
+ description: 'A repo relative path to the suppressions file'
+ required: true
+ type: string
jobs:
build_tsan_reusable:
@@ -30,7 +34,7 @@ jobs:
sudo sysctl -w vm.mmap_rnd_bits=28
- name: TSAN Option Setup
run: |
- echo "TSAN_OPTIONS=suppressions=${GITHUB_WORKSPACE}/Tools/tsan/supressions.txt" >> $GITHUB_ENV
+ echo "TSAN_OPTIONS=suppressions=${GITHUB_WORKSPACE}/${{ inputs.suppressions_path }}" >> $GITHUB_ENV
echo "CC=clang" >> $GITHUB_ENV
echo "CXX=clang++" >> $GITHUB_ENV
- name: Add ccache to PATH
diff --git a/Doc/c-api/unicode.rst b/Doc/c-api/unicode.rst
index 78eec14e3a24d6..7320d035bab513 100644
--- a/Doc/c-api/unicode.rst
+++ b/Doc/c-api/unicode.rst
@@ -523,7 +523,7 @@ APIs:
- Get the fully qualified name of an object type;
call :c:func:`PyType_GetFullyQualifiedName`.
- * - ``T#``
+ * - ``#T``
- :c:expr:`PyObject*`
- Similar to ``T`` format, but use a colon (``:``) as separator between
the module name and the qualified name.
@@ -533,7 +533,7 @@ APIs:
- Get the fully qualified name of a type;
call :c:func:`PyType_GetFullyQualifiedName`.
- * - ``N#``
+ * - ``#N``
- :c:expr:`PyTypeObject*`
- Similar to ``N`` format, but use a colon (``:``) as separator between
the module name and the qualified name.
@@ -574,7 +574,7 @@ APIs:
copied as-is to the result string, and any extra arguments discarded.
.. versionchanged:: 3.13
- Support for ``%T``, ``%T#``, ``%N`` and ``%N#`` formats added.
+ Support for ``%T``, ``%#T``, ``%N`` and ``%#N`` formats added.
.. c:function:: PyObject* PyUnicode_FromFormatV(const char *format, va_list vargs)
diff --git a/Doc/conf.py b/Doc/conf.py
index f4c75c5758cb28..e7b688e9e6e0a8 100644
--- a/Doc/conf.py
+++ b/Doc/conf.py
@@ -12,6 +12,8 @@
sys.path.append(os.path.abspath('tools/extensions'))
sys.path.append(os.path.abspath('includes'))
+from pyspecific import SOURCE_URI
+
# General configuration
# ---------------------
@@ -24,6 +26,7 @@
'pyspecific',
'sphinx.ext.coverage',
'sphinx.ext.doctest',
+ 'sphinx.ext.extlinks',
]
# Skip if downstream redistributors haven't installed them
@@ -513,6 +516,19 @@
r'https://unix.org/version2/whatsnew/lp64_wp.html',
]
+# Options for sphinx.ext.extlinks
+# -------------------------------
+
+# This config is a dictionary of external sites,
+# mapping unique short aliases to a base URL and a prefix.
+# https://www.sphinx-doc.org/en/master/usage/extensions/extlinks.html
+extlinks = {
+ "cve": ("https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-%s", "CVE-%s"),
+ "cwe": ("https://cwe.mitre.org/data/definitions/%s.html", "CWE-%s"),
+ "pypi": ("https://pypi.org/project/%s/", "%s"),
+ "source": (SOURCE_URI, "%s"),
+}
+extlinks_detect_hardcoded_links = True
# Options for extensions
# ----------------------
diff --git a/Doc/faq/library.rst b/Doc/faq/library.rst
index e2f8004c7e3aea..b959cd73921428 100644
--- a/Doc/faq/library.rst
+++ b/Doc/faq/library.rst
@@ -616,8 +616,7 @@ use ``p.read(n)``.
("ptys") instead of pipes. Or you can use a Python interface to Don Libes'
"expect" library. A Python extension that interfaces to expect is called
"expy" and available from https://expectpy.sourceforge.net. A pure Python
- solution that works like expect is `pexpect
-   <https://pypi.org/project/pexpect/>`_.
+ solution that works like expect is :pypi:`pexpect`.
How do I access the serial (RS232) port?
@@ -625,7 +624,7 @@ How do I access the serial (RS232) port?
For Win32, OSX, Linux, BSD, Jython, IronPython:
- https://pypi.org/project/pyserial/
+ :pypi:`pyserial`
For Unix, see a Usenet post by Mitch Chapman:
diff --git a/Doc/glossary.rst b/Doc/glossary.rst
index ee8b26665d6921..05ac3edb63b65d 100644
--- a/Doc/glossary.rst
+++ b/Doc/glossary.rst
@@ -547,12 +547,12 @@ Glossary
tasks such as compression or hashing. Also, the GIL is always released
when doing I/O.
- Past efforts to create a "free-threaded" interpreter (one which locks
- shared data at a much finer granularity) have not been successful
- because performance suffered in the common single-processor case. It
- is believed that overcoming this performance issue would make the
- implementation much more complicated and therefore costlier to maintain.
-
+ As of Python 3.13, the GIL can be disabled using the :option:`--disable-gil`
+ build configuration. After building Python with this option, code must be
+   run with :option:`-X gil 0 <-X>` or after setting the :envvar:`PYTHON_GIL=0 <PYTHON_GIL>`
+ environment variable. This feature enables improved performance for
+ multi-threaded applications and makes it easier to use multi-core CPUs
+ efficiently. For more details, see :pep:`703`.
hash-based pyc
A bytecode cache file that uses the hash rather than the last-modified
@@ -800,8 +800,7 @@ Glossary
method resolution order
Method Resolution Order is the order in which base classes are searched
- for a member during lookup. See `The Python 2.3 Method Resolution Order
-   <https://www.python.org/download/releases/2.3/mro/>`_ for details of the
+ for a member during lookup. See :ref:`python_2.3_mro` for details of the
algorithm used by the Python interpreter since the 2.3 release.
module
diff --git a/Doc/howto/curses.rst b/Doc/howto/curses.rst
index 4828e2fa29bd24..f9ad81e38f8dc3 100644
--- a/Doc/howto/curses.rst
+++ b/Doc/howto/curses.rst
@@ -43,7 +43,7 @@ appearance---and the curses library will figure out what control codes
need to be sent to the terminal to produce the right output. curses
doesn't provide many user-interface concepts such as buttons, checkboxes,
or dialogs; if you need such features, consider a user interface library such as
-`Urwid `_.
+:pypi:`Urwid`.
The curses library was originally written for BSD Unix; the later System V
versions of Unix from AT&T added many enhancements and new functions. BSD curses
@@ -56,8 +56,7 @@ versions of curses carried by some proprietary Unixes may not support
everything, though.
The Windows version of Python doesn't include the :mod:`curses`
-module. A ported version called `UniCurses
-`_ is available.
+module. A ported version called :pypi:`UniCurses` is available.
The Python curses module
@@ -429,8 +428,7 @@ User Input
The C curses library offers only very simple input mechanisms. Python's
:mod:`curses` module adds a basic text-input widget. (Other libraries
-such as `Urwid `_ have more extensive
-collections of widgets.)
+such as :pypi:`Urwid` have more extensive collections of widgets.)
There are two methods for getting input from a window:
diff --git a/Doc/howto/index.rst b/Doc/howto/index.rst
index 8b334555ab6463..065071e39a06c5 100644
--- a/Doc/howto/index.rst
+++ b/Doc/howto/index.rst
@@ -33,4 +33,5 @@ Currently, the HOWTOs are:
annotations.rst
isolating-extensions.rst
timerfd.rst
+ mro.rst
diff --git a/Doc/howto/logging-cookbook.rst b/Doc/howto/logging-cookbook.rst
index 61723bc6cf256a..60d88204b795f6 100644
--- a/Doc/howto/logging-cookbook.rst
+++ b/Doc/howto/logging-cookbook.rst
@@ -1912,7 +1912,7 @@ Subclassing QueueHandler and QueueListener- a ``pynng`` example
---------------------------------------------------------------
In a similar way to the above section, we can implement a listener and handler
-using `pynng `_, which is a Python binding to
+using :pypi:`pynng`, which is a Python binding to
`NNG `_, billed as a spiritual successor to ZeroMQ.
The following snippets illustrate -- you can test them in an environment which has
``pynng`` installed. Just for variety, we present the listener first.
@@ -3575,9 +3575,8 @@ A Qt GUI for logging
A question that comes up from time to time is about how to log to a GUI
application. The `Qt `_ framework is a popular
-cross-platform UI framework with Python bindings using `PySide2
-`_ or `PyQt5
-`_ libraries.
+cross-platform UI framework with Python bindings using :pypi:`PySide2`
+or :pypi:`PyQt5` libraries.
The following example shows how to log to a Qt GUI. This introduces a simple
``QtHandler`` class which takes a callable, which should be a slot in the main
diff --git a/Doc/howto/mro.rst b/Doc/howto/mro.rst
new file mode 100644
index 00000000000000..a44ef6848af4f3
--- /dev/null
+++ b/Doc/howto/mro.rst
@@ -0,0 +1,671 @@
+.. _python_2.3_mro:
+
+The Python 2.3 Method Resolution Order
+======================================
+
+.. note::
+
+ This is a historical document, provided as an appendix to the official
+ documentation.
+ The Method Resolution Order discussed here was *introduced* in Python 2.3,
+ but it is still used in later versions -- including Python 3.
+
+By `Michele Simionato <https://www.phyast.pitt.edu/~micheles/>`__.
+
+:Abstract:
+
+ *This document is intended for Python programmers who want to
+ understand the C3 Method Resolution Order used in Python 2.3.
+ Although it is not intended for newbies, it is quite pedagogical with
+ many worked out examples. I am not aware of other publicly available
+ documents with the same scope, therefore it should be useful.*
+
+Disclaimer:
+
+ *I donate this document to the Python Software Foundation, under the
+ Python 2.3 license. As usual in these circumstances, I warn the
+ reader that what follows* should *be correct, but I don't give any
+ warranty. Use it at your own risk and peril!*
+
+Acknowledgments:
+
+ *All the people of the Python mailing list who sent me their support.
+ Paul Foley who pointed out various imprecisions and made me to add the
+ part on local precedence ordering. David Goodger for help with the
+ formatting in reStructuredText. David Mertz for help with the editing.
+ Finally, Guido van Rossum who enthusiastically added this document to
+ the official Python 2.3 home-page.*
+
+The beginning
+-------------
+
+ *Felix qui potuit rerum cognoscere causas* -- Virgilius
+
+Everything started with a post by Samuele Pedroni to the Python
+development mailing list [#]_. In his post, Samuele showed that the
+Python 2.2 method resolution order is not monotonic and he proposed to
+replace it with the C3 method resolution order. Guido agreed with his
+arguments and therefore now Python 2.3 uses C3. The C3 method itself
+has nothing to do with Python, since it was invented by people working
+on Dylan and it is described in a paper intended for lispers [#]_. The
+present paper gives a (hopefully) readable discussion of the C3
+algorithm for Pythonistas who want to understand the reasons for the
+change.
+
+First of all, let me point out that what I am going to say only applies
+to the *new style classes* introduced in Python 2.2: *classic classes*
+maintain their old method resolution order, depth first and then left to
+right. Therefore, there is no breaking of old code for classic classes;
+and even if in principle there could be breaking of code for Python 2.2
+new style classes, in practice the cases in which the C3 resolution
+order differs from the Python 2.2 method resolution order are so rare
+that no real breaking of code is expected. Therefore:
+
+ *Don't be scared!*
+
+Moreover, unless you make strong use of multiple inheritance and you
+have non-trivial hierarchies, you don't need to understand the C3
+algorithm, and you can easily skip this paper. On the other hand, if
+you really want to know how multiple inheritance works, then this paper
+is for you. The good news is that things are not as complicated as you
+might expect.
+
+Let me begin with some basic definitions.
+
+1) Given a class C in a complicated multiple inheritance hierarchy, it
+ is a non-trivial task to specify the order in which methods are
+ overridden, i.e. to specify the order of the ancestors of C.
+
+2) The list of the ancestors of a class C, including the class itself,
+ ordered from the nearest ancestor to the furthest, is called the
+ class precedence list or the *linearization* of C.
+
+3) The *Method Resolution Order* (MRO) is the set of rules that
+ construct the linearization. In the Python literature, the idiom
+ "the MRO of C" is also used as a synonymous for the linearization of
+ the class C.
+
+4) For instance, in the case of single inheritance hierarchy, if C is a
+ subclass of C1, and C1 is a subclass of C2, then the linearization of
+ C is simply the list [C, C1 , C2]. However, with multiple
+ inheritance hierarchies, the construction of the linearization is
+ more cumbersome, since it is more difficult to construct a
+ linearization that respects *local precedence ordering* and
+ *monotonicity*.
+
+5) I will discuss the local precedence ordering later, but I can give
+ the definition of monotonicity here. A MRO is monotonic when the
+ following is true: *if C1 precedes C2 in the linearization of C,
+ then C1 precedes C2 in the linearization of any subclass of C*.
+ Otherwise, the innocuous operation of deriving a new class could
+ change the resolution order of methods, potentially introducing very
+ subtle bugs. Examples where this happens will be shown later.
+
+6) Not all classes admit a linearization. There are cases, in
+ complicated hierarchies, where it is not possible to derive a class
+ such that its linearization respects all the desired properties.
+
+Here I give an example of this situation. Consider the hierarchy
+
+ >>> O = object
+ >>> class X(O): pass
+ >>> class Y(O): pass
+ >>> class A(X,Y): pass
+ >>> class B(Y,X): pass
+
+which can be represented with the following inheritance graph, where I
+have denoted with O the ``object`` class, which is the beginning of any
+hierarchy for new style classes:
+
+ .. code-block:: text
+
+ -----------
+ | |
+ | O |
+ | / \ |
+ - X Y /
+ | / | /
+ | / |/
+ A B
+ \ /
+ ?
+
+In this case, it is not possible to derive a new class C from A and B,
+since X precedes Y in A, but Y precedes X in B, therefore the method
+resolution order would be ambiguous in C.
+
+Python 2.3 raises an exception in this situation (TypeError: MRO
+conflict among bases Y, X) forbidding the naive programmer from creating
+ambiguous hierarchies. Python 2.2 instead does not raise an exception,
+but chooses an *ad hoc* ordering (CABXYO in this case).
+
+The C3 Method Resolution Order
+------------------------------
+
+Let me introduce a few simple notations which will be useful for the
+following discussion. I will use the shortcut notation::
+
+ C1 C2 ... CN
+
+to indicate the list of classes [C1, C2, ... , CN].
+
+The *head* of the list is its first element::
+
+ head = C1
+
+whereas the *tail* is the rest of the list::
+
+ tail = C2 ... CN.
+
+I shall also use the notation::
+
+ C + (C1 C2 ... CN) = C C1 C2 ... CN
+
+to denote the sum of the lists [C] + [C1, C2, ... ,CN].
+
+Now I can explain how the MRO works in Python 2.3.
+
+Consider a class C in a multiple inheritance hierarchy, with C
+inheriting from the base classes B1, B2, ... , BN. We want to
+compute the linearization L[C] of the class C. The rule is the
+following:
+
+ *the linearization of C is the sum of C plus the merge of the
+ linearizations of the parents and the list of the parents.*
+
+In symbolic notation::
+
+ L[C(B1 ... BN)] = C + merge(L[B1] ... L[BN], B1 ... BN)
+
+In particular, if C is the ``object`` class, which has no parents, the
+linearization is trivial::
+
+ L[object] = object.
+
+However, in general one has to compute the merge according to the following
+prescription:
+
+ *take the head of the first list, i.e L[B1][0]; if this head is not in
+ the tail of any of the other lists, then add it to the linearization
+ of C and remove it from the lists in the merge, otherwise look at the
+ head of the next list and take it, if it is a good head. Then repeat
+ the operation until all the class are removed or it is impossible to
+ find good heads. In this case, it is impossible to construct the
+ merge, Python 2.3 will refuse to create the class C and will raise an
+ exception.*
+
+This prescription ensures that the merge operation *preserves* the
+ordering, if the ordering can be preserved. On the other hand, if the
+order cannot be preserved (as in the example of serious order
+disagreement discussed above) then the merge cannot be computed.
+
+The computation of the merge is trivial if C has only one parent
+(single inheritance); in this case::
+
+ L[C(B)] = C + merge(L[B],B) = C + L[B]
+
+However, in the case of multiple inheritance things are more cumbersome
+and I don't expect you can understand the rule without a couple of
+examples ;-)
+
+Examples
+--------
+
+First example. Consider the following hierarchy:
+
+ >>> O = object
+ >>> class F(O): pass
+ >>> class E(O): pass
+ >>> class D(O): pass
+ >>> class C(D,F): pass
+ >>> class B(D,E): pass
+ >>> class A(B,C): pass
+
+In this case the inheritance graph can be drawn as:
+
+ .. code-block:: text
+
+ 6
+ ---
+ Level 3 | O | (more general)
+ / --- \
+ / | \ |
+ / | \ |
+ / | \ |
+ --- --- --- |
+ Level 2 3 | D | 4| E | | F | 5 |
+ --- --- --- |
+ \ \ _ / | |
+ \ / \ _ | |
+ \ / \ | |
+ --- --- |
+ Level 1 1 | B | | C | 2 |
+ --- --- |
+ \ / |
+ \ / \ /
+ ---
+ Level 0 0 | A | (more specialized)
+ ---
+
+
+The linearizations of O,D,E and F are trivial::
+
+ L[O] = O
+ L[D] = D O
+ L[E] = E O
+ L[F] = F O
+
+The linearization of B can be computed as::
+
+ L[B] = B + merge(DO, EO, DE)
+
+We see that D is a good head, therefore we take it and we are reduced to
+compute ``merge(O,EO,E)``. Now O is not a good head, since it is in the
+tail of the sequence EO. In this case the rule says that we have to
+skip to the next sequence. Then we see that E is a good head; we take
+it and we are reduced to compute ``merge(O,O)`` which gives O. Therefore::
+
+ L[B] = B D E O
+
+Using the same procedure one finds::
+
+ L[C] = C + merge(DO,FO,DF)
+ = C + D + merge(O,FO,F)
+ = C + D + F + merge(O,O)
+ = C D F O
+
+Now we can compute::
+
+ L[A] = A + merge(BDEO,CDFO,BC)
+ = A + B + merge(DEO,CDFO,C)
+ = A + B + C + merge(DEO,DFO)
+ = A + B + C + D + merge(EO,FO)
+ = A + B + C + D + E + merge(O,FO)
+ = A + B + C + D + E + F + merge(O,O)
+ = A B C D E F O
+
+In this example, the linearization is ordered in a pretty nice way
+according to the inheritance level, in the sense that lower levels (i.e.
+more specialized classes) have higher precedence (see the inheritance
+graph). However, this is not the general case.
+
+I leave as an exercise for the reader to compute the linearization for
+my second example:
+
+ >>> O = object
+ >>> class F(O): pass
+ >>> class E(O): pass
+ >>> class D(O): pass
+ >>> class C(D,F): pass
+ >>> class B(E,D): pass
+ >>> class A(B,C): pass
+
+The only difference with the previous example is the change B(D,E) -->
+B(E,D); however even such a little modification completely changes the
+ordering of the hierarchy:
+
+ .. code-block:: text
+
+ 6
+ ---
+ Level 3 | O |
+ / --- \
+ / | \
+ / | \
+ / | \
+ --- --- ---
+ Level 2 2 | E | 4 | D | | F | 5
+ --- --- ---
+ \ / \ /
+ \ / \ /
+ \ / \ /
+ --- ---
+ Level 1 1 | B | | C | 3
+ --- ---
+ \ /
+ \ /
+ ---
+ Level 0 0 | A |
+ ---
+
+
+Notice that the class E, which is in the second level of the hierarchy,
+precedes the class C, which is in the first level of the hierarchy, i.e.
+E is more specialized than C, even if it is in a higher level.
+
+A lazy programmer can obtain the MRO directly from Python 2.2, since in
+this case it coincides with the Python 2.3 linearization. It is enough
+to invoke the .mro() method of class A:
+
+ >>> A.mro() # doctest: +NORMALIZE_WHITESPACE
+    [<class 'A'>, <class 'B'>, <class 'C'>,
+    <class 'D'>, <class 'E'>, <class 'F'>,
+    <class 'object'>]
+
+Finally, let me consider the example discussed in the first section,
+involving a serious order disagreement. In this case, it is
+straightforward to compute the linearizations of O, X, Y, A and B:
+
+ .. code-block:: text
+
+      L[O] = O
+ L[X] = X O
+ L[Y] = Y O
+ L[A] = A X Y O
+ L[B] = B Y X O
+
+However, it is impossible to compute the linearization for a class C
+that inherits from A and B::
+
+ L[C] = C + merge(AXYO, BYXO, AB)
+ = C + A + merge(XYO, BYXO, B)
+ = C + A + B + merge(XYO, YXO)
+
+At this point we cannot merge the lists XYO and YXO, since X is in the
+tail of YXO whereas Y is in the tail of XYO: therefore there are no
+good heads and the C3 algorithm stops. Python 2.3 raises an error and
+refuses to create the class C.
+
+Bad Method Resolution Orders
+----------------------------
+
+A MRO is *bad* when it breaks such fundamental properties as local
+precedence ordering and monotonicity. In this section, I will show
+that both the MRO for classic classes and the MRO for new style classes
+in Python 2.2 are bad.
+
+It is easier to start with the local precedence ordering. Consider the
+following example:
+
+ >>> F=type('Food',(),{'remember2buy':'spam'})
+ >>> E=type('Eggs',(F,),{'remember2buy':'eggs'})
+ >>> G=type('GoodFood',(F,E),{}) # under Python 2.3 this is an error! # doctest: +SKIP
+
+with inheritance diagram
+
+ .. code-block:: text
+
+ O
+ |
+ (buy spam) F
+ | \
+ | E (buy eggs)
+ | /
+ G
+
+ (buy eggs or spam ?)
+
+
+We see that class G inherits from F and E, with F *before* E: therefore
+we would expect the attribute *G.remember2buy* to be inherited by
+*F.remember2buy* and not by *E.remember2buy*: nevertheless Python 2.2
+gives
+
+ >>> G.remember2buy # doctest: +SKIP
+ 'eggs'
+
+This is a breaking of local precedence ordering since the order in the
+local precedence list, i.e. the list of the parents of G, is not
+preserved in the Python 2.2 linearization of G::
+
+ L[G,P22]= G E F object # F *follows* E
+
+One could argue that the reason why F follows E in the Python 2.2
+linearization is that F is less specialized than E, since F is the
+superclass of E; nevertheless the breaking of local precedence ordering
+is quite non-intuitive and error prone. This is particularly true since
+it is different from old style classes:
+
+ >>> class F: remember2buy='spam'
+ >>> class E(F): remember2buy='eggs'
+ >>> class G(F,E): pass # doctest: +SKIP
+ >>> G.remember2buy # doctest: +SKIP
+ 'spam'
+
+In this case the MRO is GFEF and the local precedence ordering is
+preserved.
+
+As a general rule, hierarchies such as the previous one should be
+avoided, since it is unclear if F should override E or viceversa.
+Python 2.3 solves the ambiguity by raising an exception in the creation
+of class G, effectively stopping the programmer from generating
+ambiguous hierarchies. The reason for that is that the C3 algorithm
+fails when the merge::
+
+ merge(FO,EFO,FE)
+
+cannot be computed, because F is in the tail of EFO and E is in the tail
+of FE.
+
+The real solution is to design a non-ambiguous hierarchy, i.e. to derive
+G from E and F (the more specific first) and not from F and E; in this
+case the MRO is GEF without any doubt.
+
+ .. code-block:: text
+
+ O
+ |
+ F (spam)
+ / |
+ (eggs) E |
+ \ |
+ G
+ (eggs, no doubt)
+
+
+Python 2.3 forces the programmer to write good hierarchies (or, at
+least, less error-prone ones).
+
+On a related note, let me point out that the Python 2.3 algorithm is
+smart enough to recognize obvious mistakes, as the duplication of
+classes in the list of parents:
+
+ >>> class A(object): pass
+ >>> class C(A,A): pass # error
+ Traceback (most recent call last):
+      File "<stdin>", line 1, in ?
+ TypeError: duplicate base class A
+
+Python 2.2 (both for classic classes and new style classes) in this
+situation, would not raise any exception.
+
+Finally, I would like to point out two lessons we have learned from this
+example:
+
+1. despite the name, the MRO determines the resolution order of
+ attributes, not only of methods;
+
+2. the default food for Pythonistas is spam ! (but you already knew
+ that ;-)
+
+Having discussed the issue of local precedence ordering, let me now
+consider the issue of monotonicity. My goal is to show that neither the
+MRO for classic classes nor that for Python 2.2 new style classes is
+monotonic.
+
+To prove that the MRO for classic classes is non-monotonic is rather
+trivial, it is enough to look at the diamond diagram:
+
+ .. code-block:: text
+
+
+ C
+ / \
+ / \
+ A B
+ \ /
+ \ /
+ D
+
+One easily discerns the inconsistency::
+
+ L[B,P21] = B C # B precedes C : B's methods win
+ L[D,P21] = D A C B C # B follows C : C's methods win!
+
+On the other hand, there are no problems with the Python 2.2 and 2.3
+MROs, they give both::
+
+ L[D] = D A B C
+
+Guido points out in his essay [#]_ that the classic MRO is not so bad in
+practice, since one can typically avoid diamonds for classic classes.
+But all new style classes inherit from ``object``, therefore diamonds are
+unavoidable and inconsistencies show up in every multiple inheritance
+graph.
+
+The MRO of Python 2.2 makes breaking monotonicity difficult, but not
+impossible. The following example, originally provided by Samuele
+Pedroni, shows that the MRO of Python 2.2 is non-monotonic:
+
+ >>> class A(object): pass
+ >>> class B(object): pass
+ >>> class C(object): pass
+ >>> class D(object): pass
+ >>> class E(object): pass
+ >>> class K1(A,B,C): pass
+ >>> class K2(D,B,E): pass
+ >>> class K3(D,A): pass
+ >>> class Z(K1,K2,K3): pass
+
+Here are the linearizations according to the C3 MRO (the reader should
+verify these linearizations as an exercise and draw the inheritance
+diagram ;-) ::
+
+ L[A] = A O
+ L[B] = B O
+ L[C] = C O
+ L[D] = D O
+ L[E] = E O
+ L[K1]= K1 A B C O
+ L[K2]= K2 D B E O
+ L[K3]= K3 D A O
+ L[Z] = Z K1 K2 K3 D A B C E O
+
+Python 2.2 gives exactly the same linearizations for A, B, C, D, E, K1,
+K2 and K3, but a different linearization for Z::
+
+ L[Z,P22] = Z K1 K3 A K2 D B C E O
+
+It is clear that this linearization is *wrong*, since A comes before D
+whereas in the linearization of K3 A comes *after* D. In other words, in
+K3 methods derived by D override methods derived by A, but in Z, which
+still is a subclass of K3, methods derived by A override methods derived
+by D! This is a violation of monotonicity. Moreover, the Python 2.2
+linearization of Z is also inconsistent with local precedence ordering,
+since the local precedence list of the class Z is [K1, K2, K3] (K2
+precedes K3), whereas in the linearization of Z K2 *follows* K3. These
+problems explain why the 2.2 rule has been dismissed in favor of the C3
+rule.
+
+The end
+-------
+
+This section is for the impatient reader, who skipped all the previous
+sections and jumped immediately to the end. This section is for the
+lazy programmer too, who didn't want to exercise her/his brain.
+Finally, it is for the programmer with some hubris, otherwise s/he would
+not be reading a paper on the C3 method resolution order in multiple
+inheritance hierarchies ;-) These three virtues taken all together (and
+*not* separately) deserve a prize: the prize is a short Python 2.2
+script that allows you to compute the 2.3 MRO without risk to your
+brain. Simply change the last line to play with the various examples I
+have discussed in this paper.::
+
+ #
+
+ """C3 algorithm by Samuele Pedroni (with readability enhanced by me)."""
+
+ class __metaclass__(type):
+ "All classes are metamagically modified to be nicely printed"
+ __repr__ = lambda cls: cls.__name__
+
+ class ex_2:
+ "Serious order disagreement" #From Guido
+ class O: pass
+ class X(O): pass
+ class Y(O): pass
+ class A(X,Y): pass
+ class B(Y,X): pass
+ try:
+ class Z(A,B): pass #creates Z(A,B) in Python 2.2
+ except TypeError:
+ pass # Z(A,B) cannot be created in Python 2.3
+
+ class ex_5:
+ "My first example"
+ class O: pass
+ class F(O): pass
+ class E(O): pass
+ class D(O): pass
+ class C(D,F): pass
+ class B(D,E): pass
+ class A(B,C): pass
+
+ class ex_6:
+ "My second example"
+ class O: pass
+ class F(O): pass
+ class E(O): pass
+ class D(O): pass
+ class C(D,F): pass
+ class B(E,D): pass
+ class A(B,C): pass
+
+ class ex_9:
+ "Difference between Python 2.2 MRO and C3" #From Samuele
+ class O: pass
+ class A(O): pass
+ class B(O): pass
+ class C(O): pass
+ class D(O): pass
+ class E(O): pass
+ class K1(A,B,C): pass
+ class K2(D,B,E): pass
+ class K3(D,A): pass
+ class Z(K1,K2,K3): pass
+
+ def merge(seqs):
+ print '\n\nCPL[%s]=%s' % (seqs[0][0],seqs),
+ res = []; i=0
+ while 1:
+ nonemptyseqs=[seq for seq in seqs if seq]
+ if not nonemptyseqs: return res
+ i+=1; print '\n',i,'round: candidates...',
+ for seq in nonemptyseqs: # find merge candidates among seq heads
+ cand = seq[0]; print ' ',cand,
+ nothead=[s for s in nonemptyseqs if cand in s[1:]]
+ if nothead: cand=None #reject candidate
+ else: break
+ if not cand: raise "Inconsistent hierarchy"
+ res.append(cand)
+ for seq in nonemptyseqs: # remove cand
+ if seq[0] == cand: del seq[0]
+
+ def mro(C):
+ "Compute the class precedence list (mro) according to C3"
+ return merge([[C]]+map(mro,C.__bases__)+[list(C.__bases__)])
+
+ def print_mro(C):
+ print '\nMRO[%s]=%s' % (C,mro(C))
+ print '\nP22 MRO[%s]=%s' % (C,C.mro())
+
+ print_mro(ex_9.Z)
+
+ #
+
+That's all folks,
+
+ enjoy !
+
+
+Resources
+---------
+
+.. [#] The thread on python-dev started by Samuele Pedroni:
+ https://mail.python.org/pipermail/python-dev/2002-October/029035.html
+
+.. [#] The paper *A Monotonic Superclass Linearization for Dylan*:
+ https://doi.org/10.1145/236337.236343
+
+.. [#] Guido van Rossum's essay, *Unifying types and classes in Python 2.2*:
+ https://web.archive.org/web/20140210194412/http://www.python.org/download/releases/2.2.2/descrintro
diff --git a/Doc/library/asyncio-queue.rst b/Doc/library/asyncio-queue.rst
index d86fbc21351e2d..9b579cc1d5fdfe 100644
--- a/Doc/library/asyncio-queue.rst
+++ b/Doc/library/asyncio-queue.rst
@@ -62,6 +62,9 @@ Queue
Remove and return an item from the queue. If queue is empty,
wait until an item is available.
+ Raises :exc:`QueueShutDown` if the queue has been shut down and
+ is empty, or if the queue has been shut down immediately.
+
.. method:: get_nowait()
Return an item if one is immediately available, else raise
@@ -82,6 +85,8 @@ Queue
Put an item into the queue. If the queue is full, wait until a
free slot is available before adding the item.
+ Raises :exc:`QueueShutDown` if the queue has been shut down.
+
.. method:: put_nowait(item)
Put an item into the queue without blocking.
@@ -92,6 +97,22 @@ Queue
Return the number of items in the queue.
+ .. method:: shutdown(immediate=False)
+
+ Shut down the queue, making :meth:`~Queue.get` and :meth:`~Queue.put`
+ raise :exc:`QueueShutDown`.
+
+ By default, :meth:`~Queue.get` on a shut down queue will only
+ raise once the queue is empty. Set *immediate* to true to make
+ :meth:`~Queue.get` raise immediately instead.
+
+ All blocked callers of :meth:`~Queue.put` and :meth:`~Queue.get`
+ will be unblocked. If *immediate* is true, a task will be marked
+ as done for each remaining item in the queue, which may unblock
+ callers of :meth:`~Queue.join`.
+
+ .. versionadded:: 3.13
+
.. method:: task_done()
Indicate that a formerly enqueued task is complete.
@@ -105,6 +126,9 @@ Queue
call was received for every item that had been :meth:`~Queue.put`
into the queue).
+ ``shutdown(immediate=True)`` calls :meth:`task_done` for each
+ remaining item in the queue.
+
Raises :exc:`ValueError` if called more times than there were
items placed in the queue.
@@ -145,6 +169,14 @@ Exceptions
on a queue that has reached its *maxsize*.
+.. exception:: QueueShutDown
+
+ Exception raised when :meth:`~Queue.put` or :meth:`~Queue.get` is
+ called on a queue which has been shut down.
+
+ .. versionadded:: 3.13
+
+
Examples
========
diff --git a/Doc/library/asyncio-stream.rst b/Doc/library/asyncio-stream.rst
index 68b1dff20213e1..3fdc79b3c6896c 100644
--- a/Doc/library/asyncio-stream.rst
+++ b/Doc/library/asyncio-stream.rst
@@ -260,8 +260,19 @@ StreamReader
buffer is reset. The :attr:`IncompleteReadError.partial` attribute
may contain a portion of the separator.
+ The *separator* may also be a tuple of separators. In this
+ case the return value will be the shortest possible that has any
+ separator as the suffix. For the purposes of :exc:`LimitOverrunError`,
+ the shortest possible separator is considered to be the one that
+ matched.
+
.. versionadded:: 3.5.2
+ .. versionchanged:: 3.13
+
+ The *separator* parameter may now be a :class:`tuple` of
+ separators.
+
.. method:: at_eof()
Return ``True`` if the buffer is empty and :meth:`feed_eof`
diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst
index 3b10a0d628a86e..3d300c37419f13 100644
--- a/Doc/library/asyncio-task.rst
+++ b/Doc/library/asyncio-task.rst
@@ -392,6 +392,27 @@ is also included in the exception group.
The same special case is made for
:exc:`KeyboardInterrupt` and :exc:`SystemExit` as in the previous paragraph.
+Task groups are careful not to mix up the internal cancellation used to
+"wake up" their :meth:`~object.__aexit__` with cancellation requests
+for the task in which they are running made by other parties.
+In particular, when one task group is syntactically nested in another,
+and both experience an exception in one of their child tasks simultaneously,
+the inner task group will process its exceptions, and then the outer task group
+will receive another cancellation and process its own exceptions.
+
+In the case where a task group is cancelled externally and also must
+raise an :exc:`ExceptionGroup`, it will call the parent task's
+:meth:`~asyncio.Task.cancel` method. This ensures that a
+:exc:`asyncio.CancelledError` will be raised at the next
+:keyword:`await`, so the cancellation is not lost.
+
+Task groups preserve the cancellation count
+reported by :meth:`asyncio.Task.cancelling`.
+
+.. versionchanged:: 3.13
+
+ Improved handling of simultaneous internal and external cancellations
+ and correct preservation of cancellation counts.
Sleeping
========
@@ -1369,6 +1390,15 @@ Task Object
catching :exc:`CancelledError`, it needs to call this method to remove
the cancellation state.
+ When this method decrements the cancellation count to zero,
+ the method checks if a previous :meth:`cancel` call had arranged
+ for :exc:`CancelledError` to be thrown into the task.
+ If it hasn't been thrown yet, that arrangement will be
+ rescinded (by resetting the internal ``_must_cancel`` flag).
+
+ .. versionchanged:: 3.13
+ Changed to rescind pending cancellation requests upon reaching zero.
+
.. method:: cancelling()
Return the number of pending cancellation requests to this Task, i.e.,
diff --git a/Doc/library/code.rst b/Doc/library/code.rst
index 091840781bd235..8cb604cf48ff0b 100644
--- a/Doc/library/code.rst
+++ b/Doc/library/code.rst
@@ -41,7 +41,7 @@ build applications which provide an interactive interpreter prompt.
the :meth:`InteractiveConsole.raw_input` method, if provided. If *local* is
provided, it is passed to the :class:`InteractiveConsole` constructor for
use as the default namespace for the interpreter loop. If *local_exit* is provided,
- it is passed to the :class:`InteractiveConsole` constructor. The :meth:`interact`
+ it is passed to the :class:`InteractiveConsole` constructor. The :meth:`~InteractiveConsole.interact`
method of the instance is then run with *banner* and *exitmsg* passed as the
banner and exit message to use, if provided. The console object is discarded
after use.
diff --git a/Doc/library/codecs.rst b/Doc/library/codecs.rst
index a757f19b99448c..010ae25557a9c9 100644
--- a/Doc/library/codecs.rst
+++ b/Doc/library/codecs.rst
@@ -1478,7 +1478,7 @@ Internationalized Domain Names (IDN)). It builds upon the ``punycode`` encoding
and :mod:`stringprep`.
If you need the IDNA 2008 standard from :rfc:`5891` and :rfc:`5895`, use the
-third-party `idna module <https://pypi.org/project/idna/>`_.
+third-party :pypi:`idna` module.
These RFCs together define a protocol to support non-ASCII characters in domain
names. A domain name containing non-ASCII characters (such as
diff --git a/Doc/library/dataclasses.rst b/Doc/library/dataclasses.rst
index 61b2263339da71..7a8b83e1384e5f 100644
--- a/Doc/library/dataclasses.rst
+++ b/Doc/library/dataclasses.rst
@@ -12,7 +12,7 @@
--------------
This module provides a decorator and functions for automatically
-adding generated :term:`special method`\s such as :meth:`~object.__init__` and
+adding generated :term:`special methods <special method>` such as :meth:`~object.__init__` and
:meth:`~object.__repr__` to user-defined classes. It was originally described
in :pep:`557`.
@@ -39,7 +39,7 @@ will add, among other things, a :meth:`!__init__` that looks like::
self.quantity_on_hand = quantity_on_hand
Note that this method is automatically added to the class: it is not
-directly specified in the ``InventoryItem`` definition shown above.
+directly specified in the :class:`!InventoryItem` definition shown above.
.. versionadded:: 3.7
@@ -86,13 +86,13 @@ Module contents
The parameters to ``@dataclass`` are:
- - ``init``: If true (the default), a :meth:`~object.__init__` method will be
+ - *init*: If true (the default), a :meth:`~object.__init__` method will be
generated.
If the class already defines :meth:`!__init__`, this parameter is
ignored.
- - ``repr``: If true (the default), a :meth:`~object.__repr__` method will be
+ - *repr*: If true (the default), a :meth:`~object.__repr__` method will be
generated. The generated repr string will have the class name and
the name and repr of each field, in the order they are defined in
the class. Fields that are marked as being excluded from the repr
@@ -102,7 +102,7 @@ Module contents
If the class already defines :meth:`!__repr__`, this parameter is
ignored.
- - ``eq``: If true (the default), an :meth:`~object.__eq__` method will be
+ - *eq*: If true (the default), an :meth:`~object.__eq__` method will be
generated. This method compares the class as if it were a tuple
of its fields, in order. Both instances in the comparison must
be of the identical type.
@@ -110,26 +110,26 @@ Module contents
If the class already defines :meth:`!__eq__`, this parameter is
ignored.
- - ``order``: If true (the default is ``False``), :meth:`~object.__lt__`,
+ - *order*: If true (the default is ``False``), :meth:`~object.__lt__`,
:meth:`~object.__le__`, :meth:`~object.__gt__`, and :meth:`~object.__ge__` methods will be
generated. These compare the class as if it were a tuple of its
fields, in order. Both instances in the comparison must be of the
- identical type. If ``order`` is true and ``eq`` is false, a
+ identical type. If *order* is true and *eq* is false, a
:exc:`ValueError` is raised.
If the class already defines any of :meth:`!__lt__`,
:meth:`!__le__`, :meth:`!__gt__`, or :meth:`!__ge__`, then
:exc:`TypeError` is raised.
- - ``unsafe_hash``: If ``False`` (the default), a :meth:`~object.__hash__` method
- is generated according to how ``eq`` and ``frozen`` are set.
+ - *unsafe_hash*: If ``False`` (the default), a :meth:`~object.__hash__` method
+ is generated according to how *eq* and *frozen* are set.
:meth:`!__hash__` is used by built-in :meth:`hash()`, and when objects are
added to hashed collections such as dictionaries and sets. Having a
:meth:`!__hash__` implies that instances of the class are immutable.
Mutability is a complicated property that depends on the programmer's
intent, the existence and behavior of :meth:`!__eq__`, and the values of
- the ``eq`` and ``frozen`` flags in the ``@dataclass`` decorator.
+ the *eq* and *frozen* flags in the ``@dataclass`` decorator.
By default, ``@dataclass`` will not implicitly add a :meth:`~object.__hash__`
method unless it is safe to do so. Neither will it add or change an
@@ -149,29 +149,29 @@ Module contents
method in your dataclass and set ``unsafe_hash=True``; this will result
in a :exc:`TypeError`.
- If ``eq`` and ``frozen`` are both true, by default ``@dataclass`` will
- generate a :meth:`!__hash__` method for you. If ``eq`` is true and
- ``frozen`` is false, :meth:`!__hash__` will be set to ``None``, marking it
- unhashable (which it is, since it is mutable). If ``eq`` is false,
+ If *eq* and *frozen* are both true, by default ``@dataclass`` will
+ generate a :meth:`!__hash__` method for you. If *eq* is true and
+ *frozen* is false, :meth:`!__hash__` will be set to ``None``, marking it
+ unhashable (which it is, since it is mutable). If *eq* is false,
:meth:`!__hash__` will be left untouched meaning the :meth:`!__hash__`
method of the superclass will be used (if the superclass is
:class:`object`, this means it will fall back to id-based hashing).
- - ``frozen``: If true (the default is ``False``), assigning to fields will
+ - *frozen*: If true (the default is ``False``), assigning to fields will
generate an exception. This emulates read-only frozen instances. If
:meth:`~object.__setattr__` or :meth:`~object.__delattr__` is defined in the class, then
:exc:`TypeError` is raised. See the discussion below.
- - ``match_args``: If true (the default is ``True``), the
- ``__match_args__`` tuple will be created from the list of
+ - *match_args*: If true (the default is ``True``), the
+ :attr:`~object.__match_args__` tuple will be created from the list of
parameters to the generated :meth:`~object.__init__` method (even if
:meth:`!__init__` is not generated, see above). If false, or if
- ``__match_args__`` is already defined in the class, then
- ``__match_args__`` will not be generated.
+ :attr:`!__match_args__` is already defined in the class, then
+ :attr:`!__match_args__` will not be generated.
.. versionadded:: 3.10
- - ``kw_only``: If true (the default value is ``False``), then all
+ - *kw_only*: If true (the default value is ``False``), then all
fields will be marked as keyword-only. If a field is marked as
keyword-only, then the only effect is that the :meth:`~object.__init__`
parameter generated from a keyword-only field must be specified
@@ -182,7 +182,7 @@ Module contents
.. versionadded:: 3.10
- - ``slots``: If true (the default is ``False``), :attr:`~object.__slots__` attribute
+ - *slots*: If true (the default is ``False``), :attr:`~object.__slots__` attribute
will be generated and new class will be returned instead of the original one.
If :attr:`!__slots__` is already defined in the class, then :exc:`TypeError`
is raised.
@@ -190,16 +190,16 @@ Module contents
.. versionadded:: 3.10
.. versionchanged:: 3.11
- If a field name is already included in the ``__slots__``
- of a base class, it will not be included in the generated ``__slots__``
+ If a field name is already included in the :attr:`!__slots__`
+ of a base class, it will not be included in the generated :attr:`!__slots__`
      to prevent :ref:`overriding them <datamodel-note-slots>`.
- Therefore, do not use ``__slots__`` to retrieve the field names of a
+ Therefore, do not use :attr:`!__slots__` to retrieve the field names of a
dataclass. Use :func:`fields` instead.
To be able to determine inherited slots,
- base class ``__slots__`` may be any iterable, but *not* an iterator.
+ base class :attr:`!__slots__` may be any iterable, but *not* an iterator.
- - ``weakref_slot``: If true (the default is ``False``), add a slot
+ - *weakref_slot*: If true (the default is ``False``), add a slot
named "__weakref__", which is required to make an instance
weakref-able. It is an error to specify ``weakref_slot=True``
without also specifying ``slots=True``.
@@ -214,7 +214,7 @@ Module contents
a: int # 'a' has no default value
b: int = 0 # assign a default value for 'b'
- In this example, both ``a`` and ``b`` will be included in the added
+ In this example, both :attr:`!a` and :attr:`!b` will be included in the added
:meth:`~object.__init__` method, which will be defined as::
def __init__(self, a: int, b: int = 0):
@@ -245,25 +245,25 @@ Module contents
The parameters to :func:`!field` are:
- - ``default``: If provided, this will be the default value for this
+ - *default*: If provided, this will be the default value for this
field. This is needed because the :func:`!field` call itself
replaces the normal position of the default value.
- - ``default_factory``: If provided, it must be a zero-argument
+ - *default_factory*: If provided, it must be a zero-argument
callable that will be called when a default value is needed for
this field. Among other purposes, this can be used to specify
fields with mutable default values, as discussed below. It is an
- error to specify both ``default`` and ``default_factory``.
+ error to specify both *default* and *default_factory*.
- - ``init``: If true (the default), this field is included as a
+ - *init*: If true (the default), this field is included as a
parameter to the generated :meth:`~object.__init__` method.
- - ``repr``: If true (the default), this field is included in the
+ - *repr*: If true (the default), this field is included in the
string returned by the generated :meth:`~object.__repr__` method.
- - ``hash``: This can be a bool or ``None``. If true, this field is
+ - *hash*: This can be a bool or ``None``. If true, this field is
included in the generated :meth:`~object.__hash__` method. If ``None`` (the
- default), use the value of ``compare``: this would normally be
+ default), use the value of *compare*: this would normally be
the expected behavior. A field should be considered in the hash
if it's used for comparisons. Setting this value to anything
other than ``None`` is discouraged.
@@ -274,11 +274,11 @@ Module contents
fields that contribute to the type's hash value. Even if a field
is excluded from the hash, it will still be used for comparisons.
- - ``compare``: If true (the default), this field is included in the
+ - *compare*: If true (the default), this field is included in the
generated equality and comparison methods (:meth:`~object.__eq__`,
:meth:`~object.__gt__`, et al.).
- - ``metadata``: This can be a mapping or None. None is treated as
+ - *metadata*: This can be a mapping or None. None is treated as
an empty dict. This value is wrapped in
:func:`~types.MappingProxyType` to make it read-only, and exposed
on the :class:`Field` object. It is not used at all by Data
@@ -286,7 +286,7 @@ Module contents
Multiple third-parties can each have their own key, to use as a
namespace in the metadata.
- - ``kw_only``: If true, this field will be marked as keyword-only.
+ - *kw_only*: If true, this field will be marked as keyword-only.
This is used when the generated :meth:`~object.__init__` method's
parameters are computed.
@@ -294,7 +294,7 @@ Module contents
If the default value of a field is specified by a call to
:func:`!field`, then the class attribute for this field will be
- replaced by the specified ``default`` value. If no ``default`` is
+ replaced by the specified *default* value. If *default* is not
provided, then the class attribute will be deleted. The intent is
   that after the :func:`@dataclass <dataclass>` decorator runs, the class
attributes will all contain the default values for the fields, just
@@ -308,9 +308,9 @@ Module contents
z: int = field(repr=False, default=10)
t: int = 20
- The class attribute ``C.z`` will be ``10``, the class attribute
- ``C.t`` will be ``20``, and the class attributes ``C.x`` and
- ``C.y`` will not be set.
+ The class attribute :attr:`!C.z` will be ``10``, the class attribute
+ :attr:`!C.t` will be ``20``, and the class attributes :attr:`!C.x` and
+ :attr:`!C.y` will not be set.
.. class:: Field
@@ -319,10 +319,10 @@ Module contents
module-level method (see below). Users should never instantiate a
:class:`!Field` object directly. Its documented attributes are:
- - ``name``: The name of the field.
- - ``type``: The type of the field.
- - ``default``, ``default_factory``, ``init``, ``repr``, ``hash``,
- ``compare``, ``metadata``, and ``kw_only`` have the identical
+ - :attr:`!name`: The name of the field.
+ - :attr:`!type`: The type of the field.
+ - :attr:`!default`, :attr:`!default_factory`, :attr:`!init`, :attr:`!repr`, :attr:`!hash`,
+ :attr:`!compare`, :attr:`!metadata`, and :attr:`!kw_only` have the identical
meaning and values as they do in the :func:`field` function.
Other attributes may exist, but they are private and must not be
@@ -337,8 +337,8 @@ Module contents
.. function:: asdict(obj, *, dict_factory=dict)
- Converts the dataclass ``obj`` to a dict (by using the
- factory function ``dict_factory``). Each dataclass is converted
+ Converts the dataclass *obj* to a dict (by using the
+ factory function *dict_factory*). Each dataclass is converted
to a dict of its fields, as ``name: value`` pairs. dataclasses, dicts,
lists, and tuples are recursed into. Other objects are copied with
:func:`copy.deepcopy`.
@@ -362,15 +362,15 @@ Module contents
To create a shallow copy, the following workaround may be used::
- dict((field.name, getattr(obj, field.name)) for field in fields(obj))
+ {field.name: getattr(obj, field.name) for field in fields(obj)}
- :func:`!asdict` raises :exc:`TypeError` if ``obj`` is not a dataclass
+ :func:`!asdict` raises :exc:`TypeError` if *obj* is not a dataclass
instance.
.. function:: astuple(obj, *, tuple_factory=tuple)
- Converts the dataclass ``obj`` to a tuple (by using the
- factory function ``tuple_factory``). Each dataclass is converted
+ Converts the dataclass *obj* to a tuple (by using the
+ factory function *tuple_factory*). Each dataclass is converted
to a tuple of its field values. dataclasses, dicts, lists, and
tuples are recursed into. Other objects are copied with
:func:`copy.deepcopy`.
@@ -384,28 +384,28 @@ Module contents
tuple(getattr(obj, field.name) for field in dataclasses.fields(obj))
- :func:`!astuple` raises :exc:`TypeError` if ``obj`` is not a dataclass
+ :func:`!astuple` raises :exc:`TypeError` if *obj* is not a dataclass
instance.
.. function:: make_dataclass(cls_name, fields, *, bases=(), namespace=None, init=True, repr=True, eq=True, order=False, unsafe_hash=False, frozen=False, match_args=True, kw_only=False, slots=False, weakref_slot=False, module=None)
- Creates a new dataclass with name ``cls_name``, fields as defined
- in ``fields``, base classes as given in ``bases``, and initialized
- with a namespace as given in ``namespace``. ``fields`` is an
+ Creates a new dataclass with name *cls_name*, fields as defined
+ in *fields*, base classes as given in *bases*, and initialized
+ with a namespace as given in *namespace*. *fields* is an
iterable whose elements are each either ``name``, ``(name, type)``,
or ``(name, type, Field)``. If just ``name`` is supplied,
- ``typing.Any`` is used for ``type``. The values of ``init``,
- ``repr``, ``eq``, ``order``, ``unsafe_hash``, ``frozen``,
- ``match_args``, ``kw_only``, ``slots``, and ``weakref_slot`` have
+ :data:`typing.Any` is used for ``type``. The values of *init*,
+ *repr*, *eq*, *order*, *unsafe_hash*, *frozen*,
+ *match_args*, *kw_only*, *slots*, and *weakref_slot* have
   the same meaning as they do in :func:`@dataclass <dataclass>`.
- If ``module`` is defined, the ``__module__`` attribute
+ If *module* is defined, the :attr:`!__module__` attribute
of the dataclass is set to that value.
By default, it is set to the module name of the caller.
This function is not strictly required, because any Python
- mechanism for creating a new class with ``__annotations__`` can
- then apply the ``@dataclass`` function to convert that class to
+ mechanism for creating a new class with :attr:`!__annotations__` can
+   then apply the :func:`@dataclass <dataclass>` function to convert that class to
a dataclass. This function is provided as a convenience. For
example::
@@ -428,10 +428,10 @@ Module contents
.. function:: replace(obj, /, **changes)
- Creates a new object of the same type as ``obj``, replacing
- fields with values from ``changes``. If ``obj`` is not a Data
- Class, raises :exc:`TypeError`. If values in ``changes`` do not
- specify fields, raises :exc:`TypeError`.
+ Creates a new object of the same type as *obj*, replacing
+ fields with values from *changes*. If *obj* is not a Data
+ Class, raises :exc:`TypeError`. If keys in *changes* are not
+ field names of the given dataclass, raises :exc:`TypeError`.
The newly returned object is created by calling the :meth:`~object.__init__`
method of the dataclass. This ensures that
@@ -441,7 +441,7 @@ Module contents
specified on the call to :func:`!replace` so that they can be passed to
:meth:`!__init__` and :meth:`__post_init__`.
- It is an error for ``changes`` to contain any fields that are
+ It is an error for *changes* to contain any fields that are
defined as having ``init=False``. A :exc:`ValueError` will be raised
in this case.
@@ -451,7 +451,7 @@ Module contents
initialized at all. It is expected that ``init=False`` fields will
be rarely and judiciously used. If they are used, it might be wise
to have alternate class constructors, or perhaps a custom
- ``replace()`` (or similarly named) method which handles instance
+ :func:`!replace` (or similarly named) method which handles instance
copying.
Dataclass instances are also supported by generic function :func:`copy.replace`.
@@ -513,7 +513,7 @@ Post-init processing
.. function:: __post_init__()
When defined on the class, it will be called by the generated
- :meth:`~object.__init__`, normally as ``self.__post_init__()``.
+ :meth:`~object.__init__`, normally as :meth:`!self.__post_init__`.
However, if any ``InitVar`` fields are defined, they will also be
passed to :meth:`!__post_init__` in the order they were defined in the
class. If no :meth:`!__init__` method is generated, then
@@ -556,17 +556,21 @@ See the section below on init-only variables for ways to pass
parameters to :meth:`!__post_init__`. Also see the warning about how
:func:`replace` handles ``init=False`` fields.
+.. _dataclasses-class-variables:
+
Class variables
---------------
One of the few places where :func:`@dataclass <dataclass>` actually inspects the type
of a field is to determine if a field is a class variable as defined
in :pep:`526`. It does this by checking if the type of the field is
-``typing.ClassVar``. If a field is a ``ClassVar``, it is excluded
+:data:`typing.ClassVar`. If a field is a ``ClassVar``, it is excluded
from consideration as a field and is ignored by the dataclass
mechanisms. Such ``ClassVar`` pseudo-fields are not returned by the
module-level :func:`fields` function.
+.. _dataclasses-init-only-variables:
+
Init-only variables
-------------------
@@ -595,8 +599,10 @@ value is not provided when creating the class::
c = C(10, database=my_database)
-In this case, :func:`fields` will return :class:`Field` objects for ``i`` and
-``j``, but not for ``database``.
+In this case, :func:`fields` will return :class:`Field` objects for :attr:`!i` and
+:attr:`!j`, but not for :attr:`!database`.
+
+.. _dataclasses-frozen:
Frozen instances
----------------
@@ -611,6 +617,8 @@ There is a tiny performance penalty when using ``frozen=True``:
:meth:`~object.__init__` cannot use simple assignment to initialize fields, and
must use :meth:`!__setattr__`.
+.. _dataclasses-inheritance:
+
Inheritance
-----------
@@ -634,10 +642,10 @@ example::
z: int = 10
x: int = 15
-The final list of fields is, in order, ``x``, ``y``, ``z``. The final
-type of ``x`` is ``int``, as specified in class ``C``.
+The final list of fields is, in order, :attr:`!x`, :attr:`!y`, :attr:`!z`. The final
+type of :attr:`!x` is :class:`int`, as specified in class :class:`!C`.
-The generated :meth:`~object.__init__` method for ``C`` will look like::
+The generated :meth:`~object.__init__` method for :class:`!C` will look like::
def __init__(self, x: int = 15, y: int = 0, z: int = 10):
@@ -650,8 +658,8 @@ keyword-only parameters are moved to come after all regular
keyword-only parameters are implemented in Python: they must come
after non-keyword-only parameters.
-In this example, ``Base.y``, ``Base.w``, and ``D.t`` are keyword-only
-fields, and ``Base.x`` and ``D.z`` are regular fields::
+In this example, :attr:`!Base.y`, :attr:`!Base.w`, and :attr:`!D.t` are keyword-only
+fields, and :attr:`!Base.x` and :attr:`!D.z` are regular fields::
@dataclass
class Base:
@@ -665,7 +673,7 @@ fields, and ``Base.x`` and ``D.z`` are regular fields::
z: int = 10
t: int = field(kw_only=True, default=0)
-The generated :meth:`!__init__` method for ``D`` will look like::
+The generated :meth:`!__init__` method for :class:`!D` will look like::
def __init__(self, x: Any = 15.0, z: int = 10, *, y: int = 0, w: int = 1, t: int = 0):
@@ -680,14 +688,14 @@ re-ordered :meth:`!__init__` parameter list.
Default factory functions
-------------------------
-If a :func:`field` specifies a ``default_factory``, it is called with
+If a :func:`field` specifies a *default_factory*, it is called with
zero arguments when a default value for the field is needed. For
example, to create a new instance of a list, use::
mylist: list = field(default_factory=list)
If a field is excluded from :meth:`~object.__init__` (using ``init=False``)
-and the field also specifies ``default_factory``, then the default
+and the field also specifies *default_factory*, then the default
factory function will always be called from the generated
:meth:`!__init__` function. This happens because there is no other
way to give the field an initial value.
@@ -710,8 +718,8 @@ Consider this example, not using dataclasses::
assert o1.x == [1, 2]
assert o1.x is o2.x
-Note that the two instances of class ``C`` share the same class
-variable ``x``, as expected.
+Note that the two instances of class :class:`!C` share the same class
+variable :attr:`!x`, as expected.
Using dataclasses, *if* this code was valid::
@@ -732,10 +740,10 @@ it would generate code similar to::
assert D().x is D().x
-This has the same issue as the original example using class ``C``.
-That is, two instances of class ``D`` that do not specify a value
-for ``x`` when creating a class instance will share the same copy
-of ``x``. Because dataclasses just use normal Python class
+This has the same issue as the original example using class :class:`!C`.
+That is, two instances of class :class:`!D` that do not specify a value
+for :attr:`!x` when creating a class instance will share the same copy
+of :attr:`!x`. Because dataclasses just use normal Python class
creation they also share this behavior. There is no general way
for Data Classes to detect this condition. Instead, the
:func:`@dataclass <dataclass>` decorator will raise a :exc:`ValueError` if it
@@ -753,8 +761,8 @@ mutable types as default values for fields::
assert D().x is not D().x
.. versionchanged:: 3.11
- Instead of looking for and disallowing objects of type ``list``,
- ``dict``, or ``set``, unhashable objects are now not allowed as
+ Instead of looking for and disallowing objects of type :class:`list`,
+ :class:`dict`, or :class:`set`, unhashable objects are now not allowed as
default values. Unhashability is used to approximate
mutability.
diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst
index 047427d3269027..e8bd51ba20802e 100644
--- a/Doc/library/datetime.rst
+++ b/Doc/library/datetime.rst
@@ -37,7 +37,7 @@ on efficient attribute extraction for output formatting and manipulation.
Package `dateutil <https://dateutil.readthedocs.io/en/stable/>`_
Third-party library with expanded time zone and parsing support.
- Package `DateType <https://pypi.org/project/datetype/>`_
+ Package :pypi:`DateType`
Third-party library that introduces distinct static types to e.g. allow
   :term:`static type checkers <static type checker>`
to differentiate between naive and aware datetimes.
diff --git a/Doc/library/decimal.rst b/Doc/library/decimal.rst
index 8c671bf900712b..3c51dbc04dc92e 100644
--- a/Doc/library/decimal.rst
+++ b/Doc/library/decimal.rst
@@ -1517,7 +1517,7 @@ are also included in the pure Python version for compatibility.
the C version uses a thread-local rather than a coroutine-local context and the value
is ``False``. This is slightly faster in some nested context scenarios.
-.. versionadded:: 3.8.3
+ .. versionadded:: 3.8.3
Rounding modes
diff --git a/Doc/library/glob.rst b/Doc/library/glob.rst
index 15fef747296ed4..ab6da98bc74ad2 100644
--- a/Doc/library/glob.rst
+++ b/Doc/library/glob.rst
@@ -75,6 +75,10 @@ The :mod:`glob` module defines the following functions:
Using the "``**``" pattern in large directory trees may consume
an inordinate amount of time.
+ .. note::
+ This function may return duplicate path names if *pathname*
+ contains multiple "``**``" patterns and *recursive* is true.
+
.. versionchanged:: 3.5
Support for recursive globs using "``**``".
@@ -94,6 +98,10 @@ The :mod:`glob` module defines the following functions:
.. audit-event:: glob.glob pathname,recursive glob.iglob
.. audit-event:: glob.glob/2 pathname,recursive,root_dir,dir_fd glob.iglob
+ .. note::
+ This function may return duplicate path names if *pathname*
+ contains multiple "``**``" patterns and *recursive* is true.
+
.. versionchanged:: 3.5
Support for recursive globs using "``**``".
diff --git a/Doc/library/heapq.rst b/Doc/library/heapq.rst
index ddbada13bddf5b..ad407141a2f590 100644
--- a/Doc/library/heapq.rst
+++ b/Doc/library/heapq.rst
@@ -17,7 +17,9 @@ This module provides an implementation of the heap queue algorithm, also known
as the priority queue algorithm.
Heaps are binary trees for which every parent node has a value less than or
-equal to any of its children. This implementation uses arrays for which
+equal to any of its children. We refer to this condition as the heap invariant.
+
+This implementation uses arrays for which
``heap[k] <= heap[2*k+1]`` and ``heap[k] <= heap[2*k+2]`` for all *k*, counting
elements from zero. For the sake of comparison, non-existing elements are
considered to be infinite. The interesting property of a heap is that its
@@ -319,4 +321,3 @@ applications, and I think it is good to keep a 'heap' module around. :-)
backwards, and this was also used to avoid the rewinding time. Believe me, real
good tape sorts were quite spectacular to watch! From all times, sorting has
always been a Great Art! :-)
-
diff --git a/Doc/library/http.rst b/Doc/library/http.rst
index 5e1912716e5319..998d6e73f9dd82 100644
--- a/Doc/library/http.rst
+++ b/Doc/library/http.rst
@@ -59,63 +59,63 @@ available in :class:`http.HTTPStatus` are:
======= =================================== ==================================================================
Code Enum Name Details
======= =================================== ==================================================================
-``100`` ``CONTINUE`` HTTP/1.1 :rfc:`7231`, Section 6.2.1
-``101`` ``SWITCHING_PROTOCOLS`` HTTP/1.1 :rfc:`7231`, Section 6.2.2
+``100`` ``CONTINUE`` HTTP Semantics :rfc:`9110`, Section 15.2.1
+``101`` ``SWITCHING_PROTOCOLS`` HTTP Semantics :rfc:`9110`, Section 15.2.2
``102`` ``PROCESSING`` WebDAV :rfc:`2518`, Section 10.1
``103`` ``EARLY_HINTS`` An HTTP Status Code for Indicating Hints :rfc:`8297`
-``200`` ``OK`` HTTP/1.1 :rfc:`7231`, Section 6.3.1
-``201`` ``CREATED`` HTTP/1.1 :rfc:`7231`, Section 6.3.2
-``202`` ``ACCEPTED`` HTTP/1.1 :rfc:`7231`, Section 6.3.3
-``203`` ``NON_AUTHORITATIVE_INFORMATION`` HTTP/1.1 :rfc:`7231`, Section 6.3.4
-``204`` ``NO_CONTENT`` HTTP/1.1 :rfc:`7231`, Section 6.3.5
-``205`` ``RESET_CONTENT`` HTTP/1.1 :rfc:`7231`, Section 6.3.6
-``206`` ``PARTIAL_CONTENT`` HTTP/1.1 :rfc:`7233`, Section 4.1
+``200`` ``OK`` HTTP Semantics :rfc:`9110`, Section 15.3.1
+``201`` ``CREATED`` HTTP Semantics :rfc:`9110`, Section 15.3.2
+``202`` ``ACCEPTED`` HTTP Semantics :rfc:`9110`, Section 15.3.3
+``203`` ``NON_AUTHORITATIVE_INFORMATION`` HTTP Semantics :rfc:`9110`, Section 15.3.4
+``204`` ``NO_CONTENT`` HTTP Semantics :rfc:`9110`, Section 15.3.5
+``205`` ``RESET_CONTENT`` HTTP Semantics :rfc:`9110`, Section 15.3.6
+``206`` ``PARTIAL_CONTENT`` HTTP Semantics :rfc:`9110`, Section 15.3.7
``207`` ``MULTI_STATUS`` WebDAV :rfc:`4918`, Section 11.1
``208`` ``ALREADY_REPORTED`` WebDAV Binding Extensions :rfc:`5842`, Section 7.1 (Experimental)
``226`` ``IM_USED`` Delta Encoding in HTTP :rfc:`3229`, Section 10.4.1
-``300`` ``MULTIPLE_CHOICES`` HTTP/1.1 :rfc:`7231`, Section 6.4.1
-``301`` ``MOVED_PERMANENTLY`` HTTP/1.1 :rfc:`7231`, Section 6.4.2
-``302`` ``FOUND`` HTTP/1.1 :rfc:`7231`, Section 6.4.3
-``303`` ``SEE_OTHER`` HTTP/1.1 :rfc:`7231`, Section 6.4.4
-``304`` ``NOT_MODIFIED`` HTTP/1.1 :rfc:`7232`, Section 4.1
-``305`` ``USE_PROXY`` HTTP/1.1 :rfc:`7231`, Section 6.4.5
-``307`` ``TEMPORARY_REDIRECT`` HTTP/1.1 :rfc:`7231`, Section 6.4.7
-``308`` ``PERMANENT_REDIRECT`` Permanent Redirect :rfc:`7238`, Section 3 (Experimental)
-``400`` ``BAD_REQUEST`` HTTP/1.1 :rfc:`7231`, Section 6.5.1
-``401`` ``UNAUTHORIZED`` HTTP/1.1 Authentication :rfc:`7235`, Section 3.1
-``402`` ``PAYMENT_REQUIRED`` HTTP/1.1 :rfc:`7231`, Section 6.5.2
-``403`` ``FORBIDDEN`` HTTP/1.1 :rfc:`7231`, Section 6.5.3
-``404`` ``NOT_FOUND`` HTTP/1.1 :rfc:`7231`, Section 6.5.4
-``405`` ``METHOD_NOT_ALLOWED`` HTTP/1.1 :rfc:`7231`, Section 6.5.5
-``406`` ``NOT_ACCEPTABLE`` HTTP/1.1 :rfc:`7231`, Section 6.5.6
-``407`` ``PROXY_AUTHENTICATION_REQUIRED`` HTTP/1.1 Authentication :rfc:`7235`, Section 3.2
-``408`` ``REQUEST_TIMEOUT`` HTTP/1.1 :rfc:`7231`, Section 6.5.7
-``409`` ``CONFLICT`` HTTP/1.1 :rfc:`7231`, Section 6.5.8
-``410`` ``GONE`` HTTP/1.1 :rfc:`7231`, Section 6.5.9
-``411`` ``LENGTH_REQUIRED`` HTTP/1.1 :rfc:`7231`, Section 6.5.10
-``412`` ``PRECONDITION_FAILED`` HTTP/1.1 :rfc:`7232`, Section 4.2
-``413`` ``REQUEST_ENTITY_TOO_LARGE`` HTTP/1.1 :rfc:`7231`, Section 6.5.11
-``414`` ``REQUEST_URI_TOO_LONG`` HTTP/1.1 :rfc:`7231`, Section 6.5.12
-``415`` ``UNSUPPORTED_MEDIA_TYPE`` HTTP/1.1 :rfc:`7231`, Section 6.5.13
-``416`` ``REQUESTED_RANGE_NOT_SATISFIABLE`` HTTP/1.1 Range Requests :rfc:`7233`, Section 4.4
-``417`` ``EXPECTATION_FAILED`` HTTP/1.1 :rfc:`7231`, Section 6.5.14
+``300`` ``MULTIPLE_CHOICES`` HTTP Semantics :rfc:`9110`, Section 15.4.1
+``301`` ``MOVED_PERMANENTLY`` HTTP Semantics :rfc:`9110`, Section 15.4.2
+``302`` ``FOUND`` HTTP Semantics :rfc:`9110`, Section 15.4.3
+``303`` ``SEE_OTHER`` HTTP Semantics :rfc:`9110`, Section 15.4.4
+``304`` ``NOT_MODIFIED`` HTTP Semantics :rfc:`9110`, Section 15.4.5
+``305`` ``USE_PROXY`` HTTP Semantics :rfc:`9110`, Section 15.4.6
+``307`` ``TEMPORARY_REDIRECT`` HTTP Semantics :rfc:`9110`, Section 15.4.8
+``308`` ``PERMANENT_REDIRECT`` HTTP Semantics :rfc:`9110`, Section 15.4.9
+``400`` ``BAD_REQUEST`` HTTP Semantics :rfc:`9110`, Section 15.5.1
+``401`` ``UNAUTHORIZED`` HTTP Semantics :rfc:`9110`, Section 15.5.2
+``402`` ``PAYMENT_REQUIRED`` HTTP Semantics :rfc:`9110`, Section 15.5.3
+``403`` ``FORBIDDEN`` HTTP Semantics :rfc:`9110`, Section 15.5.4
+``404`` ``NOT_FOUND`` HTTP Semantics :rfc:`9110`, Section 15.5.5
+``405`` ``METHOD_NOT_ALLOWED`` HTTP Semantics :rfc:`9110`, Section 15.5.6
+``406`` ``NOT_ACCEPTABLE`` HTTP Semantics :rfc:`9110`, Section 15.5.7
+``407`` ``PROXY_AUTHENTICATION_REQUIRED`` HTTP Semantics :rfc:`9110`, Section 15.5.8
+``408`` ``REQUEST_TIMEOUT`` HTTP Semantics :rfc:`9110`, Section 15.5.9
+``409`` ``CONFLICT`` HTTP Semantics :rfc:`9110`, Section 15.5.10
+``410`` ``GONE`` HTTP Semantics :rfc:`9110`, Section 15.5.11
+``411`` ``LENGTH_REQUIRED`` HTTP Semantics :rfc:`9110`, Section 15.5.12
+``412`` ``PRECONDITION_FAILED`` HTTP Semantics :rfc:`9110`, Section 15.5.13
+``413`` ``CONTENT_TOO_LARGE`` HTTP Semantics :rfc:`9110`, Section 15.5.14
+``414`` ``URI_TOO_LONG`` HTTP Semantics :rfc:`9110`, Section 15.5.15
+``415`` ``UNSUPPORTED_MEDIA_TYPE`` HTTP Semantics :rfc:`9110`, Section 15.5.16
+``416`` ``RANGE_NOT_SATISFIABLE`` HTTP Semantics :rfc:`9110`, Section 15.5.17
+``417`` ``EXPECTATION_FAILED`` HTTP Semantics :rfc:`9110`, Section 15.5.18
``418`` ``IM_A_TEAPOT`` HTCPCP/1.0 :rfc:`2324`, Section 2.3.2
-``421`` ``MISDIRECTED_REQUEST`` HTTP/2 :rfc:`7540`, Section 9.1.2
-``422`` ``UNPROCESSABLE_ENTITY`` WebDAV :rfc:`4918`, Section 11.2
+``421`` ``MISDIRECTED_REQUEST`` HTTP Semantics :rfc:`9110`, Section 15.5.20
+``422`` ``UNPROCESSABLE_CONTENT`` HTTP Semantics :rfc:`9110`, Section 15.5.21
``423`` ``LOCKED`` WebDAV :rfc:`4918`, Section 11.3
``424`` ``FAILED_DEPENDENCY`` WebDAV :rfc:`4918`, Section 11.4
``425`` ``TOO_EARLY`` Using Early Data in HTTP :rfc:`8470`
-``426`` ``UPGRADE_REQUIRED`` HTTP/1.1 :rfc:`7231`, Section 6.5.15
+``426`` ``UPGRADE_REQUIRED`` HTTP Semantics :rfc:`9110`, Section 15.5.22
``428`` ``PRECONDITION_REQUIRED`` Additional HTTP Status Codes :rfc:`6585`
``429`` ``TOO_MANY_REQUESTS`` Additional HTTP Status Codes :rfc:`6585`
``431`` ``REQUEST_HEADER_FIELDS_TOO_LARGE`` Additional HTTP Status Codes :rfc:`6585`
``451`` ``UNAVAILABLE_FOR_LEGAL_REASONS`` An HTTP Status Code to Report Legal Obstacles :rfc:`7725`
-``500`` ``INTERNAL_SERVER_ERROR`` HTTP/1.1 :rfc:`7231`, Section 6.6.1
-``501`` ``NOT_IMPLEMENTED`` HTTP/1.1 :rfc:`7231`, Section 6.6.2
-``502`` ``BAD_GATEWAY`` HTTP/1.1 :rfc:`7231`, Section 6.6.3
-``503`` ``SERVICE_UNAVAILABLE`` HTTP/1.1 :rfc:`7231`, Section 6.6.4
-``504`` ``GATEWAY_TIMEOUT`` HTTP/1.1 :rfc:`7231`, Section 6.6.5
-``505`` ``HTTP_VERSION_NOT_SUPPORTED`` HTTP/1.1 :rfc:`7231`, Section 6.6.6
+``500`` ``INTERNAL_SERVER_ERROR`` HTTP Semantics :rfc:`9110`, Section 15.6.1
+``501`` ``NOT_IMPLEMENTED`` HTTP Semantics :rfc:`9110`, Section 15.6.2
+``502`` ``BAD_GATEWAY`` HTTP Semantics :rfc:`9110`, Section 15.6.3
+``503`` ``SERVICE_UNAVAILABLE`` HTTP Semantics :rfc:`9110`, Section 15.6.4
+``504`` ``GATEWAY_TIMEOUT`` HTTP Semantics :rfc:`9110`, Section 15.6.5
+``505`` ``HTTP_VERSION_NOT_SUPPORTED`` HTTP Semantics :rfc:`9110`, Section 15.6.6
``506`` ``VARIANT_ALSO_NEGOTIATES`` Transparent Content Negotiation in HTTP :rfc:`2295`, Section 8.1 (Experimental)
``507`` ``INSUFFICIENT_STORAGE`` WebDAV :rfc:`4918`, Section 11.5
``508`` ``LOOP_DETECTED`` WebDAV Binding Extensions :rfc:`5842`, Section 7.2 (Experimental)
@@ -137,6 +137,10 @@ equal to the constant name (i.e. ``http.HTTPStatus.OK`` is also available as
.. versionadded:: 3.9
Added ``103 EARLY_HINTS``, ``418 IM_A_TEAPOT`` and ``425 TOO_EARLY`` status codes.
+.. versionchanged:: 3.13
+ Implemented :rfc:`9110` naming for status constants. Old constant names are preserved for
+ backwards compatibility.
+
HTTP status category
--------------------
@@ -144,15 +148,15 @@ HTTP status category
The enum values have several properties to indicate the HTTP status category:
-==================== ======================== ===============================
+==================== ======================== ======================================
Property Indicates that Details
-==================== ======================== ===============================
-``is_informational`` ``100 <= status <= 199`` HTTP/1.1 :rfc:`7231`, Section 6
-``is_success`` ``200 <= status <= 299`` HTTP/1.1 :rfc:`7231`, Section 6
-``is_redirection`` ``300 <= status <= 399`` HTTP/1.1 :rfc:`7231`, Section 6
-``is_client_error`` ``400 <= status <= 499`` HTTP/1.1 :rfc:`7231`, Section 6
-``is_server_error`` ``500 <= status <= 599`` HTTP/1.1 :rfc:`7231`, Section 6
-==================== ======================== ===============================
+==================== ======================== ======================================
+``is_informational`` ``100 <= status <= 199`` HTTP Semantics :rfc:`9110`, Section 15
+``is_success`` ``200 <= status <= 299`` HTTP Semantics :rfc:`9110`, Section 15
+``is_redirection`` ``300 <= status <= 399`` HTTP Semantics :rfc:`9110`, Section 15
+``is_client_error`` ``400 <= status <= 499`` HTTP Semantics :rfc:`9110`, Section 15
+``is_server_error`` ``500 <= status <= 599`` HTTP Semantics :rfc:`9110`, Section 15
+==================== ======================== ======================================
Usage::
@@ -203,13 +207,13 @@ available in :class:`http.HTTPMethod` are:
=========== =================================== ==================================================================
Method Enum Name Details
=========== =================================== ==================================================================
-``GET`` ``GET`` HTTP/1.1 :rfc:`7231`, Section 4.3.1
-``HEAD`` ``HEAD`` HTTP/1.1 :rfc:`7231`, Section 4.3.2
-``POST`` ``POST`` HTTP/1.1 :rfc:`7231`, Section 4.3.3
-``PUT`` ``PUT`` HTTP/1.1 :rfc:`7231`, Section 4.3.4
-``DELETE`` ``DELETE`` HTTP/1.1 :rfc:`7231`, Section 4.3.5
-``CONNECT`` ``CONNECT`` HTTP/1.1 :rfc:`7231`, Section 4.3.6
-``OPTIONS`` ``OPTIONS`` HTTP/1.1 :rfc:`7231`, Section 4.3.7
-``TRACE`` ``TRACE`` HTTP/1.1 :rfc:`7231`, Section 4.3.8
+``GET`` ``GET`` HTTP Semantics :rfc:`9110`, Section 9.3.1
+``HEAD`` ``HEAD`` HTTP Semantics :rfc:`9110`, Section 9.3.2
+``POST`` ``POST`` HTTP Semantics :rfc:`9110`, Section 9.3.3
+``PUT`` ``PUT`` HTTP Semantics :rfc:`9110`, Section 9.3.4
+``DELETE`` ``DELETE`` HTTP Semantics :rfc:`9110`, Section 9.3.5
+``CONNECT`` ``CONNECT`` HTTP Semantics :rfc:`9110`, Section 9.3.6
+``OPTIONS`` ``OPTIONS`` HTTP Semantics :rfc:`9110`, Section 9.3.7
+``TRACE`` ``TRACE`` HTTP Semantics :rfc:`9110`, Section 9.3.8
``PATCH`` ``PATCH`` HTTP/1.1 :rfc:`5789`
=========== =================================== ==================================================================
diff --git a/Doc/library/importlib.metadata.rst b/Doc/library/importlib.metadata.rst
index f4fb7791855307..5039bc6e85ee20 100644
--- a/Doc/library/importlib.metadata.rst
+++ b/Doc/library/importlib.metadata.rst
@@ -26,7 +26,7 @@ this package can eliminate the need to use the older and less efficient
``importlib.metadata`` operates on third-party *distribution packages*
installed into Python's ``site-packages`` directory via tools such as
-`pip `_.
+:pypi:`pip`.
Specifically, it works with distributions with discoverable
``dist-info`` or ``egg-info`` directories,
and metadata defined by the `Core metadata specifications `_.
@@ -177,7 +177,7 @@ for more information on entry points, their definition, and usage.
no parameters and always returned a dictionary of entry points, keyed
by group. With ``importlib_metadata`` 5.0 and Python 3.12,
``entry_points`` always returns an ``EntryPoints`` object. See
- `backports.entry_points_selectable `_
+ :pypi:`backports.entry_points_selectable`
for compatibility options.
.. versionchanged:: 3.13
diff --git a/Doc/library/ipaddress.rst b/Doc/library/ipaddress.rst
index 8f090b5eec5980..a4073a4dac86b9 100644
--- a/Doc/library/ipaddress.rst
+++ b/Doc/library/ipaddress.rst
@@ -334,14 +334,14 @@ write code that handles both IP versions correctly. Address objects are
.. attribute:: is_multicast
.. attribute:: is_private
.. attribute:: is_global
+
+ .. versionadded:: 3.4
+
.. attribute:: is_unspecified
.. attribute:: is_reserved
.. attribute:: is_loopback
.. attribute:: is_link_local
- .. versionadded:: 3.4
- is_global
-
.. attribute:: is_site_local
``True`` if the address is reserved for site-local usage. Note that
diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst
index b092efe0dc362b..9a5cb8be37d349 100644
--- a/Doc/library/itertools.rst
+++ b/Doc/library/itertools.rst
@@ -791,7 +791,7 @@ recipes. Currently, the ``sliding_window()``, ``iter_index()``, and ``sieve()``
recipes are being tested to see whether they prove their worth.
Substantially all of these recipes and many, many others can be installed from
-the `more-itertools project `_ found
+the :pypi:`more-itertools` project found
on the Python Package Index::
python -m pip install more-itertools
diff --git a/Doc/library/logging.rst b/Doc/library/logging.rst
index 7816cc20945fa8..a733b288ecb6d0 100644
--- a/Doc/library/logging.rst
+++ b/Doc/library/logging.rst
@@ -1003,7 +1003,7 @@ the options available to you.
| | | portion of the time). |
+----------------+-------------------------+-----------------------------------------------+
| created | ``%(created)f`` | Time when the :class:`LogRecord` was created |
-| | | (as returned by :func:`time.time`). |
+| | | (as returned by :func:`time.time_ns` / 1e9). |
+----------------+-------------------------+-----------------------------------------------+
| exc_info | You shouldn't need to | Exception tuple (à la ``sys.exc_info``) or, |
| | format this yourself. | if no exception has occurred, ``None``. |
diff --git a/Doc/library/os.path.rst b/Doc/library/os.path.rst
index dcc877da0b3122..ebeb3bb50b8b1f 100644
--- a/Doc/library/os.path.rst
+++ b/Doc/library/os.path.rst
@@ -145,7 +145,7 @@ the :mod:`glob` module.)
.. function:: lexists(path)
- Return ``True`` if *path* refers to an existing path. Returns ``True`` for
+ Return ``True`` if *path* refers to an existing path, including
broken symbolic links. Equivalent to :func:`exists` on platforms lacking
:func:`os.lstat`.
@@ -409,9 +409,8 @@ the :mod:`glob` module.)
style names such as ``C:\\PROGRA~1`` to ``C:\\Program Files``.
If a path doesn't exist or a symlink loop is encountered, and *strict* is
- ``True``, :exc:`OSError` is raised. If *strict* is ``False``, the path is
- resolved as far as possible and any remainder is appended without checking
- whether it exists.
+ ``True``, :exc:`OSError` is raised. If *strict* is ``False`` these errors
+ are ignored, and so the result might be missing or otherwise inaccessible.
.. note::
This function emulates the operating system's procedure for making a path
diff --git a/Doc/library/pathlib.rst b/Doc/library/pathlib.rst
index f4ed479401f65c..2e18e41869376e 100644
--- a/Doc/library/pathlib.rst
+++ b/Doc/library/pathlib.rst
@@ -1004,10 +1004,6 @@ call fails (for example because the path doesn't exist).
.. seealso::
:ref:`pathlib-pattern-language` documentation.
- This method calls :meth:`Path.is_dir` on the top-level directory and
- propagates any :exc:`OSError` exception that is raised. Subsequent
- :exc:`OSError` exceptions from scanning directories are suppressed.
-
By default, or when the *case_sensitive* keyword-only argument is set to
``None``, this method matches paths using platform-specific casing rules:
typically, case-sensitive on POSIX, and case-insensitive on Windows.
@@ -1028,6 +1024,11 @@ call fails (for example because the path doesn't exist).
.. versionchanged:: 3.13
The *pattern* parameter accepts a :term:`path-like object`.
+ .. versionchanged:: 3.13
+ Any :exc:`OSError` exceptions raised from scanning the filesystem are
+ suppressed. In previous versions, such exceptions were suppressed in many
+ cases, but not all.
+
.. method:: Path.rglob(pattern, *, case_sensitive=None, recurse_symlinks=False)
diff --git a/Doc/library/platform.rst b/Doc/library/platform.rst
index 069dab791dcbe5..66af37e3073852 100644
--- a/Doc/library/platform.rst
+++ b/Doc/library/platform.rst
@@ -219,8 +219,8 @@ Windows Platform
default to an empty string).
As a hint: *ptype* is ``'Uniprocessor Free'`` on single processor NT machines
- and ``'Multiprocessor Free'`` on multi processor machines. The *'Free'* refers
- to the OS version being free of debugging code. It could also state *'Checked'*
+ and ``'Multiprocessor Free'`` on multi processor machines. The ``'Free'`` refers
+ to the OS version being free of debugging code. It could also state ``'Checked'``
which means the OS version uses debugging code, i.e. code that checks arguments,
ranges, etc.
diff --git a/Doc/library/queue.rst b/Doc/library/queue.rst
index f2a6dbf589fd87..fce23313c7de28 100644
--- a/Doc/library/queue.rst
+++ b/Doc/library/queue.rst
@@ -245,8 +245,10 @@ them down.
queue is empty. Set *immediate* to true to make :meth:`~Queue.get` raise
immediately instead.
- All blocked callers of :meth:`~Queue.put` will be unblocked. If *immediate*
- is true, also unblock callers of :meth:`~Queue.get` and :meth:`~Queue.join`.
+ All blocked callers of :meth:`~Queue.put` and :meth:`~Queue.get` will be
+ unblocked. If *immediate* is true, a task will be marked as done for each
+ remaining item in the queue, which may unblock callers of
+ :meth:`~Queue.join`.
.. versionadded:: 3.13
diff --git a/Doc/library/re.rst b/Doc/library/re.rst
index 0336121c2bc631..fe7da856076819 100644
--- a/Doc/library/re.rst
+++ b/Doc/library/re.rst
@@ -48,7 +48,7 @@ fine-tuning parameters.
.. seealso::
- The third-party `regex `_ module,
+ The third-party :pypi:`regex` module,
which has an API compatible with the standard library :mod:`re` module,
but offers additional functionality and a more thorough Unicode support.
diff --git a/Doc/library/secrets.rst b/Doc/library/secrets.rst
index 4405dfc0535973..8f1a68d1d8816c 100644
--- a/Doc/library/secrets.rst
+++ b/Doc/library/secrets.rst
@@ -155,7 +155,7 @@ Generate an eight-character alphanumeric password:
.. note::
Applications should not
- `store passwords in a recoverable format `_,
+ :cwe:`store passwords in a recoverable format <257>`,
whether plain text or encrypted. They should be salted and hashed
using a cryptographically strong one-way (irreversible) hash function.
diff --git a/Doc/library/shlex.rst b/Doc/library/shlex.rst
index f94833ad5331a9..716420f5e74ffa 100644
--- a/Doc/library/shlex.rst
+++ b/Doc/library/shlex.rst
@@ -412,17 +412,17 @@ otherwise. To illustrate, you can see the difference in the following snippet:
.. doctest::
:options: +NORMALIZE_WHITESPACE
- >>> import shlex
- >>> text = "a && b; c && d || e; f >'abc'; (def \"ghi\")"
- >>> s = shlex.shlex(text, posix=True)
- >>> s.whitespace_split = True
- >>> list(s)
- ['a', '&&', 'b;', 'c', '&&', 'd', '||', 'e;', 'f', '>abc;', '(def', 'ghi)']
- >>> s = shlex.shlex(text, posix=True, punctuation_chars=True)
- >>> s.whitespace_split = True
- >>> list(s)
- ['a', '&&', 'b', ';', 'c', '&&', 'd', '||', 'e', ';', 'f', '>', 'abc', ';',
- '(', 'def', 'ghi', ')']
+ >>> import shlex
+ >>> text = "a && b; c && d || e; f >'abc'; (def \"ghi\")"
+ >>> s = shlex.shlex(text, posix=True)
+ >>> s.whitespace_split = True
+ >>> list(s)
+ ['a', '&&', 'b;', 'c', '&&', 'd', '||', 'e;', 'f', '>abc;', '(def', 'ghi)']
+ >>> s = shlex.shlex(text, posix=True, punctuation_chars=True)
+ >>> s.whitespace_split = True
+ >>> list(s)
+ ['a', '&&', 'b', ';', 'c', '&&', 'd', '||', 'e', ';', 'f', '>', 'abc', ';',
+ '(', 'def', 'ghi', ')']
Of course, tokens will be returned which are not valid for shells, and you'll
need to implement your own error checks on the returned tokens.
@@ -431,10 +431,10 @@ Instead of passing ``True`` as the value for the punctuation_chars parameter,
you can pass a string with specific characters, which will be used to determine
which characters constitute punctuation. For example::
- >>> import shlex
- >>> s = shlex.shlex("a && b || c", punctuation_chars="|")
- >>> list(s)
- ['a', '&', '&', 'b', '||', 'c']
+ >>> import shlex
+ >>> s = shlex.shlex("a && b || c", punctuation_chars="|")
+ >>> list(s)
+ ['a', '&', '&', 'b', '||', 'c']
.. note:: When ``punctuation_chars`` is specified, the :attr:`~shlex.wordchars`
attribute is augmented with the characters ``~-./*?=``. That is because these
diff --git a/Doc/library/site.rst b/Doc/library/site.rst
index 2dc9fb09d727e2..e52bbd32d4d493 100644
--- a/Doc/library/site.rst
+++ b/Doc/library/site.rst
@@ -74,6 +74,10 @@ with ``import`` (followed by space or tab) are executed.
Limiting a code chunk to a single line is a deliberate measure
to discourage putting anything more complex here.
+.. versionchanged:: 3.13
+ The :file:`.pth` files are now decoded with UTF-8 first, and then with the
+ :term:`locale encoding` if UTF-8 decoding fails.
+
.. index::
single: package
triple: path; configuration; file
diff --git a/Doc/library/sqlite3.rst b/Doc/library/sqlite3.rst
index e76dc91bf2d875..e6961821b639b9 100644
--- a/Doc/library/sqlite3.rst
+++ b/Doc/library/sqlite3.rst
@@ -16,6 +16,8 @@
src = sqlite3.connect(":memory:", isolation_level=None)
dst = sqlite3.connect("tutorial.db", isolation_level=None)
src.backup(dst)
+ src.close()
+ dst.close()
del src, dst
.. _sqlite3-intro:
@@ -220,6 +222,7 @@ creating a new cursor, then querying the database:
>>> title, year = res.fetchone()
>>> print(f'The highest scoring Monty Python movie is {title!r}, released in {year}')
The highest scoring Monty Python movie is 'Monty Python and the Holy Grail', released in 1975
+ >>> new_con.close()
You've now created an SQLite database using the :mod:`!sqlite3` module,
inserted data and retrieved values from it in multiple ways.
@@ -394,29 +397,11 @@ Module functions
will get tracebacks from callbacks on :data:`sys.stderr`. Use ``False``
to disable the feature again.
- Register an :func:`unraisable hook handler ` for an
- improved debug experience:
-
- .. testsetup:: sqlite3.trace
-
- import sqlite3
+ .. note::
- .. doctest:: sqlite3.trace
-
- >>> sqlite3.enable_callback_tracebacks(True)
- >>> con = sqlite3.connect(":memory:")
- >>> def evil_trace(stmt):
- ... 5/0
- ...
- >>> con.set_trace_callback(evil_trace)
- >>> def debug(unraisable):
- ... print(f"{unraisable.exc_value!r} in callback {unraisable.object.__name__}")
- ... print(f"Error message: {unraisable.err_msg}")
- >>> import sys
- >>> sys.unraisablehook = debug
- >>> cur = con.execute("SELECT 1")
- ZeroDivisionError('division by zero') in callback evil_trace
- Error message: None
+ Errors in user-defined function callbacks are logged as unraisable exceptions.
+ Use an :func:`unraisable hook handler ` for
+ introspection of the failed callback.
.. function:: register_adapter(type, adapter, /)
@@ -762,6 +747,7 @@ Connection objects
>>> for row in con.execute("SELECT md5(?)", (b"foo",)):
... print(row)
('acbd18db4cc2f85cedef654fccc4a4d8',)
+ >>> con.close()
.. versionchanged:: 3.13
@@ -908,6 +894,7 @@ Connection objects
FROM test ORDER BY x
""")
print(cur.fetchall())
+ con.close()
.. testoutput::
:hide:
@@ -1068,13 +1055,10 @@ Connection objects
.. versionchanged:: 3.10
Added the ``sqlite3.enable_load_extension`` auditing event.
- .. testsetup:: sqlite3.loadext
-
- import sqlite3
- con = sqlite3.connect(":memory:")
+ .. We cannot doctest the load extension API, since there is no convenient
+ way to skip it.
- .. testcode:: sqlite3.loadext
- :skipif: True # not testable at the moment
+ .. code-block::
con.enable_load_extension(True)
@@ -1098,14 +1082,6 @@ Connection objects
for row in con.execute("SELECT rowid, name, ingredients FROM recipe WHERE name MATCH 'pie'"):
print(row)
- con.close()
-
- .. testoutput:: sqlite3.loadext
- :hide:
-
- (2, 'broccoli pie', 'broccoli cheese onions flour')
- (3, 'pumpkin pie', 'pumpkin sugar flour butter')
-
.. method:: load_extension(path, /, *, entrypoint=None)
Load an SQLite extension from a shared library.
@@ -1230,6 +1206,8 @@ Connection objects
src = sqlite3.connect('example.db')
dst = sqlite3.connect(':memory:')
src.backup(dst)
+ dst.close()
+ src.close()
.. versionadded:: 3.7
@@ -1296,6 +1274,10 @@ Connection objects
>>> con.getlimit(sqlite3.SQLITE_LIMIT_ATTACHED)
1
+ .. testcleanup:: sqlite3.limits
+
+ con.close()
+
.. versionadded:: 3.11
.. _SQLite limit category: https://www.sqlite.org/c3ref/c_limit_attached.html
@@ -1577,6 +1559,10 @@ Cursor objects
# cur is an sqlite3.Cursor object
cur.executemany("INSERT INTO data VALUES(?)", rows)
+ .. testcleanup:: sqlite3.cursor
+
+ con.close()
+
.. note::
Any resulting rows are discarded,
@@ -1682,6 +1668,7 @@ Cursor objects
>>> cur = con.cursor()
>>> cur.connection == con
True
+ >>> con.close()
.. attribute:: description
@@ -1802,6 +1789,7 @@ Blob objects
greeting = blob.read()
print(greeting) # outputs "b'Hello, world!'"
+ con.close()
.. testoutput::
:hide:
@@ -2114,6 +2102,7 @@ Here's an example of both styles:
params = (1972,)
cur.execute("SELECT * FROM lang WHERE first_appeared = ?", params)
print(cur.fetchall())
+ con.close()
.. testoutput::
:hide:
@@ -2172,6 +2161,7 @@ The object passed to *protocol* will be of type :class:`PrepareProtocol`.
cur.execute("SELECT ?", (Point(4.0, -3.2),))
print(cur.fetchone()[0])
+ con.close()
.. testoutput::
:hide:
@@ -2202,6 +2192,7 @@ This function can then be registered using :func:`register_adapter`.
cur.execute("SELECT ?", (Point(1.0, 2.5),))
print(cur.fetchone()[0])
+ con.close()
.. testoutput::
:hide:
@@ -2286,6 +2277,8 @@ The following example illustrates the implicit and explicit approaches:
cur.execute("INSERT INTO test(p) VALUES(?)", (p,))
cur.execute('SELECT p AS "p [point]" FROM test')
print("with column names:", cur.fetchone()[0])
+ cur.close()
+ con.close()
.. testoutput::
:hide:
@@ -2492,6 +2485,8 @@ Some useful URI tricks include:
res = con2.execute("SELECT data FROM shared")
assert res.fetchone() == (28,)
+ con1.close()
+ con2.close()
More information about this feature, including a list of parameters,
can be found in the `SQLite URI documentation`_.
@@ -2538,6 +2533,7 @@ Queries now return :class:`!Row` objects:
'Earth'
>>> row["RADIUS"] # Column names are case-insensitive.
6378
+ >>> con.close()
.. note::
@@ -2564,6 +2560,7 @@ Using it, queries now return a :class:`!dict` instead of a :class:`!tuple`:
>>> for row in con.execute("SELECT 1 AS a, 2 AS b"):
... print(row)
{'a': 1, 'b': 2}
+ >>> con.close()
The following row factory returns a :term:`named tuple`:
@@ -2590,6 +2587,7 @@ The following row factory returns a :term:`named tuple`:
1
>>> row.b # Attribute access.
2
+ >>> con.close()
With some adjustments, the above recipe can be adapted to use a
:class:`~dataclasses.dataclass`, or any other custom class,
@@ -2747,3 +2745,11 @@ regardless of the value of :attr:`~Connection.isolation_level`.
.. _SQLite transaction behaviour:
https://www.sqlite.org/lang_transaction.html#deferred_immediate_and_exclusive_transactions
+
+.. testcleanup::
+
+ import os
+ os.remove("backup.db")
+ os.remove("dump.sql")
+ os.remove("example.db")
+ os.remove("tutorial.db")
diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst
index 3564855594c32d..a90436286ca819 100644
--- a/Doc/library/ssl.rst
+++ b/Doc/library/ssl.rst
@@ -1820,7 +1820,7 @@ to speed up repeated connections from the same clients.
.. versionchanged:: 3.6
*session* argument was added.
- .. versionchanged:: 3.7
+ .. versionchanged:: 3.7
The method returns an instance of :attr:`SSLContext.sslsocket_class`
instead of hard-coded :class:`SSLSocket`.
diff --git a/Doc/library/statistics.rst b/Doc/library/statistics.rst
index 197c123f8356d8..873ccd650f45cd 100644
--- a/Doc/library/statistics.rst
+++ b/Doc/library/statistics.rst
@@ -1163,7 +1163,7 @@ accurately approximated inverse cumulative distribution function.
.. testcode::
from random import choice, random, seed
- from math import sqrt, log, pi, tan, asin
+ from math import sqrt, log, pi, tan, asin, cos, acos
from statistics import NormalDist
kernel_invcdfs = {
@@ -1172,6 +1172,7 @@ accurately approximated inverse cumulative distribution function.
'sigmoid': lambda p: log(tan(p * pi/2)),
'rectangular': lambda p: 2*p - 1,
'triangular': lambda p: sqrt(2*p) - 1 if p < 0.5 else 1 - sqrt(2 - 2*p),
+ 'parabolic': lambda p: 2 * cos((acos(2*p-1) + pi) / 3),
'cosine': lambda p: 2*asin(2*p - 1)/pi,
}
diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst
index 62fc10997fc5b5..fc613d4dbe1b5c 100644
--- a/Doc/library/stdtypes.rst
+++ b/Doc/library/stdtypes.rst
@@ -5559,8 +5559,7 @@ a string to a binary integer or a binary integer to a string in linear time,
have sub-quadratic complexity. Converting a large value such as ``int('1' *
500_000)`` can take over a second on a fast CPU.
-Limiting conversion size offers a practical way to avoid `CVE-2020-10735
-`_.
+Limiting conversion size offers a practical way to avoid :cve:`2020-10735`.
The limit is applied to the number of digit characters in the input or output
string when a non-linear conversion algorithm would be involved. Underscores
diff --git a/Doc/library/tomllib.rst b/Doc/library/tomllib.rst
index f9e2dfeb13dc87..406985b84471f2 100644
--- a/Doc/library/tomllib.rst
+++ b/Doc/library/tomllib.rst
@@ -19,14 +19,14 @@ support writing TOML.
.. seealso::
- The `Tomli-W package `__
+ The :pypi:`Tomli-W package <tomli-w>`
is a TOML writer that can be used in conjunction with this module,
providing a write API familiar to users of the standard library
:mod:`marshal` and :mod:`pickle` modules.
.. seealso::
- The `TOML Kit package `__
+ The :pypi:`TOML Kit package <tomlkit>`
is a style-preserving TOML library with both read and write capability.
It is a recommended replacement for this module for editing already
existing TOML files.
diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst
index 73214e18d556b2..31cf225ebf8fab 100644
--- a/Doc/library/typing.rst
+++ b/Doc/library/typing.rst
@@ -39,7 +39,7 @@ they can also be more complex. The :mod:`typing` module provides a vocabulary of
more advanced type hints.
New features are frequently added to the ``typing`` module.
-The `typing_extensions `_ package
+The :pypi:`typing_extensions` package
provides backports of these new features to older versions of Python.
.. seealso::
@@ -1385,22 +1385,23 @@ These can be used as types in annotations. They all support subscription using
.. versionadded:: 3.9
-.. data:: TypeGuard
+.. data:: TypeIs
- Special typing construct for marking user-defined type guard functions.
+ Special typing construct for marking user-defined type predicate functions.
- ``TypeGuard`` can be used to annotate the return type of a user-defined
- type guard function. ``TypeGuard`` only accepts a single type argument.
- At runtime, functions marked this way should return a boolean.
+ ``TypeIs`` can be used to annotate the return type of a user-defined
+ type predicate function. ``TypeIs`` only accepts a single type argument.
+ At runtime, functions marked this way should return a boolean and take at
+ least one positional argument.
- ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
+ ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static
type checkers to determine a more precise type of an expression within a
program's code flow. Usually type narrowing is done by analyzing
conditional code flow and applying the narrowing to a block of code. The
- conditional expression here is sometimes referred to as a "type guard"::
+ conditional expression here is sometimes referred to as a "type predicate"::
def is_str(val: str | float):
- # "isinstance" type guard
+ # "isinstance" type predicate
if isinstance(val, str):
# Type of ``val`` is narrowed to ``str``
...
@@ -1409,8 +1410,73 @@ These can be used as types in annotations. They all support subscription using
...
Sometimes it would be convenient to use a user-defined boolean function
- as a type guard. Such a function should use ``TypeGuard[...]`` as its
- return type to alert static type checkers to this intention.
+ as a type predicate. Such a function should use ``TypeIs[...]`` or
+ :data:`TypeGuard` as its return type to alert static type checkers to
+ this intention. ``TypeIs`` usually has more intuitive behavior than
+ ``TypeGuard``, but it cannot be used when the input and output types
+ are incompatible (e.g., ``list[object]`` to ``list[int]``) or when the
+ function does not return ``True`` for all instances of the narrowed type.
+
+ Using ``-> TypeIs[NarrowedType]`` tells the static type checker that for a given
+ function:
+
+ 1. The return value is a boolean.
+ 2. If the return value is ``True``, the type of its argument
+ is the intersection of the argument's original type and ``NarrowedType``.
+ 3. If the return value is ``False``, the type of its argument
+ is narrowed to exclude ``NarrowedType``.
+
+ For example::
+
+ from typing import assert_type, final, TypeIs
+
+ class Parent: pass
+ class Child(Parent): pass
+ @final
+ class Unrelated: pass
+
+ def is_parent(val: object) -> TypeIs[Parent]:
+ return isinstance(val, Parent)
+
+ def run(arg: Child | Unrelated):
+ if is_parent(arg):
+ # Type of ``arg`` is narrowed to the intersection
+ # of ``Parent`` and ``Child``, which is equivalent to
+ # ``Child``.
+ assert_type(arg, Child)
+ else:
+ # Type of ``arg`` is narrowed to exclude ``Parent``,
+ # so only ``Unrelated`` is left.
+ assert_type(arg, Unrelated)
+
+ The type inside ``TypeIs`` must be consistent with the type of the
+ function's argument; if it is not, static type checkers will raise
+ an error. An incorrectly written ``TypeIs`` function can lead to
+ unsound behavior in the type system; it is the user's responsibility
+ to write such functions in a type-safe manner.
+
+ If a ``TypeIs`` function is a class or instance method, then the type in
+ ``TypeIs`` maps to the type of the second parameter after ``cls`` or
+ ``self``.
+
+ In short, the form ``def foo(arg: TypeA) -> TypeIs[TypeB]: ...``,
+ means that if ``foo(arg)`` returns ``True``, then ``arg`` is an instance
+ of ``TypeB``, and if it returns ``False``, it is not an instance of ``TypeB``.
+
+ ``TypeIs`` also works with type variables. For more information, see
+ :pep:`742` (Narrowing types with ``TypeIs``).
+
+ .. versionadded:: 3.13
+
+
+.. data:: TypeGuard
+
+ Special typing construct for marking user-defined type predicate functions.
+
+ Type predicate functions are user-defined functions that return whether their
+ argument is an instance of a particular type.
+ ``TypeGuard`` works similarly to :data:`TypeIs`, but has subtly different
+ effects on type checking behavior (see below).
Using ``-> TypeGuard`` tells the static type checker that for a given
function:
@@ -1419,6 +1485,8 @@ These can be used as types in annotations. They all support subscription using
2. If the return value is ``True``, the type of its argument
is the type inside ``TypeGuard``.
+ ``TypeGuard`` also works with type variables. See :pep:`647` for more details.
+
For example::
def is_str_list(val: list[object]) -> TypeGuard[list[str]]:
@@ -1433,23 +1501,19 @@ These can be used as types in annotations. They all support subscription using
# Type of ``val`` remains as ``list[object]``.
print("Not a list of strings!")
- If ``is_str_list`` is a class or instance method, then the type in
- ``TypeGuard`` maps to the type of the second parameter after ``cls`` or
- ``self``.
-
- In short, the form ``def foo(arg: TypeA) -> TypeGuard[TypeB]: ...``,
- means that if ``foo(arg)`` returns ``True``, then ``arg`` narrows from
- ``TypeA`` to ``TypeB``.
-
- .. note::
-
- ``TypeB`` need not be a narrower form of ``TypeA`` -- it can even be a
- wider form. The main reason is to allow for things like
- narrowing ``list[object]`` to ``list[str]`` even though the latter
- is not a subtype of the former, since ``list`` is invariant.
- The responsibility of writing type-safe type guards is left to the user.
-
- ``TypeGuard`` also works with type variables. See :pep:`647` for more details.
+ ``TypeIs`` and ``TypeGuard`` differ in the following ways:
+
+ * ``TypeIs`` requires the narrowed type to be a subtype of the input type, while
+ ``TypeGuard`` does not. The main reason is to allow for things like
+ narrowing ``list[object]`` to ``list[str]`` even though the latter
+ is not a subtype of the former, since ``list`` is invariant.
+ * When a ``TypeGuard`` function returns ``True``, type checkers narrow the type of the
+ variable to exactly the ``TypeGuard`` type. When a ``TypeIs`` function returns ``True``,
+ type checkers can infer a more precise type combining the previously known type of the
+ variable with the ``TypeIs`` type. (Technically, this is known as an intersection type.)
+ * When a ``TypeGuard`` function returns ``False``, type checkers cannot narrow the type of
+ the variable at all. When a ``TypeIs`` function returns ``False``, type checkers can narrow
+ the type of the variable to exclude the ``TypeIs`` type.
.. versionadded:: 3.10
diff --git a/Doc/library/unittest.mock.rst b/Doc/library/unittest.mock.rst
index d1f2a96df667c6..ee4c7b2ed252b0 100644
--- a/Doc/library/unittest.mock.rst
+++ b/Doc/library/unittest.mock.rst
@@ -35,7 +35,7 @@ is based on the 'action -> assertion' pattern instead of 'record -> replay'
used by many mocking frameworks.
There is a backport of :mod:`unittest.mock` for earlier versions of Python,
-available as `mock on PyPI <https://pypi.org/project/mock>`_.
+available as :pypi:`mock` on PyPI.
Quick Guide
diff --git a/Doc/library/urllib.request.rst b/Doc/library/urllib.request.rst
index 1a17c9cd7dec4e..c1e60a46774704 100644
--- a/Doc/library/urllib.request.rst
+++ b/Doc/library/urllib.request.rst
@@ -113,9 +113,9 @@ The :mod:`urllib.request` module defines the following functions:
``http/1.1`` when no *context* is given. Custom *context* should set
ALPN protocols with :meth:`~ssl.SSLContext.set_alpn_protocols`.
- .. versionchanged:: 3.13
- Remove *cafile*, *capath* and *cadefault* parameters: use the *context*
- parameter instead.
+ .. versionchanged:: 3.13
+ Remove *cafile*, *capath* and *cadefault* parameters: use the *context*
+ parameter instead.
.. function:: install_opener(opener)
diff --git a/Doc/library/venv.rst b/Doc/library/venv.rst
index ecb01b352e8cbc..cdd1fde2e44b00 100644
--- a/Doc/library/venv.rst
+++ b/Doc/library/venv.rst
@@ -27,7 +27,7 @@ optionally be isolated from the packages in the base environment,
so only those explicitly installed in the virtual environment are available.
When used from within a virtual environment, common installation tools such as
-`pip`_ will install Python packages into a virtual environment
+:pypi:`pip` will install Python packages into a virtual environment
without needing to be told to do so explicitly.
A virtual environment is (amongst other things):
@@ -614,7 +614,3 @@ subclass which installs setuptools and pip into a created virtual environment::
This script is also available for download `online
<https://gist.github.com/vsajip/4673395>`_.
-
-
-.. _setuptools: https://pypi.org/project/setuptools/
-.. _pip: https://pypi.org/project/pip/
diff --git a/Doc/library/webbrowser.rst b/Doc/library/webbrowser.rst
index c1c4619d9df776..3775d9f9245428 100644
--- a/Doc/library/webbrowser.rst
+++ b/Doc/library/webbrowser.rst
@@ -42,9 +42,12 @@ a new tab, with the browser being brought to the foreground. The use of the
The script :program:`webbrowser` can be used as a command-line interface for the
module. It accepts a URL as the argument. It accepts the following optional
-parameters: ``-n`` opens the URL in a new browser window, if possible;
-``-t`` opens the URL in a new browser page ("tab"). The options are,
-naturally, mutually exclusive. Usage example::
+parameters:
+
+* ``-n``/``--new-window`` opens the URL in a new browser window, if possible.
+* ``-t``/``--new-tab`` opens the URL in a new browser page ("tab").
+
+The options are, naturally, mutually exclusive. Usage example::
python -m webbrowser -t "https://www.python.org"
diff --git a/Doc/library/xml.etree.elementtree.rst b/Doc/library/xml.etree.elementtree.rst
index 7d721f7633899e..30a7b653f940e9 100644
--- a/Doc/library/xml.etree.elementtree.rst
+++ b/Doc/library/xml.etree.elementtree.rst
@@ -840,33 +840,28 @@ Functions
.. module:: xml.etree.ElementInclude
-.. function:: xml.etree.ElementInclude.default_loader( href, parse, encoding=None)
- :module:
+.. function:: default_loader(href, parse, encoding=None)
- Default loader. This default loader reads an included resource from disk. *href* is a URL.
- *parse* is for parse mode either "xml" or "text". *encoding*
- is an optional text encoding. If not given, encoding is ``utf-8``. Returns the
- expanded resource. If the parse mode is ``"xml"``, this is an ElementTree
- instance. If the parse mode is "text", this is a Unicode string. If the
- loader fails, it can return None or raise an exception.
+ Default loader. This default loader reads an included resource from disk.
+ *href* is a URL. *parse* is for parse mode either "xml" or "text".
+ *encoding* is an optional text encoding. If not given, encoding is ``utf-8``.
+ Returns the expanded resource.
+ If the parse mode is ``"xml"``, this is an :class:`~xml.etree.ElementTree.Element` instance.
+ If the parse mode is ``"text"``, this is a string.
+ If the loader fails, it can return ``None`` or raise an exception.
-.. function:: xml.etree.ElementInclude.include( elem, loader=None, base_url=None, \
- max_depth=6)
- :module:
+.. function:: include(elem, loader=None, base_url=None, max_depth=6)
- This function expands XInclude directives. *elem* is the root element. *loader* is
- an optional resource loader. If omitted, it defaults to :func:`default_loader`.
+ This function expands XInclude directives in-place in tree pointed by *elem*.
+ *elem* is either the root :class:`~xml.etree.ElementTree.Element` or an
+ :class:`~xml.etree.ElementTree.ElementTree` instance to find such element.
+ *loader* is an optional resource loader. If omitted, it defaults to :func:`default_loader`.
If given, it should be a callable that implements the same interface as
:func:`default_loader`. *base_url* is base URL of the original file, to resolve
relative include file references. *max_depth* is the maximum number of recursive
- inclusions. Limited to reduce the risk of malicious content explosion. Pass a
- negative value to disable the limitation.
-
- Returns the expanded resource. If the parse mode is
- ``"xml"``, this is an ElementTree instance. If the parse mode is "text",
- this is a Unicode string. If the loader fails, it can return None or
- raise an exception.
+ inclusions. Limited to reduce the risk of malicious content explosion.
+ Pass ``None`` to disable the limitation.
.. versionchanged:: 3.9
Added the *base_url* and *max_depth* parameters.
diff --git a/Doc/library/xml.rst b/Doc/library/xml.rst
index 662cc459197e2c..d495995398959d 100644
--- a/Doc/library/xml.rst
+++ b/Doc/library/xml.rst
@@ -124,10 +124,9 @@ large tokens
Expat needs to re-parse unfinished tokens; without the protection
introduced in Expat 2.6.0, this can lead to quadratic runtime that can
be used to cause denial of service in the application parsing XML.
- The issue is known as
- `CVE-2023-52425 <https://www.cve.org/CVERecord?id=CVE-2023-52425>`_.
+ The issue is known as :cve:`2023-52425`.
-The documentation for `defusedxml`_ on PyPI has further information about
+The documentation for :pypi:`defusedxml` on PyPI has further information about
all known attack vectors with examples and references.
.. _defusedxml-package:
@@ -135,14 +134,13 @@ all known attack vectors with examples and references.
The :mod:`!defusedxml` Package
------------------------------
-`defusedxml`_ is a pure Python package with modified subclasses of all stdlib
+:pypi:`defusedxml` is a pure Python package with modified subclasses of all stdlib
XML parsers that prevent any potentially malicious operation. Use of this
package is recommended for any server code that parses untrusted XML data. The
package also ships with example exploits and extended documentation on more
XML exploits such as XPath injection.
-.. _defusedxml: https://pypi.org/project/defusedxml/
.. _Billion Laughs: https://en.wikipedia.org/wiki/Billion_laughs
.. _ZIP bomb: https://en.wikipedia.org/wiki/Zip_bomb
.. _DTD: https://en.wikipedia.org/wiki/Document_type_definition
diff --git a/Doc/library/zipfile.rst b/Doc/library/zipfile.rst
index b6f881fd2dfd70..ee53f162ac9080 100644
--- a/Doc/library/zipfile.rst
+++ b/Doc/library/zipfile.rst
@@ -632,7 +632,7 @@ Path objects are traversable using the ``/`` operator or ``joinpath``.
Prior to 3.10, ``joinpath`` was undocumented and accepted
exactly one parameter.
-The `zipp <https://pypi.org/project/zipp>`_ project provides backports
+The :pypi:`zipp` project provides backports
of the latest path object functionality to older Pythons. Use
``zipp.Path`` in place of ``zipfile.Path`` for early access to
changes.
diff --git a/Doc/library/zoneinfo.rst b/Doc/library/zoneinfo.rst
index f8624da6e51dbb..54f1988375570c 100644
--- a/Doc/library/zoneinfo.rst
+++ b/Doc/library/zoneinfo.rst
@@ -17,7 +17,7 @@ The :mod:`zoneinfo` module provides a concrete time zone implementation to
support the IANA time zone database as originally specified in :pep:`615`. By
default, :mod:`zoneinfo` uses the system's time zone data if available; if no
system time zone data is available, the library will fall back to using the
-first-party `tzdata`_ package available on PyPI.
+first-party :pypi:`tzdata` package available on PyPI.
.. seealso::
@@ -25,7 +25,7 @@ first-party `tzdata`_ package available on PyPI.
Provides the :class:`~datetime.time` and :class:`~datetime.datetime`
types with which the :class:`ZoneInfo` class is designed to be used.
- Package `tzdata`_
+ Package :pypi:`tzdata`
First-party package maintained by the CPython core developers to supply
time zone data via PyPI.
@@ -93,7 +93,7 @@ Data sources
The ``zoneinfo`` module does not directly provide time zone data, and instead
pulls time zone information from the system time zone database or the
-first-party PyPI package `tzdata`_, if available. Some systems, including
+first-party PyPI package :pypi:`tzdata`, if available. Some systems, including
notably Windows systems, do not have an IANA database available, and so for
projects targeting cross-platform compatibility that require time zone data, it
is recommended to declare a dependency on tzdata. If neither system data nor
@@ -413,5 +413,3 @@ Exceptions and warnings
be filtered out, such as a relative path.
.. Links and references:
-
-.. _tzdata: https://pypi.org/project/tzdata/
diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst
index bc835b8e30cb29..6d6395a21f65d2 100644
--- a/Doc/reference/datamodel.rst
+++ b/Doc/reference/datamodel.rst
@@ -932,11 +932,8 @@ name is not found there, the attribute search continues in the base classes.
This search of the base classes uses the C3 method resolution order which
behaves correctly even in the presence of 'diamond' inheritance structures
where there are multiple inheritance paths leading back to a common ancestor.
-Additional details on the C3 MRO used by Python can be found in the
-documentation accompanying the 2.3 release at
-https://www.python.org/download/releases/2.3/mro/.
-
-.. XXX: Could we add that MRO doc as an appendix to the language ref?
+Additional details on the C3 MRO used by Python can be found at
+:ref:`python_2.3_mro`.
.. index::
pair: object; class
diff --git a/Doc/tools/extensions/c_annotations.py b/Doc/tools/extensions/c_annotations.py
index a8b6d8995e3f40..abd0a8c817f154 100644
--- a/Doc/tools/extensions/c_annotations.py
+++ b/Doc/tools/extensions/c_annotations.py
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
"""
c_annotations.py
~~~~~~~~~~~~~~~~
@@ -34,11 +33,10 @@
REST_ROLE_MAP = {
'function': 'func',
- 'var': 'data',
- 'type': 'type',
'macro': 'macro',
- 'type': 'type',
'member': 'member',
+ 'type': 'type',
+ 'var': 'data',
}
@@ -63,7 +61,7 @@ def __init__(self, name):
class Annotations:
def __init__(self, refcount_filename, stable_abi_file):
self.refcount_data = {}
- with open(refcount_filename, 'r') as fp:
+ with open(refcount_filename, encoding='utf8') as fp:
for line in fp:
line = line.strip()
if line[:1] in ("", "#"):
@@ -71,7 +69,7 @@ def __init__(self, refcount_filename, stable_abi_file):
continue
parts = line.split(":", 4)
if len(parts) != 5:
- raise ValueError("Wrong field count in %r" % line)
+ raise ValueError(f"Wrong field count in {line!r}")
function, type, arg, refcount, comment = parts
# Get the entry, creating it if needed:
try:
@@ -91,9 +89,8 @@ def __init__(self, refcount_filename, stable_abi_file):
entry.result_refs = refcount
self.stable_abi_data = {}
- with open(stable_abi_file, 'r') as fp:
+ with open(stable_abi_file, encoding='utf8') as fp:
for record in csv.DictReader(fp):
- role = record['role']
name = record['name']
self.stable_abi_data[name] = record
@@ -180,13 +177,17 @@ def add_annotations(self, app, doctree):
continue
elif not entry.result_type.endswith("Object*"):
continue
+ classes = ['refcount']
if entry.result_refs is None:
rc = sphinx_gettext('Return value: Always NULL.')
+ classes.append('return_null')
elif entry.result_refs:
rc = sphinx_gettext('Return value: New reference.')
+ classes.append('return_new_ref')
else:
rc = sphinx_gettext('Return value: Borrowed reference.')
- node.insert(0, nodes.emphasis(rc, rc, classes=['refcount']))
+ classes.append('return_borrowed_ref')
+ node.insert(0, nodes.emphasis(rc, rc, classes=classes))
def init_annotations(app):
@@ -228,6 +229,7 @@ def setup(app):
'stableabi': directives.flag,
}
old_handle_signature = CObject.handle_signature
+
def new_handle_signature(self, sig, signode):
signode.parent['stableabi'] = 'stableabi' in self.options
return old_handle_signature(self, sig, signode)
diff --git a/Doc/tools/extensions/pyspecific.py b/Doc/tools/extensions/pyspecific.py
index c31d67d2868144..8c88612cf68180 100644
--- a/Doc/tools/extensions/pyspecific.py
+++ b/Doc/tools/extensions/pyspecific.py
@@ -26,7 +26,6 @@
from sphinx.locale import _ as sphinx_gettext
from sphinx.util import logging
from sphinx.util.docutils import SphinxDirective
-from sphinx.util.nodes import split_explicit_title
from sphinx.writers.text import TextWriter, TextTranslator
try:
@@ -39,6 +38,7 @@
ISSUE_URI = 'https://bugs.python.org/issue?@action=redirect&bpo=%s'
GH_ISSUE_URI = 'https://github.com/python/cpython/issues/%s'
+# Used in conf.py and updated here by python/release-tools/run_release.py
SOURCE_URI = 'https://github.com/python/cpython/tree/main/%s'
# monkey-patch reST parser to disable alphabetic and roman enumerated lists
@@ -54,6 +54,7 @@
std.token_re = re.compile(r'`((~?[\w-]*:)?\w+)`')
+
# Support for marking up and linking to bugs.python.org issues
def issue_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
@@ -85,16 +86,6 @@ def gh_issue_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
return [refnode], []
-# Support for linking to Python source files easily
-
-def source_role(typ, rawtext, text, lineno, inliner, options={}, content=[]):
- has_t, title, target = split_explicit_title(text)
- title = utils.unescape(title)
- target = utils.unescape(target)
- refnode = nodes.reference(title, title, refuri=SOURCE_URI % target)
- return [refnode], []
-
-
# Support for marking up implementation details
class ImplementationDetail(Directive):
@@ -194,7 +185,6 @@ def parse_platforms(self):
return platforms
-
# Support for documenting audit event
def audit_events_purge(app, env, docname):
@@ -710,7 +700,6 @@ def patch_pairindextypes(app, _env) -> None:
def setup(app):
app.add_role('issue', issue_role)
app.add_role('gh', gh_issue_role)
- app.add_role('source', source_role)
app.add_directive('impl-detail', ImplementationDetail)
app.add_directive('availability', Availability)
app.add_directive('audit-event', AuditEvent)
diff --git a/Doc/tutorial/classes.rst b/Doc/tutorial/classes.rst
index d1c303ef037027..7ab528acb370f2 100644
--- a/Doc/tutorial/classes.rst
+++ b/Doc/tutorial/classes.rst
@@ -665,7 +665,7 @@ class, that calls each parent only once, and that is monotonic (meaning that a
class can be subclassed without affecting the precedence order of its parents).
Taken together, these properties make it possible to design reliable and
extensible classes with multiple inheritance. For more detail, see
-https://www.python.org/download/releases/2.3/mro/.
+:ref:`python_2.3_mro`.
.. _tut-private:
diff --git a/Doc/tutorial/errors.rst b/Doc/tutorial/errors.rst
index 0b9acd00fdc6bd..981b14f5a4212b 100644
--- a/Doc/tutorial/errors.rst
+++ b/Doc/tutorial/errors.rst
@@ -119,9 +119,9 @@ may name multiple exceptions as a parenthesized tuple, for example::
... except (RuntimeError, TypeError, NameError):
... pass
-A class in an :keyword:`except` clause is compatible with an exception if it is
-the same class or a base class thereof (but not the other way around --- an
-*except clause* listing a derived class is not compatible with a base class).
+A class in an :keyword:`except` clause matches exceptions which are instances of the
+class itself or one of its derived classes (but not the other way around --- an
+*except clause* listing a derived class does not match instances of its base classes).
For example, the following code will print B, C, D in that order::
class B(Exception):
diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst
index 565d86cb1a0dd3..295e3fb09830ce 100644
--- a/Doc/using/cmdline.rst
+++ b/Doc/using/cmdline.rst
@@ -500,43 +500,73 @@ Miscellaneous options
* ``-X faulthandler`` to enable :mod:`faulthandler`.
See also :envvar:`PYTHONFAULTHANDLER`.
+
+ .. versionadded:: 3.3
+
* ``-X showrefcount`` to output the total reference count and number of used
memory blocks when the program finishes or after each statement in the
interactive interpreter. This only works on :ref:`debug builds
<debug-build>`.
+
+ .. versionadded:: 3.4
+
* ``-X tracemalloc`` to start tracing Python memory allocations using the
:mod:`tracemalloc` module. By default, only the most recent frame is
stored in a traceback of a trace. Use ``-X tracemalloc=NFRAME`` to start
tracing with a traceback limit of *NFRAME* frames.
See :func:`tracemalloc.start` and :envvar:`PYTHONTRACEMALLOC`
for more information.
+
+ .. versionadded:: 3.4
+
* ``-X int_max_str_digits`` configures the :ref:`integer string conversion
length limitation <int_max_str_digits>`. See also
:envvar:`PYTHONINTMAXSTRDIGITS`.
+
+ .. versionadded:: 3.11
+
* ``-X importtime`` to show how long each import takes. It shows module
name, cumulative time (including nested imports) and self time (excluding
nested imports). Note that its output may be broken in multi-threaded
application. Typical usage is ``python3 -X importtime -c 'import
asyncio'``. See also :envvar:`PYTHONPROFILEIMPORTTIME`.
+
+ .. versionadded:: 3.7
+
* ``-X dev``: enable :ref:`Python Development Mode <devmode>`, introducing
additional runtime checks that are too expensive to be enabled by
default. See also :envvar:`PYTHONDEVMODE`.
+
+ .. versionadded:: 3.7
+
* ``-X utf8`` enables the :ref:`Python UTF-8 Mode <utf8-mode>`.
``-X utf8=0`` explicitly disables :ref:`Python UTF-8 Mode <utf8-mode>`
(even when it would otherwise activate automatically).
See also :envvar:`PYTHONUTF8`.
+
+ .. versionadded:: 3.7
+
* ``-X pycache_prefix=PATH`` enables writing ``.pyc`` files to a parallel
tree rooted at the given directory instead of to the code tree. See also
:envvar:`PYTHONPYCACHEPREFIX`.
+
+ .. versionadded:: 3.8
+
* ``-X warn_default_encoding`` issues a :class:`EncodingWarning` when the
locale-specific default encoding is used for opening files.
See also :envvar:`PYTHONWARNDEFAULTENCODING`.
+
+ .. versionadded:: 3.10
+
* ``-X no_debug_ranges`` disables the inclusion of the tables mapping extra
location information (end line, start column offset and end column offset)
to every instruction in code objects. This is useful when smaller code
objects and pyc files are desired as well as suppressing the extra visual
location indicators when the interpreter displays tracebacks. See also
:envvar:`PYTHONNODEBUGRANGES`.
+
+ .. versionadded:: 3.11
+
* ``-X frozen_modules`` determines whether or not frozen modules are
ignored by the import machinery. A value of ``on`` means they get
imported and ``off`` means they are ignored. The default is ``on``
@@ -545,71 +575,52 @@ Miscellaneous options
Note that the :mod:`!importlib_bootstrap` and
:mod:`!importlib_bootstrap_external` frozen modules are always used, even
if this flag is set to ``off``. See also :envvar:`PYTHON_FROZEN_MODULES`.
+
+ .. versionadded:: 3.11
+
* ``-X perf`` enables support for the Linux ``perf`` profiler.
When this option is provided, the ``perf`` profiler will be able to
report Python calls. This option is only available on some platforms and
will do nothing if is not supported on the current system. The default value
is "off". See also :envvar:`PYTHONPERFSUPPORT` and :ref:`perf_profiling`.
+
+ .. versionadded:: 3.12
+
* :samp:`-X cpu_count={n}` overrides :func:`os.cpu_count`,
:func:`os.process_cpu_count`, and :func:`multiprocessing.cpu_count`.
*n* must be greater than or equal to 1.
This option may be useful for users who need to limit CPU resources of a
container system. See also :envvar:`PYTHON_CPU_COUNT`.
If *n* is ``default``, nothing is overridden.
+
+ .. versionadded:: 3.13
+
* :samp:`-X presite={package.module}` specifies a module that should be
imported before the :mod:`site` module is executed and before the
:mod:`__main__` module exists. Therefore, the imported module isn't
:mod:`__main__`. This can be used to execute code early during Python
initialization. Python needs to be :ref:`built in debug mode <debug-build>`
for this option to exist. See also :envvar:`PYTHON_PRESITE`.
+
+ .. versionadded:: 3.13
+
* :samp:`-X gil={0,1}` forces the GIL to be disabled or enabled,
respectively. Only available in builds configured with
:option:`--disable-gil`. See also :envvar:`PYTHON_GIL`.
+ .. versionadded:: 3.13
+
It also allows passing arbitrary values and retrieving them through the
:data:`sys._xoptions` dictionary.
.. versionadded:: 3.2
- .. versionchanged:: 3.3
- Added the ``-X faulthandler`` option.
-
- .. versionchanged:: 3.4
- Added the ``-X showrefcount`` and ``-X tracemalloc`` options.
-
- .. versionchanged:: 3.6
- Added the ``-X showalloccount`` option.
-
- .. versionchanged:: 3.7
- Added the ``-X importtime``, ``-X dev`` and ``-X utf8`` options.
-
- .. versionchanged:: 3.8
- Added the ``-X pycache_prefix`` option. The ``-X dev`` option now logs
- ``close()`` exceptions in :class:`io.IOBase` destructor.
-
.. versionchanged:: 3.9
- Using ``-X dev`` option, check *encoding* and *errors* arguments on
- string encoding and decoding operations.
-
- The ``-X showalloccount`` option has been removed.
+ Removed the ``-X showalloccount`` option.
.. versionchanged:: 3.10
- Added the ``-X warn_default_encoding`` option.
Removed the ``-X oldparser`` option.
- .. versionchanged:: 3.11
- Added the ``-X no_debug_ranges``, ``-X frozen_modules`` and
- ``-X int_max_str_digits`` options.
-
- .. versionchanged:: 3.12
- Added the ``-X perf`` option.
-
- .. versionchanged:: 3.13
- Added the ``-X cpu_count`` and ``-X presite`` options.
-
- .. versionchanged:: 3.13
- Added the ``-X gil`` option.
-
.. _using-on-controlling-color:
Controlling color
diff --git a/Doc/using/configure.rst b/Doc/using/configure.rst
index eef0c5022d37af..580d31fd422c5a 100644
--- a/Doc/using/configure.rst
+++ b/Doc/using/configure.rst
@@ -518,6 +518,15 @@ also be used to improve performance.
GCC is used: add ``-fno-semantic-interposition`` to the compiler and linker
flags.
+ .. note::
+
+ During the build, you may encounter compiler warnings about
+ profile data not being available for some source files.
+ These warnings are harmless, as only a subset of the code is exercised
+ during profile data acquisition.
+ To disable these warnings on Clang, manually suppress them by adding
+ ``-Wno-profile-instr-unprofiled`` to :envvar:`CFLAGS`.
+
.. versionadded:: 3.6
.. versionchanged:: 3.10
diff --git a/Doc/using/mac.rst b/Doc/using/mac.rst
index 8f3372b8e017f5..31d37aad2a7408 100644
--- a/Doc/using/mac.rst
+++ b/Doc/using/mac.rst
@@ -145,7 +145,7 @@ There are several options for building GUI applications on the Mac with Python.
*PyObjC* is a Python binding to Apple's Objective-C/Cocoa framework, which is
the foundation of most modern Mac development. Information on PyObjC is
-available from https://pypi.org/project/pyobjc/.
+available from :pypi:`pyobjc`.
The standard Python GUI toolkit is :mod:`tkinter`, based on the cross-platform
Tk toolkit (https://www.tcl.tk). An Aqua-native version of Tk is bundled with
@@ -177,7 +177,7 @@ Distributing Python Applications
A range of tools exist for converting your Python code into a standalone
distributable application:
-* `py2app <https://pypi.org/project/py2app/>`__: Supports creating macOS ``.app``
+* :pypi:`py2app`: Supports creating macOS ``.app``
bundles from a Python project.
* `Briefcase `__: Part of the `BeeWare Project
diff --git a/Doc/using/windows.rst b/Doc/using/windows.rst
index cc4db34b04d900..ef98d32e8674ec 100644
--- a/Doc/using/windows.rst
+++ b/Doc/using/windows.rst
@@ -1285,7 +1285,7 @@ The Windows-specific standard modules are documented in
PyWin32
-------
-The `PyWin32 <https://pypi.org/project/pywin32>`_ module by Mark Hammond
+The :pypi:`PyWin32` module by Mark Hammond
is a collection of modules for advanced Windows-specific support. This includes
utilities for:
diff --git a/Doc/whatsnew/2.3.rst b/Doc/whatsnew/2.3.rst
index 37cd41add8132c..8adf36e316c6fb 100644
--- a/Doc/whatsnew/2.3.rst
+++ b/Doc/whatsnew/2.3.rst
@@ -1084,7 +1084,7 @@ Here are all of the changes that Python 2.3 makes to the core Python language.
C3 algorithm as described in the paper `"A Monotonic Superclass Linearization
for Dylan" `_. To
understand the motivation for this change, read Michele Simionato's article
- `"Python 2.3 Method Resolution Order" <https://www.python.org/download/releases/2.3/mro/>`_, or
+ :ref:`python_2.3_mro`, or
read the thread on python-dev starting with the message at
https://mail.python.org/pipermail/python-dev/2002-October/029035.html. Samuele
Pedroni first pointed out the problem and also implemented the fix by coding the
diff --git a/Doc/whatsnew/2.6.rst b/Doc/whatsnew/2.6.rst
index e4ade5ecd82b9d..fc2de7124859a8 100644
--- a/Doc/whatsnew/2.6.rst
+++ b/Doc/whatsnew/2.6.rst
@@ -3015,8 +3015,7 @@ Changes to Python's build process and to the C API include:
``PyRun_SimpleString("sys.path.pop(0)\n")`` afterwards to discard
the first ``sys.path`` component.
- Security issue reported as `CVE-2008-5983
- <https://www.cve.org/CVERecord?id=CVE-2008-5983>`_;
+ Security issue reported as :cve:`2008-5983`;
discussed in :gh:`50003`, and fixed by Antoine Pitrou.
* The BerkeleyDB module now has a C API object, available as
diff --git a/Doc/whatsnew/2.7.rst b/Doc/whatsnew/2.7.rst
index 5c99fbc503ba65..031777b9cf6413 100644
--- a/Doc/whatsnew/2.7.rst
+++ b/Doc/whatsnew/2.7.rst
@@ -1831,8 +1831,7 @@ The :mod:`unittest` module was greatly enhanced; many
new features were added. Most of these features were implemented
by Michael Foord, unless otherwise noted. The enhanced version of
the module is downloadable separately for use with Python versions 2.4 to 2.6,
-packaged as the :mod:`!unittest2` package, from
-https://pypi.org/project/unittest2.
+packaged as the :mod:`!unittest2` package, from :pypi:`unittest2`.
When used from the command line, the module can automatically discover
tests. It's not as fancy as `py.test `__ or
@@ -2178,8 +2177,7 @@ Changes to Python's build process and to the C API include:
whether the application should be using :c:func:`!PySys_SetArgvEx`
with *updatepath* set to false.
- Security issue reported as `CVE-2008-5983
- <https://www.cve.org/CVERecord?id=CVE-2008-5983>`_;
+ Security issue reported as :cve:`2008-5983`;
discussed in :issue:`5753`, and fixed by Antoine Pitrou.
* New macros: the Python header files now define the following macros:
@@ -2626,7 +2624,7 @@ with the first of those changes appearing in the Python 2.7.7 release.
2 applications. (Contributed by Alex Gaynor; :issue:`21304`.)
* OpenSSL 1.0.1h was upgraded for the official Windows installers published on
- python.org. (contributed by Zachary Ware in :issue:`21671` for CVE-2014-0224)
+ python.org. (Contributed by Zachary Ware in :issue:`21671` for :cve:`2014-0224`.)
:pep:`466` related features added in Python 2.7.9:
diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst
index e35179a2d8e513..1a4ecdf1737303 100644
--- a/Doc/whatsnew/3.10.rst
+++ b/Doc/whatsnew/3.10.rst
@@ -2331,8 +2331,7 @@ Converting between :class:`int` and :class:`str` in bases other than 2
(binary), 4, 8 (octal), 16 (hexadecimal), or 32 such as base 10 (decimal)
now raises a :exc:`ValueError` if the number of digits in string form is
above a limit to avoid potential denial of service attacks due to the
-algorithmic complexity. This is a mitigation for `CVE-2020-10735
-`_.
+algorithmic complexity. This is a mitigation for :cve:`2020-10735`.
This limit can be configured or disabled by environment variable, command
line flag, or :mod:`sys` APIs. See the :ref:`integer string conversion
length limitation ` documentation. The default limit
diff --git a/Doc/whatsnew/3.11.rst b/Doc/whatsnew/3.11.rst
index 4f4c1de8d8d596..7a74df330a86c7 100644
--- a/Doc/whatsnew/3.11.rst
+++ b/Doc/whatsnew/3.11.rst
@@ -544,8 +544,7 @@ Other CPython Implementation Changes
(binary), 4, 8 (octal), 16 (hexadecimal), or 32 such as base 10 (decimal)
now raises a :exc:`ValueError` if the number of digits in string form is
above a limit to avoid potential denial of service attacks due to the
- algorithmic complexity. This is a mitigation for `CVE-2020-10735
- `_.
+ algorithmic complexity. This is a mitigation for :cve:`2020-10735`.
This limit can be configured or disabled by environment variable, command
line flag, or :mod:`sys` APIs. See the :ref:`integer string conversion
length limitation ` documentation. The default limit
@@ -2028,7 +2027,7 @@ Removed C APIs are :ref:`listed separately `.
(and corresponding :c:macro:`!EXPERIMENTAL_ISOLATED_SUBINTERPRETERS` macro)
have been removed.
-* `Pynche `_
+* :pypi:`Pynche`
--- The Pythonically Natural Color and Hue Editor --- has been moved out
of ``Tools/scripts`` and is `being developed independently
`_ from the Python source tree.
diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst
index b986e638498abd..f2ef4efcb378bc 100644
--- a/Doc/whatsnew/3.12.rst
+++ b/Doc/whatsnew/3.12.rst
@@ -927,8 +927,6 @@ tempfile
* :func:`tempfile.mkdtemp` now always returns an absolute path, even if the
argument provided to the *dir* parameter is a relative path.
-.. _whatsnew-typing-py312:
-
threading
---------
@@ -963,6 +961,8 @@ types
:ref:`user-defined-generics` when subclassed. (Contributed by
James Hilton-Balfe and Alex Waygood in :gh:`101827`.)
+.. _whatsnew-typing-py312:
+
typing
------
@@ -1659,12 +1659,10 @@ smtpd
* The ``smtpd`` module has been removed according to the schedule in :pep:`594`,
having been deprecated in Python 3.4.7 and 3.5.4.
- Use aiosmtpd_ PyPI module or any other
+ Use the :pypi:`aiosmtpd` PyPI module or any other
:mod:`asyncio`-based server instead.
(Contributed by Oleg Iarygin in :gh:`93243`.)
-.. _aiosmtpd: https://pypi.org/project/aiosmtpd/
-
sqlite3
-------
@@ -1701,9 +1699,8 @@ ssl
instead, create a :class:`ssl.SSLContext` object and call its
:class:`ssl.SSLContext.wrap_socket` method. Any package that still uses
:func:`!ssl.wrap_socket` is broken and insecure. The function neither sends a
- SNI TLS extension nor validates server hostname. Code is subject to `CWE-295
- `_: Improper Certificate
- Validation.
+ SNI TLS extension nor validates the server hostname. Code is subject to :cwe:`295`
+ (Improper Certificate Validation).
(Contributed by Victor Stinner in :gh:`94199`.)
unittest
@@ -1832,7 +1829,7 @@ Changes in the Python API
* Remove the ``asyncore``-based ``smtpd`` module deprecated in Python 3.4.7
and 3.5.4. A recommended replacement is the
- :mod:`asyncio`-based aiosmtpd_ PyPI module.
+ :mod:`asyncio`-based :pypi:`aiosmtpd` PyPI module.
* :func:`shlex.split`: Passing ``None`` for *s* argument now raises an
exception, rather than reading :data:`sys.stdin`. The feature was deprecated
diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst
index e31f0c52d4c5f5..f957698ecb06d8 100644
--- a/Doc/whatsnew/3.13.rst
+++ b/Doc/whatsnew/3.13.rst
@@ -82,11 +82,15 @@ Important deprecations, removals or restrictions:
Interpreter improvements:
-* A basic :ref:`JIT compiler ` was added.
+* :pep:`744`: A basic :ref:`JIT compiler ` was added.
It is currently disabled by default (though we may turn it on later).
Performance improvements are modest -- we expect to be improving this
over the next few releases.
+New typing features:
+
+* :pep:`742`: :data:`typing.TypeIs` was added, providing more intuitive
+ type narrowing behavior.
New Features
============
@@ -181,7 +185,7 @@ Other Language Changes
(Contributed by Victor Stinner in :gh:`114570`.)
-* Allow controlling Expat >=2.6.0 reparse deferral (CVE-2023-52425)
+* Allow controlling Expat >=2.6.0 reparse deferral (:cve:`2023-52425`)
by adding five new methods:
* :meth:`xml.etree.ElementTree.XMLParser.flush`
@@ -192,13 +196,6 @@ Other Language Changes
(Contributed by Sebastian Pipping in :gh:`115623`.)
-* When :func:`asyncio.TaskGroup.create_task` is called on an inactive
- :class:`asyncio.TaskGroup`, the given coroutine will be closed (which
- prevents a :exc:`RuntimeWarning` about the given coroutine being
- never awaited).
-
- (Contributed by Arthur Tacca and Jason Zhang in :gh:`115957`.)
-
* The :func:`ssl.create_default_context` API now includes
:data:`ssl.VERIFY_X509_PARTIAL_CHAIN` and :data:`ssl.VERIFY_X509_STRICT`
in its default flags.
@@ -215,9 +212,8 @@ Other Language Changes
(Contributed by William Woodruff in :gh:`112389`.)
* The :class:`configparser.ConfigParser` now accepts unnamed sections before named
- ones if configured to do so.
-
- (Contributed by Pedro Sousa Lacerda in :gh:`66449`)
+ ones if configured to do so.
+ (Contributed by Pedro Sousa Lacerda in :gh:`66449`.)
New Modules
@@ -236,7 +232,7 @@ argparse
:meth:`~argparse.ArgumentParser.add_argument` and :meth:`!add_parser`
which allows to deprecate command-line options, positional arguments and
subcommands.
- (Contributed by Serhiy Storchaka in :gh:`83648`).
+ (Contributed by Serhiy Storchaka in :gh:`83648`.)
array
-----
@@ -266,9 +262,9 @@ ast
argument that does not map to a field on the AST node is now deprecated,
and will raise an exception in Python 3.15.
-* :func:`ast.parse` now accepts an optional argument ``optimize``
+* :func:`ast.parse` now accepts an optional argument *optimize*
which is passed on to the :func:`compile` built-in. This makes it
- possible to obtain an optimized ``AST``.
+ possible to obtain an optimized AST.
(Contributed by Irit Katriel in :gh:`108113`.)
asyncio
@@ -296,11 +292,46 @@ asyncio
with the tasks being completed.
(Contributed by Justin Arthur in :gh:`77714`.)
+* When :func:`asyncio.TaskGroup.create_task` is called on an inactive
+ :class:`asyncio.TaskGroup`, the given coroutine will be closed (which
+ prevents a :exc:`RuntimeWarning` about the given coroutine being
+ never awaited).
+ (Contributed by Arthur Tacca and Jason Zhang in :gh:`115957`.)
+
+* Improved behavior of :class:`asyncio.TaskGroup` when an external cancellation
+ collides with an internal cancellation. For example, when two task groups
+ are nested and both experience an exception in a child task simultaneously,
+ it was possible that the outer task group would hang, because its internal
+ cancellation was swallowed by the inner task group.
+
+ In the case where a task group is cancelled externally and also must
+ raise an :exc:`ExceptionGroup`, it will now call the parent task's
+ :meth:`~asyncio.Task.cancel` method. This ensures that a
+ :exc:`asyncio.CancelledError` will be raised at the next
+ :keyword:`await`, so the cancellation is not lost.
+
+ An added benefit of these changes is that task groups now preserve the
+ cancellation count (:meth:`asyncio.Task.cancelling`).
+
+ In order to handle some corner cases, :meth:`asyncio.Task.uncancel` may now
+ reset the undocumented ``_must_cancel`` flag when the cancellation count
+ reaches zero.
+
+ (Inspired by an issue reported by Arthur Tacca in :gh:`116720`.)
+
+* Add :meth:`asyncio.Queue.shutdown` (along with
+ :exc:`asyncio.QueueShutDown`) for queue termination.
+ (Contributed by Laurie Opperman and Yves Duprat in :gh:`104228`.)
+
+* Accept a tuple of separators in :meth:`asyncio.StreamReader.readuntil`,
+ stopping when one of them is encountered.
+ (Contributed by Bruce Merry in :gh:`81322`.)
+
base64
------
* Add :func:`base64.z85encode` and :func:`base64.z85decode` functions which allow encoding
- and decoding z85 data.
+ and decoding Z85 data.
See `Z85 specification `_ for more information.
(Contributed by Matan Perelman in :gh:`75299`.)
@@ -323,6 +354,9 @@ dbm
from the database.
(Contributed by Donghee Na in :gh:`107122`.)
+* Add new :mod:`dbm.sqlite3` backend, and make it the default :mod:`!dbm` backend.
+ (Contributed by Raymond Hettinger and Erlend E. Aasland in :gh:`100414`.)
+
dis
---
@@ -332,16 +366,6 @@ dis
the ``show_offsets`` parameter.
(Contributed by Irit Katriel in :gh:`112137`.)
-dbm
----
-
-* Add :meth:`dbm.gnu.gdbm.clear` and :meth:`dbm.ndbm.ndbm.clear` methods that remove all items
- from the database.
- (Contributed by Donghee Na in :gh:`107122`.)
-
-* Add new :mod:`dbm.sqlite3` backend, and make it the default :mod:`!dbm` backend.
- (Contributed by Raymond Hettinger and Erlend E. Aasland in :gh:`100414`.)
-
doctest
-------
@@ -358,10 +382,10 @@ email
encountered instead of potentially inaccurate values. Add optional *strict*
parameter to these two functions: use ``strict=False`` to get the old
behavior, accept malformed inputs.
- ``getattr(email.utils, 'supports_strict_parsing', False)`` can be use to
+ ``getattr(email.utils, 'supports_strict_parsing', False)`` can be used to
check if the *strict* parameter is available.
(Contributed by Thomas Dwyer and Victor Stinner for :gh:`102988` to improve
- the CVE-2023-27043 fix.)
+ the :cve:`2023-27043` fix.)
fractions
---------
@@ -375,23 +399,24 @@ gc
--
* The cyclic garbage collector is now incremental, which changes the meanings
- of the results of :meth:`gc.get_threshold` and :meth:`gc.get_threshold` as
+ of the results of :meth:`gc.get_threshold` and :meth:`gc.set_threshold` as
well as :meth:`gc.get_count` and :meth:`gc.get_stats`.
-* :meth:`gc.get_threshold` returns a three-tuple for backwards compatibility,
- the first value is the threshold for young collections, as before, the second
- value determines the rate at which the old collection is scanned; the
- default is 10 and higher values mean that the old collection is scanned more slowly.
- The third value is meangless and is always zero.
-* :meth:`gc.set_threshold` ignores any items after the second.
-* :meth:`gc.get_count` and :meth:`gc.get_stats`.
- These functions return the same format of results as before.
- The only difference is that instead of the results refering to
- the young, aging and old generations, the results refer to the
- young generation and the aging and collecting spaces of the old generation.
-
-In summary, code that attempted to manipulate the behavior of the cycle GC may
-not work exactly as intended, but it is very unlikely to harmful.
-All other code will work just fine.
+
+ * :meth:`gc.get_threshold` returns a three-item tuple for backwards compatibility.
+ The first value is the threshold for young collections, as before; the second
+ value determines the rate at which the old collection is scanned (the
+ default is 10, and higher values mean that the old collection is scanned more slowly).
+ The third value is meaningless and is always zero.
+ * :meth:`gc.set_threshold` ignores any items after the second.
+ * :meth:`gc.get_count` and :meth:`gc.get_stats`
+ return the same format of results as before.
+ The only difference is that instead of the results referring to
+ the young, aging and old generations, the results refer to the
+ young generation and the aging and collecting spaces of the old generation.
+
+ In summary, code that attempted to manipulate the behavior of the cycle GC may
+ not work exactly as intended, but it is very unlikely to be harmful.
+ All other code will work just fine.
glob
----
@@ -400,46 +425,49 @@ glob
shell-style wildcards to a regular expression.
(Contributed by Barney Gale in :gh:`72904`.)
-io
---
-
-The :class:`io.IOBase` finalizer now logs the ``close()`` method errors with
-:data:`sys.unraisablehook`. Previously, errors were ignored silently by default,
-and only logged in :ref:`Python Development Mode ` or on :ref:`Python
-built on debug mode `.
-(Contributed by Victor Stinner in :gh:`62948`.)
-
importlib
---------
-Previously deprecated :mod:`importlib.resources` functions are un-deprecated:
+* Previously deprecated :mod:`importlib.resources` functions are un-deprecated:
- * :func:`~importlib.resources.is_resource()`
- * :func:`~importlib.resources.open_binary()`
- * :func:`~importlib.resources.open_text()`
- * :func:`~importlib.resources.path()`
- * :func:`~importlib.resources.read_binary()`
- * :func:`~importlib.resources.read_text()`
+ * :func:`~importlib.resources.is_resource()`
+ * :func:`~importlib.resources.open_binary()`
+ * :func:`~importlib.resources.open_text()`
+ * :func:`~importlib.resources.path()`
+ * :func:`~importlib.resources.read_binary()`
+ * :func:`~importlib.resources.read_text()`
-All now allow for a directory (or tree) of resources, using multiple positional
-arguments.
+ All now allow for a directory (or tree) of resources, using multiple positional
+ arguments.
-For text-reading functions, the *encoding* and *errors* must now be given as
-keyword arguments.
+ For text-reading functions, the *encoding* and *errors* must now be given as
+ keyword arguments.
-The :func:`~importlib.resources.contents()` remains deprecated in favor of
-the full-featured :class:`~importlib.resources.abc.Traversable` API.
-However, there is now no plan to remove it.
+ The :func:`~importlib.resources.contents()` remains deprecated in favor of
+ the full-featured :class:`~importlib.resources.abc.Traversable` API.
+ However, there is now no plan to remove it.
+
+ (Contributed by Petr Viktorin in :gh:`106532`.)
+
+io
+--
-(Contributed by Petr Viktorin in :gh:`106532`.)
+* The :class:`io.IOBase` finalizer now logs the ``close()`` method errors with
+ :data:`sys.unraisablehook`. Previously, errors were ignored silently by default,
+ and only logged in :ref:`Python Development Mode ` or on :ref:`Python
+ built on debug mode `.
+ (Contributed by Victor Stinner in :gh:`62948`.)
ipaddress
---------
* Add the :attr:`ipaddress.IPv4Address.ipv6_mapped` property, which returns the IPv4-mapped IPv6 address.
(Contributed by Charles Machalow in :gh:`109466`.)
-* Fix ``is_global`` and ``is_private`` behavior in ``IPv4Address``, ``IPv6Address``, ``IPv4Network``
- and ``IPv6Network``.
+* Fix ``is_global`` and ``is_private`` behavior in
+ :class:`~ipaddress.IPv4Address`,
+ :class:`~ipaddress.IPv6Address`,
+ :class:`~ipaddress.IPv4Network` and
+ :class:`~ipaddress.IPv6Network`.
itertools
---------
@@ -460,12 +488,12 @@ marshal
math
----
-A new function :func:`~math.fma` for fused multiply-add operations has been
-added. This function computes ``x * y + z`` with only a single round, and so
-avoids any intermediate loss of precision. It wraps the ``fma()`` function
-provided by C99, and follows the specification of the IEEE 754
-"fusedMultiplyAdd" operation for special cases.
-(Contributed by Mark Dickinson and Victor Stinner in :gh:`73468`.)
+* A new function :func:`~math.fma` for fused multiply-add operations has been
+ added. This function computes ``x * y + z`` with only a single round, and so
+ avoids any intermediate loss of precision. It wraps the ``fma()`` function
+ provided by C99, and follows the specification of the IEEE 754
+ "fusedMultiplyAdd" operation for special cases.
+ (Contributed by Mark Dickinson and Victor Stinner in :gh:`73468`.)
mmap
----
@@ -561,18 +589,18 @@ pathlib
* Add *recurse_symlinks* keyword-only argument to :meth:`pathlib.Path.glob`
and :meth:`~pathlib.Path.rglob`.
- (Contributed by Barney Gale in :gh:`77609`).
+ (Contributed by Barney Gale in :gh:`77609`.)
* Add *follow_symlinks* keyword-only argument to :meth:`~pathlib.Path.is_file`,
:meth:`~pathlib.Path.is_dir`, :meth:`~pathlib.Path.owner`,
:meth:`~pathlib.Path.group`.
(Contributed by Barney Gale in :gh:`105793`, and Kamil Turek in
- :gh:`107962`).
+ :gh:`107962`.)
* Return files and directories from :meth:`pathlib.Path.glob` and
:meth:`~pathlib.Path.rglob` when given a pattern that ends with "``**``". In
earlier versions, only directories were returned.
- (Contributed by Barney Gale in :gh:`70303`).
+ (Contributed by Barney Gale in :gh:`70303`.)
pdb
---
@@ -602,6 +630,13 @@ re
* Rename :exc:`!re.error` to :exc:`re.PatternError` for improved clarity.
:exc:`!re.error` is kept for backward compatibility.
+site
+----
+
+* :file:`.pth` files are now decoded by UTF-8 first, and then by the
+ :term:`locale encoding` if the UTF-8 decoding fails.
+ (Contributed by Inada Naoki in :gh:`117802`.)
+
sqlite3
-------
@@ -621,6 +656,8 @@ statistics
from a fixed number of discrete samples.
(Contributed by Raymond Hettinger in :gh:`115863`.)
+.. _whatsnew313-subprocess:
+
subprocess
----------
@@ -653,7 +690,7 @@ time
* On Windows, :func:`time.time()` now uses the
``GetSystemTimePreciseAsFileTime()`` clock to have a resolution better
- than 1 us, instead of the ``GetSystemTimeAsFileTime()`` clock which has a
+ than 1 μs, instead of the ``GetSystemTimeAsFileTime()`` clock which has a
resolution of 15.6 ms.
(Contributed by Victor Stinner in :gh:`63207`.)
@@ -750,6 +787,7 @@ zipimport
* Gains support for ZIP64 format files. Everybody loves huge code right?
(Contributed by Tim Hatch in :gh:`94146`.)
+.. Add improved modules above alphabetically, not here at the end.
Optimizations
=============
@@ -760,7 +798,8 @@ Optimizations
* The :mod:`subprocess` module uses :func:`os.posix_spawn` in more situations
including the default where ``close_fds=True`` on many modern platforms. This
should provide a noteworthy performance increase launching processes on
- FreeBSD and Solaris. See the ``subprocess`` section above for details.
+ FreeBSD and Solaris. See the :ref:`subprocess `
+ section above for details.
(Contributed by Jakub Kulik in :gh:`113117`.)
.. _whatsnew313-jit-compiler:
@@ -799,6 +838,8 @@ The internal architecture is roughly as follows.
*copy-and-patch*. It has no runtime dependencies, but there is a new
build-time dependency on LLVM.
+See :pep:`744` for more details.
+
(JIT by Brandt Bucher, inspired by a paper by Haoran Xu and Fredrik Kjolstad.
Tier 2 IR by Mark Shannon and Guido van Rossum.
Tier 2 optimizer by Ken Jin.)
@@ -812,7 +853,7 @@ Deprecated
emits :exc:`DeprecationWarning` since 3.13
and will be removed in Python 3.16.
Use the ``'w'`` format code instead.
- (contributed by Hugo van Kemenade in :gh:`80480`)
+ (Contributed by Hugo van Kemenade in :gh:`80480`.)
* :mod:`ctypes`: Deprecate undocumented :func:`!ctypes.SetPointerType`
and :func:`!ctypes.ARRAY` functions.
@@ -828,6 +869,11 @@ Deprecated
membership in :data:`~dis.hasarg` instead.
(Contributed by Irit Katriel in :gh:`109319`.)
+* :ref:`frame-objects`:
+ Calling :meth:`frame.clear` on a suspended frame raises :exc:`RuntimeError`
+ (as has always been the case for an executing frame).
+ (Contributed by Irit Katriel in :gh:`79932`.)
+
* :mod:`getopt` and :mod:`optparse` modules: They are now
:term:`soft deprecated`: the :mod:`argparse` module should be used for new projects.
Previously, the :mod:`optparse` module was already deprecated, its removal
@@ -857,6 +903,12 @@ Deprecated
removal in Python 3.15. Use :func:`os.path.isreserved` to detect reserved
paths on Windows.
+* :mod:`platform`:
+ :func:`~platform.java_ver` is deprecated and will be removed in 3.15.
+ It was largely untested, had a confusing API,
+ and was only useful for Jython support.
+ (Contributed by Nikita Sobolev in :gh:`116349`.)
+
* :mod:`pydoc`: Deprecate undocumented :func:`!pydoc.ispackage` function.
(Contributed by Zackery Spytz in :gh:`64020`.)
@@ -885,6 +937,10 @@ Deprecated
Replace it with the :envvar:`PYTHONLEGACYWINDOWSFSENCODING` environment variable.
(Contributed by Inada Naoki in :gh:`73427`.)
+* :mod:`tarfile`:
+ The undocumented and unused ``tarfile`` attribute of :class:`tarfile.TarFile`
+ is deprecated and scheduled for removal in Python 3.16.
+
* :mod:`traceback`: The field *exc_type* of :class:`traceback.TracebackException`
is deprecated. Use *exc_type_str* instead.
@@ -917,28 +973,19 @@ Deprecated
the new :ref:`type parameter syntax ` instead.
(Contributed by Michael The in :gh:`107116`.)
-* :mod:`wave`: Deprecate the ``getmark()``, ``setmark()`` and ``getmarkers()``
- methods of the :class:`wave.Wave_read` and :class:`wave.Wave_write` classes.
- They will be removed in Python 3.15.
- (Contributed by Victor Stinner in :gh:`105096`.)
-
-* Calling :meth:`frame.clear` on a suspended frame raises :exc:`RuntimeError`
- (as has always been the case for an executing frame).
- (Contributed by Irit Katriel in :gh:`79932`.)
-
-* Assignment to a function's :attr:`~function.__code__` attribute where the new code
+* :ref:`user-defined-funcs`:
+ Assignment to a function's :attr:`~function.__code__` attribute where the new code
object's type does not match the function's type, is deprecated. The
different types are: plain function, generator, async generator and
coroutine.
(Contributed by Irit Katriel in :gh:`81137`.)
-* The undocumented and unused ``tarfile`` attribute of :class:`tarfile.TarFile`
- is deprecated and scheduled for removal in Python 3.16.
+* :mod:`wave`: Deprecate the ``getmark()``, ``setmark()`` and ``getmarkers()``
+ methods of the :class:`wave.Wave_read` and :class:`wave.Wave_write` classes.
+ They will be removed in Python 3.15.
+ (Contributed by Victor Stinner in :gh:`105096`.)
-* :func:`platform.java_ver` is deprecated and will be removed in 3.15.
- It was largely untested, had a confusing API,
- and was only useful for Jython support.
- (Contributed by Nikita Sobolev in :gh:`116349`.)
+.. Add deprecations above alphabetically, not here at the end.
Pending Removal in Python 3.14
------------------------------
@@ -1038,7 +1085,8 @@ Pending Removal in Python 3.14
* :mod:`typing`: :class:`~typing.ByteString`, deprecated since Python 3.9,
now causes a :exc:`DeprecationWarning` to be emitted when it is used.
-* :class:`!urllib.parse.Quoter` is deprecated: it was not intended to be a
+* :mod:`urllib`:
+ :class:`!urllib.parse.Quoter` is deprecated: it was not intended to be a
public API.
(Contributed by Gregory P. Smith in :gh:`88168`.)
@@ -1066,6 +1114,12 @@ Pending Removal in Python 3.15
removal in Python 3.15. Use :func:`os.path.isreserved` to detect reserved
paths on Windows.
+* :mod:`platform`:
+ :func:`~platform.java_ver` is deprecated and will be removed in 3.15.
+ It was largely untested, had a confusing API,
+ and was only useful for Jython support.
+ (Contributed by Nikita Sobolev in :gh:`116349`.)
+
* :mod:`threading`:
Passing any arguments to :func:`threading.RLock` is now deprecated.
C version allows any numbers of args and kwargs,
@@ -1075,22 +1129,22 @@ Pending Removal in Python 3.15
* :class:`typing.NamedTuple`:
- * The undocumented keyword argument syntax for creating NamedTuple classes
+ * The undocumented keyword argument syntax for creating :class:`!NamedTuple` classes
(``NT = NamedTuple("NT", x=int)``) is deprecated, and will be disallowed in
3.15. Use the class-based syntax or the functional syntax instead.
- * When using the functional syntax to create a NamedTuple class, failing to
- pass a value to the 'fields' parameter (``NT = NamedTuple("NT")``) is
- deprecated. Passing ``None`` to the 'fields' parameter
+ * When using the functional syntax to create a :class:`!NamedTuple` class, failing to
+ pass a value to the *fields* parameter (``NT = NamedTuple("NT")``) is
+ deprecated. Passing ``None`` to the *fields* parameter
(``NT = NamedTuple("NT", None)``) is also deprecated. Both will be
- disallowed in Python 3.15. To create a NamedTuple class with 0 fields, use
+ disallowed in Python 3.15. To create a :class:`!NamedTuple` class with 0 fields, use
``class NT(NamedTuple): pass`` or ``NT = NamedTuple("NT", [])``.
* :class:`typing.TypedDict`: When using the functional syntax to create a
- TypedDict class, failing to pass a value to the 'fields' parameter (``TD =
- TypedDict("TD")``) is deprecated. Passing ``None`` to the 'fields' parameter
+ :class:`!TypedDict` class, failing to pass a value to the *fields* parameter (``TD =
+ TypedDict("TD")``) is deprecated. Passing ``None`` to the *fields* parameter
(``TD = TypedDict("TD", None)``) is also deprecated. Both will be disallowed
- in Python 3.15. To create a TypedDict class with 0 fields, use ``class
+ in Python 3.15. To create a :class:`!TypedDict` class with 0 fields, use ``class
TD(TypedDict): pass`` or ``TD = TypedDict("TD", {})``.
* :mod:`wave`: Deprecate the ``getmark()``, ``setmark()`` and ``getmarkers()``
@@ -1098,11 +1152,6 @@ Pending Removal in Python 3.15
They will be removed in Python 3.15.
(Contributed by Victor Stinner in :gh:`105096`.)
-* :func:`platform.java_ver` is deprecated and will be removed in 3.15.
- It was largely untested, had a confusing API,
- and was only useful for Jython support.
- (Contributed by Nikita Sobolev in :gh:`116349`.)
-
Pending Removal in Python 3.16
------------------------------
@@ -1274,9 +1323,8 @@ PEP 594: dead batteries
* ``cgi.FieldStorage`` can typically be replaced with
:func:`urllib.parse.parse_qsl` for ``GET`` and ``HEAD`` requests,
- and the :mod:`email.message` module or `multipart
- `__ PyPI project for ``POST`` and
- ``PUT``.
+ and the :mod:`email.message` module or :pypi:`multipart`
+ PyPI project for ``POST`` and ``PUT``.
* ``cgi.parse()`` can be replaced by calling :func:`urllib.parse.parse_qs`
directly on the desired query string, except for ``multipart/form-data``
@@ -1294,7 +1342,7 @@ PEP 594: dead batteries
* ``cgi.parse_multipart()`` can be replaced with the functionality in the
:mod:`email` package (e.g. :class:`email.message.EmailMessage` and
:class:`email.message.Message`) which implements the same MIME RFCs, or
- with the `multipart `__ PyPI project.
+ with the :pypi:`multipart` PyPI project.
(Contributed by Victor Stinner in :gh:`104773`.)
@@ -1302,21 +1350,19 @@ PEP 594: dead batteries
The :mod:`hashlib` module is a potential replacement for certain use cases.
Otherwise, the following PyPI projects can be used:
- * `bcrypt `_:
+ * :pypi:`bcrypt`:
Modern password hashing for your software and your servers.
- * `passlib `_:
+ * :pypi:`passlib`:
Comprehensive password hashing framework supporting over 30 schemes.
- * `argon2-cffi `_:
+ * :pypi:`argon2-cffi`:
The secure Argon2 password hashing algorithm.
- * `legacycrypt `_:
+ * :pypi:`legacycrypt`:
Wrapper to the POSIX crypt library call and associated functionality.
(Contributed by Victor Stinner in :gh:`104773`.)
- * :mod:`!imghdr`: use the projects
- `filetype `_,
- `puremagic `_,
- or `python-magic `_ instead.
+ * :mod:`!imghdr`: use the projects :pypi:`filetype`,
+ :pypi:`puremagic`, or :pypi:`python-magic` instead.
(Contributed by Victor Stinner in :gh:`104773`.)
* :mod:`!mailcap`.
@@ -1330,8 +1376,7 @@ PEP 594: dead batteries
(Contributed by Victor Stinner in :gh:`104773`.)
* :mod:`!nntplib`:
- the `PyPI nntplib project `_
- can be used instead.
+ the :pypi:`nntplib` PyPI project can be used instead.
(Contributed by Victor Stinner in :gh:`104773`.)
* :mod:`!ossaudiodev`: use the
@@ -1341,23 +1386,19 @@ PEP 594: dead batteries
* :mod:`!pipes`: use the :mod:`subprocess` module instead.
(Contributed by Victor Stinner in :gh:`104773`.)
- * :mod:`!sndhdr`: use the projects
- `filetype `_,
- `puremagic `_, or
- `python-magic `_ instead.
+ * :mod:`!sndhdr`: use the projects :pypi:`filetype`,
+ :pypi:`puremagic`, or :pypi:`python-magic` instead.
(Contributed by Victor Stinner in :gh:`104773`.)
* :mod:`!spwd`:
- the `python-pam project `_
- can be used instead.
+ the :pypi:`python-pam` project can be used instead.
(Contributed by Victor Stinner in :gh:`104773`.)
* :mod:`!sunau`.
(Contributed by Victor Stinner in :gh:`104773`.)
- * :mod:`!telnetlib`, use the projects
- `telnetlib3 `_ or
- `Exscript `_ instead.
+ * :mod:`!telnetlib`, use the projects :pypi:`telnetlib3 ` or
+ :pypi:`Exscript` instead.
(Contributed by Victor Stinner in :gh:`104773`.)
* :mod:`!uu`: the :mod:`base64` module is a modern alternative.
@@ -1543,15 +1584,15 @@ Changes in the Python API
than directories only. Users may add a trailing slash to match only
directories.
-* :c:func:`!PyCode_GetFirstFree` is an ustable API now and has been renamed
+* :c:func:`!PyCode_GetFirstFree` is an unstable API now and has been renamed
to :c:func:`PyUnstable_Code_GetFirstFree`.
- (Contributed by Bogdan Romanyuk in :gh:`115781`)
+ (Contributed by Bogdan Romanyuk in :gh:`115781`.)
Build Changes
=============
-* Autoconf 2.71 and aclocal 1.16.4 is now required to regenerate
+* Autoconf 2.71 and aclocal 1.16.4 are now required to regenerate
the :file:`configure` script.
(Contributed by Christian Heimes in :gh:`89886`.)
@@ -1574,10 +1615,10 @@ Build Changes
:ref:`limited C API `.
(Contributed by Victor Stinner in :gh:`85283`.)
-* ``wasm32-wasi`` is now a tier 2 platform.
+* ``wasm32-wasi`` is now a :pep:`11` tier 2 platform.
(Contributed by Brett Cannon in :gh:`115192`.)
-* ``wasm32-emscripten`` is no longer a supported platform.
+* ``wasm32-emscripten`` is no longer a :pep:`11` supported platform.
(Contributed by Brett Cannon in :gh:`115192`.)
@@ -1771,7 +1812,7 @@ New Features
Equivalent to getting the ``type.__module__`` attribute.
(Contributed by Eric Snow and Victor Stinner in :gh:`111696`.)
-* Add support for ``%T``, ``%T#``, ``%N`` and ``%N#`` formats to
+* Add support for ``%T``, ``%#T``, ``%N`` and ``%#N`` formats to
:c:func:`PyUnicode_FromFormat`: format the fully qualified name of an object
type and of a type: call :c:func:`PyType_GetModuleName`. See :pep:`737` for
more information.
@@ -1803,6 +1844,13 @@ Porting to Python 3.13
and ``setitimer()`` functions.
(Contributed by Victor Stinner in :gh:`108765`.)
+* On Windows, ``Python.h`` no longer includes the ```` standard
+ header file. If needed, it should now be included explicitly. For example, it
+ provides the ``offsetof()`` function, and the ``size_t`` and ``ptrdiff_t`` types.
+ Including ```` explicitly was already needed by all other
+ platforms, the ``HAVE_STDDEF_H`` macro is only defined on Windows.
+ (Contributed by Victor Stinner in :gh:`108765`.)
+
* If the :c:macro:`Py_LIMITED_API` macro is defined, :c:macro:`!Py_BUILD_CORE`,
:c:macro:`!Py_BUILD_CORE_BUILTIN` and :c:macro:`!Py_BUILD_CORE_MODULE` macros
are now undefined by ````.
@@ -1812,7 +1860,7 @@ Porting to Python 3.13
were removed. They should be replaced by the new macros ``Py_TRASHCAN_BEGIN``
and ``Py_TRASHCAN_END``.
- A tp_dealloc function that has the old macros, such as::
+ A ``tp_dealloc`` function that has the old macros, such as::
static void
mytype_dealloc(mytype *p)
@@ -1837,13 +1885,6 @@ Porting to Python 3.13
Note that ``Py_TRASHCAN_BEGIN`` has a second argument which
should be the deallocation function it is in.
-* On Windows, ``Python.h`` no longer includes the ```` standard
- header file. If needed, it should now be included explicitly. For example, it
- provides ``offsetof()`` function, and ``size_t`` and ``ptrdiff_t`` types.
- Including ```` explicitly was already needed by all other
- platforms, the ``HAVE_STDDEF_H`` macro is only defined on Windows.
- (Contributed by Victor Stinner in :gh:`108765`.)
-
Deprecated
----------
diff --git a/Doc/whatsnew/3.5.rst b/Doc/whatsnew/3.5.rst
index 5c2ec230441b42..cd8a903327cc2f 100644
--- a/Doc/whatsnew/3.5.rst
+++ b/Doc/whatsnew/3.5.rst
@@ -951,7 +951,7 @@ New :class:`~collections.abc.Awaitable`, :class:`~collections.abc.Coroutine`,
(Contributed by Yury Selivanov in :issue:`24184`.)
For earlier Python versions, a backport of the new ABCs is available in an
-external `PyPI package <https://pypi.org/project/backports_abc>`_.
+external :pypi:`PyPI package <backports_abc>`.
compileall
diff --git a/Doc/whatsnew/3.7.rst b/Doc/whatsnew/3.7.rst
index 8122e0ee129b0d..ad7c8b5320180e 100644
--- a/Doc/whatsnew/3.7.rst
+++ b/Doc/whatsnew/3.7.rst
@@ -2609,8 +2609,7 @@ Converting between :class:`int` and :class:`str` in bases other than 2
(binary), 4, 8 (octal), 16 (hexadecimal), or 32 such as base 10 (decimal)
now raises a :exc:`ValueError` if the number of digits in string form is
above a limit to avoid potential denial of service attacks due to the
-algorithmic complexity. This is a mitigation for `CVE-2020-10735
-<https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-10735>`_.
+algorithmic complexity. This is a mitigation for :cve:`2020-10735`.
This limit can be configured or disabled by environment variable, command
line flag, or :mod:`sys` APIs. See the :ref:`integer string conversion
length limitation <int_max_str_digits>` documentation. The default limit
diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst
index 9a2652f5e33605..1356f24547b424 100644
--- a/Doc/whatsnew/3.8.rst
+++ b/Doc/whatsnew/3.8.rst
@@ -2337,8 +2337,7 @@ Converting between :class:`int` and :class:`str` in bases other than 2
(binary), 4, 8 (octal), 16 (hexadecimal), or 32 such as base 10 (decimal)
now raises a :exc:`ValueError` if the number of digits in string form is
above a limit to avoid potential denial of service attacks due to the
-algorithmic complexity. This is a mitigation for `CVE-2020-10735
-<https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-10735>`_.
+algorithmic complexity. This is a mitigation for :cve:`2020-10735`.
This limit can be configured or disabled by environment variable, command
line flag, or :mod:`sys` APIs. See the :ref:`integer string conversion
length limitation <int_max_str_digits>` documentation. The default limit
diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst
index 49d926b0edcd0f..de248bc3584d9a 100644
--- a/Doc/whatsnew/3.9.rst
+++ b/Doc/whatsnew/3.9.rst
@@ -300,12 +300,9 @@ Example::
As a fall-back source of data for platforms that don't ship the IANA database,
-the |tzdata|_ module was released as a first-party package -- distributed via
+the :pypi:`tzdata` module was released as a first-party package -- distributed via
PyPI and maintained by the CPython core team.
-.. |tzdata| replace:: ``tzdata``
-.. _tzdata: https://pypi.org/project/tzdata/
-
.. seealso::
:pep:`615` -- Support for the IANA Time Zone Database in the Standard Library
@@ -1592,8 +1589,7 @@ Converting between :class:`int` and :class:`str` in bases other than 2
(binary), 4, 8 (octal), 16 (hexadecimal), or 32 such as base 10 (decimal)
now raises a :exc:`ValueError` if the number of digits in string form is
above a limit to avoid potential denial of service attacks due to the
-algorithmic complexity. This is a mitigation for `CVE-2020-10735
-<https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-10735>`_.
+algorithmic complexity. This is a mitigation for :cve:`2020-10735`.
This limit can be configured or disabled by environment variable, command
line flag, or :mod:`sys` APIs. See the :ref:`integer string conversion
length limitation <int_max_str_digits>` documentation. The default limit
diff --git a/Doc/whatsnew/index.rst b/Doc/whatsnew/index.rst
index b9c19602653219..39837f8c62548f 100644
--- a/Doc/whatsnew/index.rst
+++ b/Doc/whatsnew/index.rst
@@ -34,8 +34,8 @@ anyone wishing to stay up-to-date after a new release.
2.1.rst
2.0.rst
-The "Changelog" is an HTML version of the `file built
-<https://pypi.org/project/blurb>`_ from the contents of the
+The "Changelog" is an HTML version of the :pypi:`file built <blurb>`
+from the contents of the
:source:`Misc/NEWS.d` directory tree, which contains *all* nontrivial changes
to Python for the current version.
diff --git a/Include/Python.h b/Include/Python.h
index ca38a98d8c4eca..bb771fb3aec980 100644
--- a/Include/Python.h
+++ b/Include/Python.h
@@ -45,6 +45,11 @@
# endif
#endif
+// gh-111506: The free-threaded build is not compatible with the limited API
+// or the stable ABI.
+#if defined(Py_LIMITED_API) && defined(Py_GIL_DISABLED)
+# error "The limited API is not currently supported in the free-threaded build"
+#endif
// Include Python header files
#include "pyport.h"
diff --git a/Include/cpython/pylifecycle.h b/Include/cpython/pylifecycle.h
index d425a233f71000..e46dfe59ec4630 100644
--- a/Include/cpython/pylifecycle.h
+++ b/Include/cpython/pylifecycle.h
@@ -63,6 +63,15 @@ typedef struct {
.gil = PyInterpreterConfig_OWN_GIL, \
}
+// gh-117649: The free-threaded build does not currently support single-phase
+// init extensions in subinterpreters. For now, we ensure that
+// `check_multi_interp_extensions` is always `1`, even in the legacy config.
+#ifdef Py_GIL_DISABLED
+# define _PyInterpreterConfig_LEGACY_CHECK_MULTI_INTERP_EXTENSIONS 1
+#else
+# define _PyInterpreterConfig_LEGACY_CHECK_MULTI_INTERP_EXTENSIONS 0
+#endif
+
#define _PyInterpreterConfig_LEGACY_INIT \
{ \
.use_main_obmalloc = 1, \
@@ -70,7 +79,7 @@ typedef struct {
.allow_exec = 1, \
.allow_threads = 1, \
.allow_daemon_threads = 1, \
- .check_multi_interp_extensions = 0, \
+ .check_multi_interp_extensions = _PyInterpreterConfig_LEGACY_CHECK_MULTI_INTERP_EXTENSIONS, \
.gil = PyInterpreterConfig_SHARED_GIL, \
}
diff --git a/Include/cpython/pystats.h b/Include/cpython/pystats.h
index e74fdd4d32e26c..38480a4f6cd78f 100644
--- a/Include/cpython/pystats.h
+++ b/Include/cpython/pystats.h
@@ -100,6 +100,7 @@ typedef struct _gc_stats {
typedef struct _uop_stats {
uint64_t execution_count;
uint64_t miss;
+ uint64_t pair_count[MAX_UOP_ID + 1];
} UOpStats;
#define _Py_UOP_HIST_SIZE 32
diff --git a/Include/cpython/weakrefobject.h b/Include/cpython/weakrefobject.h
index 1559e2def61260..9a796098c6b48f 100644
--- a/Include/cpython/weakrefobject.h
+++ b/Include/cpython/weakrefobject.h
@@ -30,6 +30,14 @@ struct _PyWeakReference {
PyWeakReference *wr_prev;
PyWeakReference *wr_next;
vectorcallfunc vectorcall;
+
+#ifdef Py_GIL_DISABLED
+ /* Pointer to the lock used when clearing in free-threaded builds.
+ * Normally this can be derived from wr_object, but in some cases we need
+ * to lock after wr_object has been set to Py_None.
+ */
+ struct _PyMutex *weakrefs_lock;
+#endif
};
Py_DEPRECATED(3.13) static inline PyObject* PyWeakref_GET_OBJECT(PyObject *ref_obj)
diff --git a/Include/internal/pycore_bytes_methods.h b/Include/internal/pycore_bytes_methods.h
index b9c0a4e2b2f77d..059dc2599bbd77 100644
--- a/Include/internal/pycore_bytes_methods.h
+++ b/Include/internal/pycore_bytes_methods.h
@@ -26,11 +26,16 @@ extern void _Py_bytes_title(char *result, const char *s, Py_ssize_t len);
extern void _Py_bytes_capitalize(char *result, const char *s, Py_ssize_t len);
extern void _Py_bytes_swapcase(char *result, const char *s, Py_ssize_t len);
-extern PyObject *_Py_bytes_find(const char *str, Py_ssize_t len, PyObject *args);
-extern PyObject *_Py_bytes_index(const char *str, Py_ssize_t len, PyObject *args);
-extern PyObject *_Py_bytes_rfind(const char *str, Py_ssize_t len, PyObject *args);
-extern PyObject *_Py_bytes_rindex(const char *str, Py_ssize_t len, PyObject *args);
-extern PyObject *_Py_bytes_count(const char *str, Py_ssize_t len, PyObject *args);
+extern PyObject *_Py_bytes_find(const char *str, Py_ssize_t len, PyObject *sub,
+ Py_ssize_t start, Py_ssize_t end);
+extern PyObject *_Py_bytes_index(const char *str, Py_ssize_t len, PyObject *sub,
+ Py_ssize_t start, Py_ssize_t end);
+extern PyObject *_Py_bytes_rfind(const char *str, Py_ssize_t len, PyObject *sub,
+ Py_ssize_t start, Py_ssize_t end);
+extern PyObject *_Py_bytes_rindex(const char *str, Py_ssize_t len, PyObject *sub,
+ Py_ssize_t start, Py_ssize_t end);
+extern PyObject *_Py_bytes_count(const char *str, Py_ssize_t len, PyObject *sub,
+ Py_ssize_t start, Py_ssize_t end);
extern int _Py_bytes_contains(const char *str, Py_ssize_t len, PyObject *arg);
extern PyObject *_Py_bytes_startswith(const char *str, Py_ssize_t len,
PyObject *subobj, Py_ssize_t start,
diff --git a/Include/internal/pycore_code.h b/Include/internal/pycore_code.h
index 688051bbff7aac..1ec0348d6e5e8b 100644
--- a/Include/internal/pycore_code.h
+++ b/Include/internal/pycore_code.h
@@ -310,6 +310,13 @@ extern int _PyStaticCode_Init(PyCodeObject *co);
#define GC_STAT_ADD(gen, name, n) do { if (_Py_stats) _Py_stats->gc_stats[(gen)].name += (n); } while (0)
#define OPT_STAT_INC(name) do { if (_Py_stats) _Py_stats->optimization_stats.name++; } while (0)
#define UOP_STAT_INC(opname, name) do { if (_Py_stats) { assert(opname < 512); _Py_stats->optimization_stats.opcode[opname].name++; } } while (0)
+#define UOP_PAIR_INC(uopcode, lastuop) \
+ do { \
+ if (lastuop && _Py_stats) { \
+ _Py_stats->optimization_stats.opcode[lastuop].pair_count[uopcode]++; \
+ } \
+ lastuop = uopcode; \
+ } while (0)
#define OPT_UNSUPPORTED_OPCODE(opname) do { if (_Py_stats) _Py_stats->optimization_stats.unsupported_opcode[opname]++; } while (0)
#define OPT_ERROR_IN_OPCODE(opname) do { if (_Py_stats) _Py_stats->optimization_stats.error_in_opcode[opname]++; } while (0)
#define OPT_HIST(length, name) \
@@ -337,6 +344,7 @@ PyAPI_FUNC(PyObject*) _Py_GetSpecializationStats(void);
#define GC_STAT_ADD(gen, name, n) ((void)0)
#define OPT_STAT_INC(name) ((void)0)
#define UOP_STAT_INC(opname, name) ((void)0)
+#define UOP_PAIR_INC(uopcode, lastuop) ((void)0)
#define OPT_UNSUPPORTED_OPCODE(opname) ((void)0)
#define OPT_ERROR_IN_OPCODE(opname) ((void)0)
#define OPT_HIST(length, name) ((void)0)
diff --git a/Include/internal/pycore_crossinterp.h b/Include/internal/pycore_crossinterp.h
index 63abef864ff87f..2dd165eae74850 100644
--- a/Include/internal/pycore_crossinterp.h
+++ b/Include/internal/pycore_crossinterp.h
@@ -217,6 +217,11 @@ typedef struct _excinfo {
const char *errdisplay;
} _PyXI_excinfo;
+PyAPI_FUNC(int) _PyXI_InitExcInfo(_PyXI_excinfo *info, PyObject *exc);
+PyAPI_FUNC(PyObject *) _PyXI_FormatExcInfo(_PyXI_excinfo *info);
+PyAPI_FUNC(PyObject *) _PyXI_ExcInfoAsObject(_PyXI_excinfo *info);
+PyAPI_FUNC(void) _PyXI_ClearExcInfo(_PyXI_excinfo *info);
+
typedef enum error_code {
_PyXI_ERR_NO_ERROR = 0,
@@ -313,6 +318,22 @@ PyAPI_FUNC(PyObject *) _PyXI_ApplyCapturedException(_PyXI_session *session);
PyAPI_FUNC(int) _PyXI_HasCapturedException(_PyXI_session *session);
+/*************/
+/* other API */
+/*************/
+
+// Export for _testinternalcapi shared extension
+PyAPI_FUNC(PyInterpreterState *) _PyXI_NewInterpreter(
+ PyInterpreterConfig *config,
+ long *maybe_whence,
+ PyThreadState **p_tstate,
+ PyThreadState **p_save_tstate);
+PyAPI_FUNC(void) _PyXI_EndInterpreter(
+ PyInterpreterState *interp,
+ PyThreadState *tstate,
+ PyThreadState **p_save_tstate);
+
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/internal/pycore_gc.h b/Include/internal/pycore_gc.h
index c4482c4ffcfa60..60020b5c01f8a6 100644
--- a/Include/internal/pycore_gc.h
+++ b/Include/internal/pycore_gc.h
@@ -39,12 +39,13 @@ static inline PyObject* _Py_FROM_GC(PyGC_Head *gc) {
/* Bit flags for ob_gc_bits (in Py_GIL_DISABLED builds) */
#ifdef Py_GIL_DISABLED
-# define _PyGC_BITS_TRACKED (1)
-# define _PyGC_BITS_FINALIZED (2)
+# define _PyGC_BITS_TRACKED (1) // Tracked by the GC
+# define _PyGC_BITS_FINALIZED (2) // tp_finalize was called
# define _PyGC_BITS_UNREACHABLE (4)
# define _PyGC_BITS_FROZEN (8)
# define _PyGC_BITS_SHARED (16)
# define _PyGC_BITS_SHARED_INLINE (32)
+# define _PyGC_BITS_DEFERRED (64) // Use deferred reference counting
#endif
/* True if the object is currently tracked by the GC. */
diff --git a/Include/internal/pycore_global_objects_fini_generated.h b/Include/internal/pycore_global_objects_fini_generated.h
index 9aa34f5927dea8..24f32fca2e5331 100644
--- a/Include/internal/pycore_global_objects_fini_generated.h
+++ b/Include/internal/pycore_global_objects_fini_generated.h
@@ -793,6 +793,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) {
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(alias));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(allow_code));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(append));
+ _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(arg));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(argdefs));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(args));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(arguments));
@@ -1275,6 +1276,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) {
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(version));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(volume));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(wait_all));
+ _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(warn_on_full_buffer));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(warnings));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(warnoptions));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(wbits));
diff --git a/Include/internal/pycore_global_strings.h b/Include/internal/pycore_global_strings.h
index 9a0d42f6f12a1e..024f817408cee5 100644
--- a/Include/internal/pycore_global_strings.h
+++ b/Include/internal/pycore_global_strings.h
@@ -282,6 +282,7 @@ struct _Py_global_strings {
STRUCT_FOR_ID(alias)
STRUCT_FOR_ID(allow_code)
STRUCT_FOR_ID(append)
+ STRUCT_FOR_ID(arg)
STRUCT_FOR_ID(argdefs)
STRUCT_FOR_ID(args)
STRUCT_FOR_ID(arguments)
@@ -764,6 +765,7 @@ struct _Py_global_strings {
STRUCT_FOR_ID(version)
STRUCT_FOR_ID(volume)
STRUCT_FOR_ID(wait_all)
+ STRUCT_FOR_ID(warn_on_full_buffer)
STRUCT_FOR_ID(warnings)
STRUCT_FOR_ID(warnoptions)
STRUCT_FOR_ID(wbits)
diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h
index b5cea863ff35dc..d38959e3ce4ec5 100644
--- a/Include/internal/pycore_interp.h
+++ b/Include/internal/pycore_interp.h
@@ -59,6 +59,12 @@ struct _stoptheworld_state {
PyThreadState *requester; // Thread that requested the pause (may be NULL).
};
+#ifdef Py_GIL_DISABLED
+// This should be prime but otherwise the choice is arbitrary. A larger value
+// increases concurrency at the expense of memory.
+# define NUM_WEAKREF_LIST_LOCKS 127
+#endif
+
/* cross-interpreter data registry */
/* Tracks some rare events per-interpreter, used by the optimizer to turn on/off
@@ -97,11 +103,23 @@ struct _is {
int requires_idref;
PyThread_type_lock id_mutex;
+#define _PyInterpreterState_WHENCE_NOTSET -1
+#define _PyInterpreterState_WHENCE_UNKNOWN 0
+#define _PyInterpreterState_WHENCE_RUNTIME 1
+#define _PyInterpreterState_WHENCE_LEGACY_CAPI 2
+#define _PyInterpreterState_WHENCE_CAPI 3
+#define _PyInterpreterState_WHENCE_XI 4
+#define _PyInterpreterState_WHENCE_STDLIB 5
+#define _PyInterpreterState_WHENCE_MAX 5
+ long _whence;
+
/* Has been initialized to a safe state.
In order to be effective, this must be set to 0 during or right
after allocation. */
int _initialized;
+ /* Has been fully initialized via pylifecycle.c. */
+ int _ready;
int finalizing;
uintptr_t last_restart_version;
@@ -203,6 +221,7 @@ struct _is {
#if defined(Py_GIL_DISABLED)
struct _mimalloc_interp_state mimalloc;
struct _brc_state brc; // biased reference counting state
+ PyMutex weakref_locks[NUM_WEAKREF_LIST_LOCKS];
#endif
// Per-interpreter state for the obmalloc allocator. For the main
@@ -298,6 +317,13 @@ PyAPI_FUNC(int) _PyInterpreterState_IDInitref(PyInterpreterState *);
PyAPI_FUNC(int) _PyInterpreterState_IDIncref(PyInterpreterState *);
PyAPI_FUNC(void) _PyInterpreterState_IDDecref(PyInterpreterState *);
+PyAPI_FUNC(int) _PyInterpreterState_IsReady(PyInterpreterState *interp);
+
+PyAPI_FUNC(long) _PyInterpreterState_GetWhence(PyInterpreterState *interp);
+extern void _PyInterpreterState_SetWhence(
+ PyInterpreterState *interp,
+ long whence);
+
extern const PyConfig* _PyInterpreterState_GetConfig(PyInterpreterState *interp);
// Get a copy of the current interpreter configuration.
diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h
index 4fc5e9bf653c1c..7b1c919e627dd4 100644
--- a/Include/internal/pycore_object.h
+++ b/Include/internal/pycore_object.h
@@ -86,9 +86,9 @@ PyAPI_FUNC(void) _Py_NO_RETURN _Py_FatalRefcountErrorFunc(
built against the pre-3.12 stable ABI. */
PyAPI_DATA(Py_ssize_t) _Py_RefTotal;
-extern void _Py_AddRefTotal(PyInterpreterState *, Py_ssize_t);
-extern void _Py_IncRefTotal(PyInterpreterState *);
-extern void _Py_DecRefTotal(PyInterpreterState *);
+extern void _Py_AddRefTotal(PyThreadState *, Py_ssize_t);
+extern void _Py_IncRefTotal(PyThreadState *);
+extern void _Py_DecRefTotal(PyThreadState *);
# define _Py_DEC_REFTOTAL(interp) \
interp->object_state.reftotal--
@@ -101,7 +101,7 @@ static inline void _Py_RefcntAdd(PyObject* op, Py_ssize_t n)
return;
}
#ifdef Py_REF_DEBUG
- _Py_AddRefTotal(_PyInterpreterState_GET(), n);
+ _Py_AddRefTotal(_PyThreadState_GET(), n);
#endif
#if !defined(Py_GIL_DISABLED)
op->ob_refcnt += n;
@@ -158,6 +158,21 @@ static inline void _Py_ClearImmortal(PyObject *op)
op = NULL; \
} while (0)
+// Mark an object as supporting deferred reference counting. This is a no-op
+// in the default (with GIL) build. Objects that use deferred reference
+// counting should be tracked by the GC so that they are eventually collected.
+extern void _PyObject_SetDeferredRefcount(PyObject *op);
+
+static inline int
+_PyObject_HasDeferredRefcount(PyObject *op)
+{
+#ifdef Py_GIL_DISABLED
+ return (op->ob_gc_bits & _PyGC_BITS_DEFERRED) != 0;
+#else
+ return 0;
+#endif
+}
+
#if !defined(Py_GIL_DISABLED)
static inline void
_Py_DECREF_SPECIALIZED(PyObject *op, const destructor destruct)
@@ -393,7 +408,7 @@ _Py_TryIncrefFast(PyObject *op) {
_Py_INCREF_STAT_INC();
_Py_atomic_store_uint32_relaxed(&op->ob_ref_local, local);
#ifdef Py_REF_DEBUG
- _Py_IncRefTotal(_PyInterpreterState_GET());
+ _Py_IncRefTotal(_PyThreadState_GET());
#endif
return 1;
}
@@ -416,7 +431,7 @@ _Py_TryIncRefShared(PyObject *op)
&shared,
shared + (1 << _Py_REF_SHARED_SHIFT))) {
#ifdef Py_REF_DEBUG
- _Py_IncRefTotal(_PyInterpreterState_GET());
+ _Py_IncRefTotal(_PyThreadState_GET());
#endif
_Py_INCREF_STAT_INC();
return 1;
@@ -426,7 +441,7 @@ _Py_TryIncRefShared(PyObject *op)
/* Tries to incref the object op and ensures that *src still points to it. */
static inline int
-_Py_TryIncref(PyObject **src, PyObject *op)
+_Py_TryIncrefCompare(PyObject **src, PyObject *op)
{
if (_Py_TryIncrefFast(op)) {
return 1;
@@ -452,7 +467,7 @@ _Py_XGetRef(PyObject **ptr)
if (value == NULL) {
return value;
}
- if (_Py_TryIncref(ptr, value)) {
+ if (_Py_TryIncrefCompare(ptr, value)) {
return value;
}
}
@@ -467,7 +482,7 @@ _Py_TryXGetRef(PyObject **ptr)
if (value == NULL) {
return value;
}
- if (_Py_TryIncref(ptr, value)) {
+ if (_Py_TryIncrefCompare(ptr, value)) {
return value;
}
return NULL;
@@ -506,7 +521,41 @@ _Py_XNewRefWithLock(PyObject *obj)
return _Py_NewRefWithLock(obj);
}
+static inline void
+_PyObject_SetMaybeWeakref(PyObject *op)
+{
+ if (_Py_IsImmortal(op)) {
+ return;
+ }
+ for (;;) {
+ Py_ssize_t shared = _Py_atomic_load_ssize_relaxed(&op->ob_ref_shared);
+ if ((shared & _Py_REF_SHARED_FLAG_MASK) != 0) {
+ // Nothing to do if it's in WEAKREFS, QUEUED, or MERGED states.
+ return;
+ }
+ if (_Py_atomic_compare_exchange_ssize(
+ &op->ob_ref_shared, &shared, shared | _Py_REF_MAYBE_WEAKREF)) {
+ return;
+ }
+ }
+}
+
+#endif
+
+/* Tries to incref op and returns 1 if successful or 0 otherwise. */
+static inline int
+_Py_TryIncref(PyObject *op)
+{
+#ifdef Py_GIL_DISABLED
+ return _Py_TryIncrefFast(op) || _Py_TryIncRefShared(op);
+#else
+ if (Py_REFCNT(op) > 0) {
+ Py_INCREF(op);
+ return 1;
+ }
+ return 0;
#endif
+}
#ifdef Py_REF_DEBUG
extern void _PyInterpreterState_FinalizeRefTotal(PyInterpreterState *);
diff --git a/Include/internal/pycore_pyatomic_ft_wrappers.h b/Include/internal/pycore_pyatomic_ft_wrappers.h
index e441600d54e1aa..2514f51f1b0086 100644
--- a/Include/internal/pycore_pyatomic_ft_wrappers.h
+++ b/Include/internal/pycore_pyatomic_ft_wrappers.h
@@ -20,9 +20,12 @@ extern "C" {
#endif
#ifdef Py_GIL_DISABLED
+#define FT_ATOMIC_LOAD_PTR(value) _Py_atomic_load_ptr(&value)
#define FT_ATOMIC_LOAD_SSIZE(value) _Py_atomic_load_ssize(&value)
#define FT_ATOMIC_LOAD_SSIZE_RELAXED(value) \
_Py_atomic_load_ssize_relaxed(&value)
+#define FT_ATOMIC_STORE_PTR(value, new_value) \
+ _Py_atomic_store_ptr(&value, new_value)
#define FT_ATOMIC_STORE_PTR_RELAXED(value, new_value) \
_Py_atomic_store_ptr_relaxed(&value, new_value)
#define FT_ATOMIC_STORE_PTR_RELEASE(value, new_value) \
@@ -30,8 +33,10 @@ extern "C" {
#define FT_ATOMIC_STORE_SSIZE_RELAXED(value, new_value) \
_Py_atomic_store_ssize_relaxed(&value, new_value)
#else
+#define FT_ATOMIC_LOAD_PTR(value) value
#define FT_ATOMIC_LOAD_SSIZE(value) value
#define FT_ATOMIC_LOAD_SSIZE_RELAXED(value) value
+#define FT_ATOMIC_STORE_PTR(value, new_value) value = new_value
#define FT_ATOMIC_STORE_PTR_RELAXED(value, new_value) value = new_value
#define FT_ATOMIC_STORE_PTR_RELEASE(value, new_value) value = new_value
#define FT_ATOMIC_STORE_SSIZE_RELAXED(value, new_value) value = new_value
diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h
index 35e266acd3ab60..eb5b5fee59009c 100644
--- a/Include/internal/pycore_pystate.h
+++ b/Include/internal/pycore_pystate.h
@@ -77,6 +77,9 @@ _Py_IsMainInterpreterFinalizing(PyInterpreterState *interp)
interp == &_PyRuntime._main_interpreter);
}
+// Export for _xxsubinterpreters module.
+PyAPI_FUNC(PyObject *) _PyInterpreterState_GetIDObject(PyInterpreterState *);
+
// Export for _xxsubinterpreters module.
PyAPI_FUNC(int) _PyInterpreterState_SetRunningMain(PyInterpreterState *);
PyAPI_FUNC(void) _PyInterpreterState_SetNotRunningMain(PyInterpreterState *);
diff --git a/Include/internal/pycore_runtime_init.h b/Include/internal/pycore_runtime_init.h
index 88d888943d28b1..33c7a9dadfd2a1 100644
--- a/Include/internal/pycore_runtime_init.h
+++ b/Include/internal/pycore_runtime_init.h
@@ -162,6 +162,7 @@ extern PyTypeObject _PyExc_MemoryError;
#define _PyInterpreterState_INIT(INTERP) \
{ \
.id_refcount = -1, \
+ ._whence = _PyInterpreterState_WHENCE_NOTSET, \
.imports = IMPORTS_INIT, \
.ceval = { \
.recursion_limit = Py_DEFAULT_RECURSION_LIMIT, \
diff --git a/Include/internal/pycore_runtime_init_generated.h b/Include/internal/pycore_runtime_init_generated.h
index d75f0f88656128..795f95cb3a7885 100644
--- a/Include/internal/pycore_runtime_init_generated.h
+++ b/Include/internal/pycore_runtime_init_generated.h
@@ -791,6 +791,7 @@ extern "C" {
INIT_ID(alias), \
INIT_ID(allow_code), \
INIT_ID(append), \
+ INIT_ID(arg), \
INIT_ID(argdefs), \
INIT_ID(args), \
INIT_ID(arguments), \
@@ -1273,6 +1274,7 @@ extern "C" {
INIT_ID(version), \
INIT_ID(volume), \
INIT_ID(wait_all), \
+ INIT_ID(warn_on_full_buffer), \
INIT_ID(warnings), \
INIT_ID(warnoptions), \
INIT_ID(wbits), \
diff --git a/Include/internal/pycore_tstate.h b/Include/internal/pycore_tstate.h
index e268e6fbbb087b..733e3172a1c0ff 100644
--- a/Include/internal/pycore_tstate.h
+++ b/Include/internal/pycore_tstate.h
@@ -38,6 +38,10 @@ typedef struct _PyThreadStateImpl {
struct _brc_thread_state brc;
#endif
+#if defined(Py_REF_DEBUG) && defined(Py_GIL_DISABLED)
+ Py_ssize_t reftotal; // this thread's total refcount operations
+#endif
+
} _PyThreadStateImpl;
diff --git a/Include/internal/pycore_typeobject.h b/Include/internal/pycore_typeobject.h
index 8a25935f308178..09c4501c38c935 100644
--- a/Include/internal/pycore_typeobject.h
+++ b/Include/internal/pycore_typeobject.h
@@ -68,6 +68,43 @@ struct types_state {
unsigned int next_version_tag;
struct type_cache type_cache;
+
+ /* Every static builtin type is initialized for each interpreter
+ during its own initialization, including for the main interpreter
+ during global runtime initialization. This is done by calling
+ _PyStaticType_InitBuiltin().
+
+ The first time a static builtin type is initialized, all the
+ normal PyType_Ready() stuff happens. The only difference from
+ normal is that there are three PyTypeObject fields holding
+ objects which are stored here (on PyInterpreterState) rather
+ than in the corresponding PyTypeObject fields. Those are:
+ tp_dict (cls.__dict__), tp_subclasses (cls.__subclasses__),
+ and tp_weaklist.
+
+ When a subinterpreter is initialized, each static builtin type
+ is still initialized, but only the interpreter-specific portion,
+ namely those three objects.
+
+ Those objects are stored in the PyInterpreterState.types.builtins
+ array, at the index corresponding to each specific static builtin
+ type. That index (a size_t value) is stored in the tp_subclasses
+ field. For static builtin types, we re-purposed the now-unused
+ tp_subclasses to avoid adding another field to PyTypeObject.
+ In all other cases tp_subclasses holds a dict like before.
+ (The field was previously defined as PyObject*, but is now void*
+ to reflect its dual use.)
+
+ The index for each static builtin type isn't statically assigned.
+ Instead it is calculated the first time a type is initialized
+ (by the main interpreter). The index matches the order in which
+ the type was initialized relative to the others. The actual
+ value comes from the current value of num_builtins_initialized,
+ as each type is initialized for the main interpreter.
+
+ num_builtins_initialized is incremented once for each static
+ builtin type. Once initialization is over for a subinterpreter,
+ the value will be the same as for all other interpreters. */
size_t num_builtins_initialized;
static_builtin_state builtins[_Py_MAX_STATIC_BUILTIN_TYPES];
PyMutex mutex;
@@ -152,6 +189,18 @@ PyAPI_FUNC(PyObject*) _PySuper_Lookup(PyTypeObject *su_type, PyObject *su_obj,
extern PyObject* _PyType_GetFullyQualifiedName(PyTypeObject *type, char sep);
+// Perform the following operation, in a thread-safe way when required by the
+// build mode.
+//
+// self->tp_flags = (self->tp_flags & ~mask) | flags;
+extern void _PyType_SetFlags(PyTypeObject *self, unsigned long mask,
+ unsigned long flags);
+
+// Like _PyType_SetFlags(), but apply the operation to self and any of its
+// subclasses without Py_TPFLAGS_IMMUTABLETYPE set.
+extern void _PyType_SetFlagsRecursive(PyTypeObject *self, unsigned long mask,
+ unsigned long flags);
+
#ifdef __cplusplus
}
diff --git a/Include/internal/pycore_unicodeobject_generated.h b/Include/internal/pycore_unicodeobject_generated.h
index 7f67e67f571eae..272462e99b0d94 100644
--- a/Include/internal/pycore_unicodeobject_generated.h
+++ b/Include/internal/pycore_unicodeobject_generated.h
@@ -687,6 +687,9 @@ _PyUnicode_InitStaticStrings(PyInterpreterState *interp) {
string = &_Py_ID(append);
assert(_PyUnicode_CheckConsistency(string, 1));
_PyUnicode_InternInPlace(interp, &string);
+ string = &_Py_ID(arg);
+ assert(_PyUnicode_CheckConsistency(string, 1));
+ _PyUnicode_InternInPlace(interp, &string);
string = &_Py_ID(argdefs);
assert(_PyUnicode_CheckConsistency(string, 1));
_PyUnicode_InternInPlace(interp, &string);
@@ -2133,6 +2136,9 @@ _PyUnicode_InitStaticStrings(PyInterpreterState *interp) {
string = &_Py_ID(wait_all);
assert(_PyUnicode_CheckConsistency(string, 1));
_PyUnicode_InternInPlace(interp, &string);
+ string = &_Py_ID(warn_on_full_buffer);
+ assert(_PyUnicode_CheckConsistency(string, 1));
+ _PyUnicode_InternInPlace(interp, &string);
string = &_Py_ID(warnings);
assert(_PyUnicode_CheckConsistency(string, 1));
_PyUnicode_InternInPlace(interp, &string);
diff --git a/Include/internal/pycore_uop_metadata.h b/Include/internal/pycore_uop_metadata.h
index 111824a938f6cc..e5a99421c241e0 100644
--- a/Include/internal/pycore_uop_metadata.h
+++ b/Include/internal/pycore_uop_metadata.h
@@ -51,22 +51,22 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = {
[_UNARY_NEGATIVE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_UNARY_NOT] = HAS_PURE_FLAG,
[_TO_BOOL] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
- [_TO_BOOL_BOOL] = HAS_EXIT_FLAG | HAS_PASSTHROUGH_FLAG,
+ [_TO_BOOL_BOOL] = HAS_EXIT_FLAG,
[_TO_BOOL_INT] = HAS_EXIT_FLAG,
[_TO_BOOL_LIST] = HAS_EXIT_FLAG,
[_TO_BOOL_NONE] = HAS_EXIT_FLAG,
[_TO_BOOL_STR] = HAS_EXIT_FLAG,
[_REPLACE_WITH_TRUE] = 0,
[_UNARY_INVERT] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
- [_GUARD_BOTH_INT] = HAS_EXIT_FLAG | HAS_PASSTHROUGH_FLAG,
+ [_GUARD_BOTH_INT] = HAS_EXIT_FLAG,
[_BINARY_OP_MULTIPLY_INT] = HAS_ERROR_FLAG | HAS_PURE_FLAG,
[_BINARY_OP_ADD_INT] = HAS_ERROR_FLAG | HAS_PURE_FLAG,
[_BINARY_OP_SUBTRACT_INT] = HAS_ERROR_FLAG | HAS_PURE_FLAG,
- [_GUARD_BOTH_FLOAT] = HAS_EXIT_FLAG | HAS_PASSTHROUGH_FLAG,
+ [_GUARD_BOTH_FLOAT] = HAS_EXIT_FLAG,
[_BINARY_OP_MULTIPLY_FLOAT] = HAS_PURE_FLAG,
[_BINARY_OP_ADD_FLOAT] = HAS_PURE_FLAG,
[_BINARY_OP_SUBTRACT_FLOAT] = HAS_PURE_FLAG,
- [_GUARD_BOTH_UNICODE] = HAS_EXIT_FLAG | HAS_PASSTHROUGH_FLAG,
+ [_GUARD_BOTH_UNICODE] = HAS_EXIT_FLAG,
[_BINARY_OP_ADD_UNICODE] = HAS_ERROR_FLAG | HAS_PURE_FLAG,
[_BINARY_SUBSCR] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_BINARY_SLICE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
@@ -129,23 +129,23 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = {
[_LOAD_SUPER_ATTR_ATTR] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_LOAD_SUPER_ATTR_METHOD] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_LOAD_ATTR] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
- [_GUARD_TYPE_VERSION] = HAS_EXIT_FLAG | HAS_PASSTHROUGH_FLAG,
- [_CHECK_MANAGED_OBJECT_HAS_VALUES] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG,
+ [_GUARD_TYPE_VERSION] = HAS_EXIT_FLAG,
+ [_CHECK_MANAGED_OBJECT_HAS_VALUES] = HAS_DEOPT_FLAG,
[_LOAD_ATTR_INSTANCE_VALUE_0] = HAS_DEOPT_FLAG,
[_LOAD_ATTR_INSTANCE_VALUE_1] = HAS_DEOPT_FLAG,
[_LOAD_ATTR_INSTANCE_VALUE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_OPARG_AND_1_FLAG,
- [_CHECK_ATTR_MODULE] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG,
+ [_CHECK_ATTR_MODULE] = HAS_DEOPT_FLAG,
[_LOAD_ATTR_MODULE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG,
- [_CHECK_ATTR_WITH_HINT] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG,
+ [_CHECK_ATTR_WITH_HINT] = HAS_DEOPT_FLAG,
[_LOAD_ATTR_WITH_HINT] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG,
[_LOAD_ATTR_SLOT_0] = HAS_DEOPT_FLAG,
[_LOAD_ATTR_SLOT_1] = HAS_DEOPT_FLAG,
[_LOAD_ATTR_SLOT] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_OPARG_AND_1_FLAG,
- [_CHECK_ATTR_CLASS] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG,
+ [_CHECK_ATTR_CLASS] = HAS_DEOPT_FLAG,
[_LOAD_ATTR_CLASS_0] = 0,
[_LOAD_ATTR_CLASS_1] = 0,
[_LOAD_ATTR_CLASS] = HAS_ARG_FLAG | HAS_OPARG_AND_1_FLAG,
- [_GUARD_DORV_NO_DICT] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG,
+ [_GUARD_DORV_NO_DICT] = HAS_DEOPT_FLAG,
[_STORE_ATTR_INSTANCE_VALUE] = 0,
[_STORE_ATTR_SLOT] = HAS_ESCAPES_FLAG,
[_COMPARE_OP] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
@@ -167,31 +167,31 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = {
[_GET_ITER] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_GET_YIELD_FROM_ITER] = HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG,
[_FOR_ITER_TIER_TWO] = HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG,
- [_ITER_CHECK_LIST] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG,
- [_GUARD_NOT_EXHAUSTED_LIST] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG,
+ [_ITER_CHECK_LIST] = HAS_DEOPT_FLAG,
+ [_GUARD_NOT_EXHAUSTED_LIST] = HAS_DEOPT_FLAG,
[_ITER_NEXT_LIST] = 0,
- [_ITER_CHECK_TUPLE] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG,
- [_GUARD_NOT_EXHAUSTED_TUPLE] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG,
+ [_ITER_CHECK_TUPLE] = HAS_DEOPT_FLAG,
+ [_GUARD_NOT_EXHAUSTED_TUPLE] = HAS_DEOPT_FLAG,
[_ITER_NEXT_TUPLE] = 0,
- [_ITER_CHECK_RANGE] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG,
- [_GUARD_NOT_EXHAUSTED_RANGE] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG,
+ [_ITER_CHECK_RANGE] = HAS_DEOPT_FLAG,
+ [_GUARD_NOT_EXHAUSTED_RANGE] = HAS_DEOPT_FLAG,
[_ITER_NEXT_RANGE] = HAS_ERROR_FLAG,
[_WITH_EXCEPT_START] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_PUSH_EXC_INFO] = 0,
- [_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG,
- [_GUARD_KEYS_VERSION] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG,
+ [_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT] = HAS_DEOPT_FLAG,
+ [_GUARD_KEYS_VERSION] = HAS_DEOPT_FLAG,
[_LOAD_ATTR_METHOD_WITH_VALUES] = HAS_ARG_FLAG,
[_LOAD_ATTR_METHOD_NO_DICT] = HAS_ARG_FLAG,
[_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = HAS_ARG_FLAG,
[_LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = HAS_ARG_FLAG,
- [_CHECK_ATTR_METHOD_LAZY_DICT] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG,
+ [_CHECK_ATTR_METHOD_LAZY_DICT] = HAS_DEOPT_FLAG,
[_LOAD_ATTR_METHOD_LAZY_DICT] = HAS_ARG_FLAG,
[_CHECK_PERIODIC] = HAS_EVAL_BREAK_FLAG,
- [_CHECK_CALL_BOUND_METHOD_EXACT_ARGS] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG,
+ [_CHECK_CALL_BOUND_METHOD_EXACT_ARGS] = HAS_ARG_FLAG | HAS_DEOPT_FLAG,
[_INIT_CALL_BOUND_METHOD_EXACT_ARGS] = HAS_ARG_FLAG,
[_CHECK_PEP_523] = HAS_DEOPT_FLAG,
- [_CHECK_FUNCTION_EXACT_ARGS] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG,
- [_CHECK_STACK_SPACE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG,
+ [_CHECK_FUNCTION_EXACT_ARGS] = HAS_ARG_FLAG | HAS_DEOPT_FLAG,
+ [_CHECK_STACK_SPACE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG,
[_INIT_CALL_PY_EXACT_ARGS_0] = HAS_ESCAPES_FLAG | HAS_PURE_FLAG,
[_INIT_CALL_PY_EXACT_ARGS_1] = HAS_ESCAPES_FLAG | HAS_PURE_FLAG,
[_INIT_CALL_PY_EXACT_ARGS_2] = HAS_ESCAPES_FLAG | HAS_PURE_FLAG,
diff --git a/Include/internal/pycore_weakref.h b/Include/internal/pycore_weakref.h
index dea267b49039e7..e057a27340f718 100644
--- a/Include/internal/pycore_weakref.h
+++ b/Include/internal/pycore_weakref.h
@@ -9,7 +9,35 @@ extern "C" {
#endif
#include "pycore_critical_section.h" // Py_BEGIN_CRITICAL_SECTION()
+#include "pycore_lock.h"
#include "pycore_object.h" // _Py_REF_IS_MERGED()
+#include "pycore_pyatomic_ft_wrappers.h"
+
+#ifdef Py_GIL_DISABLED
+
+#define WEAKREF_LIST_LOCK(obj) \
+ _PyInterpreterState_GET() \
+ ->weakref_locks[((uintptr_t)obj) % NUM_WEAKREF_LIST_LOCKS]
+
+// Lock using the referenced object
+#define LOCK_WEAKREFS(obj) \
+ PyMutex_LockFlags(&WEAKREF_LIST_LOCK(obj), _Py_LOCK_DONT_DETACH)
+#define UNLOCK_WEAKREFS(obj) PyMutex_Unlock(&WEAKREF_LIST_LOCK(obj))
+
+// Lock using a weakref
+#define LOCK_WEAKREFS_FOR_WR(wr) \
+ PyMutex_LockFlags(wr->weakrefs_lock, _Py_LOCK_DONT_DETACH)
+#define UNLOCK_WEAKREFS_FOR_WR(wr) PyMutex_Unlock(wr->weakrefs_lock)
+
+#else
+
+#define LOCK_WEAKREFS(obj)
+#define UNLOCK_WEAKREFS(obj)
+
+#define LOCK_WEAKREFS_FOR_WR(wr)
+#define UNLOCK_WEAKREFS_FOR_WR(wr)
+
+#endif
static inline int _is_dead(PyObject *obj)
{
@@ -30,53 +58,64 @@ static inline int _is_dead(PyObject *obj)
static inline PyObject* _PyWeakref_GET_REF(PyObject *ref_obj)
{
assert(PyWeakref_Check(ref_obj));
- PyObject *ret = NULL;
- Py_BEGIN_CRITICAL_SECTION(ref_obj);
PyWeakReference *ref = _Py_CAST(PyWeakReference*, ref_obj);
- PyObject *obj = ref->wr_object;
+ PyObject *obj = FT_ATOMIC_LOAD_PTR(ref->wr_object);
if (obj == Py_None) {
// clear_weakref() was called
- goto end;
+ return NULL;
}
- if (_is_dead(obj)) {
- goto end;
+ LOCK_WEAKREFS(obj);
+#ifdef Py_GIL_DISABLED
+ if (ref->wr_object == Py_None) {
+ // clear_weakref() was called
+ UNLOCK_WEAKREFS(obj);
+ return NULL;
}
-#if !defined(Py_GIL_DISABLED)
- assert(Py_REFCNT(obj) > 0);
#endif
- ret = Py_NewRef(obj);
-end:
- Py_END_CRITICAL_SECTION();
- return ret;
+ if (_Py_TryIncref(obj)) {
+ UNLOCK_WEAKREFS(obj);
+ return obj;
+ }
+ UNLOCK_WEAKREFS(obj);
+ return NULL;
}
static inline int _PyWeakref_IS_DEAD(PyObject *ref_obj)
{
assert(PyWeakref_Check(ref_obj));
int ret = 0;
- Py_BEGIN_CRITICAL_SECTION(ref_obj);
PyWeakReference *ref = _Py_CAST(PyWeakReference*, ref_obj);
- PyObject *obj = ref->wr_object;
+ PyObject *obj = FT_ATOMIC_LOAD_PTR(ref->wr_object);
if (obj == Py_None) {
// clear_weakref() was called
ret = 1;
}
else {
+ LOCK_WEAKREFS(obj);
// See _PyWeakref_GET_REF() for the rationale of this test
+#ifdef Py_GIL_DISABLED
+ ret = (ref->wr_object == Py_None) || _is_dead(obj);
+#else
ret = _is_dead(obj);
+#endif
+ UNLOCK_WEAKREFS(obj);
}
- Py_END_CRITICAL_SECTION();
return ret;
}
-extern Py_ssize_t _PyWeakref_GetWeakrefCount(PyWeakReference *head);
+extern Py_ssize_t _PyWeakref_GetWeakrefCount(PyObject *obj);
+
+// Clear all the weak references to obj but leave their callbacks uncalled and
+// intact.
+extern void _PyWeakref_ClearWeakRefsExceptCallbacks(PyObject *obj);
extern void _PyWeakref_ClearRef(PyWeakReference *self);
+PyAPI_FUNC(int) _PyWeakref_IsDead(PyObject *weakref);
+
#ifdef __cplusplus
}
#endif
#endif /* !Py_INTERNAL_WEAKREF_H */
-
diff --git a/Include/object.h b/Include/object.h
index 13443329dfb5a2..ffcacf1a3ef4ed 100644
--- a/Include/object.h
+++ b/Include/object.h
@@ -115,8 +115,11 @@ check by comparing the reference count field to the immortality reference count.
// Kept for backward compatibility. It was needed by Py_TRACE_REFS build.
#define _PyObject_EXTRA_INIT
-// Make all internal uses of PyObject_HEAD_INIT immortal while preserving the
-// C-API expectation that the refcnt will be set to 1.
+/* Make all uses of PyObject_HEAD_INIT immortal.
+ *
+ * Statically allocated objects might be shared between
+ * interpreters, so must be marked as immortal.
+ */
#if defined(Py_GIL_DISABLED)
#define PyObject_HEAD_INIT(type) \
{ \
@@ -128,19 +131,13 @@ check by comparing the reference count field to the immortality reference count.
0, \
(type), \
},
-#elif defined(Py_BUILD_CORE)
+#else
#define PyObject_HEAD_INIT(type) \
{ \
{ _Py_IMMORTAL_REFCNT }, \
(type) \
},
-#else
-#define PyObject_HEAD_INIT(type) \
- { \
- { 1 }, \
- (type) \
- },
-#endif /* Py_BUILD_CORE */
+#endif
#define PyVarObject_HEAD_INIT(type, size) \
{ \
diff --git a/Include/patchlevel.h b/Include/patchlevel.h
index 942922bd0df698..35c595deaa72c2 100644
--- a/Include/patchlevel.h
+++ b/Include/patchlevel.h
@@ -20,10 +20,10 @@
#define PY_MINOR_VERSION 13
#define PY_MICRO_VERSION 0
#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_ALPHA
-#define PY_RELEASE_SERIAL 5
+#define PY_RELEASE_SERIAL 6
/* Version as a string */
-#define PY_VERSION "3.13.0a5+"
+#define PY_VERSION "3.13.0a6+"
/*--end constants--*/
/* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2.
diff --git a/Include/pyport.h b/Include/pyport.h
index 9d7ef0061806ad..2ba81a4be42822 100644
--- a/Include/pyport.h
+++ b/Include/pyport.h
@@ -572,6 +572,9 @@ extern "C" {
# if defined(__SANITIZE_ADDRESS__)
# define _Py_ADDRESS_SANITIZER
# endif
+# if defined(__SANITIZE_THREAD__)
+# define _Py_THREAD_SANITIZER
+# endif
#endif
diff --git a/Lib/asyncio/queues.py b/Lib/asyncio/queues.py
index a9656a6df561ba..2f3865114a84f9 100644
--- a/Lib/asyncio/queues.py
+++ b/Lib/asyncio/queues.py
@@ -1,4 +1,11 @@
-__all__ = ('Queue', 'PriorityQueue', 'LifoQueue', 'QueueFull', 'QueueEmpty')
+__all__ = (
+ 'Queue',
+ 'PriorityQueue',
+ 'LifoQueue',
+ 'QueueFull',
+ 'QueueEmpty',
+ 'QueueShutDown',
+)
import collections
import heapq
@@ -18,6 +25,11 @@ class QueueFull(Exception):
pass
+class QueueShutDown(Exception):
+ """Raised when putting on to or getting from a shut-down Queue."""
+ pass
+
+
class Queue(mixins._LoopBoundMixin):
"""A queue, useful for coordinating producer and consumer coroutines.
@@ -41,6 +53,7 @@ def __init__(self, maxsize=0):
self._finished = locks.Event()
self._finished.set()
self._init(maxsize)
+ self._is_shutdown = False
# These three are overridable in subclasses.
@@ -81,6 +94,8 @@ def _format(self):
result += f' _putters[{len(self._putters)}]'
if self._unfinished_tasks:
result += f' tasks={self._unfinished_tasks}'
+ if self._is_shutdown:
+ result += ' shutdown'
return result
def qsize(self):
@@ -112,8 +127,12 @@ async def put(self, item):
Put an item into the queue. If the queue is full, wait until a free
slot is available before adding item.
+
+ Raises QueueShutDown if the queue has been shut down.
"""
while self.full():
+ if self._is_shutdown:
+ raise QueueShutDown
putter = self._get_loop().create_future()
self._putters.append(putter)
try:
@@ -125,7 +144,7 @@ async def put(self, item):
self._putters.remove(putter)
except ValueError:
# The putter could be removed from self._putters by a
- # previous get_nowait call.
+ # previous get_nowait call or a shutdown call.
pass
if not self.full() and not putter.cancelled():
# We were woken up by get_nowait(), but can't take
@@ -138,7 +157,11 @@ def put_nowait(self, item):
"""Put an item into the queue without blocking.
If no free slot is immediately available, raise QueueFull.
+
+ Raises QueueShutDown if the queue has been shut down.
"""
+ if self._is_shutdown:
+ raise QueueShutDown
if self.full():
raise QueueFull
self._put(item)
@@ -150,8 +173,13 @@ async def get(self):
"""Remove and return an item from the queue.
If queue is empty, wait until an item is available.
+
+ Raises QueueShutDown if the queue has been shut down and is empty, or
+ if the queue has been shut down immediately.
"""
while self.empty():
+ if self._is_shutdown and self.empty():
+ raise QueueShutDown
getter = self._get_loop().create_future()
self._getters.append(getter)
try:
@@ -163,7 +191,7 @@ async def get(self):
self._getters.remove(getter)
except ValueError:
# The getter could be removed from self._getters by a
- # previous put_nowait call.
+ # previous put_nowait call, or a shutdown call.
pass
if not self.empty() and not getter.cancelled():
# We were woken up by put_nowait(), but can't take
@@ -176,8 +204,13 @@ def get_nowait(self):
"""Remove and return an item from the queue.
Return an item if one is immediately available, else raise QueueEmpty.
+
+ Raises QueueShutDown if the queue has been shut down and is empty, or
+ if the queue has been shut down immediately.
"""
if self.empty():
+ if self._is_shutdown:
+ raise QueueShutDown
raise QueueEmpty
item = self._get()
self._wakeup_next(self._putters)
@@ -194,6 +227,9 @@ def task_done(self):
been processed (meaning that a task_done() call was received for every
item that had been put() into the queue).
+ shutdown(immediate=True) calls task_done() for each remaining item in
+ the queue.
+
Raises ValueError if called more times than there were items placed in
the queue.
"""
@@ -214,6 +250,34 @@ async def join(self):
if self._unfinished_tasks > 0:
await self._finished.wait()
+ def shutdown(self, immediate=False):
+ """Shut-down the queue, making queue gets and puts raise QueueShutDown.
+
+ By default, gets will only raise once the queue is empty. Set
+ 'immediate' to True to make gets raise immediately instead.
+
+ All blocked callers of put() and get() will be unblocked. If
+ 'immediate', a task is marked as done for each item remaining in
+ the queue, which may unblock callers of join().
+ """
+ self._is_shutdown = True
+ if immediate:
+ while not self.empty():
+ self._get()
+ if self._unfinished_tasks > 0:
+ self._unfinished_tasks -= 1
+ if self._unfinished_tasks == 0:
+ self._finished.set()
+ # All getters need to re-check queue-empty to raise ShutDown
+ while self._getters:
+ getter = self._getters.popleft()
+ if not getter.done():
+ getter.set_result(None)
+ while self._putters:
+ putter = self._putters.popleft()
+ if not putter.done():
+ putter.set_result(None)
+
class PriorityQueue(Queue):
"""A subclass of Queue; retrieves entries in priority order (lowest first).
diff --git a/Lib/asyncio/streams.py b/Lib/asyncio/streams.py
index 3fe52dbac25c91..64aac4cc50d15a 100644
--- a/Lib/asyncio/streams.py
+++ b/Lib/asyncio/streams.py
@@ -590,20 +590,34 @@ async def readuntil(self, separator=b'\n'):
If the data cannot be read because of over limit, a
LimitOverrunError exception will be raised, and the data
will be left in the internal buffer, so it can be read again.
+
+ The ``separator`` may also be a tuple of separators. In this
+ case the return value will be the shortest possible that has any
+ separator as the suffix. For the purposes of LimitOverrunError,
+ the shortest possible separator is considered to be the one that
+ matched.
"""
- seplen = len(separator)
- if seplen == 0:
+ if isinstance(separator, tuple):
+ # Makes sure the shortest match wins
+ separator = sorted(separator, key=len)
+ else:
+ separator = [separator]
+ if not separator:
+ raise ValueError('Separator should contain at least one element')
+ min_seplen = len(separator[0])
+ max_seplen = len(separator[-1])
+ if min_seplen == 0:
raise ValueError('Separator should be at least one-byte string')
if self._exception is not None:
raise self._exception
# Consume whole buffer except last bytes, which length is
- # one less than seplen. Let's check corner cases with
- # separator='SEPARATOR':
+ # one less than max_seplen. Let's check corner cases with
+ # separator[-1]='SEPARATOR':
# * we have received almost complete separator (without last
# byte). i.e buffer='some textSEPARATO'. In this case we
- # can safely consume len(separator) - 1 bytes.
+ # can safely consume max_seplen - 1 bytes.
# * last byte of buffer is first byte of separator, i.e.
# buffer='abcdefghijklmnopqrS'. We may safely consume
# everything except that last byte, but this require to
@@ -616,26 +630,35 @@ async def readuntil(self, separator=b'\n'):
# messages :)
# `offset` is the number of bytes from the beginning of the buffer
- # where there is no occurrence of `separator`.
+ # where there is no occurrence of any `separator`.
offset = 0
- # Loop until we find `separator` in the buffer, exceed the buffer size,
+ # Loop until we find a `separator` in the buffer, exceed the buffer size,
# or an EOF has happened.
while True:
buflen = len(self._buffer)
- # Check if we now have enough data in the buffer for `separator` to
- # fit.
- if buflen - offset >= seplen:
- isep = self._buffer.find(separator, offset)
-
- if isep != -1:
- # `separator` is in the buffer. `isep` will be used later
- # to retrieve the data.
+ # Check if we now have enough data in the buffer for shortest
+ # separator to fit.
+ if buflen - offset >= min_seplen:
+ match_start = None
+ match_end = None
+ for sep in separator:
+ isep = self._buffer.find(sep, offset)
+
+ if isep != -1:
+ # `separator` is in the buffer. `match_start` and
+ # `match_end` will be used later to retrieve the
+ # data.
+ end = isep + len(sep)
+ if match_end is None or end < match_end:
+ match_end = end
+ match_start = isep
+ if match_end is not None:
break
# see upper comment for explanation.
- offset = buflen + 1 - seplen
+ offset = max(0, buflen + 1 - max_seplen)
if offset > self._limit:
raise exceptions.LimitOverrunError(
'Separator is not found, and chunk exceed the limit',
@@ -644,7 +667,7 @@ async def readuntil(self, separator=b'\n'):
# Complete message (with full separator) may be present in buffer
# even when EOF flag is set. This may happen when the last chunk
# adds data which makes separator be found. That's why we check for
- # EOF *ater* inspecting the buffer.
+ # EOF *after* inspecting the buffer.
if self._eof:
chunk = bytes(self._buffer)
self._buffer.clear()
@@ -653,12 +676,12 @@ async def readuntil(self, separator=b'\n'):
# _wait_for_data() will resume reading if stream was paused.
await self._wait_for_data('readuntil')
- if isep > self._limit:
+ if match_start > self._limit:
raise exceptions.LimitOverrunError(
- 'Separator is found, but chunk is longer than limit', isep)
+ 'Separator is found, but chunk is longer than limit', match_start)
- chunk = self._buffer[:isep + seplen]
- del self._buffer[:isep + seplen]
+ chunk = self._buffer[:match_end]
+ del self._buffer[:match_end]
self._maybe_resume_transport()
return bytes(chunk)
diff --git a/Lib/asyncio/taskgroups.py b/Lib/asyncio/taskgroups.py
index 57f01230159319..f2ee9648c43876 100644
--- a/Lib/asyncio/taskgroups.py
+++ b/Lib/asyncio/taskgroups.py
@@ -77,12 +77,6 @@ async def __aexit__(self, et, exc, tb):
propagate_cancellation_error = exc
else:
propagate_cancellation_error = None
- if self._parent_cancel_requested:
- # If this flag is set we *must* call uncancel().
- if self._parent_task.uncancel() == 0:
- # If there are no pending cancellations left,
- # don't propagate CancelledError.
- propagate_cancellation_error = None
if et is not None:
if not self._aborting:
@@ -130,6 +124,13 @@ async def __aexit__(self, et, exc, tb):
if self._base_error is not None:
raise self._base_error
+ if self._parent_cancel_requested:
+ # If this flag is set we *must* call uncancel().
+ if self._parent_task.uncancel() == 0:
+ # If there are no pending cancellations left,
+ # don't propagate CancelledError.
+ propagate_cancellation_error = None
+
# Propagate CancelledError if there is one, except if there
# are other errors -- those have priority.
if propagate_cancellation_error is not None and not self._errors:
@@ -139,6 +140,12 @@ async def __aexit__(self, et, exc, tb):
self._errors.append(exc)
if self._errors:
+ # If the parent task is being cancelled from the outside
+ # of the taskgroup, un-cancel and re-cancel the parent task,
+ # which will keep the cancel count stable.
+ if self._parent_task.cancelling():
+ self._parent_task.uncancel()
+ self._parent_task.cancel()
# Exceptions are heavy objects that can have object
# cycles (bad for GC); let's not keep a reference to
# a bunch of them.
diff --git a/Lib/asyncio/tasks.py b/Lib/asyncio/tasks.py
index 7fb697b9441c33..dadcb5b5f36bd7 100644
--- a/Lib/asyncio/tasks.py
+++ b/Lib/asyncio/tasks.py
@@ -255,6 +255,8 @@ def uncancel(self):
"""
if self._num_cancels_requested > 0:
self._num_cancels_requested -= 1
+ if self._num_cancels_requested == 0:
+ self._must_cancel = False
return self._num_cancels_requested
def __eager_start(self):
diff --git a/Lib/configparser.py b/Lib/configparser.py
index d0326c60e9b907..ff7d712bed4afc 100644
--- a/Lib/configparser.py
+++ b/Lib/configparser.py
@@ -143,17 +143,18 @@
between keys and values are surrounded by spaces.
"""
-from collections.abc import MutableMapping
+# Do not import dataclasses; overhead is unacceptable (gh-117703)
+
+from collections.abc import Iterable, MutableMapping
from collections import ChainMap as _ChainMap
import contextlib
-from dataclasses import dataclass, field
import functools
import io
import itertools
import os
import re
import sys
-from typing import Iterable
+import types
__all__ = ("NoSectionError", "DuplicateOptionError", "DuplicateSectionError",
"NoOptionError", "InterpolationError", "InterpolationDepthError",
@@ -538,21 +539,18 @@ def _interpolate_some(self, parser, option, accum, rest, section, map,
"found: %r" % (rest,))
-@dataclass
class _ReadState:
- elements_added : set[str] = field(default_factory=set)
+ elements_added : set[str]
cursect : dict[str, str] | None = None
sectname : str | None = None
optname : str | None = None
lineno : int = 0
indent_level : int = 0
- errors : list[ParsingError] = field(default_factory=list)
-
+ errors : list[ParsingError]
-@dataclass
-class _Prefixes:
- full : Iterable[str]
- inline : Iterable[str]
+ def __init__(self):
+ self.elements_added = set()
+ self.errors = list()
class _Line(str):
@@ -560,7 +558,7 @@ class _Line(str):
def __new__(cls, val, *args, **kwargs):
return super().__new__(cls, val)
- def __init__(self, val, prefixes: _Prefixes):
+ def __init__(self, val, prefixes):
self.prefixes = prefixes
@functools.cached_property
@@ -653,7 +651,7 @@ def __init__(self, defaults=None, dict_type=_default_dict,
else:
self._optcre = re.compile(self._OPT_TMPL.format(delim=d),
re.VERBOSE)
- self._prefixes = _Prefixes(
+ self._prefixes = types.SimpleNamespace(
full=tuple(comment_prefixes or ()),
inline=tuple(inline_comment_prefixes or ()),
)
diff --git a/Lib/doctest.py b/Lib/doctest.py
index 7ea71b0d11ce66..e003e30786ed93 100644
--- a/Lib/doctest.py
+++ b/Lib/doctest.py
@@ -1142,7 +1142,14 @@ def _find_lineno(self, obj, source_lines):
obj = obj.fget
if inspect.isfunction(obj) and getattr(obj, '__doc__', None):
# We don't use `docstring` var here, because `obj` can be changed.
- obj = inspect.unwrap(obj).__code__
+ obj = inspect.unwrap(obj)
+ try:
+ obj = obj.__code__
+ except AttributeError:
+ # Functions implemented in C don't necessarily
+ # have a __code__ attribute.
+ # If there's no code, there's no lineno
+ return None
if inspect.istraceback(obj): obj = obj.tb_frame
if inspect.isframe(obj): obj = obj.f_code
if inspect.iscode(obj):
diff --git a/Lib/enum.py b/Lib/enum.py
index 2a135e1b1f1826..98a49eafbb9897 100644
--- a/Lib/enum.py
+++ b/Lib/enum.py
@@ -1088,8 +1088,6 @@ def _add_member_(cls, name, member):
setattr(cls, name, member)
# now add to _member_map_ (even aliases)
cls._member_map_[name] = member
- #
- cls._member_map_[name] = member
EnumMeta = EnumType # keep EnumMeta name for backwards compatibility
@@ -1802,20 +1800,31 @@ def convert_class(cls):
for name, value in attrs.items():
if isinstance(value, auto) and auto.value is _auto_null:
value = gnv(name, 1, len(member_names), gnv_last_values)
- if value in value2member_map or value in unhashable_values:
+ # create basic member (possibly isolate value for alias check)
+ if use_args:
+ if not isinstance(value, tuple):
+ value = (value, )
+ member = new_member(enum_class, *value)
+ value = value[0]
+ else:
+ member = new_member(enum_class)
+ if __new__ is None:
+ member._value_ = value
+ # now check if alias
+ try:
+ contained = value2member_map.get(member._value_)
+ except TypeError:
+ contained = None
+ if member._value_ in unhashable_values:
+ for m in enum_class:
+ if m._value_ == member._value_:
+ contained = m
+ break
+ if contained is not None:
# an alias to an existing member
- enum_class(value)._add_alias_(name)
+ contained._add_alias_(name)
else:
- # create the member
- if use_args:
- if not isinstance(value, tuple):
- value = (value, )
- member = new_member(enum_class, *value)
- value = value[0]
- else:
- member = new_member(enum_class)
- if __new__ is None:
- member._value_ = value
+ # finish creating member
member._name_ = name
member.__objclass__ = enum_class
member.__init__(value)
@@ -1847,24 +1856,31 @@ def convert_class(cls):
if value.value is _auto_null:
value.value = gnv(name, 1, len(member_names), gnv_last_values)
value = value.value
+ # create basic member (possibly isolate value for alias check)
+ if use_args:
+ if not isinstance(value, tuple):
+ value = (value, )
+ member = new_member(enum_class, *value)
+ value = value[0]
+ else:
+ member = new_member(enum_class)
+ if __new__ is None:
+ member._value_ = value
+ # now check if alias
try:
- contained = value in value2member_map
+ contained = value2member_map.get(member._value_)
except TypeError:
- contained = value in unhashable_values
- if contained:
+ contained = None
+ if member._value_ in unhashable_values:
+ for m in enum_class:
+ if m._value_ == member._value_:
+ contained = m
+ break
+ if contained is not None:
# an alias to an existing member
- enum_class(value)._add_alias_(name)
+ contained._add_alias_(name)
else:
- # create the member
- if use_args:
- if not isinstance(value, tuple):
- value = (value, )
- member = new_member(enum_class, *value)
- value = value[0]
- else:
- member = new_member(enum_class)
- if __new__ is None:
- member._value_ = value
+ # finish creating member
member._name_ = name
member.__objclass__ = enum_class
member.__init__(value)
diff --git a/Lib/functools.py b/Lib/functools.py
index 601cb8e7c0b74b..b42b9eaa0a045c 100644
--- a/Lib/functools.py
+++ b/Lib/functools.py
@@ -673,7 +673,7 @@ def cache(user_function, /):
def _c3_merge(sequences):
"""Merges MROs in *sequences* to a single MRO using the C3 algorithm.
- Adapted from https://www.python.org/download/releases/2.3/mro/.
+ Adapted from https://docs.python.org/3/howto/mro.html.
"""
result = []
diff --git a/Lib/glob.py b/Lib/glob.py
index a915cf0bdf4502..72cf22299763f0 100644
--- a/Lib/glob.py
+++ b/Lib/glob.py
@@ -4,7 +4,9 @@
import os
import re
import fnmatch
+import functools
import itertools
+import operator
import stat
import sys
@@ -256,7 +258,9 @@ def escape(pathname):
return drive + pathname
+_special_parts = ('', '.', '..')
_dir_open_flags = os.O_RDONLY | getattr(os, 'O_DIRECTORY', 0)
+_no_recurse_symlinks = object()
def translate(pat, *, recursive=False, include_hidden=False, seps=None):
@@ -312,3 +316,242 @@ def translate(pat, *, recursive=False, include_hidden=False, seps=None):
results.append(any_sep)
res = ''.join(results)
return fr'(?s:{res})\Z'
+
+
+@functools.lru_cache(maxsize=512)
+def _compile_pattern(pat, sep, case_sensitive, recursive=True):
+ """Compile given glob pattern to a re.Pattern object (observing case
+ sensitivity)."""
+ flags = re.NOFLAG if case_sensitive else re.IGNORECASE
+ regex = translate(pat, recursive=recursive, include_hidden=True, seps=sep)
+ return re.compile(regex, flags=flags).match
+
+
+class _Globber:
+ """Class providing shell-style pattern matching and globbing.
+ """
+
+ def __init__(self, sep, case_sensitive, case_pedantic=False, recursive=False):
+ self.sep = sep
+ self.case_sensitive = case_sensitive
+ self.case_pedantic = case_pedantic
+ self.recursive = recursive
+
+ # Low-level methods
+
+ lstat = staticmethod(os.lstat)
+ scandir = staticmethod(os.scandir)
+ parse_entry = operator.attrgetter('path')
+ concat_path = operator.add
+
+ if os.name == 'nt':
+ @staticmethod
+ def add_slash(pathname):
+ tail = os.path.splitroot(pathname)[2]
+ if not tail or tail[-1] in '\\/':
+ return pathname
+ return f'{pathname}\\'
+ else:
+ @staticmethod
+ def add_slash(pathname):
+ if not pathname or pathname[-1] == '/':
+ return pathname
+ return f'{pathname}/'
+
+ # High-level methods
+
+ def compile(self, pat):
+ return _compile_pattern(pat, self.sep, self.case_sensitive, self.recursive)
+
+ def selector(self, parts):
+ """Returns a function that selects from a given path, walking and
+ filtering according to the glob-style pattern parts in *parts*.
+ """
+ if not parts:
+ return self.select_exists
+ part = parts.pop()
+ if self.recursive and part == '**':
+ selector = self.recursive_selector
+ elif part in _special_parts:
+ selector = self.special_selector
+ elif not self.case_pedantic and magic_check.search(part) is None:
+ selector = self.literal_selector
+ else:
+ selector = self.wildcard_selector
+ return selector(part, parts)
+
+ def special_selector(self, part, parts):
+ """Returns a function that selects special children of the given path.
+ """
+ select_next = self.selector(parts)
+
+ def select_special(path, exists=False):
+ path = self.concat_path(self.add_slash(path), part)
+ return select_next(path, exists)
+ return select_special
+
+ def literal_selector(self, part, parts):
+ """Returns a function that selects a literal descendant of a path.
+ """
+
+ # Optimization: consume and join any subsequent literal parts here,
+ # rather than leaving them for the next selector. This reduces the
+ # number of string concatenation operations and calls to add_slash().
+ while parts and magic_check.search(parts[-1]) is None:
+ part += self.sep + parts.pop()
+
+ select_next = self.selector(parts)
+
+ def select_literal(path, exists=False):
+ path = self.concat_path(self.add_slash(path), part)
+ return select_next(path, exists=False)
+ return select_literal
+
+ def wildcard_selector(self, part, parts):
+ """Returns a function that selects direct children of a given path,
+ filtering by pattern.
+ """
+
+ match = None if part == '*' else self.compile(part)
+ dir_only = bool(parts)
+ if dir_only:
+ select_next = self.selector(parts)
+
+ def select_wildcard(path, exists=False):
+ try:
+ # We must close the scandir() object before proceeding to
+ # avoid exhausting file descriptors when globbing deep trees.
+ with self.scandir(path) as scandir_it:
+ entries = list(scandir_it)
+ except OSError:
+ pass
+ else:
+ for entry in entries:
+ if match is None or match(entry.name):
+ if dir_only:
+ try:
+ if not entry.is_dir():
+ continue
+ except OSError:
+ continue
+ entry_path = self.parse_entry(entry)
+ if dir_only:
+ yield from select_next(entry_path, exists=True)
+ else:
+ yield entry_path
+ return select_wildcard
+
+ def recursive_selector(self, part, parts):
+ """Returns a function that selects a given path and all its children,
+ recursively, filtering by pattern.
+ """
+ # Optimization: consume following '**' parts, which have no effect.
+ while parts and parts[-1] == '**':
+ parts.pop()
+
+ # Optimization: consume and join any following non-special parts here,
+ # rather than leaving them for the next selector. They're used to
+ # build a regular expression, which we use to filter the results of
+ # the recursive walk. As a result, non-special pattern segments
+ # following a '**' wildcard don't require additional filesystem access
+ # to expand.
+ follow_symlinks = self.recursive is not _no_recurse_symlinks
+ if follow_symlinks:
+ while parts and parts[-1] not in _special_parts:
+ part += self.sep + parts.pop()
+
+ match = None if part == '**' else self.compile(part)
+ dir_only = bool(parts)
+ select_next = self.selector(parts)
+
+ def select_recursive(path, exists=False):
+ path = self.add_slash(path)
+ match_pos = len(str(path))
+ if match is None or match(str(path), match_pos):
+ yield from select_next(path, exists)
+ stack = [path]
+ while stack:
+ yield from select_recursive_step(stack, match_pos)
+
+ def select_recursive_step(stack, match_pos):
+ path = stack.pop()
+ try:
+ # We must close the scandir() object before proceeding to
+ # avoid exhausting file descriptors when globbing deep trees.
+ with self.scandir(path) as scandir_it:
+ entries = list(scandir_it)
+ except OSError:
+ pass
+ else:
+ for entry in entries:
+ is_dir = False
+ try:
+ if entry.is_dir(follow_symlinks=follow_symlinks):
+ is_dir = True
+ except OSError:
+ pass
+
+ if is_dir or not dir_only:
+ entry_path = self.parse_entry(entry)
+ if match is None or match(str(entry_path), match_pos):
+ if dir_only:
+ yield from select_next(entry_path, exists=True)
+ else:
+ # Optimization: directly yield the path if this is
+ # last pattern part.
+ yield entry_path
+ if is_dir:
+ stack.append(entry_path)
+
+ return select_recursive
+
+ def select_exists(self, path, exists=False):
+ """Yields the given path, if it exists.
+ """
+ if exists:
+ # Optimization: this path is already known to exist, e.g. because
+ # it was returned from os.scandir(), so we skip calling lstat().
+ yield path
+ else:
+ try:
+ self.lstat(path)
+ yield path
+ except OSError:
+ pass
+
+ @classmethod
+ def walk(cls, root, top_down, on_error, follow_symlinks):
+ """Walk the directory tree from the given root, similar to os.walk().
+ """
+ paths = [root]
+ while paths:
+ path = paths.pop()
+ if isinstance(path, tuple):
+ yield path
+ continue
+ try:
+ with cls.scandir(path) as scandir_it:
+ dirnames = []
+ filenames = []
+ if not top_down:
+ paths.append((path, dirnames, filenames))
+ for entry in scandir_it:
+ name = entry.name
+ try:
+ if entry.is_dir(follow_symlinks=follow_symlinks):
+ if not top_down:
+ paths.append(cls.parse_entry(entry))
+ dirnames.append(name)
+ else:
+ filenames.append(name)
+ except OSError:
+ filenames.append(name)
+ except OSError as error:
+ if on_error is not None:
+ on_error(error)
+ else:
+ if top_down:
+ yield path, dirnames, filenames
+ if dirnames:
+ prefix = cls.add_slash(path)
+ paths += [cls.concat_path(prefix, d) for d in reversed(dirnames)]
diff --git a/Lib/http/__init__.py b/Lib/http/__init__.py
index e093a1fec4dffc..d64741ec0dd29a 100644
--- a/Lib/http/__init__.py
+++ b/Lib/http/__init__.py
@@ -9,7 +9,7 @@ class HTTPStatus:
Status codes from the following RFCs are all observed:
- * RFC 7231: Hypertext Transfer Protocol (HTTP/1.1), obsoletes 2616
+ * RFC 9110: HTTP Semantics, obsoletes 7231, which obsoleted 2616
* RFC 6585: Additional HTTP Status Codes
* RFC 3229: Delta encoding in HTTP
* RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518
@@ -26,7 +26,6 @@ class HTTPStatus:
def __new__(cls, value, phrase, description=''):
obj = int.__new__(cls, value)
obj._value_ = value
-
obj.phrase = phrase
obj.description = description
return obj
@@ -115,22 +114,25 @@ def is_server_error(self):
'Client must specify Content-Length')
PRECONDITION_FAILED = (412, 'Precondition Failed',
'Precondition in headers is false')
- REQUEST_ENTITY_TOO_LARGE = (413, 'Request Entity Too Large',
- 'Entity is too large')
- REQUEST_URI_TOO_LONG = (414, 'Request-URI Too Long',
+ CONTENT_TOO_LARGE = (413, 'Content Too Large',
+ 'Content is too large')
+ REQUEST_ENTITY_TOO_LARGE = CONTENT_TOO_LARGE
+ URI_TOO_LONG = (414, 'URI Too Long',
'URI is too long')
+ REQUEST_URI_TOO_LONG = URI_TOO_LONG
UNSUPPORTED_MEDIA_TYPE = (415, 'Unsupported Media Type',
'Entity body in unsupported format')
- REQUESTED_RANGE_NOT_SATISFIABLE = (416,
- 'Requested Range Not Satisfiable',
+ RANGE_NOT_SATISFIABLE = (416, 'Range Not Satisfiable',
'Cannot satisfy request range')
+ REQUESTED_RANGE_NOT_SATISFIABLE = RANGE_NOT_SATISFIABLE
EXPECTATION_FAILED = (417, 'Expectation Failed',
'Expect condition could not be satisfied')
IM_A_TEAPOT = (418, 'I\'m a Teapot',
'Server refuses to brew coffee because it is a teapot.')
MISDIRECTED_REQUEST = (421, 'Misdirected Request',
'Server is not able to produce a response')
- UNPROCESSABLE_ENTITY = 422, 'Unprocessable Entity'
+ UNPROCESSABLE_CONTENT = 422, 'Unprocessable Content'
+ UNPROCESSABLE_ENTITY = UNPROCESSABLE_CONTENT
LOCKED = 423, 'Locked'
FAILED_DEPENDENCY = 424, 'Failed Dependency'
TOO_EARLY = 425, 'Too Early'
@@ -177,7 +179,7 @@ class HTTPMethod:
Methods from the following RFCs are all observed:
- * RFC 7231: Hypertext Transfer Protocol (HTTP/1.1), obsoletes 2616
+ * RFC 9110: HTTP Semantics, obsoletes 7231, which obsoleted 2616
* RFC 5789: PATCH Method for HTTP
"""
def __new__(cls, value, description):
diff --git a/Lib/importlib/util.py b/Lib/importlib/util.py
index f1bb4b1fb41576..c94a148e4c50e0 100644
--- a/Lib/importlib/util.py
+++ b/Lib/importlib/util.py
@@ -178,15 +178,17 @@ def __getattribute__(self, attr):
# Only the first thread to get the lock should trigger the load
# and reset the module's class. The rest can now getattr().
if object.__getattribute__(self, '__class__') is _LazyModule:
+ __class__ = loader_state['__class__']
+
# Reentrant calls from the same thread must be allowed to proceed without
# triggering the load again.
# exec_module() and self-referential imports are the primary ways this can
# happen, but in any case we must return something to avoid deadlock.
if loader_state['is_loading']:
- return object.__getattribute__(self, attr)
+ return __class__.__getattribute__(self, attr)
loader_state['is_loading'] = True
- __dict__ = object.__getattribute__(self, '__dict__')
+ __dict__ = __class__.__getattribute__(self, '__dict__')
# All module metadata must be gathered from __spec__ in order to avoid
# using mutated values.
@@ -216,8 +218,10 @@ def __getattribute__(self, attr):
# Update after loading since that's what would happen in an eager
# loading situation.
__dict__.update(attrs_updated)
- # Finally, stop triggering this method.
- self.__class__ = types.ModuleType
+ # Finally, stop triggering this method, if the module did not
+ # already update its own __class__.
+ if isinstance(self, _LazyModule):
+ object.__setattr__(self, '__class__', __class__)
return getattr(self, attr)
diff --git a/Lib/linecache.py b/Lib/linecache.py
index b97999fc1dc909..d1113b108dc5e4 100644
--- a/Lib/linecache.py
+++ b/Lib/linecache.py
@@ -5,9 +5,6 @@
that name.
"""
-import sys
-import os
-
__all__ = ["getline", "clearcache", "checkcache", "lazycache"]
@@ -66,6 +63,11 @@ def checkcache(filename=None):
size, mtime, lines, fullname = entry
if mtime is None:
continue # no-op for files loaded via a __loader__
+ try:
+ # This import can fail if the interpreter is shutting down
+ import os
+ except ImportError:
+ return
try:
stat = os.stat(fullname)
except OSError:
@@ -76,6 +78,12 @@ def checkcache(filename=None):
def updatecache(filename, module_globals=None):
+ # These imports are not at top level because linecache is in the critical
+ # path of the interpreter startup and importing os and sys takes a lot of time
+ # and slows down the startup sequence.
+ import os
+ import sys
+
"""Update a cache entry and return its list of lines.
If something's wrong, print a message, discard the cache entry,
and return an empty list."""
diff --git a/Lib/logging/__init__.py b/Lib/logging/__init__.py
index 927e3e653f065a..174b37c0ab305b 100644
--- a/Lib/logging/__init__.py
+++ b/Lib/logging/__init__.py
@@ -56,7 +56,7 @@
#
#_startTime is used as the base when calculating the relative time of events
#
-_startTime = time.time()
+_startTime = time.time_ns()
#
#raiseExceptions is used to see if exceptions during handling should be
@@ -300,7 +300,7 @@ def __init__(self, name, level, pathname, lineno,
"""
Initialize a logging record with interesting information.
"""
- ct = time.time()
+ ct = time.time_ns()
self.name = name
self.msg = msg
#
@@ -339,9 +339,14 @@ def __init__(self, name, level, pathname, lineno,
self.stack_info = sinfo
self.lineno = lineno
self.funcName = func
- self.created = ct
- self.msecs = int((ct - int(ct)) * 1000) + 0.0 # see gh-89047
- self.relativeCreated = (self.created - _startTime) * 1000
+ self.created = ct / 1e9 # ns to float seconds
+
+ # Get the number of whole milliseconds (0-999) in the fractional part of seconds.
+ # Eg: 1_677_903_920_999_998_503 ns --> 999_998_503 ns--> 999 ms
+ # Convert to float by adding 0.0 for historical reasons. See gh-89047
+ self.msecs = (ct % 1_000_000_000) // 1_000_000 + 0.0
+
+ self.relativeCreated = (ct - _startTime) / 1e6
if logThreads:
self.thread = threading.get_ident()
self.threadName = threading.current_thread().name
@@ -572,7 +577,7 @@ class Formatter(object):
%(lineno)d Source line number where the logging call was issued
(if available)
%(funcName)s Function name
- %(created)f Time when the LogRecord was created (time.time()
+ %(created)f Time when the LogRecord was created (time.time_ns() / 1e9
return value)
%(asctime)s Textual time when the LogRecord was created
%(msecs)d Millisecond portion of the creation time
diff --git a/Lib/ntpath.py b/Lib/ntpath.py
index f9f6c78566e8ed..aba18bfe407abf 100644
--- a/Lib/ntpath.py
+++ b/Lib/ntpath.py
@@ -108,10 +108,8 @@ def join(path, *paths):
seps = '\\/'
colon_seps = ':\\/'
try:
- if not paths:
- path[:0] + sep #23780: Ensure compatible data type even if p is null.
result_drive, result_root, result_path = splitroot(path)
- for p in map(os.fspath, paths):
+ for p in paths:
p_drive, p_root, p_path = splitroot(p)
if p_root:
# Second path is absolute
@@ -368,13 +366,15 @@ def expanduser(path):
If user or $HOME is unknown, do nothing."""
path = os.fspath(path)
if isinstance(path, bytes):
+ seps = b'\\/'
tilde = b'~'
else:
+ seps = '\\/'
tilde = '~'
if not path.startswith(tilde):
return path
i, n = 1, len(path)
- while i < n and path[i] not in _get_bothseps(path):
+ while i < n and path[i] not in seps:
i += 1
if 'USERPROFILE' in os.environ:
diff --git a/Lib/pathlib/__init__.py b/Lib/pathlib/__init__.py
index 747000f1a43475..a4721fbe813962 100644
--- a/Lib/pathlib/__init__.py
+++ b/Lib/pathlib/__init__.py
@@ -5,8 +5,10 @@
operating systems.
"""
+import glob
import io
import ntpath
+import operator
import os
import posixpath
import sys
@@ -111,6 +113,7 @@ class PurePath(_abc.PurePathBase):
'_hash',
)
parser = os.path
+ _globber = glob._Globber
def __new__(cls, *args, **kwargs):
"""Construct a PurePath from one or several strings and or existing
@@ -253,14 +256,17 @@ def _format_parsed_parts(cls, drv, root, tail):
return cls.parser.sep.join(tail)
def _from_parsed_parts(self, drv, root, tail):
- path_str = self._format_parsed_parts(drv, root, tail)
- path = self.with_segments(path_str)
- path._str = path_str or '.'
+ path = self._from_parsed_string(self._format_parsed_parts(drv, root, tail))
path._drv = drv
path._root = root
path._tail_cached = tail
return path
+ def _from_parsed_string(self, path_str):
+ path = self.with_segments(path_str)
+ path._str = path_str or '.'
+ return path
+
@classmethod
def _parse_path(cls, path):
if not path:
@@ -453,21 +459,6 @@ def as_uri(self):
from urllib.parse import quote_from_bytes
return prefix + quote_from_bytes(os.fsencode(path))
- @property
- def _pattern_stack(self):
- """Stack of path components, to be used with patterns in glob()."""
- parts = self._tail.copy()
- pattern = self._raw_path
- if self.anchor:
- raise NotImplementedError("Non-relative patterns are unsupported")
- elif not parts:
- raise ValueError("Unacceptable pattern: {!r}".format(pattern))
- elif pattern[-1] in (self.parser.sep, self.parser.altsep):
- # GH-65238: pathlib doesn't preserve trailing slash. Add it back.
- parts.append('')
- parts.reverse()
- return parts
-
@property
def _pattern_str(self):
"""The path expressed as a string, for use in pattern-matching."""
@@ -576,48 +567,29 @@ def write_text(self, data, encoding=None, errors=None, newline=None):
encoding = io.text_encoding(encoding)
return _abc.PathBase.write_text(self, data, encoding, errors, newline)
+ _remove_leading_dot = operator.itemgetter(slice(2, None))
+ _remove_trailing_slash = operator.itemgetter(slice(-1))
+
+ def _filter_trailing_slash(self, paths):
+ sep = self.parser.sep
+ anchor_len = len(self.anchor)
+ for path_str in paths:
+ if len(path_str) > anchor_len and path_str[-1] == sep:
+ path_str = path_str[:-1]
+ yield path_str
+
def iterdir(self):
"""Yield path objects of the directory contents.
The children are yielded in arbitrary order, and the
special entries '.' and '..' are not included.
"""
- return (self._make_child_relpath(name) for name in os.listdir(self))
-
- def _scandir(self):
- return os.scandir(self)
-
- def _direntry_str(self, entry):
- # Transform an entry yielded from _scandir() into a path string.
- return entry.name if str(self) == '.' else entry.path
-
- def _make_child_direntry(self, entry):
- # Transform an entry yielded from _scandir() into a path object.
- path_str = self._direntry_str(entry)
- path = self.with_segments(path_str)
- path._str = path_str
- path._drv = self.drive
- path._root = self.root
- path._tail_cached = self._tail + [entry.name]
- return path
-
- def _make_child_relpath(self, name):
- if not name:
- return self
- path_str = str(self)
- tail = self._tail
- if tail:
- path_str = f'{path_str}{self.parser.sep}{name}'
- elif path_str != '.':
- path_str = f'{path_str}{name}'
- else:
- path_str = name
- path = self.with_segments(path_str)
- path._str = path_str
- path._drv = self.drive
- path._root = self.root
- path._tail_cached = tail + [name]
- return path
+ root_dir = str(self)
+ with os.scandir(root_dir) as scandir_it:
+ paths = [entry.path for entry in scandir_it]
+ if root_dir == '.':
+ paths = map(self._remove_leading_dot, paths)
+ return map(self._from_parsed_string, paths)
def glob(self, pattern, *, case_sensitive=None, recurse_symlinks=False):
"""Iterate over this subtree and yield all existing files (of any
@@ -626,8 +598,28 @@ def glob(self, pattern, *, case_sensitive=None, recurse_symlinks=False):
sys.audit("pathlib.Path.glob", self, pattern)
if not isinstance(pattern, PurePath):
pattern = self.with_segments(pattern)
- return _abc.PathBase.glob(
- self, pattern, case_sensitive=case_sensitive, recurse_symlinks=recurse_symlinks)
+ if pattern.anchor:
+ raise NotImplementedError("Non-relative patterns are unsupported")
+ parts = pattern._tail.copy()
+ if not parts:
+ raise ValueError("Unacceptable pattern: {!r}".format(pattern))
+ raw = pattern._raw_path
+ if raw[-1] in (self.parser.sep, self.parser.altsep):
+ # GH-65238: pathlib doesn't preserve trailing slash. Add it back.
+ parts.append('')
+ select = self._glob_selector(parts[::-1], case_sensitive, recurse_symlinks)
+ root = str(self)
+ paths = select(root)
+
+ # Normalize results
+ if root == '.':
+ paths = map(self._remove_leading_dot, paths)
+ if parts[-1] == '':
+ paths = map(self._remove_trailing_slash, paths)
+ elif parts[-1] == '**':
+ paths = self._filter_trailing_slash(paths)
+ paths = map(self._from_parsed_string, paths)
+ return paths
def rglob(self, pattern, *, case_sensitive=None, recurse_symlinks=False):
"""Recursively yield all existing files (of any kind, including
@@ -638,14 +630,17 @@ def rglob(self, pattern, *, case_sensitive=None, recurse_symlinks=False):
if not isinstance(pattern, PurePath):
pattern = self.with_segments(pattern)
pattern = '**' / pattern
- return _abc.PathBase.glob(
- self, pattern, case_sensitive=case_sensitive, recurse_symlinks=recurse_symlinks)
+ return self.glob(pattern, case_sensitive=case_sensitive, recurse_symlinks=recurse_symlinks)
def walk(self, top_down=True, on_error=None, follow_symlinks=False):
"""Walk the directory tree from this directory, similar to os.walk()."""
sys.audit("pathlib.Path.walk", self, on_error, follow_symlinks)
- return _abc.PathBase.walk(
- self, top_down=top_down, on_error=on_error, follow_symlinks=follow_symlinks)
+ root_dir = str(self)
+ results = self._globber.walk(root_dir, top_down, on_error, follow_symlinks)
+ for path_str, dirnames, filenames in results:
+ if root_dir == '.':
+ path_str = path_str[2:]
+ yield self._from_parsed_string(path_str), dirnames, filenames
def absolute(self):
"""Return an absolute version of this path
@@ -669,9 +664,7 @@ def absolute(self):
# of joining, and we exploit the fact that getcwd() returns a
# fully-normalized string by storing it in _str. This is used to
# implement Path.cwd().
- result = self.with_segments(cwd)
- result._str = cwd
- return result
+ return self._from_parsed_string(cwd)
drive, root, rel = os.path.splitroot(cwd)
if not rel:
return self._from_parsed_parts(drive, root, self._tail)
diff --git a/Lib/pathlib/_abc.py b/Lib/pathlib/_abc.py
index ca38a51d072cfb..05698d5de24afb 100644
--- a/Lib/pathlib/_abc.py
+++ b/Lib/pathlib/_abc.py
@@ -12,6 +12,8 @@
"""
import functools
+import glob
+import operator
from errno import ENOENT, ENOTDIR, EBADF, ELOOP, EINVAL
from stat import S_ISDIR, S_ISLNK, S_ISREG, S_ISSOCK, S_ISBLK, S_ISCHR, S_ISFIFO
@@ -40,109 +42,29 @@ def _ignore_error(exception):
def _is_case_sensitive(parser):
return parser.normcase('Aa') == 'Aa'
-#
-# Globbing helpers
-#
-
-re = glob = None
-
-
-@functools.lru_cache(maxsize=512)
-def _compile_pattern(pat, sep, case_sensitive, recursive=True):
- """Compile given glob pattern to a re.Pattern object (observing case
- sensitivity)."""
- global re, glob
- if re is None:
- import re, glob
-
- flags = re.NOFLAG if case_sensitive else re.IGNORECASE
- regex = glob.translate(pat, recursive=recursive, include_hidden=True, seps=sep)
- return re.compile(regex, flags=flags).match
-
-def _select_special(paths, part):
- """Yield special literal children of the given paths."""
- for path in paths:
- yield path._make_child_relpath(part)
+class Globber(glob._Globber):
+ lstat = operator.methodcaller('lstat')
+ add_slash = operator.methodcaller('joinpath', '')
+ @staticmethod
+ def scandir(path):
+ # Emulate os.scandir(), which returns an object that can be used as a
+ # context manager. This method is called by walk() and glob().
+ from contextlib import nullcontext
+ return nullcontext(path.iterdir())
-def _select_children(parent_paths, dir_only, match):
- """Yield direct children of given paths, filtering by name and type."""
- for parent_path in parent_paths:
- try:
- # We must close the scandir() object before proceeding to
- # avoid exhausting file descriptors when globbing deep trees.
- with parent_path._scandir() as scandir_it:
- entries = list(scandir_it)
- except OSError:
- pass
- else:
- for entry in entries:
- if dir_only:
- try:
- if not entry.is_dir():
- continue
- except OSError:
- continue
- # Avoid cost of making a path object for non-matching paths by
- # matching against the os.DirEntry.name string.
- if match is None or match(entry.name):
- yield parent_path._make_child_direntry(entry)
-
+ @staticmethod
+ def concat_path(path, text):
+ """Appends text to the given path.
+ """
+ return path.with_segments(path._raw_path + text)
-def _select_recursive(parent_paths, dir_only, follow_symlinks, match):
- """Yield given paths and all their children, recursively, filtering by
- string and type.
- """
- for parent_path in parent_paths:
- if match is not None:
- # If we're filtering paths through a regex, record the length of
- # the parent path. We'll pass it to match(path, pos=...) later.
- parent_len = len(str(parent_path._make_child_relpath('_'))) - 1
- paths = [parent_path._make_child_relpath('')]
- while paths:
- path = paths.pop()
- if match is None or match(str(path), parent_len):
- # Yield *directory* path that matches pattern (if any).
- yield path
- try:
- # We must close the scandir() object before proceeding to
- # avoid exhausting file descriptors when globbing deep trees.
- with path._scandir() as scandir_it:
- entries = list(scandir_it)
- except OSError:
- pass
- else:
- for entry in entries:
- # Handle directory entry.
- try:
- if entry.is_dir(follow_symlinks=follow_symlinks):
- # Recurse into this directory.
- paths.append(path._make_child_direntry(entry))
- continue
- except OSError:
- pass
-
- # Handle file entry.
- if not dir_only:
- # Avoid cost of making a path object for non-matching
- # files by matching against the os.DirEntry object.
- if match is None or match(path._direntry_str(entry), parent_len):
- # Yield *file* path that matches pattern (if any).
- yield path._make_child_direntry(entry)
-
-
-def _select_unique(paths):
- """Yields the given paths, filtering out duplicates."""
- yielded = set()
- try:
- for path in paths:
- path_str = str(path)
- if path_str not in yielded:
- yield path
- yielded.add(path_str)
- finally:
- yielded.clear()
+ @staticmethod
+ def parse_entry(entry):
+ """Returns the path of an entry yielded from scandir().
+ """
+ return entry
class UnsupportedOperation(NotImplementedError):
@@ -218,6 +140,7 @@ class PurePathBase:
'_resolving',
)
parser = ParserBase()
+ _globber = Globber
def __init__(self, path, *paths):
self._raw_path = self.parser.join(path, *paths) if paths else path
@@ -454,14 +377,6 @@ def is_absolute(self):
a drive)."""
return self.parser.isabs(self._raw_path)
- @property
- def _pattern_stack(self):
- """Stack of path components, to be used with patterns in glob()."""
- anchor, parts = self._stack
- if anchor:
- raise NotImplementedError("Non-relative patterns are unsupported")
- return parts
-
@property
def _pattern_str(self):
"""The path expressed as a string, for use in pattern-matching."""
@@ -487,8 +402,9 @@ def match(self, path_pattern, *, case_sensitive=None):
return False
if len(path_parts) > len(pattern_parts) and path_pattern.anchor:
return False
+ globber = self._globber(sep, case_sensitive)
for path_part, pattern_part in zip(path_parts, pattern_parts):
- match = _compile_pattern(pattern_part, sep, case_sensitive, recursive=False)
+ match = globber.compile(pattern_part)
if match(path_part) is None:
return False
return True
@@ -502,7 +418,8 @@ def full_match(self, pattern, *, case_sensitive=None):
pattern = self.with_segments(pattern)
if case_sensitive is None:
case_sensitive = _is_case_sensitive(self.parser)
- match = _compile_pattern(pattern._pattern_str, pattern.parser.sep, case_sensitive)
+ globber = self._globber(pattern.parser.sep, case_sensitive, recursive=True)
+ match = globber.compile(pattern._pattern_str)
return match(self._pattern_str) is not None
@@ -766,24 +683,18 @@ def iterdir(self):
"""
raise UnsupportedOperation(self._unsupported_msg('iterdir()'))
- def _scandir(self):
- # Emulate os.scandir(), which returns an object that can be used as a
- # context manager. This method is called by walk() and glob().
- from contextlib import nullcontext
- return nullcontext(self.iterdir())
-
- def _direntry_str(self, entry):
- # Transform an entry yielded from _scandir() into a path string.
- # PathBase._scandir() yields PathBase objects, so use str().
- return str(entry)
-
- def _make_child_direntry(self, entry):
- # Transform an entry yielded from _scandir() into a path object.
- # PathBase._scandir() yields PathBase objects, so this is a no-op.
- return entry
-
- def _make_child_relpath(self, name):
- return self.joinpath(name)
+ def _glob_selector(self, parts, case_sensitive, recurse_symlinks):
+ if case_sensitive is None:
+ case_sensitive = _is_case_sensitive(self.parser)
+ case_pedantic = False
+ else:
+ # The user has expressed a case sensitivity choice, but we don't
+ # know the case sensitivity of the underlying filesystem, so we
+ # must use scandir() for everything, including non-wildcard parts.
+ case_pedantic = True
+ recursive = True if recurse_symlinks else glob._no_recurse_symlinks
+ globber = self._globber(self.parser.sep, case_sensitive, case_pedantic, recursive)
+ return globber.selector(parts)
def glob(self, pattern, *, case_sensitive=None, recurse_symlinks=True):
"""Iterate over this subtree and yield all existing files (of any
@@ -791,56 +702,11 @@ def glob(self, pattern, *, case_sensitive=None, recurse_symlinks=True):
"""
if not isinstance(pattern, PurePathBase):
pattern = self.with_segments(pattern)
- if case_sensitive is None:
- # TODO: evaluate case-sensitivity of each directory in _select_children().
- case_sensitive = _is_case_sensitive(self.parser)
-
- stack = pattern._pattern_stack
- specials = ('', '.', '..')
- deduplicate_paths = False
- sep = self.parser.sep
- paths = iter([self] if self.is_dir() else [])
- while stack:
- part = stack.pop()
- if part in specials:
- # Join special component (e.g. '..') onto paths.
- paths = _select_special(paths, part)
-
- elif part == '**':
- # Consume following '**' components, which have no effect.
- while stack and stack[-1] == '**':
- stack.pop()
-
- # Consume following non-special components, provided we're
- # treating symlinks consistently. Each component is joined
- # onto 'part', which is used to generate an re.Pattern object.
- if recurse_symlinks:
- while stack and stack[-1] not in specials:
- part += sep + stack.pop()
-
- # If the previous loop consumed pattern components, compile an
- # re.Pattern object based on those components.
- match = _compile_pattern(part, sep, case_sensitive) if part != '**' else None
-
- # Recursively walk directories, filtering by type and regex.
- paths = _select_recursive(paths, bool(stack), recurse_symlinks, match)
-
- # De-duplicate if we've already seen a '**' component.
- if deduplicate_paths:
- paths = _select_unique(paths)
- deduplicate_paths = True
-
- elif '**' in part:
- raise ValueError("Invalid pattern: '**' can only be an entire path component")
-
- else:
- # If the pattern component isn't '*', compile an re.Pattern
- # object based on the component.
- match = _compile_pattern(part, sep, case_sensitive) if part != '*' else None
-
- # Iterate over directories' children filtering by type and regex.
- paths = _select_children(paths, bool(stack), match)
- return paths
+ anchor, parts = pattern._stack
+ if anchor:
+ raise NotImplementedError("Non-relative patterns are unsupported")
+ select = self._glob_selector(parts, case_sensitive, recurse_symlinks)
+ return select(self)
def rglob(self, pattern, *, case_sensitive=None, recurse_symlinks=True):
"""Recursively yield all existing files (of any kind, including
@@ -854,48 +720,7 @@ def rglob(self, pattern, *, case_sensitive=None, recurse_symlinks=True):
def walk(self, top_down=True, on_error=None, follow_symlinks=False):
"""Walk the directory tree from this directory, similar to os.walk()."""
- paths = [self]
-
- while paths:
- path = paths.pop()
- if isinstance(path, tuple):
- yield path
- continue
-
- # We may not have read permission for self, in which case we can't
- # get a list of the files the directory contains. os.walk()
- # always suppressed the exception in that instance, rather than
- # blow up for a minor reason when (say) a thousand readable
- # directories are still left to visit. That logic is copied here.
- try:
- scandir_obj = path._scandir()
- except OSError as error:
- if on_error is not None:
- on_error(error)
- continue
-
- with scandir_obj as scandir_it:
- dirnames = []
- filenames = []
- if not top_down:
- paths.append((path, dirnames, filenames))
- for entry in scandir_it:
- try:
- is_dir = entry.is_dir(follow_symlinks=follow_symlinks)
- except OSError:
- # Carried over from os.path.isdir().
- is_dir = False
-
- if is_dir:
- if not top_down:
- paths.append(path._make_child_direntry(entry))
- dirnames.append(entry.name)
- else:
- filenames.append(entry.name)
-
- if top_down:
- yield path, dirnames, filenames
- paths += [path._make_child_relpath(d) for d in reversed(dirnames)]
+ return self._globber.walk(self, top_down, on_error, follow_symlinks)
def absolute(self):
"""Return an absolute version of this path
diff --git a/Lib/posixpath.py b/Lib/posixpath.py
index 0e8bb5ab10d916..dd29fbb1614aa8 100644
--- a/Lib/posixpath.py
+++ b/Lib/posixpath.py
@@ -77,12 +77,11 @@ def join(a, *p):
sep = _get_sep(a)
path = a
try:
- if not p:
- path[:0] + sep #23780: Ensure compatible data type even if p is null.
- for b in map(os.fspath, p):
- if b.startswith(sep):
+ for b in p:
+ b = os.fspath(b)
+ if b.startswith(sep) or not path:
path = b
- elif not path or path.endswith(sep):
+ elif path.endswith(sep):
path += b
else:
path += sep + b
@@ -430,11 +429,6 @@ def realpath(filename, *, strict=False):
# the same links.
seen = {}
- # Whether we're calling lstat() and readlink() to resolve symlinks. If we
- # encounter an OSError for a symlink loop in non-strict mode, this is
- # switched off.
- querying = True
-
while rest:
name = rest.pop()
if name is None:
@@ -452,9 +446,6 @@ def realpath(filename, *, strict=False):
newpath = path + name
else:
newpath = path + sep + name
- if not querying:
- path = newpath
- continue
try:
st = os.lstat(newpath)
if not stat.S_ISLNK(st.st_mode):
@@ -476,11 +467,8 @@ def realpath(filename, *, strict=False):
if strict:
# Raise OSError(errno.ELOOP)
os.stat(newpath)
- else:
- # Return already resolved part + rest of the path unchanged.
- path = newpath
- querying = False
- continue
+ path = newpath
+ continue
seen[newpath] = None # not resolved symlink
target = os.readlink(newpath)
if target.startswith(sep):
@@ -502,10 +490,10 @@ def realpath(filename, *, strict=False):
def relpath(path, start=None):
"""Return a relative version of a path"""
+ path = os.fspath(path)
if not path:
raise ValueError("no path specified")
- path = os.fspath(path)
if isinstance(path, bytes):
curdir = b'.'
sep = b'/'
diff --git a/Lib/pydoc_data/topics.py b/Lib/pydoc_data/topics.py
index 05045ac8c945c8..26fc498ac95bd7 100644
--- a/Lib/pydoc_data/topics.py
+++ b/Lib/pydoc_data/topics.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Autogenerated by Sphinx on Tue Mar 12 18:35:04 2024
+# Autogenerated by Sphinx on Tue Apr 9 11:53:07 2024
# as part of the release process.
topics = {'assert': 'The "assert" statement\n'
'**********************\n'
@@ -5221,12 +5221,13 @@
'the\n'
'current directory, it is read with "\'utf-8\'" encoding and '
'executed as\n'
- 'if it had been typed at the debugger prompt. This is '
- 'particularly\n'
- 'useful for aliases. If both files exist, the one in the home\n'
- 'directory is read first and aliases defined there can be '
- 'overridden by\n'
- 'the local file.\n'
+ 'if it had been typed at the debugger prompt, with the exception '
+ 'that\n'
+ 'empty lines and lines starting with "#" are ignored. This is\n'
+ 'particularly useful for aliases. If both files exist, the one '
+ 'in the\n'
+ 'home directory is read first and aliases defined there can be\n'
+ 'overridden by the local file.\n'
'\n'
'Changed in version 3.2: ".pdbrc" can now contain commands that\n'
'continue debugging, such as "continue" or "next". Previously, '
@@ -8640,32 +8641,36 @@
'\n'
' nonlocal_stmt ::= "nonlocal" identifier ("," identifier)*\n'
'\n'
- 'The "nonlocal" statement causes the listed identifiers to refer '
- 'to\n'
- 'previously bound variables in the nearest enclosing scope '
- 'excluding\n'
- 'globals. This is important because the default behavior for '
- 'binding is\n'
- 'to search the local namespace first. The statement allows\n'
- 'encapsulated code to rebind variables outside of the local '
- 'scope\n'
- 'besides the global (module) scope.\n'
- '\n'
- 'Names listed in a "nonlocal" statement, unlike those listed in '
- 'a\n'
- '"global" statement, must refer to pre-existing bindings in an\n'
- 'enclosing scope (the scope in which a new binding should be '
- 'created\n'
- 'cannot be determined unambiguously).\n'
- '\n'
- 'Names listed in a "nonlocal" statement must not collide with '
- 'pre-\n'
- 'existing bindings in the local scope.\n'
+ 'When the definition of a function or class is nested (enclosed) '
+ 'within\n'
+ 'the definitions of other functions, its nonlocal scopes are the '
+ 'local\n'
+ 'scopes of the enclosing functions. The "nonlocal" statement '
+ 'causes the\n'
+ 'listed identifiers to refer to names previously bound in '
+ 'nonlocal\n'
+ 'scopes. It allows encapsulated code to rebind such nonlocal\n'
+ 'identifiers. If a name is bound in more than one nonlocal '
+ 'scope, the\n'
+ 'nearest binding is used. If a name is not bound in any nonlocal '
+ 'scope,\n'
+ 'or if there is no nonlocal scope, a "SyntaxError" is raised.\n'
+ '\n'
+ 'The nonlocal statement applies to the entire scope of a function '
+ 'or\n'
+ 'class body. A "SyntaxError" is raised if a variable is used or\n'
+ 'assigned to prior to its nonlocal declaration in the scope.\n'
'\n'
'See also:\n'
'\n'
' **PEP 3104** - Access to Names in Outer Scopes\n'
- ' The specification for the "nonlocal" statement.\n',
+ ' The specification for the "nonlocal" statement.\n'
+ '\n'
+ '**Programmer’s note:** "nonlocal" is a directive to the parser '
+ 'and\n'
+ 'applies only to code parsed along with it. See the note for '
+ 'the\n'
+ '"global" statement.\n',
'numbers': 'Numeric literals\n'
'****************\n'
'\n'
@@ -13805,14 +13810,18 @@
'contains\n'
'the numbers 0, 1, …, *n*-1. Item *i* of sequence *a* is selected '
'by\n'
- '"a[i]".\n'
+ '"a[i]". Some sequences, including built-in sequences, interpret\n'
+ 'negative subscripts by adding the sequence length. For example,\n'
+ '"a[-2]" equals "a[n-2]", the second to last item of sequence a '
+ 'with\n'
+ 'length "n".\n'
'\n'
'Sequences also support slicing: "a[i:j]" selects all items with '
'index\n'
'*k* such that *i* "<=" *k* "<" *j*. When used as an expression, a\n'
- 'slice is a sequence of the same type. This implies that the index '
- 'set\n'
- 'is renumbered so that it starts at 0.\n'
+ 'slice is a sequence of the same type. The comment above about '
+ 'negative\n'
+ 'indexes also applies to negative slice positions.\n'
'\n'
'Some sequences also support “extended slicing” with a third “step”\n'
'parameter: "a[i:j:k]" selects all items of *a* with index *x* where '
@@ -14413,7 +14422,7 @@
'a common ancestor. Additional details on the C3 MRO used by Python '
'can\n'
'be found in the documentation accompanying the 2.3 release at\n'
- 'https://www.python.org/download/releases/2.3/mro/.\n'
+ 'https://docs.python.org/3/howto/mro.html.\n'
'\n'
'When a class attribute reference (for class "C", say) would yield '
'a\n'
diff --git a/Lib/queue.py b/Lib/queue.py
index 387ce5425879a4..25beb46e30d6bd 100644
--- a/Lib/queue.py
+++ b/Lib/queue.py
@@ -239,8 +239,9 @@ def shutdown(self, immediate=False):
By default, gets will only raise once the queue is empty. Set
'immediate' to True to make gets raise immediately instead.
- All blocked callers of put() will be unblocked, and also get()
- and join() if 'immediate'.
+ All blocked callers of put() and get() will be unblocked. If
+ 'immediate', a task is marked as done for each item remaining in
+ the queue, which may unblock callers of join().
'''
with self.mutex:
self.is_shutdown = True
@@ -249,9 +250,10 @@ def shutdown(self, immediate=False):
self._get()
if self.unfinished_tasks > 0:
self.unfinished_tasks -= 1
- self.not_empty.notify_all()
# release all blocked threads in `join()`
self.all_tasks_done.notify_all()
+ # All getters need to re-check queue-empty to raise ShutDown
+ self.not_empty.notify_all()
self.not_full.notify_all()
# Override these methods to implement other queue organizations
diff --git a/Lib/site.py b/Lib/site.py
index 162bbec4f8f41b..93af9c453ac7bb 100644
--- a/Lib/site.py
+++ b/Lib/site.py
@@ -179,35 +179,44 @@ def addpackage(sitedir, name, known_paths):
return
_trace(f"Processing .pth file: {fullname!r}")
try:
- # locale encoding is not ideal especially on Windows. But we have used
- # it for a long time. setuptools uses the locale encoding too.
- f = io.TextIOWrapper(io.open_code(fullname), encoding="locale")
+ with io.open_code(fullname) as f:
+ pth_content = f.read()
except OSError:
return
- with f:
- for n, line in enumerate(f):
- if line.startswith("#"):
- continue
- if line.strip() == "":
+
+ try:
+ pth_content = pth_content.decode()
+ except UnicodeDecodeError:
+ # Fallback to locale encoding for backward compatibility.
+ # We will deprecate this fallback in the future.
+ import locale
+ pth_content = pth_content.decode(locale.getencoding())
+ _trace(f"Cannot read {fullname!r} as UTF-8. "
+ f"Using fallback encoding {locale.getencoding()!r}")
+
+ for n, line in enumerate(pth_content.splitlines(), 1):
+ if line.startswith("#"):
+ continue
+ if line.strip() == "":
+ continue
+ try:
+ if line.startswith(("import ", "import\t")):
+ exec(line)
continue
- try:
- if line.startswith(("import ", "import\t")):
- exec(line)
- continue
- line = line.rstrip()
- dir, dircase = makepath(sitedir, line)
- if not dircase in known_paths and os.path.exists(dir):
- sys.path.append(dir)
- known_paths.add(dircase)
- except Exception as exc:
- print("Error processing line {:d} of {}:\n".format(n+1, fullname),
- file=sys.stderr)
- import traceback
- for record in traceback.format_exception(exc):
- for line in record.splitlines():
- print(' '+line, file=sys.stderr)
- print("\nRemainder of file ignored", file=sys.stderr)
- break
+ line = line.rstrip()
+ dir, dircase = makepath(sitedir, line)
+ if dircase not in known_paths and os.path.exists(dir):
+ sys.path.append(dir)
+ known_paths.add(dircase)
+ except Exception as exc:
+ print(f"Error processing line {n:d} of {fullname}:\n",
+ file=sys.stderr)
+ import traceback
+ for record in traceback.format_exception(exc):
+ for line in record.splitlines():
+ print(' '+line, file=sys.stderr)
+ print("\nRemainder of file ignored", file=sys.stderr)
+ break
if reset:
known_paths = None
return known_paths
diff --git a/Lib/statistics.py b/Lib/statistics.py
index 58fb31def8896e..fc00891b083dc3 100644
--- a/Lib/statistics.py
+++ b/Lib/statistics.py
@@ -919,13 +919,13 @@ def kde(data, h, kernel='normal', *, cumulative=False):
sqrt2pi = sqrt(2 * pi)
sqrt2 = sqrt(2)
K = lambda t: exp(-1/2 * t * t) / sqrt2pi
- I = lambda t: 1/2 * (1.0 + erf(t / sqrt2))
+ W = lambda t: 1/2 * (1.0 + erf(t / sqrt2))
support = None
case 'logistic':
# 1.0 / (exp(t) + 2.0 + exp(-t))
K = lambda t: 1/2 / (1.0 + cosh(t))
- I = lambda t: 1.0 - 1.0 / (exp(t) + 1.0)
+ W = lambda t: 1.0 - 1.0 / (exp(t) + 1.0)
support = None
case 'sigmoid':
@@ -933,39 +933,39 @@ def kde(data, h, kernel='normal', *, cumulative=False):
c1 = 1 / pi
c2 = 2 / pi
K = lambda t: c1 / cosh(t)
- I = lambda t: c2 * atan(exp(t))
+ W = lambda t: c2 * atan(exp(t))
support = None
case 'rectangular' | 'uniform':
K = lambda t: 1/2
- I = lambda t: 1/2 * t + 1/2
+ W = lambda t: 1/2 * t + 1/2
support = 1.0
case 'triangular':
K = lambda t: 1.0 - abs(t)
- I = lambda t: t*t * (1/2 if t < 0.0 else -1/2) + t + 1/2
+ W = lambda t: t*t * (1/2 if t < 0.0 else -1/2) + t + 1/2
support = 1.0
case 'parabolic' | 'epanechnikov':
K = lambda t: 3/4 * (1.0 - t * t)
- I = lambda t: -1/4 * t**3 + 3/4 * t + 1/2
+ W = lambda t: -1/4 * t**3 + 3/4 * t + 1/2
support = 1.0
case 'quartic' | 'biweight':
K = lambda t: 15/16 * (1.0 - t * t) ** 2
- I = lambda t: 3/16 * t**5 - 5/8 * t**3 + 15/16 * t + 1/2
+ W = lambda t: 3/16 * t**5 - 5/8 * t**3 + 15/16 * t + 1/2
support = 1.0
case 'triweight':
K = lambda t: 35/32 * (1.0 - t * t) ** 3
- I = lambda t: 35/32 * (-1/7*t**7 + 3/5*t**5 - t**3 + t) + 1/2
+ W = lambda t: 35/32 * (-1/7*t**7 + 3/5*t**5 - t**3 + t) + 1/2
support = 1.0
case 'cosine':
c1 = pi / 4
c2 = pi / 2
K = lambda t: c1 * cos(c2 * t)
- I = lambda t: 1/2 * sin(c2 * t) + 1/2
+ W = lambda t: 1/2 * sin(c2 * t) + 1/2
support = 1.0
case _:
@@ -974,10 +974,14 @@ def kde(data, h, kernel='normal', *, cumulative=False):
if support is None:
def pdf(x):
+ n = len(data)
return sum(K((x - x_i) / h) for x_i in data) / (n * h)
def cdf(x):
- return sum(I((x - x_i) / h) for x_i in data) / n
+
+ n = len(data)
+ return sum(W((x - x_i) / h) for x_i in data) / n
+
else:
@@ -985,16 +989,24 @@ def cdf(x):
bandwidth = h * support
def pdf(x):
+ nonlocal n, sample
+ if len(data) != n:
+ sample = sorted(data)
+ n = len(data)
i = bisect_left(sample, x - bandwidth)
j = bisect_right(sample, x + bandwidth)
supported = sample[i : j]
return sum(K((x - x_i) / h) for x_i in supported) / (n * h)
def cdf(x):
+ nonlocal n, sample
+ if len(data) != n:
+ sample = sorted(data)
+ n = len(data)
i = bisect_left(sample, x - bandwidth)
j = bisect_right(sample, x + bandwidth)
supported = sample[i : j]
- return sum((I((x - x_i) / h) for x_i in supported), i) / n
+ return sum((W((x - x_i) / h) for x_i in supported), i) / n
if cumulative:
cdf.__doc__ = f'CDF estimate with {h=!r} and {kernel=!r}'
diff --git a/Lib/tarfile.py b/Lib/tarfile.py
index 6f315a6408f185..149b1c3b1bda28 100755
--- a/Lib/tarfile.py
+++ b/Lib/tarfile.py
@@ -2247,7 +2247,7 @@ def _get_filter_function(self, filter):
'Python 3.14 will, by default, filter extracted tar '
+ 'archives and reject files or modify their metadata. '
+ 'Use the filter argument to control this behavior.',
- DeprecationWarning)
+ DeprecationWarning, stacklevel=3)
return fully_trusted_filter
if isinstance(filter, str):
raise TypeError(
diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py
index c77263998c99f5..570110893629cf 100644
--- a/Lib/test/datetimetester.py
+++ b/Lib/test/datetimetester.py
@@ -1927,6 +1927,10 @@ def test_fromisoformat_fails(self):
'2009-02-29', # Invalid leap day
'2019-W53-1', # No week 53 in 2019
'2020-W54-1', # No week 54
+ '0000-W25-1', # Invalid year
+ '10000-W25-1', # Invalid year
+ '2020-W25-0', # Invalid day-of-week
+ '2020-W25-8', # Invalid day-of-week
'2009\ud80002\ud80028', # Separators are surrogate codepoints
]
diff --git a/Lib/test/libregrtest/run_workers.py b/Lib/test/libregrtest/run_workers.py
index 9cfe1b9d6fd07d..235047cf2e563c 100644
--- a/Lib/test/libregrtest/run_workers.py
+++ b/Lib/test/libregrtest/run_workers.py
@@ -79,8 +79,12 @@ class MultiprocessResult:
err_msg: str | None = None
+class WorkerThreadExited:
+ """Indicates that a worker thread has exited"""
+
ExcStr = str
QueueOutput = tuple[Literal[False], MultiprocessResult] | tuple[Literal[True], ExcStr]
+QueueContent = QueueOutput | WorkerThreadExited
class ExitThread(Exception):
@@ -376,8 +380,8 @@ def _runtest(self, test_name: TestName) -> MultiprocessResult:
def run(self) -> None:
fail_fast = self.runtests.fail_fast
fail_env_changed = self.runtests.fail_env_changed
- while not self._stopped:
- try:
+ try:
+ while not self._stopped:
try:
test_name = next(self.pending)
except StopIteration:
@@ -396,11 +400,12 @@ def run(self) -> None:
if mp_result.result.must_stop(fail_fast, fail_env_changed):
break
- except ExitThread:
- break
- except BaseException:
- self.output.put((True, traceback.format_exc()))
- break
+ except ExitThread:
+ pass
+ except BaseException:
+ self.output.put((True, traceback.format_exc()))
+ finally:
+ self.output.put(WorkerThreadExited())
def _wait_completed(self) -> None:
popen = self._popen
@@ -458,8 +463,9 @@ def __init__(self, num_workers: int, runtests: RunTests,
self.log = logger.log
self.display_progress = logger.display_progress
self.results: TestResults = results
+ self.live_worker_count = 0
- self.output: queue.Queue[QueueOutput] = queue.Queue()
+ self.output: queue.Queue[QueueContent] = queue.Queue()
tests_iter = runtests.iter_tests()
self.pending = MultiprocessIterator(tests_iter)
self.timeout = runtests.timeout
@@ -497,6 +503,7 @@ def start_workers(self) -> None:
self.log(msg)
for worker in self.workers:
worker.start()
+ self.live_worker_count += 1
def stop_workers(self) -> None:
start_time = time.monotonic()
@@ -511,14 +518,18 @@ def _get_result(self) -> QueueOutput | None:
# bpo-46205: check the status of workers every iteration to avoid
# waiting forever on an empty queue.
- while any(worker.is_alive() for worker in self.workers):
+ while self.live_worker_count > 0:
if use_faulthandler:
faulthandler.dump_traceback_later(MAIN_PROCESS_TIMEOUT,
exit=True)
# wait for a thread
try:
- return self.output.get(timeout=PROGRESS_UPDATE)
+ result = self.output.get(timeout=PROGRESS_UPDATE)
+ if isinstance(result, WorkerThreadExited):
+ self.live_worker_count -= 1
+ continue
+ return result
except queue.Empty:
pass
@@ -528,12 +539,6 @@ def _get_result(self) -> QueueOutput | None:
if running:
self.log(running)
- # all worker threads are done: consume pending results
- try:
- return self.output.get(timeout=0)
- except queue.Empty:
- return None
-
def display_result(self, mp_result: MultiprocessResult) -> None:
result = mp_result.result
pgo = self.runtests.pgo
diff --git a/Lib/test/libregrtest/utils.py b/Lib/test/libregrtest/utils.py
index 837f73b28b4018..791f996127ea58 100644
--- a/Lib/test/libregrtest/utils.py
+++ b/Lib/test/libregrtest/utils.py
@@ -698,6 +698,14 @@ def get_signal_name(exitcode):
except ValueError:
pass
+ # Shell exit code (ex: WASI build)
+ if 128 < exitcode < 256:
+ signum = exitcode - 128
+ try:
+ return signal.Signals(signum).name
+ except ValueError:
+ pass
+
try:
return WINDOWS_STATUS[exitcode]
except KeyError:
diff --git a/Lib/test/list_tests.py b/Lib/test/list_tests.py
index 26118e14bb97e0..89cd10f76a318e 100644
--- a/Lib/test/list_tests.py
+++ b/Lib/test/list_tests.py
@@ -6,7 +6,7 @@
from functools import cmp_to_key
from test import seq_tests
-from test.support import ALWAYS_EQ, NEVER_EQ, Py_C_RECURSION_LIMIT
+from test.support import ALWAYS_EQ, NEVER_EQ, get_c_recursion_limit
class CommonTest(seq_tests.CommonTest):
@@ -61,7 +61,7 @@ def test_repr(self):
def test_repr_deep(self):
a = self.type2test([])
- for i in range(Py_C_RECURSION_LIMIT + 1):
+ for i in range(get_c_recursion_limit() + 1):
a = self.type2test([a])
self.assertRaises(RecursionError, repr, a)
diff --git a/Lib/test/mapping_tests.py b/Lib/test/mapping_tests.py
index b4cfce19a7174e..ed89a81a6ea685 100644
--- a/Lib/test/mapping_tests.py
+++ b/Lib/test/mapping_tests.py
@@ -1,7 +1,7 @@
# tests common to dict and UserDict
import unittest
import collections
-from test.support import Py_C_RECURSION_LIMIT
+from test.support import get_c_recursion_limit
class BasicTestMappingProtocol(unittest.TestCase):
@@ -624,7 +624,7 @@ def __repr__(self):
def test_repr_deep(self):
d = self._empty_mapping()
- for i in range(Py_C_RECURSION_LIMIT + 1):
+ for i in range(get_c_recursion_limit() + 1):
d0 = d
d = self._empty_mapping()
d[1] = d0
diff --git a/Lib/test/pythoninfo.py b/Lib/test/pythoninfo.py
index 0cfd033bb637a7..1db9fb9537f888 100644
--- a/Lib/test/pythoninfo.py
+++ b/Lib/test/pythoninfo.py
@@ -513,6 +513,7 @@ def collect_sysconfig(info_add):
'MACHDEP',
'MULTIARCH',
'OPT',
+ 'PGO_PROF_USE_FLAG',
'PY_CFLAGS',
'PY_CFLAGS_NODIST',
'PY_CORE_LDFLAGS',
diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py
index fb4b0a5071d71f..6eb0f84b02ea22 100644
--- a/Lib/test/support/__init__.py
+++ b/Lib/test/support/__init__.py
@@ -56,7 +56,7 @@
"run_with_tz", "PGO", "missing_compiler_executable",
"ALWAYS_EQ", "NEVER_EQ", "LARGEST", "SMALLEST",
"LOOPBACK_TIMEOUT", "INTERNET_TIMEOUT", "SHORT_TIMEOUT", "LONG_TIMEOUT",
- "Py_DEBUG", "EXCEEDS_RECURSION_LIMIT", "Py_C_RECURSION_LIMIT",
+ "Py_DEBUG", "exceeds_recursion_limit", "get_c_recursion_limit",
"skip_on_s390x",
"without_optimizer",
]
@@ -842,6 +842,12 @@ def requires_gil_enabled(msg="needs the GIL enabled"):
"""Decorator for skipping tests on the free-threaded build."""
return unittest.skipIf(Py_GIL_DISABLED, msg)
+def expected_failure_if_gil_disabled():
+ """Expect test failure if the GIL is disabled."""
+ if Py_GIL_DISABLED:
+ return unittest.expectedFailure
+ return lambda test_case: test_case
+
if Py_GIL_DISABLED:
_header = 'PHBBInP'
else:
@@ -2490,22 +2496,18 @@ def adjust_int_max_str_digits(max_digits):
sys.set_int_max_str_digits(current)
-def _get_c_recursion_limit():
+def get_c_recursion_limit():
try:
import _testcapi
return _testcapi.Py_C_RECURSION_LIMIT
- except (ImportError, AttributeError):
- # Originally taken from Include/cpython/pystate.h .
- if sys.platform == 'win32':
- return 4000
- else:
- return 10000
+ except ImportError:
+ raise unittest.SkipTest('requires _testcapi')
+
-# The default C recursion limit.
-Py_C_RECURSION_LIMIT = _get_c_recursion_limit()
+def exceeds_recursion_limit():
+ """For recursion tests, easily exceeds default recursion limit."""
+ return get_c_recursion_limit() * 3
-#For recursion tests, easily exceeds default recursion limit
-EXCEEDS_RECURSION_LIMIT = Py_C_RECURSION_LIMIT * 3
#Windows doesn't have os.uname() but it doesn't support s390x.
skip_on_s390x = unittest.skipIf(hasattr(os, 'uname') and os.uname().machine == 's390x',
diff --git a/Lib/test/support/interpreters/__init__.py b/Lib/test/support/interpreters/__init__.py
index 8be4ee736aa93b..0a5a9259479be4 100644
--- a/Lib/test/support/interpreters/__init__.py
+++ b/Lib/test/support/interpreters/__init__.py
@@ -74,50 +74,77 @@ def __str__(self):
def create():
"""Return a new (idle) Python interpreter."""
id = _interpreters.create(reqrefs=True)
- return Interpreter(id)
+ return Interpreter(id, _ownsref=True)
def list_all():
"""Return all existing interpreters."""
- return [Interpreter(id) for id in _interpreters.list_all()]
+ return [Interpreter(id, _whence=whence)
+ for id, whence in _interpreters.list_all(require_ready=True)]
def get_current():
"""Return the currently running interpreter."""
- id = _interpreters.get_current()
- return Interpreter(id)
+ id, whence = _interpreters.get_current()
+ return Interpreter(id, _whence=whence)
def get_main():
"""Return the main interpreter."""
- id = _interpreters.get_main()
- return Interpreter(id)
+ id, whence = _interpreters.get_main()
+ assert whence == _interpreters.WHENCE_RUNTIME, repr(whence)
+ return Interpreter(id, _whence=whence)
_known = weakref.WeakValueDictionary()
class Interpreter:
- """A single Python interpreter."""
+ """A single Python interpreter.
- def __new__(cls, id, /):
+ Attributes:
+
+ "id" - the unique process-global ID number for the interpreter
+ "whence" - indicates where the interpreter was created
+
+ If the interpreter wasn't created by this module
+ then any method that modifies the interpreter will fail,
+ i.e. .close(), .prepare_main(), .exec(), and .call()
+ """
+
+ _WHENCE_TO_STR = {
+ _interpreters.WHENCE_UNKNOWN: 'unknown',
+ _interpreters.WHENCE_RUNTIME: 'runtime init',
+ _interpreters.WHENCE_LEGACY_CAPI: 'legacy C-API',
+ _interpreters.WHENCE_CAPI: 'C-API',
+ _interpreters.WHENCE_XI: 'cross-interpreter C-API',
+ _interpreters.WHENCE_STDLIB: '_interpreters module',
+ }
+
+ def __new__(cls, id, /, _whence=None, _ownsref=None):
# There is only one instance for any given ID.
if not isinstance(id, int):
raise TypeError(f'id must be an int, got {id!r}')
id = int(id)
+ if _whence is None:
+ if _ownsref:
+ _whence = _interpreters.WHENCE_STDLIB
+ else:
+ _whence = _interpreters.whence(id)
+ assert _whence in cls._WHENCE_TO_STR, repr(_whence)
+ if _ownsref is None:
+ _ownsref = (_whence == _interpreters.WHENCE_STDLIB)
try:
self = _known[id]
assert hasattr(self, '_ownsref')
except KeyError:
- # This may raise InterpreterNotFoundError:
- _interpreters.incref(id)
- try:
- self = super().__new__(cls)
- self._id = id
- self._ownsref = True
- except BaseException:
- _interpreters.decref(id)
- raise
+ self = super().__new__(cls)
_known[id] = self
+ self._id = id
+ self._whence = _whence
+ self._ownsref = _ownsref
+ if _ownsref:
+ # This may raise InterpreterNotFoundError:
+ _interpreters.incref(id)
return self
def __repr__(self):
@@ -142,7 +169,7 @@ def _decref(self):
return
self._ownsref = False
try:
- _interpreters.decref(self.id)
+ _interpreters.decref(self._id)
except InterpreterNotFoundError:
pass
@@ -150,17 +177,24 @@ def _decref(self):
def id(self):
return self._id
+ @property
+ def whence(self):
+ return self._WHENCE_TO_STR[self._whence]
+
def is_running(self):
"""Return whether or not the identified interpreter is running."""
return _interpreters.is_running(self._id)
+ # Everything past here is available only to interpreters created by
+ # interpreters.create().
+
def close(self):
"""Finalize and destroy the interpreter.
Attempting to destroy the current interpreter results
in an InterpreterError.
"""
- return _interpreters.destroy(self._id)
+ return _interpreters.destroy(self._id, restrict=True)
def prepare_main(self, ns=None, /, **kwargs):
"""Bind the given values into the interpreter's __main__.
@@ -168,7 +202,7 @@ def prepare_main(self, ns=None, /, **kwargs):
The values must be shareable.
"""
ns = dict(ns, **kwargs) if ns is not None else kwargs
- _interpreters.set___main___attrs(self._id, ns)
+ _interpreters.set___main___attrs(self._id, ns, restrict=True)
def exec(self, code, /):
"""Run the given source code in the interpreter.
@@ -188,7 +222,7 @@ def exec(self, code, /):
that time, the previous interpreter is allowed to run
in other threads.
"""
- excinfo = _interpreters.exec(self._id, code)
+ excinfo = _interpreters.exec(self._id, code, restrict=True)
if excinfo is not None:
raise ExecutionFailed(excinfo)
@@ -208,7 +242,7 @@ def call(self, callable, /):
# XXX Support args and kwargs.
# XXX Support arbitrary callables.
# XXX Support returning the return value (e.g. via pickle).
- excinfo = _interpreters.call(self._id, callable)
+ excinfo = _interpreters.call(self._id, callable, restrict=True)
if excinfo is not None:
raise ExecutionFailed(excinfo)
diff --git a/Lib/test/test__xxinterpchannels.py b/Lib/test/test__xxinterpchannels.py
index c5d29bd2dd911f..3db0cb7e6e1d49 100644
--- a/Lib/test/test__xxinterpchannels.py
+++ b/Lib/test/test__xxinterpchannels.py
@@ -9,7 +9,7 @@
from test.support import import_helper
from test.test__xxsubinterpreters import (
- interpreters,
+ _interpreters,
_run_output,
clean_up_interpreters,
)
@@ -49,14 +49,15 @@ def run_interp(id, source, **shared):
def _run_interp(id, source, shared, _mainns={}):
source = dedent(source)
- main = interpreters.get_main()
+ main, *_ = _interpreters.get_main()
if main == id:
- if interpreters.get_current() != main:
+ cur, *_ = _interpreters.get_current()
+ if cur != main:
raise RuntimeError
# XXX Run a func?
exec(source, _mainns)
else:
- interpreters.run_string(id, source, shared)
+ _interpreters.run_string(id, source, shared)
class Interpreter(namedtuple('Interpreter', 'name id')):
@@ -71,7 +72,7 @@ def from_raw(cls, raw):
raise NotImplementedError
def __new__(cls, name=None, id=None):
- main = interpreters.get_main()
+ main, *_ = _interpreters.get_main()
if id == main:
if not name:
name = 'main'
@@ -89,7 +90,7 @@ def __new__(cls, name=None, id=None):
name = 'main'
id = main
else:
- id = interpreters.create()
+ id = _interpreters.create()
self = super().__new__(cls, name, id)
return self
@@ -370,7 +371,7 @@ def test_sequential_ids(self):
self.assertEqual(set(after) - set(before), {id1, id2, id3})
def test_ids_global(self):
- id1 = interpreters.create()
+ id1 = _interpreters.create()
out = _run_output(id1, dedent("""
import _xxinterpchannels as _channels
cid = _channels.create()
@@ -378,7 +379,7 @@ def test_ids_global(self):
"""))
cid1 = int(out.strip())
- id2 = interpreters.create()
+ id2 = _interpreters.create()
out = _run_output(id2, dedent("""
import _xxinterpchannels as _channels
cid = _channels.create()
@@ -390,7 +391,7 @@ def test_ids_global(self):
def test_channel_list_interpreters_none(self):
"""Test listing interpreters for a channel with no associations."""
- # Test for channel with no associated interpreters.
+ # Test for channel with no associated interpreters.
cid = channels.create()
send_interps = channels.list_interpreters(cid, send=True)
recv_interps = channels.list_interpreters(cid, send=False)
@@ -398,8 +399,8 @@ def test_channel_list_interpreters_none(self):
self.assertEqual(recv_interps, [])
def test_channel_list_interpreters_basic(self):
- """Test basic listing channel interpreters."""
- interp0 = interpreters.get_main()
+ """Test basic listing channel interpreters."""
+ interp0, *_ = _interpreters.get_main()
cid = channels.create()
channels.send(cid, "send", blocking=False)
# Test for a channel that has one end associated to an interpreter.
@@ -408,7 +409,7 @@ def test_channel_list_interpreters_basic(self):
self.assertEqual(send_interps, [interp0])
self.assertEqual(recv_interps, [])
- interp1 = interpreters.create()
+ interp1 = _interpreters.create()
_run_output(interp1, dedent(f"""
import _xxinterpchannels as _channels
obj = _channels.recv({cid})
@@ -421,10 +422,10 @@ def test_channel_list_interpreters_basic(self):
def test_channel_list_interpreters_multiple(self):
"""Test listing interpreters for a channel with many associations."""
- interp0 = interpreters.get_main()
- interp1 = interpreters.create()
- interp2 = interpreters.create()
- interp3 = interpreters.create()
+ interp0, *_ = _interpreters.get_main()
+ interp1 = _interpreters.create()
+ interp2 = _interpreters.create()
+ interp3 = _interpreters.create()
cid = channels.create()
channels.send(cid, "send", blocking=False)
@@ -447,8 +448,8 @@ def test_channel_list_interpreters_multiple(self):
def test_channel_list_interpreters_destroyed(self):
"""Test listing channel interpreters with a destroyed interpreter."""
- interp0 = interpreters.get_main()
- interp1 = interpreters.create()
+ interp0, *_ = _interpreters.get_main()
+ interp1 = _interpreters.create()
cid = channels.create()
channels.send(cid, "send", blocking=False)
_run_output(interp1, dedent(f"""
@@ -461,7 +462,7 @@ def test_channel_list_interpreters_destroyed(self):
self.assertEqual(send_interps, [interp0])
self.assertEqual(recv_interps, [interp1])
- interpreters.destroy(interp1)
+ _interpreters.destroy(interp1)
# Destroyed interpreter should not be listed.
send_interps = channels.list_interpreters(cid, send=True)
recv_interps = channels.list_interpreters(cid, send=False)
@@ -472,9 +473,9 @@ def test_channel_list_interpreters_released(self):
"""Test listing channel interpreters with a released channel."""
# Set up one channel with main interpreter on the send end and two
# subinterpreters on the receive end.
- interp0 = interpreters.get_main()
- interp1 = interpreters.create()
- interp2 = interpreters.create()
+ interp0, *_ = _interpreters.get_main()
+ interp1 = _interpreters.create()
+ interp2 = _interpreters.create()
cid = channels.create()
channels.send(cid, "data", blocking=False)
_run_output(interp1, dedent(f"""
@@ -494,7 +495,7 @@ def test_channel_list_interpreters_released(self):
# Release the main interpreter from the send end.
channels.release(cid, send=True)
- # Send end should have no associated interpreters.
+ # Send end should have no associated interpreters.
send_interps = channels.list_interpreters(cid, send=True)
recv_interps = channels.list_interpreters(cid, send=False)
self.assertEqual(len(send_interps), 0)
@@ -513,8 +514,8 @@ def test_channel_list_interpreters_released(self):
def test_channel_list_interpreters_closed(self):
"""Test listing channel interpreters with a closed channel."""
- interp0 = interpreters.get_main()
- interp1 = interpreters.create()
+ interp0, *_ = _interpreters.get_main()
+ interp1 = _interpreters.create()
cid = channels.create()
# Put something in the channel so that it's not empty.
channels.send(cid, "send", blocking=False)
@@ -535,8 +536,8 @@ def test_channel_list_interpreters_closed(self):
def test_channel_list_interpreters_closed_send_end(self):
"""Test listing channel interpreters with a channel's send end closed."""
- interp0 = interpreters.get_main()
- interp1 = interpreters.create()
+ interp0, *_ = _interpreters.get_main()
+ interp1 = _interpreters.create()
cid = channels.create()
# Put something in the channel so that it's not empty.
channels.send(cid, "send", blocking=False)
@@ -589,9 +590,9 @@ def test_allowed_types(self):
def test_run_string_arg_unresolved(self):
cid = channels.create()
- interp = interpreters.create()
+ interp = _interpreters.create()
- interpreters.set___main___attrs(interp, dict(cid=cid.send))
+ _interpreters.set___main___attrs(interp, dict(cid=cid.send))
out = _run_output(interp, dedent("""
import _xxinterpchannels as _channels
print(cid.end)
@@ -609,7 +610,7 @@ def test_run_string_arg_unresolved(self):
def test_run_string_arg_resolved(self):
cid = channels.create()
cid = channels._channel_id(cid, _resolve=True)
- interp = interpreters.create()
+ interp = _interpreters.create()
out = _run_output(interp, dedent("""
import _xxinterpchannels as _channels
@@ -635,7 +636,7 @@ def test_send_recv_main(self):
self.assertIsNot(obj, orig)
def test_send_recv_same_interpreter(self):
- id1 = interpreters.create()
+ id1 = _interpreters.create()
out = _run_output(id1, dedent("""
import _xxinterpchannels as _channels
cid = _channels.create()
@@ -648,7 +649,7 @@ def test_send_recv_same_interpreter(self):
def test_send_recv_different_interpreters(self):
cid = channels.create()
- id1 = interpreters.create()
+ id1 = _interpreters.create()
out = _run_output(id1, dedent(f"""
import _xxinterpchannels as _channels
_channels.send({cid}, b'spam', blocking=False)
@@ -674,7 +675,7 @@ def f():
def test_send_recv_different_interpreters_and_threads(self):
cid = channels.create()
- id1 = interpreters.create()
+ id1 = _interpreters.create()
out = None
def f():
@@ -737,12 +738,12 @@ def test_recv_default(self):
def test_recv_sending_interp_destroyed(self):
with self.subTest('closed'):
cid1 = channels.create()
- interp = interpreters.create()
- interpreters.run_string(interp, dedent(f"""
+ interp = _interpreters.create()
+ _interpreters.run_string(interp, dedent(f"""
import _xxinterpchannels as _channels
_channels.send({cid1}, b'spam', blocking=False)
"""))
- interpreters.destroy(interp)
+ _interpreters.destroy(interp)
with self.assertRaisesRegex(RuntimeError,
f'channel {cid1} is closed'):
@@ -750,13 +751,13 @@ def test_recv_sending_interp_destroyed(self):
del cid1
with self.subTest('still open'):
cid2 = channels.create()
- interp = interpreters.create()
- interpreters.run_string(interp, dedent(f"""
+ interp = _interpreters.create()
+ _interpreters.run_string(interp, dedent(f"""
import _xxinterpchannels as _channels
_channels.send({cid2}, b'spam', blocking=False)
"""))
channels.send(cid2, b'eggs', blocking=False)
- interpreters.destroy(interp)
+ _interpreters.destroy(interp)
channels.recv(cid2)
with self.assertRaisesRegex(RuntimeError,
@@ -1010,24 +1011,24 @@ def test_close_single_user(self):
def test_close_multiple_users(self):
cid = channels.create()
- id1 = interpreters.create()
- id2 = interpreters.create()
- interpreters.run_string(id1, dedent(f"""
+ id1 = _interpreters.create()
+ id2 = _interpreters.create()
+ _interpreters.run_string(id1, dedent(f"""
import _xxinterpchannels as _channels
_channels.send({cid}, b'spam', blocking=False)
"""))
- interpreters.run_string(id2, dedent(f"""
+ _interpreters.run_string(id2, dedent(f"""
import _xxinterpchannels as _channels
_channels.recv({cid})
"""))
channels.close(cid)
- excsnap = interpreters.run_string(id1, dedent(f"""
+ excsnap = _interpreters.run_string(id1, dedent(f"""
_channels.send({cid}, b'spam')
"""))
self.assertEqual(excsnap.type.__name__, 'ChannelClosedError')
- excsnap = interpreters.run_string(id2, dedent(f"""
+ excsnap = _interpreters.run_string(id2, dedent(f"""
_channels.send({cid}, b'spam')
"""))
self.assertEqual(excsnap.type.__name__, 'ChannelClosedError')
@@ -1154,8 +1155,8 @@ def test_close_never_used(self):
def test_close_by_unassociated_interp(self):
cid = channels.create()
channels.send(cid, b'spam', blocking=False)
- interp = interpreters.create()
- interpreters.run_string(interp, dedent(f"""
+ interp = _interpreters.create()
+ _interpreters.run_string(interp, dedent(f"""
import _xxinterpchannels as _channels
_channels.close({cid}, force=True)
"""))
@@ -1251,9 +1252,9 @@ def test_single_user(self):
def test_multiple_users(self):
cid = channels.create()
- id1 = interpreters.create()
- id2 = interpreters.create()
- interpreters.run_string(id1, dedent(f"""
+ id1 = _interpreters.create()
+ id2 = _interpreters.create()
+ _interpreters.run_string(id1, dedent(f"""
import _xxinterpchannels as _channels
_channels.send({cid}, b'spam', blocking=False)
"""))
@@ -1263,7 +1264,7 @@ def test_multiple_users(self):
_channels.release({cid})
print(repr(obj))
"""))
- interpreters.run_string(id1, dedent(f"""
+ _interpreters.run_string(id1, dedent(f"""
_channels.release({cid})
"""))
@@ -1310,8 +1311,8 @@ def test_never_used(self):
def test_by_unassociated_interp(self):
cid = channels.create()
channels.send(cid, b'spam', blocking=False)
- interp = interpreters.create()
- interpreters.run_string(interp, dedent(f"""
+ interp = _interpreters.create()
+ _interpreters.run_string(interp, dedent(f"""
import _xxinterpchannels as _channels
_channels.release({cid})
"""))
@@ -1325,8 +1326,8 @@ def test_by_unassociated_interp(self):
def test_close_if_unassociated(self):
# XXX Something's not right with this test...
cid = channels.create()
- interp = interpreters.create()
- interpreters.run_string(interp, dedent(f"""
+ interp = _interpreters.create()
+ _interpreters.run_string(interp, dedent(f"""
import _xxinterpchannels as _channels
obj = _channels.send({cid}, b'spam', blocking=False)
_channels.release({cid})
diff --git a/Lib/test/test__xxsubinterpreters.py b/Lib/test/test__xxsubinterpreters.py
index 841077adbb0f16..c8c964f642f1cf 100644
--- a/Lib/test/test__xxsubinterpreters.py
+++ b/Lib/test/test__xxsubinterpreters.py
@@ -13,7 +13,7 @@
from test.support import script_helper
-interpreters = import_helper.import_module('_xxsubinterpreters')
+_interpreters = import_helper.import_module('_xxsubinterpreters')
_testinternalcapi = import_helper.import_module('_testinternalcapi')
from _xxsubinterpreters import InterpreterNotFoundError
@@ -36,7 +36,7 @@ def _captured_script(script):
def _run_output(interp, request):
script, rpipe = _captured_script(request)
with rpipe:
- interpreters.run_string(interp, script)
+ _interpreters.run_string(interp, script)
return rpipe.read()
@@ -47,7 +47,7 @@ def _wait_for_interp_to_run(interp, timeout=None):
if timeout is None:
timeout = support.SHORT_TIMEOUT
for _ in support.sleeping_retry(timeout, error=False):
- if interpreters.is_running(interp):
+ if _interpreters.is_running(interp):
break
else:
raise RuntimeError('interp is not running')
@@ -57,7 +57,7 @@ def _wait_for_interp_to_run(interp, timeout=None):
def _running(interp):
r, w = os.pipe()
def run():
- interpreters.run_string(interp, dedent(f"""
+ _interpreters.run_string(interp, dedent(f"""
# wait for "signal"
with open({r}, encoding="utf-8") as rpipe:
rpipe.read()
@@ -75,12 +75,12 @@ def run():
def clean_up_interpreters():
- for id in interpreters.list_all():
+ for id, *_ in _interpreters.list_all():
if id == 0: # main
continue
try:
- interpreters.destroy(id)
- except interpreters.InterpreterError:
+ _interpreters.destroy(id)
+ except _interpreters.InterpreterError:
pass # already destroyed
@@ -112,7 +112,7 @@ def test_default_shareables(self):
for obj in shareables:
with self.subTest(obj):
self.assertTrue(
- interpreters.is_shareable(obj))
+ _interpreters.is_shareable(obj))
def test_not_shareable(self):
class Cheese:
@@ -141,7 +141,7 @@ class SubBytes(bytes):
for obj in not_shareables:
with self.subTest(repr(obj)):
self.assertFalse(
- interpreters.is_shareable(obj))
+ _interpreters.is_shareable(obj))
class ShareableTypeTests(unittest.TestCase):
@@ -230,7 +230,7 @@ class ModuleTests(TestBase):
def test_import_in_interpreter(self):
_run_output(
- interpreters.create(),
+ _interpreters.create(),
'import _xxsubinterpreters as _interpreters',
)
@@ -241,45 +241,45 @@ def test_import_in_interpreter(self):
class ListAllTests(TestBase):
def test_initial(self):
- main = interpreters.get_main()
- ids = interpreters.list_all()
+ main, *_ = _interpreters.get_main()
+ ids = [id for id, *_ in _interpreters.list_all()]
self.assertEqual(ids, [main])
def test_after_creating(self):
- main = interpreters.get_main()
- first = interpreters.create()
- second = interpreters.create()
- ids = interpreters.list_all()
+ main, *_ = _interpreters.get_main()
+ first = _interpreters.create()
+ second = _interpreters.create()
+ ids = [id for id, *_ in _interpreters.list_all()]
self.assertEqual(ids, [main, first, second])
def test_after_destroying(self):
- main = interpreters.get_main()
- first = interpreters.create()
- second = interpreters.create()
- interpreters.destroy(first)
- ids = interpreters.list_all()
+ main, *_ = _interpreters.get_main()
+ first = _interpreters.create()
+ second = _interpreters.create()
+ _interpreters.destroy(first)
+ ids = [id for id, *_ in _interpreters.list_all()]
self.assertEqual(ids, [main, second])
class GetCurrentTests(TestBase):
def test_main(self):
- main = interpreters.get_main()
- cur = interpreters.get_current()
+ main, *_ = _interpreters.get_main()
+ cur, *_ = _interpreters.get_current()
self.assertEqual(cur, main)
self.assertIsInstance(cur, int)
def test_subinterpreter(self):
- main = interpreters.get_main()
- interp = interpreters.create()
+ main, *_ = _interpreters.get_main()
+ interp = _interpreters.create()
out = _run_output(interp, dedent("""
import _xxsubinterpreters as _interpreters
- cur = _interpreters.get_current()
+ cur, *_ = _interpreters.get_current()
print(cur)
assert isinstance(cur, int)
"""))
cur = int(out.strip())
- _, expected = interpreters.list_all()
+ _, expected = [id for id, *_ in _interpreters.list_all()]
self.assertEqual(cur, expected)
self.assertNotEqual(cur, main)
@@ -287,17 +287,17 @@ def test_subinterpreter(self):
class GetMainTests(TestBase):
def test_from_main(self):
- [expected] = interpreters.list_all()
- main = interpreters.get_main()
+ [expected] = [id for id, *_ in _interpreters.list_all()]
+ main, *_ = _interpreters.get_main()
self.assertEqual(main, expected)
self.assertIsInstance(main, int)
def test_from_subinterpreter(self):
- [expected] = interpreters.list_all()
- interp = interpreters.create()
+ [expected] = [id for id, *_ in _interpreters.list_all()]
+ interp = _interpreters.create()
out = _run_output(interp, dedent("""
import _xxsubinterpreters as _interpreters
- main = _interpreters.get_main()
+ main, *_ = _interpreters.get_main()
print(main)
assert isinstance(main, int)
"""))
@@ -308,20 +308,20 @@ def test_from_subinterpreter(self):
class IsRunningTests(TestBase):
def test_main(self):
- main = interpreters.get_main()
- self.assertTrue(interpreters.is_running(main))
+ main, *_ = _interpreters.get_main()
+ self.assertTrue(_interpreters.is_running(main))
@unittest.skip('Fails on FreeBSD')
def test_subinterpreter(self):
- interp = interpreters.create()
- self.assertFalse(interpreters.is_running(interp))
+ interp = _interpreters.create()
+ self.assertFalse(_interpreters.is_running(interp))
with _running(interp):
- self.assertTrue(interpreters.is_running(interp))
- self.assertFalse(interpreters.is_running(interp))
+ self.assertTrue(_interpreters.is_running(interp))
+ self.assertFalse(_interpreters.is_running(interp))
def test_from_subinterpreter(self):
- interp = interpreters.create()
+ interp = _interpreters.create()
out = _run_output(interp, dedent(f"""
import _xxsubinterpreters as _interpreters
if _interpreters.is_running({interp}):
@@ -332,34 +332,35 @@ def test_from_subinterpreter(self):
self.assertEqual(out.strip(), 'True')
def test_already_destroyed(self):
- interp = interpreters.create()
- interpreters.destroy(interp)
+ interp = _interpreters.create()
+ _interpreters.destroy(interp)
with self.assertRaises(InterpreterNotFoundError):
- interpreters.is_running(interp)
+ _interpreters.is_running(interp)
def test_does_not_exist(self):
with self.assertRaises(InterpreterNotFoundError):
- interpreters.is_running(1_000_000)
+ _interpreters.is_running(1_000_000)
def test_bad_id(self):
with self.assertRaises(ValueError):
- interpreters.is_running(-1)
+ _interpreters.is_running(-1)
class CreateTests(TestBase):
def test_in_main(self):
- id = interpreters.create()
+ id = _interpreters.create()
self.assertIsInstance(id, int)
- self.assertIn(id, interpreters.list_all())
+ after = [id for id, *_ in _interpreters.list_all()]
+ self.assertIn(id, after)
@unittest.skip('enable this test when working on pystate.c')
def test_unique_id(self):
seen = set()
for _ in range(100):
- id = interpreters.create()
- interpreters.destroy(id)
+ id = _interpreters.create()
+ _interpreters.destroy(id)
seen.add(id)
self.assertEqual(len(seen), 100)
@@ -369,7 +370,7 @@ def test_in_thread(self):
id = None
def f():
nonlocal id
- id = interpreters.create()
+ id = _interpreters.create()
lock.acquire()
lock.release()
@@ -377,11 +378,12 @@ def f():
with lock:
t.start()
t.join()
- self.assertIn(id, interpreters.list_all())
+ after = set(id for id, *_ in _interpreters.list_all())
+ self.assertIn(id, after)
def test_in_subinterpreter(self):
- main, = interpreters.list_all()
- id1 = interpreters.create()
+ main, = [id for id, *_ in _interpreters.list_all()]
+ id1 = _interpreters.create()
out = _run_output(id1, dedent("""
import _xxsubinterpreters as _interpreters
id = _interpreters.create()
@@ -390,11 +392,12 @@ def test_in_subinterpreter(self):
"""))
id2 = int(out.strip())
- self.assertEqual(set(interpreters.list_all()), {main, id1, id2})
+ after = set(id for id, *_ in _interpreters.list_all())
+ self.assertEqual(after, {main, id1, id2})
def test_in_threaded_subinterpreter(self):
- main, = interpreters.list_all()
- id1 = interpreters.create()
+ main, = [id for id, *_ in _interpreters.list_all()]
+ id1 = _interpreters.create()
id2 = None
def f():
nonlocal id2
@@ -409,144 +412,155 @@ def f():
t.start()
t.join()
- self.assertEqual(set(interpreters.list_all()), {main, id1, id2})
+ after = set(id for id, *_ in _interpreters.list_all())
+ self.assertEqual(after, {main, id1, id2})
def test_after_destroy_all(self):
- before = set(interpreters.list_all())
+ before = set(id for id, *_ in _interpreters.list_all())
# Create 3 subinterpreters.
ids = []
for _ in range(3):
- id = interpreters.create()
+ id = _interpreters.create()
ids.append(id)
# Now destroy them.
for id in ids:
- interpreters.destroy(id)
+ _interpreters.destroy(id)
# Finally, create another.
- id = interpreters.create()
- self.assertEqual(set(interpreters.list_all()), before | {id})
+ id = _interpreters.create()
+ after = set(id for id, *_ in _interpreters.list_all())
+ self.assertEqual(after, before | {id})
def test_after_destroy_some(self):
- before = set(interpreters.list_all())
+ before = set(id for id, *_ in _interpreters.list_all())
# Create 3 subinterpreters.
- id1 = interpreters.create()
- id2 = interpreters.create()
- id3 = interpreters.create()
+ id1 = _interpreters.create()
+ id2 = _interpreters.create()
+ id3 = _interpreters.create()
# Now destroy 2 of them.
- interpreters.destroy(id1)
- interpreters.destroy(id3)
+ _interpreters.destroy(id1)
+ _interpreters.destroy(id3)
# Finally, create another.
- id = interpreters.create()
- self.assertEqual(set(interpreters.list_all()), before | {id, id2})
+ id = _interpreters.create()
+ after = set(id for id, *_ in _interpreters.list_all())
+ self.assertEqual(after, before | {id, id2})
class DestroyTests(TestBase):
def test_one(self):
- id1 = interpreters.create()
- id2 = interpreters.create()
- id3 = interpreters.create()
- self.assertIn(id2, interpreters.list_all())
- interpreters.destroy(id2)
- self.assertNotIn(id2, interpreters.list_all())
- self.assertIn(id1, interpreters.list_all())
- self.assertIn(id3, interpreters.list_all())
+ id1 = _interpreters.create()
+ id2 = _interpreters.create()
+ id3 = _interpreters.create()
+ before = set(id for id, *_ in _interpreters.list_all())
+ self.assertIn(id2, before)
+
+ _interpreters.destroy(id2)
+
+ after = set(id for id, *_ in _interpreters.list_all())
+ self.assertNotIn(id2, after)
+ self.assertIn(id1, after)
+ self.assertIn(id3, after)
def test_all(self):
- before = set(interpreters.list_all())
+ initial = set(id for id, *_ in _interpreters.list_all())
ids = set()
for _ in range(3):
- id = interpreters.create()
+ id = _interpreters.create()
ids.add(id)
- self.assertEqual(set(interpreters.list_all()), before | ids)
+ before = set(id for id, *_ in _interpreters.list_all())
+ self.assertEqual(before, initial | ids)
for id in ids:
- interpreters.destroy(id)
- self.assertEqual(set(interpreters.list_all()), before)
+ _interpreters.destroy(id)
+ after = set(id for id, *_ in _interpreters.list_all())
+ self.assertEqual(after, initial)
def test_main(self):
- main, = interpreters.list_all()
- with self.assertRaises(interpreters.InterpreterError):
- interpreters.destroy(main)
+ main, = [id for id, *_ in _interpreters.list_all()]
+ with self.assertRaises(_interpreters.InterpreterError):
+ _interpreters.destroy(main)
def f():
- with self.assertRaises(interpreters.InterpreterError):
- interpreters.destroy(main)
+ with self.assertRaises(_interpreters.InterpreterError):
+ _interpreters.destroy(main)
t = threading.Thread(target=f)
t.start()
t.join()
def test_already_destroyed(self):
- id = interpreters.create()
- interpreters.destroy(id)
+ id = _interpreters.create()
+ _interpreters.destroy(id)
with self.assertRaises(InterpreterNotFoundError):
- interpreters.destroy(id)
+ _interpreters.destroy(id)
def test_does_not_exist(self):
with self.assertRaises(InterpreterNotFoundError):
- interpreters.destroy(1_000_000)
+ _interpreters.destroy(1_000_000)
def test_bad_id(self):
with self.assertRaises(ValueError):
- interpreters.destroy(-1)
+ _interpreters.destroy(-1)
def test_from_current(self):
- main, = interpreters.list_all()
- id = interpreters.create()
+ main, = [id for id, *_ in _interpreters.list_all()]
+ id = _interpreters.create()
script = dedent(f"""
import _xxsubinterpreters as _interpreters
try:
_interpreters.destroy({id})
- except interpreters.InterpreterError:
+ except _interpreters.InterpreterError:
pass
""")
- interpreters.run_string(id, script)
- self.assertEqual(set(interpreters.list_all()), {main, id})
+ _interpreters.run_string(id, script)
+ after = set(id for id, *_ in _interpreters.list_all())
+ self.assertEqual(after, {main, id})
def test_from_sibling(self):
- main, = interpreters.list_all()
- id1 = interpreters.create()
- id2 = interpreters.create()
+ main, = [id for id, *_ in _interpreters.list_all()]
+ id1 = _interpreters.create()
+ id2 = _interpreters.create()
script = dedent(f"""
import _xxsubinterpreters as _interpreters
_interpreters.destroy({id2})
""")
- interpreters.run_string(id1, script)
+ _interpreters.run_string(id1, script)
- self.assertEqual(set(interpreters.list_all()), {main, id1})
+ after = set(id for id, *_ in _interpreters.list_all())
+ self.assertEqual(after, {main, id1})
def test_from_other_thread(self):
- id = interpreters.create()
+ id = _interpreters.create()
def f():
- interpreters.destroy(id)
+ _interpreters.destroy(id)
t = threading.Thread(target=f)
t.start()
t.join()
def test_still_running(self):
- main, = interpreters.list_all()
- interp = interpreters.create()
+ main, = [id for id, *_ in _interpreters.list_all()]
+ interp = _interpreters.create()
with _running(interp):
- self.assertTrue(interpreters.is_running(interp),
+ self.assertTrue(_interpreters.is_running(interp),
msg=f"Interp {interp} should be running before destruction.")
- with self.assertRaises(interpreters.InterpreterError,
+ with self.assertRaises(_interpreters.InterpreterError,
msg=f"Should not be able to destroy interp {interp} while it's still running."):
- interpreters.destroy(interp)
- self.assertTrue(interpreters.is_running(interp))
+ _interpreters.destroy(interp)
+ self.assertTrue(_interpreters.is_running(interp))
class RunStringTests(TestBase):
def setUp(self):
super().setUp()
- self.id = interpreters.create()
+ self.id = _interpreters.create()
def test_success(self):
script, file = _captured_script('print("it worked!", end="")')
with file:
- interpreters.run_string(self.id, script)
+ _interpreters.run_string(self.id, script)
out = file.read()
self.assertEqual(out, 'it worked!')
@@ -555,7 +569,7 @@ def test_in_thread(self):
script, file = _captured_script('print("it worked!", end="")')
with file:
def f():
- interpreters.run_string(self.id, script)
+ _interpreters.run_string(self.id, script)
t = threading.Thread(target=f)
t.start()
@@ -565,7 +579,7 @@ def f():
self.assertEqual(out, 'it worked!')
def test_create_thread(self):
- subinterp = interpreters.create()
+ subinterp = _interpreters.create()
script, file = _captured_script("""
import threading
def f():
@@ -576,7 +590,7 @@ def f():
t.join()
""")
with file:
- interpreters.run_string(subinterp, script)
+ _interpreters.run_string(subinterp, script)
out = file.read()
self.assertEqual(out, 'it worked!')
@@ -584,7 +598,7 @@ def f():
def test_create_daemon_thread(self):
with self.subTest('isolated'):
expected = 'spam spam spam spam spam'
- subinterp = interpreters.create('isolated')
+ subinterp = _interpreters.create('isolated')
script, file = _captured_script(f"""
import threading
def f():
@@ -598,13 +612,13 @@ def f():
print('{expected}', end='')
""")
with file:
- interpreters.run_string(subinterp, script)
+ _interpreters.run_string(subinterp, script)
out = file.read()
self.assertEqual(out, expected)
with self.subTest('not isolated'):
- subinterp = interpreters.create('legacy')
+ subinterp = _interpreters.create('legacy')
script, file = _captured_script("""
import threading
def f():
@@ -615,13 +629,13 @@ def f():
t.join()
""")
with file:
- interpreters.run_string(subinterp, script)
+ _interpreters.run_string(subinterp, script)
out = file.read()
self.assertEqual(out, 'it worked!')
def test_shareable_types(self):
- interp = interpreters.create()
+ interp = _interpreters.create()
objects = [
None,
'spam',
@@ -630,15 +644,15 @@ def test_shareable_types(self):
]
for obj in objects:
with self.subTest(obj):
- interpreters.set___main___attrs(interp, dict(obj=obj))
- interpreters.run_string(
+ _interpreters.set___main___attrs(interp, dict(obj=obj))
+ _interpreters.run_string(
interp,
f'assert(obj == {obj!r})',
)
def test_os_exec(self):
expected = 'spam spam spam spam spam'
- subinterp = interpreters.create()
+ subinterp = _interpreters.create()
script, file = _captured_script(f"""
import os, sys
try:
@@ -647,7 +661,7 @@ def test_os_exec(self):
print('{expected}', end='')
""")
with file:
- interpreters.run_string(subinterp, script)
+ _interpreters.run_string(subinterp, script)
out = file.read()
self.assertEqual(out, expected)
@@ -668,7 +682,7 @@ def test_fork(self):
with open('{file.name}', 'w', encoding='utf-8') as out:
out.write('{expected}')
""")
- interpreters.run_string(self.id, script)
+ _interpreters.run_string(self.id, script)
file.seek(0)
content = file.read()
@@ -676,31 +690,31 @@ def test_fork(self):
def test_already_running(self):
with _running(self.id):
- with self.assertRaises(interpreters.InterpreterError):
- interpreters.run_string(self.id, 'print("spam")')
+ with self.assertRaises(_interpreters.InterpreterError):
+ _interpreters.run_string(self.id, 'print("spam")')
def test_does_not_exist(self):
id = 0
- while id in interpreters.list_all():
+ while id in set(id for id, *_ in _interpreters.list_all()):
id += 1
with self.assertRaises(InterpreterNotFoundError):
- interpreters.run_string(id, 'print("spam")')
+ _interpreters.run_string(id, 'print("spam")')
def test_error_id(self):
with self.assertRaises(ValueError):
- interpreters.run_string(-1, 'print("spam")')
+ _interpreters.run_string(-1, 'print("spam")')
def test_bad_id(self):
with self.assertRaises(TypeError):
- interpreters.run_string('spam', 'print("spam")')
+ _interpreters.run_string('spam', 'print("spam")')
def test_bad_script(self):
with self.assertRaises(TypeError):
- interpreters.run_string(self.id, 10)
+ _interpreters.run_string(self.id, 10)
def test_bytes_for_script(self):
with self.assertRaises(TypeError):
- interpreters.run_string(self.id, b'print("spam")')
+ _interpreters.run_string(self.id, b'print("spam")')
def test_with_shared(self):
r, w = os.pipe()
@@ -721,8 +735,8 @@ def test_with_shared(self):
with open({w}, 'wb') as chan:
pickle.dump(ns, chan)
""")
- interpreters.set___main___attrs(self.id, shared)
- interpreters.run_string(self.id, script)
+ _interpreters.set___main___attrs(self.id, shared)
+ _interpreters.run_string(self.id, script)
with open(r, 'rb') as chan:
ns = pickle.load(chan)
@@ -732,7 +746,7 @@ def test_with_shared(self):
self.assertIsNone(ns['cheddar'])
def test_shared_overwrites(self):
- interpreters.run_string(self.id, dedent("""
+ _interpreters.run_string(self.id, dedent("""
spam = 'eggs'
ns1 = dict(vars())
del ns1['__builtins__']
@@ -743,8 +757,8 @@ def test_shared_overwrites(self):
ns2 = dict(vars())
del ns2['__builtins__']
""")
- interpreters.set___main___attrs(self.id, shared)
- interpreters.run_string(self.id, script)
+ _interpreters.set___main___attrs(self.id, shared)
+ _interpreters.run_string(self.id, script)
r, w = os.pipe()
script = dedent(f"""
@@ -754,7 +768,7 @@ def test_shared_overwrites(self):
with open({w}, 'wb') as chan:
pickle.dump(ns, chan)
""")
- interpreters.run_string(self.id, script)
+ _interpreters.run_string(self.id, script)
with open(r, 'rb') as chan:
ns = pickle.load(chan)
@@ -775,8 +789,8 @@ def test_shared_overwrites_default_vars(self):
with open({w}, 'wb') as chan:
pickle.dump(ns, chan)
""")
- interpreters.set___main___attrs(self.id, shared)
- interpreters.run_string(self.id, script)
+ _interpreters.set___main___attrs(self.id, shared)
+ _interpreters.run_string(self.id, script)
with open(r, 'rb') as chan:
ns = pickle.load(chan)
@@ -784,7 +798,7 @@ def test_shared_overwrites_default_vars(self):
def test_main_reused(self):
r, w = os.pipe()
- interpreters.run_string(self.id, dedent(f"""
+ _interpreters.run_string(self.id, dedent(f"""
spam = True
ns = dict(vars())
@@ -798,7 +812,7 @@ def test_main_reused(self):
ns1 = pickle.load(chan)
r, w = os.pipe()
- interpreters.run_string(self.id, dedent(f"""
+ _interpreters.run_string(self.id, dedent(f"""
eggs = False
ns = dict(vars())
@@ -827,7 +841,7 @@ def test_execution_namespace_is_main(self):
with open({w}, 'wb') as chan:
pickle.dump(ns, chan)
""")
- interpreters.run_string(self.id, script)
+ _interpreters.run_string(self.id, script)
with open(r, 'rb') as chan:
ns = pickle.load(chan)
@@ -872,13 +886,13 @@ class RunFailedTests(TestBase):
def setUp(self):
super().setUp()
- self.id = interpreters.create()
+ self.id = _interpreters.create()
def add_module(self, modname, text):
import tempfile
tempdir = tempfile.mkdtemp()
self.addCleanup(lambda: os_helper.rmtree(tempdir))
- interpreters.run_string(self.id, dedent(f"""
+ _interpreters.run_string(self.id, dedent(f"""
import sys
sys.path.insert(0, {tempdir!r})
"""))
@@ -900,11 +914,11 @@ class NeverError(Exception): pass
raise NeverError # never raised
""").format(dedent(text))
if fails:
- err = interpreters.run_string(self.id, script)
+ err = _interpreters.run_string(self.id, script)
self.assertIsNot(err, None)
return err
else:
- err = interpreters.run_string(self.id, script)
+ err = _interpreters.run_string(self.id, script)
self.assertIs(err, None)
return None
except:
@@ -1029,7 +1043,7 @@ class RunFuncTests(TestBase):
def setUp(self):
super().setUp()
- self.id = interpreters.create()
+ self.id = _interpreters.create()
def test_success(self):
r, w = os.pipe()
@@ -1039,8 +1053,8 @@ def script():
with open(w, 'w', encoding="utf-8") as spipe:
with contextlib.redirect_stdout(spipe):
print('it worked!', end='')
- interpreters.set___main___attrs(self.id, dict(w=w))
- interpreters.run_func(self.id, script)
+ _interpreters.set___main___attrs(self.id, dict(w=w))
+ _interpreters.run_func(self.id, script)
with open(r, encoding="utf-8") as outfile:
out = outfile.read()
@@ -1056,8 +1070,8 @@ def script():
with contextlib.redirect_stdout(spipe):
print('it worked!', end='')
def f():
- interpreters.set___main___attrs(self.id, dict(w=w))
- interpreters.run_func(self.id, script)
+ _interpreters.set___main___attrs(self.id, dict(w=w))
+ _interpreters.run_func(self.id, script)
t = threading.Thread(target=f)
t.start()
t.join()
@@ -1077,8 +1091,8 @@ def script():
with contextlib.redirect_stdout(spipe):
print('it worked!', end='')
code = script.__code__
- interpreters.set___main___attrs(self.id, dict(w=w))
- interpreters.run_func(self.id, code)
+ _interpreters.set___main___attrs(self.id, dict(w=w))
+ _interpreters.run_func(self.id, code)
with open(r, encoding="utf-8") as outfile:
out = outfile.read()
@@ -1091,7 +1105,7 @@ def script():
assert spam
with self.assertRaises(ValueError):
- interpreters.run_func(self.id, script)
+ _interpreters.run_func(self.id, script)
# XXX This hasn't been fixed yet.
@unittest.expectedFailure
@@ -1099,38 +1113,38 @@ def test_return_value(self):
def script():
return 'spam'
with self.assertRaises(ValueError):
- interpreters.run_func(self.id, script)
+ _interpreters.run_func(self.id, script)
def test_args(self):
with self.subTest('args'):
def script(a, b=0):
assert a == b
with self.assertRaises(ValueError):
- interpreters.run_func(self.id, script)
+ _interpreters.run_func(self.id, script)
with self.subTest('*args'):
def script(*args):
assert not args
with self.assertRaises(ValueError):
- interpreters.run_func(self.id, script)
+ _interpreters.run_func(self.id, script)
with self.subTest('**kwargs'):
def script(**kwargs):
assert not kwargs
with self.assertRaises(ValueError):
- interpreters.run_func(self.id, script)
+ _interpreters.run_func(self.id, script)
with self.subTest('kwonly'):
def script(*, spam=True):
assert spam
with self.assertRaises(ValueError):
- interpreters.run_func(self.id, script)
+ _interpreters.run_func(self.id, script)
with self.subTest('posonly'):
def script(spam, /):
assert spam
with self.assertRaises(ValueError):
- interpreters.run_func(self.id, script)
+ _interpreters.run_func(self.id, script)
if __name__ == '__main__':
diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py
index 3929e4e00d59c2..5b47cdaafb092e 100644
--- a/Lib/test/test_ast.py
+++ b/Lib/test/test_ast.py
@@ -1153,9 +1153,9 @@ def next(self):
@support.cpython_only
def test_ast_recursion_limit(self):
- fail_depth = support.EXCEEDS_RECURSION_LIMIT
+ fail_depth = support.exceeds_recursion_limit()
crash_depth = 100_000
- success_depth = int(support.Py_C_RECURSION_LIMIT * 0.8)
+ success_depth = int(support.get_c_recursion_limit() * 0.8)
if _testinternalcapi is not None:
remaining = _testinternalcapi.get_c_recursion_remaining()
success_depth = min(success_depth, remaining)
diff --git a/Lib/test/test_asyncio/test_queues.py b/Lib/test/test_asyncio/test_queues.py
index 2d058ccf6a8c72..5019e9a293525d 100644
--- a/Lib/test/test_asyncio/test_queues.py
+++ b/Lib/test/test_asyncio/test_queues.py
@@ -522,5 +522,204 @@ class PriorityQueueJoinTests(_QueueJoinTestMixin, unittest.IsolatedAsyncioTestCa
q_class = asyncio.PriorityQueue
+class _QueueShutdownTestMixin:
+ q_class = None
+
+ def assertRaisesShutdown(self, msg="Didn't appear to shut-down queue"):
+ return self.assertRaises(asyncio.QueueShutDown, msg=msg)
+
+ async def test_format(self):
+ q = self.q_class()
+ q.shutdown()
+ self.assertEqual(q._format(), 'maxsize=0 shutdown')
+
+ async def test_shutdown_empty(self):
+ # Test shutting down an empty queue
+
+ # Setup empty queue, and join() and get() tasks
+ q = self.q_class()
+ loop = asyncio.get_running_loop()
+ get_task = loop.create_task(q.get())
+ await asyncio.sleep(0) # want get task pending before shutdown
+
+ # Perform shut-down
+ q.shutdown(immediate=False) # unfinished tasks: 0 -> 0
+
+ self.assertEqual(q.qsize(), 0)
+
+ # Ensure join() task successfully finishes
+ await q.join()
+
+ # Ensure get() task is finished, and raised ShutDown
+ await asyncio.sleep(0)
+ self.assertTrue(get_task.done())
+ with self.assertRaisesShutdown():
+ await get_task
+
+ # Ensure put() and get() raise ShutDown
+ with self.assertRaisesShutdown():
+ await q.put("data")
+ with self.assertRaisesShutdown():
+ q.put_nowait("data")
+
+ with self.assertRaisesShutdown():
+ await q.get()
+ with self.assertRaisesShutdown():
+ q.get_nowait()
+
+ async def test_shutdown_nonempty(self):
+ # Test shutting down a non-empty queue
+
+ # Setup full queue with 1 item, and join() and put() tasks
+ q = self.q_class(maxsize=1)
+ loop = asyncio.get_running_loop()
+
+ q.put_nowait("data")
+ join_task = loop.create_task(q.join())
+ put_task = loop.create_task(q.put("data2"))
+
+ # Ensure put() task is not finished
+ await asyncio.sleep(0)
+ self.assertFalse(put_task.done())
+
+ # Perform shut-down
+ q.shutdown(immediate=False) # unfinished tasks: 1 -> 1
+
+ self.assertEqual(q.qsize(), 1)
+
+ # Ensure put() task is finished, and raised ShutDown
+ await asyncio.sleep(0)
+ self.assertTrue(put_task.done())
+ with self.assertRaisesShutdown():
+ await put_task
+
+ # Ensure get() succeeds on enqueued item
+ self.assertEqual(await q.get(), "data")
+
+ # Ensure join() task is not finished
+ await asyncio.sleep(0)
+ self.assertFalse(join_task.done())
+
+ # Ensure put() and get() raise ShutDown
+ with self.assertRaisesShutdown():
+ await q.put("data")
+ with self.assertRaisesShutdown():
+ q.put_nowait("data")
+
+ with self.assertRaisesShutdown():
+ await q.get()
+ with self.assertRaisesShutdown():
+ q.get_nowait()
+
+ # Ensure there is 1 unfinished task, and join() task succeeds
+ q.task_done()
+
+ await asyncio.sleep(0)
+ self.assertTrue(join_task.done())
+ await join_task
+
+ with self.assertRaises(
+ ValueError, msg="Didn't appear to mark all tasks done"
+ ):
+ q.task_done()
+
+ async def test_shutdown_immediate(self):
+ # Test immediately shutting down a queue
+
+ # Setup queue with 1 item, and a join() task
+ q = self.q_class()
+ loop = asyncio.get_running_loop()
+ q.put_nowait("data")
+ join_task = loop.create_task(q.join())
+
+ # Perform shut-down
+ q.shutdown(immediate=True) # unfinished tasks: 1 -> 0
+
+ self.assertEqual(q.qsize(), 0)
+
+ # Ensure join() task has successfully finished
+ await asyncio.sleep(0)
+ self.assertTrue(join_task.done())
+ await join_task
+
+ # Ensure put() and get() raise ShutDown
+ with self.assertRaisesShutdown():
+ await q.put("data")
+ with self.assertRaisesShutdown():
+ q.put_nowait("data")
+
+ with self.assertRaisesShutdown():
+ await q.get()
+ with self.assertRaisesShutdown():
+ q.get_nowait()
+
+ # Ensure there are no unfinished tasks
+ with self.assertRaises(
+ ValueError, msg="Didn't appear to mark all tasks done"
+ ):
+ q.task_done()
+
+ async def test_shutdown_immediate_with_unfinished(self):
+ # Test immediately shutting down a queue with unfinished tasks
+
+ # Setup queue with 2 items (1 retrieved), and a join() task
+ q = self.q_class()
+ loop = asyncio.get_running_loop()
+ q.put_nowait("data")
+ q.put_nowait("data")
+ join_task = loop.create_task(q.join())
+ self.assertEqual(await q.get(), "data")
+
+ # Perform shut-down
+ q.shutdown(immediate=True) # unfinished tasks: 2 -> 1
+
+ self.assertEqual(q.qsize(), 0)
+
+ # Ensure join() task is not finished
+ await asyncio.sleep(0)
+ self.assertFalse(join_task.done())
+
+ # Ensure put() and get() raise ShutDown
+ with self.assertRaisesShutdown():
+ await q.put("data")
+ with self.assertRaisesShutdown():
+ q.put_nowait("data")
+
+ with self.assertRaisesShutdown():
+ await q.get()
+ with self.assertRaisesShutdown():
+ q.get_nowait()
+
+ # Ensure there is 1 unfinished task
+ q.task_done()
+ with self.assertRaises(
+ ValueError, msg="Didn't appear to mark all tasks done"
+ ):
+ q.task_done()
+
+ # Ensure join() task has successfully finished
+ await asyncio.sleep(0)
+ self.assertTrue(join_task.done())
+ await join_task
+
+
+class QueueShutdownTests(
+ _QueueShutdownTestMixin, unittest.IsolatedAsyncioTestCase
+):
+ q_class = asyncio.Queue
+
+
+class LifoQueueShutdownTests(
+ _QueueShutdownTestMixin, unittest.IsolatedAsyncioTestCase
+):
+ q_class = asyncio.LifoQueue
+
+
+class PriorityQueueShutdownTests(
+ _QueueShutdownTestMixin, unittest.IsolatedAsyncioTestCase
+):
+ q_class = asyncio.PriorityQueue
+
+
if __name__ == '__main__':
unittest.main()
diff --git a/Lib/test/test_asyncio/test_streams.py b/Lib/test/test_asyncio/test_streams.py
index 2cf48538d5d30d..ae943f39869815 100644
--- a/Lib/test/test_asyncio/test_streams.py
+++ b/Lib/test/test_asyncio/test_streams.py
@@ -383,6 +383,10 @@ def test_readuntil_separator(self):
stream = asyncio.StreamReader(loop=self.loop)
with self.assertRaisesRegex(ValueError, 'Separator should be'):
self.loop.run_until_complete(stream.readuntil(separator=b''))
+ with self.assertRaisesRegex(ValueError, 'Separator should be'):
+ self.loop.run_until_complete(stream.readuntil(separator=(b'',)))
+ with self.assertRaisesRegex(ValueError, 'Separator should contain'):
+ self.loop.run_until_complete(stream.readuntil(separator=()))
def test_readuntil_multi_chunks(self):
stream = asyncio.StreamReader(loop=self.loop)
@@ -466,6 +470,55 @@ def test_readuntil_limit_found_sep(self):
self.assertEqual(b'some dataAAA', stream._buffer)
+ def test_readuntil_multi_separator(self):
+ stream = asyncio.StreamReader(loop=self.loop)
+
+ # Simple case
+ stream.feed_data(b'line 1\nline 2\r')
+ data = self.loop.run_until_complete(stream.readuntil((b'\r', b'\n')))
+ self.assertEqual(b'line 1\n', data)
+ data = self.loop.run_until_complete(stream.readuntil((b'\r', b'\n')))
+ self.assertEqual(b'line 2\r', data)
+ self.assertEqual(b'', stream._buffer)
+
+ # First end position matches, even if that's a longer match
+ stream.feed_data(b'ABCDEFG')
+ data = self.loop.run_until_complete(stream.readuntil((b'DEF', b'BCDE')))
+ self.assertEqual(b'ABCDE', data)
+ self.assertEqual(b'FG', stream._buffer)
+
+ def test_readuntil_multi_separator_limit(self):
+ stream = asyncio.StreamReader(loop=self.loop, limit=3)
+ stream.feed_data(b'some dataA')
+
+ with self.assertRaisesRegex(asyncio.LimitOverrunError,
+ 'is found') as cm:
+ self.loop.run_until_complete(stream.readuntil((b'A', b'ome dataA')))
+
+ self.assertEqual(b'some dataA', stream._buffer)
+
+ def test_readuntil_multi_separator_negative_offset(self):
+ # If the buffer is big enough for the smallest separator (but does
+ # not contain it) but too small for the largest, `offset` must not
+ # become negative.
+ stream = asyncio.StreamReader(loop=self.loop)
+ stream.feed_data(b'data')
+
+ readuntil_task = self.loop.create_task(stream.readuntil((b'A', b'long sep')))
+ self.loop.call_soon(stream.feed_data, b'Z')
+ self.loop.call_soon(stream.feed_data, b'Aaaa')
+
+ data = self.loop.run_until_complete(readuntil_task)
+ self.assertEqual(b'dataZA', data)
+ self.assertEqual(b'aaa', stream._buffer)
+
+ def test_readuntil_bytearray(self):
+ stream = asyncio.StreamReader(loop=self.loop)
+ stream.feed_data(b'some data\r\n')
+ data = self.loop.run_until_complete(stream.readuntil(bytearray(b'\r\n')))
+ self.assertEqual(b'some data\r\n', data)
+ self.assertEqual(b'', stream._buffer)
+
def test_readexactly_zero_or_less(self):
# Read exact number of bytes (zero or less).
stream = asyncio.StreamReader(loop=self.loop)
diff --git a/Lib/test/test_asyncio/test_taskgroups.py b/Lib/test/test_asyncio/test_taskgroups.py
index 1ec8116953f811..4852536defc93d 100644
--- a/Lib/test/test_asyncio/test_taskgroups.py
+++ b/Lib/test/test_asyncio/test_taskgroups.py
@@ -833,6 +833,72 @@ async def run_coro_after_tg_closes():
loop = asyncio.get_event_loop()
loop.run_until_complete(run_coro_after_tg_closes())
+ async def test_cancelling_level_preserved(self):
+ async def raise_after(t, e):
+ await asyncio.sleep(t)
+ raise e()
+
+ try:
+ async with asyncio.TaskGroup() as tg:
+ tg.create_task(raise_after(0.0, RuntimeError))
+ except* RuntimeError:
+ pass
+ self.assertEqual(asyncio.current_task().cancelling(), 0)
+
+ async def test_nested_groups_both_cancelled(self):
+ async def raise_after(t, e):
+ await asyncio.sleep(t)
+ raise e()
+
+ try:
+ async with asyncio.TaskGroup() as outer_tg:
+ try:
+ async with asyncio.TaskGroup() as inner_tg:
+ inner_tg.create_task(raise_after(0, RuntimeError))
+ outer_tg.create_task(raise_after(0, ValueError))
+ except* RuntimeError:
+ pass
+ else:
+ self.fail("RuntimeError not raised")
+ self.assertEqual(asyncio.current_task().cancelling(), 1)
+ except* ValueError:
+ pass
+ else:
+ self.fail("ValueError not raised")
+ self.assertEqual(asyncio.current_task().cancelling(), 0)
+
+ async def test_error_and_cancel(self):
+ event = asyncio.Event()
+
+ async def raise_error():
+ event.set()
+ await asyncio.sleep(0)
+ raise RuntimeError()
+
+ async def inner():
+ try:
+ async with taskgroups.TaskGroup() as tg:
+ tg.create_task(raise_error())
+ await asyncio.sleep(1)
+ self.fail("Sleep in group should have been cancelled")
+ except* RuntimeError:
+ self.assertEqual(asyncio.current_task().cancelling(), 1)
+ self.assertEqual(asyncio.current_task().cancelling(), 1)
+ await asyncio.sleep(1)
+ self.fail("Sleep after group should have been cancelled")
+
+ async def outer():
+ t = asyncio.create_task(inner())
+ await event.wait()
+ self.assertEqual(t.cancelling(), 0)
+ t.cancel()
+ self.assertEqual(t.cancelling(), 1)
+ with self.assertRaises(asyncio.CancelledError):
+ await t
+ self.assertTrue(t.cancelled())
+
+ await outer()
+
if __name__ == "__main__":
unittest.main()
diff --git a/Lib/test/test_asyncio/test_tasks.py b/Lib/test/test_asyncio/test_tasks.py
index bc6d88e65a4966..5b09c81faef62a 100644
--- a/Lib/test/test_asyncio/test_tasks.py
+++ b/Lib/test/test_asyncio/test_tasks.py
@@ -684,6 +684,30 @@ def on_timeout():
finally:
loop.close()
+ def test_uncancel_resets_must_cancel(self):
+
+ async def coro():
+ await fut
+ return 42
+
+ loop = asyncio.new_event_loop()
+ fut = asyncio.Future(loop=loop)
+ task = self.new_task(loop, coro())
+ loop.run_until_complete(asyncio.sleep(0)) # Get task waiting for fut
+ fut.set_result(None) # Make task runnable
+ try:
+ task.cancel() # Enter cancelled state
+ self.assertEqual(task.cancelling(), 1)
+ self.assertTrue(task._must_cancel)
+
+ task.uncancel() # Undo cancellation
+ self.assertEqual(task.cancelling(), 0)
+ self.assertFalse(task._must_cancel)
+ finally:
+ res = loop.run_until_complete(task)
+ self.assertEqual(res, 42)
+ loop.close()
+
def test_cancel(self):
def gen():
diff --git a/Lib/test/test_capi/test_dict.py b/Lib/test/test_capi/test_dict.py
index bcc978d224a583..e726e3d813d888 100644
--- a/Lib/test/test_capi/test_dict.py
+++ b/Lib/test/test_capi/test_dict.py
@@ -2,8 +2,11 @@
from collections import OrderedDict, UserDict
from types import MappingProxyType
from test import support
-import _testcapi
-import _testlimitedcapi
+from test.support import import_helper
+
+
+_testcapi = import_helper.import_module("_testcapi")
+_testlimitedcapi = import_helper.import_module("_testlimitedcapi")
NULL = None
diff --git a/Lib/test/test_capi/test_eval_code_ex.py b/Lib/test/test_capi/test_eval_code_ex.py
index 2d28e5289eff94..b298e5007e5e7d 100644
--- a/Lib/test/test_capi/test_eval_code_ex.py
+++ b/Lib/test/test_capi/test_eval_code_ex.py
@@ -1,11 +1,16 @@
import unittest
+import builtins
+from collections import UserDict
from test.support import import_helper
+from test.support import swap_attr
# Skip this test if the _testcapi module isn't available.
_testcapi = import_helper.import_module('_testcapi')
+NULL = None
+
class PyEval_EvalCodeExTests(unittest.TestCase):
@@ -13,43 +18,108 @@ def test_simple(self):
def f():
return a
- self.assertEqual(_testcapi.eval_code_ex(f.__code__, dict(a=1)), 1)
-
- # Need to force the compiler to use LOAD_NAME
- # def test_custom_locals(self):
- # def f():
- # return
+ eval_code_ex = _testcapi.eval_code_ex
+ code = f.__code__
+ self.assertEqual(eval_code_ex(code, dict(a=1)), 1)
+
+ self.assertRaises(NameError, eval_code_ex, code, {})
+ self.assertRaises(SystemError, eval_code_ex, code, UserDict(a=1))
+ self.assertRaises(SystemError, eval_code_ex, code, [])
+ self.assertRaises(SystemError, eval_code_ex, code, 1)
+ # CRASHES eval_code_ex(code, NULL)
+ # CRASHES eval_code_ex(1, {})
+ # CRASHES eval_code_ex(NULL, {})
+
+ def test_custom_locals(self):
+ # Monkey-patch __build_class__ to get a class code object.
+ code = None
+ def build_class(func, name, /, *bases, **kwds):
+ nonlocal code
+ code = func.__code__
+
+ with swap_attr(builtins, '__build_class__', build_class):
+ class A:
+ # Uses LOAD_NAME for a
+ r[:] = [a]
+
+ eval_code_ex = _testcapi.eval_code_ex
+ results = []
+ g = dict(a=1, r=results)
+ self.assertIsNone(eval_code_ex(code, g))
+ self.assertEqual(results, [1])
+ self.assertIsNone(eval_code_ex(code, g, dict(a=2)))
+ self.assertEqual(results, [2])
+ self.assertIsNone(eval_code_ex(code, g, UserDict(a=3)))
+ self.assertEqual(results, [3])
+ self.assertIsNone(eval_code_ex(code, g, {}))
+ self.assertEqual(results, [1])
+ self.assertIsNone(eval_code_ex(code, g, NULL))
+ self.assertEqual(results, [1])
+
+ self.assertRaises(TypeError, eval_code_ex, code, g, [])
+ self.assertRaises(TypeError, eval_code_ex, code, g, 1)
+ self.assertRaises(NameError, eval_code_ex, code, dict(r=results), {})
+ self.assertRaises(NameError, eval_code_ex, code, dict(r=results), NULL)
+ self.assertRaises(TypeError, eval_code_ex, code, dict(r=results), [])
+ self.assertRaises(TypeError, eval_code_ex, code, dict(r=results), 1)
def test_with_args(self):
def f(a, b, c):
return a
- self.assertEqual(_testcapi.eval_code_ex(f.__code__, {}, {}, (1, 2, 3)), 1)
+ eval_code_ex = _testcapi.eval_code_ex
+ code = f.__code__
+ self.assertEqual(eval_code_ex(code, {}, {}, (1, 2, 3)), 1)
+ self.assertRaises(TypeError, eval_code_ex, code, {}, {}, (1, 2))
+ self.assertRaises(TypeError, eval_code_ex, code, {}, {}, (1, 2, 3, 4))
def test_with_kwargs(self):
def f(a, b, c):
return a
- self.assertEqual(_testcapi.eval_code_ex(f.__code__, {}, {}, (), dict(a=1, b=2, c=3)), 1)
+ eval_code_ex = _testcapi.eval_code_ex
+ code = f.__code__
+ self.assertEqual(eval_code_ex(code, {}, {}, (), dict(a=1, b=2, c=3)), 1)
+ self.assertRaises(TypeError, eval_code_ex, code, {}, {}, (), dict(a=1, b=2))
+ self.assertRaises(TypeError, eval_code_ex, code, {}, {}, (), dict(a=1, b=2))
+ self.assertRaises(TypeError, eval_code_ex, code, {}, {}, (), dict(a=1, b=2, c=3, d=4))
def test_with_default(self):
def f(a):
return a
- self.assertEqual(_testcapi.eval_code_ex(f.__code__, {}, {}, (), {}, (1,)), 1)
+ eval_code_ex = _testcapi.eval_code_ex
+ code = f.__code__
+ self.assertEqual(eval_code_ex(code, {}, {}, (), {}, (1,)), 1)
+ self.assertRaises(TypeError, eval_code_ex, code, {}, {}, (), {}, ())
def test_with_kwarg_default(self):
def f(*, a):
return a
- self.assertEqual(_testcapi.eval_code_ex(f.__code__, {}, {}, (), {}, (), dict(a=1)), 1)
+ eval_code_ex = _testcapi.eval_code_ex
+ code = f.__code__
+ self.assertEqual(eval_code_ex(code, {}, {}, (), {}, (), dict(a=1)), 1)
+ self.assertRaises(TypeError, eval_code_ex, code, {}, {}, (), {}, (), {})
+ self.assertRaises(TypeError, eval_code_ex, code, {}, {}, (), {}, (), NULL)
+ self.assertRaises(SystemError, eval_code_ex, code, {}, {}, (), {}, (), UserDict(a=1))
+ self.assertRaises(SystemError, eval_code_ex, code, {}, {}, (), {}, (), [])
+ self.assertRaises(SystemError, eval_code_ex, code, {}, {}, (), {}, (), 1)
def test_with_closure(self):
a = 1
+ b = 2
def f():
+ b
return a
- self.assertEqual(_testcapi.eval_code_ex(f.__code__, {}, {}, (), {}, (), {}, f.__closure__), 1)
+ eval_code_ex = _testcapi.eval_code_ex
+ code = f.__code__
+ self.assertEqual(eval_code_ex(code, {}, {}, (), {}, (), {}, f.__closure__), 1)
+ self.assertEqual(eval_code_ex(code, {}, {}, (), {}, (), {}, f.__closure__[::-1]), 2)
+
+        # CRASHES eval_code_ex(code, {}, {}, (), {}, (), {}, ())
+        # CRASHES eval_code_ex(code, {}, {}, (), {}, (), {}, NULL)
if __name__ == "__main__":
diff --git a/Lib/test/test_capi/test_misc.py b/Lib/test/test_capi/test_misc.py
index 2f2bf03749f834..9c24ec8fd05b12 100644
--- a/Lib/test/test_capi/test_misc.py
+++ b/Lib/test/test_capi/test_misc.py
@@ -26,6 +26,8 @@
from test.support import threading_helper
from test.support import warnings_helper
from test.support import requires_limited_api
+from test.support import requires_gil_enabled, expected_failure_if_gil_disabled
+from test.support import Py_GIL_DISABLED
from test.support.script_helper import assert_python_failure, assert_python_ok, run_python_until_end
try:
import _posixsubprocess
@@ -2023,15 +2025,30 @@ def test_configured_settings(self):
kwlist[-2] = 'check_multi_interp_extensions'
kwlist[-1] = 'own_gil'
- # expected to work
- for config, expected in {
+ expected_to_work = {
(True, True, True, True, True, True, True):
(ALL_FLAGS, True),
(True, False, False, False, False, False, False):
(OBMALLOC, False),
(False, False, False, True, False, True, False):
(THREADS | EXTENSIONS, False),
- }.items():
+ }
+
+ expected_to_fail = {
+ (False, False, False, False, False, False, False),
+ }
+
+ # gh-117649: The free-threaded build does not currently allow
+ # setting check_multi_interp_extensions to False.
+ if Py_GIL_DISABLED:
+ for config in list(expected_to_work.keys()):
+ kwargs = dict(zip(kwlist, config))
+ if not kwargs['check_multi_interp_extensions']:
+ del expected_to_work[config]
+ expected_to_fail.add(config)
+
+ # expected to work
+ for config, expected in expected_to_work.items():
kwargs = dict(zip(kwlist, config))
exp_flags, exp_gil = expected
expected = {
@@ -2055,9 +2072,7 @@ def test_configured_settings(self):
self.assertEqual(settings, expected)
# expected to fail
- for config in [
- (False, False, False, False, False, False, False),
- ]:
+ for config in expected_to_fail:
kwargs = dict(zip(kwlist, config))
with self.subTest(config):
script = textwrap.dedent(f'''
@@ -2065,11 +2080,14 @@ def test_configured_settings(self):
_testinternalcapi.get_interp_settings()
raise NotImplementedError('unreachable')
''')
- with self.assertRaises(RuntimeError):
+ with self.assertRaises(_interpreters.InterpreterError):
support.run_in_subinterp_with_config(script, **kwargs)
@unittest.skipIf(_testsinglephase is None, "test requires _testsinglephase module")
@unittest.skipUnless(hasattr(os, "pipe"), "requires os.pipe()")
+ # gh-117649: The free-threaded build does not currently allow overriding
+ # the check_multi_interp_extensions setting.
+ @expected_failure_if_gil_disabled()
def test_overridden_setting_extensions_subinterp_check(self):
"""
PyInterpreterConfig.check_multi_interp_extensions can be overridden
@@ -2121,6 +2139,9 @@ def check(enabled, override):
}
r, w = os.pipe()
+ if Py_GIL_DISABLED:
+ # gh-117649: The test fails before `w` is closed
+ self.addCleanup(os.close, w)
script = textwrap.dedent(f'''
from test.test_capi.check_config import run_singlephase_check
run_singlephase_check({override}, {w})
@@ -2165,6 +2186,9 @@ def test_mutate_exception(self):
self.assertFalse(hasattr(binascii.Error, "foobar"))
@unittest.skipIf(_testmultiphase is None, "test requires _testmultiphase module")
+ # gh-117649: The free-threaded build does not currently support sharing
+ # extension module state between interpreters.
+ @expected_failure_if_gil_disabled()
def test_module_state_shared_in_global(self):
"""
bpo-44050: Extension module state should be shared between interpreters
@@ -2223,7 +2247,7 @@ class InterpreterConfigTests(unittest.TestCase):
allow_exec=True,
allow_threads=True,
allow_daemon_threads=True,
- check_multi_interp_extensions=False,
+ check_multi_interp_extensions=bool(Py_GIL_DISABLED),
gil='shared',
),
'empty': types.SimpleNamespace(
@@ -2386,6 +2410,8 @@ def test_interp_init(self):
check_multi_interp_extensions=False
),
]
+ if Py_GIL_DISABLED:
+ invalid.append(dict(check_multi_interp_extensions=False))
def match(config, override_cases):
ns = vars(config)
for overrides in override_cases:
@@ -2403,7 +2429,7 @@ def check(config):
continue
if match(config, invalid):
with self.subTest(f'invalid: {config}'):
- with self.assertRaises(RuntimeError):
+ with self.assertRaises(_interpreters.InterpreterError):
check(config)
elif match(config, questionable):
with self.subTest(f'questionable: {config}'):
@@ -2427,7 +2453,9 @@ def new_interp(config):
with self.subTest('main'):
expected = _interpreters.new_config('legacy')
expected.gil = 'own'
- interpid = _interpreters.get_main()
+ if Py_GIL_DISABLED:
+ expected.check_multi_interp_extensions = False
+ interpid, *_ = _interpreters.get_main()
config = _interpreters.get_config(interpid)
self.assert_ns_equal(config, expected)
@@ -2448,6 +2476,7 @@ def new_interp(config):
'empty',
use_main_obmalloc=True,
gil='shared',
+ check_multi_interp_extensions=bool(Py_GIL_DISABLED),
)
with new_interp(orig) as interpid:
config = _interpreters.get_config(interpid)
@@ -2579,7 +2608,7 @@ def test_linked_lifecycle_does_not_exist(self):
def test_linked_lifecycle_initial(self):
is_linked = _testinternalcapi.interpreter_refcount_linked
- get_refcount = _testinternalcapi.get_interpreter_refcount
+ get_refcount, _, _ = self.get_refcount_helpers()
# A new interpreter will start out not linked, with a refcount of 0.
interpid = self.new_interpreter()
diff --git a/Lib/test/test_capi/test_opt.py b/Lib/test/test_capi/test_opt.py
index 7ca0f6927fe4a1..28d18739b6d4a5 100644
--- a/Lib/test/test_capi/test_opt.py
+++ b/Lib/test/test_capi/test_opt.py
@@ -6,9 +6,10 @@
import os
import _opcode
-import _testinternalcapi
-from test.support import script_helper, requires_specialization
+from test.support import script_helper, requires_specialization, import_helper
+
+_testinternalcapi = import_helper.import_module("_testinternalcapi")
from _testinternalcapi import TIER2_THRESHOLD
diff --git a/Lib/test/test_capi/test_unicode.py b/Lib/test/test_capi/test_unicode.py
index a64c75c415c3fe..a69f817c515ba7 100644
--- a/Lib/test/test_capi/test_unicode.py
+++ b/Lib/test/test_capi/test_unicode.py
@@ -650,6 +650,40 @@ def check_format(expected, format, *args):
check_format('\U0001f4bb+' if sizeof(c_wchar) > 2 else '\U0001f4bb',
b'%.2lV', None, c_wchar_p('\U0001f4bb+\U0001f40d'))
+ # test %T
+ check_format('type: str',
+ b'type: %T', py_object("abc"))
+ check_format(f'type: st',
+ b'type: %.2T', py_object("abc"))
+ check_format(f'type: str',
+ b'type: %10T', py_object("abc"))
+
+ class LocalType:
+ pass
+ obj = LocalType()
+ fullname = f'{__name__}.{LocalType.__qualname__}'
+ check_format(f'type: {fullname}',
+ b'type: %T', py_object(obj))
+ fullname_alt = f'{__name__}:{LocalType.__qualname__}'
+ check_format(f'type: {fullname_alt}',
+ b'type: %#T', py_object(obj))
+
+ # test %N
+ check_format('type: str',
+ b'type: %N', py_object(str))
+ check_format(f'type: st',
+ b'type: %.2N', py_object(str))
+ check_format(f'type: str',
+ b'type: %10N', py_object(str))
+
+ check_format(f'type: {fullname}',
+ b'type: %N', py_object(type(obj)))
+ check_format(f'type: {fullname_alt}',
+ b'type: %#N', py_object(type(obj)))
+ with self.assertRaisesRegex(TypeError, "%N argument must be a type"):
+ check_format('type: str',
+ b'type: %N', py_object("abc"))
+
# test variable width and precision
check_format(' abc', b'%*s', c_int(5), b'abc')
check_format('ab', b'%.*s', c_int(2), b'abc')
diff --git a/Lib/test/test_capi/test_watchers.py b/Lib/test/test_capi/test_watchers.py
index ae062b1bda26b7..8e84d0077c7573 100644
--- a/Lib/test/test_capi/test_watchers.py
+++ b/Lib/test/test_capi/test_watchers.py
@@ -1,7 +1,7 @@
import unittest
from contextlib import contextmanager, ExitStack
-from test.support import catch_unraisable_exception, import_helper
+from test.support import catch_unraisable_exception, import_helper, gc_collect
# Skip this test if the _testcapi module isn't available.
@@ -372,6 +372,7 @@ def code_watcher(self, which_watcher):
def assert_event_counts(self, exp_created_0, exp_destroyed_0,
exp_created_1, exp_destroyed_1):
+ gc_collect() # code objects are collected by GC in free-threaded build
self.assertEqual(
exp_created_0, _testcapi.get_code_watcher_num_created_events(0))
self.assertEqual(
@@ -432,6 +433,7 @@ def test_dealloc_error(self):
with self.code_watcher(2):
with catch_unraisable_exception() as cm:
del co
+ gc_collect()
self.assertEqual(str(cm.unraisable.exc_value), "boom!")
diff --git a/Lib/test/test_class.py b/Lib/test/test_class.py
index 4c1814142736e3..a9cfd8df691845 100644
--- a/Lib/test/test_class.py
+++ b/Lib/test/test_class.py
@@ -862,6 +862,16 @@ class C: pass
self.assertFalse(has_inline_values(c))
self.check_100(c)
+ def test_bug_117750(self):
+ "Aborted on 3.13a6"
+ class C:
+ def __init__(self):
+ self.__dict__.clear()
+
+ obj = C()
+ self.assertEqual(obj.__dict__, {})
+ obj.foo = None # Aborted here
+ self.assertEqual(obj.__dict__, {"foo":None})
if __name__ == '__main__':
diff --git a/Lib/test/test_clinic.py b/Lib/test/test_clinic.py
index 9788ac0261fa49..43b629f59f0346 100644
--- a/Lib/test/test_clinic.py
+++ b/Lib/test/test_clinic.py
@@ -5,7 +5,7 @@
from functools import partial
from test import support, test_tools
from test.support import os_helper
-from test.support.os_helper import TESTFN, unlink
+from test.support.os_helper import TESTFN, unlink, rmtree
from textwrap import dedent
from unittest import TestCase
import inspect
@@ -662,6 +662,61 @@ class C "void *" ""
err = "Illegal C basename: '.illegal.'"
self.expect_failure(block, err, lineno=7)
+ def test_cloned_forced_text_signature(self):
+ block = dedent("""
+ /*[clinic input]
+ @text_signature "($module, a[, b])"
+ src
+ a: object
+ param a
+ b: object = NULL
+ /
+
+ docstring
+ [clinic start generated code]*/
+
+ /*[clinic input]
+ dst = src
+ [clinic start generated code]*/
+ """)
+ self.clinic.parse(block)
+ self.addCleanup(rmtree, "clinic")
+ funcs = self.clinic.functions
+ self.assertEqual(len(funcs), 2)
+
+ src_docstring_lines = funcs[0].docstring.split("\n")
+ dst_docstring_lines = funcs[1].docstring.split("\n")
+
+ # Signatures are copied.
+ self.assertEqual(src_docstring_lines[0], "src($module, a[, b])")
+ self.assertEqual(dst_docstring_lines[0], "dst($module, a[, b])")
+
+ # Param docstrings are copied.
+ self.assertIn(" param a", src_docstring_lines)
+ self.assertIn(" param a", dst_docstring_lines)
+
+ # Docstrings are not copied.
+ self.assertIn("docstring", src_docstring_lines)
+ self.assertNotIn("docstring", dst_docstring_lines)
+
+ def test_cloned_forced_text_signature_illegal(self):
+ block = """
+ /*[clinic input]
+ @text_signature "($module, a[, b])"
+ src
+ a: object
+ b: object = NULL
+ /
+ [clinic start generated code]*/
+
+ /*[clinic input]
+ @text_signature "($module, a_override[, b])"
+ dst = src
+ [clinic start generated code]*/
+ """
+ err = "Cannot use @text_signature when cloning a function"
+ self.expect_failure(block, err, lineno=11)
+
class ParseFileUnitTest(TestCase):
def expect_parsing_failure(
@@ -822,9 +877,8 @@ def _test(self, input, output):
blocks = list(BlockParser(input, language))
writer = BlockPrinter(language)
- c = _make_clinic()
for block in blocks:
- writer.print_block(block, limited_capi=c.limited_capi, header_includes=c.includes)
+ writer.print_block(block)
output = writer.f.getvalue()
assert output == input, "output != input!\n\noutput " + repr(output) + "\n\n input " + repr(input)
@@ -2454,6 +2508,16 @@ def test_state_func_docstring_only_one_param_template(self):
"""
self.expect_failure(block, err, lineno=7)
+ def test_kind_defining_class(self):
+ function = self.parse_function("""
+ module m
+ class m.C "PyObject *" ""
+ m.C.meth
+ cls: defining_class
+ """, signatures_in_block=3, function_index=2)
+ p = function.parameters['cls']
+ self.assertEqual(p.kind, inspect.Parameter.POSITIONAL_ONLY)
+
class ClinicExternalTest(TestCase):
maxDiff = None
@@ -3339,26 +3403,50 @@ def test_cloned_func_with_converter_exception_message(self):
func = getattr(ac_tester, name)
self.assertEqual(func(), name)
- def test_meth_method_no_params(self):
+ def test_get_defining_class(self):
obj = ac_tester.TestClass()
- meth = obj.meth_method_no_params
+ meth = obj.get_defining_class
+ self.assertIs(obj.get_defining_class(), ac_tester.TestClass)
+
+ # 'defining_class' argument is a positional only argument
+ with self.assertRaises(TypeError):
+ obj.get_defining_class_arg(cls=ac_tester.TestClass)
+
check = partial(self.assertRaisesRegex, TypeError, "no arguments")
check(meth, 1)
check(meth, a=1)
- def test_meth_method_no_params_capi(self):
+ def test_get_defining_class_capi(self):
from _testcapi import pyobject_vectorcall
obj = ac_tester.TestClass()
- meth = obj.meth_method_no_params
+ meth = obj.get_defining_class
pyobject_vectorcall(meth, None, None)
pyobject_vectorcall(meth, (), None)
pyobject_vectorcall(meth, (), ())
pyobject_vectorcall(meth, None, ())
+ self.assertIs(pyobject_vectorcall(meth, (), ()), ac_tester.TestClass)
check = partial(self.assertRaisesRegex, TypeError, "no arguments")
check(pyobject_vectorcall, meth, (1,), None)
check(pyobject_vectorcall, meth, (1,), ("a",))
+ def test_get_defining_class_arg(self):
+ obj = ac_tester.TestClass()
+ self.assertEqual(obj.get_defining_class_arg("arg"),
+ (ac_tester.TestClass, "arg"))
+ self.assertEqual(obj.get_defining_class_arg(arg=123),
+ (ac_tester.TestClass, 123))
+
+ # 'defining_class' argument is a positional only argument
+ with self.assertRaises(TypeError):
+ obj.get_defining_class_arg(cls=ac_tester.TestClass, arg="arg")
+
+ # wrong number of arguments
+ with self.assertRaises(TypeError):
+ obj.get_defining_class_arg()
+ with self.assertRaises(TypeError):
+ obj.get_defining_class_arg("arg1", "arg2")
+
def test_depr_star_new(self):
cls = ac_tester.DeprStarNew
cls()
@@ -4048,9 +4136,6 @@ def test_Class_repr(self):
self.assertRegex(repr(cls), r"")
def test_FunctionKind_repr(self):
- self.assertEqual(
-            repr(FunctionKind.INVALID), "<clinic.FunctionKind.INVALID>"
- )
self.assertEqual(
            repr(FunctionKind.CLASS_METHOD), "<clinic.FunctionKind.CLASS_METHOD>"
)
diff --git a/Lib/test/test_code.py b/Lib/test/test_code.py
index ecd1e82a6dbef9..fe8c672e71a7b5 100644
--- a/Lib/test/test_code.py
+++ b/Lib/test/test_code.py
@@ -141,7 +141,7 @@
ctypes = None
from test.support import (cpython_only,
check_impl_detail, requires_debug_ranges,
- gc_collect)
+ gc_collect, Py_GIL_DISABLED)
from test.support.script_helper import assert_python_ok
from test.support import threading_helper, import_helper
from test.support.bytecode_helper import instructions_with_positions
@@ -834,6 +834,7 @@ def test_free_called(self):
SetExtra(f.__code__, FREE_INDEX, ctypes.c_voidp(100))
del f
+ gc_collect() # For free-threaded build
self.assertEqual(LAST_FREED, 100)
def test_get_set(self):
@@ -865,13 +866,18 @@ def __init__(self, f, test):
def run(self):
del self.f
gc_collect()
- self.test.assertEqual(LAST_FREED, 500)
+ # gh-117683: In the free-threaded build, the code object's
+ # destructor may still be running concurrently in the main
+ # thread.
+ if not Py_GIL_DISABLED:
+ self.test.assertEqual(LAST_FREED, 500)
SetExtra(f.__code__, FREE_INDEX, ctypes.c_voidp(500))
tt = ThreadTest(f, self)
del f
tt.start()
tt.join()
+ gc_collect() # For free-threaded build
self.assertEqual(LAST_FREED, 500)
diff --git a/Lib/test/test_collections.py b/Lib/test/test_collections.py
index 1fb492ecebd668..955323cae88f92 100644
--- a/Lib/test/test_collections.py
+++ b/Lib/test/test_collections.py
@@ -542,7 +542,7 @@ def test_odd_sizes(self):
self.assertEqual(Dot(1)._replace(d=999), (999,))
self.assertEqual(Dot(1)._fields, ('d',))
- n = support.EXCEEDS_RECURSION_LIMIT
+ n = support.exceeds_recursion_limit()
names = list(set(''.join([choice(string.ascii_letters)
for j in range(10)]) for i in range(n)))
n = len(names)
diff --git a/Lib/test/test_compile.py b/Lib/test/test_compile.py
index 9d5f721806a884..638b6e96b5025b 100644
--- a/Lib/test/test_compile.py
+++ b/Lib/test/test_compile.py
@@ -13,7 +13,7 @@
import warnings
from test import support
from test.support import (script_helper, requires_debug_ranges,
- requires_specialization, Py_C_RECURSION_LIMIT)
+ requires_specialization, get_c_recursion_limit)
from test.support.bytecode_helper import instructions_with_positions
from test.support.os_helper import FakePath
@@ -114,7 +114,7 @@ def __getitem__(self, key):
@unittest.skipIf(support.is_wasi, "exhausts limited stack on WASI")
def test_extended_arg(self):
- repeat = int(Py_C_RECURSION_LIMIT * 0.9)
+ repeat = int(get_c_recursion_limit() * 0.9)
longexpr = 'x = x or ' + '-x' * repeat
g = {}
code = textwrap.dedent('''
@@ -634,9 +634,10 @@ def test_yet_more_evil_still_undecodable(self):
@unittest.skipIf(support.is_wasi, "exhausts limited stack on WASI")
def test_compiler_recursion_limit(self):
# Expected limit is Py_C_RECURSION_LIMIT
- fail_depth = Py_C_RECURSION_LIMIT + 1
- crash_depth = Py_C_RECURSION_LIMIT * 100
- success_depth = int(Py_C_RECURSION_LIMIT * 0.8)
+ limit = get_c_recursion_limit()
+ fail_depth = limit + 1
+ crash_depth = limit * 100
+ success_depth = int(limit * 0.8)
def check_limit(prefix, repeated, mode="single"):
expect_ok = prefix + repeated * success_depth
diff --git a/Lib/test/test_concurrent_futures/executor.py b/Lib/test/test_concurrent_futures/executor.py
index 6a79fe69ec37cf..3049bb74861439 100644
--- a/Lib/test/test_concurrent_futures/executor.py
+++ b/Lib/test/test_concurrent_futures/executor.py
@@ -83,24 +83,34 @@ def test_no_stale_references(self):
# references.
my_object = MyObject()
my_object_collected = threading.Event()
- my_object_callback = weakref.ref(
- my_object, lambda obj: my_object_collected.set())
- fut = self.executor.submit(my_object.my_method)
+ def set_event():
+ if Py_GIL_DISABLED:
+ # gh-117688 Avoid deadlock by setting the event in a
+ # background thread. The current thread may be in the middle
+ # of the my_object_collected.wait() call, which holds locks
+ # needed by my_object_collected.set().
+ threading.Thread(target=my_object_collected.set).start()
+ else:
+ my_object_collected.set()
+ my_object_callback = weakref.ref(my_object, lambda obj: set_event())
+ # Deliberately discarding the future.
+ self.executor.submit(my_object.my_method)
del my_object
if Py_GIL_DISABLED:
# Due to biased reference counting, my_object might only be
# deallocated while the thread that created it runs -- if the
# thread is paused waiting on an event, it may not merge the
- # refcount of the queued object. For that reason, we wait for the
- # task to finish (so that it's no longer referenced) and force a
- # GC to ensure that it is collected.
- fut.result() # Wait for the task to finish.
- support.gc_collect()
+ # refcount of the queued object. For that reason, we alternate
+ # between running the GC and waiting for the event.
+ wait_time = 0
+ collected = False
+ while not collected and wait_time <= support.SHORT_TIMEOUT:
+ support.gc_collect()
+ collected = my_object_collected.wait(timeout=1.0)
+ wait_time += 1.0
else:
- del fut # Deliberately discard the future.
-
- collected = my_object_collected.wait(timeout=support.SHORT_TIMEOUT)
+ collected = my_object_collected.wait(timeout=support.SHORT_TIMEOUT)
self.assertTrue(collected,
"Stale reference not collected within timeout.")
diff --git a/Lib/test/test_ctypes/test_refcounts.py b/Lib/test/test_ctypes/test_refcounts.py
index e6427d4a295b15..012722d8486218 100644
--- a/Lib/test/test_ctypes/test_refcounts.py
+++ b/Lib/test/test_ctypes/test_refcounts.py
@@ -4,6 +4,7 @@
import unittest
from test import support
from test.support import import_helper
+from test.support import script_helper
_ctypes_test = import_helper.import_module("_ctypes_test")
@@ -110,5 +111,18 @@ def func():
func()
+class ModuleIsolationTest(unittest.TestCase):
+ def test_finalize(self):
+ # check if gc_decref() succeeds
+ script = (
+ "import ctypes;"
+ "import sys;"
+ "del sys.modules['_ctypes'];"
+ "import _ctypes;"
+ "exit()"
+ )
+ script_helper.assert_python_ok("-c", script)
+
+
if __name__ == '__main__':
unittest.main()
diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py
index 097ca38e0b1ed8..93f66a721e8108 100644
--- a/Lib/test/test_descr.py
+++ b/Lib/test/test_descr.py
@@ -1687,10 +1687,10 @@ class D(C):
self.assertEqual(d.foo(1), (d, 1))
self.assertEqual(D.foo(d, 1), (d, 1))
sm = staticmethod(None)
- self.assertEqual(sm.__dict__, {'__doc__': None})
+ self.assertEqual(sm.__dict__, {'__doc__': None.__doc__})
sm.x = 42
self.assertEqual(sm.x, 42)
- self.assertEqual(sm.__dict__, {"x" : 42, '__doc__': None})
+ self.assertEqual(sm.__dict__, {"x" : 42, '__doc__': None.__doc__})
del sm.x
self.assertNotHasAttr(sm, "x")
@@ -4594,18 +4594,16 @@ def test_special_unbound_method_types(self):
def test_not_implemented(self):
# Testing NotImplemented...
# all binary methods should be able to return a NotImplemented
- import operator
def specialmethod(self, other):
return NotImplemented
def check(expr, x, y):
- try:
- exec(expr, {'x': x, 'y': y, 'operator': operator})
- except TypeError:
- pass
- else:
- self.fail("no TypeError from %r" % (expr,))
+ with (
+ self.subTest(expr=expr, x=x, y=y),
+ self.assertRaises(TypeError),
+ ):
+ exec(expr, {'x': x, 'y': y})
N1 = sys.maxsize + 1 # might trigger OverflowErrors instead of
# TypeErrors
@@ -4626,12 +4624,23 @@ def check(expr, x, y):
('__and__', 'x & y', 'x &= y'),
('__or__', 'x | y', 'x |= y'),
('__xor__', 'x ^ y', 'x ^= y')]:
- rname = '__r' + name[2:]
+ # Defines 'left' magic method:
A = type('A', (), {name: specialmethod})
a = A()
check(expr, a, a)
check(expr, a, N1)
check(expr, a, N2)
+ # Defines 'right' magic method:
+ rname = '__r' + name[2:]
+ B = type('B', (), {rname: specialmethod})
+ b = B()
+ check(expr, b, b)
+ check(expr, a, b)
+ check(expr, b, a)
+ check(expr, b, N1)
+ check(expr, b, N2)
+ check(expr, N1, b)
+ check(expr, N2, b)
if iexpr:
check(iexpr, a, a)
check(iexpr, a, N1)
diff --git a/Lib/test/test_dict.py b/Lib/test/test_dict.py
index 620d0ca4f4c2da..e5dba7cdc570a8 100644
--- a/Lib/test/test_dict.py
+++ b/Lib/test/test_dict.py
@@ -8,7 +8,7 @@
import unittest
import weakref
from test import support
-from test.support import import_helper, Py_C_RECURSION_LIMIT
+from test.support import import_helper, get_c_recursion_limit
class DictTest(unittest.TestCase):
@@ -596,7 +596,7 @@ def __repr__(self):
def test_repr_deep(self):
d = {}
- for i in range(Py_C_RECURSION_LIMIT + 1):
+ for i in range(get_c_recursion_limit() + 1):
d = {1: d}
self.assertRaises(RecursionError, repr, d)
diff --git a/Lib/test/test_dictviews.py b/Lib/test/test_dictviews.py
index cad568b6ac4c2d..d9881611c19c43 100644
--- a/Lib/test/test_dictviews.py
+++ b/Lib/test/test_dictviews.py
@@ -2,7 +2,7 @@
import copy
import pickle
import unittest
-from test.support import Py_C_RECURSION_LIMIT
+from test.support import get_c_recursion_limit
class DictSetTest(unittest.TestCase):
@@ -279,7 +279,7 @@ def test_recursive_repr(self):
def test_deeply_nested_repr(self):
d = {}
- for i in range(Py_C_RECURSION_LIMIT//2 + 100):
+ for i in range(get_c_recursion_limit()//2 + 100):
d = {42: d.values()}
self.assertRaises(RecursionError, repr, d)
diff --git a/Lib/test/test_doctest/test_doctest.py b/Lib/test/test_doctest/test_doctest.py
index 1457a3790b68be..6da6999cac7c0c 100644
--- a/Lib/test/test_doctest/test_doctest.py
+++ b/Lib/test/test_doctest/test_doctest.py
@@ -19,8 +19,12 @@
import _colorize # used in doctests
-if not support.has_subprocess_support:
- raise unittest.SkipTest("test_CLI requires subprocess support.")
+def doctest_skip_if(condition):
+ def decorator(func):
+ if condition and support.HAVE_DOCSTRINGS:
+ func.__doc__ = ">>> pass # doctest: +SKIP"
+ return func
+ return decorator
# NOTE: There are some additional tests relating to interaction with
@@ -467,7 +471,7 @@ def basics(): r"""
>>> tests = finder.find(sample_func)
>>> print(tests) # doctest: +ELLIPSIS
- []
+ []
The exact name depends on how test_doctest was invoked, so allow for
leading path components.
@@ -2574,6 +2578,20 @@ def test_look_in_unwrapped():
'one other test'
"""
+@doctest_skip_if(support.check_impl_detail(cpython=False))
+def test_wrapped_c_func():
+ """
+ # https://github.com/python/cpython/issues/117692
+ >>> import binascii
+ >>> from test.test_doctest.decorator_mod import decorator
+
+ >>> c_func_wrapped = decorator(binascii.b2a_hex)
+ >>> tests = doctest.DocTestFinder(exclude_empty=False).find(c_func_wrapped)
+ >>> for test in tests:
+ ... print(test.lineno, test.name)
+ None b2a_hex
+ """
+
def test_unittest_reportflags():
"""Default unittest reporting flags can be set to control reporting
@@ -3001,6 +3019,7 @@ def test_unicode(): """
"""
+@doctest_skip_if(not support.has_subprocess_support)
def test_CLI(): r"""
The doctest module can be used to run doctests against an arbitrary file.
These tests test this CLI functionality.
diff --git a/Lib/test/test_dynamic.py b/Lib/test/test_dynamic.py
index 3928bbab4423c2..0cb56a98f1c12a 100644
--- a/Lib/test/test_dynamic.py
+++ b/Lib/test/test_dynamic.py
@@ -4,7 +4,7 @@
import sys
import unittest
-from test.support import is_wasi, Py_DEBUG, swap_item, swap_attr
+from test.support import is_wasi, swap_item, swap_attr
class RebindBuiltinsTests(unittest.TestCase):
@@ -134,7 +134,7 @@ def test_eval_gives_lambda_custom_globals(self):
self.assertEqual(foo(), 7)
- @unittest.skipIf(is_wasi and Py_DEBUG, "stack depth too shallow in pydebug WASI")
+ @unittest.skipIf(is_wasi, "stack depth too shallow in WASI")
def test_load_global_specialization_failure_keeps_oparg(self):
# https://github.com/python/cpython/issues/91625
class MyGlobals(dict):
diff --git a/Lib/test/test_enum.py b/Lib/test/test_enum.py
index 6418d243db65ce..529dfc62eff680 100644
--- a/Lib/test/test_enum.py
+++ b/Lib/test/test_enum.py
@@ -5170,7 +5170,57 @@ class Unhashable:
self.assertIn('python', Unhashable)
self.assertEqual(Unhashable.name.value, 'python')
self.assertEqual(Unhashable.name.name, 'name')
- _test_simple_enum(Unhashable, Unhashable)
+ _test_simple_enum(CheckedUnhashable, Unhashable)
+ ##
+ class CheckedComplexStatus(IntEnum):
+ def __new__(cls, value, phrase, description=''):
+ obj = int.__new__(cls, value)
+ obj._value_ = value
+ obj.phrase = phrase
+ obj.description = description
+ return obj
+ CONTINUE = 100, 'Continue', 'Request received, please continue'
+ PROCESSING = 102, 'Processing'
+ EARLY_HINTS = 103, 'Early Hints'
+ SOME_HINTS = 103, 'Some Early Hints'
+ #
+ @_simple_enum(IntEnum)
+ class ComplexStatus:
+ def __new__(cls, value, phrase, description=''):
+ obj = int.__new__(cls, value)
+ obj._value_ = value
+ obj.phrase = phrase
+ obj.description = description
+ return obj
+ CONTINUE = 100, 'Continue', 'Request received, please continue'
+ PROCESSING = 102, 'Processing'
+ EARLY_HINTS = 103, 'Early Hints'
+ SOME_HINTS = 103, 'Some Early Hints'
+ _test_simple_enum(CheckedComplexStatus, ComplexStatus)
+ #
+ #
+ class CheckedComplexFlag(IntFlag):
+ def __new__(cls, value, label):
+ obj = int.__new__(cls, value)
+ obj._value_ = value
+ obj.label = label
+ return obj
+ SHIRT = 1, 'upper half'
+ VEST = 1, 'outer upper half'
+ PANTS = 2, 'lower half'
+ self.assertIs(CheckedComplexFlag.SHIRT, CheckedComplexFlag.VEST)
+ #
+ @_simple_enum(IntFlag)
+ class ComplexFlag:
+ def __new__(cls, value, label):
+ obj = int.__new__(cls, value)
+ obj._value_ = value
+ obj.label = label
+ return obj
+ SHIRT = 1, 'upper half'
+ VEST = 1, 'uppert half'
+ PANTS = 2, 'lower half'
+ _test_simple_enum(CheckedComplexFlag, ComplexFlag)
class MiscTestCase(unittest.TestCase):
diff --git a/Lib/test/test_exception_group.py b/Lib/test/test_exception_group.py
index 20122679223843..b4fc290b1f32b6 100644
--- a/Lib/test/test_exception_group.py
+++ b/Lib/test/test_exception_group.py
@@ -1,7 +1,7 @@
import collections.abc
import types
import unittest
-from test.support import Py_C_RECURSION_LIMIT
+from test.support import get_c_recursion_limit
class TestExceptionGroupTypeHierarchy(unittest.TestCase):
def test_exception_group_types(self):
@@ -460,7 +460,7 @@ def test_basics_split_by_predicate__match(self):
class DeepRecursionInSplitAndSubgroup(unittest.TestCase):
def make_deep_eg(self):
e = TypeError(1)
- for i in range(Py_C_RECURSION_LIMIT + 1):
+ for i in range(get_c_recursion_limit() + 1):
e = ExceptionGroup('eg', [e])
return e
diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py
index 6ad6acc61563e5..1224f143b5441f 100644
--- a/Lib/test/test_exceptions.py
+++ b/Lib/test/test_exceptions.py
@@ -1424,7 +1424,7 @@ def gen():
next(generator)
recursionlimit = sys.getrecursionlimit()
try:
- recurse(support.EXCEEDS_RECURSION_LIMIT)
+ recurse(support.exceeds_recursion_limit())
finally:
sys.setrecursionlimit(recursionlimit)
print('Done.')
@@ -1451,7 +1451,8 @@ def test_recursion_normalizing_infinite_exception(self):
"""
rc, out, err = script_helper.assert_python_failure("-c", code)
self.assertEqual(rc, 1)
- self.assertIn(b'RecursionError: maximum recursion depth exceeded', err)
+ expected = b'RecursionError: maximum recursion depth exceeded'
+ self.assertTrue(expected in err, msg=f"{expected!r} not found in {err[:3_000]!r}... (truncated)")
self.assertIn(b'Done.', out)
diff --git a/Lib/test/test_faulthandler.py b/Lib/test/test_faulthandler.py
index 200f34d18ca60a..61ec8fe3151af1 100644
--- a/Lib/test/test_faulthandler.py
+++ b/Lib/test/test_faulthandler.py
@@ -575,10 +575,12 @@ def run(self):
lineno = 8
else:
lineno = 10
+ # When the traceback is dumped, the waiter thread may be in the
+ # `self.running.set()` call or in `self.stop.wait()`.
regex = r"""
^Thread 0x[0-9a-f]+ \(most recent call first\):
(?: File ".*threading.py", line [0-9]+ in [_a-z]+
- ){{1,3}} File "", line 23 in run
+ ){{1,3}} File "", line (?:22|23) in run
File ".*threading.py", line [0-9]+ in _bootstrap_inner
File ".*threading.py", line [0-9]+ in _bootstrap
diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py
index 3ba4929dd1b133..c48c399a10c853 100644
--- a/Lib/test/test_functools.py
+++ b/Lib/test/test_functools.py
@@ -1867,7 +1867,7 @@ def fib(n):
return fib(n-1) + fib(n-2)
if not support.Py_DEBUG:
- depth = support.Py_C_RECURSION_LIMIT*2//7
+ depth = support.get_c_recursion_limit()*2//7
with support.infinite_recursion():
fib(depth)
if self.module == c_functools:
diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py
index 71c7fb0edebaa5..52681dc18cfb86 100644
--- a/Lib/test/test_gc.py
+++ b/Lib/test/test_gc.py
@@ -226,7 +226,9 @@ def test_function(self):
exec("def f(): pass\n", d)
gc.collect()
del d
- self.assertEqual(gc.collect(), 2)
+ # In the free-threaded build, the count returned by `gc.collect()`
+ # is 3 because it includes f's code object.
+ self.assertIn(gc.collect(), (2, 3))
def test_function_tp_clear_leaves_consistent_state(self):
# https://github.com/python/cpython/issues/91636
diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py
index 6e63a8872d9c6e..9d853d254db7c6 100644
--- a/Lib/test/test_httplib.py
+++ b/Lib/test/test_httplib.py
@@ -651,22 +651,25 @@ def is_server_error(self):
'Client must specify Content-Length')
PRECONDITION_FAILED = (412, 'Precondition Failed',
'Precondition in headers is false')
- REQUEST_ENTITY_TOO_LARGE = (413, 'Request Entity Too Large',
- 'Entity is too large')
- REQUEST_URI_TOO_LONG = (414, 'Request-URI Too Long',
- 'URI is too long')
+ CONTENT_TOO_LARGE = (413, 'Content Too Large',
+ 'Content is too large')
+ REQUEST_ENTITY_TOO_LARGE = CONTENT_TOO_LARGE
+ URI_TOO_LONG = (414, 'URI Too Long', 'URI is too long')
+ REQUEST_URI_TOO_LONG = URI_TOO_LONG
UNSUPPORTED_MEDIA_TYPE = (415, 'Unsupported Media Type',
'Entity body in unsupported format')
- REQUESTED_RANGE_NOT_SATISFIABLE = (416,
- 'Requested Range Not Satisfiable',
+ RANGE_NOT_SATISFIABLE = (416,
+ 'Range Not Satisfiable',
'Cannot satisfy request range')
+ REQUESTED_RANGE_NOT_SATISFIABLE = RANGE_NOT_SATISFIABLE
EXPECTATION_FAILED = (417, 'Expectation Failed',
'Expect condition could not be satisfied')
IM_A_TEAPOT = (418, 'I\'m a Teapot',
'Server refuses to brew coffee because it is a teapot.')
MISDIRECTED_REQUEST = (421, 'Misdirected Request',
'Server is not able to produce a response')
- UNPROCESSABLE_ENTITY = 422, 'Unprocessable Entity'
+ UNPROCESSABLE_CONTENT = 422, 'Unprocessable Content'
+ UNPROCESSABLE_ENTITY = UNPROCESSABLE_CONTENT
LOCKED = 423, 'Locked'
FAILED_DEPENDENCY = 424, 'Failed Dependency'
TOO_EARLY = 425, 'Too Early'
@@ -1718,13 +1721,17 @@ def test_client_constants(self):
'GONE',
'LENGTH_REQUIRED',
'PRECONDITION_FAILED',
+ 'CONTENT_TOO_LARGE',
'REQUEST_ENTITY_TOO_LARGE',
+ 'URI_TOO_LONG',
'REQUEST_URI_TOO_LONG',
'UNSUPPORTED_MEDIA_TYPE',
+ 'RANGE_NOT_SATISFIABLE',
'REQUESTED_RANGE_NOT_SATISFIABLE',
'EXPECTATION_FAILED',
'IM_A_TEAPOT',
'MISDIRECTED_REQUEST',
+ 'UNPROCESSABLE_CONTENT',
'UNPROCESSABLE_ENTITY',
'LOCKED',
'FAILED_DEPENDENCY',
diff --git a/Lib/test/test_httpservers.py b/Lib/test/test_httpservers.py
index d762ec6102ab8a..7c0edfea4f9778 100644
--- a/Lib/test/test_httpservers.py
+++ b/Lib/test/test_httpservers.py
@@ -1205,7 +1205,7 @@ def test_request_length(self):
# Issue #10714: huge request lines are discarded, to avoid Denial
# of Service attacks.
result = self.send_typical_request(b'GET ' + b'x' * 65537)
- self.assertEqual(result[0], b'HTTP/1.1 414 Request-URI Too Long\r\n')
+ self.assertEqual(result[0], b'HTTP/1.1 414 URI Too Long\r\n')
self.assertFalse(self.handler.get_called)
self.assertIsInstance(self.handler.requestline, str)
diff --git a/Lib/test/test_import/__init__.py b/Lib/test/test_import/__init__.py
index 6678548a0ffaca..4726619b08edc4 100644
--- a/Lib/test/test_import/__init__.py
+++ b/Lib/test/test_import/__init__.py
@@ -30,7 +30,8 @@
from test.support import os_helper
from test.support import (
STDLIB_DIR, swap_attr, swap_item, cpython_only, is_apple_mobile, is_emscripten,
- is_wasi, run_in_subinterp, run_in_subinterp_with_config, Py_TRACE_REFS)
+ is_wasi, run_in_subinterp, run_in_subinterp_with_config, Py_TRACE_REFS,
+ requires_gil_enabled, Py_GIL_DISABLED)
from test.support.import_helper import (
forget, make_legacy_pyc, unlink, unload, ready_to_import,
DirsOnSysPath, CleanImport, import_module)
@@ -158,6 +159,9 @@ def meth(self, _meth=meth):
finally:
restore__testsinglephase()
meth = cpython_only(meth)
+ # gh-117649: free-threaded build does not currently support single-phase
+ # init modules in subinterpreters.
+ meth = requires_gil_enabled(meth)
return unittest.skipIf(_testsinglephase is None,
'test requires _testsinglephase module')(meth)
@@ -1876,8 +1880,9 @@ def test_builtin_compat(self):
# since they still don't implement multi-phase init.
module = '_imp'
require_builtin(module)
- with self.subTest(f'{module}: not strict'):
- self.check_compatible_here(module, strict=False)
+ if not Py_GIL_DISABLED:
+ with self.subTest(f'{module}: not strict'):
+ self.check_compatible_here(module, strict=False)
with self.subTest(f'{module}: strict, not fresh'):
self.check_compatible_here(module, strict=True)
@@ -1888,8 +1893,9 @@ def test_frozen_compat(self):
require_frozen(module, skip=True)
if __import__(module).__spec__.origin != 'frozen':
raise unittest.SkipTest(f'{module} is unexpectedly not frozen')
- with self.subTest(f'{module}: not strict'):
- self.check_compatible_here(module, strict=False)
+ if not Py_GIL_DISABLED:
+ with self.subTest(f'{module}: not strict'):
+ self.check_compatible_here(module, strict=False)
with self.subTest(f'{module}: strict, not fresh'):
self.check_compatible_here(module, strict=True)
@@ -1908,8 +1914,9 @@ def test_single_init_extension_compat(self):
def test_multi_init_extension_compat(self):
module = '_testmultiphase'
require_extension(module)
- with self.subTest(f'{module}: not strict'):
- self.check_compatible_here(module, strict=False)
+ if not Py_GIL_DISABLED:
+ with self.subTest(f'{module}: not strict'):
+ self.check_compatible_here(module, strict=False)
with self.subTest(f'{module}: strict, not fresh'):
self.check_compatible_here(module, strict=True)
with self.subTest(f'{module}: strict, fresh'):
@@ -1930,8 +1937,9 @@ def test_multi_init_extension_non_isolated_compat(self):
self.check_incompatible_here(modname, filename, isolated=True)
with self.subTest(f'{modname}: not isolated'):
self.check_incompatible_here(modname, filename, isolated=False)
- with self.subTest(f'{modname}: not strict'):
- self.check_compatible_here(modname, filename, strict=False)
+ if not Py_GIL_DISABLED:
+ with self.subTest(f'{modname}: not strict'):
+ self.check_compatible_here(modname, filename, strict=False)
@unittest.skipIf(_testmultiphase is None, "test requires _testmultiphase module")
def test_multi_init_extension_per_interpreter_gil_compat(self):
@@ -1949,16 +1957,18 @@ def test_multi_init_extension_per_interpreter_gil_compat(self):
with self.subTest(f'{modname}: not isolated, strict'):
self.check_compatible_here(modname, filename,
strict=True, isolated=False)
- with self.subTest(f'{modname}: not isolated, not strict'):
- self.check_compatible_here(modname, filename,
- strict=False, isolated=False)
+ if not Py_GIL_DISABLED:
+ with self.subTest(f'{modname}: not isolated, not strict'):
+ self.check_compatible_here(modname, filename,
+ strict=False, isolated=False)
@unittest.skipIf(_testinternalcapi is None, "requires _testinternalcapi")
def test_python_compat(self):
module = 'threading'
require_pure_python(module)
- with self.subTest(f'{module}: not strict'):
- self.check_compatible_here(module, strict=False)
+ if not Py_GIL_DISABLED:
+ with self.subTest(f'{module}: not strict'):
+ self.check_compatible_here(module, strict=False)
with self.subTest(f'{module}: strict, not fresh'):
self.check_compatible_here(module, strict=True)
with self.subTest(f'{module}: strict, fresh'):
diff --git a/Lib/test/test_importlib/test_lazy.py b/Lib/test/test_importlib/test_lazy.py
index 4d2cc4eb62b67c..5c6e0303528906 100644
--- a/Lib/test/test_importlib/test_lazy.py
+++ b/Lib/test/test_importlib/test_lazy.py
@@ -196,6 +196,34 @@ def test_lazy_self_referential_modules(self):
test_load = module.loads('{}')
self.assertEqual(test_load, {})
+ def test_lazy_module_type_override(self):
+ # Verify that lazy loading works with a module that modifies
+ # its __class__ to be a custom type.
+
+ # Example module from PEP 726
+ module = self.new_module(source_code="""\
+import sys
+from types import ModuleType
+
+CONSTANT = 3.14
+
+class ImmutableModule(ModuleType):
+ def __setattr__(self, name, value):
+ raise AttributeError('Read-only attribute!')
+
+ def __delattr__(self, name):
+ raise AttributeError('Read-only attribute!')
+
+sys.modules[__name__].__class__ = ImmutableModule
+""")
+ sys.modules[TestingImporter.module_name] = module
+ self.assertIsInstance(module, util._LazyModule)
+ self.assertEqual(module.CONSTANT, 3.14)
+ with self.assertRaises(AttributeError):
+ module.CONSTANT = 2.71
+ with self.assertRaises(AttributeError):
+ del module.CONSTANT
+
if __name__ == '__main__':
unittest.main()
diff --git a/Lib/test/test_importlib/test_util.py b/Lib/test/test_importlib/test_util.py
index 115cb7a56c98f7..f0583c5fd0196f 100644
--- a/Lib/test/test_importlib/test_util.py
+++ b/Lib/test/test_importlib/test_util.py
@@ -682,6 +682,9 @@ def ensure_destroyed():
raise ImportError(excsnap.msg)
@unittest.skipIf(_testsinglephase is None, "test requires _testsinglephase module")
+ # gh-117649: single-phase init modules are not currently supported in
+ # subinterpreters in the free-threaded build
+ @support.expected_failure_if_gil_disabled()
def test_single_phase_init_module(self):
script = textwrap.dedent('''
from importlib.util import _incompatible_extension_module_restrictions
@@ -706,6 +709,7 @@ def test_single_phase_init_module(self):
self.run_with_own_gil(script)
@unittest.skipIf(_testmultiphase is None, "test requires _testmultiphase module")
+ @support.requires_gil_enabled("gh-117649: not supported in free-threaded build")
def test_incomplete_multi_phase_init_module(self):
# Apple extensions must be distributed as frameworks. This requires
# a specialist loader.
diff --git a/Lib/test/test_inspect/test_inspect.py b/Lib/test/test_inspect/test_inspect.py
index 6494842c217662..e8b09c413f12da 100644
--- a/Lib/test/test_inspect/test_inspect.py
+++ b/Lib/test/test_inspect/test_inspect.py
@@ -5221,10 +5221,17 @@ class TestSignatureDefinitions(unittest.TestCase):
# This test case provides a home for checking that particular APIs
# have signatures available for introspection
+ @staticmethod
+ def is_public(name):
+ return not name.startswith('_') or name.startswith('__') and name.endswith('__')
+
@cpython_only
@unittest.skipIf(MISSING_C_DOCSTRINGS,
"Signature information for builtins requires docstrings")
- def test_builtins_have_signatures(self):
+ def _test_module_has_signatures(self, module,
+ no_signature=(), unsupported_signature=(),
+ methods_no_signature={}, methods_unsupported_signature={},
+ good_exceptions=()):
# This checks all builtin callables in CPython have signatures
# A few have signatures Signature can't yet handle, so we skip those
# since they will have to wait until PEP 457 adds the required
@@ -5233,48 +5240,272 @@ def test_builtins_have_signatures(self):
# reasons, so we also skip those for the time being, but design
# the test to fail in order to indicate when it needs to be
# updated.
- no_signature = set()
- # These need PEP 457 groups
- needs_groups = {"range", "slice", "dir", "getattr",
- "next", "iter", "vars"}
- no_signature |= needs_groups
- # These have unrepresentable parameter default values of NULL
- needs_null = {"anext"}
- no_signature |= needs_null
- # These need *args support in Argument Clinic
- needs_varargs = {"min", "max", "__build_class__"}
- no_signature |= needs_varargs
- # These builtin types are expected to provide introspection info
- types_with_signatures = {
- 'bool', 'classmethod', 'complex', 'enumerate', 'filter', 'float',
- 'frozenset', 'list', 'map', 'memoryview', 'object', 'property',
- 'reversed', 'set', 'staticmethod', 'tuple', 'zip'
- }
+ no_signature = no_signature or set()
# Check the signatures we expect to be there
- ns = vars(builtins)
+ ns = vars(module)
+ try:
+ names = set(module.__all__)
+ except AttributeError:
+ names = set(name for name in ns if self.is_public(name))
for name, obj in sorted(ns.items()):
+ if name not in names:
+ continue
if not callable(obj):
continue
- # The builtin types haven't been converted to AC yet
- if isinstance(obj, type) and (name not in types_with_signatures):
- # Note that this also skips all the exception types
+ if (isinstance(obj, type) and
+ issubclass(obj, BaseException) and
+ name not in good_exceptions):
no_signature.add(name)
- if (name in no_signature):
- # Not yet converted
- continue
- if name in {'classmethod', 'staticmethod'}:
- # Bug gh-112006: inspect.unwrap() does not work with types
- # with the __wrapped__ data descriptor.
- continue
- with self.subTest(builtin=name):
- self.assertIsNotNone(inspect.signature(obj))
+ if name not in no_signature and name not in unsupported_signature:
+ with self.subTest('supported', builtin=name):
+ self.assertIsNotNone(inspect.signature(obj))
+ if isinstance(obj, type):
+ with self.subTest(type=name):
+ self._test_builtin_methods_have_signatures(obj,
+ methods_no_signature.get(name, ()),
+ methods_unsupported_signature.get(name, ()))
# Check callables that haven't been converted don't claim a signature
# This ensures this test will start failing as more signatures are
# added, so the affected items can be moved into the scope of the
# regression test above
- for name in no_signature - needs_null:
- with self.subTest(builtin=name):
- self.assertIsNone(ns[name].__text_signature__)
+ for name in no_signature:
+ with self.subTest('none', builtin=name):
+ obj = ns[name]
+ self.assertIsNone(obj.__text_signature__)
+ self.assertRaises(ValueError, inspect.signature, obj)
+ for name in unsupported_signature:
+ with self.subTest('unsupported', builtin=name):
+ obj = ns[name]
+ self.assertIsNotNone(obj.__text_signature__)
+ self.assertRaises(ValueError, inspect.signature, obj)
+
+ def _test_builtin_methods_have_signatures(self, cls, no_signature, unsupported_signature):
+ ns = vars(cls)
+ for name in ns:
+ obj = getattr(cls, name, None)
+ if not callable(obj) or isinstance(obj, type):
+ continue
+ if name not in no_signature and name not in unsupported_signature:
+ with self.subTest('supported', method=name):
+ self.assertIsNotNone(inspect.signature(obj))
+ for name in no_signature:
+ with self.subTest('none', method=name):
+ self.assertIsNone(getattr(cls, name).__text_signature__)
+ self.assertRaises(ValueError, inspect.signature, getattr(cls, name))
+ for name in unsupported_signature:
+ with self.subTest('unsupported', method=name):
+ self.assertIsNotNone(getattr(cls, name).__text_signature__)
+ self.assertRaises(ValueError, inspect.signature, getattr(cls, name))
+
+ def test_builtins_have_signatures(self):
+ no_signature = {'type', 'super', 'bytearray', 'bytes', 'dict', 'int', 'str'}
+ # These need PEP 457 groups
+ needs_groups = {"range", "slice", "dir", "getattr",
+ "next", "iter", "vars"}
+ no_signature |= needs_groups
+ # These have unrepresentable parameter default values of NULL
+ unsupported_signature = {"anext"}
+ # These need *args support in Argument Clinic
+ needs_varargs = {"min", "max", "__build_class__"}
+ no_signature |= needs_varargs
+
+ methods_no_signature = {
+ 'dict': {'update'},
+ 'object': {'__class__'},
+ }
+ methods_unsupported_signature = {
+ 'bytearray': {'count', 'endswith', 'find', 'hex', 'index', 'rfind', 'rindex', 'startswith'},
+ 'bytes': {'count', 'endswith', 'find', 'hex', 'index', 'rfind', 'rindex', 'startswith'},
+ 'dict': {'pop'},
+ 'int': {'__round__'},
+ 'memoryview': {'cast', 'hex'},
+ 'str': {'count', 'endswith', 'find', 'index', 'maketrans', 'rfind', 'rindex', 'startswith'},
+ }
+ self._test_module_has_signatures(builtins,
+ no_signature, unsupported_signature,
+ methods_no_signature, methods_unsupported_signature)
+
+ def test_types_module_has_signatures(self):
+ unsupported_signature = {'CellType'}
+ methods_no_signature = {
+ 'AsyncGeneratorType': {'athrow'},
+ 'CoroutineType': {'throw'},
+ 'GeneratorType': {'throw'},
+ }
+ self._test_module_has_signatures(types,
+ unsupported_signature=unsupported_signature,
+ methods_no_signature=methods_no_signature)
+
+ def test_sys_module_has_signatures(self):
+ no_signature = {'getsizeof', 'set_asyncgen_hooks'}
+ self._test_module_has_signatures(sys, no_signature)
+
+ def test_abc_module_has_signatures(self):
+ import abc
+ self._test_module_has_signatures(abc)
+
+ def test_atexit_module_has_signatures(self):
+ import atexit
+ self._test_module_has_signatures(atexit)
+
+ def test_codecs_module_has_signatures(self):
+ import codecs
+ methods_no_signature = {'StreamReader': {'charbuffertype'}}
+ self._test_module_has_signatures(codecs,
+ methods_no_signature=methods_no_signature)
+
+ def test_collections_module_has_signatures(self):
+ no_signature = {'OrderedDict', 'defaultdict'}
+ unsupported_signature = {'deque'}
+ methods_no_signature = {
+ 'OrderedDict': {'update'},
+ }
+ methods_unsupported_signature = {
+ 'deque': {'index'},
+ 'OrderedDict': {'pop'},
+ 'UserString': {'maketrans'},
+ }
+ self._test_module_has_signatures(collections,
+ no_signature, unsupported_signature,
+ methods_no_signature, methods_unsupported_signature)
+
+ def test_collections_abc_module_has_signatures(self):
+ import collections.abc
+ self._test_module_has_signatures(collections.abc)
+
+ def test_errno_module_has_signatures(self):
+ import errno
+ self._test_module_has_signatures(errno)
+
+ def test_faulthandler_module_has_signatures(self):
+ import faulthandler
+ unsupported_signature = {'dump_traceback', 'dump_traceback_later', 'enable'}
+ unsupported_signature |= {name for name in ['register']
+ if hasattr(faulthandler, name)}
+ self._test_module_has_signatures(faulthandler, unsupported_signature=unsupported_signature)
+
+ def test_functools_module_has_signatures(self):
+ no_signature = {'reduce'}
+ self._test_module_has_signatures(functools, no_signature)
+
+ def test_gc_module_has_signatures(self):
+ import gc
+ no_signature = {'set_threshold'}
+ self._test_module_has_signatures(gc, no_signature)
+
+ def test_io_module_has_signatures(self):
+ methods_no_signature = {
+ 'BufferedRWPair': {'read', 'peek', 'read1', 'readinto', 'readinto1', 'write'},
+ }
+ self._test_module_has_signatures(io,
+ methods_no_signature=methods_no_signature)
+
+ def test_itertools_module_has_signatures(self):
+ import itertools
+ no_signature = {'islice', 'repeat'}
+ self._test_module_has_signatures(itertools, no_signature)
+
+ def test_locale_module_has_signatures(self):
+ import locale
+ self._test_module_has_signatures(locale)
+
+ def test_marshal_module_has_signatures(self):
+ import marshal
+ self._test_module_has_signatures(marshal)
+
+ def test_operator_module_has_signatures(self):
+ import operator
+ self._test_module_has_signatures(operator)
+
+ def test_os_module_has_signatures(self):
+ unsupported_signature = {'chmod', 'utime'}
+ unsupported_signature |= {name for name in
+ ['get_terminal_size', 'posix_spawn', 'posix_spawnp',
+ 'register_at_fork', 'startfile']
+ if hasattr(os, name)}
+ self._test_module_has_signatures(os, unsupported_signature=unsupported_signature)
+
+ def test_pwd_module_has_signatures(self):
+ pwd = import_helper.import_module('pwd')
+ self._test_module_has_signatures(pwd)
+
+ def test_re_module_has_signatures(self):
+ import re
+ methods_no_signature = {'Match': {'group'}}
+ self._test_module_has_signatures(re,
+ methods_no_signature=methods_no_signature,
+ good_exceptions={'error', 'PatternError'})
+
+ def test_signal_module_has_signatures(self):
+ import signal
+ self._test_module_has_signatures(signal)
+
+ def test_stat_module_has_signatures(self):
+ import stat
+ self._test_module_has_signatures(stat)
+
+ def test_string_module_has_signatures(self):
+ import string
+ self._test_module_has_signatures(string)
+
+ def test_symtable_module_has_signatures(self):
+ import symtable
+ self._test_module_has_signatures(symtable)
+
+ def test_sysconfig_module_has_signatures(self):
+ import sysconfig
+ self._test_module_has_signatures(sysconfig)
+
+ def test_threading_module_has_signatures(self):
+ import threading
+ self._test_module_has_signatures(threading)
+
+ def test_thread_module_has_signatures(self):
+ import _thread
+ no_signature = {'RLock'}
+ self._test_module_has_signatures(_thread, no_signature)
+
+ def test_time_module_has_signatures(self):
+ no_signature = {
+ 'asctime', 'ctime', 'get_clock_info', 'gmtime', 'localtime',
+ 'strftime', 'strptime'
+ }
+ no_signature |= {name for name in
+ ['clock_getres', 'clock_settime', 'clock_settime_ns',
+ 'pthread_getcpuclockid']
+ if hasattr(time, name)}
+ self._test_module_has_signatures(time, no_signature)
+
+ def test_tokenize_module_has_signatures(self):
+ import tokenize
+ self._test_module_has_signatures(tokenize)
+
+ def test_tracemalloc_module_has_signatures(self):
+ import tracemalloc
+ self._test_module_has_signatures(tracemalloc)
+
+ def test_typing_module_has_signatures(self):
+ import typing
+ no_signature = {'ParamSpec', 'ParamSpecArgs', 'ParamSpecKwargs',
+ 'Text', 'TypeAliasType', 'TypeVar', 'TypeVarTuple'}
+ methods_no_signature = {
+ 'Generic': {'__class_getitem__', '__init_subclass__'},
+ }
+ methods_unsupported_signature = {
+ 'Text': {'count', 'find', 'index', 'rfind', 'rindex', 'startswith', 'endswith', 'maketrans'},
+ }
+ self._test_module_has_signatures(typing, no_signature,
+ methods_no_signature=methods_no_signature,
+ methods_unsupported_signature=methods_unsupported_signature)
+
+ def test_warnings_module_has_signatures(self):
+ unsupported_signature = {'warn', 'warn_explicit'}
+ self._test_module_has_signatures(warnings, unsupported_signature=unsupported_signature)
+
+ def test_weakref_module_has_signatures(self):
+ import weakref
+ no_signature = {'ReferenceType', 'ref'}
+ self._test_module_has_signatures(weakref, no_signature)
def test_python_function_override_signature(self):
def func(*args, **kwargs):
diff --git a/Lib/test/test_interpreters/test_api.py b/Lib/test/test_interpreters/test_api.py
index a326b39fd234c7..2bd8bee4063920 100644
--- a/Lib/test/test_interpreters/test_api.py
+++ b/Lib/test/test_interpreters/test_api.py
@@ -1,6 +1,7 @@
import os
import pickle
-from textwrap import dedent
+import sys
+from textwrap import dedent, indent
import threading
import types
import unittest
@@ -9,9 +10,23 @@
from test.support import import_helper
# Raise SkipTest if subinterpreters not supported.
_interpreters = import_helper.import_module('_xxsubinterpreters')
+from test.support import Py_GIL_DISABLED
from test.support import interpreters
-from test.support.interpreters import InterpreterNotFoundError
-from .utils import _captured_script, _run_output, _running, TestBase
+from test.support.interpreters import (
+ InterpreterError, InterpreterNotFoundError, ExecutionFailed,
+)
+from .utils import (
+ _captured_script, _run_output, _running, TestBase,
+ requires_test_modules, _testinternalcapi,
+)
+
+
+WHENCE_STR_UNKNOWN = 'unknown'
+WHENCE_STR_RUNTIME = 'runtime init'
+WHENCE_STR_LEGACY_CAPI = 'legacy C-API'
+WHENCE_STR_CAPI = 'C-API'
+WHENCE_STR_XI = 'cross-interpreter C-API'
+WHENCE_STR_STDLIB = '_interpreters module'
class ModuleTests(TestBase):
@@ -157,6 +172,18 @@ def test_idempotent(self):
id2 = id(interp)
self.assertNotEqual(id1, id2)
+ @requires_test_modules
+ def test_created_with_capi(self):
+ expected = _testinternalcapi.next_interpreter_id()
+ text = self.run_temp_from_capi(f"""
+ import {interpreters.__name__} as interpreters
+ interp = interpreters.get_current()
+ print((interp.id, interp.whence))
+ """)
+ interpid, whence = eval(text)
+ self.assertEqual(interpid, expected)
+ self.assertEqual(whence, WHENCE_STR_CAPI)
+
class ListAllTests(TestBase):
@@ -199,6 +226,33 @@ def test_idempotent(self):
for interp1, interp2 in zip(actual, expected):
self.assertIs(interp1, interp2)
+ def test_created_with_capi(self):
+ mainid, *_ = _interpreters.get_main()
+ interpid1 = _interpreters.create()
+ interpid2 = _interpreters.create()
+ interpid3 = _interpreters.create()
+ interpid4 = interpid3 + 1
+ interpid5 = interpid4 + 1
+ expected = [
+ (mainid, WHENCE_STR_RUNTIME),
+ (interpid1, WHENCE_STR_STDLIB),
+ (interpid2, WHENCE_STR_STDLIB),
+ (interpid3, WHENCE_STR_STDLIB),
+ (interpid4, WHENCE_STR_CAPI),
+ (interpid5, WHENCE_STR_STDLIB),
+ ]
+ expected2 = expected[:-2]
+ text = self.run_temp_from_capi(f"""
+ import {interpreters.__name__} as interpreters
+ interp = interpreters.create()
+ print(
+ [(i.id, i.whence) for i in interpreters.list_all()])
+ """)
+ res = eval(text)
+ res2 = [(i.id, i.whence) for i in interpreters.list_all()]
+ self.assertEqual(res, expected)
+ self.assertEqual(res2, expected2)
+
class InterpreterObjectTests(TestBase):
@@ -251,6 +305,38 @@ def test_id_readonly(self):
with self.assertRaises(AttributeError):
interp.id = 1_000_000
+ def test_whence(self):
+ main = interpreters.get_main()
+ interp = interpreters.create()
+
+ with self.subTest('main'):
+ self.assertEqual(main.whence, WHENCE_STR_RUNTIME)
+
+ with self.subTest('from _interpreters'):
+ self.assertEqual(interp.whence, WHENCE_STR_STDLIB)
+
+ with self.subTest('from C-API'):
+ text = self.run_temp_from_capi(f"""
+ import {interpreters.__name__} as interpreters
+ interp = interpreters.get_current()
+ print(repr(interp.whence))
+ """)
+ whence = eval(text)
+ self.assertEqual(whence, WHENCE_STR_CAPI)
+
+ with self.subTest('readonly'):
+ for value in [
+ None,
+ WHENCE_STR_UNKNOWN,
+ WHENCE_STR_RUNTIME,
+ WHENCE_STR_STDLIB,
+ WHENCE_STR_CAPI,
+ ]:
+ with self.assertRaises(AttributeError):
+ interp.whence = value
+ with self.assertRaises(AttributeError):
+ main.whence = value
+
def test_hashable(self):
interp = interpreters.create()
expected = hash(interp.id)
@@ -276,6 +362,7 @@ def test_main(self):
main = interpreters.get_main()
self.assertTrue(main.is_running())
+ # XXX Is this still true?
@unittest.skip('Fails on FreeBSD')
def test_subinterpreter(self):
interp = interpreters.create()
@@ -337,6 +424,55 @@ def task():
interp.exec('t.join()')
self.assertEqual(os.read(r_interp, 1), FINISHED)
+ def test_created_with_capi(self):
+ script = dedent(f"""
+ import {interpreters.__name__} as interpreters
+ interp = interpreters.get_current()
+ print(interp.is_running())
+ """)
+ def parse_results(text):
+ self.assertNotEqual(text, "")
+ try:
+ return eval(text)
+ except Exception:
+ raise Exception(repr(text))
+
+ with self.subTest('running __main__ (from self)'):
+ with self.interpreter_from_capi() as interpid:
+ text = self.run_from_capi(interpid, script, main=True)
+ running = parse_results(text)
+ self.assertTrue(running)
+
+ with self.subTest('running, but not __main__ (from self)'):
+ text = self.run_temp_from_capi(script)
+ running = parse_results(text)
+ self.assertFalse(running)
+
+ with self.subTest('running __main__ (from other)'):
+ with self.interpreter_obj_from_capi() as (interp, interpid):
+ before = interp.is_running()
+ with self.running_from_capi(interpid, main=True):
+ during = interp.is_running()
+ after = interp.is_running()
+ self.assertFalse(before)
+ self.assertTrue(during)
+ self.assertFalse(after)
+
+ with self.subTest('running, but not __main__ (from other)'):
+ with self.interpreter_obj_from_capi() as (interp, interpid):
+ before = interp.is_running()
+ with self.running_from_capi(interpid, main=False):
+ during = interp.is_running()
+ after = interp.is_running()
+ self.assertFalse(before)
+ self.assertFalse(during)
+ self.assertFalse(after)
+
+ with self.subTest('not running (from other)'):
+ with self.interpreter_obj_from_capi() as (interp, _):
+ running = interp.is_running()
+ self.assertFalse(running)
+
class TestInterpreterClose(TestBase):
@@ -364,11 +500,11 @@ def test_all(self):
def test_main(self):
main, = interpreters.list_all()
- with self.assertRaises(interpreters.InterpreterError):
+ with self.assertRaises(InterpreterError):
main.close()
def f():
- with self.assertRaises(interpreters.InterpreterError):
+ with self.assertRaises(InterpreterError):
main.close()
t = threading.Thread(target=f)
@@ -419,12 +555,13 @@ def f():
t.start()
t.join()
+ # XXX Is this still true?
@unittest.skip('Fails on FreeBSD')
def test_still_running(self):
main, = interpreters.list_all()
interp = interpreters.create()
with _running(interp):
- with self.assertRaises(interpreters.InterpreterError):
+ with self.assertRaises(InterpreterError):
interp.close()
self.assertTrue(interp.is_running())
@@ -459,6 +596,53 @@ def task():
self.assertEqual(os.read(r_interp, 1), FINISHED)
+ def test_created_with_capi(self):
+ script = dedent(f"""
+ import {interpreters.__name__} as interpreters
+ interp = interpreters.get_current()
+ interp.close()
+ """)
+
+ with self.subTest('running __main__ (from self)'):
+ with self.interpreter_from_capi() as interpid:
+ with self.assertRaisesRegex(ExecutionFailed,
+ 'InterpreterError.*unrecognized'):
+ self.run_from_capi(interpid, script, main=True)
+
+ with self.subTest('running, but not __main__ (from self)'):
+ with self.assertRaisesRegex(ExecutionFailed,
+ 'InterpreterError.*unrecognized'):
+ self.run_temp_from_capi(script)
+
+ with self.subTest('running __main__ (from other)'):
+ with self.interpreter_obj_from_capi() as (interp, interpid):
+ with self.running_from_capi(interpid, main=True):
+ with self.assertRaisesRegex(InterpreterError, 'unrecognized'):
+ interp.close()
+ # Make sure it wasn't closed.
+ self.assertTrue(
+ self.interp_exists(interpid))
+
+ # The rest would be skipped until we deal with running threads when
+ # interp.close() is called. However, the "whence" restrictions
+ # trigger first.
+
+ with self.subTest('running, but not __main__ (from other)'):
+ with self.interpreter_obj_from_capi() as (interp, interpid):
+ with self.running_from_capi(interpid, main=False):
+ with self.assertRaisesRegex(InterpreterError, 'unrecognized'):
+ interp.close()
+ # Make sure it wasn't closed.
+ self.assertTrue(
+ self.interp_exists(interpid))
+
+ with self.subTest('not running (from other)'):
+ with self.interpreter_obj_from_capi() as (interp, interpid):
+ with self.assertRaisesRegex(InterpreterError, 'unrecognized'):
+ interp.close()
+ self.assertTrue(
+ self.interp_exists(interpid))
+
class TestInterpreterPrepareMain(TestBase):
@@ -511,26 +695,44 @@ def test_not_shareable(self):
interp.prepare_main(spam={'spam': 'eggs', 'foo': 'bar'})
# Make sure neither was actually bound.
- with self.assertRaises(interpreters.ExecutionFailed):
+ with self.assertRaises(ExecutionFailed):
interp.exec('print(foo)')
- with self.assertRaises(interpreters.ExecutionFailed):
+ with self.assertRaises(ExecutionFailed):
interp.exec('print(spam)')
+ def test_running(self):
+ interp = interpreters.create()
+ interp.prepare_main({'spam': True})
+ with self.running(interp):
+ with self.assertRaisesRegex(InterpreterError, 'running'):
+ interp.prepare_main({'spam': False})
+ interp.exec('assert spam is True')
+
+ @requires_test_modules
+ def test_created_with_capi(self):
+ with self.interpreter_obj_from_capi() as (interp, interpid):
+ with self.assertRaisesRegex(InterpreterError, 'unrecognized'):
+ interp.prepare_main({'spam': True})
+ with self.assertRaisesRegex(ExecutionFailed, 'NameError'):
+ self.run_from_capi(interpid, 'assert spam is True')
+
class TestInterpreterExec(TestBase):
def test_success(self):
interp = interpreters.create()
- script, file = _captured_script('print("it worked!", end="")')
- with file:
+ script, results = _captured_script('print("it worked!", end="")')
+ with results:
interp.exec(script)
- out = file.read()
+ results = results.final()
+ results.raise_if_failed()
+ out = results.stdout
self.assertEqual(out, 'it worked!')
def test_failure(self):
interp = interpreters.create()
- with self.assertRaises(interpreters.ExecutionFailed):
+ with self.assertRaises(ExecutionFailed):
interp.exec('raise Exception')
def test_display_preserved_exception(self):
@@ -583,15 +785,17 @@ def script():
def test_in_thread(self):
interp = interpreters.create()
- script, file = _captured_script('print("it worked!", end="")')
- with file:
+ script, results = _captured_script('print("it worked!", end="")')
+ with results:
def f():
interp.exec(script)
t = threading.Thread(target=f)
t.start()
t.join()
- out = file.read()
+ results = results.final()
+ results.raise_if_failed()
+ out = results.stdout
self.assertEqual(out, 'it worked!')
@@ -618,6 +822,7 @@ def test_fork(self):
content = file.read()
self.assertEqual(content, expected)
+ # XXX Is this still true?
@unittest.skip('Fails on FreeBSD')
def test_already_running(self):
interp = interpreters.create()
@@ -666,6 +871,11 @@ def task():
self.assertEqual(os.read(r_interp, 1), RAN)
self.assertEqual(os.read(r_interp, 1), FINISHED)
+ def test_created_with_capi(self):
+ with self.interpreter_obj_from_capi() as (interp, _):
+ with self.assertRaisesRegex(InterpreterError, 'unrecognized'):
+ interp.exec('raise Exception("it worked!")')
+
# test_xxsubinterpreters covers the remaining
# Interpreter.exec() behavior.
@@ -830,7 +1040,7 @@ def test_call(self):
raise Exception((args, kwargs))
interp.call(callable)
- with self.assertRaises(interpreters.ExecutionFailed):
+ with self.assertRaises(ExecutionFailed):
interp.call(call_func_failure)
def test_call_in_thread(self):
@@ -983,7 +1193,7 @@ def test_new_config(self):
allow_exec=True,
allow_threads=True,
allow_daemon_threads=True,
- check_multi_interp_extensions=False,
+ check_multi_interp_extensions=bool(Py_GIL_DISABLED),
gil='shared',
),
'empty': types.SimpleNamespace(
@@ -1064,46 +1274,111 @@ def test_new_config(self):
with self.assertRaises(ValueError):
_interpreters.new_config(gil=value)
- def test_get_config(self):
- # This test overlaps with
- # test.test_capi.test_misc.InterpreterConfigTests.
+ def test_get_main(self):
+ interpid, whence = _interpreters.get_main()
+ self.assertEqual(interpid, 0)
+ self.assertEqual(whence, _interpreters.WHENCE_RUNTIME)
+ self.assertEqual(
+ _interpreters.whence(interpid),
+ _interpreters.WHENCE_RUNTIME)
+ def test_get_current(self):
with self.subTest('main'):
- expected = _interpreters.new_config('legacy')
- expected.gil = 'own'
- interpid = _interpreters.get_main()
- config = _interpreters.get_config(interpid)
- self.assert_ns_equal(config, expected)
+ main, *_ = _interpreters.get_main()
+ interpid, whence = _interpreters.get_current()
+ self.assertEqual(interpid, main)
+ self.assertEqual(whence, _interpreters.WHENCE_RUNTIME)
+
+ script = f"""
+ import {_interpreters.__name__} as _interpreters
+ interpid, whence = _interpreters.get_current()
+ print((interpid, whence))
+ """
+ def parse_stdout(text):
+ interpid, whence = eval(text)
+ return interpid, whence
+
+ with self.subTest('from _interpreters'):
+ orig = _interpreters.create()
+ text = self.run_and_capture(orig, script)
+ interpid, whence = parse_stdout(text)
+ self.assertEqual(interpid, orig)
+ self.assertEqual(whence, _interpreters.WHENCE_STDLIB)
+
+ with self.subTest('from C-API'):
+ last = 0
+ for id, *_ in _interpreters.list_all():
+ last = max(last, id)
+ expected = last + 1
+ text = self.run_temp_from_capi(script)
+ interpid, whence = parse_stdout(text)
+ self.assertEqual(interpid, expected)
+ self.assertEqual(whence, _interpreters.WHENCE_CAPI)
+
+ def test_list_all(self):
+ mainid, *_ = _interpreters.get_main()
+ interpid1 = _interpreters.create()
+ interpid2 = _interpreters.create()
+ interpid3 = _interpreters.create()
+ expected = [
+ (mainid, _interpreters.WHENCE_RUNTIME),
+ (interpid1, _interpreters.WHENCE_STDLIB),
+ (interpid2, _interpreters.WHENCE_STDLIB),
+ (interpid3, _interpreters.WHENCE_STDLIB),
+ ]
- with self.subTest('isolated'):
- expected = _interpreters.new_config('isolated')
- interpid = _interpreters.create('isolated')
- config = _interpreters.get_config(interpid)
- self.assert_ns_equal(config, expected)
+ with self.subTest('main'):
+ res = _interpreters.list_all()
+ self.assertEqual(res, expected)
+
+ with self.subTest('via interp from _interpreters'):
+ text = self.run_and_capture(interpid2, f"""
+ import {_interpreters.__name__} as _interpreters
+ print(
+ _interpreters.list_all())
+ """)
- with self.subTest('legacy'):
- expected = _interpreters.new_config('legacy')
- interpid = _interpreters.create('legacy')
- config = _interpreters.get_config(interpid)
- self.assert_ns_equal(config, expected)
+ res = eval(text)
+ self.assertEqual(res, expected)
+
+ with self.subTest('via interp from C-API'):
+ interpid4 = interpid3 + 1
+ interpid5 = interpid4 + 1
+ expected2 = expected + [
+ (interpid4, _interpreters.WHENCE_CAPI),
+ (interpid5, _interpreters.WHENCE_STDLIB),
+ ]
+ expected3 = expected + [
+ (interpid5, _interpreters.WHENCE_STDLIB),
+ ]
+ text = self.run_temp_from_capi(f"""
+ import {_interpreters.__name__} as _interpreters
+ _interpreters.create()
+ print(
+ _interpreters.list_all())
+ """)
+ res2 = eval(text)
+ res3 = _interpreters.list_all()
+ self.assertEqual(res2, expected2)
+ self.assertEqual(res3, expected3)
def test_create(self):
isolated = _interpreters.new_config('isolated')
legacy = _interpreters.new_config('legacy')
default = isolated
- with self.subTest('no arg'):
+ with self.subTest('no args'):
interpid = _interpreters.create()
config = _interpreters.get_config(interpid)
self.assert_ns_equal(config, default)
- with self.subTest('arg: None'):
+ with self.subTest('config: None'):
interpid = _interpreters.create(None)
config = _interpreters.get_config(interpid)
self.assert_ns_equal(config, default)
- with self.subTest('arg: \'empty\''):
- with self.assertRaises(interpreters.InterpreterError):
+ with self.subTest('config: \'empty\''):
+ with self.assertRaises(InterpreterError):
# The "empty" config isn't viable on its own.
_interpreters.create('empty')
@@ -1121,6 +1396,7 @@ def test_create(self):
with self.subTest('custom'):
orig = _interpreters.new_config('empty')
orig.use_main_obmalloc = True
+ orig.check_multi_interp_extensions = bool(Py_GIL_DISABLED)
orig.gil = 'shared'
interpid = _interpreters.create(orig)
config = _interpreters.get_config(interpid)
@@ -1138,6 +1414,249 @@ def test_create(self):
with self.assertRaises(ValueError):
_interpreters.create(orig)
+ with self.subTest('whence'):
+ interpid = _interpreters.create()
+ self.assertEqual(
+ _interpreters.whence(interpid),
+ _interpreters.WHENCE_STDLIB)
+
+ @requires_test_modules
+ def test_destroy(self):
+ with self.subTest('from _interpreters'):
+ interpid = _interpreters.create()
+ before = [id for id, *_ in _interpreters.list_all()]
+ _interpreters.destroy(interpid)
+ after = [id for id, *_ in _interpreters.list_all()]
+
+ self.assertIn(interpid, before)
+ self.assertNotIn(interpid, after)
+ self.assertFalse(
+ self.interp_exists(interpid))
+
+ with self.subTest('main'):
+ interpid, *_ = _interpreters.get_main()
+ with self.assertRaises(InterpreterError):
+ # It is the current interpreter.
+ _interpreters.destroy(interpid)
+
+ with self.subTest('from C-API'):
+ interpid = _testinternalcapi.create_interpreter()
+ with self.assertRaisesRegex(InterpreterError, 'unrecognized'):
+ _interpreters.destroy(interpid, restrict=True)
+ self.assertTrue(
+ self.interp_exists(interpid))
+ _interpreters.destroy(interpid)
+ self.assertFalse(
+ self.interp_exists(interpid))
+
+ def test_get_config(self):
+ # This test overlaps with
+ # test.test_capi.test_misc.InterpreterConfigTests.
+
+ with self.subTest('main'):
+ expected = _interpreters.new_config('legacy')
+ expected.gil = 'own'
+ if Py_GIL_DISABLED:
+ expected.check_multi_interp_extensions = False
+ interpid, *_ = _interpreters.get_main()
+ config = _interpreters.get_config(interpid)
+ self.assert_ns_equal(config, expected)
+
+ with self.subTest('isolated'):
+ expected = _interpreters.new_config('isolated')
+ interpid = _interpreters.create('isolated')
+ config = _interpreters.get_config(interpid)
+ self.assert_ns_equal(config, expected)
+
+ with self.subTest('legacy'):
+ expected = _interpreters.new_config('legacy')
+ interpid = _interpreters.create('legacy')
+ config = _interpreters.get_config(interpid)
+ self.assert_ns_equal(config, expected)
+
+ with self.subTest('from C-API'):
+ orig = _interpreters.new_config('isolated')
+ with self.interpreter_from_capi(orig) as interpid:
+ with self.assertRaisesRegex(InterpreterError, 'unrecognized'):
+ _interpreters.get_config(interpid, restrict=True)
+ config = _interpreters.get_config(interpid)
+ self.assert_ns_equal(config, orig)
+
+ @requires_test_modules
+ def test_whence(self):
+ with self.subTest('main'):
+ interpid, *_ = _interpreters.get_main()
+ whence = _interpreters.whence(interpid)
+ self.assertEqual(whence, _interpreters.WHENCE_RUNTIME)
+
+ with self.subTest('stdlib'):
+ interpid = _interpreters.create()
+ whence = _interpreters.whence(interpid)
+ self.assertEqual(whence, _interpreters.WHENCE_STDLIB)
+
+ for orig, name in {
+ _interpreters.WHENCE_UNKNOWN: 'not ready',
+ _interpreters.WHENCE_LEGACY_CAPI: 'legacy C-API',
+ _interpreters.WHENCE_CAPI: 'C-API',
+ _interpreters.WHENCE_XI: 'cross-interpreter C-API',
+ }.items():
+ with self.subTest(f'from C-API ({orig}: {name})'):
+ with self.interpreter_from_capi(whence=orig) as interpid:
+ whence = _interpreters.whence(interpid)
+ self.assertEqual(whence, orig)
+
+ with self.subTest('from C-API, running'):
+ text = self.run_temp_from_capi(dedent(f"""
+ import {_interpreters.__name__} as _interpreters
+ interpid, *_ = _interpreters.get_current()
+ print(_interpreters.whence(interpid))
+ """),
+ config=True)
+ whence = eval(text)
+ self.assertEqual(whence, _interpreters.WHENCE_CAPI)
+
+ with self.subTest('from legacy C-API, running'):
+ ...
+ text = self.run_temp_from_capi(dedent(f"""
+ import {_interpreters.__name__} as _interpreters
+ interpid, *_ = _interpreters.get_current()
+ print(_interpreters.whence(interpid))
+ """),
+ config=False)
+ whence = eval(text)
+ self.assertEqual(whence, _interpreters.WHENCE_LEGACY_CAPI)
+
+ def test_is_running(self):
+ def check(interpid, expected):
+ with self.assertRaisesRegex(InterpreterError, 'unrecognized'):
+ _interpreters.is_running(interpid, restrict=True)
+ running = _interpreters.is_running(interpid)
+ self.assertIs(running, expected)
+
+ with self.subTest('from _interpreters (running)'):
+ interpid = _interpreters.create()
+ with self.running(interpid):
+ running = _interpreters.is_running(interpid)
+ self.assertTrue(running)
+
+ with self.subTest('from _interpreters (not running)'):
+ interpid = _interpreters.create()
+ running = _interpreters.is_running(interpid)
+ self.assertFalse(running)
+
+ with self.subTest('main'):
+ interpid, *_ = _interpreters.get_main()
+ check(interpid, True)
+
+ with self.subTest('from C-API (running __main__)'):
+ with self.interpreter_from_capi() as interpid:
+ with self.running_from_capi(interpid, main=True):
+ check(interpid, True)
+
+ with self.subTest('from C-API (running, but not __main__)'):
+ with self.interpreter_from_capi() as interpid:
+ with self.running_from_capi(interpid, main=False):
+ check(interpid, False)
+
+ with self.subTest('from C-API (not running)'):
+ with self.interpreter_from_capi() as interpid:
+ check(interpid, False)
+
+ def test_exec(self):
+ with self.subTest('run script'):
+ interpid = _interpreters.create()
+ script, results = _captured_script('print("it worked!", end="")')
+ with results:
+ exc = _interpreters.exec(interpid, script)
+ results = results.final()
+ results.raise_if_failed()
+ out = results.stdout
+ self.assertEqual(out, 'it worked!')
+
+ with self.subTest('uncaught exception'):
+ interpid = _interpreters.create()
+ script, results = _captured_script("""
+ raise Exception('uh-oh!')
+ print("it worked!", end="")
+ """)
+ with results:
+ exc = _interpreters.exec(interpid, script)
+ out = results.stdout()
+ self.assertEqual(out, '')
+ self.assert_ns_equal(exc, types.SimpleNamespace(
+ type=types.SimpleNamespace(
+ __name__='Exception',
+ __qualname__='Exception',
+ __module__='builtins',
+ ),
+ msg='uh-oh!',
+ # We check these in other tests.
+ formatted=exc.formatted,
+ errdisplay=exc.errdisplay,
+ ))
+
+ with self.subTest('from C-API'):
+ with self.interpreter_from_capi() as interpid:
+ with self.assertRaisesRegex(InterpreterError, 'unrecognized'):
+ _interpreters.exec(interpid, 'raise Exception("it worked!")',
+ restrict=True)
+ exc = _interpreters.exec(interpid, 'raise Exception("it worked!")')
+ self.assertIsNot(exc, None)
+ self.assertEqual(exc.msg, 'it worked!')
+
+ def test_call(self):
+ with self.subTest('no args'):
+ interpid = _interpreters.create()
+ exc = _interpreters.call(interpid, call_func_return_shareable)
+ self.assertIs(exc, None)
+
+ with self.subTest('uncaught exception'):
+ interpid = _interpreters.create()
+ exc = _interpreters.call(interpid, call_func_failure)
+ self.assertEqual(exc, types.SimpleNamespace(
+ type=types.SimpleNamespace(
+ __name__='Exception',
+ __qualname__='Exception',
+ __module__='builtins',
+ ),
+ msg='spam!',
+ # We check these in other tests.
+ formatted=exc.formatted,
+ errdisplay=exc.errdisplay,
+ ))
+
+ @requires_test_modules
+ def test_set___main___attrs(self):
+ with self.subTest('from _interpreters'):
+ interpid = _interpreters.create()
+ before1 = _interpreters.exec(interpid, 'assert spam == \'eggs\'')
+ before2 = _interpreters.exec(interpid, 'assert ham == 42')
+ self.assertEqual(before1.type.__name__, 'NameError')
+ self.assertEqual(before2.type.__name__, 'NameError')
+
+ _interpreters.set___main___attrs(interpid, dict(
+ spam='eggs',
+ ham=42,
+ ))
+ after1 = _interpreters.exec(interpid, 'assert spam == \'eggs\'')
+ after2 = _interpreters.exec(interpid, 'assert ham == 42')
+ after3 = _interpreters.exec(interpid, 'assert spam == 42')
+ self.assertIs(after1, None)
+ self.assertIs(after2, None)
+ self.assertEqual(after3.type.__name__, 'AssertionError')
+
+ with self.subTest('from C-API'):
+ with self.interpreter_from_capi() as interpid:
+ with self.assertRaisesRegex(InterpreterError, 'unrecognized'):
+ _interpreters.set___main___attrs(interpid, {'spam': True},
+ restrict=True)
+ _interpreters.set___main___attrs(interpid, {'spam': True})
+ rc = _testinternalcapi.exec_interpreter(
+ interpid,
+ 'assert spam is True',
+ )
+ self.assertEqual(rc, 0)
+
if __name__ == '__main__':
# Test needs to be a package, so we can do relative imports.
diff --git a/Lib/test/test_interpreters/utils.py b/Lib/test/test_interpreters/utils.py
index 5ade6762ea24ef..8e475816f04de4 100644
--- a/Lib/test/test_interpreters/utils.py
+++ b/Lib/test/test_interpreters/utils.py
@@ -1,30 +1,347 @@
+from collections import namedtuple
import contextlib
+import json
+import io
import os
import os.path
+import pickle
+import queue
+#import select
import subprocess
import sys
import tempfile
-from textwrap import dedent
+from textwrap import dedent, indent
import threading
import types
import unittest
+import warnings
from test import support
-from test.support import os_helper
+# We would use test.support.import_helper.import_module(),
+# but the indirect import of test.support.os_helper causes refleaks.
+try:
+ import _xxsubinterpreters as _interpreters
+except ImportError as exc:
+ raise unittest.SkipTest(str(exc))
from test.support import interpreters
-def _captured_script(script):
- r, w = os.pipe()
- indented = script.replace('\n', '\n ')
- wrapped = dedent(f"""
- import contextlib
- with open({w}, 'w', encoding='utf-8') as spipe:
- with contextlib.redirect_stdout(spipe):
+try:
+ import _testinternalcapi
+ import _testcapi
+except ImportError:
+ _testinternalcapi = None
+ _testcapi = None
+
+def requires_test_modules(func):
+ return unittest.skipIf(_testinternalcapi is None, "test requires _testinternalcapi module")(func)
+
+
+def _dump_script(text):
+ lines = text.splitlines()
+ print()
+ print('-' * 20)
+ for i, line in enumerate(lines, 1):
+ print(f' {i:>{len(str(len(lines)))}} {line}')
+ print('-' * 20)
+
+
+def _close_file(file):
+ try:
+ if hasattr(file, 'close'):
+ file.close()
+ else:
+ os.close(file)
+ except OSError as exc:
+ if exc.errno != 9:
+ raise # re-raise
+ # It was closed already.
+
+
+def pack_exception(exc=None):
+ captured = _interpreters.capture_exception(exc)
+ data = dict(captured.__dict__)
+ data['type'] = dict(captured.type.__dict__)
+ return json.dumps(data)
+
+
+def unpack_exception(packed):
+ try:
+ data = json.loads(packed)
+ except json.decoder.JSONDecodeError:
+ warnings.warn('incomplete exception data', RuntimeWarning)
+ print(packed if isinstance(packed, str) else packed.decode('utf-8'))
+ return None
+ exc = types.SimpleNamespace(**data)
+ exc.type = types.SimpleNamespace(**exc.type)
+ return exc;
+
+
+class CapturingResults:
+
+ STDIO = dedent("""\
+ with open({w_pipe}, 'wb', buffering=0) as _spipe_{stream}:
+ _captured_std{stream} = io.StringIO()
+ with contextlib.redirect_std{stream}(_captured_std{stream}):
+ #########################
+ # begin wrapped script
+
+ {indented}
+
+ # end wrapped script
+ #########################
+ text = _captured_std{stream}.getvalue()
+ _spipe_{stream}.write(text.encode('utf-8'))
+ """)[:-1]
+ EXC = dedent("""\
+ with open({w_pipe}, 'wb', buffering=0) as _spipe_exc:
+ try:
+ #########################
+ # begin wrapped script
+
{indented}
- """)
- return wrapped, open(r, encoding='utf-8')
+
+ # end wrapped script
+ #########################
+ except Exception as exc:
+ text = _interp_utils.pack_exception(exc)
+ _spipe_exc.write(text.encode('utf-8'))
+ """)[:-1]
+
+ @classmethod
+ def wrap_script(cls, script, *, stdout=True, stderr=False, exc=False):
+ script = dedent(script).strip(os.linesep)
+ imports = [
+ f'import {__name__} as _interp_utils',
+ ]
+ wrapped = script
+
+ # Handle exc.
+ if exc:
+ exc = os.pipe()
+ r_exc, w_exc = exc
+ indented = wrapped.replace('\n', '\n ')
+ wrapped = cls.EXC.format(
+ w_pipe=w_exc,
+ indented=indented,
+ )
+ else:
+ exc = None
+
+ # Handle stdout.
+ if stdout:
+ imports.extend([
+ 'import contextlib, io',
+ ])
+ stdout = os.pipe()
+ r_out, w_out = stdout
+ indented = wrapped.replace('\n', '\n ')
+ wrapped = cls.STDIO.format(
+ w_pipe=w_out,
+ indented=indented,
+ stream='out',
+ )
+ else:
+ stdout = None
+
+ # Handle stderr.
+ if stderr == 'stdout':
+ stderr = None
+ elif stderr:
+ if not stdout:
+ imports.extend([
+ 'import contextlib, io',
+ ])
+ stderr = os.pipe()
+ r_err, w_err = stderr
+ indented = wrapped.replace('\n', '\n ')
+ wrapped = cls.STDIO.format(
+ w_pipe=w_err,
+ indented=indented,
+ stream='err',
+ )
+ else:
+ stderr = None
+
+ if wrapped == script:
+ raise NotImplementedError
+ else:
+ for line in imports:
+ wrapped = f'{line}{os.linesep}{wrapped}'
+
+ results = cls(stdout, stderr, exc)
+ return wrapped, results
+
+ def __init__(self, out, err, exc):
+ self._rf_out = None
+ self._rf_err = None
+ self._rf_exc = None
+ self._w_out = None
+ self._w_err = None
+ self._w_exc = None
+
+ if out is not None:
+ r_out, w_out = out
+ self._rf_out = open(r_out, 'rb', buffering=0)
+ self._w_out = w_out
+
+ if err is not None:
+ r_err, w_err = err
+ self._rf_err = open(r_err, 'rb', buffering=0)
+ self._w_err = w_err
+
+ if exc is not None:
+ r_exc, w_exc = exc
+ self._rf_exc = open(r_exc, 'rb', buffering=0)
+ self._w_exc = w_exc
+
+ self._buf_out = b''
+ self._buf_err = b''
+ self._buf_exc = b''
+ self._exc = None
+
+ self._closed = False
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *args):
+ self.close()
+
+ @property
+ def closed(self):
+ return self._closed
+
+ def close(self):
+ if self._closed:
+ return
+ self._closed = True
+
+ if self._w_out is not None:
+ _close_file(self._w_out)
+ self._w_out = None
+ if self._w_err is not None:
+ _close_file(self._w_err)
+ self._w_err = None
+ if self._w_exc is not None:
+ _close_file(self._w_exc)
+ self._w_exc = None
+
+ self._capture()
+
+ if self._rf_out is not None:
+ _close_file(self._rf_out)
+ self._rf_out = None
+ if self._rf_err is not None:
+ _close_file(self._rf_err)
+ self._rf_err = None
+ if self._rf_exc is not None:
+ _close_file(self._rf_exc)
+ self._rf_exc = None
+
+ def _capture(self):
+ # Ideally this is called only after the script finishes
+ # (and thus has closed the write end of the pipe).
+ if self._rf_out is not None:
+ chunk = self._rf_out.read(100)
+ while chunk:
+ self._buf_out += chunk
+ chunk = self._rf_out.read(100)
+ if self._rf_err is not None:
+ chunk = self._rf_err.read(100)
+ while chunk:
+ self._buf_err += chunk
+ chunk = self._rf_err.read(100)
+ if self._rf_exc is not None:
+ chunk = self._rf_exc.read(100)
+ while chunk:
+ self._buf_exc += chunk
+ chunk = self._rf_exc.read(100)
+
+ def _unpack_stdout(self):
+ return self._buf_out.decode('utf-8')
+
+ def _unpack_stderr(self):
+ return self._buf_err.decode('utf-8')
+
+ def _unpack_exc(self):
+ if self._exc is not None:
+ return self._exc
+ if not self._buf_exc:
+ return None
+ self._exc = unpack_exception(self._buf_exc)
+ return self._exc
+
+ def stdout(self):
+ if self.closed:
+ return self.final().stdout
+ self._capture()
+ return self._unpack_stdout()
+
+ def stderr(self):
+ if self.closed:
+ return self.final().stderr
+ self._capture()
+ return self._unpack_stderr()
+
+ def exc(self):
+ if self.closed:
+ return self.final().exc
+ self._capture()
+ return self._unpack_exc()
+
+ def final(self, *, force=False):
+ try:
+ return self._final
+ except AttributeError:
+ if not self._closed:
+ if not force:
+ raise Exception('no final results available yet')
+ else:
+ return CapturedResults.Proxy(self)
+ self._final = CapturedResults(
+ self._unpack_stdout(),
+ self._unpack_stderr(),
+ self._unpack_exc(),
+ )
+ return self._final
+
+
+class CapturedResults(namedtuple('CapturedResults', 'stdout stderr exc')):
+
+ class Proxy:
+ def __init__(self, capturing):
+ self._capturing = capturing
+ def _finish(self):
+ if self._capturing is None:
+ return
+ self._final = self._capturing.final()
+ self._capturing = None
+ def __iter__(self):
+ self._finish()
+ yield from self._final
+ def __len__(self):
+ self._finish()
+ return len(self._final)
+ def __getattr__(self, name):
+ self._finish()
+ if name.startswith('_'):
+ raise AttributeError(name)
+ return getattr(self._final, name)
+
+ def raise_if_failed(self):
+ if self.exc is not None:
+ raise interpreters.ExecutionFailed(self.exc)
+
+
+def _captured_script(script, *, stdout=True, stderr=False, exc=False):
+ return CapturingResults.wrap_script(
+ script,
+ stdout=stdout,
+ stderr=stderr,
+ exc=exc,
+ )
def clean_up_interpreters():
@@ -33,17 +350,17 @@ def clean_up_interpreters():
continue
try:
interp.close()
- except RuntimeError:
+ except _interpreters.InterpreterError:
pass # already destroyed
def _run_output(interp, request, init=None):
- script, rpipe = _captured_script(request)
- with rpipe:
+ script, results = _captured_script(request)
+ with results:
if init:
interp.prepare_main(init)
interp.exec(script)
- return rpipe.read()
+ return results.stdout()
@contextlib.contextmanager
@@ -85,6 +402,7 @@ def ensure_closed(fd):
def temp_dir(self):
tempdir = tempfile.mkdtemp()
tempdir = os.path.realpath(tempdir)
+ from test.support import os_helper
self.addCleanup(lambda: os_helper.rmtree(tempdir))
return tempdir
@@ -175,3 +493,197 @@ def assert_ns_equal(self, ns1, ns2, msg=None):
diff = f'namespace({diff})'
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
+
+ def _run_string(self, interp, script):
+ wrapped, results = _captured_script(script, exc=False)
+ #_dump_script(wrapped)
+ with results:
+ if isinstance(interp, interpreters.Interpreter):
+ interp.exec(script)
+ else:
+ err = _interpreters.run_string(interp, wrapped)
+ if err is not None:
+ return None, err
+ return results.stdout(), None
+
+ def run_and_capture(self, interp, script):
+ text, err = self._run_string(interp, script)
+ if err is not None:
+ raise interpreters.ExecutionFailed(err)
+ else:
+ return text
+
+ def interp_exists(self, interpid):
+ try:
+ _interpreters.whence(interpid)
+ except _interpreters.InterpreterNotFoundError:
+ return False
+ else:
+ return True
+
+ @requires_test_modules
+ @contextlib.contextmanager
+ def interpreter_from_capi(self, config=None, whence=None):
+ if config is False:
+ if whence is None:
+ whence = _interpreters.WHENCE_LEGACY_CAPI
+ else:
+ assert whence in (_interpreters.WHENCE_LEGACY_CAPI,
+ _interpreters.WHENCE_UNKNOWN), repr(whence)
+ config = None
+ elif config is True:
+ config = _interpreters.new_config('default')
+ elif config is None:
+ if whence not in (
+ _interpreters.WHENCE_LEGACY_CAPI,
+ _interpreters.WHENCE_UNKNOWN,
+ ):
+ config = _interpreters.new_config('legacy')
+ elif isinstance(config, str):
+ config = _interpreters.new_config(config)
+
+ if whence is None:
+ whence = _interpreters.WHENCE_XI
+
+ interpid = _testinternalcapi.create_interpreter(config, whence=whence)
+ try:
+ yield interpid
+ finally:
+ try:
+ _testinternalcapi.destroy_interpreter(interpid)
+ except _interpreters.InterpreterNotFoundError:
+ pass
+
+ @contextlib.contextmanager
+ def interpreter_obj_from_capi(self, config='legacy'):
+ with self.interpreter_from_capi(config) as interpid:
+ interp = interpreters.Interpreter(
+ interpid,
+ _whence=_interpreters.WHENCE_CAPI,
+ _ownsref=False,
+ )
+ yield interp, interpid
+
+ @contextlib.contextmanager
+ def capturing(self, script):
+ wrapped, capturing = _captured_script(script, stdout=True, exc=True)
+ #_dump_script(wrapped)
+ with capturing:
+ yield wrapped, capturing.final(force=True)
+
+ @requires_test_modules
+ def run_from_capi(self, interpid, script, *, main=False):
+ with self.capturing(script) as (wrapped, results):
+ rc = _testinternalcapi.exec_interpreter(interpid, wrapped, main=main)
+ assert rc == 0, rc
+ results.raise_if_failed()
+ return results.stdout
+
+ @contextlib.contextmanager
+ def _running(self, run_interp, exec_interp):
+ token = b'\0'
+ r_in, w_in = self.pipe()
+ r_out, w_out = self.pipe()
+
+ def close():
+ _close_file(r_in)
+ _close_file(w_in)
+ _close_file(r_out)
+ _close_file(w_out)
+
+ # Start running (and wait).
+ script = dedent(f"""
+ import os
+ try:
+ # handshake
+ token = os.read({r_in}, 1)
+ os.write({w_out}, token)
+ # Wait for the "done" message.
+ os.read({r_in}, 1)
+ except BrokenPipeError:
+ pass
+ except OSError as exc:
+ if exc.errno != 9:
+ raise # re-raise
+ # It was closed already.
+ """)
+ failed = None
+ def run():
+ nonlocal failed
+ try:
+ run_interp(script)
+ except Exception as exc:
+ failed = exc
+ close()
+ t = threading.Thread(target=run)
+ t.start()
+
+ # handshake
+ try:
+ os.write(w_in, token)
+ token2 = os.read(r_out, 1)
+ assert token2 == token, (token2, token)
+ except OSError:
+ t.join()
+ if failed is not None:
+ raise failed
+
+ # CM __exit__()
+ try:
+ try:
+ yield
+ finally:
+ # Send "done".
+ os.write(w_in, b'\0')
+ finally:
+ close()
+ t.join()
+ if failed is not None:
+ raise failed
+
+ @contextlib.contextmanager
+ def running(self, interp):
+ if isinstance(interp, int):
+ interpid = interp
+ def exec_interp(script):
+ exc = _interpreters.exec(interpid, script)
+ assert exc is None, exc
+ run_interp = exec_interp
+ else:
+ def run_interp(script):
+ text = self.run_and_capture(interp, script)
+ assert text == '', repr(text)
+ def exec_interp(script):
+ interp.exec(script)
+ with self._running(run_interp, exec_interp):
+ yield
+
+ @requires_test_modules
+ @contextlib.contextmanager
+ def running_from_capi(self, interpid, *, main=False):
+ def run_interp(script):
+ text = self.run_from_capi(interpid, script, main=main)
+ assert text == '', repr(text)
+ def exec_interp(script):
+ rc = _testinternalcapi.exec_interpreter(interpid, script)
+ assert rc == 0, rc
+ with self._running(run_interp, exec_interp):
+ yield
+
+ @requires_test_modules
+ def run_temp_from_capi(self, script, config='legacy'):
+ if config is False:
+ # Force using Py_NewInterpreter().
+ run_in_interp = (lambda s, c: _testcapi.run_in_subinterp(s))
+ config = None
+ else:
+ run_in_interp = _testinternalcapi.run_in_subinterp_with_config
+ if config is True:
+ config = 'default'
+ if isinstance(config, str):
+ config = _interpreters.new_config(config)
+ with self.capturing(script) as (wrapped, results):
+ rc = run_in_interp(wrapped, config)
+ assert rc == 0, rc
+ results.raise_if_failed()
+ return results.stdout
diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py
index 4ea1ef15c0661d..c3dc0572c58c27 100644
--- a/Lib/test/test_io.py
+++ b/Lib/test/test_io.py
@@ -40,7 +40,8 @@
from test.support.script_helper import (
assert_python_ok, assert_python_failure, run_python_until_end)
from test.support import (
- import_helper, is_apple, os_helper, skip_if_sanitizer, threading_helper, warnings_helper
+ import_helper, is_apple, os_helper, skip_if_sanitizer, threading_helper, warnings_helper,
+ skip_on_s390x
)
from test.support.os_helper import FakePath
@@ -1700,6 +1701,9 @@ class CBufferedReaderTest(BufferedReaderTest, SizeofTest):
@skip_if_sanitizer(memory=True, address=True, thread=True,
reason="sanitizer defaults to crashing "
"instead of returning NULL for malloc failure.")
+ # gh-117755: The test allocates 9 223 372 036 854 775 807 bytes
+ # (0x7fffffffffffffff) and mimalloc fails with a division by zero on s390x.
+ @skip_on_s390x
def test_constructor(self):
BufferedReaderTest.test_constructor(self)
# The allocation can succeed on 32-bit builds, e.g. with more
@@ -2068,6 +2072,9 @@ class CBufferedWriterTest(BufferedWriterTest, SizeofTest):
@skip_if_sanitizer(memory=True, address=True, thread=True,
reason="sanitizer defaults to crashing "
"instead of returning NULL for malloc failure.")
+ # gh-117755: The test allocates 9 223 372 036 854 775 807 bytes
+ # (0x7fffffffffffffff) and mimalloc fails with a division by zero on s390x.
+ @skip_on_s390x
def test_constructor(self):
BufferedWriterTest.test_constructor(self)
# The allocation can succeed on 32-bit builds, e.g. with more
@@ -2590,6 +2597,9 @@ class CBufferedRandomTest(BufferedRandomTest, SizeofTest):
@skip_if_sanitizer(memory=True, address=True, thread=True,
reason="sanitizer defaults to crashing "
"instead of returning NULL for malloc failure.")
+ # gh-117755: The test allocates 9 223 372 036 854 775 807 bytes
+ # (0x7fffffffffffffff) and mimalloc fails with a division by zero on s390x.
+ @skip_on_s390x
def test_constructor(self):
BufferedRandomTest.test_constructor(self)
# The allocation can succeed on 32-bit builds, e.g. with more
diff --git a/Lib/test/test_isinstance.py b/Lib/test/test_isinstance.py
index 7f759fb3317146..95a119ba683e09 100644
--- a/Lib/test/test_isinstance.py
+++ b/Lib/test/test_isinstance.py
@@ -352,7 +352,7 @@ def blowstack(fxn, arg, compare_to):
# Make sure that calling isinstance with a deeply nested tuple for its
# argument will raise RecursionError eventually.
tuple_arg = (compare_to,)
- for cnt in range(support.EXCEEDS_RECURSION_LIMIT):
+ for cnt in range(support.exceeds_recursion_limit()):
tuple_arg = (tuple_arg,)
fxn(arg, tuple_arg)
diff --git a/Lib/test/test_json/test_decode.py b/Lib/test/test_json/test_decode.py
index 124045b13184b3..79fb239b35d3f2 100644
--- a/Lib/test/test_json/test_decode.py
+++ b/Lib/test/test_json/test_decode.py
@@ -8,14 +8,34 @@
class TestDecode:
def test_decimal(self):
rval = self.loads('1.1', parse_float=decimal.Decimal)
- self.assertTrue(isinstance(rval, decimal.Decimal))
+ self.assertIsInstance(rval, decimal.Decimal)
self.assertEqual(rval, decimal.Decimal('1.1'))
def test_float(self):
rval = self.loads('1', parse_int=float)
- self.assertTrue(isinstance(rval, float))
+ self.assertIsInstance(rval, float)
self.assertEqual(rval, 1.0)
+ def test_bytes(self):
+ self.assertEqual(self.loads(b"1"), 1)
+
+ def test_parse_constant(self):
+ for constant, expected in [
+ ("Infinity", "INFINITY"),
+ ("-Infinity", "-INFINITY"),
+ ("NaN", "NAN"),
+ ]:
+ self.assertEqual(
+ self.loads(constant, parse_constant=str.upper), expected
+ )
+
+ def test_constant_invalid_case(self):
+ for constant in [
+ "nan", "NAN", "naN", "infinity", "INFINITY", "inFiniTy"
+ ]:
+ with self.assertRaises(self.JSONDecodeError):
+ self.loads(constant)
+
def test_empty_objects(self):
self.assertEqual(self.loads('{}'), {})
self.assertEqual(self.loads('[]'), [])
@@ -88,7 +108,8 @@ def test_string_with_utf8_bom(self):
self.json.load(StringIO(bom_json))
self.assertIn('BOM', str(cm.exception))
# make sure that the BOM is not detected in the middle of a string
- bom_in_str = '"{}"'.format(''.encode('utf-8-sig').decode('utf-8'))
+ bom = ''.encode('utf-8-sig').decode('utf-8')
+ bom_in_str = f'"{bom}"'
self.assertEqual(self.loads(bom_in_str), '\ufeff')
self.assertEqual(self.json.load(StringIO(bom_in_str)), '\ufeff')
diff --git a/Lib/test/test_json/test_encode_basestring_ascii.py b/Lib/test/test_json/test_encode_basestring_ascii.py
index 4bbc6c71489a83..6a39b72a09df35 100644
--- a/Lib/test/test_json/test_encode_basestring_ascii.py
+++ b/Lib/test/test_json/test_encode_basestring_ascii.py
@@ -23,8 +23,7 @@ def test_encode_basestring_ascii(self):
for input_string, expect in CASES:
result = self.json.encoder.encode_basestring_ascii(input_string)
self.assertEqual(result, expect,
- '{0!r} != {1!r} for {2}({3!r})'.format(
- result, expect, fname, input_string))
+ f'{result!r} != {expect!r} for {fname}({input_string!r})')
def test_ordered_dict(self):
# See issue 6105
diff --git a/Lib/test/test_json/test_fail.py b/Lib/test/test_json/test_fail.py
index d6bce605e21463..a74240f1107de3 100644
--- a/Lib/test/test_json/test_fail.py
+++ b/Lib/test/test_json/test_fail.py
@@ -89,7 +89,7 @@ def test_failures(self):
except self.JSONDecodeError:
pass
else:
- self.fail("Expected failure for fail{0}.json: {1!r}".format(idx, doc))
+ self.fail(f"Expected failure for fail{idx}.json: {doc!r}")
def test_non_string_keys_dict(self):
data = {'a' : 1, (1, 2) : 2}
diff --git a/Lib/test/test_json/test_unicode.py b/Lib/test/test_json/test_unicode.py
index 2e8bba2775256a..68629cceeb9be9 100644
--- a/Lib/test/test_json/test_unicode.py
+++ b/Lib/test/test_json/test_unicode.py
@@ -20,12 +20,17 @@ def test_encoding4(self):
def test_encoding5(self):
u = '\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
j = self.dumps(u, ensure_ascii=False)
- self.assertEqual(j, '"{0}"'.format(u))
+ self.assertEqual(j, f'"{u}"')
def test_encoding6(self):
u = '\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
j = self.dumps([u], ensure_ascii=False)
- self.assertEqual(j, '["{0}"]'.format(u))
+ self.assertEqual(j, f'["{u}"]')
+
+ def test_encoding7(self):
+ u = '\N{GREEK SMALL LETTER ALPHA}\N{GREEK CAPITAL LETTER OMEGA}'
+ j = self.dumps(u + "\n", ensure_ascii=False)
+ self.assertEqual(j, f'"{u}\\n"')
def test_big_unicode_encode(self):
u = '\U0001d120'
@@ -34,13 +39,13 @@ def test_big_unicode_encode(self):
def test_big_unicode_decode(self):
u = 'z\U0001d120x'
- self.assertEqual(self.loads('"' + u + '"'), u)
+ self.assertEqual(self.loads(f'"{u}"'), u)
self.assertEqual(self.loads('"z\\ud834\\udd20x"'), u)
def test_unicode_decode(self):
for i in range(0, 0xd7ff):
u = chr(i)
- s = '"\\u{0:04x}"'.format(i)
+ s = f'"\\u{i:04x}"'
self.assertEqual(self.loads(s), u)
def test_unicode_preservation(self):
diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py
index 7b5bc6b6a74180..3f0b363066df2c 100644
--- a/Lib/test/test_logging.py
+++ b/Lib/test/test_logging.py
@@ -60,6 +60,7 @@
import weakref
from http.server import HTTPServer, BaseHTTPRequestHandler
+from unittest.mock import patch
from urllib.parse import urlparse, parse_qs
from socketserver import (ThreadingUDPServer, DatagramRequestHandler,
ThreadingTCPServer, StreamRequestHandler)
@@ -2191,7 +2192,8 @@ def test_output(self):
self.handled.clear()
msg = "sp\xe4m"
logger.error(msg)
- self.handled.wait()
+ handled = self.handled.wait(support.SHORT_TIMEOUT)
+ self.assertTrue(handled, "HTTP request timed out")
self.assertEqual(self.log_data.path, '/frob')
self.assertEqual(self.command, method)
if method == 'GET':
@@ -4551,6 +4553,44 @@ def test_issue_89047(self):
s = f.format(r)
self.assertNotIn('.1000', s)
+ def test_msecs_has_no_floating_point_precision_loss(self):
+ # See issue gh-102402
+ tests = (
+ # time_ns is approx. 2023-03-04 04:25:20 UTC
+ # (time_ns, expected_msecs_value)
+ (1_677_902_297_100_000_000, 100.0), # exactly 100ms
+ (1_677_903_920_999_998_503, 999.0), # check truncating doesn't round
+ (1_677_903_920_000_998_503, 0.0), # check truncating doesn't round
+ )
+ for ns, want in tests:
+ with patch('time.time_ns') as patched_ns:
+ patched_ns.return_value = ns
+ record = logging.makeLogRecord({'msg': 'test'})
+ self.assertEqual(record.msecs, want)
+ self.assertEqual(record.created, ns / 1e9)
+
+ def test_relativeCreated_has_higher_precision(self):
+ # See issue gh-102402
+ ns = 1_677_903_920_000_998_503 # approx. 2023-03-04 04:25:20 UTC
+ offsets_ns = (200, 500, 12_354, 99_999, 1_677_903_456_999_123_456)
+ orig_modules = import_helper._save_and_remove_modules(['logging'])
+ try:
+ with patch("time.time_ns") as patched_ns:
+ # mock for module import
+ patched_ns.return_value = ns
+ import logging
+ for offset_ns in offsets_ns:
+ new_ns = ns + offset_ns
+ # mock for log record creation
+ patched_ns.return_value = new_ns
+ record = logging.makeLogRecord({'msg': 'test'})
+ self.assertAlmostEqual(record.created, new_ns / 1e9, places=6)
+ # After PR gh-102412, precision (places) increases from 3 to 7
+ self.assertAlmostEqual(record.relativeCreated, offset_ns / 1e6, places=7)
+ finally:
+ import_helper._save_and_remove_modules(['logging'])
+ sys.modules.update(orig_modules)
+
class TestBufferingFormatter(logging.BufferingFormatter):
def formatHeader(self, records):
diff --git a/Lib/test/test_marshal.py b/Lib/test/test_marshal.py
index 615568e6af2102..64ee1ba867d592 100644
--- a/Lib/test/test_marshal.py
+++ b/Lib/test/test_marshal.py
@@ -118,7 +118,7 @@ def test_code(self):
def test_many_codeobjects(self):
# Issue2957: bad recursion count on code objects
# more than MAX_MARSHAL_STACK_DEPTH
- count = support.EXCEEDS_RECURSION_LIMIT
+ count = support.exceeds_recursion_limit()
codes = (ExceptionTestCase.test_exceptions.__code__,) * count
marshal.loads(marshal.dumps(codes))
diff --git a/Lib/test/test_pathlib/test_pathlib.py b/Lib/test/test_pathlib/test_pathlib.py
index 651d66656cbd61..5fd1a41cbee17b 100644
--- a/Lib/test/test_pathlib/test_pathlib.py
+++ b/Lib/test/test_pathlib/test_pathlib.py
@@ -1263,6 +1263,13 @@ def test_glob_dot(self):
self.assertEqual(
set(P('.').glob('**/*/*')), {P("dirD/fileD")})
+ def test_glob_inaccessible(self):
+ P = self.cls
+ p = P(self.base, "mydir1", "mydir2")
+ p.mkdir(parents=True)
+ p.parent.chmod(0)
+ self.assertEqual(set(p.glob('*')), set())
+
def test_rglob_pathlike(self):
P = self.cls
p = P(self.base, "dirC")
diff --git a/Lib/test/test_pathlib/test_pathlib_abc.py b/Lib/test/test_pathlib/test_pathlib_abc.py
index 336115cf0fead2..aadecbc142cca6 100644
--- a/Lib/test/test_pathlib/test_pathlib_abc.py
+++ b/Lib/test/test_pathlib/test_pathlib_abc.py
@@ -8,6 +8,7 @@
from pathlib._abc import UnsupportedOperation, ParserBase, PurePathBase, PathBase
import posixpath
+from test.support import is_wasi
from test.support.os_helper import TESTFN
@@ -1429,10 +1430,10 @@ def __repr__(self):
return "{}({!r})".format(self.__class__.__name__, self.as_posix())
def stat(self, *, follow_symlinks=True):
- if follow_symlinks:
- path = str(self.resolve())
+ if follow_symlinks or self.name in ('', '.', '..'):
+ path = str(self.resolve(strict=True))
else:
- path = str(self.parent.resolve() / self.name)
+ path = str(self.parent.resolve(strict=True) / self.name)
if path in self._files:
st_mode = stat.S_IFREG
elif path in self._directories:
@@ -1741,8 +1742,9 @@ def _check(glob, expected):
def test_glob_posix(self):
P = self.cls
p = P(self.base)
+ q = p / "FILEa"
given = set(p.glob("FILEa"))
- expect = set()
+ expect = {q} if q.exists() else set()
self.assertEqual(given, expect)
self.assertEqual(set(p.glob("FILEa*")), set())
@@ -1753,8 +1755,6 @@ def test_glob_windows(self):
self.assertEqual(set(p.glob("FILEa")), { P(self.base, "fileA") })
self.assertEqual(set(p.glob("*a\\")), { P(self.base, "dirA/") })
self.assertEqual(set(p.glob("F*a")), { P(self.base, "fileA") })
- self.assertEqual(set(map(str, p.glob("FILEa"))), {f"{p}\\fileA"})
- self.assertEqual(set(map(str, p.glob("F*a"))), {f"{p}\\fileA"})
def test_glob_empty_pattern(self):
P = self.cls
@@ -1857,8 +1857,9 @@ def _check(path, glob, expected):
def test_rglob_posix(self):
P = self.cls
p = P(self.base, "dirC")
+ q = p / "dirD" / "FILEd"
given = set(p.rglob("FILEd"))
- expect = set()
+ expect = {q} if q.exists() else set()
self.assertEqual(given, expect)
self.assertEqual(set(p.rglob("FILEd*")), set())
@@ -1868,7 +1869,6 @@ def test_rglob_windows(self):
p = P(self.base, "dirC")
self.assertEqual(set(p.rglob("FILEd")), { P(self.base, "dirC/dirD/fileD") })
self.assertEqual(set(p.rglob("*\\")), { P(self.base, "dirC/dirD/") })
- self.assertEqual(set(map(str, p.rglob("FILEd"))), {f"{p}\\dirD\\fileD"})
@needs_symlinks
def test_rglob_recurse_symlinks_common(self):
@@ -1921,6 +1921,8 @@ def test_rglob_symlink_loop(self):
}
self.assertEqual(given, {p / x for x in expect})
+ # See https://github.com/WebAssembly/wasi-filesystem/issues/26
+ @unittest.skipIf(is_wasi, "WASI resolution of '..' parts doesn't match POSIX")
def test_glob_dotdot(self):
# ".." is not special in globs.
P = self.cls
@@ -1931,7 +1933,11 @@ def test_glob_dotdot(self):
self.assertEqual(set(p.glob("dirA/../file*")), { P(self.base, "dirA/../fileA") })
self.assertEqual(set(p.glob("dirA/../file*/..")), set())
self.assertEqual(set(p.glob("../xyzzy")), set())
- self.assertEqual(set(p.glob("xyzzy/..")), set())
+ if self.cls.parser is posixpath:
+ self.assertEqual(set(p.glob("xyzzy/..")), set())
+ else:
+ # ".." segments are normalized first on Windows, so this path is stat()able.
+ self.assertEqual(set(p.glob("xyzzy/..")), { P(self.base, "xyzzy", "..") })
self.assertEqual(set(p.glob("/".join([".."] * 50))), { P(self.base, *[".."] * 50)})
@needs_symlinks
diff --git a/Lib/test/test_peg_generator/test_c_parser.py b/Lib/test/test_peg_generator/test_c_parser.py
index 9e273e99e387a4..1411e55dd0f293 100644
--- a/Lib/test/test_peg_generator/test_c_parser.py
+++ b/Lib/test/test_peg_generator/test_c_parser.py
@@ -13,9 +13,7 @@
from test.support import os_helper, import_helper
from test.support.script_helper import assert_python_ok
-_py_cflags_nodist = sysconfig.get_config_var("PY_CFLAGS_NODIST")
-_pgo_flag = sysconfig.get_config_var("PGO_PROF_USE_FLAG")
-if _pgo_flag and _py_cflags_nodist and _pgo_flag in _py_cflags_nodist:
+if support.check_cflags_pgo():
raise unittest.SkipTest("peg_generator test disabled under PGO build")
test_tools.skip_if_missing("peg_generator")
diff --git a/Lib/test/test_posixpath.py b/Lib/test/test_posixpath.py
index 807f985f7f4df7..7c122e6204b100 100644
--- a/Lib/test/test_posixpath.py
+++ b/Lib/test/test_posixpath.py
@@ -56,6 +56,8 @@ def test_join(self):
self.assertEqual(fn(b"/foo", b"bar", b"baz"), b"/foo/bar/baz")
self.assertEqual(fn(b"/foo/", b"bar/", b"baz/"), b"/foo/bar/baz/")
+ self.assertEqual(fn("a", ""), "a/")
+ self.assertEqual(fn("a", "", ""), "a/")
self.assertEqual(fn("a", "b"), "a/b")
self.assertEqual(fn("a", "b/"), "a/b/")
self.assertEqual(fn("a/", "b"), "a/b")
@@ -484,7 +486,7 @@ def test_realpath_symlink_loops(self):
self.assertEqual(realpath(ABSTFN+"1/../x"), dirname(ABSTFN) + "/x")
os.symlink(ABSTFN+"x", ABSTFN+"y")
self.assertEqual(realpath(ABSTFN+"1/../" + basename(ABSTFN) + "y"),
- ABSTFN + "y")
+ ABSTFN + "x")
self.assertEqual(realpath(ABSTFN+"1/../" + basename(ABSTFN) + "1"),
ABSTFN + "1")
@@ -650,6 +652,7 @@ def test_relpath(self):
(real_getcwd, os.getcwd) = (os.getcwd, lambda: r"/home/user/bar")
try:
curdir = os.path.split(os.getcwd())[-1]
+ self.assertRaises(TypeError, posixpath.relpath, None)
self.assertRaises(ValueError, posixpath.relpath, "")
self.assertEqual(posixpath.relpath("a"), "a")
self.assertEqual(posixpath.relpath(posixpath.abspath("a")), "a")
diff --git a/Lib/test/test_queue.py b/Lib/test/test_queue.py
index c4d10110132393..d5927fbf39142b 100644
--- a/Lib/test/test_queue.py
+++ b/Lib/test/test_queue.py
@@ -636,6 +636,23 @@ def test_shutdown_get_task_done_join(self):
self.assertEqual(results, [True]*len(thrds))
+ def test_shutdown_pending_get(self):
+ def get():
+ try:
+ results.append(q.get())
+ except Exception as e:
+ results.append(e)
+
+ q = self.type2test()
+ results = []
+ get_thread = threading.Thread(target=get)
+ get_thread.start()
+ q.shutdown(immediate=False)
+ get_thread.join(timeout=10.0)
+ self.assertFalse(get_thread.is_alive())
+ self.assertEqual(len(results), 1)
+ self.assertIsInstance(results[0], self.queue.ShutDown)
+
class QueueTest(BaseQueueTestMixin):
diff --git a/Lib/test/test_re.py b/Lib/test/test_re.py
index b1ac22c28cf7c1..b8b50e8b3c2190 100644
--- a/Lib/test/test_re.py
+++ b/Lib/test/test_re.py
@@ -1,7 +1,7 @@
from test.support import (gc_collect, bigmemtest, _2G,
cpython_only, captured_stdout,
check_disallow_instantiation, is_emscripten, is_wasi,
- warnings_helper, SHORT_TIMEOUT, CPUStopwatch)
+ warnings_helper, SHORT_TIMEOUT, CPUStopwatch, requires_resource)
import locale
import re
import string
@@ -2282,6 +2282,9 @@ def test_bug_40736(self):
with self.assertRaisesRegex(TypeError, "got 'type'"):
re.search("x*", type)
+ # gh-117594: The test is not slow by itself, but it relies on
+ # the absolute computation time and can fail on very slow computers.
+ @requires_resource('cpu')
def test_search_anchor_at_beginning(self):
s = 'x'*10**7
with CPUStopwatch() as stopwatch:
diff --git a/Lib/test/test_regrtest.py b/Lib/test/test_regrtest.py
index d222b3803fdba7..809abd7e92d65f 100644
--- a/Lib/test/test_regrtest.py
+++ b/Lib/test/test_regrtest.py
@@ -2291,6 +2291,7 @@ def test_get_signal_name(self):
for exitcode, expected in (
(-int(signal.SIGINT), 'SIGINT'),
(-int(signal.SIGSEGV), 'SIGSEGV'),
+ (128 + int(signal.SIGABRT), 'SIGABRT'),
(3221225477, "STATUS_ACCESS_VIOLATION"),
(0xC00000FD, "STATUS_STACK_OVERFLOW"),
):
diff --git a/Lib/test/test_rlcompleter.py b/Lib/test/test_rlcompleter.py
index 273ce2cf5c7dd2..1cff6a218f8d75 100644
--- a/Lib/test/test_rlcompleter.py
+++ b/Lib/test/test_rlcompleter.py
@@ -55,7 +55,7 @@ def test_attr_matches(self):
if x.startswith('s')])
self.assertEqual(self.stdcompleter.attr_matches('tuple.foospamegg'), [])
expected = sorted({'None.%s%s' % (x,
- '()' if x == '__init_subclass__'
+ '()' if x in ('__init_subclass__', '__class__')
else '' if x == '__doc__'
else '(')
for x in dir(None)})
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
index 0e50d09c8f28d6..6ec010d13f9e7e 100644
--- a/Lib/test/test_ssl.py
+++ b/Lib/test/test_ssl.py
@@ -4537,7 +4537,11 @@ def msg_cb(conn, direction, version, content_type, msg_type, data):
# server aborts connection with an error.
with self.assertRaisesRegex(
OSError,
- 'certificate required|EOF occurred|closed by the remote host|Connection reset by peer'
+ ('certificate required'
+ '|EOF occurred'
+ '|closed by the remote host'
+ '|Connection reset by peer'
+ '|Broken pipe')
):
# receive CertificateRequest
data = s.recv(1024)
diff --git a/Lib/test/test_str.py b/Lib/test/test_str.py
index b4927113db44e3..ea37eb5d96457d 100644
--- a/Lib/test/test_str.py
+++ b/Lib/test/test_str.py
@@ -2651,6 +2651,24 @@ def test_check_encoding_errors(self):
proc = assert_python_failure('-X', 'dev', '-c', code)
self.assertEqual(proc.rc, 10, proc)
+ def test_str_invalid_call(self):
+ check = lambda *a, **kw: self.assertRaises(TypeError, str, *a, **kw)
+
+ # too many args
+ check(1, "", "", 1)
+
+ # no such kw arg
+ check(test=1)
+
+ # 'encoding' must be str
+ check(1, encoding=1)
+ check(1, 1)
+
+ # 'errors' must be str
+ check(1, errors=1)
+ check(1, "", errors=1)
+ check(1, 1, 1)
+
class StringModuleTest(unittest.TestCase):
def test_formatter_parser(self):
diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py
index 6a66df4e897e3f..ab26bf56d9ced9 100644
--- a/Lib/test/test_sys.py
+++ b/Lib/test/test_sys.py
@@ -1708,11 +1708,15 @@ class newstyleclass(object): pass
# TODO: add check that forces layout of unicodefields
# weakref
import weakref
- check(weakref.ref(int), size('2Pn3P'))
+ if support.Py_GIL_DISABLED:
+ expected = size('2Pn4P')
+ else:
+ expected = size('2Pn3P')
+ check(weakref.ref(int), expected)
# weakproxy
# XXX
# weakcallableproxy
- check(weakref.proxy(int), size('2Pn3P'))
+ check(weakref.proxy(int), expected)
def check_slots(self, obj, base, extra):
expected = sys.getsizeof(base) + struct.calcsize(extra)
diff --git a/Lib/test/test_sys_settrace.py b/Lib/test/test_sys_settrace.py
index 125f40227118f6..ded1d9224d82d9 100644
--- a/Lib/test/test_sys_settrace.py
+++ b/Lib/test/test_sys_settrace.py
@@ -3039,7 +3039,7 @@ def test_trace_unpack_long_sequence(self):
def test_trace_lots_of_globals(self):
- count = min(1000, int(support.Py_C_RECURSION_LIMIT * 0.8))
+ count = min(1000, int(support.get_c_recursion_limit() * 0.8))
code = """if 1:
def f():
diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py
index 39541faa237b24..f18dcc02b23856 100644
--- a/Lib/test/test_tarfile.py
+++ b/Lib/test/test_tarfile.py
@@ -738,6 +738,31 @@ def test_extract_directory(self):
finally:
os_helper.rmtree(DIR)
+ def test_deprecation_if_no_filter_passed_to_extractall(self):
+ DIR = pathlib.Path(TEMPDIR) / "extractall"
+ with (
+ os_helper.temp_dir(DIR),
+ tarfile.open(tarname, encoding="iso8859-1") as tar
+ ):
+ directories = [t for t in tar if t.isdir()]
+ with self.assertWarnsRegex(DeprecationWarning, "Use the filter argument") as cm:
+ tar.extractall(DIR, directories)
+ # check that the stacklevel of the deprecation warning is correct:
+ self.assertEqual(cm.filename, __file__)
+
+ def test_deprecation_if_no_filter_passed_to_extract(self):
+ dirtype = "ustar/dirtype"
+ DIR = pathlib.Path(TEMPDIR) / "extractall"
+ with (
+ os_helper.temp_dir(DIR),
+ tarfile.open(tarname, encoding="iso8859-1") as tar
+ ):
+ tarinfo = tar.getmember(dirtype)
+ with self.assertWarnsRegex(DeprecationWarning, "Use the filter argument") as cm:
+ tar.extract(tarinfo, path=DIR)
+ # check that the stacklevel of the deprecation warning is correct:
+ self.assertEqual(cm.filename, __file__)
+
def test_extractall_pathlike_name(self):
DIR = pathlib.Path(TEMPDIR) / "extractall"
with os_helper.temp_dir(DIR), \
diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py
index a7701fa285aee2..a712ed10f022d6 100644
--- a/Lib/test/test_threading.py
+++ b/Lib/test/test_threading.py
@@ -1527,6 +1527,7 @@ def func():
{before_start}
t.start()
""")
+ check_multi_interp_extensions = bool(support.Py_GIL_DISABLED)
script = textwrap.dedent(f"""
import test.support
test.support.run_in_subinterp_with_config(
@@ -1536,7 +1537,7 @@ def func():
allow_exec=True,
allow_threads={allowed},
allow_daemon_threads={daemon_allowed},
- check_multi_interp_extensions=False,
+ check_multi_interp_extensions={check_multi_interp_extensions},
own_gil=False,
)
""")
diff --git a/Lib/test/test_tools/test_makefile.py b/Lib/test/test_tools/test_makefile.py
index 29f5c28e33bb2b..48a7c1a773bb83 100644
--- a/Lib/test/test_tools/test_makefile.py
+++ b/Lib/test/test_tools/test_makefile.py
@@ -67,6 +67,10 @@ def test_makefile_test_folders(self):
)
used.append(relpath)
+ # Don't check the wheel dir when Python is built --with-wheel-pkg-dir
+ if sysconfig.get_config_var('WHEEL_PKG_DIR'):
+ test_dirs.remove('test/wheeldata')
+
# Check that there are no extra entries:
unique_test_dirs = set(test_dirs)
self.assertSetEqual(unique_test_dirs, set(used))
diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py
index 927f74eb69fbc7..bae0a8480b994f 100644
--- a/Lib/test/test_typing.py
+++ b/Lib/test/test_typing.py
@@ -38,7 +38,7 @@
from typing import Self, LiteralString
from typing import TypeAlias
from typing import ParamSpec, Concatenate, ParamSpecArgs, ParamSpecKwargs
-from typing import TypeGuard
+from typing import TypeGuard, TypeIs
import abc
import textwrap
import typing
@@ -5207,6 +5207,7 @@ def test_subclass_special_form(self):
Literal[1, 2],
Concatenate[int, ParamSpec("P")],
TypeGuard[int],
+ TypeIs[range],
):
with self.subTest(msg=obj):
with self.assertRaisesRegex(
@@ -6748,6 +6749,7 @@ class C(Generic[T]): pass
self.assertEqual(get_args(NotRequired[int]), (int,))
self.assertEqual(get_args(TypeAlias), ())
self.assertEqual(get_args(TypeGuard[int]), (int,))
+ self.assertEqual(get_args(TypeIs[range]), (range,))
Ts = TypeVarTuple('Ts')
self.assertEqual(get_args(Ts), ())
self.assertEqual(get_args((*Ts,)[0]), (Ts,))
@@ -9592,6 +9594,56 @@ def test_no_isinstance(self):
issubclass(int, TypeGuard)
+class TypeIsTests(BaseTestCase):
+ def test_basics(self):
+ TypeIs[int] # OK
+
+ def foo(arg) -> TypeIs[int]: ...
+ self.assertEqual(gth(foo), {'return': TypeIs[int]})
+
+ with self.assertRaises(TypeError):
+ TypeIs[int, str]
+
+ def test_repr(self):
+ self.assertEqual(repr(TypeIs), 'typing.TypeIs')
+ cv = TypeIs[int]
+ self.assertEqual(repr(cv), 'typing.TypeIs[int]')
+ cv = TypeIs[Employee]
+ self.assertEqual(repr(cv), 'typing.TypeIs[%s.Employee]' % __name__)
+ cv = TypeIs[tuple[int]]
+ self.assertEqual(repr(cv), 'typing.TypeIs[tuple[int]]')
+
+ def test_cannot_subclass(self):
+ with self.assertRaisesRegex(TypeError, CANNOT_SUBCLASS_TYPE):
+ class C(type(TypeIs)):
+ pass
+ with self.assertRaisesRegex(TypeError, CANNOT_SUBCLASS_TYPE):
+ class D(type(TypeIs[int])):
+ pass
+ with self.assertRaisesRegex(TypeError,
+ r'Cannot subclass typing\.TypeIs'):
+ class E(TypeIs):
+ pass
+ with self.assertRaisesRegex(TypeError,
+ r'Cannot subclass typing\.TypeIs\[int\]'):
+ class F(TypeIs[int]):
+ pass
+
+ def test_cannot_init(self):
+ with self.assertRaises(TypeError):
+ TypeIs()
+ with self.assertRaises(TypeError):
+ type(TypeIs)()
+ with self.assertRaises(TypeError):
+ type(TypeIs[Optional[int]])()
+
+ def test_no_isinstance(self):
+ with self.assertRaises(TypeError):
+ isinstance(1, TypeIs[int])
+ with self.assertRaises(TypeError):
+ issubclass(int, TypeIs)
+
+
SpecialAttrsP = typing.ParamSpec('SpecialAttrsP')
SpecialAttrsT = typing.TypeVar('SpecialAttrsT', int, float, complex)
@@ -9691,6 +9743,7 @@ def test_special_attrs(self):
typing.Optional: 'Optional',
typing.TypeAlias: 'TypeAlias',
typing.TypeGuard: 'TypeGuard',
+ typing.TypeIs: 'TypeIs',
typing.TypeVar: 'TypeVar',
typing.Union: 'Union',
typing.Self: 'Self',
@@ -9705,6 +9758,7 @@ def test_special_attrs(self):
typing.Literal[True, 2]: 'Literal',
typing.Optional[Any]: 'Optional',
typing.TypeGuard[Any]: 'TypeGuard',
+ typing.TypeIs[Any]: 'TypeIs',
typing.Union[Any]: 'Any',
typing.Union[int, float]: 'Union',
# Incompatible special forms (tested in test_special_attrs2)
diff --git a/Lib/test/test_venv.py b/Lib/test/test_venv.py
index f410ce7198dc86..668642f73e8d9f 100644
--- a/Lib/test/test_venv.py
+++ b/Lib/test/test_venv.py
@@ -23,7 +23,8 @@
is_emscripten, is_wasi,
requires_venv_with_pip, TEST_HOME_DIR,
requires_resource, copy_python_src_ignore)
-from test.support.os_helper import (can_symlink, EnvironmentVarGuard, rmtree)
+from test.support.os_helper import (can_symlink, EnvironmentVarGuard, rmtree,
+ TESTFN)
import unittest
import venv
from unittest.mock import patch, Mock
@@ -744,6 +745,36 @@ def test_cli_without_scm_ignore_files(self):
with self.assertRaises(FileNotFoundError):
self.get_text_file_contents('.gitignore')
+ def test_venv_same_path(self):
+ same_path = venv.EnvBuilder._same_path
+ if sys.platform == 'win32':
+ # Case-insensitive, and handles short/long names
+ tests = [
+ (True, TESTFN, TESTFN),
+ (True, TESTFN.lower(), TESTFN.upper()),
+ ]
+ import _winapi
+ # ProgramFiles is the most reliable path that will have short/long
+ progfiles = os.getenv('ProgramFiles')
+ if progfiles:
+ tests = [
+ *tests,
+ (True, progfiles, progfiles),
+ (True, _winapi.GetShortPathName(progfiles), _winapi.GetLongPathName(progfiles)),
+ ]
+ else:
+ # Just a simple case-sensitive comparison
+ tests = [
+ (True, TESTFN, TESTFN),
+ (False, TESTFN.lower(), TESTFN.upper()),
+ ]
+ for r, path1, path2 in tests:
+ with self.subTest(f"{path1}-{path2}"):
+ if r:
+ self.assertTrue(same_path(path1, path2))
+ else:
+ self.assertFalse(same_path(path1, path2))
+
@requireVenvCreate
class EnsurePipTest(BaseTest):
"""Test venv module installation of pip."""
diff --git a/Lib/test/test_weakref.py b/Lib/test/test_weakref.py
index 6fbd292c1e6793..499ba77fd19542 100644
--- a/Lib/test/test_weakref.py
+++ b/Lib/test/test_weakref.py
@@ -1255,6 +1255,12 @@ class MappingTestCase(TestBase):
COUNT = 10
+ if support.check_sanitizer(thread=True) and support.Py_GIL_DISABLED:
+ # Reduce iteration count to get acceptable latency
+ NUM_THREADED_ITERATIONS = 1000
+ else:
+ NUM_THREADED_ITERATIONS = 100000
+
def check_len_cycles(self, dict_type, cons):
N = 20
items = [RefCycle() for i in range(N)]
@@ -1880,7 +1886,7 @@ def test_make_weak_keyed_dict_repr(self):
def test_threaded_weak_valued_setdefault(self):
d = weakref.WeakValueDictionary()
with collect_in_thread():
- for i in range(100000):
+ for i in range(self.NUM_THREADED_ITERATIONS):
x = d.setdefault(10, RefCycle())
self.assertIsNot(x, None) # we never put None in there!
del x
@@ -1889,7 +1895,7 @@ def test_threaded_weak_valued_setdefault(self):
def test_threaded_weak_valued_pop(self):
d = weakref.WeakValueDictionary()
with collect_in_thread():
- for i in range(100000):
+ for i in range(self.NUM_THREADED_ITERATIONS):
d[10] = RefCycle()
x = d.pop(10, 10)
self.assertIsNot(x, None) # we never put None in there!
@@ -1900,13 +1906,32 @@ def test_threaded_weak_valued_consistency(self):
# WeakValueDictionary when collecting from another thread.
d = weakref.WeakValueDictionary()
with collect_in_thread():
- for i in range(200000):
+ for i in range(2 * self.NUM_THREADED_ITERATIONS):
o = RefCycle()
d[10] = o
# o is still alive, so the dict can't be empty
self.assertEqual(len(d), 1)
o = None # lose ref
+ @support.cpython_only
+ def test_weak_valued_consistency(self):
+ # A single-threaded, deterministic repro for issue #28427: old keys
+ # should not remove new values from WeakValueDictionary. This relies on
+ # an implementation detail of CPython's WeakValueDictionary (its
+ # underlying dictionary of KeyedRefs) to reproduce the issue.
+ d = weakref.WeakValueDictionary()
+ with support.disable_gc():
+ d[10] = RefCycle()
+ # Keep the KeyedRef alive after it's replaced so that GC will invoke
+ # the callback.
+ wr = d.data[10]
+ # Replace the value with something that isn't cyclic garbage
+ o = RefCycle()
+ d[10] = o
+ # Trigger GC, which will invoke the callback for `wr`
+ gc.collect()
+ self.assertEqual(len(d), 1)
+
def check_threaded_weak_dict_copy(self, type_, deepcopy):
# `type_` should be either WeakKeyDictionary or WeakValueDictionary.
# `deepcopy` should be either True or False.
diff --git a/Lib/test/test_webbrowser.py b/Lib/test/test_webbrowser.py
index a1bccb5f19b60f..849665294c3dfa 100644
--- a/Lib/test/test_webbrowser.py
+++ b/Lib/test/test_webbrowser.py
@@ -1,15 +1,17 @@
-import webbrowser
-import unittest
import os
-import sys
+import re
+import shlex
import subprocess
-from unittest import mock
+import sys
+import unittest
+import webbrowser
from test import support
-from test.support import is_apple_mobile
from test.support import import_helper
+from test.support import is_apple_mobile
from test.support import os_helper
from test.support import requires_subprocess
from test.support import threading_helper
+from unittest import mock
# The webbrowser module uses threading locks
threading_helper.requires_working_threading(module=True)
@@ -98,6 +100,15 @@ def test_open_new_tab(self):
options=[],
arguments=[URL])
+ def test_open_bad_new_parameter(self):
+ with self.assertRaisesRegex(webbrowser.Error,
+ re.escape("Bad 'new' parameter to open(); "
+ "expected 0, 1, or 2, got 999")):
+ self._test('open',
+ options=[],
+ arguments=[URL],
+ kw=dict(new=999))
+
class EdgeCommandTest(CommandTestMixin, unittest.TestCase):
@@ -205,22 +216,22 @@ class ELinksCommandTest(CommandTestMixin, unittest.TestCase):
def test_open(self):
self._test('open', options=['-remote'],
- arguments=['openURL({})'.format(URL)])
+ arguments=[f'openURL({URL})'])
def test_open_with_autoraise_false(self):
self._test('open',
options=['-remote'],
- arguments=['openURL({})'.format(URL)])
+ arguments=[f'openURL({URL})'])
def test_open_new(self):
self._test('open_new',
options=['-remote'],
- arguments=['openURL({},new-window)'.format(URL)])
+ arguments=[f'openURL({URL},new-window)'])
def test_open_new_tab(self):
self._test('open_new_tab',
options=['-remote'],
- arguments=['openURL({},new-tab)'.format(URL)])
+ arguments=[f'openURL({URL},new-tab)'])
@unittest.skipUnless(sys.platform == "ios", "Test only applicable to iOS")
@@ -342,7 +353,6 @@ def test_register_default(self):
def test_register_preferred(self):
self._check_registration(preferred=True)
-
@unittest.skipUnless(sys.platform == "darwin", "macOS specific test")
def test_no_xdg_settings_on_macOS(self):
# On macOS webbrowser should not use xdg-settings to
@@ -423,5 +433,62 @@ def test_environment_preferred(self):
self.assertEqual(webbrowser.get().name, sys.executable)
-if __name__=='__main__':
+class CliTest(unittest.TestCase):
+ def test_parse_args(self):
+ for command, url, new_win in [
+ # No optional arguments
+ ("https://example.com", "https://example.com", 0),
+ # Each optional argument
+ ("https://example.com -n", "https://example.com", 1),
+ ("-n https://example.com", "https://example.com", 1),
+ ("https://example.com -t", "https://example.com", 2),
+ ("-t https://example.com", "https://example.com", 2),
+ # Long form
+ ("https://example.com --new-window", "https://example.com", 1),
+ ("--new-window https://example.com", "https://example.com", 1),
+ ("https://example.com --new-tab", "https://example.com", 2),
+ ("--new-tab https://example.com", "https://example.com", 2),
+ ]:
+ args = webbrowser.parse_args(shlex.split(command))
+
+ self.assertEqual(args.url, url)
+ self.assertEqual(args.new_win, new_win)
+
+ def test_parse_args_error(self):
+ for command in [
+ # Arguments must not both be given
+ "https://example.com -n -t",
+ "https://example.com --new-window --new-tab",
+ "https://example.com -n --new-tab",
+ "https://example.com --new-window -t",
+ # Ensure ambiguous shortening fails
+ "https://example.com --new",
+ ]:
+ with self.assertRaises(SystemExit):
+ webbrowser.parse_args(shlex.split(command))
+
+ def test_main(self):
+ for command, expected_url, expected_new_win in [
+ # No optional arguments
+ ("https://example.com", "https://example.com", 0),
+ # Each optional argument
+ ("https://example.com -n", "https://example.com", 1),
+ ("-n https://example.com", "https://example.com", 1),
+ ("https://example.com -t", "https://example.com", 2),
+ ("-t https://example.com", "https://example.com", 2),
+ # Long form
+ ("https://example.com --new-window", "https://example.com", 1),
+ ("--new-window https://example.com", "https://example.com", 1),
+ ("https://example.com --new-tab", "https://example.com", 2),
+ ("--new-tab https://example.com", "https://example.com", 2),
+ ]:
+ with (
+ mock.patch("webbrowser.open", return_value=None) as mock_open,
+ mock.patch("builtins.print", return_value=None),
+ ):
+ webbrowser.main(shlex.split(command))
+ mock_open.assert_called_once_with(expected_url, expected_new_win)
+
+
+if __name__ == '__main__':
unittest.main()
diff --git a/Lib/test/test_winapi.py b/Lib/test/test_winapi.py
index 014aeea7239e2b..2ac6f3621710cd 100644
--- a/Lib/test/test_winapi.py
+++ b/Lib/test/test_winapi.py
@@ -1,6 +1,9 @@
# Test the Windows-only _winapi module
+import os
+import pathlib
import random
+import re
import threading
import time
import unittest
@@ -92,3 +95,35 @@ def test_many_events_waitany(self):
def test_max_events_waitany(self):
self._events_waitany_test(MAXIMUM_BATCHED_WAIT_OBJECTS)
+
+
+class WinAPITests(unittest.TestCase):
+ def test_getlongpathname(self):
+ testfn = pathlib.Path(os.getenv("ProgramFiles")).parents[-1] / "PROGRA~1"
+ if not os.path.isdir(testfn):
+ raise unittest.SkipTest("require x:\\PROGRA~1 to test")
+
+ # pathlib.Path will be rejected - only str is accepted
+ with self.assertRaises(TypeError):
+ _winapi.GetLongPathName(testfn)
+
+ actual = _winapi.GetLongPathName(os.fsdecode(testfn))
+
+ # Can't assume that PROGRA~1 expands to any particular variation, so
+ # ensure it matches any one of them.
+ candidates = set(testfn.parent.glob("Progra*"))
+ self.assertIn(pathlib.Path(actual), candidates)
+
+ def test_getshortpathname(self):
+ testfn = pathlib.Path(os.getenv("ProgramFiles"))
+ if not os.path.isdir(testfn):
+ raise unittest.SkipTest("require '%ProgramFiles%' to test")
+
+ # pathlib.Path will be rejected - only str is accepted
+ with self.assertRaises(TypeError):
+ _winapi.GetShortPathName(testfn)
+
+ actual = _winapi.GetShortPathName(os.fsdecode(testfn))
+
+ # Should contain "PROGRA~" but we can't predict the number
+ self.assertIsNotNone(re.match(r".\:\\PROGRA~\d", actual.upper()), actual)
diff --git a/Lib/test/test_wmi.py b/Lib/test/test_wmi.py
index bf8c52e646dc18..f667926d1f8ddf 100644
--- a/Lib/test/test_wmi.py
+++ b/Lib/test/test_wmi.py
@@ -14,11 +14,13 @@ def wmi_exec_query(query):
# gh-112278: WMI maybe slow response when first call.
try:
return _wmi.exec_query(query)
+ except BrokenPipeError:
+ pass
except WindowsError as e:
if e.winerror != 258:
raise
- time.sleep(LOOPBACK_TIMEOUT)
- return _wmi.exec_query(query)
+ time.sleep(LOOPBACK_TIMEOUT)
+ return _wmi.exec_query(query)
class WmiTests(unittest.TestCase):
diff --git a/Lib/test/test_wsgiref.py b/Lib/test/test_wsgiref.py
index 9316d0ecbcf1ae..b047f7b06f85d3 100644
--- a/Lib/test/test_wsgiref.py
+++ b/Lib/test/test_wsgiref.py
@@ -137,7 +137,7 @@ def test_environ(self):
def test_request_length(self):
out, err = run_amock(data=b"GET " + (b"x" * 65537) + b" HTTP/1.0\n\n")
self.assertEqual(out.splitlines()[0],
- b"HTTP/1.0 414 Request-URI Too Long")
+ b"HTTP/1.0 414 URI Too Long")
def test_validated_hello(self):
out, err = run_amock(validator(hello_app))
diff --git a/Lib/typing.py b/Lib/typing.py
index d8e4ee3635994c..231492cdcc01cf 100644
--- a/Lib/typing.py
+++ b/Lib/typing.py
@@ -153,6 +153,7 @@
'TYPE_CHECKING',
'TypeAlias',
'TypeGuard',
+ 'TypeIs',
'TypeAliasType',
'Unpack',
]
@@ -818,28 +819,31 @@ def Concatenate(self, parameters):
@_SpecialForm
def TypeGuard(self, parameters):
- """Special typing construct for marking user-defined type guard functions.
+ """Special typing construct for marking user-defined type predicate functions.
``TypeGuard`` can be used to annotate the return type of a user-defined
- type guard function. ``TypeGuard`` only accepts a single type argument.
+ type predicate function. ``TypeGuard`` only accepts a single type argument.
At runtime, functions marked this way should return a boolean.
``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
type checkers to determine a more precise type of an expression within a
program's code flow. Usually type narrowing is done by analyzing
conditional code flow and applying the narrowing to a block of code. The
- conditional expression here is sometimes referred to as a "type guard".
+ conditional expression here is sometimes referred to as a "type predicate".
Sometimes it would be convenient to use a user-defined boolean function
- as a type guard. Such a function should use ``TypeGuard[...]`` as its
- return type to alert static type checkers to this intention.
+ as a type predicate. Such a function should use ``TypeGuard[...]`` or
+ ``TypeIs[...]`` as its return type to alert static type checkers to
+ this intention. ``TypeGuard`` should be used over ``TypeIs`` when narrowing
+ from an incompatible type (e.g., ``list[object]`` to ``list[int]``) or when
+ the function does not return ``True`` for all instances of the narrowed type.
- Using ``-> TypeGuard`` tells the static type checker that for a given
- function:
+ Using ``-> TypeGuard[NarrowedType]`` tells the static type checker that
+ for a given function:
1. The return value is a boolean.
2. If the return value is ``True``, the type of its argument
- is the type inside ``TypeGuard``.
+ is ``NarrowedType``.
For example::
@@ -860,7 +864,7 @@ def func1(val: list[object]):
type-unsafe results. The main reason is to allow for things like
narrowing ``list[object]`` to ``list[str]`` even though the latter is not
a subtype of the former, since ``list`` is invariant. The responsibility of
- writing type-safe type guards is left to the user.
+ writing type-safe type predicates is left to the user.
``TypeGuard`` also works with type variables. For more information, see
PEP 647 (User-Defined Type Guards).
@@ -869,6 +873,75 @@ def func1(val: list[object]):
return _GenericAlias(self, (item,))
+@_SpecialForm
+def TypeIs(self, parameters):
+ """Special typing construct for marking user-defined type predicate functions.
+
+ ``TypeIs`` can be used to annotate the return type of a user-defined
+ type predicate function. ``TypeIs`` only accepts a single type argument.
+ At runtime, functions marked this way should return a boolean and accept
+ at least one argument.
+
+ ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static
+ type checkers to determine a more precise type of an expression within a
+ program's code flow. Usually type narrowing is done by analyzing
+ conditional code flow and applying the narrowing to a block of code. The
+ conditional expression here is sometimes referred to as a "type predicate".
+
+ Sometimes it would be convenient to use a user-defined boolean function
+ as a type predicate. Such a function should use ``TypeIs[...]`` or
+ ``TypeGuard[...]`` as its return type to alert static type checkers to
+ this intention. ``TypeIs`` usually has more intuitive behavior than
+ ``TypeGuard``, but it cannot be used when the input and output types
+ are incompatible (e.g., ``list[object]`` to ``list[int]``) or when the
+ function does not return ``True`` for all instances of the narrowed type.
+
+ Using ``-> TypeIs[NarrowedType]`` tells the static type checker that for
+ a given function:
+
+ 1. The return value is a boolean.
+ 2. If the return value is ``True``, the type of its argument
+ is the intersection of the argument's original type and
+ ``NarrowedType``.
+ 3. If the return value is ``False``, the type of its argument
+ is narrowed to exclude ``NarrowedType``.
+
+ For example::
+
+ from typing import assert_type, final, TypeIs
+
+ class Parent: pass
+ class Child(Parent): pass
+ @final
+ class Unrelated: pass
+
+ def is_parent(val: object) -> TypeIs[Parent]:
+ return isinstance(val, Parent)
+
+ def run(arg: Child | Unrelated):
+ if is_parent(arg):
+ # Type of ``arg`` is narrowed to the intersection
+ # of ``Parent`` and ``Child``, which is equivalent to
+ # ``Child``.
+ assert_type(arg, Child)
+ else:
+ # Type of ``arg`` is narrowed to exclude ``Parent``,
+ # so only ``Unrelated`` is left.
+ assert_type(arg, Unrelated)
+
+ The type inside ``TypeIs`` must be consistent with the type of the
+ function's argument; if it is not, static type checkers will raise
+ an error. An incorrectly written ``TypeIs`` function can lead to
+ unsound behavior in the type system; it is the user's responsibility
+ to write such functions in a type-safe manner.
+
+ ``TypeIs`` also works with type variables. For more information, see
+ PEP 742 (Narrowing types with ``TypeIs``).
+ """
+ item = _type_check(parameters, f'{self} accepts only single type.')
+ return _GenericAlias(self, (item,))
+
+
class ForwardRef(_Final, _root=True):
"""Internal wrapper to hold a forward reference."""
@@ -1241,11 +1314,12 @@ class _GenericAlias(_BaseGenericAlias, _root=True):
# A = Callable[[], None] # _CallableGenericAlias
# B = Callable[[T], None] # _CallableGenericAlias
# C = B[int] # _CallableGenericAlias
- # * Parameterized `Final`, `ClassVar` and `TypeGuard`:
+ # * Parameterized `Final`, `ClassVar`, `TypeGuard`, and `TypeIs`:
# # All _GenericAlias
# Final[int]
# ClassVar[float]
- # TypeVar[bool]
+ # TypeGuard[bool]
+ # TypeIs[range]
def __init__(self, origin, args, *, inst=True, name=None):
super().__init__(origin, inst=inst, name=name)
diff --git a/Lib/venv/__init__.py b/Lib/venv/__init__.py
index 4856594755ae57..fa69d5846f2fa7 100644
--- a/Lib/venv/__init__.py
+++ b/Lib/venv/__init__.py
@@ -107,6 +107,33 @@ def _venv_path(self, env_dir, name):
}
return sysconfig.get_path(name, scheme='venv', vars=vars)
+ @classmethod
+ def _same_path(cls, path1, path2):
+ """Check whether two paths appear the same.
+
+ Whether they refer to the same file is irrelevant; we're testing for
+ whether a human reader would look at the path string and easily tell
+ that they're the same file.
+ """
+ if sys.platform == 'win32':
+ if os.path.normcase(path1) == os.path.normcase(path2):
+ return True
+ # gh-90329: Don't display a warning for short/long names
+ import _winapi
+ try:
+ path1 = _winapi.GetLongPathName(os.fsdecode(path1))
+ except OSError:
+ pass
+ try:
+ path2 = _winapi.GetLongPathName(os.fsdecode(path2))
+ except OSError:
+ pass
+ if os.path.normcase(path1) == os.path.normcase(path2):
+ return True
+ return False
+ else:
+ return path1 == path2
+
def ensure_directories(self, env_dir):
"""
Create the directories for the environment.
@@ -171,7 +198,7 @@ def create_if_needed(d):
# bpo-45337: Fix up env_exec_cmd to account for file system redirections.
# Some redirects only apply to CreateFile and not CreateProcess
real_env_exe = os.path.realpath(context.env_exe)
- if os.path.normcase(real_env_exe) != os.path.normcase(context.env_exe):
+ if not self._same_path(real_env_exe, context.env_exe):
logger.warning('Actual environment location may have moved due to '
'redirects, links or junctions.\n'
' Requested location: "%s"\n'
diff --git a/Lib/webbrowser.py b/Lib/webbrowser.py
index 7ef80a8f5ace9e..b7fbc41853ea65 100755
--- a/Lib/webbrowser.py
+++ b/Lib/webbrowser.py
@@ -11,14 +11,17 @@
__all__ = ["Error", "open", "open_new", "open_new_tab", "get", "register"]
+
class Error(Exception):
pass
+
_lock = threading.RLock()
_browsers = {} # Dictionary of available browser controllers
_tryorder = None # Preference order of available browsers
_os_preferred_browser = None # The preferred browser
+
def register(name, klass, instance=None, *, preferred=False):
"""Register a browser connector."""
with _lock:
@@ -34,6 +37,7 @@ def register(name, klass, instance=None, *, preferred=False):
else:
_tryorder.append(name)
+
def get(using=None):
"""Return a browser launcher instance appropriate for the environment."""
if _tryorder is None:
@@ -64,6 +68,7 @@ def get(using=None):
return command[0]()
raise Error("could not locate runnable browser")
+
# Please note: the following definition hides a builtin function.
# It is recommended one does "import webbrowser" and uses webbrowser.open(url)
# instead of "from webbrowser import *".
@@ -87,6 +92,7 @@ def open(url, new=0, autoraise=True):
return True
return False
+
def open_new(url):
"""Open url in a new window of the default browser.
@@ -94,6 +100,7 @@ def open_new(url):
"""
return open(url, 1)
+
def open_new_tab(url):
"""Open url in a new page ("tab") of the default browser.
@@ -136,7 +143,7 @@ def _synthesize(browser, *, preferred=False):
# General parent classes
-class BaseBrowser(object):
+class BaseBrowser:
"""Parent class for all browsers. Do not use directly."""
args = ['%s']
@@ -197,7 +204,7 @@ def open(self, url, new=0, autoraise=True):
else:
p = subprocess.Popen(cmdline, close_fds=True,
start_new_session=True)
- return (p.poll() is None)
+ return p.poll() is None
except OSError:
return False
@@ -225,7 +232,8 @@ def _invoke(self, args, remote, autoraise, url=None):
# use autoraise argument only for remote invocation
autoraise = int(autoraise)
opt = self.raise_opts[autoraise]
- if opt: raise_opt = [opt]
+ if opt:
+ raise_opt = [opt]
cmdline = [self.name] + raise_opt + args
@@ -266,8 +274,8 @@ def open(self, url, new=0, autoraise=True):
else:
action = self.remote_action_newtab
else:
- raise Error("Bad 'new' parameter to open(); " +
- "expected 0, 1, or 2, got %s" % new)
+ raise Error("Bad 'new' parameter to open(); "
+ f"expected 0, 1, or 2, got {new}")
args = [arg.replace("%s", url).replace("%action", action)
for arg in self.remote_args]
@@ -302,7 +310,7 @@ class Epiphany(UnixBrowser):
class Chrome(UnixBrowser):
- "Launcher class for Google Chrome browser."
+ """Launcher class for Google Chrome browser."""
remote_args = ['%action', '%s']
remote_action = ""
@@ -310,11 +318,12 @@ class Chrome(UnixBrowser):
remote_action_newtab = ""
background = True
+
Chromium = Chrome
class Opera(UnixBrowser):
- "Launcher class for Opera browser."
+ """Launcher class for Opera browser."""
remote_args = ['%action', '%s']
remote_action = ""
@@ -324,7 +333,7 @@ class Opera(UnixBrowser):
class Elinks(UnixBrowser):
- "Launcher class for Elinks browsers."
+ """Launcher class for Elinks browsers."""
remote_args = ['-remote', 'openURL(%s%action)']
remote_action = ""
@@ -387,11 +396,11 @@ def open(self, url, new=0, autoraise=True):
except OSError:
return False
else:
- return (p.poll() is None)
+ return p.poll() is None
class Edge(UnixBrowser):
- "Launcher class for Microsoft Edge browser."
+ """Launcher class for Microsoft Edge browser."""
remote_args = ['%action', '%s']
remote_action = ""
@@ -461,7 +470,6 @@ def register_X_browsers():
if shutil.which("opera"):
register("opera", None, Opera("opera"))
-
if shutil.which("microsoft-edge"):
register("microsoft-edge", None, Edge("microsoft-edge"))
@@ -514,7 +522,8 @@ def register_standard_browsers():
cmd = "xdg-settings get default-web-browser".split()
raw_result = subprocess.check_output(cmd, stderr=subprocess.DEVNULL)
result = raw_result.decode().strip()
- except (FileNotFoundError, subprocess.CalledProcessError, PermissionError, NotADirectoryError) :
+ except (FileNotFoundError, subprocess.CalledProcessError,
+ PermissionError, NotADirectoryError):
pass
else:
global _os_preferred_browser
@@ -584,15 +593,16 @@ def __init__(self, name='default'):
def open(self, url, new=0, autoraise=True):
sys.audit("webbrowser.open", url)
+ url = url.replace('"', '%22')
if self.name == 'default':
- script = 'open location "%s"' % url.replace('"', '%22') # opens in default browser
+ script = f'open location "{url}"' # opens in default browser
else:
script = f'''
- tell application "%s"
+ tell application "{self.name}"
activate
- open location "%s"
+ open location "{url}"
end
- '''%(self.name, url.replace('"', '%22'))
+ '''
osapipe = os.popen("osascript", "w")
if osapipe is None:
@@ -667,33 +677,31 @@ def open(self, url, new=0, autoraise=True):
return True
-def main():
- import getopt
- usage = """Usage: %s [-n | -t | -h] url
- -n: open new window
- -t: open new tab
- -h, --help: show help""" % sys.argv[0]
- try:
- opts, args = getopt.getopt(sys.argv[1:], 'ntdh',['help'])
- except getopt.error as msg:
- print(msg, file=sys.stderr)
- print(usage, file=sys.stderr)
- sys.exit(1)
- new_win = 0
- for o, a in opts:
- if o == '-n': new_win = 1
- elif o == '-t': new_win = 2
- elif o == '-h' or o == '--help':
- print(usage, file=sys.stderr)
- sys.exit()
- if len(args) != 1:
- print(usage, file=sys.stderr)
- sys.exit(1)
-
- url = args[0]
- open(url, new_win)
+def parse_args(arg_list: list[str] | None):
+ import argparse
+ parser = argparse.ArgumentParser(description="Open URL in a web browser.")
+ parser.add_argument("url", help="URL to open")
+
+ group = parser.add_mutually_exclusive_group()
+ group.add_argument("-n", "--new-window", action="store_const",
+ const=1, default=0, dest="new_win",
+ help="open new window")
+ group.add_argument("-t", "--new-tab", action="store_const",
+ const=2, default=0, dest="new_win",
+ help="open new tab")
+
+ args = parser.parse_args(arg_list)
+
+ return args
+
+
+def main(arg_list: list[str] | None = None):
+ args = parse_args(arg_list)
+
+ open(args.url, args.new_win)
print("\a")
+
if __name__ == "__main__":
main()
diff --git a/Lib/xml/etree/ElementInclude.py b/Lib/xml/etree/ElementInclude.py
index 40a9b22292479f..986e6c3bbe90f6 100644
--- a/Lib/xml/etree/ElementInclude.py
+++ b/Lib/xml/etree/ElementInclude.py
@@ -79,8 +79,8 @@ class LimitedRecursiveIncludeError(FatalIncludeError):
# @param parse Parse mode. Either "xml" or "text".
# @param encoding Optional text encoding (UTF-8 by default for "text").
# @return The expanded resource. If the parse mode is "xml", this
-# is an ElementTree instance. If the parse mode is "text", this
-# is a Unicode string. If the loader fails, it can return None
+# is an Element instance. If the parse mode is "text", this
+# is a string. If the loader fails, it can return None
# or raise an OSError exception.
# @throws OSError If the loader fails to load the resource.
@@ -98,7 +98,7 @@ def default_loader(href, parse, encoding=None):
##
# Expand XInclude directives.
#
-# @param elem Root element.
+# @param elem Root Element or any ElementTree of a tree to be expanded
# @param loader Optional resource loader. If omitted, it defaults
# to {@link default_loader}. If given, it should be a callable
# that implements the same interface as default_loader.
@@ -106,12 +106,13 @@ def default_loader(href, parse, encoding=None):
# relative include file references.
# @param max_depth The maximum number of recursive inclusions.
# Limited to reduce the risk of malicious content explosion.
-# Pass a negative value to disable the limitation.
+# Pass None to disable the limitation.
# @throws LimitedRecursiveIncludeError If the {@link max_depth} was exceeded.
# @throws FatalIncludeError If the function fails to include a given
# resource, or if the tree contains malformed XInclude elements.
-# @throws IOError If the function fails to load a given resource.
-# @returns the node or its replacement if it was an XInclude node
+# @throws OSError If the function fails to load a given resource.
+# @throws ValueError If negative {@link max_depth} is passed.
+# @returns None. Modifies tree pointed by {@link elem}
def include(elem, loader=None, base_url=None,
max_depth=DEFAULT_MAX_INCLUSION_DEPTH):
diff --git a/Misc/ACKS b/Misc/ACKS
index fe014a364dd82d..76d30b257b4693 100644
--- a/Misc/ACKS
+++ b/Misc/ACKS
@@ -496,6 +496,7 @@ David Edelsohn
John Edmonds
Benjamin Edwards
Grant Edwards
+Vlad Efanov
Zvi Effron
John Ehresman
Tal Einat
@@ -641,6 +642,7 @@ Neil Girdhar
Matt Giuca
Andrea Giudiceandrea
Franz Glasner
+Jeff Glass
Wim Glenn
Michael Goderbauer
Karan Goel
diff --git a/Misc/NEWS.d/3.10.0a1.rst b/Misc/NEWS.d/3.10.0a1.rst
index 4842a026aa49f7..2e32ca9f3b26bb 100644
--- a/Misc/NEWS.d/3.10.0a1.rst
+++ b/Misc/NEWS.d/3.10.0a1.rst
@@ -5,7 +5,7 @@
.. section: Security
Fixes ``python3x._pth`` being ignored on Windows, caused by the fix for
-:issue:`29778` (CVE-2020-15801).
+:issue:`29778` (:cve:`2020-15801`).
..
@@ -25,7 +25,7 @@ events.
.. section: Security
Ensure :file:`python3.dll` is loaded from correct locations when Python is
-embedded (CVE-2020-15523).
+embedded (:cve:`2020-15523`).
..
@@ -1596,7 +1596,7 @@ UnpicklingError instead of crashing.
.. section: Library
Avoid infinite loop when reading specially crafted TAR files using the
-tarfile module (CVE-2019-20907).
+tarfile module (:cve:`2019-20907`).
..
diff --git a/Misc/NEWS.d/3.10.0a7.rst b/Misc/NEWS.d/3.10.0a7.rst
index 74120a3b40c012..fe6213d95a88bb 100644
--- a/Misc/NEWS.d/3.10.0a7.rst
+++ b/Misc/NEWS.d/3.10.0a7.rst
@@ -4,7 +4,7 @@
.. release date: 2021-04-05
.. section: Security
-CVE-2021-3426: Remove the ``getfile`` feature of the :mod:`pydoc` module
+:cve:`2021-3426`: Remove the ``getfile`` feature of the :mod:`pydoc` module
which could be abused to read arbitrary files on the disk (directory
traversal vulnerability). Moreover, even source code of Python modules can
contain sensitive data like passwords. Vulnerability reported by David
diff --git a/Misc/NEWS.d/3.11.0a1.rst b/Misc/NEWS.d/3.11.0a1.rst
index 754e782dfe661b..e6cf9c001a1a01 100644
--- a/Misc/NEWS.d/3.11.0a1.rst
+++ b/Misc/NEWS.d/3.11.0a1.rst
@@ -38,7 +38,7 @@ significant performance overhead when loading from ``.pyc`` files.
.. section: Security
Update the vendored copy of libexpat to 2.4.1 (from 2.2.8) to get the fix
-for the CVE-2013-0340 "Billion Laughs" vulnerability. This copy is most used
+for the :cve:`2013-0340` "Billion Laughs" vulnerability. This copy is most used
on Windows and macOS.
..
diff --git a/Misc/NEWS.d/3.11.0a7.rst b/Misc/NEWS.d/3.11.0a7.rst
index ec99bd0294ceca..f4e2ad8db678f5 100644
--- a/Misc/NEWS.d/3.11.0a7.rst
+++ b/Misc/NEWS.d/3.11.0a7.rst
@@ -1421,7 +1421,7 @@ Patch by Victor Stinner.
.. nonce: IB0XL4
.. section: Windows
-Update ``zlib`` to v1.2.12 to resolve CVE-2018-25032.
+Update ``zlib`` to v1.2.12 to resolve :cve:`2018-25032`.
..
@@ -1472,8 +1472,8 @@ Update Windows installer to use SQLite 3.38.1.
.. nonce: SPrGS9
.. section: Windows
-Update bzip2 to 1.0.8 in Windows builds to mitigate CVE-2016-3189 and
-CVE-2019-12900
+Update bzip2 to 1.0.8 in Windows builds to mitigate :cve:`2016-3189` and
+:cve:`2019-12900`.
..
@@ -1482,7 +1482,7 @@ CVE-2019-12900
.. nonce: Ufd4tG
.. section: Windows
-Prevent CVE-2022-26488 by ensuring the Add to PATH option in the Windows
+Prevent :cve:`2022-26488` by ensuring the Add to PATH option in the Windows
installer uses the correct path when being repaired.
..
diff --git a/Misc/NEWS.d/3.12.0a1.rst b/Misc/NEWS.d/3.12.0a1.rst
index 4739e0fb89a4a8..f75a83c1d950d4 100644
--- a/Misc/NEWS.d/3.12.0a1.rst
+++ b/Misc/NEWS.d/3.12.0a1.rst
@@ -29,8 +29,7 @@ process. This was a potential privilege escalation. Filesystem based socket
permissions restrict this to the *forkserver* process user as was the
default in Python 3.8 and earlier.
-This prevents Linux `CVE-2022-42919
-`_.
+This prevents Linux :cve:`2022-42919`.
..
@@ -3200,9 +3199,8 @@ Remove the :func:`ssl.wrap_socket` function, deprecated in Python 3.7:
instead, create a :class:`ssl.SSLContext` object and call its
:class:`ssl.SSLContext.wrap_socket` method. Any package that still uses
:func:`ssl.wrap_socket` is broken and insecure. The function neither sends a
-SNI TLS extension nor validates server hostname. Code is subject to `CWE-295
-`_: Improper Certificate
-Validation. Patch by Victor Stinner.
+SNI TLS extension nor validates server hostname. Code is subject to :cwe:`295`
+Improper Certificate Validation. Patch by Victor Stinner.
..
@@ -4404,8 +4402,7 @@ Remove extra row
.. section: Documentation
Deprecated tools ``make suspicious`` and ``rstlint.py`` are now removed.
-They have been replaced by `spinx-lint
-`_.
+They have been replaced by :pypi:`sphinx-lint`.
..
diff --git a/Misc/NEWS.d/3.12.0a2.rst b/Misc/NEWS.d/3.12.0a2.rst
index a9c5038fa489bb..f1d69d9b3e7638 100644
--- a/Misc/NEWS.d/3.12.0a2.rst
+++ b/Misc/NEWS.d/3.12.0a2.rst
@@ -968,7 +968,7 @@ if :option:`--with-system-expat` is passed to :program:`configure`.
.. nonce: 0f6e_N
.. section: Windows
-Update Windows builds to zlib v1.2.13. v1.2.12 has CVE-2022-37434, but the
+Update Windows builds to zlib v1.2.13. v1.2.12 has :cve:`2022-37434`, but the
vulnerable ``inflateGetHeader`` API is not used by Python.
..
diff --git a/Misc/NEWS.d/3.12.0a6.rst b/Misc/NEWS.d/3.12.0a6.rst
index cf28bdb9258820..05f9243eb6b1bc 100644
--- a/Misc/NEWS.d/3.12.0a6.rst
+++ b/Misc/NEWS.d/3.12.0a6.rst
@@ -15,7 +15,7 @@ from the HACL* project.
.. section: Security
Updated the OpenSSL version used in Windows and macOS binary release builds
-to 1.1.1t to address CVE-2023-0286, CVE-2022-4303, and CVE-2022-4303 per
+to 1.1.1t to address :cve:`2023-0286`, :cve:`2022-4303`, and :cve:`2022-4303` per
`the OpenSSL 2023-02-07 security advisory
`_.
diff --git a/Misc/NEWS.d/3.12.0b1.rst b/Misc/NEWS.d/3.12.0b1.rst
index d9804be764c9a9..764b80b7b7d436 100644
--- a/Misc/NEWS.d/3.12.0b1.rst
+++ b/Misc/NEWS.d/3.12.0b1.rst
@@ -37,7 +37,7 @@ or lacks SHA3.
:func:`urllib.parse.urlsplit` now strips leading C0 control and space
characters following the specification for URLs defined by WHATWG in
-response to CVE-2023-24329. Patch by Illia Volochii.
+response to :cve:`2023-24329`. Patch by Illia Volochii.
..
diff --git a/Misc/NEWS.d/3.13.0a1.rst b/Misc/NEWS.d/3.13.0a1.rst
index 16715bee5a8e49..4937f9da5ae629 100644
--- a/Misc/NEWS.d/3.13.0a1.rst
+++ b/Misc/NEWS.d/3.13.0a1.rst
@@ -8,9 +8,7 @@ Fixed an issue where instances of :class:`ssl.SSLSocket` were vulnerable to
a bypass of the TLS handshake and included protections (like certificate
verification) and treating sent unencrypted data as if it were
post-handshake TLS encrypted data. Security issue reported as
-`CVE-2023-40217
-`_ by Aapo
-Oksman. Patch by Gregory P. Smith.
+:cve:`2023-40217` by Aapo Oksman. Patch by Gregory P. Smith.
..
@@ -4184,8 +4182,7 @@ Hugo van Kemenade.
.. section: Library
:pep:`594`: Remove the :mod:`!spwd` module, deprecated in Python 3.11: the
-`python-pam project `_ can be used
-instead. Patch by Victor Stinner.
+:pypi:`python-pam` project can be used instead. Patch by Victor Stinner.
..
diff --git a/Misc/NEWS.d/3.13.0a3.rst b/Misc/NEWS.d/3.13.0a3.rst
index 95aa66603de7cb..218ba609bd80c0 100644
--- a/Misc/NEWS.d/3.13.0a3.rst
+++ b/Misc/NEWS.d/3.13.0a3.rst
@@ -1551,7 +1551,7 @@ optional *strict* parameter to these two functions: use ``strict=False`` to
get the old behavior, accept malformed inputs. ``getattr(email.utils,
'supports_strict_parsing', False)`` can be use to check if the *strict*
paramater is available. Patch by Thomas Dwyer and Victor Stinner to improve
-the CVE-2023-27043 fix.
+the :cve:`2023-27043` fix.
..
diff --git a/Misc/NEWS.d/3.13.0a5.rst b/Misc/NEWS.d/3.13.0a5.rst
index fb0163eed67aeb..55dee59827ad8f 100644
--- a/Misc/NEWS.d/3.13.0a5.rst
+++ b/Misc/NEWS.d/3.13.0a5.rst
@@ -4,7 +4,7 @@
.. release date: 2024-03-12
.. section: Security
-Allow controlling Expat >=2.6.0 reparse deferral (CVE-2023-52425) by adding
+Allow controlling Expat >=2.6.0 reparse deferral (:cve:`2023-52425`) by adding
five new methods:
* :meth:`xml.etree.ElementTree.XMLParser.flush`
@@ -875,7 +875,7 @@ Add 'default' and 'version' help text for localization in argparse.
.. nonce: fb9a0R
.. section: Documentation
-Document CVE-2023-52425 of Expat <2.6.0 under "XML vulnerabilities".
+Document :cve:`2023-52425` of Expat <2.6.0 under "XML vulnerabilities".
..
diff --git a/Misc/NEWS.d/3.13.0a6.rst b/Misc/NEWS.d/3.13.0a6.rst
new file mode 100644
index 00000000000000..06807b396ed5da
--- /dev/null
+++ b/Misc/NEWS.d/3.13.0a6.rst
@@ -0,0 +1,1270 @@
+.. date: 2024-04-08-20-26-15
+.. gh-issue: 117648
+.. nonce: NzVEa7
+.. release date: 2024-04-09
+.. section: Core and Builtins
+
+Improve performance of :func:`os.path.join` and :func:`os.path.expanduser`.
+
+..
+
+.. date: 2024-04-06-16-42-34
+.. gh-issue: 117584
+.. nonce: hqk9Hn
+.. section: Core and Builtins
+
+Raise :exc:`TypeError` for non-paths in :func:`posixpath.relpath()`.
+
+..
+
+.. date: 2024-04-04-13-42-59
+.. gh-issue: 117494
+.. nonce: GPQH64
+.. section: Core and Builtins
+
+Refactored the instruction sequence data structure out of compile.c into
+instruction_sequence.c.
+
+..
+
+.. date: 2024-04-03-13-44-04
+.. gh-issue: 116968
+.. nonce: zgcdG2
+.. section: Core and Builtins
+
+Introduce a unified 16-bit backoff counter type (``_Py_BackoffCounter``),
+shared between the Tier 1 adaptive specializer and the Tier 2 optimizer. The
+API used for adaptive specialization counters is changed but the behavior is
+(supposed to be) identical.
+
+The behavior of the Tier 2 counters is changed:
+
+* There are no longer dynamic thresholds (we never varied these).
+* All counters now use the same exponential backoff.
+* The counter for ``JUMP_BACKWARD`` starts counting down from 16.
+* The ``temperature`` in side exits starts counting down from 64.
+
+..
+
+.. date: 2024-04-03-09-49-15
+.. gh-issue: 117431
+.. nonce: WAqRgc
+.. section: Core and Builtins
+
+Improve the performance of the following :class:`bytes` and
+:class:`bytearray` methods by adapting them to the :c:macro:`METH_FASTCALL`
+calling convention:
+
+* :meth:`!endswith`
+* :meth:`!startswith`
+
+..
+
+.. date: 2024-04-02-17-37-35
+.. gh-issue: 117431
+.. nonce: vDKAOn
+.. section: Core and Builtins
+
+Improve the performance of the following :class:`str` methods by adapting
+them to the :c:macro:`METH_FASTCALL` calling convention:
+
+* :meth:`~str.count`
+* :meth:`~str.endswith`
+* :meth:`~str.find`
+* :meth:`~str.index`
+* :meth:`~str.rfind`
+* :meth:`~str.rindex`
+* :meth:`~str.startswith`
+
+..
+
+.. date: 2024-04-02-10-04-57
+.. gh-issue: 117411
+.. nonce: YdyVmG
+.. section: Core and Builtins
+
+Move ``PyFutureFeatures`` to an internal header and make it private.
+
+..
+
+.. date: 2024-04-02-06-16-49
+.. gh-issue: 109120
+.. nonce: X485oN
+.. section: Core and Builtins
+
+Added handling of incorrect star expressions, e.g. ``f(3, *)``. Patch by
+Grigoryev Semyon.
+
+..
+
+.. date: 2024-03-29-21-43-19
+.. gh-issue: 117381
+.. nonce: fT0JFM
+.. section: Core and Builtins
+
+Fix error message for :func:`ntpath.commonpath`.
+
+..
+
+.. date: 2024-03-29-15-04-13
+.. gh-issue: 117349
+.. nonce: OB9kQQ
+.. section: Core and Builtins
+
+Optimise several functions in :mod:`os.path`.
+
+..
+
+.. date: 2024-03-28-19-13-20
+.. gh-issue: 117335
+.. nonce: d6uKJu
+.. section: Core and Builtins
+
+Raise :exc:`TypeError` for non-sequences passed to :func:`ntpath.commonpath`.
+
+..
+
+.. date: 2024-03-26-17-22-38
+.. gh-issue: 117266
+.. nonce: Kwh79O
+.. section: Core and Builtins
+
+Fix crashes for certain user-created subclasses of :class:`ast.AST`. Such
+classes are now expected to set the ``_field_types`` attribute.
+
+..
+
+.. date: 2024-03-25-17-04-54
+.. gh-issue: 99108
+.. nonce: 8bjdO6
+.. section: Core and Builtins
+
+Updated the :mod:`hashlib` built-in `HACL\* project`_ C code from upstream
+that we use for many implementations when they are not present via OpenSSL
+in a given build. This also avoids the rare potential for a C symbol name
+one definition rule linking issue.
+
+.. _HACL\* project: https://github.com/hacl-star/hacl-star
+
+..
+
+.. date: 2024-03-25-12-51-12
+.. gh-issue: 117108
+.. nonce: tNqDEo
+.. section: Core and Builtins
+
+Change the old space bit of objects in the young generation from 0 to
+gcstate->visited, so that any objects created during GC will have the old
+bit set correctly if they get moved into the old generation.
+
+..
+
+.. date: 2024-03-21-12-10-11
+.. gh-issue: 117108
+.. nonce: _6jIrB
+.. section: Core and Builtins
+
+The cycle GC now chooses the size of increments based on the total heap
+size, instead of the rate of object creation. This ensures that it can keep
+up with growing heaps.
+
+..
+
+.. date: 2024-03-21-09-57-57
+.. gh-issue: 117114
+.. nonce: Qu-p55
+.. section: Core and Builtins
+
+Make :func:`os.path.isdevdrive` available on all platforms. For those that
+do not offer Dev Drives, it will always return ``False``.
+
+..
+
+.. date: 2024-03-13-16-55-25
+.. gh-issue: 116735
+.. nonce: o3w6y8
+.. section: Core and Builtins
+
+For ``INSTRUMENTED_CALL_FUNCTION_EX``, set ``arg0`` to
+``sys.monitoring.MISSING`` instead of ``None`` for :monitoring-event:`CALL`
+event.
+
+..
+
+.. date: 2024-03-12-20-31-57
+.. gh-issue: 113964
+.. nonce: bJppzg
+.. section: Core and Builtins
+
+Starting new threads and process creation through :func:`os.fork` are now
+only prevented once all non-daemon threads exit.
+
+..
+
+.. date: 2024-03-11-22-05-56
+.. gh-issue: 116626
+.. nonce: GsyczB
+.. section: Core and Builtins
+
+Ensure ``INSTRUMENTED_CALL_FUNCTION_EX`` always emits
+:monitoring-event:`CALL`
+
+..
+
+.. date: 2024-03-11-00-45-39
+.. gh-issue: 116554
+.. nonce: gYumG5
+.. section: Core and Builtins
+
+``list.sort()`` now exploits more cases of partial ordering, particularly
+those with long descending runs with sub-runs of equal values. Those are
+recognized as single runs now (previously, each block of repeated values
+caused a new run to be created).
+
+..
+
+.. date: 2024-03-07-16-12-39
+.. gh-issue: 114099
+.. nonce: ujdjn2
+.. section: Core and Builtins
+
+Added a Loader that can discover extension modules in an iOS-style
+Frameworks folder.
+
+..
+
+.. date: 2024-02-25-14-17-25
+.. gh-issue: 115775
+.. nonce: CNbGbJ
+.. section: Core and Builtins
+
+Compiler populates the new ``__static_attributes__`` field on a class with
+the names of attributes of this class which are accessed through self.X from
+any function in its body.
+
+..
+
+.. date: 2024-02-24-03-39-09
+.. gh-issue: 115776
+.. nonce: THJXqg
+.. section: Core and Builtins
+
+The array of values, the ``PyDictValues`` struct is now embedded in the
+object during allocation. This provides better performance in the common
+case, and does not degrade as much when the object's ``__dict__`` is
+materialized.
+
+..
+
+.. date: 2024-01-07-04-22-51
+.. gh-issue: 108362
+.. nonce: oB9Gcf
+.. section: Core and Builtins
+
+Implement an incremental cyclic garbage collector. By collecting the old
+generation in increments, there is no need for a full heap scan. This can
+hugely reduce maximum pause time for programs with large heaps.
+
+Reduce the number of generations from three to two. The old generation is
+split into two spaces, "visited" and "pending".
+
+Collection happens in two steps:
+
+* An increment is formed from the young generation and a small part of the
+  pending space.
+* This increment is scanned and the survivors moved to the end of the
+  visited space.
+
+When the collecting space becomes empty, the two spaces are swapped.
+
+..
+
+.. date: 2023-10-14-00-05-17
+.. gh-issue: 109870
+.. nonce: oKpJ3P
+.. section: Core and Builtins
+
+Dataclasses now calls :func:`exec` once per dataclass, instead of once per
+method being added. This can speed up dataclass creation by up to 20%.
+
+..
+
+.. date: 2022-10-05-09-33-48
+.. gh-issue: 97901
+.. nonce: BOLluU
+.. section: Core and Builtins
+
+Mime type ``text/rtf`` is now supported by :mod:`mimetypes`.
+
+..
+
+.. bpo: 24612
+.. date: 2021-09-04-22-33-01
+.. nonce: SsTuUX
+.. section: Core and Builtins
+
+Improve the :exc:`SyntaxError` that happens when 'not' appears after an
+operator. Patch by Pablo Galindo
+
+..
+
+.. date: 2024-04-03-18-36-53
+.. gh-issue: 117467
+.. nonce: l6rWlj
+.. section: Library
+
+Preserve mailbox ownership when rewriting in :func:`mailbox.mbox.flush`.
+Patch by Tony Mountifield.
+
+..
+
+.. date: 2024-04-02-20-30-12
+.. gh-issue: 114848
+.. nonce: YX4pEc
+.. section: Library
+
+Raise :exc:`FileNotFoundError` when ``getcwd()`` returns '(unreachable)',
+which can happen on Linux >= 2.6.36 with glibc < 2.27.
+
+..
+
+.. date: 2024-04-02-13-13-46
+.. gh-issue: 117459
+.. nonce: jiIZmH
+.. section: Library
+
+:func:`asyncio.run_coroutine_threadsafe` now keeps the traceback of
+:class:`CancelledError`, :class:`TimeoutError` and
+:class:`InvalidStateError` which are raised in the coroutine.
+
+..
+
+.. date: 2024-03-29-15-58-01
+.. gh-issue: 117337
+.. nonce: 7w3Qwp
+.. section: Library
+
+Deprecate undocumented :func:`!glob.glob0` and :func:`!glob.glob1`
+functions. Use :func:`glob.glob` and pass a directory to its *root_dir*
+argument instead.
+
+..
+
+.. date: 2024-03-29-12-07-26
+.. gh-issue: 117348
+.. nonce: WjCYvK
+.. section: Library
+
+Refactored :meth:`configparser.RawConfigParser._read` to reduce cyclomatic
+complexity and improve comprehensibility.
+
+..
+
+.. date: 2024-03-28-17-55-22
+.. gh-issue: 66449
+.. nonce: 4jhuEV
+.. section: Library
+
+:class:`configparser.ConfigParser` now accepts unnamed sections before named
+ones, if configured to do so.
+
+..
+
+.. date: 2024-03-28-13-54-20
+.. gh-issue: 88014
+.. nonce: zJz31I
+.. section: Library
+
+In documentation of :class:`gzip.GzipFile` in module gzip, explain data type
+of optional constructor argument *mtime*, and recommend ``mtime = 0`` for
+generating deterministic streams.
+
+..
+
+.. date: 2024-03-27-21-05-52
+.. gh-issue: 117310
+.. nonce: Bt2wox
+.. section: Library
+
+Fixed an unlikely early & extra ``Py_DECREF`` triggered crash in :mod:`ssl`
+when creating a new ``_ssl._SSLContext`` if CPython was built implausibly
+such that the default cipher list is empty **or** the SSL library it was
+linked against reports a failure from its C ``SSL_CTX_set_cipher_list()``
+API.
+
+..
+
+.. date: 2024-03-27-16-43-42
+.. gh-issue: 117294
+.. nonce: wbXNFv
+.. section: Library
+
+A ``DocTestCase`` now reports as skipped if all examples in the doctest are
+skipped.
+
+..
+
+.. date: 2024-03-26-11-48-39
+.. gh-issue: 98966
+.. nonce: SayV9y
+.. section: Library
+
+In :mod:`subprocess`, raise a more informative message when
+``stdout=STDOUT``.
+
+..
+
+.. date: 2024-03-25-21-15-56
+.. gh-issue: 117225
+.. nonce: oOaZXb
+.. section: Library
+
+doctest: only print "and X failed" when non-zero, don't pluralise "1 items".
+Patch by Hugo van Kemenade.
+
+..
+
+.. date: 2024-03-25-00-20-16
+.. gh-issue: 117205
+.. nonce: yV7xGb
+.. section: Library
+
+Speed up :func:`compileall.compile_dir` by 20% when using multiprocessing by
+increasing ``chunksize``.
+
+..
+
+.. date: 2024-03-23-14-26-18
+.. gh-issue: 117178
+.. nonce: vTisTG
+.. section: Library
+
+Fix regression in lazy loading of self-referential modules, introduced in
+gh-114781.
+
+..
+
+.. date: 2024-03-23-13-40-13
+.. gh-issue: 112383
+.. nonce: XuHf3G
+.. section: Library
+
+Fix :mod:`dis` module's handling of ``ENTER_EXECUTOR`` instructions.
+
+..
+
+.. date: 2024-03-23-12-28-05
+.. gh-issue: 117182
+.. nonce: a0KANW
+.. section: Library
+
+Lazy-loading of modules that modify their own ``__class__`` no longer
+reverts the ``__class__`` to :class:`types.ModuleType`.
+
+..
+
+.. date: 2024-03-21-17-07-38
+.. gh-issue: 117084
+.. nonce: w1mTpT
+.. section: Library
+
+Fix :mod:`zipfile` extraction for directory entries with the name containing
+backslashes on Windows.
+
+..
+
+.. date: 2024-03-21-07-27-36
+.. gh-issue: 117110
+.. nonce: 9K1InX
+.. section: Library
+
+Fix a bug that prevented subclasses of :class:`typing.Any` from being
+instantiated with arguments. Patch by Chris Fu.
+
+..
+
+.. date: 2024-03-20-23-07-58
+.. gh-issue: 109653
+.. nonce: uu3lrX
+.. section: Library
+
+Deferred select imports in importlib.metadata and importlib.resources for a
+14% speedup.
+
+..
+
+.. date: 2024-03-20-16-10-29
+.. gh-issue: 70647
+.. nonce: FpD6Ar
+.. section: Library
+
+Start the deprecation period for the current behavior of
+:func:`datetime.datetime.strptime` and :func:`time.strptime` which always
+fails to parse a date string with a :exc:`ValueError` involving a day of
+month such as ``strptime("02-29", "%m-%d")`` when a year is **not**
+specified and the date happen to be February 29th. This should help avoid
+users finding new bugs every four years due to a natural mistaken assumption
+about the API when parsing partial date values.
+
+..
+
+.. date: 2024-03-19-19-42-25
+.. gh-issue: 116987
+.. nonce: ZVKUH1
+.. section: Library
+
+Fixed :func:`inspect.findsource` for class code objects.
+
+..
+
+.. date: 2024-03-19-14-35-57
+.. gh-issue: 114099
+.. nonce: siNSpK
+.. section: Library
+
+Modify standard library to allow for iOS platform differences.
+
+..
+
+.. date: 2024-03-19-11-08-26
+.. gh-issue: 90872
+.. nonce: ghys95
+.. section: Library
+
+On Windows, :meth:`subprocess.Popen.wait` no longer calls
+``WaitForSingleObject()`` with a negative timeout: pass ``0`` ms if the
+timeout is negative. Patch by Victor Stinner.
+
+..
+
+.. date: 2024-03-18-14-36-50
+.. gh-issue: 116957
+.. nonce: dTCs4f
+.. section: Library
+
+configparser: Don't leave ConfigParser values in an invalid state (stored as
+a list instead of a str) after an earlier read raised DuplicateSectionError
+or DuplicateOptionError.
+
+..
+
+.. date: 2024-03-17-18-12-39
+.. gh-issue: 115538
+.. nonce: PBiRQB
+.. section: Library
+
+:class:`_io.WindowsConsoleIO` now emits a warning if a boolean value is
+passed as a file descriptor argument.
+
+..
+
+.. date: 2024-03-14-20-59-28
+.. gh-issue: 90095
+.. nonce: 7UaJ1U
+.. section: Library
+
+Ignore empty lines and comments in ``.pdbrc``
+
+..
+
+.. date: 2024-03-14-17-24-59
+.. gh-issue: 106531
+.. nonce: 9ehywi
+.. section: Library
+
+Refreshed zipfile._path from `zipp 3.18
+<https://github.com/jaraco/zipp>`_, providing
+better compatibility for PyPy, better glob performance for deeply nested
+zipfiles, and providing internal access to ``CompleteDirs.inject`` for use
+in other tests (like importlib.resources).
+
+..
+
+.. date: 2024-03-14-17-21-25
+.. gh-issue: 63207
+.. nonce: LV16SL
+.. section: Library
+
+On Windows, :func:`time.time()` now uses the
+``GetSystemTimePreciseAsFileTime()`` clock to have a resolution better than
+1 us, instead of the ``GetSystemTimeAsFileTime()`` clock which has a
+resolution of 15.6 ms. Patch by Victor Stinner.
+
+..
+
+.. date: 2024-03-14-14-01-46
+.. gh-issue: 116764
+.. nonce: moB3Lc
+.. section: Library
+
+Restore support of ``None`` and other false values in :mod:`urllib.parse`
+functions :func:`~urllib.parse.parse_qs` and
+:func:`~urllib.parse.parse_qsl`. Also, they now raise a TypeError for
+non-zero integers and non-empty sequences.
+
+..
+
+.. date: 2024-03-14-10-01-23
+.. gh-issue: 116811
+.. nonce: _h5iKP
+.. section: Library
+
+In ``PathFinder.invalidate_caches``, delegate to
+``MetadataPathFinder.invalidate_caches``.
+
+..
+
+.. date: 2024-03-14-09-38-51
+.. gh-issue: 116647
+.. nonce: h0d_zj
+.. section: Library
+
+Fix recursive child in dataclasses
+
+..
+
+.. date: 2024-03-14-01-38-44
+.. gh-issue: 113171
+.. nonce: VFnObz
+.. section: Library
+
+Fixed various false positives and false negatives in
+
+* :attr:`ipaddress.IPv4Address.is_private` (see these docs for details)
+* :attr:`ipaddress.IPv4Address.is_global`
+* :attr:`ipaddress.IPv6Address.is_private`
+* :attr:`ipaddress.IPv6Address.is_global`
+
+Also in the corresponding :class:`ipaddress.IPv4Network` and
+:class:`ipaddress.IPv6Network` attributes.
+
+..
+
+.. date: 2024-03-13-15-45-54
+.. gh-issue: 63283
+.. nonce: OToJnG
+.. section: Library
+
+In :mod:`encodings.idna`, any capitalization of the ACE prefix
+(``xn--``) is now acceptable. Patch by Pepijn de Vos and Zackery Spytz.
+
+..
+
+.. date: 2024-03-12-19-32-17
+.. gh-issue: 71042
+.. nonce: oI0Ron
+.. section: Library
+
+Add :func:`platform.android_ver`, which provides device and OS information
+on Android.
+
+..
+
+.. date: 2024-03-12-17-53-14
+.. gh-issue: 73468
+.. nonce: z4ZzvJ
+.. section: Library
+
+Added new :func:`math.fma` function, wrapping C99's ``fma()`` operation:
+fused multiply-add function. Patch by Mark Dickinson and Victor Stinner.
+
+..
+
+.. date: 2024-03-11-17-04-55
+.. gh-issue: 116608
+.. nonce: 30f58-
+.. section: Library
+
+The :mod:`importlib.resources` functions
+:func:`~importlib.resources.is_resource()`,
+:func:`~importlib.resources.open_binary()`,
+:func:`~importlib.resources.open_text()`,
+:func:`~importlib.resources.path()`,
+:func:`~importlib.resources.read_binary()`, and
+:func:`~importlib.resources.read_text()` are un-deprecated, and support
+subdirectories via multiple positional arguments. The
+:func:`~importlib.resources.contents()` function also allows subdirectories,
+but remains deprecated.
+
+..
+
+.. date: 2024-03-08-11-31-49
+.. gh-issue: 116484
+.. nonce: VMAsU7
+.. section: Library
+
+Change automatically generated :class:`tkinter.Checkbutton` widget names to
+avoid collisions with automatically generated
+:class:`tkinter.ttk.Checkbutton` widget names within the same parent widget.
+
+..
+
+.. date: 2024-03-07-11-10-27
+.. gh-issue: 114314
+.. nonce: iEhAMH
+.. section: Library
+
+In :mod:`ctypes`, ctype data is now stored in type objects directly rather
+than in a dict subclass. This is an internal change that should not affect
+usage.
+
+..
+
+.. date: 2024-03-06-18-30-37
+.. gh-issue: 116401
+.. nonce: 3Wcda2
+.. section: Library
+
+Fix blocking :func:`os.fwalk` and :func:`shutil.rmtree` on opening named
+pipe.
+
+..
+
+.. date: 2024-03-05-19-56-29
+.. gh-issue: 71052
+.. nonce: PMDK--
+.. section: Library
+
+Implement :func:`ctypes.util.find_library` on Android.
+
+..
+
+.. date: 2024-03-01-20-23-57
+.. gh-issue: 90535
+.. nonce: wXm-jC
+.. section: Library
+
+Fix support of *interval* values > 1 in
+:class:`logging.TimedRotatingFileHandler` for ``when='MIDNIGHT'`` and
+``when='Wx'``.
+
+..
+
+.. date: 2024-02-26-10-06-50
+.. gh-issue: 113308
+.. nonce: MbvOFt
+.. section: Library
+
+Remove some internal protected parts from :mod:`uuid`:
+``_has_uuid_generate_time_safe``, ``_netbios_getnode``,
+``_ipconfig_getnode``, and ``_load_system_functions``. They were unused.
+
+..
+
+.. date: 2024-02-18-09-50-31
+.. gh-issue: 115627
+.. nonce: HGchj0
+.. section: Library
+
+Fix the :mod:`ssl` module's error handling of a connection terminated by the
+peer. It now raises an :exc:`OSError` with the appropriate error code instead
+of an :exc:`EOFError`.
+
+..
+
+.. date: 2024-02-01-08-09-20
+.. gh-issue: 114847
+.. nonce: -JrWrR
+.. section: Library
+
+Speed up :func:`os.path.realpath` on non-Windows platforms.
+
+..
+
+.. date: 2024-02-01-03-09-38
+.. gh-issue: 114271
+.. nonce: raCkt5
+.. section: Library
+
+Fix a race in ``threading.Thread.join()``.
+
+``threading._MainThread`` now always represents the main thread of the main
+interpreter.
+
+``PyThreadState.on_delete`` and ``PyThreadState.on_delete_data`` have been
+removed.
+
+..
+
+.. date: 2024-01-22-15-50-58
+.. gh-issue: 113538
+.. nonce: v2wrwg
+.. section: Library
+
+Add :meth:`asyncio.Server.close_clients` and
+:meth:`asyncio.Server.abort_clients` methods, which allow an asyncio server
+to be closed more forcefully.
+
+..
+
+.. date: 2024-01-02-22-47-12
+.. gh-issue: 85287
+.. nonce: ZC5DLj
+.. section: Library
+
+Changes Unicode codecs to return UnicodeEncodeError or UnicodeDecodeError,
+rather than just UnicodeError.
+
+..
+
+.. date: 2023-12-28-22-52-45
+.. gh-issue: 113548
+.. nonce: j6TJ7O
+.. section: Library
+
+:mod:`pdb` now allows CLI arguments to ``pdb -m``.
+
+..
+
+.. date: 2023-12-11-00-51-51
+.. gh-issue: 112948
+.. nonce: k-OKp5
+.. section: Library
+
+Make completion of :mod:`pdb` similar to Python REPL
+
+..
+
+.. date: 2023-06-16-19-17-06
+.. gh-issue: 105866
+.. nonce: 0NBveV
+.. section: Library
+
+Fixed ``_get_slots`` bug which caused error when defining dataclasses with
+slots and a weakref_slot.
+
+..
+
+.. date: 2023-05-06-05-00-42
+.. gh-issue: 96471
+.. nonce: S3X5I-
+.. section: Library
+
+Add :py:class:`asyncio.Queue` termination with
+:py:meth:`~asyncio.Queue.shutdown` method.
+
+..
+
+.. date: 2022-06-22-14-45-32
+.. gh-issue: 89739
+.. nonce: CqZcRL
+.. section: Library
+
+The :mod:`zipimport` module can now read ZIP64 files.
+
+..
+
+.. bpo: 33533
+.. date: 2020-10-02-17-35-19
+.. nonce: GLIhM5
+.. section: Library
+
+:func:`asyncio.as_completed` now returns an object that is both an
+asynchronous iterator and plain iterator. The new asynchronous iteration
+pattern allows for easier correlation between prior tasks and their
+completed results. This is a closer match to
+:func:`concurrent.futures.as_completed`'s iteration pattern. Patch by Justin
+Arthur.
+
+..
+
+.. bpo: 27578
+.. date: 2020-06-11-16-20-33
+.. nonce: CIA-fu
+.. section: Library
+
+:func:`inspect.getsource` (and related functions) work with empty module
+files, returning ``'\n'`` (or reasonable equivalent) instead of raising
+``OSError``. Patch by Kernc.
+
+..
+
+.. bpo: 37141
+.. date: 2019-09-26-17-52-52
+.. nonce: onYY2-
+.. section: Library
+
+Accept an iterable of separators in :meth:`asyncio.StreamReader.readuntil`,
+stopping when one of them is encountered.
+
+..
+
+.. date: 2019-08-27-01-03-26
+.. gh-issue: 66543
+.. nonce: _TRpYr
+.. section: Library
+
+Make :func:`mimetypes.guess_type` properly parse URLs with only a host
+name, URLs containing fragment or query, and filenames with only a UNC
+sharepoint on Windows. Based on patch by Dong-hee Na.
+
+..
+
+.. bpo: 15010
+.. date: 2019-08-12-19-08-06
+.. nonce: 3bY2CF
+.. section: Library
+
+:meth:`unittest.TestLoader.discover` now saves the original value of
+``unittest.TestLoader._top_level_dir`` and restores it at the end of the
+call.
+
+..
+
+.. date: 2024-03-20-15-12-37
+.. gh-issue: 115977
+.. nonce: IMLi6K
+.. section: Documentation
+
+Remove compatibility references to Emscripten.
+
+..
+
+.. date: 2024-03-20-12-41-47
+.. gh-issue: 114099
+.. nonce: ad_Ck9
+.. section: Documentation
+
+Add an iOS platform guide, and flag modules not available on iOS.
+
+..
+
+.. date: 2022-04-15-13-15-23
+.. gh-issue: 91565
+.. nonce: OznXwC
+.. section: Documentation
+
+Changes to documentation files and config outputs to reflect the new
+location for reporting bugs - i.e. GitHub rather than bugs.python.org.
+
+..
+
+.. date: 2024-03-25-21-31-49
+.. gh-issue: 83434
+.. nonce: U7Z8cY
+.. section: Tests
+
+Disable JUnit XML output (``--junit-xml=FILE`` command line option) in
+regrtest when hunting for reference leaks (``-R`` option). Patch by Victor
+Stinner.
+
+..
+
+.. date: 2024-03-24-23-49-25
+.. gh-issue: 117187
+.. nonce: eMLT5n
+.. section: Tests
+
+Fix XML tests for vanilla Expat <2.6.0.
+
+..
+
+.. date: 2024-03-21-11-32-29
+.. gh-issue: 116333
+.. nonce: F-9Ram
+.. section: Tests
+
+Tests of TLS related things (error codes, etc) were updated to be more
+lenient about specific error message strings and behaviors as seen in the
+BoringSSL and AWS-LC forks of OpenSSL.
+
+..
+
+.. date: 2024-03-20-14-19-32
+.. gh-issue: 117089
+.. nonce: WwR1Z1
+.. section: Tests
+
+Consolidated tests for importlib.metadata in their own ``metadata`` package.
+
+..
+
+.. date: 2024-03-13-12-06-49
+.. gh-issue: 115979
+.. nonce: zsNpQD
+.. section: Tests
+
+Update test_importlib so that it passes under WASI SDK 21.
+
+..
+
+.. date: 2024-03-11-23-20-28
+.. gh-issue: 112536
+.. nonce: Qv1RrX
+.. section: Tests
+
+Add --tsan to test.regrtest for running TSAN tests in reasonable execution
+times. Patch by Donghee Na.
+
+..
+
+.. date: 2024-03-06-11-00-36
+.. gh-issue: 116307
+.. nonce: Uij0t_
+.. section: Tests
+
+Added import helper ``isolated_modules`` as ``CleanImport`` does not remove
+modules imported during the context. Use it in importlib.resources tests to
+avoid leaving ``mod`` around to impede importlib.metadata tests.
+
+..
+
+.. date: 2024-03-13-16-16-43
+.. gh-issue: 114736
+.. nonce: ZhmauG
+.. section: Build
+
+Have WASI builds use WASI SDK 21.
+
+..
+
+.. date: 2024-03-08-17-05-15
+.. gh-issue: 115983
+.. nonce: ZQqk0Q
+.. section: Build
+
+Skip building test modules that must be built as shared under WASI.
+
+..
+
+.. date: 2024-03-06-17-26-55
+.. gh-issue: 71052
+.. nonce: vLbu9u
+.. section: Build
+
+Add Android build script and instructions.
+
+..
+
+.. date: 2024-03-28-22-12-00
+.. gh-issue: 117267
+.. nonce: K_tki1
+.. section: Windows
+
+Ensure ``DirEntry.stat().st_ctime`` behaves consistently with
+:func:`os.stat` during the deprecation period of ``st_ctime`` by containing
+the same value as ``st_birthtime``. After the deprecation period,
+``st_ctime`` will be the metadata change time (or unavailable through
+``DirEntry``), and only ``st_birthtime`` will contain the creation time.
+
+..
+
+.. date: 2024-03-14-20-46-23
+.. gh-issue: 116195
+.. nonce: Cu_rYs
+.. section: Windows
+
+Improves performance of :func:`os.getppid` by using an alternate system API
+when available. Contributed by vxiiduu.
+
+..
+
+.. date: 2024-03-14-09-14-21
+.. gh-issue: 88494
+.. nonce: Bwfmp7
+.. section: Windows
+
+On Windows, :func:`time.monotonic()` now uses the
+``QueryPerformanceCounter()`` clock to have a resolution better than 1 us,
+instead of the ``GetTickCount64()`` clock which has a resolution of 15.6 ms.
+Patch by Victor Stinner.
+
+..
+
+.. date: 2024-03-14-01-58-22
+.. gh-issue: 116773
+.. nonce: H2UldY
+.. section: Windows
+
+Fix instances of ``<_overlapped.Overlapped object at 0xXXX> still has
+pending operation at deallocation, the process may crash``.
+
+..
+
+.. date: 2024-02-24-23-03-43
+.. gh-issue: 91227
+.. nonce: sL4zWC
+.. section: Windows
+
+Fix the asyncio ProactorEventLoop implementation so that sending a datagram
+to an address that is not listening does not prevent receiving any more
+datagrams.
+
+..
+
+.. date: 2024-02-08-14-48-15
+.. gh-issue: 115119
+.. nonce: qMt32O
+.. section: Windows
+
+Switched from vendored ``libmpdecimal`` code to a separately-hosted external
+package in the ``cpython-source-deps`` repository when building the
+``_decimal`` module.
+
+..
+
+.. date: 2024-04-08-18-53-33
+.. gh-issue: 117642
+.. nonce: _-tYH_
+.. section: C API
+
+Fix :pep:`737` implementation for ``%#T`` and ``%#N``.
+
+..
+
+.. date: 2024-03-22-19-29-24
+.. gh-issue: 87193
+.. nonce: u7O-jY
+.. section: C API
+
+:c:func:`_PyBytes_Resize` can now be called for bytes objects with reference
+count > 1, including 1-byte bytes objects. It creates a new bytes object and
+destroys the old one if it has reference count > 1.
+
+..
+
+.. date: 2024-03-20-13-13-22
+.. gh-issue: 117021
+.. nonce: 0Q5jBx
+.. section: C API
+
+Fix integer overflow in :c:func:`PyLong_AsPid` on non-Windows 64-bit
+platforms.
+
+..
+
+.. date: 2024-03-19-09-49-04
+.. gh-issue: 115756
+.. nonce: 4Ls_Tl
+.. section: C API
+
+:c:func:`!PyCode_GetFirstFree` is an unstable API now and has been renamed to
+:c:func:`PyUnstable_Code_GetFirstFree`. (Contributed by Bogdan Romanyuk in
+:gh:`115781`)
+
+..
+
+.. date: 2024-03-18-10-58-47
+.. gh-issue: 116869
+.. nonce: lN0GBl
+.. section: C API
+
+Add ``test_cext`` test: build a C extension to check if the Python C API
+emits C compiler warnings. Patch by Victor Stinner.
+
+..
+
+.. date: 2024-03-18-09-58-46
+.. gh-issue: 116869
+.. nonce: LFDVKM
+.. section: C API
+
+Make the C API compatible with ``-Werror=declaration-after-statement``
+compiler flag again. Patch by Victor Stinner.
+
+..
+
+.. date: 2024-03-17-22-42-21
+.. gh-issue: 116936
+.. nonce: tNrzfm
+.. section: C API
+
+Add :c:func:`PyType_GetModuleByDef` to the limited C API. Patch by Victor
+Stinner.
+
+..
+
+.. date: 2024-03-16-12-21-00
+.. gh-issue: 116809
+.. nonce: JL786L
+.. section: C API
+
+Restore removed private ``_PyErr_ChainExceptions1()`` function. Patch by
+Victor Stinner.
+
+..
+
+.. date: 2024-03-15-23-57-33
+.. gh-issue: 115754
+.. nonce: zLdv82
+.. section: C API
+
+In the limited C API version 3.13, getting ``Py_None``, ``Py_False``,
+``Py_True``, ``Py_Ellipsis`` and ``Py_NotImplemented`` singletons is now
+implemented as function calls at the stable ABI level to hide implementation
+details. Getting these constants still return borrowed references. Patch by
+Victor Stinner.
+
+..
+
+.. date: 2024-03-15-23-55-24
+.. gh-issue: 115754
+.. nonce: xnzc__
+.. section: C API
+
+Add :c:func:`Py_GetConstant` and :c:func:`Py_GetConstantBorrowed` functions
+to get constants. For example, ``Py_GetConstant(Py_CONSTANT_ZERO)`` returns
+a :term:`strong reference` to the constant zero. Patch by Victor Stinner.
+
+..
+
+.. date: 2024-03-14-22-30-07
+.. gh-issue: 111696
+.. nonce: 76UMKi
+.. section: C API
+
+Add support for ``%T``, ``%T#``, ``%N`` and ``%N#`` formats to
+:c:func:`PyUnicode_FromFormat`: format the fully qualified name of an object
+type and of a type: call :c:func:`PyType_GetModuleName`. See :pep:`737` for
+more information. Patch by Victor Stinner.
+
+..
+
+.. date: 2024-03-14-18-00-32
+.. gh-issue: 111696
+.. nonce: L6oIPq
+.. section: C API
+
+Add :c:func:`PyType_GetModuleName` function to get the type's module name.
+Equivalent to getting the ``type.__module__`` attribute. Patch by Eric Snow
+and Victor Stinner.
+
+..
+
+.. date: 2024-03-14-15-17-11
+.. gh-issue: 111696
+.. nonce: YmnvAi
+.. section: C API
+
+Add :c:func:`PyType_GetFullyQualifiedName` function to get the type's fully
+qualified name. Equivalent to ``f"{type.__module__}.{type.__qualname__}"``,
+or ``type.__qualname__`` if ``type.__module__`` is not a string or is equal
+to ``"builtins"``. Patch by Victor Stinner.
+
+..
+
+.. date: 2024-03-14-10-33-58
+.. gh-issue: 85283
+.. nonce: LOgmdU
+.. section: C API
+
+The ``fcntl``, ``grp``, ``pwd``, ``termios``, ``_statistics`` and
+``_testconsole`` C extensions are now built with the :ref:`limited C API
+<limited-c-api>`. Patch by Victor Stinner.
+
+..
+
+.. date: 2024-02-28-15-50-01
+.. gh-issue: 111140
+.. nonce: mpwcUg
+.. section: C API
+
+Add additional flags to :c:func:`PyLong_AsNativeBytes` and
+:c:func:`PyLong_FromNativeBytes` to allow the caller to determine how to
+handle edge cases around values that fill the entire buffer.
+
+..
+
+.. date: 2023-12-12-19-48-31
+.. gh-issue: 113024
+.. nonce: rXcQs7
+.. section: C API
+
+Add :c:func:`PyObject_GenericHash` function.
diff --git a/Misc/NEWS.d/3.5.0a1.rst b/Misc/NEWS.d/3.5.0a1.rst
index 26b3d8253dbdee..5244db107a73da 100644
--- a/Misc/NEWS.d/3.5.0a1.rst
+++ b/Misc/NEWS.d/3.5.0a1.rst
@@ -1284,7 +1284,7 @@ Add function :func:`sys.is_finalizing` to know about interpreter shutdown.
.. section: Library
Add a default limit for the amount of data xmlrpclib.gzip_decode will
-return. This resolves CVE-2013-1753.
+return. This resolves :cve:`2013-1753`.
..
@@ -4030,7 +4030,7 @@ unittest.mock.MagicMock now supports division. Patch by Johannes Baiter.
.. section: Library
Fix arbitrary memory access in JSONDecoder.raw_decode with a negative second
-parameter. Bug reported by Guido Vranken. (See also: CVE-2014-4616)
+parameter. Bug reported by Guido Vranken. (See also: :cve:`2014-4616`)
..
diff --git a/Misc/NEWS.d/3.5.2rc1.rst b/Misc/NEWS.d/3.5.2rc1.rst
index a7e5c1b130f9e9..f9409b62e352ac 100644
--- a/Misc/NEWS.d/3.5.2rc1.rst
+++ b/Misc/NEWS.d/3.5.2rc1.rst
@@ -5,7 +5,7 @@
.. original section: Library
.. section: Security
-Update expat to 2.1.1, fixes CVE-2015-1283.
+Update expat to 2.1.1, fixes :cve:`2015-1283`.
..
@@ -15,8 +15,8 @@ Update expat to 2.1.1, fixes CVE-2015-1283.
.. original section: Library
.. section: Security
-Fix TLS stripping vulnerability in smtplib, CVE-2016-0772. Reported by Team
-Oststrom
+Fix TLS stripping vulnerability in smtplib, :cve:`2016-0772`. Reported by Team
+Oststrom.
..
diff --git a/Misc/NEWS.d/3.5.3rc1.rst b/Misc/NEWS.d/3.5.3rc1.rst
index bf4ef9302c9d1d..2424604249a65c 100644
--- a/Misc/NEWS.d/3.5.3rc1.rst
+++ b/Misc/NEWS.d/3.5.3rc1.rst
@@ -1048,7 +1048,7 @@ certs.
.. section: Library
Remove 3DES from ssl module's default cipher list to counter measure sweet32
-attack (CVE-2016-2183).
+attack (:cve:`2016-2183`).
..
@@ -1251,7 +1251,7 @@ Fix possible integer overflow in the _csv module for large record lengths.
.. nonce: OnuO9s
.. section: Library
-Prevent HTTPoxy attack (CVE-2016-1000110). Ignore the HTTP_PROXY variable
+Prevent HTTPoxy attack (:cve:`2016-1000110`). Ignore the HTTP_PROXY variable
when REQUEST_METHOD environment is set, which indicates that the script is
in CGI mode.
diff --git a/Misc/NEWS.d/3.5.4rc1.rst b/Misc/NEWS.d/3.5.4rc1.rst
index d65d5d14ee78bb..d5a85b3a2d8666 100644
--- a/Misc/NEWS.d/3.5.4rc1.rst
+++ b/Misc/NEWS.d/3.5.4rc1.rst
@@ -17,10 +17,10 @@ passing other environment variables and command arguments.
.. section: Security
Upgrade expat copy from 2.2.0 to 2.2.1 to get fixes of multiple security
-vulnerabilities including: CVE-2017-9233 (External entity infinite loop
-DoS), CVE-2016-9063 (Integer overflow, re-fix), CVE-2016-0718 (Fix
-regression bugs from 2.2.0's fix to CVE-2016-0718) and CVE-2012-0876
-(Counter hash flooding with SipHash). Note: the CVE-2016-5300 (Use
+vulnerabilities including: :cve:`2017-9233` (External entity infinite loop
+DoS), :cve:`2016-9063` (Integer overflow, re-fix), :cve:`2016-0718` (Fix
+regression bugs from 2.2.0's fix to :cve:`2016-0718`) and :cve:`2012-0876`
+(Counter hash flooding with SipHash). Note: the :cve:`2016-5300` (Use
os-specific entropy sources like getrandom) doesn't impact Python, since Python
already gets entropy from the OS to set the expat secret using
``XML_SetHashSalt()``.
@@ -46,8 +46,8 @@ authentication (``login@host``).
.. original section: Library
.. section: Security
-Update expat copy from 2.1.1 to 2.2.0 to get fixes of CVE-2016-0718 and
-CVE-2016-4472. See https://sourceforge.net/p/expat/bugs/537/ for more
+Update expat copy from 2.1.1 to 2.2.0 to get fixes of :cve:`2016-0718` and
+:cve:`2016-4472`. See https://sourceforge.net/p/expat/bugs/537/ for more
information.
..
diff --git a/Misc/NEWS.d/3.5.5rc1.rst b/Misc/NEWS.d/3.5.5rc1.rst
index 9ccbf7b8060cd4..4a44840039e388 100644
--- a/Misc/NEWS.d/3.5.5rc1.rst
+++ b/Misc/NEWS.d/3.5.5rc1.rst
@@ -24,7 +24,7 @@ also be affected)
.. nonce: Fd8kId
.. section: Security
-Fixed possible integer overflow in PyBytes_DecodeEscape, CVE-2017-1000158.
+Fixed possible integer overflow in PyBytes_DecodeEscape, :cve:`2017-1000158`.
Original patch by Jay Bosamiya; rebased to Python 3 by Miro Hrončok.
..
diff --git a/Misc/NEWS.d/3.6.0a2.rst b/Misc/NEWS.d/3.6.0a2.rst
index 05b3d9f0463c1c..89d68ab3f8078f 100644
--- a/Misc/NEWS.d/3.6.0a2.rst
+++ b/Misc/NEWS.d/3.6.0a2.rst
@@ -5,7 +5,7 @@
.. original section: Library
.. section: Security
-Update expat to 2.1.1, fixes CVE-2015-1283.
+Update expat to 2.1.1, fixes :cve:`2015-1283`.
..
@@ -15,7 +15,7 @@ Update expat to 2.1.1, fixes CVE-2015-1283.
.. original section: Library
.. section: Security
-Fix TLS stripping vulnerability in smtplib, CVE-2016-0772. Reported by Team
+Fix TLS stripping vulnerability in smtplib, :cve:`2016-0772`. Reported by Team
Oststrom.
..
diff --git a/Misc/NEWS.d/3.6.0a4.rst b/Misc/NEWS.d/3.6.0a4.rst
index d613fd5d928b65..3abbdecb57038b 100644
--- a/Misc/NEWS.d/3.6.0a4.rst
+++ b/Misc/NEWS.d/3.6.0a4.rst
@@ -359,7 +359,7 @@ Fix possible integer overflow in the _csv module for large record lengths.
.. nonce: OnuO9s
.. section: Library
-Prevent HTTPoxy attack (CVE-2016-1000110). Ignore the HTTP_PROXY variable
+Prevent HTTPoxy attack (:cve:`2016-1000110`). Ignore the HTTP_PROXY variable
when REQUEST_METHOD environment is set, which indicates that the script is
in CGI mode.
diff --git a/Misc/NEWS.d/3.6.0b1.rst b/Misc/NEWS.d/3.6.0b1.rst
index 4fb6bdd6f89c9b..bd54cf601d053b 100644
--- a/Misc/NEWS.d/3.6.0b1.rst
+++ b/Misc/NEWS.d/3.6.0b1.rst
@@ -949,7 +949,7 @@ Add scrypt (password-based key derivation function) to hashlib module
.. section: Library
Remove 3DES from ssl module's default cipher list to counter measure sweet32
-attack (CVE-2016-2183).
+attack (:cve:`2016-2183`).
..
diff --git a/Misc/NEWS.d/3.6.2rc1.rst b/Misc/NEWS.d/3.6.2rc1.rst
index 28eb88f79130c5..8e28bc9691921b 100644
--- a/Misc/NEWS.d/3.6.2rc1.rst
+++ b/Misc/NEWS.d/3.6.2rc1.rst
@@ -5,8 +5,8 @@
.. original section: Library
.. section: Security
-Update expat copy from 2.1.1 to 2.2.0 to get fixes of CVE-2016-0718 and
-CVE-2016-4472. See https://sourceforge.net/p/expat/bugs/537/ for more
+Update expat copy from 2.1.1 to 2.2.0 to get fixes of :cve:`2016-0718` and
+:cve:`2016-4472`. See https://sourceforge.net/p/expat/bugs/537/ for more
information.
..
diff --git a/Misc/NEWS.d/3.6.2rc2.rst b/Misc/NEWS.d/3.6.2rc2.rst
index 8c6545f6dbbeec..5ae7425828b692 100644
--- a/Misc/NEWS.d/3.6.2rc2.rst
+++ b/Misc/NEWS.d/3.6.2rc2.rst
@@ -17,10 +17,10 @@ passing other environment variables and command arguments.
.. section: Security
Upgrade expat copy from 2.2.0 to 2.2.1 to get fixes of multiple security
-vulnerabilities including: CVE-2017-9233 (External entity infinite loop
-DoS), CVE-2016-9063 (Integer overflow, re-fix), CVE-2016-0718 (Fix
-regression bugs from 2.2.0's fix to CVE-2016-0718) and CVE-2012-0876
-(Counter hash flooding with SipHash). Note: the CVE-2016-5300 (Use
+vulnerabilities including: :cve:`2017-9233` (External entity infinite loop
+DoS), :cve:`2016-9063` (Integer overflow, re-fix), :cve:`2016-0718` (Fix
+regression bugs from 2.2.0's fix to :cve:`2016-0718`) and :cve:`2012-0876`
+(Counter hash flooding with SipHash). Note: the :cve:`2016-5300` (Use
os-specific entropy sources like getrandom) doesn't impact Python, since
Python already gets entropy from the OS to set the expat secret using
``XML_SetHashSalt()``.
diff --git a/Misc/NEWS.d/3.6.5rc1.rst b/Misc/NEWS.d/3.6.5rc1.rst
index 056bacb5267c41..3d14cc49049c8f 100644
--- a/Misc/NEWS.d/3.6.5rc1.rst
+++ b/Misc/NEWS.d/3.6.5rc1.rst
@@ -15,7 +15,7 @@ Minimal fix to prevent buffer overrun in os.symlink on Windows
Regexes in difflib and poplib were vulnerable to catastrophic backtracking.
These regexes formed potential DOS vectors (REDOS). They have been
-refactored. This resolves CVE-2018-1060 and CVE-2018-1061. Patch by Jamie
+refactored. This resolves :cve:`2018-1060` and :cve:`2018-1061`. Patch by Jamie
Davis.
..
diff --git a/Misc/NEWS.d/3.7.0a1.rst b/Misc/NEWS.d/3.7.0a1.rst
index aca79c4cc8c1b8..58d51c420a10ae 100644
--- a/Misc/NEWS.d/3.7.0a1.rst
+++ b/Misc/NEWS.d/3.7.0a1.rst
@@ -46,10 +46,10 @@ passing other environment variables and command arguments.
.. section: Security
Upgrade expat copy from 2.2.0 to 2.2.1 to get fixes of multiple security
-vulnerabilities including: CVE-2017-9233 (External entity infinite loop
-DoS), CVE-2016-9063 (Integer overflow, re-fix), CVE-2016-0718 (Fix
-regression bugs from 2.2.0's fix to CVE-2016-0718) and CVE-2012-0876
-(Counter hash flooding with SipHash). Note: the CVE-2016-5300 (Use
+vulnerabilities including: :cve:`2017-9233` (External entity infinite loop
+DoS), :cve:`2016-9063` (Integer overflow, re-fix), :cve:`2016-0718` (Fix
+regression bugs from 2.2.0's fix to :cve:`2016-0718`) and :cve:`2012-0876`
+(Counter hash flooding with SipHash). Note: the :cve:`2016-5300` (Use
os-specific entropy sources like getrandom) doesn't impact Python, since Python
already gets entropy from the OS to set the expat secret using
``XML_SetHashSalt()``.
@@ -75,8 +75,8 @@ authentication (``login@host``).
.. original section: Library
.. section: Security
-Update expat copy from 2.1.1 to 2.2.0 to get fixes of CVE-2016-0718 and
-CVE-2016-4472. See https://sourceforge.net/p/expat/bugs/537/ for more
+Update expat copy from 2.1.1 to 2.2.0 to get fixes of :cve:`2016-0718` and
+:cve:`2016-4472`. See https://sourceforge.net/p/expat/bugs/537/ for more
information.
..
diff --git a/Misc/NEWS.d/3.7.0b3.rst b/Misc/NEWS.d/3.7.0b3.rst
index c86963b7e42daf..a0c4cb15dc2b40 100644
--- a/Misc/NEWS.d/3.7.0b3.rst
+++ b/Misc/NEWS.d/3.7.0b3.rst
@@ -4,7 +4,7 @@
.. release date: 2018-03-29
.. section: Security
-Harden ssl module against LibreSSL CVE-2018-8970.
+Harden ssl module against LibreSSL :cve:`2018-8970`.
X509_VERIFY_PARAM_set1_host() is called with an explicit namelen. A new test
ensures that NULL bytes are not allowed.
@@ -26,7 +26,7 @@ Minimal fix to prevent buffer overrun in os.symlink on Windows
Regexes in difflib and poplib were vulnerable to catastrophic backtracking.
These regexes formed potential DOS vectors (REDOS). They have been
-refactored. This resolves CVE-2018-1060 and CVE-2018-1061. Patch by Jamie
+refactored. This resolves :cve:`2018-1060` and :cve:`2018-1061`. Patch by Jamie
Davis.
..
diff --git a/Misc/NEWS.d/3.8.0a1.rst b/Misc/NEWS.d/3.8.0a1.rst
index bd9061601fe190..1964a8329979f5 100644
--- a/Misc/NEWS.d/3.8.0a1.rst
+++ b/Misc/NEWS.d/3.8.0a1.rst
@@ -4,7 +4,7 @@
.. release date: 2019-02-03
.. section: Security
-[CVE-2019-5010] Fix a NULL pointer deref in ssl module. The cert parser did
+:cve:`2019-5010`: Fix a NULL pointer deref in ssl module. The cert parser did
not handle CRL distribution points with empty DP or URI correctly. A
malicious or buggy certificate can result into segfault. Vulnerability
(TALOS-2018-0758) reported by Colin Read and Nicolas Edet of Cisco.
@@ -50,7 +50,7 @@ files or create network connections.
.. nonce: Ua9jMv
.. section: Security
-CVE-2018-14647: The C accelerated _elementtree module now initializes hash
+:cve:`2018-14647`: The C accelerated _elementtree module now initializes hash
randomization salt from _Py_HashSecret instead of libexpat's default CSPRNG.
..
@@ -89,7 +89,7 @@ Fixed thread-safety of error handling in _ssl.
.. nonce: TzSN4x
.. section: Security
-Harden ssl module against LibreSSL CVE-2018-8970.
+Harden ssl module against LibreSSL :cve:`2018-8970`.
X509_VERIFY_PARAM_set1_host() is called with an explicit namelen. A new test
ensures that NULL bytes are not allowed.
@@ -111,7 +111,7 @@ Minimal fix to prevent buffer overrun in os.symlink on Windows
Regexes in difflib and poplib were vulnerable to catastrophic backtracking.
These regexes formed potential DOS vectors (REDOS). They have been
-refactored. This resolves CVE-2018-1060 and CVE-2018-1061. Patch by Jamie
+refactored. This resolves :cve:`2018-1060` and :cve:`2018-1061`. Patch by Jamie
Davis.
..
diff --git a/Misc/NEWS.d/3.8.0a4.rst b/Misc/NEWS.d/3.8.0a4.rst
index fa5eb697d9202d..38fa1324dceb40 100644
--- a/Misc/NEWS.d/3.8.0a4.rst
+++ b/Misc/NEWS.d/3.8.0a4.rst
@@ -13,7 +13,7 @@ Fixes mishandling of pre-normalization characters in urlsplit().
.. nonce: 51E-DA
.. section: Security
-Address CVE-2019-9740 by disallowing URL paths with embedded whitespace or
+Address :cve:`2019-9740` by disallowing URL paths with embedded whitespace or
control characters through into the underlying http client request. Such
potentially malicious header injection URLs now cause an
http.client.InvalidURL exception to be raised.
diff --git a/Misc/NEWS.d/3.8.0b1.rst b/Misc/NEWS.d/3.8.0b1.rst
index 4eb0c0451e97b5..4174ab8fac6192 100644
--- a/Misc/NEWS.d/3.8.0b1.rst
+++ b/Misc/NEWS.d/3.8.0b1.rst
@@ -4,7 +4,7 @@
.. release date: 2019-06-04
.. section: Security
-CVE-2019-9948: Avoid file reading by disallowing ``local-file://`` and
+:cve:`2019-9948`: Avoid file reading by disallowing ``local-file://`` and
``local_file://`` URL schemes in ``URLopener().open()`` and
``URLopener().retrieve()`` of :mod:`urllib.request`.
diff --git a/Misc/NEWS.d/3.9.0a1.rst b/Misc/NEWS.d/3.9.0a1.rst
index 66d7fc1f32e705..8f38f04eb41798 100644
--- a/Misc/NEWS.d/3.9.0a1.rst
+++ b/Misc/NEWS.d/3.9.0a1.rst
@@ -44,7 +44,7 @@ rendering the document page as HTML. (Contributed by Donghee Na in
.. section: Security
Update vendorized expat library version to 2.2.8, which resolves
-CVE-2019-15903.
+:cve:`2019-15903`.
..
diff --git a/Misc/NEWS.d/3.9.0a5.rst b/Misc/NEWS.d/3.9.0a5.rst
index f0015ac54df307..7f7480539f2f1b 100644
--- a/Misc/NEWS.d/3.9.0a5.rst
+++ b/Misc/NEWS.d/3.9.0a5.rst
@@ -5,7 +5,7 @@
.. section: Security
Disallow control characters in hostnames in http.client, addressing
-CVE-2019-18348. Such potentially malicious header injection URLs now cause a
+:cve:`2019-18348`. Such potentially malicious header injection URLs now cause a
InvalidURL to be raised.
..
diff --git a/Misc/NEWS.d/3.9.0a6.rst b/Misc/NEWS.d/3.9.0a6.rst
index 366a260172efb8..26a6fb98efdc36 100644
--- a/Misc/NEWS.d/3.9.0a6.rst
+++ b/Misc/NEWS.d/3.9.0a6.rst
@@ -23,7 +23,7 @@ header injection attacks.
.. nonce: B299Yq
.. section: Security
-CVE-2020-8492: The :class:`~urllib.request.AbstractBasicAuthHandler` class
+:cve:`2020-8492`: The :class:`~urllib.request.AbstractBasicAuthHandler` class
of the :mod:`urllib.request` module uses an inefficient regular expression
which can be exploited by an attacker to cause a denial of service. Fix the
regex to prevent the catastrophic backtracking. Vulnerability reported by
diff --git a/Misc/NEWS.d/next/Build/2024-03-06-17-26-55.gh-issue-71052.vLbu9u.rst b/Misc/NEWS.d/next/Build/2024-03-06-17-26-55.gh-issue-71052.vLbu9u.rst
deleted file mode 100644
index 53776c0216f553..00000000000000
--- a/Misc/NEWS.d/next/Build/2024-03-06-17-26-55.gh-issue-71052.vLbu9u.rst
+++ /dev/null
@@ -1 +0,0 @@
-Add Android build script and instructions.
diff --git a/Misc/NEWS.d/next/Build/2024-03-08-17-05-15.gh-issue-115983.ZQqk0Q.rst b/Misc/NEWS.d/next/Build/2024-03-08-17-05-15.gh-issue-115983.ZQqk0Q.rst
deleted file mode 100644
index a8d39921d59092..00000000000000
--- a/Misc/NEWS.d/next/Build/2024-03-08-17-05-15.gh-issue-115983.ZQqk0Q.rst
+++ /dev/null
@@ -1 +0,0 @@
-Skip building test modules that must be built as shared under WASI.
diff --git a/Misc/NEWS.d/next/Build/2024-03-13-16-16-43.gh-issue-114736.ZhmauG.rst b/Misc/NEWS.d/next/Build/2024-03-13-16-16-43.gh-issue-114736.ZhmauG.rst
deleted file mode 100644
index cc863c3a3ceb48..00000000000000
--- a/Misc/NEWS.d/next/Build/2024-03-13-16-16-43.gh-issue-114736.ZhmauG.rst
+++ /dev/null
@@ -1 +0,0 @@
-Have WASI builds use WASI SDK 21.
diff --git a/Misc/NEWS.d/next/C API/2023-12-12-19-48-31.gh-issue-113024.rXcQs7.rst b/Misc/NEWS.d/next/C API/2023-12-12-19-48-31.gh-issue-113024.rXcQs7.rst
deleted file mode 100644
index 60ed6e64c3b6b8..00000000000000
--- a/Misc/NEWS.d/next/C API/2023-12-12-19-48-31.gh-issue-113024.rXcQs7.rst
+++ /dev/null
@@ -1 +0,0 @@
-Add :c:func:`PyObject_GenericHash` function.
diff --git a/Misc/NEWS.d/next/C API/2024-02-28-15-50-01.gh-issue-111140.mpwcUg.rst b/Misc/NEWS.d/next/C API/2024-02-28-15-50-01.gh-issue-111140.mpwcUg.rst
deleted file mode 100644
index 113db93d186009..00000000000000
--- a/Misc/NEWS.d/next/C API/2024-02-28-15-50-01.gh-issue-111140.mpwcUg.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Add additional flags to :c:func:`PyLong_AsNativeBytes` and
-:c:func:`PyLong_FromNativeBytes` to allow the caller to determine how to handle
-edge cases around values that fill the entire buffer.
diff --git a/Misc/NEWS.d/next/C API/2024-03-14-10-33-58.gh-issue-85283.LOgmdU.rst b/Misc/NEWS.d/next/C API/2024-03-14-10-33-58.gh-issue-85283.LOgmdU.rst
deleted file mode 100644
index c8e6b1b1e6ed62..00000000000000
--- a/Misc/NEWS.d/next/C API/2024-03-14-10-33-58.gh-issue-85283.LOgmdU.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-The ``fcntl``, ``grp``, ``pwd``, ``termios``, ``_statistics`` and
-``_testconsole`` C extensions are now built with the :ref:`limited C API
-`. Patch by Victor Stinner.
diff --git a/Misc/NEWS.d/next/C API/2024-03-14-15-17-11.gh-issue-111696.YmnvAi.rst b/Misc/NEWS.d/next/C API/2024-03-14-15-17-11.gh-issue-111696.YmnvAi.rst
deleted file mode 100644
index 3d87c56bf2493a..00000000000000
--- a/Misc/NEWS.d/next/C API/2024-03-14-15-17-11.gh-issue-111696.YmnvAi.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-Add :c:func:`PyType_GetFullyQualifiedName` function to get the type's fully
-qualified name. Equivalent to ``f"{type.__module__}.{type.__qualname__}"``, or
-``type.__qualname__`` if ``type.__module__`` is not a string or is equal to
-``"builtins"``. Patch by Victor Stinner.
diff --git a/Misc/NEWS.d/next/C API/2024-03-14-18-00-32.gh-issue-111696.L6oIPq.rst b/Misc/NEWS.d/next/C API/2024-03-14-18-00-32.gh-issue-111696.L6oIPq.rst
deleted file mode 100644
index 7973d7b16e5826..00000000000000
--- a/Misc/NEWS.d/next/C API/2024-03-14-18-00-32.gh-issue-111696.L6oIPq.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Add :c:func:`PyType_GetModuleName` function to get the type's module name.
-Equivalent to getting the ``type.__module__`` attribute. Patch by Eric Snow
-and Victor Stinner.
diff --git a/Misc/NEWS.d/next/C API/2024-03-14-22-30-07.gh-issue-111696.76UMKi.rst b/Misc/NEWS.d/next/C API/2024-03-14-22-30-07.gh-issue-111696.76UMKi.rst
deleted file mode 100644
index 44c15e4e6a8256..00000000000000
--- a/Misc/NEWS.d/next/C API/2024-03-14-22-30-07.gh-issue-111696.76UMKi.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-Add support for ``%T``, ``%T#``, ``%N`` and ``%N#`` formats to
-:c:func:`PyUnicode_FromFormat`: format the fully qualified name of an object
-type and of a type: call :c:func:`PyType_GetModuleName`. See :pep:`737` for
-more information. Patch by Victor Stinner.
diff --git a/Misc/NEWS.d/next/C API/2024-03-15-23-55-24.gh-issue-115754.xnzc__.rst b/Misc/NEWS.d/next/C API/2024-03-15-23-55-24.gh-issue-115754.xnzc__.rst
deleted file mode 100644
index d76c98ee54056d..00000000000000
--- a/Misc/NEWS.d/next/C API/2024-03-15-23-55-24.gh-issue-115754.xnzc__.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Add :c:func:`Py_GetConstant` and :c:func:`Py_GetConstantBorrowed` functions to
-get constants. For example, ``Py_GetConstant(Py_CONSTANT_ZERO)`` returns a
-:term:`strong reference` to the constant zero. Patch by Victor Stinner.
diff --git a/Misc/NEWS.d/next/C API/2024-03-15-23-57-33.gh-issue-115754.zLdv82.rst b/Misc/NEWS.d/next/C API/2024-03-15-23-57-33.gh-issue-115754.zLdv82.rst
deleted file mode 100644
index feff0c0897eae1..00000000000000
--- a/Misc/NEWS.d/next/C API/2024-03-15-23-57-33.gh-issue-115754.zLdv82.rst
+++ /dev/null
@@ -1,5 +0,0 @@
-In the limited C API version 3.13, getting ``Py_None``, ``Py_False``,
-``Py_True``, ``Py_Ellipsis`` and ``Py_NotImplemented`` singletons is now
-implemented as function calls at the stable ABI level to hide implementation
-details. Getting these constants still return borrowed references. Patch by
-Victor Stinner.
diff --git a/Misc/NEWS.d/next/C API/2024-03-16-12-21-00.gh-issue-116809.JL786L.rst b/Misc/NEWS.d/next/C API/2024-03-16-12-21-00.gh-issue-116809.JL786L.rst
deleted file mode 100644
index a122e1b45b959a..00000000000000
--- a/Misc/NEWS.d/next/C API/2024-03-16-12-21-00.gh-issue-116809.JL786L.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Restore removed private ``_PyErr_ChainExceptions1()`` function. Patch by
-Victor Stinner.
diff --git a/Misc/NEWS.d/next/C API/2024-03-17-22-42-21.gh-issue-116936.tNrzfm.rst b/Misc/NEWS.d/next/C API/2024-03-17-22-42-21.gh-issue-116936.tNrzfm.rst
deleted file mode 100644
index bd2abc94082a5a..00000000000000
--- a/Misc/NEWS.d/next/C API/2024-03-17-22-42-21.gh-issue-116936.tNrzfm.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Add :c:func:`PyType_GetModuleByDef` to the limited C API. Patch by Victor
-Stinner.
diff --git a/Misc/NEWS.d/next/C API/2024-03-18-09-58-46.gh-issue-116869.LFDVKM.rst b/Misc/NEWS.d/next/C API/2024-03-18-09-58-46.gh-issue-116869.LFDVKM.rst
deleted file mode 100644
index 9b9d943f2e6d19..00000000000000
--- a/Misc/NEWS.d/next/C API/2024-03-18-09-58-46.gh-issue-116869.LFDVKM.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Make the C API compatible with ``-Werror=declaration-after-statement``
-compiler flag again. Patch by Victor Stinner.
diff --git a/Misc/NEWS.d/next/C API/2024-03-18-10-58-47.gh-issue-116869.lN0GBl.rst b/Misc/NEWS.d/next/C API/2024-03-18-10-58-47.gh-issue-116869.lN0GBl.rst
deleted file mode 100644
index 71044b4930355a..00000000000000
--- a/Misc/NEWS.d/next/C API/2024-03-18-10-58-47.gh-issue-116869.lN0GBl.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Add ``test_cext`` test: build a C extension to check if the Python C API
-emits C compiler warnings. Patch by Victor Stinner.
diff --git a/Misc/NEWS.d/next/C API/2024-03-19-09-49-04.gh-issue-115756.4Ls_Tl.rst b/Misc/NEWS.d/next/C API/2024-03-19-09-49-04.gh-issue-115756.4Ls_Tl.rst
deleted file mode 100644
index 6960395fe229a3..00000000000000
--- a/Misc/NEWS.d/next/C API/2024-03-19-09-49-04.gh-issue-115756.4Ls_Tl.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-:c:func:`!PyCode_GetFirstFree` is an ustable API now and has been renamed to
-:c:func:`PyUnstable_Code_GetFirstFree`. (Contributed by Bogdan Romanyuk in
-:gh:`115781`)
diff --git a/Misc/NEWS.d/next/C API/2024-03-20-13-13-22.gh-issue-117021.0Q5jBx.rst b/Misc/NEWS.d/next/C API/2024-03-20-13-13-22.gh-issue-117021.0Q5jBx.rst
deleted file mode 100644
index 2f93e1e6da00aa..00000000000000
--- a/Misc/NEWS.d/next/C API/2024-03-20-13-13-22.gh-issue-117021.0Q5jBx.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fix integer overflow in :c:func:`PyLong_AsPid` on non-Windows 64-bit
-platforms.
diff --git a/Misc/NEWS.d/next/C API/2024-03-22-19-29-24.gh-issue-87193.u7O-jY.rst b/Misc/NEWS.d/next/C API/2024-03-22-19-29-24.gh-issue-87193.u7O-jY.rst
deleted file mode 100644
index cb921a9c7bf36e..00000000000000
--- a/Misc/NEWS.d/next/C API/2024-03-22-19-29-24.gh-issue-87193.u7O-jY.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-:c:func:`_PyBytes_Resize` can now be called for bytes objects with reference
-count > 1, including 1-byte bytes objects. It creates a new bytes object and
-destroys the old one if it has reference count > 1.
diff --git a/Misc/NEWS.d/next/C API/2024-04-08-09-44-29.gh-issue-117534.54ZE_n.rst b/Misc/NEWS.d/next/C API/2024-04-08-09-44-29.gh-issue-117534.54ZE_n.rst
new file mode 100644
index 00000000000000..4b7dda610fc2b2
--- /dev/null
+++ b/Misc/NEWS.d/next/C API/2024-04-08-09-44-29.gh-issue-117534.54ZE_n.rst
@@ -0,0 +1,2 @@
+Improve validation logic in the C implementation of :meth:`datetime.fromisoformat`
+to better handle invalid years. Patch by Vlad Efanov.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2021-09-04-22-33-01.bpo-24612.SsTuUX.rst b/Misc/NEWS.d/next/Core and Builtins/2021-09-04-22-33-01.bpo-24612.SsTuUX.rst
deleted file mode 100644
index d54ffc4b76db11..00000000000000
--- a/Misc/NEWS.d/next/Core and Builtins/2021-09-04-22-33-01.bpo-24612.SsTuUX.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Improve the :exc:`SyntaxError` that happens when 'not' appears after an
-operator. Patch by Pablo Galindo
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-10-05-09-33-48.gh-issue-97901.BOLluU.rst b/Misc/NEWS.d/next/Core and Builtins/2022-10-05-09-33-48.gh-issue-97901.BOLluU.rst
deleted file mode 100644
index 4d2bd65ea1fee6..00000000000000
--- a/Misc/NEWS.d/next/Core and Builtins/2022-10-05-09-33-48.gh-issue-97901.BOLluU.rst
+++ /dev/null
@@ -1 +0,0 @@
-Mime type ``text/rtf`` is now supported by :mod:`mimetypes`.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-10-14-00-05-17.gh-issue-109870.oKpJ3P.rst b/Misc/NEWS.d/next/Core and Builtins/2023-10-14-00-05-17.gh-issue-109870.oKpJ3P.rst
deleted file mode 100644
index 390bb1260ea843..00000000000000
--- a/Misc/NEWS.d/next/Core and Builtins/2023-10-14-00-05-17.gh-issue-109870.oKpJ3P.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Dataclasses now calls :func:`exec` once per dataclass, instead of once
-per method being added. This can speed up dataclass creation by up to
-20%.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-01-07-04-22-51.gh-issue-108362.oB9Gcf.rst b/Misc/NEWS.d/next/Core and Builtins/2024-01-07-04-22-51.gh-issue-108362.oB9Gcf.rst
deleted file mode 100644
index 893904bcecea8a..00000000000000
--- a/Misc/NEWS.d/next/Core and Builtins/2024-01-07-04-22-51.gh-issue-108362.oB9Gcf.rst
+++ /dev/null
@@ -1,12 +0,0 @@
-Implement an incremental cyclic garbage collector. By collecting the old
-generation in increments, there is no need for a full heap scan. This can
-hugely reduce maximum pause time for programs with large heaps.
-
-Reduce the number of generations from three to two. The old generation is
-split into two spaces, "visited" and "pending".
-
-Collection happens in two steps::
-* An increment is formed from the young generation and a small part of the pending space.
-* This increment is scanned and the survivors moved to the end of the visited space.
-
-When the collecting space becomes empty, the two spaces are swapped.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-02-24-03-39-09.gh-issue-115776.THJXqg.rst b/Misc/NEWS.d/next/Core and Builtins/2024-02-24-03-39-09.gh-issue-115776.THJXqg.rst
deleted file mode 100644
index 5974b1882acb22..00000000000000
--- a/Misc/NEWS.d/next/Core and Builtins/2024-02-24-03-39-09.gh-issue-115776.THJXqg.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-The array of values, the ``PyDictValues`` struct is now embedded in the
-object during allocation. This provides better performance in the common
-case, and does not degrade as much when the object's ``__dict__`` is
-materialized.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-02-25-14-17-25.gh-issue-115775.CNbGbJ.rst b/Misc/NEWS.d/next/Core and Builtins/2024-02-25-14-17-25.gh-issue-115775.CNbGbJ.rst
deleted file mode 100644
index 78bef746b67d85..00000000000000
--- a/Misc/NEWS.d/next/Core and Builtins/2024-02-25-14-17-25.gh-issue-115775.CNbGbJ.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Compiler populates the new ``__static_attributes__`` field on a class with
-the names of attributes of this class which are accessed through self.X from
-any function in its body.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-03-07-16-12-39.gh-issue-114099.ujdjn2.rst b/Misc/NEWS.d/next/Core and Builtins/2024-03-07-16-12-39.gh-issue-114099.ujdjn2.rst
deleted file mode 100644
index 5405a3bdc36f9e..00000000000000
--- a/Misc/NEWS.d/next/Core and Builtins/2024-03-07-16-12-39.gh-issue-114099.ujdjn2.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Added a Loader that can discover extension modules in an iOS-style Frameworks
-folder.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-03-11-00-45-39.gh-issue-116554.gYumG5.rst b/Misc/NEWS.d/next/Core and Builtins/2024-03-11-00-45-39.gh-issue-116554.gYumG5.rst
deleted file mode 100644
index 82f92789de0a39..00000000000000
--- a/Misc/NEWS.d/next/Core and Builtins/2024-03-11-00-45-39.gh-issue-116554.gYumG5.rst
+++ /dev/null
@@ -1 +0,0 @@
-``list.sort()`` now exploits more cases of partial ordering, particularly those with long descending runs with sub-runs of equal values. Those are recognized as single runs now (previously, each block of repeated values caused a new run to be created).
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-03-11-22-05-56.gh-issue-116626.GsyczB.rst b/Misc/NEWS.d/next/Core and Builtins/2024-03-11-22-05-56.gh-issue-116626.GsyczB.rst
deleted file mode 100644
index 5b18d04cca64b5..00000000000000
--- a/Misc/NEWS.d/next/Core and Builtins/2024-03-11-22-05-56.gh-issue-116626.GsyczB.rst
+++ /dev/null
@@ -1 +0,0 @@
-Ensure ``INSTRUMENTED_CALL_FUNCTION_EX`` always emits :monitoring-event:`CALL`
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-03-12-20-31-57.gh-issue-113964.bJppzg.rst b/Misc/NEWS.d/next/Core and Builtins/2024-03-12-20-31-57.gh-issue-113964.bJppzg.rst
deleted file mode 100644
index ab370d4aa1baee..00000000000000
--- a/Misc/NEWS.d/next/Core and Builtins/2024-03-12-20-31-57.gh-issue-113964.bJppzg.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Starting new threads and process creation through :func:`os.fork` are now
-only prevented once all non-daemon threads exit.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-03-13-16-55-25.gh-issue-116735.o3w6y8.rst b/Misc/NEWS.d/next/Core and Builtins/2024-03-13-16-55-25.gh-issue-116735.o3w6y8.rst
deleted file mode 100644
index ca15d484e345db..00000000000000
--- a/Misc/NEWS.d/next/Core and Builtins/2024-03-13-16-55-25.gh-issue-116735.o3w6y8.rst
+++ /dev/null
@@ -1 +0,0 @@
-For ``INSTRUMENTED_CALL_FUNCTION_EX``, set ``arg0`` to ``sys.monitoring.MISSING`` instead of ``None`` for :monitoring-event:`CALL` event.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-03-21-09-57-57.gh-issue-117114.Qu-p55.rst b/Misc/NEWS.d/next/Core and Builtins/2024-03-21-09-57-57.gh-issue-117114.Qu-p55.rst
deleted file mode 100644
index c9c028a8dda0e5..00000000000000
--- a/Misc/NEWS.d/next/Core and Builtins/2024-03-21-09-57-57.gh-issue-117114.Qu-p55.rst
+++ /dev/null
@@ -1 +0,0 @@
-Make :func:`os.path.isdevdrive` available on all platforms. For those that do not offer Dev Drives, it will always return ``False``.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-03-21-12-10-11.gh-issue-117108._6jIrB.rst b/Misc/NEWS.d/next/Core and Builtins/2024-03-21-12-10-11.gh-issue-117108._6jIrB.rst
deleted file mode 100644
index 57ad9606b05e05..00000000000000
--- a/Misc/NEWS.d/next/Core and Builtins/2024-03-21-12-10-11.gh-issue-117108._6jIrB.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-The cycle GC now chooses the size of increments based on the total heap
-size, instead of the rate of object creation. This ensures that it can keep
-up with growing heaps.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-03-25-12-51-12.gh-issue-117108.tNqDEo.rst b/Misc/NEWS.d/next/Core and Builtins/2024-03-25-12-51-12.gh-issue-117108.tNqDEo.rst
deleted file mode 100644
index a28c83ee6efe40..00000000000000
--- a/Misc/NEWS.d/next/Core and Builtins/2024-03-25-12-51-12.gh-issue-117108.tNqDEo.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Change the old space bit of objects in the young generation from 0 to
-gcstate->visited, so that any objects created during GC will have the old
-bit set correctly if they get moved into the old generation.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-03-25-17-04-54.gh-issue-99108.8bjdO6.rst b/Misc/NEWS.d/next/Core and Builtins/2024-03-25-17-04-54.gh-issue-99108.8bjdO6.rst
deleted file mode 100644
index 184273b42b7e9d..00000000000000
--- a/Misc/NEWS.d/next/Core and Builtins/2024-03-25-17-04-54.gh-issue-99108.8bjdO6.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-Updated the :mod:`hashlib` built-in `HACL\* project`_ C code from upstream
-that we use for many implementations when they are not present via OpenSSL
-in a given build. This also avoids the rare potential for a C symbol name
-one definition rule linking issue.
-
-.. _HACL\* project: https://github.com/hacl-star/hacl-star
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-03-26-17-22-38.gh-issue-117266.Kwh79O.rst b/Misc/NEWS.d/next/Core and Builtins/2024-03-26-17-22-38.gh-issue-117266.Kwh79O.rst
deleted file mode 100644
index 5055954676b9ab..00000000000000
--- a/Misc/NEWS.d/next/Core and Builtins/2024-03-26-17-22-38.gh-issue-117266.Kwh79O.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fix crashes for certain user-created subclasses of :class:`ast.AST`. Such
-classes are now expected to set the ``_field_types`` attribute.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-03-28-19-13-20.gh-issue-117335.d6uKJu.rst b/Misc/NEWS.d/next/Core and Builtins/2024-03-28-19-13-20.gh-issue-117335.d6uKJu.rst
deleted file mode 100644
index e419b2e97f3886..00000000000000
--- a/Misc/NEWS.d/next/Core and Builtins/2024-03-28-19-13-20.gh-issue-117335.d6uKJu.rst
+++ /dev/null
@@ -1 +0,0 @@
-Raise TypeError for non-sequences for :func:`ntpath.commonpath`.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-03-29-15-04-13.gh-issue-117349.OB9kQQ.rst b/Misc/NEWS.d/next/Core and Builtins/2024-03-29-15-04-13.gh-issue-117349.OB9kQQ.rst
deleted file mode 100644
index 7a7bc689002017..00000000000000
--- a/Misc/NEWS.d/next/Core and Builtins/2024-03-29-15-04-13.gh-issue-117349.OB9kQQ.rst
+++ /dev/null
@@ -1 +0,0 @@
-Optimise several functions in :mod:`os.path`.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-03-29-21-43-19.gh-issue-117381.fT0JFM.rst b/Misc/NEWS.d/next/Core and Builtins/2024-03-29-21-43-19.gh-issue-117381.fT0JFM.rst
deleted file mode 100644
index 88b6c32e971e72..00000000000000
--- a/Misc/NEWS.d/next/Core and Builtins/2024-03-29-21-43-19.gh-issue-117381.fT0JFM.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix error message for :func:`ntpath.commonpath`.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-04-02-06-16-49.gh-issue-109120.X485oN.rst b/Misc/NEWS.d/next/Core and Builtins/2024-04-02-06-16-49.gh-issue-109120.X485oN.rst
deleted file mode 100644
index 32e70b22f778e1..00000000000000
--- a/Misc/NEWS.d/next/Core and Builtins/2024-04-02-06-16-49.gh-issue-109120.X485oN.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Added handle of incorrect star expressions, e.g ``f(3, *)``. Patch by
-Grigoryev Semyon
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-04-02-10-04-57.gh-issue-117411.YdyVmG.rst b/Misc/NEWS.d/next/Core and Builtins/2024-04-02-10-04-57.gh-issue-117411.YdyVmG.rst
deleted file mode 100644
index 73c60ee33a5413..00000000000000
--- a/Misc/NEWS.d/next/Core and Builtins/2024-04-02-10-04-57.gh-issue-117411.YdyVmG.rst
+++ /dev/null
@@ -1 +0,0 @@
-Move ``PyFutureFeatures`` to an internal header and make it private.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-04-02-17-37-35.gh-issue-117431.vDKAOn.rst b/Misc/NEWS.d/next/Core and Builtins/2024-04-02-17-37-35.gh-issue-117431.vDKAOn.rst
deleted file mode 100644
index 83f243ae214f7d..00000000000000
--- a/Misc/NEWS.d/next/Core and Builtins/2024-04-02-17-37-35.gh-issue-117431.vDKAOn.rst
+++ /dev/null
@@ -1,10 +0,0 @@
-Improve the performance of the following :class:`str` methods
-by adapting them to the :c:macro:`METH_FASTCALL` calling convention:
-
-* :meth:`~str.count`
-* :meth:`~str.endswith`
-* :meth:`~str.find`
-* :meth:`~str.index`
-* :meth:`~str.rfind`
-* :meth:`~str.rindex`
-* :meth:`~str.startswith`
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-04-03-13-44-04.gh-issue-116968.zgcdG2.rst b/Misc/NEWS.d/next/Core and Builtins/2024-04-03-13-44-04.gh-issue-116968.zgcdG2.rst
deleted file mode 100644
index dc5beee0022181..00000000000000
--- a/Misc/NEWS.d/next/Core and Builtins/2024-04-03-13-44-04.gh-issue-116968.zgcdG2.rst
+++ /dev/null
@@ -1,11 +0,0 @@
-Introduce a unified 16-bit backoff counter type (``_Py_BackoffCounter``),
-shared between the Tier 1 adaptive specializer and the Tier 2 optimizer. The
-API used for adaptive specialization counters is changed but the behavior is
-(supposed to be) identical.
-
-The behavior of the Tier 2 counters is changed:
-
-* There are no longer dynamic thresholds (we never varied these).
-* All counters now use the same exponential backoff.
-* The counter for ``JUMP_BACKWARD`` starts counting down from 16.
-* The ``temperature`` in side exits starts counting down from 64.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-04-04-13-42-59.gh-issue-117494.GPQH64.rst b/Misc/NEWS.d/next/Core and Builtins/2024-04-04-13-42-59.gh-issue-117494.GPQH64.rst
deleted file mode 100644
index 3b550eda64834b..00000000000000
--- a/Misc/NEWS.d/next/Core and Builtins/2024-04-04-13-42-59.gh-issue-117494.GPQH64.rst
+++ /dev/null
@@ -1 +0,0 @@
-Refactored the instruction sequence data structure out of compile.c into instruction_sequence.c.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-04-08-14-33-38.gh-issue-117636.exnRKd.rst b/Misc/NEWS.d/next/Core and Builtins/2024-04-08-14-33-38.gh-issue-117636.exnRKd.rst
new file mode 100644
index 00000000000000..7d7cb506352193
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-04-08-14-33-38.gh-issue-117636.exnRKd.rst
@@ -0,0 +1 @@
+Speedup :func:`os.path.join`.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-04-09-11-31-25.gh-issue-115776.5Nthd0.rst b/Misc/NEWS.d/next/Core and Builtins/2024-04-09-11-31-25.gh-issue-115776.5Nthd0.rst
new file mode 100644
index 00000000000000..5fc0080bcb9551
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-04-09-11-31-25.gh-issue-115776.5Nthd0.rst
@@ -0,0 +1,2 @@
+Statically allocated objects are, by definition, immortal so must be
+marked as such regardless of whether they are in extension modules or not.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-04-10-22-16-18.gh-issue-117709.-_1YL0.rst b/Misc/NEWS.d/next/Core and Builtins/2024-04-10-22-16-18.gh-issue-117709.-_1YL0.rst
new file mode 100644
index 00000000000000..2216b53688c378
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-04-10-22-16-18.gh-issue-117709.-_1YL0.rst
@@ -0,0 +1,3 @@
+Speed up calls to :func:`str` with positional-only argument,
+by using the :pep:`590` ``vectorcall`` calling convention.
+Patch by Erlend Aasland.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-04-03-09-49-15.gh-issue-117431.WAqRgc.rst b/Misc/NEWS.d/next/Core and Builtins/2024-04-12-09-09-11.gh-issue-117431.lxFEeJ.rst
similarity index 64%
rename from Misc/NEWS.d/next/Core and Builtins/2024-04-03-09-49-15.gh-issue-117431.WAqRgc.rst
rename to Misc/NEWS.d/next/Core and Builtins/2024-04-12-09-09-11.gh-issue-117431.lxFEeJ.rst
index 17374d0d5c575b..0d94389aba124a 100644
--- a/Misc/NEWS.d/next/Core and Builtins/2024-04-03-09-49-15.gh-issue-117431.WAqRgc.rst
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-04-12-09-09-11.gh-issue-117431.lxFEeJ.rst
@@ -2,5 +2,8 @@ Improve the performance of the following :class:`bytes` and
:class:`bytearray` methods by adapting them to the :c:macro:`METH_FASTCALL`
calling convention:
-* :meth:`!endswith`
-* :meth:`!startswith`
+* :meth:`!count`
+* :meth:`!find`
+* :meth:`!index`
+* :meth:`!rfind`
+* :meth:`!rindex`
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-04-12-11-19-18.gh-issue-117750.YttK6h.rst b/Misc/NEWS.d/next/Core and Builtins/2024-04-12-11-19-18.gh-issue-117750.YttK6h.rst
new file mode 100644
index 00000000000000..d7cf5d6e57d0cb
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-04-12-11-19-18.gh-issue-117750.YttK6h.rst
@@ -0,0 +1,3 @@
+Fix issue where an object's dict would get out of sync with the object's
+internal values when being cleared. ``obj.__dict__.clear()`` now clears the
+internal values, but leaves the dict attached to the object.
diff --git a/Misc/NEWS.d/next/Documentation/2022-04-15-13-15-23.gh-issue-91565.OznXwC.rst b/Misc/NEWS.d/next/Documentation/2022-04-15-13-15-23.gh-issue-91565.OznXwC.rst
deleted file mode 100644
index df97e2c447ef58..00000000000000
--- a/Misc/NEWS.d/next/Documentation/2022-04-15-13-15-23.gh-issue-91565.OznXwC.rst
+++ /dev/null
@@ -1 +0,0 @@
-Changes to documentation files and config outputs to reflect the new location for reporting bugs - i.e. GitHub rather than bugs.python.org.
diff --git a/Misc/NEWS.d/next/Documentation/2024-03-20-12-41-47.gh-issue-114099.ad_Ck9.rst b/Misc/NEWS.d/next/Documentation/2024-03-20-12-41-47.gh-issue-114099.ad_Ck9.rst
deleted file mode 100644
index c6f403ee899162..00000000000000
--- a/Misc/NEWS.d/next/Documentation/2024-03-20-12-41-47.gh-issue-114099.ad_Ck9.rst
+++ /dev/null
@@ -1 +0,0 @@
-Add an iOS platform guide, and flag modules not available on iOS.
diff --git a/Misc/NEWS.d/next/Documentation/2024-03-20-15-12-37.gh-issue-115977.IMLi6K.rst b/Misc/NEWS.d/next/Documentation/2024-03-20-15-12-37.gh-issue-115977.IMLi6K.rst
deleted file mode 100644
index 5f04e93d9a862b..00000000000000
--- a/Misc/NEWS.d/next/Documentation/2024-03-20-15-12-37.gh-issue-115977.IMLi6K.rst
+++ /dev/null
@@ -1 +0,0 @@
-Remove compatibilty references to Emscripten.
diff --git a/Misc/NEWS.d/next/Library/2019-08-12-19-08-06.bpo-15010.3bY2CF.rst b/Misc/NEWS.d/next/Library/2019-08-12-19-08-06.bpo-15010.3bY2CF.rst
deleted file mode 100644
index f61a45ed98abad..00000000000000
--- a/Misc/NEWS.d/next/Library/2019-08-12-19-08-06.bpo-15010.3bY2CF.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-:meth:`unittest.TestLoader.discover` now saves the original value of
-``unittest.TestLoader._top_level_dir`` and restores it at the end of the
-call.
diff --git a/Misc/NEWS.d/next/Library/2019-08-27-01-03-26.gh-issue-66543._TRpYr.rst b/Misc/NEWS.d/next/Library/2019-08-27-01-03-26.gh-issue-66543._TRpYr.rst
deleted file mode 100644
index 62f7aa2490bb73..00000000000000
--- a/Misc/NEWS.d/next/Library/2019-08-27-01-03-26.gh-issue-66543._TRpYr.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-Make :func:`mimetypes.guess_type` properly parsing of URLs with only a host
-name, URLs containing fragment or query, and filenames with only a UNC
-sharepoint on Windows.
-Based on patch by Dong-hee Na.
diff --git a/Misc/NEWS.d/next/Library/2020-06-11-16-20-33.bpo-27578.CIA-fu.rst b/Misc/NEWS.d/next/Library/2020-06-11-16-20-33.bpo-27578.CIA-fu.rst
deleted file mode 100644
index df58a7ede45521..00000000000000
--- a/Misc/NEWS.d/next/Library/2020-06-11-16-20-33.bpo-27578.CIA-fu.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-:func:`inspect.getsource` (and related functions) work with
-empty module files, returning ``'\n'`` (or reasonable equivalent)
-instead of raising ``OSError``. Patch by Kernc.
diff --git a/Misc/NEWS.d/next/Library/2020-10-02-17-35-19.bpo-33533.GLIhM5.rst b/Misc/NEWS.d/next/Library/2020-10-02-17-35-19.bpo-33533.GLIhM5.rst
deleted file mode 100644
index 3ffd723cf1082a..00000000000000
--- a/Misc/NEWS.d/next/Library/2020-10-02-17-35-19.bpo-33533.GLIhM5.rst
+++ /dev/null
@@ -1,5 +0,0 @@
-:func:`asyncio.as_completed` now returns an object that is both an asynchronous
-iterator and plain iterator. The new asynchronous iteration pattern allows for
-easier correlation between prior tasks and their completed results. This is
-a closer match to :func:`concurrent.futures.as_completed`'s iteration pattern.
-Patch by Justin Arthur.
diff --git a/Misc/NEWS.d/next/Library/2022-06-22-14-45-32.gh-issue-89739.CqZcRL.rst b/Misc/NEWS.d/next/Library/2022-06-22-14-45-32.gh-issue-89739.CqZcRL.rst
deleted file mode 100644
index 0358c0107cb697..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-06-22-14-45-32.gh-issue-89739.CqZcRL.rst
+++ /dev/null
@@ -1 +0,0 @@
-The :mod:`zipimport` module can now read ZIP64 files.
diff --git a/Misc/NEWS.d/next/Library/2023-03-03-21-13-08.gh-issue-102402.fpkRO1.rst b/Misc/NEWS.d/next/Library/2023-03-03-21-13-08.gh-issue-102402.fpkRO1.rst
new file mode 100644
index 00000000000000..fa8f3750b85a6b
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-03-03-21-13-08.gh-issue-102402.fpkRO1.rst
@@ -0,0 +1,2 @@
+Adjust ``logging.LogRecord`` to use ``time.time_ns()`` and fix minor bug
+related to floating point math.
diff --git a/Misc/NEWS.d/next/Library/2023-06-16-19-17-06.gh-issue-105866.0NBveV.rst b/Misc/NEWS.d/next/Library/2023-06-16-19-17-06.gh-issue-105866.0NBveV.rst
deleted file mode 100644
index 28eae1232742f7..00000000000000
--- a/Misc/NEWS.d/next/Library/2023-06-16-19-17-06.gh-issue-105866.0NBveV.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fixed ``_get_slots`` bug which caused error when defining dataclasses with slots and a weakref_slot.
diff --git a/Misc/NEWS.d/next/Library/2023-12-11-00-51-51.gh-issue-112948.k-OKp5.rst b/Misc/NEWS.d/next/Library/2023-12-11-00-51-51.gh-issue-112948.k-OKp5.rst
deleted file mode 100644
index 0925a7caba6f07..00000000000000
--- a/Misc/NEWS.d/next/Library/2023-12-11-00-51-51.gh-issue-112948.k-OKp5.rst
+++ /dev/null
@@ -1 +0,0 @@
-Make completion of :mod:`pdb` similar to Python REPL
diff --git a/Misc/NEWS.d/next/Library/2023-12-28-22-52-45.gh-issue-113548.j6TJ7O.rst b/Misc/NEWS.d/next/Library/2023-12-28-22-52-45.gh-issue-113548.j6TJ7O.rst
deleted file mode 100644
index 972ddeb54822e2..00000000000000
--- a/Misc/NEWS.d/next/Library/2023-12-28-22-52-45.gh-issue-113548.j6TJ7O.rst
+++ /dev/null
@@ -1 +0,0 @@
-:mod:`pdb` now allows CLI arguments to ``pdb -m``.
diff --git a/Misc/NEWS.d/next/Library/2024-01-02-22-47-12.gh-issue-85287.ZC5DLj.rst b/Misc/NEWS.d/next/Library/2024-01-02-22-47-12.gh-issue-85287.ZC5DLj.rst
deleted file mode 100644
index e6d031fbc93e83..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-01-02-22-47-12.gh-issue-85287.ZC5DLj.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Changes Unicode codecs to return UnicodeEncodeError or UnicodeDecodeError,
-rather than just UnicodeError.
diff --git a/Misc/NEWS.d/next/Library/2024-01-22-15-50-58.gh-issue-113538.v2wrwg.rst b/Misc/NEWS.d/next/Library/2024-01-22-15-50-58.gh-issue-113538.v2wrwg.rst
deleted file mode 100644
index 5c59af98e136bb..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-01-22-15-50-58.gh-issue-113538.v2wrwg.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Add :meth:`asyncio.Server.close_clients` and
-:meth:`asyncio.Server.abort_clients` methods which allow to more forcefully
-close an asyncio server.
diff --git a/Misc/NEWS.d/next/Library/2024-02-01-03-09-38.gh-issue-114271.raCkt5.rst b/Misc/NEWS.d/next/Library/2024-02-01-03-09-38.gh-issue-114271.raCkt5.rst
deleted file mode 100644
index 2cd35eeda830b9..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-02-01-03-09-38.gh-issue-114271.raCkt5.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-Fix a race in ``threading.Thread.join()``.
-
-``threading._MainThread`` now always represents the main thread of the main
-interpreter.
-
-``PyThreadState.on_delete`` and ``PyThreadState.on_delete_data`` have been
-removed.
diff --git a/Misc/NEWS.d/next/Library/2024-02-01-08-09-20.gh-issue-114847.-JrWrR.rst b/Misc/NEWS.d/next/Library/2024-02-01-08-09-20.gh-issue-114847.-JrWrR.rst
deleted file mode 100644
index bf011fed3efdbc..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-02-01-08-09-20.gh-issue-114847.-JrWrR.rst
+++ /dev/null
@@ -1 +0,0 @@
-Speed up :func:`os.path.realpath` on non-Windows platforms.
diff --git a/Misc/NEWS.d/next/Library/2024-02-18-09-50-31.gh-issue-115627.HGchj0.rst b/Misc/NEWS.d/next/Library/2024-02-18-09-50-31.gh-issue-115627.HGchj0.rst
deleted file mode 100644
index 75d926ab59d557..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-02-18-09-50-31.gh-issue-115627.HGchj0.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fix the :mod:`ssl` module error handling of connection terminate by peer.
-It now throws an OSError with the appropriate error code instead of an EOFError.
diff --git a/Misc/NEWS.d/next/Library/2024-02-26-10-06-50.gh-issue-113308.MbvOFt.rst b/Misc/NEWS.d/next/Library/2024-02-26-10-06-50.gh-issue-113308.MbvOFt.rst
deleted file mode 100644
index c4c242fe3d578f..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-02-26-10-06-50.gh-issue-113308.MbvOFt.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-Remove some internal protected parts from :mod:`uuid`:
-``_has_uuid_generate_time_safe``, ``_netbios_getnode``,
-``_ipconfig_getnode``, and ``_load_system_functions``.
-They were unused.
diff --git a/Misc/NEWS.d/next/Library/2024-03-01-20-23-57.gh-issue-90535.wXm-jC.rst b/Misc/NEWS.d/next/Library/2024-03-01-20-23-57.gh-issue-90535.wXm-jC.rst
deleted file mode 100644
index 9af4efabb6b5b2..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-01-20-23-57.gh-issue-90535.wXm-jC.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Fix support of *interval* values > 1 in
-:class:`logging.TimedRotatingFileHandler` for ``when='MIDNIGHT'`` and
-``when='Wx'``.
diff --git a/Misc/NEWS.d/next/Library/2024-03-05-19-56-29.gh-issue-71052.PMDK--.rst b/Misc/NEWS.d/next/Library/2024-03-05-19-56-29.gh-issue-71052.PMDK--.rst
deleted file mode 100644
index ddca54c7c9ed7b..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-05-19-56-29.gh-issue-71052.PMDK--.rst
+++ /dev/null
@@ -1 +0,0 @@
-Implement :func:`ctypes.util.find_library` on Android.
diff --git a/Misc/NEWS.d/next/Library/2024-03-06-18-30-37.gh-issue-116401.3Wcda2.rst b/Misc/NEWS.d/next/Library/2024-03-06-18-30-37.gh-issue-116401.3Wcda2.rst
deleted file mode 100644
index 121f0065ecca95..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-06-18-30-37.gh-issue-116401.3Wcda2.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fix blocking :func:`os.fwalk` and :func:`shutil.rmtree` on opening named
-pipe.
diff --git a/Misc/NEWS.d/next/Library/2024-03-07-11-10-27.gh-issue-114314.iEhAMH.rst b/Misc/NEWS.d/next/Library/2024-03-07-11-10-27.gh-issue-114314.iEhAMH.rst
deleted file mode 100644
index c241d966f9087d..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-07-11-10-27.gh-issue-114314.iEhAMH.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-In :mod:`ctypes`, ctype data is now stored in type objects directly rather
-than in a dict subclass. This is an internal change that should not affect
-usage.
diff --git a/Misc/NEWS.d/next/Library/2024-03-08-11-31-49.gh-issue-116484.VMAsU7.rst b/Misc/NEWS.d/next/Library/2024-03-08-11-31-49.gh-issue-116484.VMAsU7.rst
deleted file mode 100644
index 265c3810466d39..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-08-11-31-49.gh-issue-116484.VMAsU7.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Change automatically generated :class:`tkinter.Checkbutton` widget names to
-avoid collisions with automatically generated
-:class:`tkinter.ttk.Checkbutton` widget names within the same parent widget.
diff --git a/Misc/NEWS.d/next/Library/2024-03-11-17-04-55.gh-issue-116608.30f58-.rst b/Misc/NEWS.d/next/Library/2024-03-11-17-04-55.gh-issue-116608.30f58-.rst
deleted file mode 100644
index d1536bc47c3ee0..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-11-17-04-55.gh-issue-116608.30f58-.rst
+++ /dev/null
@@ -1,10 +0,0 @@
-The :mod:`importlib.resources` functions
-:func:`~importlib.resources.is_resource()`,
-:func:`~importlib.resources.open_binary()`,
-:func:`~importlib.resources.open_text()`,
-:func:`~importlib.resources.path()`,
-:func:`~importlib.resources.read_binary()`, and
-:func:`~importlib.resources.read_text()` are un-deprecated, and support
-subdirectories via multiple positional arguments.
-The :func:`~importlib.resources.contents()` function also allows subdirectories,
-but remains deprecated.
diff --git a/Misc/NEWS.d/next/Library/2024-03-12-17-53-14.gh-issue-73468.z4ZzvJ.rst b/Misc/NEWS.d/next/Library/2024-03-12-17-53-14.gh-issue-73468.z4ZzvJ.rst
deleted file mode 100644
index c91f4eb97e06bc..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-12-17-53-14.gh-issue-73468.z4ZzvJ.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Added new :func:`math.fma` function, wrapping C99's ``fma()`` operation:
-fused multiply-add function. Patch by Mark Dickinson and Victor Stinner.
diff --git a/Misc/NEWS.d/next/Library/2024-03-12-19-32-17.gh-issue-71042.oI0Ron.rst b/Misc/NEWS.d/next/Library/2024-03-12-19-32-17.gh-issue-71042.oI0Ron.rst
deleted file mode 100644
index 3641cbb9b2fc1a..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-12-19-32-17.gh-issue-71042.oI0Ron.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Add :func:`platform.android_ver`, which provides device and OS information
-on Android.
diff --git a/Misc/NEWS.d/next/Library/2024-03-13-15-45-54.gh-issue-63283.OToJnG.rst b/Misc/NEWS.d/next/Library/2024-03-13-15-45-54.gh-issue-63283.OToJnG.rst
deleted file mode 100644
index bb4c3a4a8d741b..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-13-15-45-54.gh-issue-63283.OToJnG.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-In :mod:`encodings.idna`, any capitalization of the the ACE prefix
-(``xn--``) is now acceptable. Patch by Pepijn de Vos and Zackery Spytz.
diff --git a/Misc/NEWS.d/next/Library/2024-03-14-01-38-44.gh-issue-113171.VFnObz.rst b/Misc/NEWS.d/next/Library/2024-03-14-01-38-44.gh-issue-113171.VFnObz.rst
deleted file mode 100644
index f9a72473be4e2c..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-14-01-38-44.gh-issue-113171.VFnObz.rst
+++ /dev/null
@@ -1,9 +0,0 @@
-Fixed various false positives and false negatives in
-
-* :attr:`ipaddress.IPv4Address.is_private` (see these docs for details)
-* :attr:`ipaddress.IPv4Address.is_global`
-* :attr:`ipaddress.IPv6Address.is_private`
-* :attr:`ipaddress.IPv6Address.is_global`
-
-Also in the corresponding :class:`ipaddress.IPv4Network` and :class:`ipaddress.IPv6Network`
-attributes.
diff --git a/Misc/NEWS.d/next/Library/2024-03-14-09-38-51.gh-issue-116647.h0d_zj.rst b/Misc/NEWS.d/next/Library/2024-03-14-09-38-51.gh-issue-116647.h0d_zj.rst
deleted file mode 100644
index 081f36bff91633..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-14-09-38-51.gh-issue-116647.h0d_zj.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix recursive child in dataclasses
diff --git a/Misc/NEWS.d/next/Library/2024-03-14-10-01-23.gh-issue-116811._h5iKP.rst b/Misc/NEWS.d/next/Library/2024-03-14-10-01-23.gh-issue-116811._h5iKP.rst
deleted file mode 100644
index 00168632429996..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-14-10-01-23.gh-issue-116811._h5iKP.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-In ``PathFinder.invalidate_caches``, delegate to
-``MetadataPathFinder.invalidate_caches``.
diff --git a/Misc/NEWS.d/next/Library/2024-03-14-14-01-46.gh-issue-116764.moB3Lc.rst b/Misc/NEWS.d/next/Library/2024-03-14-14-01-46.gh-issue-116764.moB3Lc.rst
deleted file mode 100644
index e92034b0e8b157..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-14-14-01-46.gh-issue-116764.moB3Lc.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-Restore support of ``None`` and other false values in :mod:`urllib.parse`
-functions :func:`~urllib.parse.parse_qs` and
-:func:`~urllib.parse.parse_qsl`. Also, they now raise a TypeError for
-non-zero integers and non-empty sequences.
diff --git a/Misc/NEWS.d/next/Library/2024-03-14-17-21-25.gh-issue-63207.LV16SL.rst b/Misc/NEWS.d/next/Library/2024-03-14-17-21-25.gh-issue-63207.LV16SL.rst
deleted file mode 100644
index 1f77555d5e7d31..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-14-17-21-25.gh-issue-63207.LV16SL.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-On Windows, :func:`time.time()` now uses the
-``GetSystemTimePreciseAsFileTime()`` clock to have a resolution better than 1
-us, instead of the ``GetSystemTimeAsFileTime()`` clock which has a resolution
-of 15.6 ms. Patch by Victor Stinner.
diff --git a/Misc/NEWS.d/next/Library/2024-03-14-17-24-59.gh-issue-106531.9ehywi.rst b/Misc/NEWS.d/next/Library/2024-03-14-17-24-59.gh-issue-106531.9ehywi.rst
deleted file mode 100644
index e2720d333783c0..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-14-17-24-59.gh-issue-106531.9ehywi.rst
+++ /dev/null
@@ -1,5 +0,0 @@
-Refreshed zipfile._path from `zipp 3.18
-`_, providing
-better compatibility for PyPy, better glob performance for deeply nested
-zipfiles, and providing internal access to ``CompleteDirs.inject`` for use
-in other tests (like importlib.resources).
diff --git a/Misc/NEWS.d/next/Library/2024-03-14-20-59-28.gh-issue-90095.7UaJ1U.rst b/Misc/NEWS.d/next/Library/2024-03-14-20-59-28.gh-issue-90095.7UaJ1U.rst
deleted file mode 100644
index b7024c74f7aa7d..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-14-20-59-28.gh-issue-90095.7UaJ1U.rst
+++ /dev/null
@@ -1 +0,0 @@
-Ignore empty lines and comments in ``.pdbrc``
diff --git a/Misc/NEWS.d/next/Library/2024-03-17-18-12-39.gh-issue-115538.PBiRQB.rst b/Misc/NEWS.d/next/Library/2024-03-17-18-12-39.gh-issue-115538.PBiRQB.rst
deleted file mode 100644
index fda2ebf7593ed5..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-17-18-12-39.gh-issue-115538.PBiRQB.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-:class:`_io.WindowsConsoleIO` now emit a warning if a boolean value is
-passed as a filedescriptor argument.
diff --git a/Misc/NEWS.d/next/Library/2024-03-18-14-36-50.gh-issue-116957.dTCs4f.rst b/Misc/NEWS.d/next/Library/2024-03-18-14-36-50.gh-issue-116957.dTCs4f.rst
deleted file mode 100644
index 51fe04957e26bc..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-18-14-36-50.gh-issue-116957.dTCs4f.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-configparser: Don't leave ConfigParser values in an invalid state (stored as
-a list instead of a str) after an earlier read raised DuplicateSectionError
-or DuplicateOptionError.
diff --git a/Misc/NEWS.d/next/Library/2024-03-19-11-08-26.gh-issue-90872.ghys95.rst b/Misc/NEWS.d/next/Library/2024-03-19-11-08-26.gh-issue-90872.ghys95.rst
deleted file mode 100644
index ead68caa9fe88b..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-19-11-08-26.gh-issue-90872.ghys95.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-On Windows, :meth:`subprocess.Popen.wait` no longer calls
-``WaitForSingleObject()`` with a negative timeout: pass ``0`` ms if the
-timeout is negative. Patch by Victor Stinner.
diff --git a/Misc/NEWS.d/next/Library/2024-03-19-14-35-57.gh-issue-114099.siNSpK.rst b/Misc/NEWS.d/next/Library/2024-03-19-14-35-57.gh-issue-114099.siNSpK.rst
deleted file mode 100644
index 9b57cbb812db4a..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-19-14-35-57.gh-issue-114099.siNSpK.rst
+++ /dev/null
@@ -1 +0,0 @@
-Modify standard library to allow for iOS platform differences.
diff --git a/Misc/NEWS.d/next/Library/2024-03-19-19-42-25.gh-issue-116987.ZVKUH1.rst b/Misc/NEWS.d/next/Library/2024-03-19-19-42-25.gh-issue-116987.ZVKUH1.rst
deleted file mode 100644
index f2da956f66c86b..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-19-19-42-25.gh-issue-116987.ZVKUH1.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fixed :func:`inspect.findsource` for class code objects.
diff --git a/Misc/NEWS.d/next/Library/2024-03-20-00-11-39.gh-issue-68583.mIlxxb.rst b/Misc/NEWS.d/next/Library/2024-03-20-00-11-39.gh-issue-68583.mIlxxb.rst
new file mode 100644
index 00000000000000..12caed75b79044
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-03-20-00-11-39.gh-issue-68583.mIlxxb.rst
@@ -0,0 +1,2 @@
+webbrowser CLI: replace getopt with argparse, add long options. Patch by
+Hugo van Kemenade.
diff --git a/Misc/NEWS.d/next/Library/2024-03-20-16-10-29.gh-issue-70647.FpD6Ar.rst b/Misc/NEWS.d/next/Library/2024-03-20-16-10-29.gh-issue-70647.FpD6Ar.rst
deleted file mode 100644
index a9094df06037cd..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-20-16-10-29.gh-issue-70647.FpD6Ar.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-Start the deprecation period for the current behavior of
-:func:`datetime.datetime.strptime` and :func:`time.strptime` which always
-fails to parse a date string with a :exc:`ValueError` involving a day of
-month such as ``strptime("02-29", "%m-%d")`` when a year is **not**
-specified and the date happen to be February 29th. This should help avoid
-users finding new bugs every four years due to a natural mistaken assumption
-about the API when parsing partial date values.
diff --git a/Misc/NEWS.d/next/Library/2024-03-20-23-07-58.gh-issue-109653.uu3lrX.rst b/Misc/NEWS.d/next/Library/2024-03-20-23-07-58.gh-issue-109653.uu3lrX.rst
deleted file mode 100644
index 38d7634b54c2fe..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-20-23-07-58.gh-issue-109653.uu3lrX.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Deferred select imports in importlib.metadata and importlib.resources for a
-14% speedup.
diff --git a/Misc/NEWS.d/next/Library/2024-03-21-07-27-36.gh-issue-117110.9K1InX.rst b/Misc/NEWS.d/next/Library/2024-03-21-07-27-36.gh-issue-117110.9K1InX.rst
deleted file mode 100644
index 32f8f81c8d052f..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-21-07-27-36.gh-issue-117110.9K1InX.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix a bug that prevents subclasses of :class:`typing.Any` to be instantiated with arguments. Patch by Chris Fu.
diff --git a/Misc/NEWS.d/next/Library/2024-03-21-17-07-38.gh-issue-117084.w1mTpT.rst b/Misc/NEWS.d/next/Library/2024-03-21-17-07-38.gh-issue-117084.w1mTpT.rst
deleted file mode 100644
index 6e7790e926b9d2..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-21-17-07-38.gh-issue-117084.w1mTpT.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fix :mod:`zipfile` extraction for directory entries with the name containing
-backslashes on Windows.
diff --git a/Misc/NEWS.d/next/Library/2024-03-23-13-40-13.gh-issue-112383.XuHf3G.rst b/Misc/NEWS.d/next/Library/2024-03-23-13-40-13.gh-issue-112383.XuHf3G.rst
deleted file mode 100644
index 931e615c2b86c5..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-23-13-40-13.gh-issue-112383.XuHf3G.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix :mod:`dis` module's handling of ``ENTER_EXECUTOR`` instructions.
diff --git a/Misc/NEWS.d/next/Library/2024-03-23-14-26-18.gh-issue-117178.vTisTG.rst b/Misc/NEWS.d/next/Library/2024-03-23-14-26-18.gh-issue-117178.vTisTG.rst
deleted file mode 100644
index f9c53ebbfc3c96..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-23-14-26-18.gh-issue-117178.vTisTG.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fix regression in lazy loading of self-referential modules, introduced in
-gh-114781.
diff --git a/Misc/NEWS.d/next/Library/2024-03-25-00-20-16.gh-issue-117205.yV7xGb.rst b/Misc/NEWS.d/next/Library/2024-03-25-00-20-16.gh-issue-117205.yV7xGb.rst
deleted file mode 100644
index 8d8c201afd29fb..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-25-00-20-16.gh-issue-117205.yV7xGb.rst
+++ /dev/null
@@ -1 +0,0 @@
-Speed up :func:`compileall.compile_dir` by 20% when using multiprocessing by increasing ``chunksize``.
diff --git a/Misc/NEWS.d/next/Library/2024-03-25-21-15-56.gh-issue-117225.oOaZXb.rst b/Misc/NEWS.d/next/Library/2024-03-25-21-15-56.gh-issue-117225.oOaZXb.rst
deleted file mode 100644
index b6c4850f608c2a..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-25-21-15-56.gh-issue-117225.oOaZXb.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-doctest: only print "and X failed" when non-zero, don't pluralise "1 items".
-Patch by Hugo van Kemenade.
diff --git a/Misc/NEWS.d/next/Library/2024-03-26-11-48-39.gh-issue-98966.SayV9y.rst b/Misc/NEWS.d/next/Library/2024-03-26-11-48-39.gh-issue-98966.SayV9y.rst
deleted file mode 100644
index e819a1e9a0aba0..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-26-11-48-39.gh-issue-98966.SayV9y.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-In :mod:`subprocess`, raise a more informative message when
-``stdout=STDOUT``.
diff --git a/Misc/NEWS.d/next/Library/2024-03-27-16-43-42.gh-issue-117294.wbXNFv.rst b/Misc/NEWS.d/next/Library/2024-03-27-16-43-42.gh-issue-117294.wbXNFv.rst
deleted file mode 100644
index bb351e6399a765..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-27-16-43-42.gh-issue-117294.wbXNFv.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-A ``DocTestCase`` now reports as skipped if all examples in the doctest are
-skipped.
diff --git a/Misc/NEWS.d/next/Library/2024-03-27-21-05-52.gh-issue-117310.Bt2wox.rst b/Misc/NEWS.d/next/Library/2024-03-27-21-05-52.gh-issue-117310.Bt2wox.rst
deleted file mode 100644
index 429b890b8b609a..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-27-21-05-52.gh-issue-117310.Bt2wox.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-Fixed an unlikely early & extra ``Py_DECREF`` triggered crash in :mod:`ssl`
-when creating a new ``_ssl._SSLContext`` if CPython was built implausibly such
-that the default cipher list is empty **or** the SSL library it was linked
-against reports a failure from its C ``SSL_CTX_set_cipher_list()`` API.
diff --git a/Misc/NEWS.d/next/Library/2024-03-28-13-54-20.gh-issue-88014.zJz31I.rst b/Misc/NEWS.d/next/Library/2024-03-28-13-54-20.gh-issue-88014.zJz31I.rst
deleted file mode 100644
index f8bb784e39fbb6..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-28-13-54-20.gh-issue-88014.zJz31I.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-In documentation of :class:`gzip.GzipFile` in module gzip, explain data type
-of optional constructor argument *mtime*, and recommend ``mtime = 0`` for
-generating deterministic streams.
diff --git a/Misc/NEWS.d/next/Library/2024-03-28-17-55-22.gh-issue-66449.4jhuEV.rst b/Misc/NEWS.d/next/Library/2024-03-28-17-55-22.gh-issue-66449.4jhuEV.rst
deleted file mode 100644
index 898100b87e1dbd..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-28-17-55-22.gh-issue-66449.4jhuEV.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-:class:`configparser.ConfigParser` now accepts unnamed sections before named
-ones, if configured to do so.
diff --git a/Misc/NEWS.d/next/Library/2024-03-29-12-07-26.gh-issue-117348.WjCYvK.rst b/Misc/NEWS.d/next/Library/2024-03-29-12-07-26.gh-issue-117348.WjCYvK.rst
deleted file mode 100644
index cd3006c3b7b8f0..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-29-12-07-26.gh-issue-117348.WjCYvK.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Refactored :meth:`configparser.RawConfigParser._read` to reduce cyclometric
-complexity and improve comprehensibility.
diff --git a/Misc/NEWS.d/next/Library/2024-03-29-12-21-40.gh-issue-117142.U0agfh.rst b/Misc/NEWS.d/next/Library/2024-03-29-12-21-40.gh-issue-117142.U0agfh.rst
new file mode 100644
index 00000000000000..36810bd815c502
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-03-29-12-21-40.gh-issue-117142.U0agfh.rst
@@ -0,0 +1 @@
+Convert :mod:`!_ctypes` to multi-phase initialisation (:pep:`489`).
diff --git a/Misc/NEWS.d/next/Library/2024-03-29-15-58-01.gh-issue-117337.7w3Qwp.rst b/Misc/NEWS.d/next/Library/2024-03-29-15-58-01.gh-issue-117337.7w3Qwp.rst
deleted file mode 100644
index 73bd2569c7c9cb..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-03-29-15-58-01.gh-issue-117337.7w3Qwp.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Deprecate undocumented :func:`!glob.glob0` and :func:`!glob.glob1`
-functions. Use :func:`glob.glob` and pass a directory to its
-*root_dir* argument instead.
diff --git a/Misc/NEWS.d/next/Library/2024-04-02-13-13-46.gh-issue-117459.jiIZmH.rst b/Misc/NEWS.d/next/Library/2024-04-02-13-13-46.gh-issue-117459.jiIZmH.rst
deleted file mode 100644
index 549bd44112befe..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-04-02-13-13-46.gh-issue-117459.jiIZmH.rst
+++ /dev/null
@@ -1 +0,0 @@
-:meth:`asyncio.asyncio.run_coroutine_threadsafe` now keeps the traceback of :class:`CancelledError`, :class:`TimeoutError` and :class:`InvalidStateError` which are raised in the coroutine.
diff --git a/Misc/NEWS.d/next/Library/2024-04-02-20-30-12.gh-issue-114848.YX4pEc.rst b/Misc/NEWS.d/next/Library/2024-04-02-20-30-12.gh-issue-114848.YX4pEc.rst
deleted file mode 100644
index 30b1a50976f52d..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-04-02-20-30-12.gh-issue-114848.YX4pEc.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Raise :exc:`FileNotFoundError` when ``getcwd()`` returns '(unreachable)',
-which can happen on Linux >= 2.6.36 with glibc < 2.27.
diff --git a/Misc/NEWS.d/next/Library/2024-04-03-16-01-31.gh-issue-117516.7DlHje.rst b/Misc/NEWS.d/next/Library/2024-04-03-16-01-31.gh-issue-117516.7DlHje.rst
new file mode 100644
index 00000000000000..bbf69126d956d2
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-04-03-16-01-31.gh-issue-117516.7DlHje.rst
@@ -0,0 +1 @@
+Add :data:`typing.TypeIs`, implementing :pep:`742`. Patch by Jelle Zijlstra.
diff --git a/Misc/NEWS.d/next/Library/2024-04-03-18-36-53.gh-issue-117467.l6rWlj.rst b/Misc/NEWS.d/next/Library/2024-04-03-18-36-53.gh-issue-117467.l6rWlj.rst
deleted file mode 100644
index 64ae9ff7b2f0b5..00000000000000
--- a/Misc/NEWS.d/next/Library/2024-04-03-18-36-53.gh-issue-117467.l6rWlj.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Preserve mailbox ownership when rewriting in :func:`mailbox.mbox.flush`.
-Patch by Tony Mountifield.
diff --git a/Misc/NEWS.d/next/Library/2024-04-04-15-28-12.gh-issue-116720.aGhXns.rst b/Misc/NEWS.d/next/Library/2024-04-04-15-28-12.gh-issue-116720.aGhXns.rst
new file mode 100644
index 00000000000000..39c7d6b8a1e978
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-04-04-15-28-12.gh-issue-116720.aGhXns.rst
@@ -0,0 +1,18 @@
+Improved behavior of :class:`asyncio.TaskGroup` when an external cancellation
+collides with an internal cancellation. For example, when two task groups
+are nested and both experience an exception in a child task simultaneously,
+it was possible that the outer task group would misbehave, because
+its internal cancellation was swallowed by the inner task group.
+
+In the case where a task group is cancelled externally and also must
+raise an :exc:`ExceptionGroup`, it will now call the parent task's
+:meth:`~asyncio.Task.cancel` method. This ensures that a
+:exc:`asyncio.CancelledError` will be raised at the next
+:keyword:`await`, so the cancellation is not lost.
+
+An added benefit of these changes is that task groups now preserve the
+cancellation count (:meth:`asyncio.Task.cancelling`).
+
+In order to handle some corner cases, :meth:`asyncio.Task.uncancel` may now
+reset the undocumented ``_must_cancel`` flag when the cancellation count
+reaches zero.
diff --git a/Misc/NEWS.d/next/Library/2024-04-05-13-38-53.gh-issue-117546.lWjhHE.rst b/Misc/NEWS.d/next/Library/2024-04-05-13-38-53.gh-issue-117546.lWjhHE.rst
new file mode 100644
index 00000000000000..9762991e47a6a4
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-04-05-13-38-53.gh-issue-117546.lWjhHE.rst
@@ -0,0 +1,2 @@
+Fix issue where :func:`os.path.realpath` stopped resolving symlinks after
+encountering a symlink loop on POSIX.
diff --git a/Misc/NEWS.d/next/Library/2024-04-06-20-31-09.gh-issue-117586.UgWdRK.rst b/Misc/NEWS.d/next/Library/2024-04-06-20-31-09.gh-issue-117586.UgWdRK.rst
new file mode 100644
index 00000000000000..65c699977bd807
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-04-06-20-31-09.gh-issue-117586.UgWdRK.rst
@@ -0,0 +1 @@
+Speed up :meth:`pathlib.Path.glob` by working with strings internally.
diff --git a/Misc/NEWS.d/next/Library/2024-04-07-19-39-20.gh-issue-102247.h8rqiX.rst b/Misc/NEWS.d/next/Library/2024-04-07-19-39-20.gh-issue-102247.h8rqiX.rst
new file mode 100644
index 00000000000000..c0f74916ddfb1f
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-04-07-19-39-20.gh-issue-102247.h8rqiX.rst
@@ -0,0 +1,3 @@
+The status codes enum with constants in http.HTTPStatus is updated to include the names from RFC 9110. This RFC includes some HTTP statuses previously only used for WebDAV and assigns more generic names to them.
+
+The old constants are preserved for backwards compatibility.
diff --git a/Misc/NEWS.d/next/Library/2024-04-08-19-12-26.gh-issue-117663.CPfc_p.rst b/Misc/NEWS.d/next/Library/2024-04-08-19-12-26.gh-issue-117663.CPfc_p.rst
new file mode 100644
index 00000000000000..2c7a5224b5a6eb
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-04-08-19-12-26.gh-issue-117663.CPfc_p.rst
@@ -0,0 +1,2 @@
+Fix ``_simple_enum`` to detect aliases when multiple arguments are present
+but only one is the member value.
diff --git a/Misc/NEWS.d/next/Library/2024-04-09-20-14-44.gh-issue-117348.A2NAAz.rst b/Misc/NEWS.d/next/Library/2024-04-09-20-14-44.gh-issue-117348.A2NAAz.rst
new file mode 100644
index 00000000000000..2451a4e4f622e4
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-04-09-20-14-44.gh-issue-117348.A2NAAz.rst
@@ -0,0 +1,2 @@
+Largely restored import time performance of configparser by avoiding
+dataclasses.
diff --git a/Misc/NEWS.d/next/Library/2024-04-09-23-22-21.gh-issue-117692.EciInD.rst b/Misc/NEWS.d/next/Library/2024-04-09-23-22-21.gh-issue-117692.EciInD.rst
new file mode 100644
index 00000000000000..98a6e125c440ef
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-04-09-23-22-21.gh-issue-117692.EciInD.rst
@@ -0,0 +1,2 @@
+Fixes a bug where :class:`doctest.DocTestFinder` failed on wrapped
+``builtin_function_or_method``.
diff --git a/Misc/NEWS.d/next/Library/2024-04-10-20-59-10.gh-issue-117722.oxIUEI.rst b/Misc/NEWS.d/next/Library/2024-04-10-20-59-10.gh-issue-117722.oxIUEI.rst
new file mode 100644
index 00000000000000..de999883658898
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-04-10-20-59-10.gh-issue-117722.oxIUEI.rst
@@ -0,0 +1,2 @@
+Change the new multi-separator support in :meth:`asyncio.Stream.readuntil`
+to only accept tuples of separators rather than arbitrary iterables.
diff --git a/Misc/NEWS.d/next/Library/2024-04-10-21-08-32.gh-issue-117586.UCL__1.rst b/Misc/NEWS.d/next/Library/2024-04-10-21-08-32.gh-issue-117586.UCL__1.rst
new file mode 100644
index 00000000000000..aefac85f9c61b9
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-04-10-21-08-32.gh-issue-117586.UCL__1.rst
@@ -0,0 +1 @@
+Speed up :meth:`pathlib.Path.walk` by working with strings internally.
diff --git a/Misc/NEWS.d/next/Library/2024-04-10-21-30-37.gh-issue-117727.uAYNVS.rst b/Misc/NEWS.d/next/Library/2024-04-10-21-30-37.gh-issue-117727.uAYNVS.rst
new file mode 100644
index 00000000000000..3a0b6834e91f18
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-04-10-21-30-37.gh-issue-117727.uAYNVS.rst
@@ -0,0 +1,2 @@
+Speed up :meth:`pathlib.Path.iterdir` by using :func:`os.scandir`
+internally.
diff --git a/Misc/NEWS.d/next/Library/2024-04-10-22-35-24.gh-issue-115060.XEVuOb.rst b/Misc/NEWS.d/next/Library/2024-04-10-22-35-24.gh-issue-115060.XEVuOb.rst
new file mode 100644
index 00000000000000..b5084a0e86c74f
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-04-10-22-35-24.gh-issue-115060.XEVuOb.rst
@@ -0,0 +1,2 @@
+Speed up :meth:`pathlib.Path.glob` by not scanning directories for
+non-wildcard pattern segments.
diff --git a/Misc/NEWS.d/next/Library/2024-04-12-17-37-11.gh-issue-77102.Mk6X_E.rst b/Misc/NEWS.d/next/Library/2024-04-12-17-37-11.gh-issue-77102.Mk6X_E.rst
new file mode 100644
index 00000000000000..6f91251126dc7b
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-04-12-17-37-11.gh-issue-77102.Mk6X_E.rst
@@ -0,0 +1,3 @@
+The :mod:`site` module now parses ``.pth`` files with UTF-8 first, falling
+back to the :term:`locale encoding` if a ``UnicodeDecodeError`` occurs.
+Previously, only the locale encoding was supported.
diff --git a/Misc/NEWS.d/next/Library/2024-04-13-01-45-15.gh-issue-115060.IxoM03.rst b/Misc/NEWS.d/next/Library/2024-04-13-01-45-15.gh-issue-115060.IxoM03.rst
new file mode 100644
index 00000000000000..50b374acb90ad0
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-04-13-01-45-15.gh-issue-115060.IxoM03.rst
@@ -0,0 +1,3 @@
+Speed up :meth:`pathlib.Path.glob` by omitting an initial
+:meth:`~pathlib.Path.is_dir` call. As a result of this change,
+:meth:`~pathlib.Path.glob` can no longer raise :exc:`OSError`.
diff --git a/Misc/NEWS.d/next/Library/2024-04-14-15-59-28.gh-issue-117691.1mtREE.rst b/Misc/NEWS.d/next/Library/2024-04-14-15-59-28.gh-issue-117691.1mtREE.rst
new file mode 100644
index 00000000000000..d90817a9ebde2f
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-04-14-15-59-28.gh-issue-117691.1mtREE.rst
@@ -0,0 +1,5 @@
+Improve the error messages emitted by :mod:`tarfile` deprecation warnings
relating to :pep:`706`. If a ``filter`` argument is not provided to
``extract()`` or ``extractall()``, the deprecation warning now points to the
+line in the user's code where the relevant function was called. Patch by
+Alex Waygood.
diff --git a/Misc/NEWS.d/next/Security/2024-03-25-21-25-28.gh-issue-117233.E4CyI_.rst b/Misc/NEWS.d/next/Security/2024-03-25-21-25-28.gh-issue-117233.E4CyI_.rst
new file mode 100644
index 00000000000000..a4142ec21b7e5d
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2024-03-25-21-25-28.gh-issue-117233.E4CyI_.rst
@@ -0,0 +1,3 @@
+Detect BLAKE2, SHA3, Shake, & truncated SHA512 support in the OpenSSL-ish
+libcrypto library at build time. This allows :mod:`hashlib` to be used with
+libraries that do not support every algorithm that upstream OpenSSL does.
diff --git a/Misc/NEWS.d/next/Tests/2024-03-06-11-00-36.gh-issue-116307.Uij0t_.rst b/Misc/NEWS.d/next/Tests/2024-03-06-11-00-36.gh-issue-116307.Uij0t_.rst
deleted file mode 100644
index 0bc4be94789f21..00000000000000
--- a/Misc/NEWS.d/next/Tests/2024-03-06-11-00-36.gh-issue-116307.Uij0t_.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Added import helper ``isolated_modules`` as ``CleanImport`` does not remove
-modules imported during the context. Use it in importlib.resources tests to
-avoid leaving ``mod`` around to impede importlib.metadata tests.
diff --git a/Misc/NEWS.d/next/Tests/2024-03-11-23-20-28.gh-issue-112536.Qv1RrX.rst b/Misc/NEWS.d/next/Tests/2024-03-11-23-20-28.gh-issue-112536.Qv1RrX.rst
deleted file mode 100644
index de9e1c557b093c..00000000000000
--- a/Misc/NEWS.d/next/Tests/2024-03-11-23-20-28.gh-issue-112536.Qv1RrX.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Add --tsan to test.regrtest for running TSAN tests in reasonable execution
-times. Patch by Donghee Na.
diff --git a/Misc/NEWS.d/next/Tests/2024-03-13-12-06-49.gh-issue-115979.zsNpQD.rst b/Misc/NEWS.d/next/Tests/2024-03-13-12-06-49.gh-issue-115979.zsNpQD.rst
deleted file mode 100644
index 02bc2b88942e4f..00000000000000
--- a/Misc/NEWS.d/next/Tests/2024-03-13-12-06-49.gh-issue-115979.zsNpQD.rst
+++ /dev/null
@@ -1 +0,0 @@
-Update test_importlib so that it passes under WASI SDK 21.
diff --git a/Misc/NEWS.d/next/Tests/2024-03-20-14-19-32.gh-issue-117089.WwR1Z1.rst b/Misc/NEWS.d/next/Tests/2024-03-20-14-19-32.gh-issue-117089.WwR1Z1.rst
deleted file mode 100644
index ab0baec8c96035..00000000000000
--- a/Misc/NEWS.d/next/Tests/2024-03-20-14-19-32.gh-issue-117089.WwR1Z1.rst
+++ /dev/null
@@ -1 +0,0 @@
-Consolidated tests for importlib.metadata in their own ``metadata`` package.
diff --git a/Misc/NEWS.d/next/Tests/2024-03-21-11-32-29.gh-issue-116333.F-9Ram.rst b/Misc/NEWS.d/next/Tests/2024-03-21-11-32-29.gh-issue-116333.F-9Ram.rst
deleted file mode 100644
index 3fdb6bb3bd7af7..00000000000000
--- a/Misc/NEWS.d/next/Tests/2024-03-21-11-32-29.gh-issue-116333.F-9Ram.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Tests of TLS related things (error codes, etc) were updated to be more
-lenient about specific error message strings and behaviors as seen in the
-BoringSSL and AWS-LC forks of OpenSSL.
diff --git a/Misc/NEWS.d/next/Tests/2024-03-24-23-49-25.gh-issue-117187.eMLT5n.rst b/Misc/NEWS.d/next/Tests/2024-03-24-23-49-25.gh-issue-117187.eMLT5n.rst
deleted file mode 100644
index 0c0b0e0f443396..00000000000000
--- a/Misc/NEWS.d/next/Tests/2024-03-24-23-49-25.gh-issue-117187.eMLT5n.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix XML tests for vanilla Expat <2.6.0.
diff --git a/Misc/NEWS.d/next/Tests/2024-03-25-21-31-49.gh-issue-83434.U7Z8cY.rst b/Misc/NEWS.d/next/Tests/2024-03-25-21-31-49.gh-issue-83434.U7Z8cY.rst
deleted file mode 100644
index 7b7a8fcf53bb3c..00000000000000
--- a/Misc/NEWS.d/next/Tests/2024-03-25-21-31-49.gh-issue-83434.U7Z8cY.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Disable JUnit XML output (``--junit-xml=FILE`` command line option) in
-regrtest when hunting for reference leaks (``-R`` option). Patch by Victor
-Stinner.
diff --git a/Misc/NEWS.d/next/Windows/2024-02-08-14-48-15.gh-issue-115119.qMt32O.rst b/Misc/NEWS.d/next/Windows/2024-02-08-14-48-15.gh-issue-115119.qMt32O.rst
deleted file mode 100644
index f95fed1084cf4f..00000000000000
--- a/Misc/NEWS.d/next/Windows/2024-02-08-14-48-15.gh-issue-115119.qMt32O.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Switched from vendored ``libmpdecimal`` code to a separately-hosted external
-package in the ``cpython-source-deps`` repository when building the
-``_decimal`` module.
diff --git a/Misc/NEWS.d/next/Windows/2024-02-24-23-03-43.gh-issue-91227.sL4zWC.rst b/Misc/NEWS.d/next/Windows/2024-02-24-23-03-43.gh-issue-91227.sL4zWC.rst
deleted file mode 100644
index 8e53afdd619001..00000000000000
--- a/Misc/NEWS.d/next/Windows/2024-02-24-23-03-43.gh-issue-91227.sL4zWC.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix the asyncio ProactorEventLoop implementation so that sending a datagram to an address that is not listening does not prevent receiving any more datagrams.
diff --git a/Misc/NEWS.d/next/Windows/2024-03-14-01-58-22.gh-issue-116773.H2UldY.rst b/Misc/NEWS.d/next/Windows/2024-03-14-01-58-22.gh-issue-116773.H2UldY.rst
deleted file mode 100644
index 8fc3fe80041d26..00000000000000
--- a/Misc/NEWS.d/next/Windows/2024-03-14-01-58-22.gh-issue-116773.H2UldY.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix instances of ``<_overlapped.Overlapped object at 0xXXX> still has pending operation at deallocation, the process may crash``.
diff --git a/Misc/NEWS.d/next/Windows/2024-03-14-09-14-21.gh-issue-88494.Bwfmp7.rst b/Misc/NEWS.d/next/Windows/2024-03-14-09-14-21.gh-issue-88494.Bwfmp7.rst
deleted file mode 100644
index 5a96af0231918f..00000000000000
--- a/Misc/NEWS.d/next/Windows/2024-03-14-09-14-21.gh-issue-88494.Bwfmp7.rst
+++ /dev/null
@@ -1,4 +0,0 @@
-On Windows, :func:`time.monotonic()` now uses the ``QueryPerformanceCounter()``
-clock to have a resolution better than 1 us, instead of the
-``GetTickCount64()`` clock which has a resolution of 15.6 ms. Patch by Victor
-Stinner.
diff --git a/Misc/NEWS.d/next/Windows/2024-03-14-20-46-23.gh-issue-116195.Cu_rYs.rst b/Misc/NEWS.d/next/Windows/2024-03-14-20-46-23.gh-issue-116195.Cu_rYs.rst
deleted file mode 100644
index 32122d764e870a..00000000000000
--- a/Misc/NEWS.d/next/Windows/2024-03-14-20-46-23.gh-issue-116195.Cu_rYs.rst
+++ /dev/null
@@ -1 +0,0 @@
-Improves performance of :func:`os.getppid` by using an alternate system API when available. Contributed by vxiiduu.
diff --git a/Misc/NEWS.d/next/Windows/2024-03-28-22-12-00.gh-issue-117267.K_tki1.rst b/Misc/NEWS.d/next/Windows/2024-03-28-22-12-00.gh-issue-117267.K_tki1.rst
deleted file mode 100644
index d3221429850a11..00000000000000
--- a/Misc/NEWS.d/next/Windows/2024-03-28-22-12-00.gh-issue-117267.K_tki1.rst
+++ /dev/null
@@ -1,5 +0,0 @@
-Ensure ``DirEntry.stat().st_ctime`` behaves consistently with
-:func:`os.stat` during the deprecation period of ``st_ctime`` by containing
-the same value as ``st_birthtime``. After the deprecation period,
-``st_ctime`` will be the metadata change time (or unavailable through
-``DirEntry``), and only ``st_birthtime`` will contain the creation time.
diff --git a/Misc/NEWS.d/next/Windows/2024-04-12-14-02-58.gh-issue-90329.YpEeaO.rst b/Misc/NEWS.d/next/Windows/2024-04-12-14-02-58.gh-issue-90329.YpEeaO.rst
new file mode 100644
index 00000000000000..7242428567dd25
--- /dev/null
+++ b/Misc/NEWS.d/next/Windows/2024-04-12-14-02-58.gh-issue-90329.YpEeaO.rst
@@ -0,0 +1,5 @@
+Suppress the warning displayed on virtual environment creation when the
+requested and created paths differ only by a short (8.3 style) name.
+Warnings will continue to be shown if a junction or symlink in the path
+caused the venv to be created in a different location than originally
+requested.
diff --git a/Modules/_abc.c b/Modules/_abc.c
index 399ecbbd6a2172..ad28035843fd32 100644
--- a/Modules/_abc.c
+++ b/Modules/_abc.c
@@ -21,7 +21,7 @@ PyDoc_STRVAR(_abc__doc__,
typedef struct {
PyTypeObject *_abc_data_type;
- unsigned long long abc_invalidation_counter;
+ uint64_t abc_invalidation_counter;
} _abcmodule_state;
static inline _abcmodule_state*
@@ -32,17 +32,61 @@ get_abc_state(PyObject *module)
return (_abcmodule_state *)state;
}
+static inline uint64_t
+get_invalidation_counter(_abcmodule_state *state)
+{
+#ifdef Py_GIL_DISABLED
+ return _Py_atomic_load_uint64(&state->abc_invalidation_counter);
+#else
+ return state->abc_invalidation_counter;
+#endif
+}
+
+static inline void
+increment_invalidation_counter(_abcmodule_state *state)
+{
+#ifdef Py_GIL_DISABLED
+ _Py_atomic_add_uint64(&state->abc_invalidation_counter, 1);
+#else
+ state->abc_invalidation_counter++;
+#endif
+}
+
/* This object stores internal state for ABCs.
Note that we can use normal sets for caches,
since they are never iterated over. */
typedef struct {
PyObject_HEAD
+ /* These sets of weak references are lazily created. Once created, they
+ will point to the same sets until the ABCMeta object is destroyed or
+ cleared, both of which will only happen while the object is visible to a
+ single thread. */
PyObject *_abc_registry;
- PyObject *_abc_cache; /* Normal set of weak references. */
- PyObject *_abc_negative_cache; /* Normal set of weak references. */
- unsigned long long _abc_negative_cache_version;
+ PyObject *_abc_cache;
+ PyObject *_abc_negative_cache;
+ uint64_t _abc_negative_cache_version;
} _abc_data;
+static inline uint64_t
+get_cache_version(_abc_data *impl)
+{
+#ifdef Py_GIL_DISABLED
+ return _Py_atomic_load_uint64(&impl->_abc_negative_cache_version);
+#else
+ return impl->_abc_negative_cache_version;
+#endif
+}
+
+static inline void
+set_cache_version(_abc_data *impl, uint64_t version)
+{
+#ifdef Py_GIL_DISABLED
+ _Py_atomic_store_uint64(&impl->_abc_negative_cache_version, version);
+#else
+ impl->_abc_negative_cache_version = version;
+#endif
+}
+
static int
abc_data_traverse(_abc_data *self, visitproc visit, void *arg)
{
@@ -90,7 +134,7 @@ abc_data_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
self->_abc_registry = NULL;
self->_abc_cache = NULL;
self->_abc_negative_cache = NULL;
- self->_abc_negative_cache_version = state->abc_invalidation_counter;
+ self->_abc_negative_cache_version = get_invalidation_counter(state);
return (PyObject *) self;
}
@@ -130,8 +174,12 @@ _get_impl(PyObject *module, PyObject *self)
}
static int
-_in_weak_set(PyObject *set, PyObject *obj)
+_in_weak_set(_abc_data *impl, PyObject **pset, PyObject *obj)
{
+ PyObject *set;
+ Py_BEGIN_CRITICAL_SECTION(impl);
+ set = *pset;
+ Py_END_CRITICAL_SECTION();
if (set == NULL || PySet_GET_SIZE(set) == 0) {
return 0;
}
@@ -168,16 +216,19 @@ static PyMethodDef _destroy_def = {
};
static int
-_add_to_weak_set(PyObject **pset, PyObject *obj)
+_add_to_weak_set(_abc_data *impl, PyObject **pset, PyObject *obj)
{
- if (*pset == NULL) {
- *pset = PySet_New(NULL);
- if (*pset == NULL) {
- return -1;
- }
+ PyObject *set;
+ Py_BEGIN_CRITICAL_SECTION(impl);
+ set = *pset;
+ if (set == NULL) {
+ set = *pset = PySet_New(NULL);
+ }
+ Py_END_CRITICAL_SECTION();
+ if (set == NULL) {
+ return -1;
}
- PyObject *set = *pset;
PyObject *ref, *wr;
PyObject *destroy_cb;
wr = PyWeakref_NewRef(set, NULL);
@@ -220,7 +271,11 @@ _abc__reset_registry(PyObject *module, PyObject *self)
if (impl == NULL) {
return NULL;
}
- if (impl->_abc_registry != NULL && PySet_Clear(impl->_abc_registry) < 0) {
+ PyObject *registry;
+ Py_BEGIN_CRITICAL_SECTION(impl);
+ registry = impl->_abc_registry;
+ Py_END_CRITICAL_SECTION();
+ if (registry != NULL && PySet_Clear(registry) < 0) {
Py_DECREF(impl);
return NULL;
}
@@ -247,13 +302,17 @@ _abc__reset_caches(PyObject *module, PyObject *self)
if (impl == NULL) {
return NULL;
}
- if (impl->_abc_cache != NULL && PySet_Clear(impl->_abc_cache) < 0) {
+ PyObject *cache, *negative_cache;
+ Py_BEGIN_CRITICAL_SECTION(impl);
+ cache = impl->_abc_cache;
+ negative_cache = impl->_abc_negative_cache;
+ Py_END_CRITICAL_SECTION();
+ if (cache != NULL && PySet_Clear(cache) < 0) {
Py_DECREF(impl);
return NULL;
}
/* also the second cache */
- if (impl->_abc_negative_cache != NULL &&
- PySet_Clear(impl->_abc_negative_cache) < 0) {
+ if (negative_cache != NULL && PySet_Clear(negative_cache) < 0) {
Py_DECREF(impl);
return NULL;
}
@@ -282,11 +341,14 @@ _abc__get_dump(PyObject *module, PyObject *self)
if (impl == NULL) {
return NULL;
}
- PyObject *res = Py_BuildValue("NNNK",
- PySet_New(impl->_abc_registry),
- PySet_New(impl->_abc_cache),
- PySet_New(impl->_abc_negative_cache),
- impl->_abc_negative_cache_version);
+ PyObject *res;
+ Py_BEGIN_CRITICAL_SECTION(impl);
+ res = Py_BuildValue("NNNK",
+ PySet_New(impl->_abc_registry),
+ PySet_New(impl->_abc_cache),
+ PySet_New(impl->_abc_negative_cache),
+ get_cache_version(impl));
+ Py_END_CRITICAL_SECTION();
Py_DECREF(impl);
return res;
}
@@ -453,56 +515,27 @@ _abc__abc_init(PyObject *module, PyObject *self)
if (PyType_Check(self)) {
PyTypeObject *cls = (PyTypeObject *)self;
PyObject *dict = _PyType_GetDict(cls);
- PyObject *flags = PyDict_GetItemWithError(dict,
- &_Py_ID(__abc_tpflags__));
- if (flags == NULL) {
- if (PyErr_Occurred()) {
- return NULL;
- }
+ PyObject *flags = NULL;
+ if (PyDict_Pop(dict, &_Py_ID(__abc_tpflags__), &flags) < 0) {
+ return NULL;
}
- else {
- if (PyLong_CheckExact(flags)) {
- long val = PyLong_AsLong(flags);
- if (val == -1 && PyErr_Occurred()) {
- return NULL;
- }
- if ((val & COLLECTION_FLAGS) == COLLECTION_FLAGS) {
- PyErr_SetString(PyExc_TypeError, "__abc_tpflags__ cannot be both Py_TPFLAGS_SEQUENCE and Py_TPFLAGS_MAPPING");
- return NULL;
- }
- ((PyTypeObject *)self)->tp_flags |= (val & COLLECTION_FLAGS);
- }
- if (PyDict_DelItem(dict, &_Py_ID(__abc_tpflags__)) < 0) {
- return NULL;
- }
+ if (flags == NULL || !PyLong_CheckExact(flags)) {
+ Py_XDECREF(flags);
+ Py_RETURN_NONE;
}
- }
- Py_RETURN_NONE;
-}
-
-static void
-set_collection_flag_recursive(PyTypeObject *child, unsigned long flag)
-{
- assert(flag == Py_TPFLAGS_MAPPING || flag == Py_TPFLAGS_SEQUENCE);
- if (PyType_HasFeature(child, Py_TPFLAGS_IMMUTABLETYPE) ||
- (child->tp_flags & COLLECTION_FLAGS) == flag)
- {
- return;
- }
-
- child->tp_flags &= ~COLLECTION_FLAGS;
- child->tp_flags |= flag;
-
- PyObject *grandchildren = _PyType_GetSubclasses(child);
- if (grandchildren == NULL) {
- return;
- }
- for (Py_ssize_t i = 0; i < PyList_GET_SIZE(grandchildren); i++) {
- PyObject *grandchild = PyList_GET_ITEM(grandchildren, i);
- set_collection_flag_recursive((PyTypeObject *)grandchild, flag);
+ long val = PyLong_AsLong(flags);
+ Py_DECREF(flags);
+ if (val == -1 && PyErr_Occurred()) {
+ return NULL;
+ }
+ if ((val & COLLECTION_FLAGS) == COLLECTION_FLAGS) {
+ PyErr_SetString(PyExc_TypeError, "__abc_tpflags__ cannot be both Py_TPFLAGS_SEQUENCE and Py_TPFLAGS_MAPPING");
+ return NULL;
+ }
+ _PyType_SetFlags((PyTypeObject *)self, 0, val & COLLECTION_FLAGS);
}
- Py_DECREF(grandchildren);
+ Py_RETURN_NONE;
}
/*[clinic input]
@@ -545,20 +578,23 @@ _abc__abc_register_impl(PyObject *module, PyObject *self, PyObject *subclass)
if (impl == NULL) {
return NULL;
}
- if (_add_to_weak_set(&impl->_abc_registry, subclass) < 0) {
+ if (_add_to_weak_set(impl, &impl->_abc_registry, subclass) < 0) {
Py_DECREF(impl);
return NULL;
}
Py_DECREF(impl);
/* Invalidate negative cache */
- get_abc_state(module)->abc_invalidation_counter++;
+ increment_invalidation_counter(get_abc_state(module));
- /* Set Py_TPFLAGS_SEQUENCE or Py_TPFLAGS_MAPPING flag */
+ /* Set Py_TPFLAGS_SEQUENCE or Py_TPFLAGS_MAPPING flag */
if (PyType_Check(self)) {
- unsigned long collection_flag = ((PyTypeObject *)self)->tp_flags & COLLECTION_FLAGS;
+ unsigned long collection_flag =
+ PyType_GetFlags((PyTypeObject *)self) & COLLECTION_FLAGS;
if (collection_flag) {
- set_collection_flag_recursive((PyTypeObject *)subclass, collection_flag);
+ _PyType_SetFlagsRecursive((PyTypeObject *)subclass,
+ COLLECTION_FLAGS,
+ collection_flag);
}
}
return Py_NewRef(subclass);
@@ -592,7 +628,7 @@ _abc__abc_instancecheck_impl(PyObject *module, PyObject *self,
return NULL;
}
/* Inline the cache checking. */
- int incache = _in_weak_set(impl->_abc_cache, subclass);
+ int incache = _in_weak_set(impl, &impl->_abc_cache, subclass);
if (incache < 0) {
goto end;
}
@@ -602,8 +638,8 @@ _abc__abc_instancecheck_impl(PyObject *module, PyObject *self,
}
subtype = (PyObject *)Py_TYPE(instance);
if (subtype == subclass) {
- if (impl->_abc_negative_cache_version == get_abc_state(module)->abc_invalidation_counter) {
- incache = _in_weak_set(impl->_abc_negative_cache, subclass);
+ if (get_cache_version(impl) == get_invalidation_counter(get_abc_state(module))) {
+ incache = _in_weak_set(impl, &impl->_abc_negative_cache, subclass);
if (incache < 0) {
goto end;
}
@@ -681,7 +717,7 @@ _abc__abc_subclasscheck_impl(PyObject *module, PyObject *self,
}
/* 1. Check cache. */
- incache = _in_weak_set(impl->_abc_cache, subclass);
+ incache = _in_weak_set(impl, &impl->_abc_cache, subclass);
if (incache < 0) {
goto end;
}
@@ -692,17 +728,20 @@ _abc__abc_subclasscheck_impl(PyObject *module, PyObject *self,
state = get_abc_state(module);
/* 2. Check negative cache; may have to invalidate. */
- if (impl->_abc_negative_cache_version < state->abc_invalidation_counter) {
+ uint64_t invalidation_counter = get_invalidation_counter(state);
+ if (get_cache_version(impl) < invalidation_counter) {
/* Invalidate the negative cache. */
- if (impl->_abc_negative_cache != NULL &&
- PySet_Clear(impl->_abc_negative_cache) < 0)
- {
+ PyObject *negative_cache;
+ Py_BEGIN_CRITICAL_SECTION(impl);
+ negative_cache = impl->_abc_negative_cache;
+ Py_END_CRITICAL_SECTION();
+ if (negative_cache != NULL && PySet_Clear(negative_cache) < 0) {
goto end;
}
- impl->_abc_negative_cache_version = state->abc_invalidation_counter;
+ set_cache_version(impl, invalidation_counter);
}
else {
- incache = _in_weak_set(impl->_abc_negative_cache, subclass);
+ incache = _in_weak_set(impl, &impl->_abc_negative_cache, subclass);
if (incache < 0) {
goto end;
}
@@ -720,7 +759,7 @@ _abc__abc_subclasscheck_impl(PyObject *module, PyObject *self,
}
if (ok == Py_True) {
Py_DECREF(ok);
- if (_add_to_weak_set(&impl->_abc_cache, subclass) < 0) {
+ if (_add_to_weak_set(impl, &impl->_abc_cache, subclass) < 0) {
goto end;
}
result = Py_True;
@@ -728,7 +767,7 @@ _abc__abc_subclasscheck_impl(PyObject *module, PyObject *self,
}
if (ok == Py_False) {
Py_DECREF(ok);
- if (_add_to_weak_set(&impl->_abc_negative_cache, subclass) < 0) {
+ if (_add_to_weak_set(impl, &impl->_abc_negative_cache, subclass) < 0) {
goto end;
}
result = Py_False;
@@ -744,7 +783,7 @@ _abc__abc_subclasscheck_impl(PyObject *module, PyObject *self,
/* 4. Check if it's a direct subclass. */
if (PyType_IsSubtype((PyTypeObject *)subclass, (PyTypeObject *)self)) {
- if (_add_to_weak_set(&impl->_abc_cache, subclass) < 0) {
+ if (_add_to_weak_set(impl, &impl->_abc_cache, subclass) < 0) {
goto end;
}
result = Py_True;
@@ -767,12 +806,14 @@ _abc__abc_subclasscheck_impl(PyObject *module, PyObject *self,
goto end;
}
for (pos = 0; pos < PyList_GET_SIZE(subclasses); pos++) {
- PyObject *scls = PyList_GET_ITEM(subclasses, pos);
- Py_INCREF(scls);
+ PyObject *scls = PyList_GetItemRef(subclasses, pos);
+ if (scls == NULL) {
+ goto end;
+ }
int r = PyObject_IsSubclass(subclass, scls);
Py_DECREF(scls);
if (r > 0) {
- if (_add_to_weak_set(&impl->_abc_cache, subclass) < 0) {
+ if (_add_to_weak_set(impl, &impl->_abc_cache, subclass) < 0) {
goto end;
}
result = Py_True;
@@ -784,7 +825,7 @@ _abc__abc_subclasscheck_impl(PyObject *module, PyObject *self,
}
/* No dice; update negative cache. */
- if (_add_to_weak_set(&impl->_abc_negative_cache, subclass) < 0) {
+ if (_add_to_weak_set(impl, &impl->_abc_negative_cache, subclass) < 0) {
goto end;
}
result = Py_False;
@@ -801,7 +842,7 @@ subclasscheck_check_registry(_abc_data *impl, PyObject *subclass,
PyObject **result)
{
// Fast path: check subclass is in weakref directly.
- int ret = _in_weak_set(impl->_abc_registry, subclass);
+ int ret = _in_weak_set(impl, &impl->_abc_registry, subclass);
if (ret < 0) {
*result = NULL;
return -1;
@@ -811,33 +852,27 @@ subclasscheck_check_registry(_abc_data *impl, PyObject *subclass,
return 1;
}
- if (impl->_abc_registry == NULL) {
+ PyObject *registry_shared;
+ Py_BEGIN_CRITICAL_SECTION(impl);
+ registry_shared = impl->_abc_registry;
+ Py_END_CRITICAL_SECTION();
+ if (registry_shared == NULL) {
return 0;
}
- Py_ssize_t registry_size = PySet_Size(impl->_abc_registry);
- if (registry_size == 0) {
- return 0;
- }
- // Weakref callback may remove entry from set.
- // So we take snapshot of registry first.
- PyObject **copy = PyMem_Malloc(sizeof(PyObject*) * registry_size);
- if (copy == NULL) {
- PyErr_NoMemory();
+
+ // Make a local copy of the registry to protect against concurrent
+ // modifications of _abc_registry.
+ PyObject *registry = PySet_New(registry_shared);
+ if (registry == NULL) {
return -1;
}
PyObject *key;
Py_ssize_t pos = 0;
Py_hash_t hash;
- Py_ssize_t i = 0;
- while (_PySet_NextEntry(impl->_abc_registry, &pos, &key, &hash)) {
- copy[i++] = Py_NewRef(key);
- }
- assert(i == registry_size);
-
- for (i = 0; i < registry_size; i++) {
+ while (_PySet_NextEntry(registry, &pos, &key, &hash)) {
PyObject *rkey;
- if (PyWeakref_GetRef(copy[i], &rkey) < 0) {
+ if (PyWeakref_GetRef(key, &rkey) < 0) {
// Someone inject non-weakref type in the registry.
ret = -1;
break;
@@ -853,7 +888,7 @@ subclasscheck_check_registry(_abc_data *impl, PyObject *subclass,
break;
}
if (r > 0) {
- if (_add_to_weak_set(&impl->_abc_cache, subclass) < 0) {
+ if (_add_to_weak_set(impl, &impl->_abc_cache, subclass) < 0) {
ret = -1;
break;
}
@@ -863,10 +898,7 @@ subclasscheck_check_registry(_abc_data *impl, PyObject *subclass,
}
}
- for (i = 0; i < registry_size; i++) {
- Py_DECREF(copy[i]);
- }
- PyMem_Free(copy);
+ Py_DECREF(registry);
return ret;
}
@@ -885,7 +917,7 @@ _abc_get_cache_token_impl(PyObject *module)
/*[clinic end generated code: output=c7d87841e033dacc input=70413d1c423ad9f9]*/
{
_abcmodule_state *state = get_abc_state(module);
- return PyLong_FromUnsignedLongLong(state->abc_invalidation_counter);
+ return PyLong_FromUnsignedLongLong(get_invalidation_counter(state));
}
static struct PyMethodDef _abcmodule_methods[] = {
diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c
index 29246cfa6afd00..b886051186de9c 100644
--- a/Modules/_asynciomodule.c
+++ b/Modules/_asynciomodule.c
@@ -2393,6 +2393,9 @@ _asyncio_Task_uncancel_impl(TaskObj *self)
{
if (self->task_num_cancels_requested > 0) {
self->task_num_cancels_requested -= 1;
+ if (self->task_num_cancels_requested == 0) {
+ self->task_must_cancel = 0;
+ }
}
return PyLong_FromLong(self->task_num_cancels_requested);
}
diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c
index 631f82879311bf..3cb0b24668eb2a 100644
--- a/Modules/_ctypes/_ctypes.c
+++ b/Modules/_ctypes/_ctypes.c
@@ -126,8 +126,16 @@ bytes(cdata)
#include "pycore_long.h" // _PyLong_GetZero()
-ctypes_state global_state = {0};
+/*[clinic input]
+module _ctypes
+[clinic start generated code]*/
+/*[clinic end generated code: output=da39a3ee5e6b4b0d input=476a19c49b31a75c]*/
+#define clinic_state() (get_module_state_by_class(cls))
+#define clinic_state_sub() (get_module_state_by_class(cls->tp_base))
+#include "clinic/_ctypes.c.h"
+#undef clinic_state
+#undef clinic_state_sub
/****************************************************************/
@@ -438,10 +446,15 @@ static PyType_Spec structparam_spec = {
CType_Type - a base metaclass. Its instances (classes) have a StgInfo.
*/
+/*[clinic input]
+class _ctypes.CType_Type "PyObject *" "clinic_state()->CType_Type"
+[clinic start generated code]*/
+/*[clinic end generated code: output=da39a3ee5e6b4b0d input=8389fc5b74a84f2a]*/
+
static int
CType_Type_traverse(PyObject *self, visitproc visit, void *arg)
{
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def_final(Py_TYPE(self));
if (st && st->PyCType_Type) {
StgInfo *info;
if (PyStgInfo_FromType(st, self, &info) < 0) {
@@ -475,7 +488,7 @@ ctype_clear_stginfo(StgInfo *info)
static int
CType_Type_clear(PyObject *self)
{
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def_final(Py_TYPE(self));
if (st && st->PyCType_Type) {
StgInfo *info;
if (PyStgInfo_FromType(st, self, &info) < 0) {
@@ -491,8 +504,7 @@ CType_Type_clear(PyObject *self)
static void
CType_Type_dealloc(PyObject *self)
{
- ctypes_state *st = GLOBAL_STATE();
-
+ ctypes_state *st = get_module_state_by_def_final(Py_TYPE(self));
if (st && st->PyCType_Type) {
StgInfo *info;
if (PyStgInfo_FromType(st, self, &info) < 0) {
@@ -508,19 +520,27 @@ CType_Type_dealloc(PyObject *self)
ctype_clear_stginfo(info);
}
}
-
PyTypeObject *tp = Py_TYPE(self);
PyType_Type.tp_dealloc(self);
Py_DECREF(tp);
}
+/*[clinic input]
+_ctypes.CType_Type.__sizeof__
+
+ cls: defining_class
+ /
+Return memory consumption of the type object.
+[clinic start generated code]*/
+
static PyObject *
-CType_Type_sizeof(PyObject *self)
+_ctypes_CType_Type___sizeof___impl(PyObject *self, PyTypeObject *cls)
+/*[clinic end generated code: output=c68c235be84d03f3 input=d064433b6110d1ce]*/
{
Py_ssize_t size = Py_TYPE(self)->tp_basicsize;
size += Py_TYPE(self)->tp_itemsize * Py_SIZE(self);
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_class(cls);
StgInfo *info;
if (PyStgInfo_FromType(st, self, &info) < 0) {
return NULL;
@@ -543,8 +563,7 @@ CType_Type_repeat(PyObject *self, Py_ssize_t length);
static PyMethodDef ctype_methods[] = {
- {"__sizeof__", _PyCFunction_CAST(CType_Type_sizeof),
- METH_NOARGS, PyDoc_STR("Return memory consumption of the type object.")},
+ _CTYPES_CTYPE_TYPE___SIZEOF___METHODDEF
{0},
};
@@ -647,7 +666,7 @@ StructUnionType_init(PyObject *self, PyObject *args, PyObject *kwds, int isStruc
return -1;
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(self));
StgInfo *info = PyStgInfo_Init(st, (PyTypeObject *)self);
if (!info) {
Py_DECREF(attrdict);
@@ -710,11 +729,29 @@ UnionType_init(PyObject *self, PyObject *args, PyObject *kwds)
return StructUnionType_init(self, args, kwds, 0);
}
-PyDoc_STRVAR(from_address_doc,
-"C.from_address(integer) -> C instance\naccess a C instance at the specified address");
+/*[clinic input]
+class _ctypes.CDataType "PyObject *" "clinic_state()->CType_Type"
+[clinic start generated code]*/
+/*[clinic end generated code: output=da39a3ee5e6b4b0d input=466a505a93d73156]*/
+
+
+/*[clinic input]
+_ctypes.CDataType.from_address as CDataType_from_address
+
+ type: self
+ cls: defining_class
+ value: object
+ /
+
+C.from_address(integer) -> C instance
+
+Access a C instance at the specified address.
+[clinic start generated code]*/
static PyObject *
-CDataType_from_address(PyObject *type, PyObject *value)
+CDataType_from_address_impl(PyObject *type, PyTypeObject *cls,
+ PyObject *value)
+/*[clinic end generated code: output=5be4a7c0d9aa6c74 input=827a22cefe380c01]*/
{
void *buf;
if (!PyLong_Check(value)) {
@@ -725,26 +762,37 @@ CDataType_from_address(PyObject *type, PyObject *value)
buf = (void *)PyLong_AsVoidPtr(value);
if (PyErr_Occurred())
return NULL;
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_class(cls);
return PyCData_AtAddress(st, type, buf);
}
-PyDoc_STRVAR(from_buffer_doc,
-"C.from_buffer(object, offset=0) -> C instance\ncreate a C instance from a writeable buffer");
-
static int
KeepRef(CDataObject *target, Py_ssize_t index, PyObject *keep);
+/*[clinic input]
+_ctypes.CDataType.from_buffer as CDataType_from_buffer
+
+ type: self
+ cls: defining_class
+ obj: object
+ offset: Py_ssize_t = 0
+ /
+
+C.from_buffer(object, offset=0) -> C instance
+
+Create a C instance from a writeable buffer.
+[clinic start generated code]*/
+
static PyObject *
-CDataType_from_buffer(PyObject *type, PyObject *args)
+CDataType_from_buffer_impl(PyObject *type, PyTypeObject *cls, PyObject *obj,
+ Py_ssize_t offset)
+/*[clinic end generated code: output=57604e99635abd31 input=0f36cedd105ca28d]*/
{
- PyObject *obj;
PyObject *mv;
PyObject *result;
Py_buffer *buffer;
- Py_ssize_t offset = 0;
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_class(cls);
StgInfo *info;
if (PyStgInfo_FromType(st, type, &info) < 0) {
return NULL;
@@ -754,9 +802,6 @@ CDataType_from_buffer(PyObject *type, PyObject *args)
return NULL;
}
- if (!PyArg_ParseTuple(args, "O|n:from_buffer", &obj, &offset))
- return NULL;
-
mv = PyMemoryView_FromObject(obj);
if (mv == NULL)
return NULL;
@@ -813,9 +858,6 @@ CDataType_from_buffer(PyObject *type, PyObject *args)
return result;
}
-PyDoc_STRVAR(from_buffer_copy_doc,
-"C.from_buffer_copy(object, offset=0) -> C instance\ncreate a C instance from a readable buffer");
-
static inline PyObject *
generic_pycdata_new(ctypes_state *st,
PyTypeObject *type, PyObject *args, PyObject *kwds);
@@ -823,14 +865,28 @@ generic_pycdata_new(ctypes_state *st,
static PyObject *
GenericPyCData_new(PyTypeObject *type, PyObject *args, PyObject *kwds);
+/*[clinic input]
+_ctypes.CDataType.from_buffer_copy as CDataType_from_buffer_copy
+
+ type: self
+ cls: defining_class
+ buffer: Py_buffer
+ offset: Py_ssize_t = 0
+ /
+
+C.from_buffer_copy(object, offset=0) -> C instance
+
+Create a C instance from a readable buffer.
+[clinic start generated code]*/
+
static PyObject *
-CDataType_from_buffer_copy(PyObject *type, PyObject *args)
+CDataType_from_buffer_copy_impl(PyObject *type, PyTypeObject *cls,
+ Py_buffer *buffer, Py_ssize_t offset)
+/*[clinic end generated code: output=c8fc62b03e5cc6fa input=2a81e11b765a6253]*/
{
- Py_buffer buffer;
- Py_ssize_t offset = 0;
PyObject *result;
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_class(cls);
StgInfo *info;
if (PyStgInfo_FromType(st, type, &info) < 0) {
return NULL;
@@ -840,54 +896,56 @@ CDataType_from_buffer_copy(PyObject *type, PyObject *args)
return NULL;
}
- if (!PyArg_ParseTuple(args, "y*|n:from_buffer_copy", &buffer, &offset))
- return NULL;
-
if (offset < 0) {
PyErr_SetString(PyExc_ValueError,
"offset cannot be negative");
- PyBuffer_Release(&buffer);
return NULL;
}
- if (info->size > buffer.len - offset) {
+ if (info->size > buffer->len - offset) {
PyErr_Format(PyExc_ValueError,
"Buffer size too small (%zd instead of at least %zd bytes)",
- buffer.len, info->size + offset);
- PyBuffer_Release(&buffer);
+ buffer->len, info->size + offset);
return NULL;
}
if (PySys_Audit("ctypes.cdata/buffer", "nnn",
- (Py_ssize_t)buffer.buf, buffer.len, offset) < 0) {
- PyBuffer_Release(&buffer);
+ (Py_ssize_t)buffer->buf, buffer->len, offset) < 0) {
return NULL;
}
result = generic_pycdata_new(st, (PyTypeObject *)type, NULL, NULL);
if (result != NULL) {
memcpy(((CDataObject *)result)->b_ptr,
- (char *)buffer.buf + offset, info->size);
+ (char *)buffer->buf + offset, info->size);
}
- PyBuffer_Release(&buffer);
return result;
}
-PyDoc_STRVAR(in_dll_doc,
-"C.in_dll(dll, name) -> C instance\naccess a C instance in a dll");
+/*[clinic input]
+_ctypes.CDataType.in_dll as CDataType_in_dll
+
+ type: self
+ cls: defining_class
+ dll: object
+ name: str
+ /
+
+C.in_dll(dll, name) -> C instance
+
+Access a C instance in a dll.
+[clinic start generated code]*/
static PyObject *
-CDataType_in_dll(PyObject *type, PyObject *args)
+CDataType_in_dll_impl(PyObject *type, PyTypeObject *cls, PyObject *dll,
+ const char *name)
+/*[clinic end generated code: output=d0e5c43b66bfa21f input=f85bf281477042b4]*/
{
- PyObject *dll;
- char *name;
PyObject *obj;
void *handle;
void *address;
- if (!PyArg_ParseTuple(args, "Os:in_dll", &dll, &name))
- return NULL;
- if (PySys_Audit("ctypes.dlsym", "O", args) < 0) {
+ if (PySys_Audit("ctypes.dlsym", "Os", dll, name) < 0) {
return NULL;
}
@@ -932,15 +990,24 @@ CDataType_in_dll(PyObject *type, PyObject *args)
return NULL;
}
#endif
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(type));
return PyCData_AtAddress(st, type, address);
}
-PyDoc_STRVAR(from_param_doc,
-"Convert a Python object into a function call parameter.");
+/*[clinic input]
+_ctypes.CDataType.from_param as CDataType_from_param
+
+ type: self
+ cls: defining_class
+ value: object
+ /
+
+Convert a Python object into a function call parameter.
+[clinic start generated code]*/
static PyObject *
-CDataType_from_param(PyObject *type, PyObject *value)
+CDataType_from_param_impl(PyObject *type, PyTypeObject *cls, PyObject *value)
+/*[clinic end generated code: output=8da9e34263309f9e input=275a52c4899ddff0]*/
{
PyObject *as_parameter;
int res = PyObject_IsInstance(value, type);
@@ -949,7 +1016,7 @@ CDataType_from_param(PyObject *type, PyObject *value)
if (res) {
return Py_NewRef(value);
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_class(cls);
if (PyCArg_CheckExact(st, value)) {
PyCArgObject *p = (PyCArgObject *)value;
PyObject *ob = p->obj;
@@ -979,7 +1046,7 @@ CDataType_from_param(PyObject *type, PyObject *value)
return NULL;
}
if (as_parameter) {
- value = CDataType_from_param(type, as_parameter);
+ value = CDataType_from_param_impl(type, cls, as_parameter);
Py_DECREF(as_parameter);
return value;
}
@@ -991,11 +1058,11 @@ CDataType_from_param(PyObject *type, PyObject *value)
}
static PyMethodDef CDataType_methods[] = {
- { "from_param", CDataType_from_param, METH_O, from_param_doc },
- { "from_address", CDataType_from_address, METH_O, from_address_doc },
- { "from_buffer", CDataType_from_buffer, METH_VARARGS, from_buffer_doc, },
- { "from_buffer_copy", CDataType_from_buffer_copy, METH_VARARGS, from_buffer_copy_doc, },
- { "in_dll", CDataType_in_dll, METH_VARARGS, in_dll_doc },
+ CDATATYPE_FROM_PARAM_METHODDEF
+ CDATATYPE_FROM_ADDRESS_METHODDEF
+ CDATATYPE_FROM_BUFFER_METHODDEF
+ CDATATYPE_FROM_BUFFER_COPY_METHODDEF
+ CDATATYPE_IN_DLL_METHODDEF
{ NULL, NULL },
};
@@ -1006,10 +1073,11 @@ CType_Type_repeat(PyObject *self, Py_ssize_t length)
return PyErr_Format(PyExc_ValueError,
"Array length must be >= 0, not %zd",
length);
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(self));
return PyCArrayType_from_ctype(st, self, length);
}
+
static int
PyCStructType_setattro(PyObject *self, PyObject *key, PyObject *value)
{
@@ -1083,6 +1151,12 @@ size property/method, and the sequence protocol.
*/
+/*[clinic input]
+class _ctypes.PyCPointerType "PyObject *" "clinic_state()->PyCPointerType_Type"
+[clinic start generated code]*/
+/*[clinic end generated code: output=da39a3ee5e6b4b0d input=c45e96c1f7645ab7]*/
+
+
static int
PyCPointerType_SetProto(ctypes_state *st, StgInfo *stginfo, PyObject *proto)
{
@@ -1136,7 +1210,7 @@ PyCPointerType_init(PyObject *self, PyObject *args, PyObject *kwds)
stginfo items size, align, length contain info about pointers itself,
stginfo->proto has info about the pointed to type!
*/
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(self));
StgInfo *stginfo = PyStgInfo_Init(st, (PyTypeObject *)self);
if (!stginfo) {
return -1;
@@ -1186,15 +1260,25 @@ PyCPointerType_init(PyObject *self, PyObject *args, PyObject *kwds)
return 0;
}
+/*[clinic input]
+_ctypes.PyCPointerType.set_type as PyCPointerType_set_type
+
+ self: self(type="PyTypeObject *")
+ cls: defining_class
+ type: object
+ /
+[clinic start generated code]*/
static PyObject *
-PyCPointerType_set_type(PyTypeObject *self, PyObject *type)
+PyCPointerType_set_type_impl(PyTypeObject *self, PyTypeObject *cls,
+ PyObject *type)
+/*[clinic end generated code: output=51459d8f429a70ac input=67e1e8df921f123e]*/
{
PyObject *attrdict = PyType_GetDict(self);
if (!attrdict) {
return NULL;
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_class(cls);
StgInfo *info;
if (PyStgInfo_FromType(st, (PyObject *)self, &info) < 0) {
Py_DECREF(attrdict);
@@ -1223,15 +1307,28 @@ PyCPointerType_set_type(PyTypeObject *self, PyObject *type)
static PyObject *_byref(ctypes_state *, PyObject *);
+/*[clinic input]
+_ctypes.PyCPointerType.from_param as PyCPointerType_from_param
+
+ type: self
+ cls: defining_class
+ value: object
+ /
+
+Convert a Python object into a function call parameter.
+[clinic start generated code]*/
+
static PyObject *
-PyCPointerType_from_param(PyObject *type, PyObject *value)
+PyCPointerType_from_param_impl(PyObject *type, PyTypeObject *cls,
+ PyObject *value)
+/*[clinic end generated code: output=a4b32d929aabaf64 input=6c231276e3997884]*/
{
if (value == Py_None) {
/* ConvParam will convert to a NULL pointer later */
return Py_NewRef(value);
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_class(cls);
StgInfo *typeinfo;
if (PyStgInfo_FromType(st, type, &typeinfo) < 0) {
return NULL;
@@ -1273,16 +1370,16 @@ PyCPointerType_from_param(PyObject *type, PyObject *value)
return Py_NewRef(value);
}
}
- return CDataType_from_param(type, value);
+ return CDataType_from_param_impl(type, cls, value);
}
static PyMethodDef PyCPointerType_methods[] = {
- { "from_address", CDataType_from_address, METH_O, from_address_doc },
- { "from_buffer", CDataType_from_buffer, METH_VARARGS, from_buffer_doc, },
- { "from_buffer_copy", CDataType_from_buffer_copy, METH_VARARGS, from_buffer_copy_doc, },
- { "in_dll", CDataType_in_dll, METH_VARARGS, in_dll_doc},
- { "from_param", (PyCFunction)PyCPointerType_from_param, METH_O, from_param_doc},
- { "set_type", (PyCFunction)PyCPointerType_set_type, METH_O },
+ CDATATYPE_FROM_ADDRESS_METHODDEF
+ CDATATYPE_FROM_BUFFER_METHODDEF
+ CDATATYPE_FROM_BUFFER_COPY_METHODDEF
+ CDATATYPE_IN_DLL_METHODDEF
+ PYCPOINTERTYPE_FROM_PARAM_METHODDEF
+ PYCPOINTERTYPE_SET_TYPE_METHODDEF
{ NULL, NULL },
};
@@ -1543,7 +1640,7 @@ PyCArrayType_init(PyObject *self, PyObject *args, PyObject *kwds)
goto error;
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(self));
StgInfo *stginfo = PyStgInfo_Init(st, (PyTypeObject*)self);
if (!stginfo) {
goto error;
@@ -1641,17 +1738,47 @@ _type_ attribute.
*/
+/*[clinic input]
+class _ctypes.PyCSimpleType "PyObject *" "clinic_state()->PyCSimpleType_Type"
+[clinic start generated code]*/
+/*[clinic end generated code: output=da39a3ee5e6b4b0d input=d5a45772668e7f49]*/
+
+/*[clinic input]
+class _ctypes.c_wchar_p "PyObject *" "clinic_state_sub()->PyCSimpleType_Type"
+[clinic start generated code]*/
+/*[clinic end generated code: output=da39a3ee5e6b4b0d input=468de7283d622d47]*/
+
+/*[clinic input]
+class _ctypes.c_char_p "PyObject *" "clinic_state_sub()->PyCSimpleType_Type"
+[clinic start generated code]*/
+/*[clinic end generated code: output=da39a3ee5e6b4b0d input=e750865616e7dcea]*/
+
+/*[clinic input]
+class _ctypes.c_void_p "PyObject *" "clinic_state_sub()->PyCSimpleType_Type"
+[clinic start generated code]*/
+/*[clinic end generated code: output=da39a3ee5e6b4b0d input=dd4d9646c56f43a9]*/
+
static const char SIMPLE_TYPE_CHARS[] = "cbBhHiIlLdfuzZqQPXOv?g";
+/*[clinic input]
+_ctypes.c_wchar_p.from_param as c_wchar_p_from_param
+
+ type: self
+ cls: defining_class
+ value: object
+ /
+[clinic start generated code]*/
+
static PyObject *
-c_wchar_p_from_param(PyObject *type, PyObject *value)
+c_wchar_p_from_param_impl(PyObject *type, PyTypeObject *cls, PyObject *value)
+/*[clinic end generated code: output=e453949a2f725a4c input=d322c7237a319607]*/
{
PyObject *as_parameter;
int res;
if (value == Py_None) {
Py_RETURN_NONE;
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_class(cls->tp_base);
if (PyUnicode_Check(value)) {
PyCArgObject *parg;
struct fielddesc *fd = _ctypes_get_fielddesc("Z");
@@ -1707,7 +1834,7 @@ c_wchar_p_from_param(PyObject *type, PyObject *value)
return NULL;
}
if (as_parameter) {
- value = c_wchar_p_from_param(type, as_parameter);
+ value = c_wchar_p_from_param_impl(type, cls, as_parameter);
Py_DECREF(as_parameter);
return value;
}
@@ -1717,15 +1844,25 @@ c_wchar_p_from_param(PyObject *type, PyObject *value)
return NULL;
}
+/*[clinic input]
+_ctypes.c_char_p.from_param as c_char_p_from_param
+
+ type: self
+ cls: defining_class
+ value: object
+ /
+[clinic start generated code]*/
+
static PyObject *
-c_char_p_from_param(PyObject *type, PyObject *value)
+c_char_p_from_param_impl(PyObject *type, PyTypeObject *cls, PyObject *value)
+/*[clinic end generated code: output=219652ab7c174aa1 input=6cf0d1b6bb4ede11]*/
{
PyObject *as_parameter;
int res;
if (value == Py_None) {
Py_RETURN_NONE;
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_class(cls->tp_base);
if (PyBytes_Check(value)) {
PyCArgObject *parg;
struct fielddesc *fd = _ctypes_get_fielddesc("z");
@@ -1781,7 +1918,7 @@ c_char_p_from_param(PyObject *type, PyObject *value)
return NULL;
}
if (as_parameter) {
- value = c_char_p_from_param(type, as_parameter);
+ value = c_char_p_from_param_impl(type, cls, as_parameter);
Py_DECREF(as_parameter);
return value;
}
@@ -1791,8 +1928,18 @@ c_char_p_from_param(PyObject *type, PyObject *value)
return NULL;
}
+/*[clinic input]
+_ctypes.c_void_p.from_param as c_void_p_from_param
+
+ type: self
+ cls: defining_class
+ value: object
+ /
+[clinic start generated code]*/
+
static PyObject *
-c_void_p_from_param(PyObject *type, PyObject *value)
+c_void_p_from_param_impl(PyObject *type, PyTypeObject *cls, PyObject *value)
+/*[clinic end generated code: output=984d0075b6038cc7 input=0e8b343fc19c77d4]*/
{
PyObject *as_parameter;
int res;
@@ -1801,7 +1948,7 @@ c_void_p_from_param(PyObject *type, PyObject *value)
if (value == Py_None) {
Py_RETURN_NONE;
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_class(cls->tp_base);
/* Should probably allow buffer interface as well */
/* int, long */
@@ -1923,7 +2070,7 @@ c_void_p_from_param(PyObject *type, PyObject *value)
return NULL;
}
if (as_parameter) {
- value = c_void_p_from_param(type, as_parameter);
+ value = c_void_p_from_param_impl(type, cls, as_parameter);
Py_DECREF(as_parameter);
return value;
}
@@ -1933,9 +2080,9 @@ c_void_p_from_param(PyObject *type, PyObject *value)
return NULL;
}
-static PyMethodDef c_void_p_method = { "from_param", c_void_p_from_param, METH_O };
-static PyMethodDef c_char_p_method = { "from_param", c_char_p_from_param, METH_O };
-static PyMethodDef c_wchar_p_method = { "from_param", c_wchar_p_from_param, METH_O };
+static PyMethodDef c_void_p_methods[] = {C_VOID_P_FROM_PARAM_METHODDEF {0}};
+static PyMethodDef c_char_p_methods[] = {C_CHAR_P_FROM_PARAM_METHODDEF {0}};
+static PyMethodDef c_wchar_p_methods[] = {C_WCHAR_P_FROM_PARAM_METHODDEF {0}};
static PyObject *CreateSwappedType(ctypes_state *st, PyTypeObject *type,
PyObject *args, PyObject *kwds,
@@ -2081,7 +2228,7 @@ PyCSimpleType_init(PyObject *self, PyObject *args, PyObject *kwds)
goto error;
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(self));
StgInfo *stginfo = PyStgInfo_Init(st, (PyTypeObject *)self);
if (!stginfo) {
goto error;
@@ -2120,15 +2267,15 @@ PyCSimpleType_init(PyObject *self, PyObject *args, PyObject *kwds)
if (((PyTypeObject *)self)->tp_base == st->Simple_Type) {
switch (*proto_str) {
case 'z': /* c_char_p */
- ml = &c_char_p_method;
+ ml = c_char_p_methods;
stginfo->flags |= TYPEFLAG_ISPOINTER;
break;
case 'Z': /* c_wchar_p */
- ml = &c_wchar_p_method;
+ ml = c_wchar_p_methods;
stginfo->flags |= TYPEFLAG_ISPOINTER;
break;
case 'P': /* c_void_p */
- ml = &c_void_p_method;
+ ml = c_void_p_methods;
stginfo->flags |= TYPEFLAG_ISPOINTER;
break;
case 's':
@@ -2202,8 +2349,22 @@ PyCSimpleType_init(PyObject *self, PyObject *args, PyObject *kwds)
* This is a *class method*.
* Convert a parameter into something that ConvParam can handle.
*/
+
+/*[clinic input]
+_ctypes.PyCSimpleType.from_param as PyCSimpleType_from_param
+
+ type: self
+ cls: defining_class
+ value: object
+ /
+
+Convert a Python object into a function call parameter.
+[clinic start generated code]*/
+
static PyObject *
-PyCSimpleType_from_param(PyObject *type, PyObject *value)
+PyCSimpleType_from_param_impl(PyObject *type, PyTypeObject *cls,
+ PyObject *value)
+/*[clinic end generated code: output=8a8453d9663e3a2e input=61cc48ce3a87a570]*/
{
const char *fmt;
PyCArgObject *parg;
@@ -2220,7 +2381,7 @@ PyCSimpleType_from_param(PyObject *type, PyObject *value)
return Py_NewRef(value);
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_class(cls);
StgInfo *info;
if (PyStgInfo_FromType(st, type, &info) < 0) {
return NULL;
@@ -2260,7 +2421,7 @@ PyCSimpleType_from_param(PyObject *type, PyObject *value)
Py_XDECREF(exc);
return NULL;
}
- value = PyCSimpleType_from_param(type, as_parameter);
+ value = PyCSimpleType_from_param_impl(type, cls, as_parameter);
_Py_LeaveRecursiveCall();
Py_DECREF(as_parameter);
Py_XDECREF(exc);
@@ -2276,11 +2437,11 @@ PyCSimpleType_from_param(PyObject *type, PyObject *value)
}
static PyMethodDef PyCSimpleType_methods[] = {
- { "from_param", PyCSimpleType_from_param, METH_O, from_param_doc },
- { "from_address", CDataType_from_address, METH_O, from_address_doc },
- { "from_buffer", CDataType_from_buffer, METH_VARARGS, from_buffer_doc, },
- { "from_buffer_copy", CDataType_from_buffer_copy, METH_VARARGS, from_buffer_copy_doc, },
- { "in_dll", CDataType_in_dll, METH_VARARGS, in_dll_doc},
+ PYCSIMPLETYPE_FROM_PARAM_METHODDEF
+ CDATATYPE_FROM_ADDRESS_METHODDEF
+ CDATATYPE_FROM_BUFFER_METHODDEF
+ CDATATYPE_FROM_BUFFER_COPY_METHODDEF
+ CDATATYPE_IN_DLL_METHODDEF
{ NULL, NULL },
};
@@ -2505,7 +2666,7 @@ PyCFuncPtrType_init(PyObject *self, PyObject *args, PyObject *kwds)
return -1;
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(self));
StgInfo *stginfo = PyStgInfo_Init(st, (PyTypeObject *)self);
if (!stginfo) {
Py_DECREF(attrdict);
@@ -2659,6 +2820,13 @@ KeepRef(CDataObject *target, Py_ssize_t index, PyObject *keep)
/*
PyCData_Type
*/
+
+/*[clinic input]
+class _ctypes.PyCData "PyObject *" "clinic_state()->PyCData_Type"
+[clinic start generated code]*/
+/*[clinic end generated code: output=da39a3ee5e6b4b0d input=ac13df38dee3c22c]*/
+
+
static int
PyCData_traverse(CDataObject *self, visitproc visit, void *arg)
{
@@ -2731,7 +2899,7 @@ PyCData_NewGetBuffer(PyObject *myself, Py_buffer *view, int flags)
{
CDataObject *self = (CDataObject *)myself;
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(myself)));
StgInfo *info;
if (PyStgInfo_FromObject(st, myself, &info) < 0) {
return -1;
@@ -2776,12 +2944,21 @@ PyCData_nohash(PyObject *self)
return -1;
}
+/*[clinic input]
+_ctypes.PyCData.__reduce__ as PyCData_reduce
+
+ myself: self
+ cls: defining_class
+ /
+[clinic start generated code]*/
+
static PyObject *
-PyCData_reduce(PyObject *myself, PyObject *args)
+PyCData_reduce_impl(PyObject *myself, PyTypeObject *cls)
+/*[clinic end generated code: output=1a025ccfdd8c935d input=34097a5226ea63c1]*/
{
CDataObject *self = (CDataObject *)myself;
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_class(cls);
StgInfo *info;
if (PyStgInfo_FromObject(st, myself, &info) < 0) {
return NULL;
@@ -2846,7 +3023,7 @@ PyCData_from_outparam(PyObject *self, PyObject *args)
static PyMethodDef PyCData_methods[] = {
{ "__ctypes_from_outparam__", PyCData_from_outparam, METH_NOARGS, },
- { "__reduce__", PyCData_reduce, METH_NOARGS, },
+ PYCDATA_REDUCE_METHODDEF
{ "__setstate__", PyCData_setstate, METH_VARARGS, },
{ NULL, NULL },
};
@@ -3161,7 +3338,7 @@ PyCData_set(ctypes_state *st,
static PyObject *
GenericPyCData_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(type));
return generic_pycdata_new(st, type, args, kwds);
}
@@ -3236,7 +3413,7 @@ PyCFuncPtr_set_restype(PyCFuncPtrObject *self, PyObject *ob, void *Py_UNUSED(ign
Py_XDECREF(oldchecker);
return 0;
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self)));
StgInfo *info;
if (PyStgInfo_FromType(st, ob, &info) < 0) {
return -1;
@@ -3263,7 +3440,7 @@ PyCFuncPtr_get_restype(PyCFuncPtrObject *self, void *Py_UNUSED(ignored))
if (self->restype) {
return Py_NewRef(self->restype);
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self)));
StgInfo *info;
if (PyStgInfo_FromObject(st, (PyObject *)self, &info) < 0) {
return NULL;
@@ -3285,7 +3462,7 @@ PyCFuncPtr_set_argtypes(PyCFuncPtrObject *self, PyObject *ob, void *Py_UNUSED(ig
Py_CLEAR(self->converters);
Py_CLEAR(self->argtypes);
} else {
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self)));
converters = converters_from_argtypes(st, ob);
if (!converters)
return -1;
@@ -3302,7 +3479,7 @@ PyCFuncPtr_get_argtypes(PyCFuncPtrObject *self, void *Py_UNUSED(ignored))
if (self->argtypes) {
return Py_NewRef(self->argtypes);
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self)));
StgInfo *info;
if (PyStgInfo_FromObject(st, (PyObject *)self, &info) < 0) {
return NULL;
@@ -3351,7 +3528,7 @@ static PPROC FindAddress(void *handle, const char *name, PyObject *type)
return NULL;
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(type));
StgInfo *info;
if (PyStgInfo_FromType(st, (PyObject *)type, &info) < 0) {
return NULL;
@@ -3598,7 +3775,7 @@ PyCFuncPtr_FromDll(PyTypeObject *type, PyObject *args, PyObject *kwds)
return NULL;
}
#endif
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(type));
if (!_validate_paramflags(st, type, paramflags)) {
Py_DECREF(ftuple);
return NULL;
@@ -3640,7 +3817,7 @@ PyCFuncPtr_FromVtblIndex(PyTypeObject *type, PyObject *args, PyObject *kwds)
if (paramflags == Py_None)
paramflags = NULL;
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(type));
if (!_validate_paramflags(st, type, paramflags)) {
return NULL;
}
@@ -3721,7 +3898,7 @@ PyCFuncPtr_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
}
*/
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(type));
StgInfo *info;
if (PyStgInfo_FromType(st, (PyObject *)type, &info) < 0) {
return NULL;
@@ -4088,7 +4265,7 @@ PyCFuncPtr_call(PyCFuncPtrObject *self, PyObject *inargs, PyObject *kwds)
int outmask;
unsigned int numretvals;
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self)));
StgInfo *info;
if (PyStgInfo_FromObject(st, (PyObject *)self, &info) < 0) {
return NULL;
@@ -4312,7 +4489,7 @@ _init_pos_args(PyObject *self, PyTypeObject *type,
PyObject *fields;
Py_ssize_t i;
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(type));
StgInfo *baseinfo;
if (PyStgInfo_FromType(st, (PyObject *)type->tp_base, &baseinfo) < 0) {
return -1;
@@ -4482,7 +4659,7 @@ Array_item(PyObject *myself, Py_ssize_t index)
return NULL;
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self)));
StgInfo *stginfo;
if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) {
return NULL;
@@ -4523,7 +4700,7 @@ Array_subscript(PyObject *myself, PyObject *item)
}
slicelen = PySlice_AdjustIndices(self->b_length, &start, &stop, step);
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self)));
StgInfo *stginfo;
if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) {
return NULL;
@@ -4624,7 +4801,7 @@ Array_ass_item(PyObject *myself, Py_ssize_t index, PyObject *value)
return -1;
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self)));
StgInfo *stginfo;
if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) {
return -1;
@@ -4815,6 +4992,12 @@ PyCArrayType_from_ctype(ctypes_state *st, PyObject *itemtype, Py_ssize_t length)
Simple_Type
*/
+/*[clinic input]
+class _ctypes.Simple "PyObject *" "clinic_state()->Simple_Type"
+[clinic start generated code]*/
+/*[clinic end generated code: output=da39a3ee5e6b4b0d input=016c476c7aa8b8a8]*/
+
+
static int
Simple_set_value(CDataObject *self, PyObject *value, void *Py_UNUSED(ignored))
{
@@ -4826,7 +5009,7 @@ Simple_set_value(CDataObject *self, PyObject *value, void *Py_UNUSED(ignored))
return -1;
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self)));
StgInfo *info;
if (PyStgInfo_FromObject(st, (PyObject *)self, &info) < 0) {
return -1;
@@ -4856,7 +5039,7 @@ Simple_init(CDataObject *self, PyObject *args, PyObject *kw)
static PyObject *
Simple_get_value(CDataObject *self, void *Py_UNUSED(ignored))
{
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self)));
StgInfo *info;
if (PyStgInfo_FromObject(st, (PyObject *)self, &info) < 0) {
return NULL;
@@ -4872,10 +5055,19 @@ static PyGetSetDef Simple_getsets[] = {
{ NULL, NULL }
};
+/*[clinic input]
+_ctypes.Simple.__ctypes_from_outparam__ as Simple_from_outparm
+
+ self: self
+ cls: defining_class
+ /
+[clinic start generated code]*/
+
static PyObject *
-Simple_from_outparm(PyObject *self, PyObject *args)
+Simple_from_outparm_impl(PyObject *self, PyTypeObject *cls)
+/*[clinic end generated code: output=6c61d90da8aa9b4f input=0f362803fb4629d5]*/
{
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_class(cls);
if (_ctypes_simple_instance(st, (PyObject *)Py_TYPE(self))) {
return Py_NewRef(self);
}
@@ -4884,7 +5076,7 @@ Simple_from_outparm(PyObject *self, PyObject *args)
}
static PyMethodDef Simple_methods[] = {
- { "__ctypes_from_outparam__", Simple_from_outparm, METH_NOARGS, },
+ SIMPLE_FROM_OUTPARM_METHODDEF
{ NULL, NULL },
};
@@ -4898,7 +5090,7 @@ static PyObject *
Simple_repr(CDataObject *self)
{
PyObject *val, *result;
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self)));
if (Py_TYPE(self)->tp_base != st->Simple_Type) {
return PyUnicode_FromFormat("<%s object at %p>",
@@ -4953,7 +5145,7 @@ Pointer_item(PyObject *myself, Py_ssize_t index)
return NULL;
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(myself)));
StgInfo *stginfo;
if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) {
return NULL;
@@ -4997,7 +5189,7 @@ Pointer_ass_item(PyObject *myself, Py_ssize_t index, PyObject *value)
return -1;
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(myself)));
StgInfo *stginfo;
if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) {
return -1;
@@ -5030,7 +5222,7 @@ Pointer_get_contents(CDataObject *self, void *closure)
return NULL;
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self)));
StgInfo *stginfo;
if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) {
return NULL;
@@ -5053,7 +5245,7 @@ Pointer_set_contents(CDataObject *self, PyObject *value, void *closure)
"Pointer does not support item deletion");
return -1;
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self)));
StgInfo *stginfo;
if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) {
return -1;
@@ -5115,7 +5307,7 @@ Pointer_init(CDataObject *self, PyObject *args, PyObject *kw)
static PyObject *
Pointer_new(PyTypeObject *type, PyObject *args, PyObject *kw)
{
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(type));
StgInfo *info;
if (PyStgInfo_FromType(st, (PyObject *)type, &info) < 0) {
return NULL;
@@ -5195,7 +5387,7 @@ Pointer_subscript(PyObject *myself, PyObject *item)
else
len = (stop - start + 1) / step + 1;
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(myself)));
StgInfo *stginfo;
if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) {
return NULL;
@@ -5424,7 +5616,13 @@ cast_check_pointertype(ctypes_state *st, PyObject *arg)
static PyObject *
cast(void *ptr, PyObject *src, PyObject *ctype)
{
- ctypes_state *st = GLOBAL_STATE();
+ PyObject *mod = PyType_GetModuleByDef(Py_TYPE(ctype), &_ctypesmodule);
+ if (!mod) {
+ PyErr_SetString(PyExc_TypeError,
+ "cast() argument 2 must be a pointer type");
+ return NULL;
+ }
+ ctypes_state *st = get_module_state(mod);
CDataObject *result;
if (cast_check_pointertype(st, ctype) == 0) {
@@ -5493,15 +5691,6 @@ wstring_at(const wchar_t *ptr, int size)
}
-static struct PyModuleDef _ctypesmodule = {
- PyModuleDef_HEAD_INIT,
- .m_name = "_ctypes",
- .m_doc = _ctypes__doc__,
- .m_size = -1,
- .m_methods = _ctypes_module_methods,
-};
-
-
static int
_ctypes_add_types(PyObject *mod)
{
@@ -5525,7 +5714,7 @@ _ctypes_add_types(PyObject *mod)
} \
} while (0)
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state(mod);
/* Note:
ob_type is the metatype (the 'type'), defaults to PyType_Type,
@@ -5610,7 +5799,7 @@ _ctypes_add_objects(PyObject *mod)
} \
} while (0)
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state(mod);
MOD_ADD("_pointer_type_cache", Py_NewRef(st->_ctypes_ptrtype_cache));
#ifdef MS_WIN32
@@ -5653,7 +5842,7 @@ _ctypes_add_objects(PyObject *mod)
static int
_ctypes_mod_exec(PyObject *mod)
{
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state(mod);
st->_unpickle = PyObject_GetAttrString(mod, "_unpickle");
if (st->_unpickle == NULL) {
return -1;
@@ -5680,19 +5869,104 @@ _ctypes_mod_exec(PyObject *mod)
}
+static int
+module_traverse(PyObject *module, visitproc visit, void *arg) {
+ ctypes_state *st = get_module_state(module);
+ Py_VISIT(st->_ctypes_ptrtype_cache);
+ Py_VISIT(st->_unpickle);
+ Py_VISIT(st->array_cache);
+ Py_VISIT(st->error_object_name);
+ Py_VISIT(st->PyExc_ArgError);
+ Py_VISIT(st->swapped_suffix);
+
+ Py_VISIT(st->DictRemover_Type);
+ Py_VISIT(st->PyCArg_Type);
+ Py_VISIT(st->PyCField_Type);
+ Py_VISIT(st->PyCThunk_Type);
+ Py_VISIT(st->StructParam_Type);
+ Py_VISIT(st->PyCStructType_Type);
+ Py_VISIT(st->UnionType_Type);
+ Py_VISIT(st->PyCPointerType_Type);
+ Py_VISIT(st->PyCArrayType_Type);
+ Py_VISIT(st->PyCSimpleType_Type);
+ Py_VISIT(st->PyCFuncPtrType_Type);
+ Py_VISIT(st->PyCData_Type);
+ Py_VISIT(st->Struct_Type);
+ Py_VISIT(st->Union_Type);
+ Py_VISIT(st->PyCArray_Type);
+ Py_VISIT(st->Simple_Type);
+ Py_VISIT(st->PyCPointer_Type);
+ Py_VISIT(st->PyCFuncPtr_Type);
+#ifdef MS_WIN32
+ Py_VISIT(st->PyComError_Type);
+#endif
+ Py_VISIT(st->PyCType_Type);
+ return 0;
+}
+
+static int
+module_clear(PyObject *module) {
+ ctypes_state *st = get_module_state(module);
+ Py_CLEAR(st->_ctypes_ptrtype_cache);
+ Py_CLEAR(st->_unpickle);
+ Py_CLEAR(st->array_cache);
+ Py_CLEAR(st->error_object_name);
+ Py_CLEAR(st->PyExc_ArgError);
+ Py_CLEAR(st->swapped_suffix);
+
+ Py_CLEAR(st->DictRemover_Type);
+ Py_CLEAR(st->PyCArg_Type);
+ Py_CLEAR(st->PyCField_Type);
+ Py_CLEAR(st->PyCThunk_Type);
+ Py_CLEAR(st->StructParam_Type);
+ Py_CLEAR(st->PyCStructType_Type);
+ Py_CLEAR(st->UnionType_Type);
+ Py_CLEAR(st->PyCPointerType_Type);
+ Py_CLEAR(st->PyCArrayType_Type);
+ Py_CLEAR(st->PyCSimpleType_Type);
+ Py_CLEAR(st->PyCFuncPtrType_Type);
+ Py_CLEAR(st->PyCData_Type);
+ Py_CLEAR(st->Struct_Type);
+ Py_CLEAR(st->Union_Type);
+ Py_CLEAR(st->PyCArray_Type);
+ Py_CLEAR(st->Simple_Type);
+ Py_CLEAR(st->PyCPointer_Type);
+ Py_CLEAR(st->PyCFuncPtr_Type);
+#ifdef MS_WIN32
+ Py_CLEAR(st->PyComError_Type);
+#endif
+ Py_CLEAR(st->PyCType_Type);
+ return 0;
+}
+
+static void
+module_free(void *module)
+{
+ (void)module_clear((PyObject *)module);
+}
+
+static PyModuleDef_Slot module_slots[] = {
+ {Py_mod_exec, _ctypes_mod_exec},
+ {Py_mod_multiple_interpreters, Py_MOD_MULTIPLE_INTERPRETERS_NOT_SUPPORTED},
+ {0, NULL}
+};
+
+struct PyModuleDef _ctypesmodule = {
+ PyModuleDef_HEAD_INIT,
+ .m_name = "_ctypes",
+ .m_doc = _ctypes__doc__,
+ .m_size = sizeof(ctypes_state),
+ .m_methods = _ctypes_module_methods,
+ .m_slots = module_slots,
+ .m_traverse = module_traverse,
+ .m_clear = module_clear,
+ .m_free = module_free,
+};
+
PyMODINIT_FUNC
PyInit__ctypes(void)
{
- PyObject *mod = PyModule_Create(&_ctypesmodule);
- if (!mod) {
- return NULL;
- }
-
- if (_ctypes_mod_exec(mod) < 0) {
- Py_DECREF(mod);
- return NULL;
- }
- return mod;
+ return PyModuleDef_Init(&_ctypesmodule);
}
/*
diff --git a/Modules/_ctypes/callbacks.c b/Modules/_ctypes/callbacks.c
index b6f98e92e1ba88..7b9f6437c7d55f 100644
--- a/Modules/_ctypes/callbacks.c
+++ b/Modules/_ctypes/callbacks.c
@@ -136,6 +136,8 @@ TryAddRef(PyObject *cnv, CDataObject *obj)
* Call the python object with all arguments
*
*/
+
+// BEWARE: The GIL needs to be held throughout the function
static void _CallPythonObject(ctypes_state *st,
void *mem,
ffi_type *restype,
@@ -149,7 +151,6 @@ static void _CallPythonObject(ctypes_state *st,
Py_ssize_t i = 0, j = 0, nargs = 0;
PyObject *error_object = NULL;
int *space;
- PyGILState_STATE state = PyGILState_Ensure();
assert(PyTuple_Check(converters));
nargs = PyTuple_GET_SIZE(converters);
@@ -294,7 +295,6 @@ static void _CallPythonObject(ctypes_state *st,
for (j = 0; j < i; j++) {
Py_DECREF(args[j]);
}
- PyGILState_Release(state);
}
static void closure_fcn(ffi_cif *cif,
@@ -302,8 +302,10 @@ static void closure_fcn(ffi_cif *cif,
void **args,
void *userdata)
{
+ PyGILState_STATE state = PyGILState_Ensure();
+
CThunkObject *p = (CThunkObject *)userdata;
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_class(Py_TYPE(p));
_CallPythonObject(st,
resp,
@@ -313,6 +315,8 @@ static void closure_fcn(ffi_cif *cif,
p->converters,
p->flags,
args);
+
+ PyGILState_Release(state);
}
static CThunkObject* CThunkObject_new(ctypes_state *st, Py_ssize_t nargs)
diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c
index 67d6ade43a2667..cbed2f32caa6c4 100644
--- a/Modules/_ctypes/callproc.c
+++ b/Modules/_ctypes/callproc.c
@@ -201,7 +201,7 @@ static PyObject *
get_error_internal(PyObject *self, PyObject *args, int index)
{
int *space;
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state(self);
PyObject *errobj = _ctypes_get_errobj(st, &space);
PyObject *result;
@@ -222,7 +222,7 @@ set_error_internal(PyObject *self, PyObject *args, int index)
if (!PyArg_ParseTuple(args, "i", &new_errno)) {
return NULL;
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state(self);
errobj = _ctypes_get_errobj(st, &space);
if (errobj == NULL)
return NULL;
@@ -1464,7 +1464,7 @@ copy_com_pointer(PyObject *self, PyObject *args)
return NULL;
a.keep = b.keep = NULL;
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state(self);
if (ConvParam(st, p1, 0, &a) < 0 || ConvParam(st, p2, 1, &b) < 0) {
goto done;
}
@@ -1646,7 +1646,7 @@ call_function(PyObject *self, PyObject *args)
return NULL;
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state(self);
result = _ctypes_callproc(st,
(PPROC)func,
arguments,
@@ -1683,7 +1683,7 @@ call_cdeclfunction(PyObject *self, PyObject *args)
return NULL;
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state(self);
result = _ctypes_callproc(st,
(PPROC)func,
arguments,
@@ -1709,7 +1709,7 @@ PyDoc_STRVAR(sizeof_doc,
static PyObject *
sizeof_func(PyObject *self, PyObject *obj)
{
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state(self);
StgInfo *info;
if (PyStgInfo_FromType(st, obj, &info) < 0) {
@@ -1735,7 +1735,7 @@ PyDoc_STRVAR(alignment_doc,
static PyObject *
align_func(PyObject *self, PyObject *obj)
{
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state(self);
StgInfo *info;
if (PyStgInfo_FromAny(st, obj, &info) < 0) {
return NULL;
@@ -1773,7 +1773,7 @@ byref(PyObject *self, PyObject *args)
if (offset == -1 && PyErr_Occurred())
return NULL;
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state(self);
if (!CDataObject_Check(st, obj)) {
PyErr_Format(PyExc_TypeError,
"byref() argument must be a ctypes instance, not '%s'",
@@ -1799,7 +1799,7 @@ PyDoc_STRVAR(addressof_doc,
static PyObject *
addressof(PyObject *self, PyObject *obj)
{
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state(self);
if (!CDataObject_Check(st, obj)) {
PyErr_SetString(PyExc_TypeError,
"invalid type");
@@ -1858,7 +1858,7 @@ resize(PyObject *self, PyObject *args)
&obj, &size))
return NULL;
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state(self);
StgInfo *info;
int result = PyStgInfo_FromObject(st, (PyObject *)obj, &info);
if (result < 0) {
@@ -1956,7 +1956,8 @@ create_pointer_type(PyObject *module, PyObject *cls)
PyTypeObject *typ;
PyObject *key;
- ctypes_state *st = GLOBAL_STATE();
+ assert(module);
+ ctypes_state *st = get_module_state(module);
if (PyDict_GetItemRef(st->_ctypes_ptrtype_cache, cls, &result) != 0) {
// found or error
return result;
@@ -2019,12 +2020,12 @@ create_pointer_inst(PyObject *module, PyObject *arg)
PyObject *result;
PyObject *typ;
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state(module);
if (PyDict_GetItemRef(st->_ctypes_ptrtype_cache, (PyObject *)Py_TYPE(arg), &typ) < 0) {
return NULL;
}
if (typ == NULL) {
- typ = create_pointer_type(NULL, (PyObject *)Py_TYPE(arg));
+ typ = create_pointer_type(module, (PyObject *)Py_TYPE(arg));
if (typ == NULL)
return NULL;
}
@@ -2039,7 +2040,7 @@ buffer_info(PyObject *self, PyObject *arg)
PyObject *shape;
Py_ssize_t i;
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state(self);
StgInfo *info;
if (PyStgInfo_FromAny(st, arg, &info) < 0) {
return NULL;
diff --git a/Modules/_ctypes/cfield.c b/Modules/_ctypes/cfield.c
index ffe00e25aff49f..7472a4c36868a8 100644
--- a/Modules/_ctypes/cfield.c
+++ b/Modules/_ctypes/cfield.c
@@ -216,7 +216,7 @@ PyCField_set(CFieldObject *self, PyObject *inst, PyObject *value)
{
CDataObject *dst;
char *ptr;
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_class(Py_TYPE(self));
if (!CDataObject_Check(st, inst)) {
PyErr_SetString(PyExc_TypeError,
"not a ctype instance");
@@ -240,7 +240,7 @@ PyCField_get(CFieldObject *self, PyObject *inst, PyTypeObject *type)
if (inst == NULL) {
return Py_NewRef(self);
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_class(Py_TYPE(self));
if (!CDataObject_Check(st, inst)) {
PyErr_SetString(PyExc_TypeError,
"not a ctype instance");
diff --git a/Modules/_ctypes/clinic/_ctypes.c.h b/Modules/_ctypes/clinic/_ctypes.c.h
new file mode 100644
index 00000000000000..98a84cc14f4386
--- /dev/null
+++ b/Modules/_ctypes/clinic/_ctypes.c.h
@@ -0,0 +1,610 @@
+/*[clinic input]
+preserve
+[clinic start generated code]*/
+
+#include "pycore_abstract.h" // _PyNumber_Index()
+#include "pycore_modsupport.h" // _PyArg_UnpackKeywords()
+
+PyDoc_STRVAR(_ctypes_CType_Type___sizeof____doc__,
+"__sizeof__($self, /)\n"
+"--\n"
+"\n"
+"Return memory consumption of the type object.");
+
+#define _CTYPES_CTYPE_TYPE___SIZEOF___METHODDEF \
+ {"__sizeof__", _PyCFunction_CAST(_ctypes_CType_Type___sizeof__), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _ctypes_CType_Type___sizeof____doc__},
+
+static PyObject *
+_ctypes_CType_Type___sizeof___impl(PyObject *self, PyTypeObject *cls);
+
+static PyObject *
+_ctypes_CType_Type___sizeof__(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) {
+ PyErr_SetString(PyExc_TypeError, "__sizeof__() takes no arguments");
+ return NULL;
+ }
+ return _ctypes_CType_Type___sizeof___impl(self, cls);
+}
+
+PyDoc_STRVAR(CDataType_from_address__doc__,
+"from_address($self, value, /)\n"
+"--\n"
+"\n"
+"C.from_address(integer) -> C instance\n"
+"\n"
+"Access a C instance at the specified address.");
+
+#define CDATATYPE_FROM_ADDRESS_METHODDEF \
+ {"from_address", _PyCFunction_CAST(CDataType_from_address), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, CDataType_from_address__doc__},
+
+static PyObject *
+CDataType_from_address_impl(PyObject *type, PyTypeObject *cls,
+ PyObject *value);
+
+static PyObject *
+CDataType_from_address(PyObject *type, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
+ # define KWTUPLE (PyObject *)&_Py_SINGLETON(tuple_empty)
+ #else
+ # define KWTUPLE NULL
+ #endif
+
+ static const char * const _keywords[] = {"", NULL};
+ static _PyArg_Parser _parser = {
+ .keywords = _keywords,
+ .fname = "from_address",
+ .kwtuple = KWTUPLE,
+ };
+ #undef KWTUPLE
+ PyObject *argsbuf[1];
+ PyObject *value;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ value = args[0];
+ return_value = CDataType_from_address_impl(type, cls, value);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(CDataType_from_buffer__doc__,
+"from_buffer($self, obj, offset=0, /)\n"
+"--\n"
+"\n"
+"C.from_buffer(object, offset=0) -> C instance\n"
+"\n"
+"Create a C instance from a writeable buffer.");
+
+#define CDATATYPE_FROM_BUFFER_METHODDEF \
+ {"from_buffer", _PyCFunction_CAST(CDataType_from_buffer), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, CDataType_from_buffer__doc__},
+
+static PyObject *
+CDataType_from_buffer_impl(PyObject *type, PyTypeObject *cls, PyObject *obj,
+ Py_ssize_t offset);
+
+static PyObject *
+CDataType_from_buffer(PyObject *type, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
+ # define KWTUPLE (PyObject *)&_Py_SINGLETON(tuple_empty)
+ #else
+ # define KWTUPLE NULL
+ #endif
+
+ static const char * const _keywords[] = {"", "", NULL};
+ static _PyArg_Parser _parser = {
+ .keywords = _keywords,
+ .fname = "from_buffer",
+ .kwtuple = KWTUPLE,
+ };
+ #undef KWTUPLE
+ PyObject *argsbuf[2];
+ PyObject *obj;
+ Py_ssize_t offset = 0;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 2, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ obj = args[0];
+ if (nargs < 2) {
+ goto skip_optional_posonly;
+ }
+ {
+ Py_ssize_t ival = -1;
+ PyObject *iobj = _PyNumber_Index(args[1]);
+ if (iobj != NULL) {
+ ival = PyLong_AsSsize_t(iobj);
+ Py_DECREF(iobj);
+ }
+ if (ival == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ offset = ival;
+ }
+skip_optional_posonly:
+ return_value = CDataType_from_buffer_impl(type, cls, obj, offset);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(CDataType_from_buffer_copy__doc__,
+"from_buffer_copy($self, buffer, offset=0, /)\n"
+"--\n"
+"\n"
+"C.from_buffer_copy(object, offset=0) -> C instance\n"
+"\n"
+"Create a C instance from a readable buffer.");
+
+#define CDATATYPE_FROM_BUFFER_COPY_METHODDEF \
+ {"from_buffer_copy", _PyCFunction_CAST(CDataType_from_buffer_copy), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, CDataType_from_buffer_copy__doc__},
+
+static PyObject *
+CDataType_from_buffer_copy_impl(PyObject *type, PyTypeObject *cls,
+ Py_buffer *buffer, Py_ssize_t offset);
+
+static PyObject *
+CDataType_from_buffer_copy(PyObject *type, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
+ # define KWTUPLE (PyObject *)&_Py_SINGLETON(tuple_empty)
+ #else
+ # define KWTUPLE NULL
+ #endif
+
+ static const char * const _keywords[] = {"", "", NULL};
+ static _PyArg_Parser _parser = {
+ .keywords = _keywords,
+ .fname = "from_buffer_copy",
+ .kwtuple = KWTUPLE,
+ };
+ #undef KWTUPLE
+ PyObject *argsbuf[2];
+ Py_buffer buffer = {NULL, NULL};
+ Py_ssize_t offset = 0;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 2, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ if (PyObject_GetBuffer(args[0], &buffer, PyBUF_SIMPLE) != 0) {
+ goto exit;
+ }
+ if (nargs < 2) {
+ goto skip_optional_posonly;
+ }
+ {
+ Py_ssize_t ival = -1;
+ PyObject *iobj = _PyNumber_Index(args[1]);
+ if (iobj != NULL) {
+ ival = PyLong_AsSsize_t(iobj);
+ Py_DECREF(iobj);
+ }
+ if (ival == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ offset = ival;
+ }
+skip_optional_posonly:
+ return_value = CDataType_from_buffer_copy_impl(type, cls, &buffer, offset);
+
+exit:
+ /* Cleanup for buffer */
+ if (buffer.obj) {
+ PyBuffer_Release(&buffer);
+ }
+
+ return return_value;
+}
+
+PyDoc_STRVAR(CDataType_in_dll__doc__,
+"in_dll($self, dll, name, /)\n"
+"--\n"
+"\n"
+"C.in_dll(dll, name) -> C instance\n"
+"\n"
+"Access a C instance in a dll.");
+
+#define CDATATYPE_IN_DLL_METHODDEF \
+ {"in_dll", _PyCFunction_CAST(CDataType_in_dll), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, CDataType_in_dll__doc__},
+
+static PyObject *
+CDataType_in_dll_impl(PyObject *type, PyTypeObject *cls, PyObject *dll,
+ const char *name);
+
+static PyObject *
+CDataType_in_dll(PyObject *type, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
+ # define KWTUPLE (PyObject *)&_Py_SINGLETON(tuple_empty)
+ #else
+ # define KWTUPLE NULL
+ #endif
+
+ static const char * const _keywords[] = {"", "", NULL};
+ static _PyArg_Parser _parser = {
+ .keywords = _keywords,
+ .fname = "in_dll",
+ .kwtuple = KWTUPLE,
+ };
+ #undef KWTUPLE
+ PyObject *argsbuf[2];
+ PyObject *dll;
+ const char *name;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 2, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ dll = args[0];
+ if (!PyUnicode_Check(args[1])) {
+ _PyArg_BadArgument("in_dll", "argument 2", "str", args[1]);
+ goto exit;
+ }
+ Py_ssize_t name_length;
+ name = PyUnicode_AsUTF8AndSize(args[1], &name_length);
+ if (name == NULL) {
+ goto exit;
+ }
+ if (strlen(name) != (size_t)name_length) {
+ PyErr_SetString(PyExc_ValueError, "embedded null character");
+ goto exit;
+ }
+ return_value = CDataType_in_dll_impl(type, cls, dll, name);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(CDataType_from_param__doc__,
+"from_param($self, value, /)\n"
+"--\n"
+"\n"
+"Convert a Python object into a function call parameter.");
+
+#define CDATATYPE_FROM_PARAM_METHODDEF \
+ {"from_param", _PyCFunction_CAST(CDataType_from_param), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, CDataType_from_param__doc__},
+
+static PyObject *
+CDataType_from_param_impl(PyObject *type, PyTypeObject *cls, PyObject *value);
+
+static PyObject *
+CDataType_from_param(PyObject *type, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
+ # define KWTUPLE (PyObject *)&_Py_SINGLETON(tuple_empty)
+ #else
+ # define KWTUPLE NULL
+ #endif
+
+ static const char * const _keywords[] = {"", NULL};
+ static _PyArg_Parser _parser = {
+ .keywords = _keywords,
+ .fname = "from_param",
+ .kwtuple = KWTUPLE,
+ };
+ #undef KWTUPLE
+ PyObject *argsbuf[1];
+ PyObject *value;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ value = args[0];
+ return_value = CDataType_from_param_impl(type, cls, value);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(PyCPointerType_set_type__doc__,
+"set_type($self, type, /)\n"
+"--\n"
+"\n");
+
+#define PYCPOINTERTYPE_SET_TYPE_METHODDEF \
+ {"set_type", _PyCFunction_CAST(PyCPointerType_set_type), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, PyCPointerType_set_type__doc__},
+
+static PyObject *
+PyCPointerType_set_type_impl(PyTypeObject *self, PyTypeObject *cls,
+ PyObject *type);
+
+static PyObject *
+PyCPointerType_set_type(PyTypeObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
+ # define KWTUPLE (PyObject *)&_Py_SINGLETON(tuple_empty)
+ #else
+ # define KWTUPLE NULL
+ #endif
+
+ static const char * const _keywords[] = {"", NULL};
+ static _PyArg_Parser _parser = {
+ .keywords = _keywords,
+ .fname = "set_type",
+ .kwtuple = KWTUPLE,
+ };
+ #undef KWTUPLE
+ PyObject *argsbuf[1];
+ PyObject *type;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ type = args[0];
+ return_value = PyCPointerType_set_type_impl(self, cls, type);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(PyCPointerType_from_param__doc__,
+"from_param($self, value, /)\n"
+"--\n"
+"\n"
+"Convert a Python object into a function call parameter.");
+
+#define PYCPOINTERTYPE_FROM_PARAM_METHODDEF \
+ {"from_param", _PyCFunction_CAST(PyCPointerType_from_param), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, PyCPointerType_from_param__doc__},
+
+static PyObject *
+PyCPointerType_from_param_impl(PyObject *type, PyTypeObject *cls,
+ PyObject *value);
+
+static PyObject *
+PyCPointerType_from_param(PyObject *type, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
+ # define KWTUPLE (PyObject *)&_Py_SINGLETON(tuple_empty)
+ #else
+ # define KWTUPLE NULL
+ #endif
+
+ static const char * const _keywords[] = {"", NULL};
+ static _PyArg_Parser _parser = {
+ .keywords = _keywords,
+ .fname = "from_param",
+ .kwtuple = KWTUPLE,
+ };
+ #undef KWTUPLE
+ PyObject *argsbuf[1];
+ PyObject *value;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ value = args[0];
+ return_value = PyCPointerType_from_param_impl(type, cls, value);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(c_wchar_p_from_param__doc__,
+"from_param($self, value, /)\n"
+"--\n"
+"\n");
+
+#define C_WCHAR_P_FROM_PARAM_METHODDEF \
+ {"from_param", _PyCFunction_CAST(c_wchar_p_from_param), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, c_wchar_p_from_param__doc__},
+
+static PyObject *
+c_wchar_p_from_param_impl(PyObject *type, PyTypeObject *cls, PyObject *value);
+
+static PyObject *
+c_wchar_p_from_param(PyObject *type, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
+ # define KWTUPLE (PyObject *)&_Py_SINGLETON(tuple_empty)
+ #else
+ # define KWTUPLE NULL
+ #endif
+
+ static const char * const _keywords[] = {"", NULL};
+ static _PyArg_Parser _parser = {
+ .keywords = _keywords,
+ .fname = "from_param",
+ .kwtuple = KWTUPLE,
+ };
+ #undef KWTUPLE
+ PyObject *argsbuf[1];
+ PyObject *value;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ value = args[0];
+ return_value = c_wchar_p_from_param_impl(type, cls, value);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(c_char_p_from_param__doc__,
+"from_param($self, value, /)\n"
+"--\n"
+"\n");
+
+#define C_CHAR_P_FROM_PARAM_METHODDEF \
+ {"from_param", _PyCFunction_CAST(c_char_p_from_param), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, c_char_p_from_param__doc__},
+
+static PyObject *
+c_char_p_from_param_impl(PyObject *type, PyTypeObject *cls, PyObject *value);
+
+static PyObject *
+c_char_p_from_param(PyObject *type, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
+ # define KWTUPLE (PyObject *)&_Py_SINGLETON(tuple_empty)
+ #else
+ # define KWTUPLE NULL
+ #endif
+
+ static const char * const _keywords[] = {"", NULL};
+ static _PyArg_Parser _parser = {
+ .keywords = _keywords,
+ .fname = "from_param",
+ .kwtuple = KWTUPLE,
+ };
+ #undef KWTUPLE
+ PyObject *argsbuf[1];
+ PyObject *value;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ value = args[0];
+ return_value = c_char_p_from_param_impl(type, cls, value);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(c_void_p_from_param__doc__,
+"from_param($self, value, /)\n"
+"--\n"
+"\n");
+
+#define C_VOID_P_FROM_PARAM_METHODDEF \
+ {"from_param", _PyCFunction_CAST(c_void_p_from_param), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, c_void_p_from_param__doc__},
+
+static PyObject *
+c_void_p_from_param_impl(PyObject *type, PyTypeObject *cls, PyObject *value);
+
+static PyObject *
+c_void_p_from_param(PyObject *type, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
+ # define KWTUPLE (PyObject *)&_Py_SINGLETON(tuple_empty)
+ #else
+ # define KWTUPLE NULL
+ #endif
+
+ static const char * const _keywords[] = {"", NULL};
+ static _PyArg_Parser _parser = {
+ .keywords = _keywords,
+ .fname = "from_param",
+ .kwtuple = KWTUPLE,
+ };
+ #undef KWTUPLE
+ PyObject *argsbuf[1];
+ PyObject *value;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ value = args[0];
+ return_value = c_void_p_from_param_impl(type, cls, value);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(PyCSimpleType_from_param__doc__,
+"from_param($self, value, /)\n"
+"--\n"
+"\n"
+"Convert a Python object into a function call parameter.");
+
+#define PYCSIMPLETYPE_FROM_PARAM_METHODDEF \
+ {"from_param", _PyCFunction_CAST(PyCSimpleType_from_param), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, PyCSimpleType_from_param__doc__},
+
+static PyObject *
+PyCSimpleType_from_param_impl(PyObject *type, PyTypeObject *cls,
+ PyObject *value);
+
+static PyObject *
+PyCSimpleType_from_param(PyObject *type, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
+ # define KWTUPLE (PyObject *)&_Py_SINGLETON(tuple_empty)
+ #else
+ # define KWTUPLE NULL
+ #endif
+
+ static const char * const _keywords[] = {"", NULL};
+ static _PyArg_Parser _parser = {
+ .keywords = _keywords,
+ .fname = "from_param",
+ .kwtuple = KWTUPLE,
+ };
+ #undef KWTUPLE
+ PyObject *argsbuf[1];
+ PyObject *value;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ value = args[0];
+ return_value = PyCSimpleType_from_param_impl(type, cls, value);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(PyCData_reduce__doc__,
+"__reduce__($self, /)\n"
+"--\n"
+"\n");
+
+#define PYCDATA_REDUCE_METHODDEF \
+ {"__reduce__", _PyCFunction_CAST(PyCData_reduce), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, PyCData_reduce__doc__},
+
+static PyObject *
+PyCData_reduce_impl(PyObject *myself, PyTypeObject *cls);
+
+static PyObject *
+PyCData_reduce(PyObject *myself, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) {
+ PyErr_SetString(PyExc_TypeError, "__reduce__() takes no arguments");
+ return NULL;
+ }
+ return PyCData_reduce_impl(myself, cls);
+}
+
+PyDoc_STRVAR(Simple_from_outparm__doc__,
+"__ctypes_from_outparam__($self, /)\n"
+"--\n"
+"\n");
+
+#define SIMPLE_FROM_OUTPARM_METHODDEF \
+ {"__ctypes_from_outparam__", _PyCFunction_CAST(Simple_from_outparm), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, Simple_from_outparm__doc__},
+
+static PyObject *
+Simple_from_outparm_impl(PyObject *self, PyTypeObject *cls);
+
+static PyObject *
+Simple_from_outparm(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) {
+ PyErr_SetString(PyExc_TypeError, "__ctypes_from_outparam__() takes no arguments");
+ return NULL;
+ }
+ return Simple_from_outparm_impl(self, cls);
+}
+/*[clinic end generated code: output=9c6539a3559e6088 input=a9049054013a1b77]*/
diff --git a/Modules/_ctypes/ctypes.h b/Modules/_ctypes/ctypes.h
index 31b89dca244e8e..20c68134be2804 100644
--- a/Modules/_ctypes/ctypes.h
+++ b/Modules/_ctypes/ctypes.h
@@ -2,6 +2,9 @@
# include <alloca.h>
#endif
+#include "pycore_moduleobject.h" // _PyModule_GetState()
+#include "pycore_typeobject.h" // _PyType_GetModuleState()
+
#ifndef MS_WIN32
#define max(a, b) ((a) > (b) ? (a) : (b))
#define min(a, b) ((a) < (b) ? (a) : (b))
@@ -70,9 +73,48 @@ typedef struct {
PyObject *swapped_suffix;
} ctypes_state;
-extern ctypes_state global_state;
-#define GLOBAL_STATE() (&global_state)
+extern struct PyModuleDef _ctypesmodule;
+
+
+static inline ctypes_state *
+get_module_state(PyObject *module)
+{
+ void *state = _PyModule_GetState(module);
+ assert(state != NULL);
+ return (ctypes_state *)state;
+}
+
+static inline ctypes_state *
+get_module_state_by_class(PyTypeObject *cls)
+{
+ ctypes_state *state = (ctypes_state *)_PyType_GetModuleState(cls);
+ assert(state != NULL);
+ return state;
+}
+
+static inline ctypes_state *
+get_module_state_by_def(PyTypeObject *cls)
+{
+ PyObject *mod = PyType_GetModuleByDef(cls, &_ctypesmodule);
+ assert(mod != NULL);
+ return get_module_state(mod);
+}
+
+static inline ctypes_state *
+get_module_state_by_def_final(PyTypeObject *cls)
+{
+ if (cls->tp_mro == NULL) {
+ return NULL;
+ }
+ PyObject *mod = PyType_GetModuleByDef(cls, &_ctypesmodule);
+ if (mod == NULL) {
+ PyErr_Clear();
+ return NULL;
+ }
+ return get_module_state(mod);
+}
+
extern PyType_Spec carg_spec;
extern PyType_Spec cfield_spec;
diff --git a/Modules/_ctypes/stgdict.c b/Modules/_ctypes/stgdict.c
index 7b09bae0dd2a57..ad82e4891c519a 100644
--- a/Modules/_ctypes/stgdict.c
+++ b/Modules/_ctypes/stgdict.c
@@ -94,7 +94,7 @@ MakeFields(PyObject *type, CFieldObject *descr,
if (fieldlist == NULL)
return -1;
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_class(Py_TYPE(descr));
PyTypeObject *cfield_tp = st->PyCField_Type;
for (i = 0; i < PySequence_Fast_GET_SIZE(fieldlist); ++i) {
PyObject *pair = PySequence_Fast_GET_ITEM(fieldlist, i); /* borrowed */
@@ -175,7 +175,7 @@ MakeAnonFields(PyObject *type)
if (anon_names == NULL)
return -1;
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(type));
PyTypeObject *cfield_tp = st->PyCField_Type;
for (i = 0; i < PySequence_Fast_GET_SIZE(anon_names); ++i) {
PyObject *fname = PySequence_Fast_GET_ITEM(anon_names, i); /* borrowed */
@@ -318,7 +318,7 @@ PyCStructUnionType_update_stginfo(PyObject *type, PyObject *fields, int isStruct
return -1;
}
- ctypes_state *st = GLOBAL_STATE();
+ ctypes_state *st = get_module_state_by_def(Py_TYPE(type));
StgInfo *stginfo;
if (PyStgInfo_FromType(st, type, &stginfo) < 0) {
return -1;
diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c
index a626bda2ea9be9..06004e258b2eff 100644
--- a/Modules/_datetimemodule.c
+++ b/Modules/_datetimemodule.c
@@ -416,6 +416,10 @@ iso_week1_monday(int year)
static int
iso_to_ymd(const int iso_year, const int iso_week, const int iso_day,
int *year, int *month, int *day) {
+ // Year is bounded to 0 < year < 10000 because 9999-12-31 is (9999, 52, 5)
+ if (iso_year < MINYEAR || iso_year > MAXYEAR) {
+ return -4;
+ }
if (iso_week <= 0 || iso_week >= 53) {
int out_of_range = 1;
if (iso_week == 53) {
@@ -762,7 +766,7 @@ parse_isoformat_date(const char *dtstr, const size_t len, int *year, int *month,
* -2: Inconsistent date separator usage
* -3: Failed to parse ISO week.
* -4: Failed to parse ISO day.
- * -5, -6: Failure in iso_to_ymd
+ * -5, -6, -7: Failure in iso_to_ymd
*/
const char *p = dtstr;
p = parse_digits(p, year, 4);
@@ -3142,15 +3146,13 @@ date_fromisocalendar(PyObject *cls, PyObject *args, PyObject *kw)
return NULL;
}
- // Year is bounded to 0 < year < 10000 because 9999-12-31 is (9999, 52, 5)
- if (year < MINYEAR || year > MAXYEAR) {
- PyErr_Format(PyExc_ValueError, "Year is out of range: %d", year);
- return NULL;
- }
-
int month;
int rv = iso_to_ymd(year, week, day, &year, &month, &day);
+ if (rv == -4) {
+ PyErr_Format(PyExc_ValueError, "Year is out of range: %d", year);
+ return NULL;
+ }
if (rv == -2) {
PyErr_Format(PyExc_ValueError, "Invalid week: %d", week);
@@ -3643,7 +3645,8 @@ static PyMethodDef date_methods[] = {
DATETIME_DATE_REPLACE_METHODDEF
- {"__replace__", _PyCFunction_CAST(datetime_date_replace), METH_FASTCALL | METH_KEYWORDS},
+ {"__replace__", _PyCFunction_CAST(datetime_date_replace), METH_FASTCALL | METH_KEYWORDS,
+ PyDoc_STR("__replace__($self, /, **changes)\n--\n\nThe same as replace().")},
{"__reduce__", (PyCFunction)date_reduce, METH_NOARGS,
PyDoc_STR("__reduce__() -> (cls, state)")},
@@ -4770,7 +4773,8 @@ static PyMethodDef time_methods[] = {
DATETIME_TIME_REPLACE_METHODDEF
- {"__replace__", _PyCFunction_CAST(datetime_time_replace), METH_FASTCALL | METH_KEYWORDS},
+ {"__replace__", _PyCFunction_CAST(datetime_time_replace), METH_FASTCALL | METH_KEYWORDS,
+ PyDoc_STR("__replace__($self, /, **changes)\n--\n\nThe same as replace().")},
{"fromisoformat", (PyCFunction)time_fromisoformat, METH_O | METH_CLASS,
PyDoc_STR("string -> time from a string in ISO 8601 format")},
@@ -6617,7 +6621,8 @@ static PyMethodDef datetime_methods[] = {
DATETIME_DATETIME_REPLACE_METHODDEF
- {"__replace__", _PyCFunction_CAST(datetime_datetime_replace), METH_FASTCALL | METH_KEYWORDS},
+ {"__replace__", _PyCFunction_CAST(datetime_datetime_replace), METH_FASTCALL | METH_KEYWORDS,
+ PyDoc_STR("__replace__($self, /, **changes)\n--\n\nThe same as replace().")},
{"astimezone", _PyCFunction_CAST(datetime_astimezone), METH_VARARGS | METH_KEYWORDS,
PyDoc_STR("tz -> convert to local time in new timezone tz\n")},
diff --git a/Modules/_functoolsmodule.c b/Modules/_functoolsmodule.c
index f23b6e0d62bfb1..25c0ecde73246d 100644
--- a/Modules/_functoolsmodule.c
+++ b/Modules/_functoolsmodule.c
@@ -335,8 +335,9 @@ partial_call(partialobject *pto, PyObject *args, PyObject *kwargs)
}
PyDoc_STRVAR(partial_doc,
-"partial(func, *args, **keywords) - new function with partial application\n\
- of the given arguments and keywords.\n");
+"partial(func, /, *args, **keywords)\n--\n\n\
+Create a new function with partial application of the given arguments\n\
+and keywords.");
#define OFF(x) offsetof(partialobject, x)
static PyMemberDef partial_memberlist[] = {
diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c
index 0e230f332ff6cb..d0b46810dc1489 100644
--- a/Modules/_hashopenssl.c
+++ b/Modules/_hashopenssl.c
@@ -45,9 +45,15 @@
#define MUNCH_SIZE INT_MAX
#define PY_OPENSSL_HAS_SCRYPT 1
+#if defined(NID_sha3_224) && defined(NID_sha3_256) && defined(NID_sha3_384) && defined(NID_sha3_512)
#define PY_OPENSSL_HAS_SHA3 1
+#endif
+#if defined(NID_shake128) || defined(NID_shake256)
#define PY_OPENSSL_HAS_SHAKE 1
+#endif
+#if defined(NID_blake2s256) || defined(NID_blake2b512)
#define PY_OPENSSL_HAS_BLAKE2 1
+#endif
#if OPENSSL_VERSION_NUMBER >= 0x30000000L
#define PY_EVP_MD EVP_MD
@@ -88,22 +94,45 @@ typedef struct {
PY_EVP_MD *evp_nosecurity;
} py_hashentry_t;
+// Fundamental to TLS, assumed always present in any libcrypto:
#define Py_hash_md5 "md5"
#define Py_hash_sha1 "sha1"
#define Py_hash_sha224 "sha224"
#define Py_hash_sha256 "sha256"
#define Py_hash_sha384 "sha384"
#define Py_hash_sha512 "sha512"
-#define Py_hash_sha512_224 "sha512_224"
-#define Py_hash_sha512_256 "sha512_256"
-#define Py_hash_sha3_224 "sha3_224"
-#define Py_hash_sha3_256 "sha3_256"
-#define Py_hash_sha3_384 "sha3_384"
-#define Py_hash_sha3_512 "sha3_512"
-#define Py_hash_shake_128 "shake_128"
-#define Py_hash_shake_256 "shake_256"
-#define Py_hash_blake2s "blake2s"
-#define Py_hash_blake2b "blake2b"
+
+// Not all OpenSSL-like libcrypto libraries provide these:
+#if defined(NID_sha512_224)
+# define Py_hash_sha512_224 "sha512_224"
+#endif
+#if defined(NID_sha512_256)
+# define Py_hash_sha512_256 "sha512_256"
+#endif
+#if defined(NID_sha3_224)
+# define Py_hash_sha3_224 "sha3_224"
+#endif
+#if defined(NID_sha3_256)
+# define Py_hash_sha3_256 "sha3_256"
+#endif
+#if defined(NID_sha3_384)
+# define Py_hash_sha3_384 "sha3_384"
+#endif
+#if defined(NID_sha3_512)
+# define Py_hash_sha3_512 "sha3_512"
+#endif
+#if defined(NID_shake128)
+# define Py_hash_shake_128 "shake_128"
+#endif
+#if defined(NID_shake256)
+# define Py_hash_shake_256 "shake_256"
+#endif
+#if defined(NID_blake2s256)
+# define Py_hash_blake2s "blake2s"
+#endif
+#if defined(NID_blake2b512)
+# define Py_hash_blake2b "blake2b"
+#endif
#define PY_HASH_ENTRY(py_name, py_alias, ossl_name, ossl_nid) \
{py_name, py_alias, ossl_name, ossl_nid, 0, NULL, NULL}
@@ -119,19 +148,39 @@ static const py_hashentry_t py_hashes[] = {
PY_HASH_ENTRY(Py_hash_sha384, "SHA384", SN_sha384, NID_sha384),
PY_HASH_ENTRY(Py_hash_sha512, "SHA512", SN_sha512, NID_sha512),
/* truncated sha2 */
+#ifdef Py_hash_sha512_224
PY_HASH_ENTRY(Py_hash_sha512_224, "SHA512_224", SN_sha512_224, NID_sha512_224),
+#endif
+#ifdef Py_hash_sha512_256
PY_HASH_ENTRY(Py_hash_sha512_256, "SHA512_256", SN_sha512_256, NID_sha512_256),
+#endif
/* sha3 */
+#ifdef Py_hash_sha3_224
PY_HASH_ENTRY(Py_hash_sha3_224, NULL, SN_sha3_224, NID_sha3_224),
+#endif
+#ifdef Py_hash_sha3_256
PY_HASH_ENTRY(Py_hash_sha3_256, NULL, SN_sha3_256, NID_sha3_256),
+#endif
+#ifdef Py_hash_sha3_384
PY_HASH_ENTRY(Py_hash_sha3_384, NULL, SN_sha3_384, NID_sha3_384),
+#endif
+#ifdef Py_hash_sha3_512
PY_HASH_ENTRY(Py_hash_sha3_512, NULL, SN_sha3_512, NID_sha3_512),
+#endif
/* sha3 shake */
+#ifdef Py_hash_shake_128
PY_HASH_ENTRY(Py_hash_shake_128, NULL, SN_shake128, NID_shake128),
+#endif
+#ifdef Py_hash_shake_256
PY_HASH_ENTRY(Py_hash_shake_256, NULL, SN_shake256, NID_shake256),
+#endif
/* blake2 digest */
+#ifdef Py_hash_blake2s
PY_HASH_ENTRY(Py_hash_blake2s, "blake2s256", SN_blake2s256, NID_blake2s256),
+#endif
+#ifdef Py_hash_blake2b
PY_HASH_ENTRY(Py_hash_blake2b, "blake2b512", SN_blake2b512, NID_blake2b512),
+#endif
PY_HASH_ENTRY(NULL, NULL, NULL, 0),
};
diff --git a/Modules/_interpreters_common.h b/Modules/_interpreters_common.h
index de9a60ce657e0c..07120f6ccc7207 100644
--- a/Modules/_interpreters_common.h
+++ b/Modules/_interpreters_common.h
@@ -19,20 +19,3 @@ clear_xid_class(PyTypeObject *cls)
return _PyCrossInterpreterData_UnregisterClass(cls);
}
#endif
-
-
-#ifdef RETURNS_INTERPID_OBJECT
-static PyObject *
-get_interpid_obj(PyInterpreterState *interp)
-{
- if (_PyInterpreterState_IDInitref(interp) != 0) {
- return NULL;
- };
- int64_t id = PyInterpreterState_GetID(interp);
- if (id < 0) {
- return NULL;
- }
- assert(id < LLONG_MAX);
- return PyLong_FromLongLong(id);
-}
-#endif
diff --git a/Modules/_io/bufferedio.c b/Modules/_io/bufferedio.c
index b3450eeaf99401..4133d3438253dd 100644
--- a/Modules/_io/bufferedio.c
+++ b/Modules/_io/bufferedio.c
@@ -2531,8 +2531,8 @@ static PyMethodDef bufferedreader_methods[] = {
_IO__BUFFERED_TRUNCATE_METHODDEF
_IO__BUFFERED___SIZEOF___METHODDEF
- {"__reduce__", _PyIOBase_cannot_pickle, METH_VARARGS},
- {"__reduce_ex__", _PyIOBase_cannot_pickle, METH_VARARGS},
+ {"__reduce__", _PyIOBase_cannot_pickle, METH_NOARGS},
+ {"__reduce_ex__", _PyIOBase_cannot_pickle, METH_O},
{NULL, NULL}
};
@@ -2591,8 +2591,8 @@ static PyMethodDef bufferedwriter_methods[] = {
_IO__BUFFERED_TELL_METHODDEF
_IO__BUFFERED___SIZEOF___METHODDEF
- {"__reduce__", _PyIOBase_cannot_pickle, METH_VARARGS},
- {"__reduce_ex__", _PyIOBase_cannot_pickle, METH_VARARGS},
+ {"__reduce__", _PyIOBase_cannot_pickle, METH_NOARGS},
+ {"__reduce_ex__", _PyIOBase_cannot_pickle, METH_O},
{NULL, NULL}
};
@@ -2709,8 +2709,8 @@ static PyMethodDef bufferedrandom_methods[] = {
_IO_BUFFEREDWRITER_WRITE_METHODDEF
_IO__BUFFERED___SIZEOF___METHODDEF
- {"__reduce__", _PyIOBase_cannot_pickle, METH_VARARGS},
- {"__reduce_ex__", _PyIOBase_cannot_pickle, METH_VARARGS},
+ {"__reduce__", _PyIOBase_cannot_pickle, METH_NOARGS},
+ {"__reduce_ex__", _PyIOBase_cannot_pickle, METH_O},
{NULL, NULL}
};
diff --git a/Modules/_io/fileio.c b/Modules/_io/fileio.c
index 6bb156e41fe43c..b5129ffcbffdcf 100644
--- a/Modules/_io/fileio.c
+++ b/Modules/_io/fileio.c
@@ -1178,8 +1178,8 @@ static PyMethodDef fileio_methods[] = {
_IO_FILEIO_FILENO_METHODDEF
_IO_FILEIO_ISATTY_METHODDEF
{"_dealloc_warn", (PyCFunction)fileio_dealloc_warn, METH_O, NULL},
- {"__reduce__", _PyIOBase_cannot_pickle, METH_VARARGS},
- {"__reduce_ex__", _PyIOBase_cannot_pickle, METH_VARARGS},
+ {"__reduce__", _PyIOBase_cannot_pickle, METH_NOARGS},
+ {"__reduce_ex__", _PyIOBase_cannot_pickle, METH_O},
{NULL, NULL} /* sentinel */
};
diff --git a/Modules/_io/textio.c b/Modules/_io/textio.c
index a3239ec0f52960..9dff8eafb2560f 100644
--- a/Modules/_io/textio.c
+++ b/Modules/_io/textio.c
@@ -3337,8 +3337,8 @@ static PyMethodDef textiowrapper_methods[] = {
_IO_TEXTIOWRAPPER_TELL_METHODDEF
_IO_TEXTIOWRAPPER_TRUNCATE_METHODDEF
- {"__reduce__", _PyIOBase_cannot_pickle, METH_VARARGS},
- {"__reduce_ex__", _PyIOBase_cannot_pickle, METH_VARARGS},
+ {"__reduce__", _PyIOBase_cannot_pickle, METH_NOARGS},
+ {"__reduce_ex__", _PyIOBase_cannot_pickle, METH_O},
{NULL, NULL}
};
diff --git a/Modules/_sqlite/blob.c b/Modules/_sqlite/blob.c
index f099020c5f4e6f..7deb58bf1b9b82 100644
--- a/Modules/_sqlite/blob.c
+++ b/Modules/_sqlite/blob.c
@@ -4,7 +4,6 @@
#include "blob.h"
#include "util.h"
-#include "pycore_weakref.h" // _PyWeakref_GET_REF()
#define clinic_state() (pysqlite_get_state_by_type(Py_TYPE(self)))
#include "clinic/blob.c.h"
@@ -102,8 +101,8 @@ pysqlite_close_all_blobs(pysqlite_Connection *self)
{
for (int i = 0; i < PyList_GET_SIZE(self->blobs); i++) {
PyObject *weakref = PyList_GET_ITEM(self->blobs, i);
- PyObject *blob = _PyWeakref_GET_REF(weakref);
- if (blob == NULL) {
+ PyObject *blob;
+ if (!PyWeakref_GetRef(weakref, &blob)) {
continue;
}
close_blob((pysqlite_Blob *)blob);
diff --git a/Modules/_sqlite/clinic/connection.c.h b/Modules/_sqlite/clinic/connection.c.h
index 811314b5cd8aed..bb0a0278c629d4 100644
--- a/Modules/_sqlite/clinic/connection.c.h
+++ b/Modules/_sqlite/clinic/connection.c.h
@@ -744,7 +744,7 @@ pysqlite_connection_set_authorizer(pysqlite_Connection *self, PyTypeObject *cls,
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 1
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -837,7 +837,7 @@ pysqlite_connection_set_progress_handler(pysqlite_Connection *self, PyTypeObject
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 3
+ #define NUM_KEYWORDS 2
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -925,7 +925,7 @@ pysqlite_connection_set_trace_callback(pysqlite_Connection *self, PyTypeObject *
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 1
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -1866,4 +1866,4 @@ getconfig(pysqlite_Connection *self, PyObject *arg)
#ifndef DESERIALIZE_METHODDEF
#define DESERIALIZE_METHODDEF
#endif /* !defined(DESERIALIZE_METHODDEF) */
-/*[clinic end generated code: output=3c6d0b748fac016f input=a9049054013a1b77]*/
+/*[clinic end generated code: output=7d41a178b7b2b683 input=a9049054013a1b77]*/
diff --git a/Modules/_sqlite/connection.c b/Modules/_sqlite/connection.c
index f97afcf5fcf16e..74984ca5365743 100644
--- a/Modules/_sqlite/connection.c
+++ b/Modules/_sqlite/connection.c
@@ -38,7 +38,7 @@
#include "pycore_modsupport.h" // _PyArg_NoKeywords()
#include "pycore_pyerrors.h" // _PyErr_ChainExceptions1()
#include "pycore_pylifecycle.h" // _Py_IsInterpreterFinalizing()
-#include "pycore_weakref.h" // _PyWeakref_IS_DEAD()
+#include "pycore_weakref.h"
#include
@@ -1065,7 +1065,7 @@ static void _pysqlite_drop_unused_cursor_references(pysqlite_Connection* self)
for (Py_ssize_t i = 0; i < PyList_Size(self->cursors); i++) {
PyObject* weakref = PyList_GetItem(self->cursors, i);
- if (_PyWeakref_IS_DEAD(weakref)) {
+ if (_PyWeakref_IsDead(weakref)) {
continue;
}
if (PyList_Append(new_list, weakref) != 0) {
diff --git a/Modules/_ssl.c b/Modules/_ssl.c
index fbf914c4321922..f7fdbf4b6f90cb 100644
--- a/Modules/_ssl.c
+++ b/Modules/_ssl.c
@@ -29,7 +29,6 @@
#include "pycore_fileutils.h" // _PyIsSelectable_fd()
#include "pycore_pyerrors.h" // _PyErr_ChainExceptions1()
#include "pycore_time.h" // _PyDeadline_Init()
-#include "pycore_weakref.h" // _PyWeakref_GET_REF()
/* Include symbols from _socket module */
#include "socketmodule.h"
@@ -392,8 +391,8 @@ typedef enum {
// Return a borrowed reference.
static inline PySocketSockObject* GET_SOCKET(PySSLSocket *obj) {
if (obj->Socket) {
- PyObject *sock = _PyWeakref_GET_REF(obj->Socket);
- if (sock != NULL) {
+ PyObject *sock;
+ if (PyWeakref_GetRef(obj->Socket, &sock)) {
// GET_SOCKET() returns a borrowed reference
Py_DECREF(sock);
}
@@ -2205,8 +2204,8 @@ PySSL_get_owner(PySSLSocket *self, void *c)
if (self->owner == NULL) {
Py_RETURN_NONE;
}
- PyObject *owner = _PyWeakref_GET_REF(self->owner);
- if (owner == NULL) {
+ PyObject *owner;
+ if (!PyWeakref_GetRef(self->owner, &owner)) {
Py_RETURN_NONE;
}
return owner;
@@ -4433,9 +4432,9 @@ _servername_callback(SSL *s, int *al, void *args)
* will be passed. If both do not exist only then the C-level object is
* passed. */
if (ssl->owner)
- ssl_socket = _PyWeakref_GET_REF(ssl->owner);
+ PyWeakref_GetRef(ssl->owner, &ssl_socket);
else if (ssl->Socket)
- ssl_socket = _PyWeakref_GET_REF(ssl->Socket);
+ PyWeakref_GetRef(ssl->Socket, &ssl_socket);
else
ssl_socket = Py_NewRef(ssl);
diff --git a/Modules/_ssl/debughelpers.c b/Modules/_ssl/debughelpers.c
index 07e9ce7a6fce2d..9c87f8b4d21e68 100644
--- a/Modules/_ssl/debughelpers.c
+++ b/Modules/_ssl/debughelpers.c
@@ -28,12 +28,12 @@ _PySSL_msg_callback(int write_p, int version, int content_type,
PyObject *ssl_socket; /* ssl.SSLSocket or ssl.SSLObject */
if (ssl_obj->owner)
- ssl_socket = _PyWeakref_GET_REF(ssl_obj->owner);
+ PyWeakref_GetRef(ssl_obj->owner, &ssl_socket);
else if (ssl_obj->Socket)
- ssl_socket = _PyWeakref_GET_REF(ssl_obj->Socket);
+ PyWeakref_GetRef(ssl_obj->Socket, &ssl_socket);
else
ssl_socket = (PyObject *)Py_NewRef(ssl_obj);
- assert(ssl_socket != NULL); // _PyWeakref_GET_REF() can return NULL
+ assert(ssl_socket != NULL); // PyWeakref_GetRef() may have stored NULL
/* assume that OpenSSL verifies all payload and buf len is of sufficient
length */
diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c
index b2af47d05ee196..eff61dd9d833b4 100644
--- a/Modules/_testcapimodule.c
+++ b/Modules/_testcapimodule.c
@@ -2645,107 +2645,60 @@ eval_eval_code_ex(PyObject *mod, PyObject *pos_args)
PyObject **c_kwargs = NULL;
- if (!PyArg_UnpackTuple(pos_args,
- "eval_code_ex",
- 2,
- 8,
- &code,
- &globals,
- &locals,
- &args,
- &kwargs,
- &defaults,
- &kw_defaults,
- &closure))
+ if (!PyArg_ParseTuple(pos_args,
+ "OO|OO!O!O!OO:eval_code_ex",
+ &code,
+ &globals,
+ &locals,
+ &PyTuple_Type, &args,
+ &PyDict_Type, &kwargs,
+ &PyTuple_Type, &defaults,
+ &kw_defaults,
+ &closure))
{
goto exit;
}
- if (!PyCode_Check(code)) {
- PyErr_SetString(PyExc_TypeError,
- "code must be a Python code object");
- goto exit;
- }
-
- if (!PyDict_Check(globals)) {
- PyErr_SetString(PyExc_TypeError, "globals must be a dict");
- goto exit;
- }
-
- if (locals && !PyMapping_Check(locals)) {
- PyErr_SetString(PyExc_TypeError, "locals must be a mapping");
- goto exit;
- }
- if (locals == Py_None) {
- locals = NULL;
- }
+ NULLABLE(code);
+ NULLABLE(globals);
+ NULLABLE(locals);
+ NULLABLE(kw_defaults);
+ NULLABLE(closure);
PyObject **c_args = NULL;
Py_ssize_t c_args_len = 0;
-
- if (args)
- {
- if (!PyTuple_Check(args)) {
- PyErr_SetString(PyExc_TypeError, "args must be a tuple");
- goto exit;
- } else {
- c_args = &PyTuple_GET_ITEM(args, 0);
- c_args_len = PyTuple_Size(args);
- }
+ if (args) {
+ c_args = &PyTuple_GET_ITEM(args, 0);
+ c_args_len = PyTuple_Size(args);
}
Py_ssize_t c_kwargs_len = 0;
+ if (kwargs) {
+ c_kwargs_len = PyDict_Size(kwargs);
+ if (c_kwargs_len > 0) {
+ c_kwargs = PyMem_NEW(PyObject*, 2 * c_kwargs_len);
+ if (!c_kwargs) {
+ PyErr_NoMemory();
+ goto exit;
+ }
- if (kwargs)
- {
- if (!PyDict_Check(kwargs)) {
- PyErr_SetString(PyExc_TypeError, "keywords must be a dict");
- goto exit;
- } else {
- c_kwargs_len = PyDict_Size(kwargs);
- if (c_kwargs_len > 0) {
- c_kwargs = PyMem_NEW(PyObject*, 2 * c_kwargs_len);
- if (!c_kwargs) {
- PyErr_NoMemory();
- goto exit;
- }
-
- Py_ssize_t i = 0;
- Py_ssize_t pos = 0;
-
- while (PyDict_Next(kwargs,
- &pos,
- &c_kwargs[i],
- &c_kwargs[i + 1]))
- {
- i += 2;
- }
- c_kwargs_len = i / 2;
- /* XXX This is broken if the caller deletes dict items! */
+ Py_ssize_t i = 0;
+ Py_ssize_t pos = 0;
+ while (PyDict_Next(kwargs, &pos, &c_kwargs[i], &c_kwargs[i + 1])) {
+ i += 2;
}
+ c_kwargs_len = i / 2;
+ /* XXX This is broken if the caller deletes dict items! */
}
}
-
PyObject **c_defaults = NULL;
Py_ssize_t c_defaults_len = 0;
-
- if (defaults && PyTuple_Check(defaults)) {
+ if (defaults) {
c_defaults = &PyTuple_GET_ITEM(defaults, 0);
c_defaults_len = PyTuple_Size(defaults);
}
- if (kw_defaults && !PyDict_Check(kw_defaults)) {
- PyErr_SetString(PyExc_TypeError, "kw_defaults must be a dict");
- goto exit;
- }
-
- if (closure && !PyTuple_Check(closure)) {
- PyErr_SetString(PyExc_TypeError, "closure must be a tuple of cells");
- goto exit;
- }
-
-
result = PyEval_EvalCodeEx(
code,
globals,
diff --git a/Modules/_testclinic.c b/Modules/_testclinic.c
index fb0936bbccd318..454173b434fb6b 100644
--- a/Modules/_testclinic.c
+++ b/Modules/_testclinic.c
@@ -1219,21 +1219,36 @@ class _testclinic.TestClass "PyObject *" "PyObject"
/*[clinic end generated code: output=da39a3ee5e6b4b0d input=668a591c65bec947]*/
/*[clinic input]
-_testclinic.TestClass.meth_method_no_params
+_testclinic.TestClass.get_defining_class
cls: defining_class
- /
[clinic start generated code]*/
static PyObject *
-_testclinic_TestClass_meth_method_no_params_impl(PyObject *self,
- PyTypeObject *cls)
-/*[clinic end generated code: output=c140f100080c2fc8 input=6bd34503d11c63c1]*/
+_testclinic_TestClass_get_defining_class_impl(PyObject *self,
+ PyTypeObject *cls)
+/*[clinic end generated code: output=94f9b0b5f7add930 input=537c59417471dee3]*/
{
- Py_RETURN_NONE;
+ return Py_NewRef(cls);
+}
+
+/*[clinic input]
+_testclinic.TestClass.get_defining_class_arg
+ cls: defining_class
+ arg: object
+[clinic start generated code]*/
+
+static PyObject *
+_testclinic_TestClass_get_defining_class_arg_impl(PyObject *self,
+ PyTypeObject *cls,
+ PyObject *arg)
+/*[clinic end generated code: output=fe7e49d96cbb7718 input=d1b83d3b853af6d9]*/
+{
+ return Py_BuildValue("(OO)", cls, arg);
}
static struct PyMethodDef test_class_methods[] = {
- _TESTCLINIC_TESTCLASS_METH_METHOD_NO_PARAMS_METHODDEF
+ _TESTCLINIC_TESTCLASS_GET_DEFINING_CLASS_METHODDEF
+ _TESTCLINIC_TESTCLASS_GET_DEFINING_CLASS_ARG_METHODDEF
{NULL, NULL}
};
diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c
index 758e88e288bac6..36dad024d31c95 100644
--- a/Modules/_testinternalcapi.c
+++ b/Modules/_testinternalcapi.c
@@ -1369,56 +1369,284 @@ dict_getitem_knownhash(PyObject *self, PyObject *args)
}
-/* To run some code in a sub-interpreter. */
+static int
+_init_interp_config_from_object(PyInterpreterConfig *config, PyObject *obj)
+{
+ if (obj == NULL) {
+ *config = (PyInterpreterConfig)_PyInterpreterConfig_INIT;
+ return 0;
+ }
+
+ PyObject *dict = PyObject_GetAttrString(obj, "__dict__");
+ if (dict == NULL) {
+ PyErr_Format(PyExc_TypeError, "bad config %R", obj);
+ return -1;
+ }
+ int res = _PyInterpreterConfig_InitFromDict(config, dict);
+ Py_DECREF(dict);
+ if (res < 0) {
+ return -1;
+ }
+ return 0;
+}
+
+static PyInterpreterState *
+_new_interpreter(PyInterpreterConfig *config, long whence)
+{
+ if (whence == _PyInterpreterState_WHENCE_XI) {
+ return _PyXI_NewInterpreter(config, &whence, NULL, NULL);
+ }
+ PyObject *exc = NULL;
+ PyInterpreterState *interp = NULL;
+ if (whence == _PyInterpreterState_WHENCE_UNKNOWN) {
+ assert(config == NULL);
+ interp = PyInterpreterState_New();
+ }
+ else if (whence == _PyInterpreterState_WHENCE_CAPI
+ || whence == _PyInterpreterState_WHENCE_LEGACY_CAPI)
+ {
+ PyThreadState *tstate = NULL;
+ PyThreadState *save_tstate = PyThreadState_Swap(NULL);
+ if (whence == _PyInterpreterState_WHENCE_LEGACY_CAPI) {
+ assert(config == NULL);
+ tstate = Py_NewInterpreter();
+ PyThreadState_Swap(save_tstate);
+ }
+ else {
+ PyStatus status = Py_NewInterpreterFromConfig(&tstate, config);
+ PyThreadState_Swap(save_tstate);
+ if (PyStatus_Exception(status)) {
+ assert(tstate == NULL);
+ _PyErr_SetFromPyStatus(status);
+ exc = PyErr_GetRaisedException();
+ }
+ }
+ if (tstate != NULL) {
+ interp = PyThreadState_GetInterpreter(tstate);
+ // Throw away the initial tstate.
+ PyThreadState_Swap(tstate);
+ PyThreadState_Clear(tstate);
+ PyThreadState_Swap(save_tstate);
+ PyThreadState_Delete(tstate);
+ }
+ }
+ else {
+ PyErr_Format(PyExc_ValueError,
+ "unsupported whence %ld", whence);
+ return NULL;
+ }
+
+ if (interp == NULL) {
+ PyErr_SetString(PyExc_InterpreterError,
+ "sub-interpreter creation failed");
+ if (exc != NULL) {
+ _PyErr_ChainExceptions1(exc);
+ }
+ }
+ return interp;
+}
+
+// This exists mostly for testing the _interpreters module, as an
+// alternative to _interpreters.create()
static PyObject *
-run_in_subinterp_with_config(PyObject *self, PyObject *args, PyObject *kwargs)
+create_interpreter(PyObject *self, PyObject *args, PyObject *kwargs)
{
- const char *code;
- PyObject *configobj;
- static char *kwlist[] = {"code", "config", NULL};
+ static char *kwlist[] = {"config", "whence", NULL};
+ PyObject *configobj = NULL;
+ long whence = _PyInterpreterState_WHENCE_XI;
if (!PyArg_ParseTupleAndKeywords(args, kwargs,
- "sO:run_in_subinterp_with_config", kwlist,
- &code, &configobj))
+ "|O$l:create_interpreter", kwlist,
+ &configobj, &whence))
{
return NULL;
}
+ if (configobj == Py_None) {
+ configobj = NULL;
+ }
- PyInterpreterConfig config;
- PyObject *dict = PyObject_GetAttrString(configobj, "__dict__");
- if (dict == NULL) {
- PyErr_Format(PyExc_TypeError, "bad config %R", configobj);
+ // Resolve the config.
+ PyInterpreterConfig *config = NULL;
+ PyInterpreterConfig _config;
+ if (whence == _PyInterpreterState_WHENCE_UNKNOWN
+ || whence == _PyInterpreterState_WHENCE_LEGACY_CAPI)
+ {
+ if (configobj != NULL) {
+ PyErr_SetString(PyExc_ValueError, "got unexpected config");
+ return NULL;
+ }
+ }
+ else {
+ config = &_config;
+ if (_init_interp_config_from_object(config, configobj) < 0) {
+ return NULL;
+ }
+ }
+
+ // Create the interpreter.
+ PyInterpreterState *interp = _new_interpreter(config, whence);
+ if (interp == NULL) {
return NULL;
}
- int res = _PyInterpreterConfig_InitFromDict(&config, dict);
- Py_DECREF(dict);
- if (res < 0) {
+
+ // Return the ID.
+ PyObject *idobj = _PyInterpreterState_GetIDObject(interp);
+ if (idobj == NULL) {
+ _PyXI_EndInterpreter(interp, NULL, NULL);
return NULL;
}
- PyThreadState *mainstate = PyThreadState_Get();
+ return idobj;
+}
- PyThreadState_Swap(NULL);
+// This exists mostly for testing the _interpreters module, as an
+// alternative to _interpreters.destroy()
+static PyObject *
+destroy_interpreter(PyObject *self, PyObject *args, PyObject *kwargs)
+{
+ static char *kwlist[] = {"id", NULL};
+ PyObject *idobj = NULL;
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs,
+ "O:destroy_interpreter", kwlist,
+ &idobj))
+ {
+ return NULL;
+ }
- PyThreadState *substate;
- PyStatus status = Py_NewInterpreterFromConfig(&substate, &config);
- if (PyStatus_Exception(status)) {
- /* Since no new thread state was created, there is no exception to
- propagate; raise a fresh one after swapping in the old thread
- state. */
- PyThreadState_Swap(mainstate);
- _PyErr_SetFromPyStatus(status);
- PyObject *exc = PyErr_GetRaisedException();
- PyErr_SetString(PyExc_RuntimeError, "sub-interpreter creation failed");
- _PyErr_ChainExceptions1(exc);
+ PyInterpreterState *interp = _PyInterpreterState_LookUpIDObject(idobj);
+ if (interp == NULL) {
return NULL;
}
- assert(substate != NULL);
+
+ _PyXI_EndInterpreter(interp, NULL, NULL);
+ Py_RETURN_NONE;
+}
+
+// This exists mostly for testing the _interpreters module, as an
+// alternative to _interpreters.exec()
+static PyObject *
+exec_interpreter(PyObject *self, PyObject *args, PyObject *kwargs)
+{
+ static char *kwlist[] = {"id", "code", "main", NULL};
+ PyObject *idobj;
+ const char *code;
+ int runningmain = 0;
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs,
+ "Os|$p:exec_interpreter", kwlist,
+ &idobj, &code, &runningmain))
+ {
+ return NULL;
+ }
+
+ PyInterpreterState *interp = _PyInterpreterState_LookUpIDObject(idobj);
+ if (interp == NULL) {
+ return NULL;
+ }
+
+ PyObject *res = NULL;
+ PyThreadState *tstate = PyThreadState_New(interp);
+ _PyThreadState_SetWhence(tstate, _PyThreadState_WHENCE_EXEC);
+
+ PyThreadState *save_tstate = PyThreadState_Swap(tstate);
+
+ if (runningmain) {
+ if (_PyInterpreterState_SetRunningMain(interp) < 0) {
+ goto finally;
+ }
+ }
+
/* only initialise 'cflags.cf_flags' to test backwards compatibility */
PyCompilerFlags cflags = {0};
int r = PyRun_SimpleStringFlags(code, &cflags);
- Py_EndInterpreter(substate);
+ if (PyErr_Occurred()) {
+ PyErr_PrintEx(0);
+ }
+
+ if (runningmain) {
+ _PyInterpreterState_SetNotRunningMain(interp);
+ }
+
+ res = PyLong_FromLong(r);
+
+finally:
+ PyThreadState_Clear(tstate);
+ PyThreadState_Swap(save_tstate);
+ PyThreadState_Delete(tstate);
+ return res;
+}
+
+
+/* To run some code in a sub-interpreter.
+
+Generally you can use test.support.interpreters,
+but we keep this helper as a distinct implementation.
+That's especially important for testing test.support.interpreters.
+*/
+static PyObject *
+run_in_subinterp_with_config(PyObject *self, PyObject *args, PyObject *kwargs)
+{
+ const char *code;
+ PyObject *configobj;
+ int xi = 0;
+ static char *kwlist[] = {"code", "config", "xi", NULL};
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs,
+ "sO|$p:run_in_subinterp_with_config", kwlist,
+ &code, &configobj, &xi))
+ {
+ return NULL;
+ }
- PyThreadState_Swap(mainstate);
+ PyInterpreterConfig config;
+ if (_init_interp_config_from_object(&config, configobj) < 0) {
+ return NULL;
+ }
+
+ /* only initialise 'cflags.cf_flags' to test backwards compatibility */
+ PyCompilerFlags cflags = {0};
+
+ int r;
+ if (xi) {
+ PyThreadState *save_tstate;
+ PyThreadState *tstate;
+
+ /* Create an interpreter, staying switched to it. */
+ PyInterpreterState *interp = \
+ _PyXI_NewInterpreter(&config, NULL, &tstate, &save_tstate);
+ if (interp == NULL) {
+ return NULL;
+ }
+
+ /* Exec the code in the new interpreter. */
+ r = PyRun_SimpleStringFlags(code, &cflags);
+
+ /* clean up post-exec. */
+ _PyXI_EndInterpreter(interp, tstate, &save_tstate);
+ }
+ else {
+ PyThreadState *substate;
+ PyThreadState *mainstate = PyThreadState_Swap(NULL);
+
+ /* Create an interpreter, staying switched to it. */
+ PyStatus status = Py_NewInterpreterFromConfig(&substate, &config);
+ if (PyStatus_Exception(status)) {
+ /* Since no new thread state was created, there is no exception to
+ propagate; raise a fresh one after swapping in the old thread
+ state. */
+ PyThreadState_Swap(mainstate);
+ _PyErr_SetFromPyStatus(status);
+ PyObject *exc = PyErr_GetRaisedException();
+ PyErr_SetString(PyExc_InterpreterError,
+ "sub-interpreter creation failed");
+ _PyErr_ChainExceptions1(exc);
+ return NULL;
+ }
+
+ /* Exec the code in the new interpreter. */
+ r = PyRun_SimpleStringFlags(code, &cflags);
+
+ /* clean up post-exec. */
+ Py_EndInterpreter(substate);
+ PyThreadState_Swap(mainstate);
+ }
return PyLong_FromLong(r);
}
@@ -1434,6 +1662,13 @@ normalize_interp_id(PyObject *self, PyObject *idobj)
return PyLong_FromLongLong(interpid);
}
+static PyObject *
+next_interpreter_id(PyObject *self, PyObject *Py_UNUSED(ignored))
+{
+ int64_t interpid = _PyRuntime.interpreters.next_id;
+ return PyLong_FromLongLong(interpid);
+}
+
static PyObject *
unused_interpreter_id(PyObject *self, PyObject *Py_UNUSED(ignored))
{
@@ -1751,10 +1986,17 @@ static PyMethodDef module_functions[] = {
{"get_object_dict_values", get_object_dict_values, METH_O},
{"hamt", new_hamt, METH_NOARGS},
{"dict_getitem_knownhash", dict_getitem_knownhash, METH_VARARGS},
+ {"create_interpreter", _PyCFunction_CAST(create_interpreter),
+ METH_VARARGS | METH_KEYWORDS},
+ {"destroy_interpreter", _PyCFunction_CAST(destroy_interpreter),
+ METH_VARARGS | METH_KEYWORDS},
+ {"exec_interpreter", _PyCFunction_CAST(exec_interpreter),
+ METH_VARARGS | METH_KEYWORDS},
{"run_in_subinterp_with_config",
_PyCFunction_CAST(run_in_subinterp_with_config),
METH_VARARGS | METH_KEYWORDS},
{"normalize_interp_id", normalize_interp_id, METH_O},
+ {"next_interpreter_id", next_interpreter_id, METH_NOARGS},
{"unused_interpreter_id", unused_interpreter_id, METH_NOARGS},
{"interpreter_exists", interpreter_exists, METH_O},
{"get_interpreter_refcount", get_interpreter_refcount, METH_O},
diff --git a/Modules/_threadmodule.c b/Modules/_threadmodule.c
index 4912cd776ef5ae..5aa719c3834e61 100644
--- a/Modules/_threadmodule.c
+++ b/Modules/_threadmodule.c
@@ -800,8 +800,8 @@ lock_PyThread_acquire_lock(lockobject *self, PyObject *args, PyObject *kwds)
}
PyDoc_STRVAR(acquire_doc,
-"acquire(blocking=True, timeout=-1) -> bool\n\
-(acquire_lock() is an obsolete synonym)\n\
+"acquire($self, /, blocking=True, timeout=-1)\n\
+--\n\
\n\
Lock the lock. Without argument, this blocks if the lock is already\n\
locked (even by the same thread), waiting for another thread to release\n\
@@ -810,6 +810,18 @@ With an argument, this will only block if the argument is true,\n\
and the return value reflects whether the lock is acquired.\n\
The blocking operation is interruptible.");
+PyDoc_STRVAR(acquire_lock_doc,
+"acquire_lock($self, /, blocking=True, timeout=-1)\n\
+--\n\
+\n\
+An obsolete synonym of acquire().");
+
+PyDoc_STRVAR(enter_doc,
+"__enter__($self, /)\n\
+--\n\
+\n\
+Lock the lock.");
+
static PyObject *
lock_PyThread_release_lock(lockobject *self, PyObject *Py_UNUSED(ignored))
{
@@ -825,13 +837,25 @@ lock_PyThread_release_lock(lockobject *self, PyObject *Py_UNUSED(ignored))
}
PyDoc_STRVAR(release_doc,
-"release()\n\
-(release_lock() is an obsolete synonym)\n\
+"release($self, /)\n\
+--\n\
\n\
Release the lock, allowing another thread that is blocked waiting for\n\
the lock to acquire the lock. The lock must be in the locked state,\n\
but it needn't be locked by the same thread that unlocks it.");
+PyDoc_STRVAR(release_lock_doc,
+"release_lock($self, /)\n\
+--\n\
+\n\
+An obsolete synonym of release().");
+
+PyDoc_STRVAR(lock_exit_doc,
+"__exit__($self, /, *exc_info)\n\
+--\n\
+\n\
+Release the lock.");
+
static PyObject *
lock_locked_lock(lockobject *self, PyObject *Py_UNUSED(ignored))
{
@@ -839,11 +863,17 @@ lock_locked_lock(lockobject *self, PyObject *Py_UNUSED(ignored))
}
PyDoc_STRVAR(locked_doc,
-"locked() -> bool\n\
-(locked_lock() is an obsolete synonym)\n\
+"locked($self, /)\n\
+--\n\
\n\
Return whether the lock is in the locked state.");
+PyDoc_STRVAR(locked_lock_doc,
+"locked_lock($self, /)\n\
+--\n\
+\n\
+An obsolete synonym of locked().");
+
static PyObject *
lock_repr(lockobject *self)
{
@@ -890,21 +920,21 @@ lock_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
static PyMethodDef lock_methods[] = {
{"acquire_lock", _PyCFunction_CAST(lock_PyThread_acquire_lock),
- METH_VARARGS | METH_KEYWORDS, acquire_doc},
+ METH_VARARGS | METH_KEYWORDS, acquire_lock_doc},
{"acquire", _PyCFunction_CAST(lock_PyThread_acquire_lock),
METH_VARARGS | METH_KEYWORDS, acquire_doc},
{"release_lock", (PyCFunction)lock_PyThread_release_lock,
- METH_NOARGS, release_doc},
+ METH_NOARGS, release_lock_doc},
{"release", (PyCFunction)lock_PyThread_release_lock,
METH_NOARGS, release_doc},
{"locked_lock", (PyCFunction)lock_locked_lock,
- METH_NOARGS, locked_doc},
+ METH_NOARGS, locked_lock_doc},
{"locked", (PyCFunction)lock_locked_lock,
METH_NOARGS, locked_doc},
{"__enter__", _PyCFunction_CAST(lock_PyThread_acquire_lock),
- METH_VARARGS | METH_KEYWORDS, acquire_doc},
+ METH_VARARGS | METH_KEYWORDS, enter_doc},
{"__exit__", (PyCFunction)lock_PyThread_release_lock,
- METH_VARARGS, release_doc},
+ METH_VARARGS, lock_exit_doc},
#ifdef HAVE_FORK
{"_at_fork_reinit", (PyCFunction)lock__at_fork_reinit,
METH_NOARGS, NULL},
@@ -913,7 +943,10 @@ static PyMethodDef lock_methods[] = {
};
PyDoc_STRVAR(lock_doc,
-"A lock object is a synchronization primitive. To create a lock,\n\
+"lock()\n\
+--\n\
+\n\
+A lock object is a synchronization primitive. To create a lock,\n\
call threading.Lock(). Methods are:\n\
\n\
acquire() -- lock the lock, possibly blocking until it can be obtained\n\
@@ -1029,7 +1062,8 @@ rlock_acquire(rlockobject *self, PyObject *args, PyObject *kwds)
}
PyDoc_STRVAR(rlock_acquire_doc,
-"acquire(blocking=True) -> bool\n\
+"acquire($self, /, blocking=True, timeout=-1)\n\
+--\n\
\n\
Lock the lock. `blocking` indicates whether we should wait\n\
for the lock to be available or not. If `blocking` is False\n\
@@ -1044,6 +1078,12 @@ Precisely, if the current thread already holds the lock, its\n\
internal counter is simply incremented. If nobody holds the lock,\n\
the lock is taken and its internal counter initialized to 1.");
+PyDoc_STRVAR(rlock_enter_doc,
+"__enter__($self, /)\n\
+--\n\
+\n\
+Lock the lock.");
+
static PyObject *
rlock_release(rlockobject *self, PyObject *Py_UNUSED(ignored))
{
@@ -1062,7 +1102,8 @@ rlock_release(rlockobject *self, PyObject *Py_UNUSED(ignored))
}
PyDoc_STRVAR(rlock_release_doc,
-"release()\n\
+"release($self, /)\n\
+--\n\
\n\
Release the lock, allowing another thread that is blocked waiting for\n\
the lock to acquire the lock. The lock must be in the locked state,\n\
@@ -1073,6 +1114,12 @@ Do note that if the lock was acquire()d several times in a row by the\n\
current thread, release() needs to be called as many times for the lock\n\
to be available for other threads.");
+PyDoc_STRVAR(rlock_exit_doc,
+"__exit__($self, /, *exc_info)\n\
+--\n\
+\n\
+Release the lock.");
+
static PyObject *
rlock_acquire_restore(rlockobject *self, PyObject *args)
{
@@ -1100,7 +1147,8 @@ rlock_acquire_restore(rlockobject *self, PyObject *args)
}
PyDoc_STRVAR(rlock_acquire_restore_doc,
-"_acquire_restore(state) -> None\n\
+"_acquire_restore($self, state, /)\n\
+--\n\
\n\
For internal use by `threading.Condition`.");
@@ -1125,7 +1173,8 @@ rlock_release_save(rlockobject *self, PyObject *Py_UNUSED(ignored))
}
PyDoc_STRVAR(rlock_release_save_doc,
-"_release_save() -> tuple\n\
+"_release_save($self, /)\n\
+--\n\
\n\
For internal use by `threading.Condition`.");
@@ -1139,7 +1188,8 @@ rlock_recursion_count(rlockobject *self, PyObject *Py_UNUSED(ignored))
}
PyDoc_STRVAR(rlock_recursion_count_doc,
-"_recursion_count() -> int\n\
+"_recursion_count($self, /)\n\
+--\n\
\n\
For internal use by reentrancy checks.");
@@ -1155,7 +1205,8 @@ rlock_is_owned(rlockobject *self, PyObject *Py_UNUSED(ignored))
}
PyDoc_STRVAR(rlock_is_owned_doc,
-"_is_owned() -> bool\n\
+"_is_owned($self, /)\n\
+--\n\
\n\
For internal use by `threading.Condition`.");
@@ -1223,9 +1274,9 @@ static PyMethodDef rlock_methods[] = {
{"_recursion_count", (PyCFunction)rlock_recursion_count,
METH_NOARGS, rlock_recursion_count_doc},
{"__enter__", _PyCFunction_CAST(rlock_acquire),
- METH_VARARGS | METH_KEYWORDS, rlock_acquire_doc},
+ METH_VARARGS | METH_KEYWORDS, rlock_enter_doc},
{"__exit__", (PyCFunction)rlock_release,
- METH_VARARGS, rlock_release_doc},
+ METH_VARARGS, rlock_exit_doc},
#ifdef HAVE_FORK
{"_at_fork_reinit", (PyCFunction)rlock__at_fork_reinit,
METH_NOARGS, NULL},
@@ -1626,7 +1677,7 @@ static PyType_Slot local_type_slots[] = {
{Py_tp_dealloc, (destructor)local_dealloc},
{Py_tp_getattro, (getattrofunc)local_getattro},
{Py_tp_setattro, (setattrofunc)local_setattro},
- {Py_tp_doc, "Thread-local data"},
+ {Py_tp_doc, "_local()\n--\n\nThread-local data"},
{Py_tp_traverse, (traverseproc)local_traverse},
{Py_tp_clear, (inquiry)local_clear},
{Py_tp_new, local_new},
@@ -1714,7 +1765,8 @@ thread_daemon_threads_allowed(PyObject *module, PyObject *Py_UNUSED(ignored))
}
PyDoc_STRVAR(daemon_threads_allowed_doc,
-"daemon_threads_allowed()\n\
+"daemon_threads_allowed($module, /)\n\
+--\n\
\n\
Return True if daemon threads are allowed in the current interpreter,\n\
and False otherwise.\n");
@@ -1798,9 +1850,9 @@ thread_PyThread_start_new_thread(PyObject *module, PyObject *fargs)
return PyLong_FromUnsignedLongLong(ident);
}
-PyDoc_STRVAR(start_new_doc,
-"start_new_thread(function, args[, kwargs])\n\
-(start_new() is an obsolete synonym)\n\
+PyDoc_STRVAR(start_new_thread_doc,
+"start_new_thread($module, function, args, kwargs={}, /)\n\
+--\n\
\n\
Start a new thread and return its identifier.\n\
\n\
@@ -1809,7 +1861,13 @@ tuple args and keyword arguments taken from the optional dictionary\n\
kwargs. The thread exits when the function returns; the return value\n\
is ignored. The thread will also exit when the function raises an\n\
unhandled exception; a stack trace will be printed unless the exception\n\
-is SystemExit.\n");
+is SystemExit.");
+
+PyDoc_STRVAR(start_new_doc,
+"start_new($module, function, args, kwargs={}, /)\n\
+--\n\
+\n\
+An obsolete synonym of start_new_thread().");
static PyObject *
thread_PyThread_start_joinable_thread(PyObject *module, PyObject *fargs,
@@ -1870,7 +1928,8 @@ thread_PyThread_start_joinable_thread(PyObject *module, PyObject *fargs,
}
PyDoc_STRVAR(start_joinable_doc,
-"start_joinable_thread(function[, daemon=True[, handle=None]])\n\
+"start_joinable_thread($module, /, function, handle=None, daemon=True)\n\
+--\n\
\n\
*For internal use only*: start a new thread.\n\
\n\
@@ -1890,12 +1949,18 @@ thread_PyThread_exit_thread(PyObject *self, PyObject *Py_UNUSED(ignored))
}
PyDoc_STRVAR(exit_doc,
-"exit()\n\
-(exit_thread() is an obsolete synonym)\n\
+"exit($module, /)\n\
+--\n\
\n\
This is synonymous to ``raise SystemExit''. It will cause the current\n\
thread to exit silently unless the exception is caught.");
+PyDoc_STRVAR(exit_thread_doc,
+"exit_thread($module, /)\n\
+--\n\
+\n\
+An obsolete synonym of exit().");
+
static PyObject *
thread_PyThread_interrupt_main(PyObject *self, PyObject *args)
{
@@ -1912,7 +1977,8 @@ thread_PyThread_interrupt_main(PyObject *self, PyObject *args)
}
PyDoc_STRVAR(interrupt_doc,
-"interrupt_main(signum=signal.SIGINT, /)\n\
+"interrupt_main($module, signum=signal.SIGINT, /)\n\
+--\n\
\n\
Simulate the arrival of the given signal in the main thread,\n\
where the corresponding signal handler will be executed.\n\
@@ -1928,13 +1994,19 @@ thread_PyThread_allocate_lock(PyObject *module, PyObject *Py_UNUSED(ignored))
return (PyObject *) newlockobject(module);
}
-PyDoc_STRVAR(allocate_doc,
-"allocate_lock() -> lock object\n\
-(allocate() is an obsolete synonym)\n\
+PyDoc_STRVAR(allocate_lock_doc,
+"allocate_lock($module, /)\n\
+--\n\
\n\
Create a new lock object. See help(type(threading.Lock())) for\n\
information about locks.");
+PyDoc_STRVAR(allocate_doc,
+"allocate($module, /)\n\
+--\n\
+\n\
+An obsolete synonym of allocate_lock().");
+
static PyObject *
thread_get_ident(PyObject *self, PyObject *Py_UNUSED(ignored))
{
@@ -1947,7 +2019,8 @@ thread_get_ident(PyObject *self, PyObject *Py_UNUSED(ignored))
}
PyDoc_STRVAR(get_ident_doc,
-"get_ident() -> integer\n\
+"get_ident($module, /)\n\
+--\n\
\n\
Return a non-zero integer that uniquely identifies the current thread\n\
amongst other threads that exist simultaneously.\n\
@@ -1966,7 +2039,8 @@ thread_get_native_id(PyObject *self, PyObject *Py_UNUSED(ignored))
}
PyDoc_STRVAR(get_native_id_doc,
-"get_native_id() -> integer\n\
+"get_native_id($module, /)\n\
+--\n\
\n\
Return a non-negative integer identifying the thread as reported\n\
by the OS (kernel). This may be used to uniquely identify a\n\
@@ -1981,9 +2055,9 @@ thread__count(PyObject *self, PyObject *Py_UNUSED(ignored))
}
PyDoc_STRVAR(_count_doc,
-"_count() -> integer\n\
+"_count($module, /)\n\
+--\n\
\n\
-\
Return the number of currently running Python threads, excluding\n\
the main thread. The returned number comprises all threads created\n\
through `start_new_thread()` as well as `threading.Thread`, and not\n\
@@ -2027,7 +2101,8 @@ thread_stack_size(PyObject *self, PyObject *args)
}
PyDoc_STRVAR(stack_size_doc,
-"stack_size([size]) -> size\n\
+"stack_size($module, size=0, /)\n\
+--\n\
\n\
Return the thread stack size used when creating new threads. The\n\
optional size argument specifies the stack size (in bytes) to be used\n\
@@ -2182,7 +2257,8 @@ thread_excepthook(PyObject *module, PyObject *args)
}
PyDoc_STRVAR(excepthook_doc,
-"excepthook(exc_type, exc_value, exc_traceback, thread)\n\
+"_excepthook($module, (exc_type, exc_value, exc_traceback, thread), /)\n\
+--\n\
\n\
Handle uncaught Thread.run() exception.");
@@ -2194,7 +2270,8 @@ thread__is_main_interpreter(PyObject *module, PyObject *Py_UNUSED(ignored))
}
PyDoc_STRVAR(thread__is_main_interpreter_doc,
-"_is_main_interpreter()\n\
+"_is_main_interpreter($module, /)\n\
+--\n\
\n\
Return True if the current interpreter is the main Python interpreter.");
@@ -2240,7 +2317,8 @@ thread_shutdown(PyObject *self, PyObject *args)
}
PyDoc_STRVAR(shutdown_doc,
-"_shutdown()\n\
+"_shutdown($module, /)\n\
+--\n\
\n\
Wait for all non-daemon threads (other than the calling thread) to stop.");
@@ -2269,7 +2347,8 @@ thread__make_thread_handle(PyObject *module, PyObject *identobj)
}
PyDoc_STRVAR(thread__make_thread_handle_doc,
-"_make_thread_handle(ident)\n\
+"_make_thread_handle($module, ident, /)\n\
+--\n\
\n\
Internal only. Make a thread handle for threads not spawned\n\
by the _thread or threading module.");
@@ -2281,14 +2360,15 @@ thread__get_main_thread_ident(PyObject *module, PyObject *Py_UNUSED(ignored))
}
PyDoc_STRVAR(thread__get_main_thread_ident_doc,
-"_get_main_thread_ident()\n\
+"_get_main_thread_ident($module, /)\n\
+--\n\
\n\
Internal only. Return a non-zero integer that uniquely identifies the main thread\n\
of the main interpreter.");
static PyMethodDef thread_methods[] = {
{"start_new_thread", (PyCFunction)thread_PyThread_start_new_thread,
- METH_VARARGS, start_new_doc},
+ METH_VARARGS, start_new_thread_doc},
{"start_new", (PyCFunction)thread_PyThread_start_new_thread,
METH_VARARGS, start_new_doc},
{"start_joinable_thread", _PyCFunction_CAST(thread_PyThread_start_joinable_thread),
@@ -2296,11 +2376,11 @@ static PyMethodDef thread_methods[] = {
{"daemon_threads_allowed", (PyCFunction)thread_daemon_threads_allowed,
METH_NOARGS, daemon_threads_allowed_doc},
{"allocate_lock", thread_PyThread_allocate_lock,
- METH_NOARGS, allocate_doc},
+ METH_NOARGS, allocate_lock_doc},
{"allocate", thread_PyThread_allocate_lock,
METH_NOARGS, allocate_doc},
{"exit_thread", thread_PyThread_exit_thread,
- METH_NOARGS, exit_doc},
+ METH_NOARGS, exit_thread_doc},
{"exit", thread_PyThread_exit_thread,
METH_NOARGS, exit_doc},
{"interrupt_main", (PyCFunction)thread_PyThread_interrupt_main,
diff --git a/Modules/_weakref.c b/Modules/_weakref.c
index 7225dbc9ce4a1b..1ea3ed5e40b761 100644
--- a/Modules/_weakref.c
+++ b/Modules/_weakref.c
@@ -14,7 +14,6 @@ module _weakref
#include "clinic/_weakref.c.h"
/*[clinic input]
-@critical_section object
_weakref.getweakrefcount -> Py_ssize_t
object: object
@@ -25,14 +24,9 @@ Return the number of weak references to 'object'.
static Py_ssize_t
_weakref_getweakrefcount_impl(PyObject *module, PyObject *object)
-/*[clinic end generated code: output=301806d59558ff3e input=6535a580f1d0ebdc]*/
+/*[clinic end generated code: output=301806d59558ff3e input=7d4d04fcaccf64d5]*/
{
- if (!_PyType_SUPPORTS_WEAKREFS(Py_TYPE(object))) {
- return 0;
- }
- PyWeakReference **list = GET_WEAKREFS_LISTPTR(object);
- Py_ssize_t count = _PyWeakref_GetWeakrefCount(*list);
- return count;
+ return _PyWeakref_GetWeakrefCount(object);
}
@@ -77,7 +71,6 @@ _weakref__remove_dead_weakref_impl(PyObject *module, PyObject *dct,
/*[clinic input]
-@critical_section object
_weakref.getweakrefs
object: object
/
@@ -86,26 +79,39 @@ Return a list of all weak reference objects pointing to 'object'.
[clinic start generated code]*/
static PyObject *
-_weakref_getweakrefs_impl(PyObject *module, PyObject *object)
-/*[clinic end generated code: output=5ec268989fb8f035 input=3dea95b8f5b31bbb]*/
+_weakref_getweakrefs(PyObject *module, PyObject *object)
+/*[clinic end generated code: output=25c7731d8e011824 input=00c6d0e5d3206693]*/
{
if (!_PyType_SUPPORTS_WEAKREFS(Py_TYPE(object))) {
return PyList_New(0);
}
- PyWeakReference **list = GET_WEAKREFS_LISTPTR(object);
- Py_ssize_t count = _PyWeakref_GetWeakrefCount(*list);
-
- PyObject *result = PyList_New(count);
+ PyObject *result = PyList_New(0);
if (result == NULL) {
return NULL;
}
- PyWeakReference *current = *list;
- for (Py_ssize_t i = 0; i < count; ++i) {
- PyList_SET_ITEM(result, i, Py_NewRef(current));
+ LOCK_WEAKREFS(object);
+ PyWeakReference *current = *GET_WEAKREFS_LISTPTR(object);
+ while (current != NULL) {
+ PyObject *curobj = (PyObject *) current;
+ if (_Py_TryIncref(curobj)) {
+ if (PyList_Append(result, curobj)) {
+ UNLOCK_WEAKREFS(object);
+ Py_DECREF(curobj);
+ Py_DECREF(result);
+ return NULL;
+ }
+ else {
+ // Undo our _Py_TryIncref. This is safe to do with the lock
+ // held in free-threaded builds; the list holds a reference to
+ // curobj so we're guaranteed not to invoke the destructor.
+ Py_DECREF(curobj);
+ }
+ }
current = current->wr_next;
}
+ UNLOCK_WEAKREFS(object);
return result;
}
diff --git a/Modules/_winapi.c b/Modules/_winapi.c
index 8f9b8520bb3f34..57b8bdc7ea2448 100644
--- a/Modules/_winapi.c
+++ b/Modules/_winapi.c
@@ -1517,6 +1517,49 @@ _winapi_GetLastError_impl(PyObject *module)
return GetLastError();
}
+
+/*[clinic input]
+_winapi.GetLongPathName
+
+ path: LPCWSTR
+
+Return the long version of the provided path.
+
+If the path is already in its long form, returns the same value.
+
+The path must already be a 'str'. If the type is not known, use
+os.fsdecode before calling this function.
+[clinic start generated code]*/
+
+static PyObject *
+_winapi_GetLongPathName_impl(PyObject *module, LPCWSTR path)
+/*[clinic end generated code: output=c4774b080275a2d0 input=9872e211e3a4a88f]*/
+{
+ DWORD cchBuffer;
+ PyObject *result = NULL;
+
+ Py_BEGIN_ALLOW_THREADS
+ cchBuffer = GetLongPathNameW(path, NULL, 0);
+ Py_END_ALLOW_THREADS
+ if (cchBuffer) {
+ WCHAR *buffer = (WCHAR *)PyMem_Malloc(cchBuffer * sizeof(WCHAR));
+ if (buffer) {
+ Py_BEGIN_ALLOW_THREADS
+ cchBuffer = GetLongPathNameW(path, buffer, cchBuffer);
+ Py_END_ALLOW_THREADS
+ if (cchBuffer) {
+ result = PyUnicode_FromWideChar(buffer, cchBuffer);
+ } else {
+ PyErr_SetFromWindowsErr(0);
+ }
+ PyMem_Free((void *)buffer);
+ }
+ } else {
+ PyErr_SetFromWindowsErr(0);
+ }
+ return result;
+}
+
/*[clinic input]
_winapi.GetModuleFileName
@@ -1551,6 +1594,48 @@ _winapi_GetModuleFileName_impl(PyObject *module, HMODULE module_handle)
return PyUnicode_FromWideChar(filename, wcslen(filename));
}
+/*[clinic input]
+_winapi.GetShortPathName
+
+ path: LPCWSTR
+
+Return the short version of the provided path.
+
+If the path is already in its short form, returns the same value.
+
+The path must already be a 'str'. If the type is not known, use
+os.fsdecode before calling this function.
+[clinic start generated code]*/
+
+static PyObject *
+_winapi_GetShortPathName_impl(PyObject *module, LPCWSTR path)
+/*[clinic end generated code: output=dab6ae494c621e81 input=43fa349aaf2ac718]*/
+{
+ DWORD cchBuffer;
+ PyObject *result = NULL;
+
+ Py_BEGIN_ALLOW_THREADS
+ cchBuffer = GetShortPathNameW(path, NULL, 0);
+ Py_END_ALLOW_THREADS
+ if (cchBuffer) {
+ WCHAR *buffer = (WCHAR *)PyMem_Malloc(cchBuffer * sizeof(WCHAR));
+ if (buffer) {
+ Py_BEGIN_ALLOW_THREADS
+ cchBuffer = GetShortPathNameW(path, buffer, cchBuffer);
+ Py_END_ALLOW_THREADS
+ if (cchBuffer) {
+ result = PyUnicode_FromWideChar(buffer, cchBuffer);
+ } else {
+ PyErr_SetFromWindowsErr(0);
+ }
+ PyMem_Free((void *)buffer);
+ }
+ } else {
+ PyErr_SetFromWindowsErr(0);
+ }
+ return result;
+}
+
/*[clinic input]
_winapi.GetStdHandle -> HANDLE
@@ -2846,7 +2931,9 @@ static PyMethodDef winapi_functions[] = {
_WINAPI_GETCURRENTPROCESS_METHODDEF
_WINAPI_GETEXITCODEPROCESS_METHODDEF
_WINAPI_GETLASTERROR_METHODDEF
+ _WINAPI_GETLONGPATHNAME_METHODDEF
_WINAPI_GETMODULEFILENAME_METHODDEF
+ _WINAPI_GETSHORTPATHNAME_METHODDEF
_WINAPI_GETSTDHANDLE_METHODDEF
_WINAPI_GETVERSION_METHODDEF
_WINAPI_MAPVIEWOFFILE_METHODDEF
diff --git a/Modules/_xxinterpchannelsmodule.c b/Modules/_xxinterpchannelsmodule.c
index b63a3aab8263bc..bea0a6cf93fa02 100644
--- a/Modules/_xxinterpchannelsmodule.c
+++ b/Modules/_xxinterpchannelsmodule.c
@@ -8,6 +8,7 @@
#include "Python.h"
#include "pycore_crossinterp.h" // struct _xid
#include "pycore_interp.h" // _PyInterpreterState_LookUpID()
+#include "pycore_pystate.h" // _PyInterpreterState_GetIDObject()
#ifdef MS_WINDOWS
#define WIN32_LEAN_AND_MEAN
@@ -17,9 +18,7 @@
#endif
#define REGISTERS_HEAP_TYPES
-#define RETURNS_INTERPID_OBJECT
#include "_interpreters_common.h"
-#undef RETURNS_INTERPID_OBJECT
#undef REGISTERS_HEAP_TYPES
@@ -2909,7 +2908,7 @@ channelsmod_list_interpreters(PyObject *self, PyObject *args, PyObject *kwds)
goto except;
}
if (res) {
- interpid_obj = get_interpid_obj(interp);
+ interpid_obj = _PyInterpreterState_GetIDObject(interp);
if (interpid_obj == NULL) {
goto except;
}
diff --git a/Modules/_xxsubinterpretersmodule.c b/Modules/_xxsubinterpretersmodule.c
index 94b8ee35001732..8fcd4fc4154882 100644
--- a/Modules/_xxsubinterpretersmodule.c
+++ b/Modules/_xxsubinterpretersmodule.c
@@ -20,9 +20,7 @@
#include "marshal.h" // PyMarshal_ReadObjectFromString()
-#define RETURNS_INTERPID_OBJECT
#include "_interpreters_common.h"
-#undef RETURNS_INTERPID_OBJECT
#define MODULE_NAME _xxsubinterpreters
@@ -425,59 +423,6 @@ config_from_object(PyObject *configobj, PyInterpreterConfig *config)
}
-static PyInterpreterState *
-new_interpreter(PyInterpreterConfig *config, PyObject **p_idobj, PyThreadState **p_tstate)
-{
- PyThreadState *save_tstate = PyThreadState_Get();
- assert(save_tstate != NULL);
- PyThreadState *tstate = NULL;
- // XXX Possible GILState issues?
- PyStatus status = Py_NewInterpreterFromConfig(&tstate, config);
- PyThreadState_Swap(save_tstate);
- if (PyStatus_Exception(status)) {
- /* Since no new thread state was created, there is no exception to
- propagate; raise a fresh one after swapping in the old thread
- state. */
- _PyErr_SetFromPyStatus(status);
- return NULL;
- }
- assert(tstate != NULL);
- PyInterpreterState *interp = PyThreadState_GetInterpreter(tstate);
-
- if (_PyInterpreterState_IDInitref(interp) < 0) {
- goto error;
- }
-
- if (p_idobj != NULL) {
- // We create the object using the original interpreter.
- PyObject *idobj = get_interpid_obj(interp);
- if (idobj == NULL) {
- goto error;
- }
- *p_idobj = idobj;
- }
-
- if (p_tstate != NULL) {
- *p_tstate = tstate;
- }
- else {
- PyThreadState_Swap(tstate);
- PyThreadState_Clear(tstate);
- PyThreadState_Swap(save_tstate);
- PyThreadState_Delete(tstate);
- }
-
- return interp;
-
-error:
- // XXX Possible GILState issues?
- save_tstate = PyThreadState_Swap(tstate);
- Py_EndInterpreter(tstate);
- PyThreadState_Swap(save_tstate);
- return NULL;
-}
-
-
static int
_run_script(PyObject *ns, const char *codestr, Py_ssize_t codestrlen, int flags)
{
@@ -546,6 +491,75 @@ _run_in_interpreter(PyInterpreterState *interp,
/* module level code ********************************************************/
+static long
+get_whence(PyInterpreterState *interp)
+{
+ return _PyInterpreterState_GetWhence(interp);
+}
+
+
+static PyInterpreterState *
+resolve_interp(PyObject *idobj, int restricted, int reqready, const char *op)
+{
+ PyInterpreterState *interp;
+ if (idobj == NULL) {
+ interp = PyInterpreterState_Get();
+ }
+ else {
+ interp = look_up_interp(idobj);
+ if (interp == NULL) {
+ return NULL;
+ }
+ }
+
+ if (reqready && !_PyInterpreterState_IsReady(interp)) {
+ if (idobj == NULL) {
+ PyErr_Format(PyExc_InterpreterError,
+ "cannot %s current interpreter (not ready)", op);
+ }
+ else {
+ PyErr_Format(PyExc_InterpreterError,
+ "cannot %s interpreter %R (not ready)", op, idobj);
+ }
+ return NULL;
+ }
+
+ if (restricted && get_whence(interp) != _PyInterpreterState_WHENCE_STDLIB) {
+ if (idobj == NULL) {
+ PyErr_Format(PyExc_InterpreterError,
+ "cannot %s unrecognized current interpreter", op);
+ }
+ else {
+ PyErr_Format(PyExc_InterpreterError,
+ "cannot %s unrecognized interpreter %R", op, idobj);
+ }
+ return NULL;
+ }
+
+ return interp;
+}
+
+
+static PyObject *
+get_summary(PyInterpreterState *interp)
+{
+ PyObject *idobj = _PyInterpreterState_GetIDObject(interp);
+ if (idobj == NULL) {
+ return NULL;
+ }
+ PyObject *whenceobj = PyLong_FromLong(
+ get_whence(interp));
+ if (whenceobj == NULL) {
+ Py_DECREF(idobj);
+ return NULL;
+ }
+ PyObject *res = PyTuple_Pack(2, idobj, whenceobj);
+ Py_DECREF(idobj);
+ Py_DECREF(whenceobj);
+ return res;
+}
+
+
static PyObject *
interp_new_config(PyObject *self, PyObject *args, PyObject *kwds)
{
@@ -606,8 +620,9 @@ interp_create(PyObject *self, PyObject *args, PyObject *kwds)
return NULL;
}
- PyObject *idobj = NULL;
- PyInterpreterState *interp = new_interpreter(&config, &idobj, NULL);
+ long whence = _PyInterpreterState_WHENCE_STDLIB;
+ PyInterpreterState *interp = \
+ _PyXI_NewInterpreter(&config, &whence, NULL, NULL);
if (interp == NULL) {
// XXX Move the chained exception to interpreters.create()?
PyObject *exc = PyErr_GetRaisedException();
@@ -616,6 +631,13 @@ interp_create(PyObject *self, PyObject *args, PyObject *kwds)
_PyErr_ChainExceptions1(exc);
return NULL;
}
+ assert(_PyInterpreterState_IsReady(interp));
+
+ PyObject *idobj = _PyInterpreterState_GetIDObject(interp);
+ if (idobj == NULL) {
+ _PyXI_EndInterpreter(interp, NULL, NULL);
+ return NULL;
+ }
if (reqrefs) {
// Decref to 0 will destroy the interpreter.
@@ -644,16 +666,20 @@ is \"isolated\".");
static PyObject *
interp_destroy(PyObject *self, PyObject *args, PyObject *kwds)
{
- static char *kwlist[] = {"id", NULL};
+ static char *kwlist[] = {"id", "restrict", NULL};
PyObject *id;
+ int restricted = 0;
// XXX Use "L" for id?
if (!PyArg_ParseTupleAndKeywords(args, kwds,
- "O:destroy", kwlist, &id)) {
+ "O|$p:destroy", kwlist, &id, &restricted))
+ {
return NULL;
}
// Look up the interpreter.
- PyInterpreterState *interp = look_up_interp(id);
+ int reqready = 0;
+ PyInterpreterState *interp = \
+ resolve_interp(id, restricted, reqready, "destroy");
if (interp == NULL) {
return NULL;
}
@@ -678,18 +704,13 @@ interp_destroy(PyObject *self, PyObject *args, PyObject *kwds)
}
// Destroy the interpreter.
- PyThreadState *tstate = PyThreadState_New(interp);
- _PyThreadState_SetWhence(tstate, _PyThreadState_WHENCE_INTERP);
- // XXX Possible GILState issues?
- PyThreadState *save_tstate = PyThreadState_Swap(tstate);
- Py_EndInterpreter(tstate);
- PyThreadState_Swap(save_tstate);
+ _PyXI_EndInterpreter(interp, NULL, NULL);
Py_RETURN_NONE;
}
PyDoc_STRVAR(destroy_doc,
-"destroy(id)\n\
+"destroy(id, *, restrict=False)\n\
\n\
Destroy the identified interpreter.\n\
\n\
@@ -698,31 +719,39 @@ So does an unrecognized ID.");
static PyObject *
-interp_list_all(PyObject *self, PyObject *Py_UNUSED(ignored))
+interp_list_all(PyObject *self, PyObject *args, PyObject *kwargs)
{
- PyObject *ids, *id;
- PyInterpreterState *interp;
+ static char *kwlist[] = {"require_ready", NULL};
+ int reqready = 0;
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs,
+ "|$p:" MODULE_NAME_STR ".list_all",
+ kwlist, &reqready))
+ {
+ return NULL;
+ }
- ids = PyList_New(0);
+ PyObject *ids = PyList_New(0);
if (ids == NULL) {
return NULL;
}
- interp = PyInterpreterState_Head();
+ PyInterpreterState *interp = PyInterpreterState_Head();
while (interp != NULL) {
- id = get_interpid_obj(interp);
- if (id == NULL) {
- Py_DECREF(ids);
- return NULL;
- }
- // insert at front of list
- int res = PyList_Insert(ids, 0, id);
- Py_DECREF(id);
- if (res < 0) {
- Py_DECREF(ids);
- return NULL;
- }
+ if (!reqready || _PyInterpreterState_IsReady(interp)) {
+ PyObject *item = get_summary(interp);
+ if (item == NULL) {
+ Py_DECREF(ids);
+ return NULL;
+ }
+ // insert at front of list
+ int res = PyList_Insert(ids, 0, item);
+ Py_DECREF(item);
+ if (res < 0) {
+ Py_DECREF(ids);
+ return NULL;
+ }
+ }
interp = PyInterpreterState_Next(interp);
}
@@ -730,7 +759,7 @@ interp_list_all(PyObject *self, PyObject *Py_UNUSED(ignored))
}
PyDoc_STRVAR(list_all_doc,
-"list_all() -> [ID]\n\
+"list_all() -> [(ID, whence)]\n\
\n\
Return a list containing the ID of every existing interpreter.");
@@ -742,11 +771,12 @@ interp_get_current(PyObject *self, PyObject *Py_UNUSED(ignored))
if (interp == NULL) {
return NULL;
}
- return get_interpid_obj(interp);
+ assert(_PyInterpreterState_IsReady(interp));
+ return get_summary(interp);
}
PyDoc_STRVAR(get_current_doc,
-"get_current() -> ID\n\
+"get_current() -> (ID, whence)\n\
\n\
Return the ID of current interpreter.");
@@ -754,28 +784,34 @@ Return the ID of current interpreter.");
static PyObject *
interp_get_main(PyObject *self, PyObject *Py_UNUSED(ignored))
{
- // Currently, 0 is always the main interpreter.
- int64_t id = 0;
- return PyLong_FromLongLong(id);
+ PyInterpreterState *interp = _PyInterpreterState_Main();
+ assert(_PyInterpreterState_IsReady(interp));
+ return get_summary(interp);
}
PyDoc_STRVAR(get_main_doc,
-"get_main() -> ID\n\
+"get_main() -> (ID, whence)\n\
\n\
Return the ID of main interpreter.");
+
static PyObject *
-interp_set___main___attrs(PyObject *self, PyObject *args)
+interp_set___main___attrs(PyObject *self, PyObject *args, PyObject *kwargs)
{
+ static char *kwlist[] = {"id", "updates", "restrict", NULL};
PyObject *id, *updates;
- if (!PyArg_ParseTuple(args, "OO:" MODULE_NAME_STR ".set___main___attrs",
- &id, &updates))
+ int restricted = 0;
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs,
+ "OO|$p:" MODULE_NAME_STR ".set___main___attrs",
+ kwlist, &id, &updates, &restricted))
{
return NULL;
}
// Look up the interpreter.
- PyInterpreterState *interp = look_up_interp(id);
+ int reqready = 1;
+ PyInterpreterState *interp = \
+ resolve_interp(id, restricted, reqready, "update __main__ for");
if (interp == NULL) {
return NULL;
}
@@ -814,10 +850,11 @@ interp_set___main___attrs(PyObject *self, PyObject *args)
}
PyDoc_STRVAR(set___main___attrs_doc,
-"set___main___attrs(id, ns)\n\
+"set___main___attrs(id, ns, *, restrict=False)\n\
\n\
Bind the given attributes in the interpreter's __main__ module.");
+
static PyUnicodeObject *
convert_script_arg(PyObject *arg, const char *fname, const char *displayname,
const char *expected)
@@ -895,16 +932,9 @@ convert_code_arg(PyObject *arg, const char *fname, const char *displayname,
}
static int
-_interp_exec(PyObject *self,
- PyObject *id_arg, PyObject *code_arg, PyObject *shared_arg,
- PyObject **p_excinfo)
+_interp_exec(PyObject *self, PyInterpreterState *interp,
+ PyObject *code_arg, PyObject *shared_arg, PyObject **p_excinfo)
{
- // Look up the interpreter.
- PyInterpreterState *interp = look_up_interp(id_arg);
- if (interp == NULL) {
- return -1;
- }
-
// Extract code.
Py_ssize_t codestrlen = -1;
PyObject *bytes_obj = NULL;
@@ -929,12 +959,21 @@ _interp_exec(PyObject *self,
static PyObject *
interp_exec(PyObject *self, PyObject *args, PyObject *kwds)
{
- static char *kwlist[] = {"id", "code", "shared", NULL};
+ static char *kwlist[] = {"id", "code", "shared", "restrict", NULL};
PyObject *id, *code;
PyObject *shared = NULL;
+ int restricted = 0;
if (!PyArg_ParseTupleAndKeywords(args, kwds,
- "OO|O:" MODULE_NAME_STR ".exec", kwlist,
- &id, &code, &shared)) {
+ "OO|O$p:" MODULE_NAME_STR ".exec", kwlist,
+ &id, &code, &shared, &restricted))
+ {
+ return NULL;
+ }
+
+ int reqready = 1;
+ PyInterpreterState *interp = \
+ resolve_interp(id, restricted, reqready, "exec code for");
+ if (interp == NULL) {
return NULL;
}
@@ -952,7 +991,7 @@ interp_exec(PyObject *self, PyObject *args, PyObject *kwds)
}
PyObject *excinfo = NULL;
- int res = _interp_exec(self, id, code, shared, &excinfo);
+ int res = _interp_exec(self, interp, code, shared, &excinfo);
Py_DECREF(code);
if (res < 0) {
assert((excinfo == NULL) != (PyErr_Occurred() == NULL));
@@ -962,7 +1001,7 @@ interp_exec(PyObject *self, PyObject *args, PyObject *kwds)
}
PyDoc_STRVAR(exec_doc,
-"exec(id, code, shared=None)\n\
+"exec(id, code, shared=None, *, restrict=False)\n\
\n\
Execute the provided code in the identified interpreter.\n\
This is equivalent to running the builtin exec() under the target\n\
@@ -981,13 +1020,24 @@ is ignored, including its __globals__ dict.");
static PyObject *
interp_call(PyObject *self, PyObject *args, PyObject *kwds)
{
- static char *kwlist[] = {"id", "callable", "args", "kwargs", NULL};
+ static char *kwlist[] = {"id", "callable", "args", "kwargs",
+ "restrict", NULL};
PyObject *id, *callable;
PyObject *args_obj = NULL;
PyObject *kwargs_obj = NULL;
+ int restricted = 0;
if (!PyArg_ParseTupleAndKeywords(args, kwds,
- "OO|OO:" MODULE_NAME_STR ".call", kwlist,
- &id, &callable, &args_obj, &kwargs_obj)) {
+ "OO|OO$p:" MODULE_NAME_STR ".call", kwlist,
+ &id, &callable, &args_obj, &kwargs_obj,
+ &restricted))
+ {
+ return NULL;
+ }
+
+ int reqready = 1;
+ PyInterpreterState *interp = \
+ resolve_interp(id, restricted, reqready, "make a call in");
+ if (interp == NULL) {
return NULL;
}
@@ -1007,7 +1057,7 @@ interp_call(PyObject *self, PyObject *args, PyObject *kwds)
}
PyObject *excinfo = NULL;
- int res = _interp_exec(self, id, code, NULL, &excinfo);
+ int res = _interp_exec(self, interp, code, NULL, &excinfo);
Py_DECREF(code);
if (res < 0) {
assert((excinfo == NULL) != (PyErr_Occurred() == NULL));
@@ -1017,7 +1067,7 @@ interp_call(PyObject *self, PyObject *args, PyObject *kwds)
}
PyDoc_STRVAR(call_doc,
-"call(id, callable, args=None, kwargs=None)\n\
+"call(id, callable, args=None, kwargs=None, *, restrict=False)\n\
\n\
Call the provided object in the identified interpreter.\n\
Pass the given args and kwargs, if possible.\n\
@@ -1031,12 +1081,21 @@ is ignored, including its __globals__ dict.");
static PyObject *
interp_run_string(PyObject *self, PyObject *args, PyObject *kwds)
{
- static char *kwlist[] = {"id", "script", "shared", NULL};
+ static char *kwlist[] = {"id", "script", "shared", "restrict", NULL};
PyObject *id, *script;
PyObject *shared = NULL;
+ int restricted = 0;
if (!PyArg_ParseTupleAndKeywords(args, kwds,
- "OU|O:" MODULE_NAME_STR ".run_string", kwlist,
- &id, &script, &shared)) {
+ "OU|O$p:" MODULE_NAME_STR ".run_string",
+ kwlist, &id, &script, &shared, &restricted))
+ {
+ return NULL;
+ }
+
+ int reqready = 1;
+ PyInterpreterState *interp = \
+ resolve_interp(id, restricted, reqready, "run a string in");
+ if (interp == NULL) {
return NULL;
}
@@ -1047,7 +1106,7 @@ interp_run_string(PyObject *self, PyObject *args, PyObject *kwds)
}
PyObject *excinfo = NULL;
- int res = _interp_exec(self, id, script, shared, &excinfo);
+ int res = _interp_exec(self, interp, script, shared, &excinfo);
Py_DECREF(script);
if (res < 0) {
assert((excinfo == NULL) != (PyErr_Occurred() == NULL));
@@ -1057,7 +1116,7 @@ interp_run_string(PyObject *self, PyObject *args, PyObject *kwds)
}
PyDoc_STRVAR(run_string_doc,
-"run_string(id, script, shared=None)\n\
+"run_string(id, script, shared=None, *, restrict=False)\n\
\n\
Execute the provided string in the identified interpreter.\n\
\n\
@@ -1066,12 +1125,21 @@ Execute the provided string in the identified interpreter.\n\
static PyObject *
interp_run_func(PyObject *self, PyObject *args, PyObject *kwds)
{
- static char *kwlist[] = {"id", "func", "shared", NULL};
+ static char *kwlist[] = {"id", "func", "shared", "restrict", NULL};
PyObject *id, *func;
PyObject *shared = NULL;
+ int restricted = 0;
if (!PyArg_ParseTupleAndKeywords(args, kwds,
- "OO|O:" MODULE_NAME_STR ".run_func", kwlist,
- &id, &func, &shared)) {
+ "OO|O$p:" MODULE_NAME_STR ".run_func",
+ kwlist, &id, &func, &shared, &restricted))
+ {
+ return NULL;
+ }
+
+ int reqready = 1;
+ PyInterpreterState *interp = \
+ resolve_interp(id, restricted, reqready, "run a function in");
+ if (interp == NULL) {
return NULL;
}
@@ -1083,7 +1151,7 @@ interp_run_func(PyObject *self, PyObject *args, PyObject *kwds)
}
PyObject *excinfo = NULL;
- int res = _interp_exec(self, id, (PyObject *)code, shared, &excinfo);
+ int res = _interp_exec(self, interp, (PyObject *)code, shared, &excinfo);
Py_DECREF(code);
if (res < 0) {
assert((excinfo == NULL) != (PyErr_Occurred() == NULL));
@@ -1093,7 +1161,7 @@ interp_run_func(PyObject *self, PyObject *args, PyObject *kwds)
}
PyDoc_STRVAR(run_func_doc,
-"run_func(id, func, shared=None)\n\
+"run_func(id, func, shared=None, *, restrict=False)\n\
\n\
Execute the body of the provided function in the identified interpreter.\n\
Code objects are also supported. In both cases, closures and args\n\
@@ -1129,17 +1197,23 @@ False otherwise.");
static PyObject *
interp_is_running(PyObject *self, PyObject *args, PyObject *kwds)
{
- static char *kwlist[] = {"id", NULL};
+ static char *kwlist[] = {"id", "restrict", NULL};
PyObject *id;
+ int restricted = 0;
if (!PyArg_ParseTupleAndKeywords(args, kwds,
- "O:is_running", kwlist, &id)) {
+ "O|$p:is_running", kwlist,
+ &id, &restricted))
+ {
return NULL;
}
- PyInterpreterState *interp = look_up_interp(id);
+ int reqready = 1;
+ PyInterpreterState *interp = \
+ resolve_interp(id, restricted, reqready, "check if running for");
if (interp == NULL) {
return NULL;
}
+
if (is_running_main(interp)) {
Py_RETURN_TRUE;
}
@@ -1147,7 +1221,7 @@ interp_is_running(PyObject *self, PyObject *args, PyObject *kwds)
}
PyDoc_STRVAR(is_running_doc,
-"is_running(id) -> bool\n\
+"is_running(id, *, restrict=False) -> bool\n\
\n\
Return whether or not the identified interpreter is running.");
@@ -1155,23 +1229,24 @@ Return whether or not the identified interpreter is running.");
static PyObject *
interp_get_config(PyObject *self, PyObject *args, PyObject *kwds)
{
- static char *kwlist[] = {"id", NULL};
+ static char *kwlist[] = {"id", "restrict", NULL};
PyObject *idobj = NULL;
+ int restricted = 0;
if (!PyArg_ParseTupleAndKeywords(args, kwds,
- "O:get_config", kwlist, &idobj))
+ "O|$p:get_config", kwlist,
+ &idobj, &restricted))
{
return NULL;
}
-
- PyInterpreterState *interp;
- if (idobj == NULL) {
- interp = PyInterpreterState_Get();
+ if (idobj == Py_None) {
+ idobj = NULL;
}
- else {
- interp = _PyInterpreterState_LookUpIDObject(idobj);
- if (interp == NULL) {
- return NULL;
- }
+
+ int reqready = 0;
+ PyInterpreterState *interp = \
+ resolve_interp(idobj, restricted, reqready, "get the config of");
+ if (interp == NULL) {
+ return NULL;
}
PyInterpreterConfig config;
@@ -1189,25 +1264,54 @@ interp_get_config(PyObject *self, PyObject *args, PyObject *kwds)
}
PyDoc_STRVAR(get_config_doc,
-"get_config(id) -> types.SimpleNamespace\n\
+"get_config(id, *, restrict=False) -> types.SimpleNamespace\n\
\n\
Return a representation of the config used to initialize the interpreter.");
+static PyObject *
+interp_whence(PyObject *self, PyObject *args, PyObject *kwds)
+{
+ static char *kwlist[] = {"id", NULL};
+ PyObject *id;
+ if (!PyArg_ParseTupleAndKeywords(args, kwds,
+ "O:whence", kwlist, &id))
+ {
+ return NULL;
+ }
+
+ PyInterpreterState *interp = look_up_interp(id);
+ if (interp == NULL) {
+ return NULL;
+ }
+
+ long whence = get_whence(interp);
+ return PyLong_FromLong(whence);
+}
+
+PyDoc_STRVAR(whence_doc,
+"whence(id) -> int\n\
+\n\
+Return an identifier for where the interpreter was created.");
+
+
static PyObject *
interp_incref(PyObject *self, PyObject *args, PyObject *kwds)
{
- static char *kwlist[] = {"id", "implieslink", NULL};
+ static char *kwlist[] = {"id", "implieslink", "restrict", NULL};
PyObject *id;
int implieslink = 0;
+ int restricted = 0;
if (!PyArg_ParseTupleAndKeywords(args, kwds,
- "O|$p:incref", kwlist,
- &id, &implieslink))
+ "O|$pp:incref", kwlist,
+ &id, &implieslink, &restricted))
{
return NULL;
}
- PyInterpreterState *interp = look_up_interp(id);
+ int reqready = 1;
+ PyInterpreterState *interp = \
+ resolve_interp(id, restricted, reqready, "incref");
if (interp == NULL) {
return NULL;
}
@@ -1225,32 +1329,106 @@ interp_incref(PyObject *self, PyObject *args, PyObject *kwds)
static PyObject *
interp_decref(PyObject *self, PyObject *args, PyObject *kwds)
{
- static char *kwlist[] = {"id", NULL};
+ static char *kwlist[] = {"id", "restrict", NULL};
PyObject *id;
+ int restricted = 0;
if (!PyArg_ParseTupleAndKeywords(args, kwds,
- "O:decref", kwlist, &id)) {
+ "O|$p:decref", kwlist, &id, &restricted))
+ {
return NULL;
}
- PyInterpreterState *interp = look_up_interp(id);
+ int reqready = 1;
+    PyInterpreterState *interp =
+ resolve_interp(id, restricted, reqready, "decref");
if (interp == NULL) {
return NULL;
}
+
_PyInterpreterState_IDDecref(interp);
Py_RETURN_NONE;
}
+static PyObject *
+capture_exception(PyObject *self, PyObject *args, PyObject *kwds)
+{
+ static char *kwlist[] = {"exc", NULL};
+ PyObject *exc_arg = NULL;
+ if (!PyArg_ParseTupleAndKeywords(args, kwds,
+ "|O:capture_exception", kwlist,
+ &exc_arg))
+ {
+ return NULL;
+ }
+
+ PyObject *exc = exc_arg;
+ if (exc == NULL || exc == Py_None) {
+ exc = PyErr_GetRaisedException();
+ if (exc == NULL) {
+ Py_RETURN_NONE;
+ }
+ }
+ else if (!PyExceptionInstance_Check(exc)) {
+ PyErr_Format(PyExc_TypeError, "expected exception, got %R", exc);
+ return NULL;
+ }
+ PyObject *captured = NULL;
+
+ _PyXI_excinfo info = {0};
+ if (_PyXI_InitExcInfo(&info, exc) < 0) {
+ goto finally;
+ }
+ captured = _PyXI_ExcInfoAsObject(&info);
+ if (captured == NULL) {
+ goto finally;
+ }
+
+ PyObject *formatted = _PyXI_FormatExcInfo(&info);
+ if (formatted == NULL) {
+ Py_CLEAR(captured);
+ goto finally;
+ }
+ int res = PyObject_SetAttrString(captured, "formatted", formatted);
+ Py_DECREF(formatted);
+ if (res < 0) {
+ Py_CLEAR(captured);
+ goto finally;
+ }
+
+finally:
+ _PyXI_ClearExcInfo(&info);
+ if (exc != exc_arg) {
+ if (PyErr_Occurred()) {
+ PyErr_SetRaisedException(exc);
+ }
+ else {
+ _PyErr_ChainExceptions1(exc);
+ }
+ }
+ return captured;
+}
+
+PyDoc_STRVAR(capture_exception_doc,
+"capture_exception(exc=None) -> types.SimpleNamespace\n\
+\n\
+Return a snapshot of an exception. If \"exc\" is None\n\
+then the current exception, if any, is used (but not cleared).\n\
+\n\
+The returned snapshot is the same as what _interpreters.exec() returns.");
+
+
static PyMethodDef module_functions[] = {
{"new_config", _PyCFunction_CAST(interp_new_config),
METH_VARARGS | METH_KEYWORDS, new_config_doc},
+
{"create", _PyCFunction_CAST(interp_create),
METH_VARARGS | METH_KEYWORDS, create_doc},
{"destroy", _PyCFunction_CAST(interp_destroy),
METH_VARARGS | METH_KEYWORDS, destroy_doc},
- {"list_all", interp_list_all,
- METH_NOARGS, list_all_doc},
+ {"list_all", _PyCFunction_CAST(interp_list_all),
+ METH_VARARGS | METH_KEYWORDS, list_all_doc},
{"get_current", interp_get_current,
METH_NOARGS, get_current_doc},
{"get_main", interp_get_main,
@@ -1260,6 +1438,8 @@ static PyMethodDef module_functions[] = {
METH_VARARGS | METH_KEYWORDS, is_running_doc},
{"get_config", _PyCFunction_CAST(interp_get_config),
METH_VARARGS | METH_KEYWORDS, get_config_doc},
+ {"whence", _PyCFunction_CAST(interp_whence),
+ METH_VARARGS | METH_KEYWORDS, whence_doc},
{"exec", _PyCFunction_CAST(interp_exec),
METH_VARARGS | METH_KEYWORDS, exec_doc},
{"call", _PyCFunction_CAST(interp_call),
@@ -1270,15 +1450,19 @@ static PyMethodDef module_functions[] = {
METH_VARARGS | METH_KEYWORDS, run_func_doc},
{"set___main___attrs", _PyCFunction_CAST(interp_set___main___attrs),
- METH_VARARGS, set___main___attrs_doc},
- {"is_shareable", _PyCFunction_CAST(object_is_shareable),
- METH_VARARGS | METH_KEYWORDS, is_shareable_doc},
+ METH_VARARGS | METH_KEYWORDS, set___main___attrs_doc},
{"incref", _PyCFunction_CAST(interp_incref),
METH_VARARGS | METH_KEYWORDS, NULL},
{"decref", _PyCFunction_CAST(interp_decref),
METH_VARARGS | METH_KEYWORDS, NULL},
+ {"is_shareable", _PyCFunction_CAST(object_is_shareable),
+ METH_VARARGS | METH_KEYWORDS, is_shareable_doc},
+
+ {"capture_exception", _PyCFunction_CAST(capture_exception),
+ METH_VARARGS | METH_KEYWORDS, capture_exception_doc},
+
{NULL, NULL} /* sentinel */
};
@@ -1295,6 +1479,20 @@ module_exec(PyObject *mod)
PyInterpreterState *interp = PyInterpreterState_Get();
module_state *state = get_module_state(mod);
+#define ADD_WHENCE(NAME) \
+ if (PyModule_AddIntConstant(mod, "WHENCE_" #NAME, \
+ _PyInterpreterState_WHENCE_##NAME) < 0) \
+ { \
+ goto error; \
+ }
+ ADD_WHENCE(UNKNOWN)
+ ADD_WHENCE(RUNTIME)
+ ADD_WHENCE(LEGACY_CAPI)
+ ADD_WHENCE(CAPI)
+ ADD_WHENCE(XI)
+ ADD_WHENCE(STDLIB)
+#undef ADD_WHENCE
+
// exceptions
if (PyModule_AddType(mod, (PyTypeObject *)PyExc_InterpreterError) < 0) {
goto error;
diff --git a/Modules/atexitmodule.c b/Modules/atexitmodule.c
index b6f1bcbca67916..8e908da2534c55 100644
--- a/Modules/atexitmodule.c
+++ b/Modules/atexitmodule.c
@@ -164,7 +164,8 @@ _PyAtExit_Call(PyInterpreterState *interp)
PyDoc_STRVAR(atexit_register__doc__,
-"register(func, *args, **kwargs) -> func\n\
+"register($module, func, /, *args, **kwargs)\n\
+--\n\
\n\
Register a function to be executed upon normal program termination\n\
\n\
@@ -221,7 +222,8 @@ atexit_register(PyObject *module, PyObject *args, PyObject *kwargs)
}
PyDoc_STRVAR(atexit_run_exitfuncs__doc__,
-"_run_exitfuncs() -> None\n\
+"_run_exitfuncs($module, /)\n\
+--\n\
\n\
Run all registered exit functions.\n\
\n\
@@ -236,7 +238,8 @@ atexit_run_exitfuncs(PyObject *module, PyObject *unused)
}
PyDoc_STRVAR(atexit_clear__doc__,
-"_clear() -> None\n\
+"_clear($module, /)\n\
+--\n\
\n\
Clear the list of previously registered exit functions.");
@@ -248,7 +251,8 @@ atexit_clear(PyObject *module, PyObject *unused)
}
PyDoc_STRVAR(atexit_ncallbacks__doc__,
-"_ncallbacks() -> int\n\
+"_ncallbacks($module, /)\n\
+--\n\
\n\
Return the number of registered exit functions.");
@@ -260,7 +264,8 @@ atexit_ncallbacks(PyObject *module, PyObject *unused)
}
PyDoc_STRVAR(atexit_unregister__doc__,
-"unregister(func) -> None\n\
+"unregister($module, func, /)\n\
+--\n\
\n\
Unregister an exit function which was previously registered using\n\
atexit.register\n\
diff --git a/Modules/clinic/_testclinic.c.h b/Modules/clinic/_testclinic.c.h
index bb516be37ec3f0..6a59baa2137b75 100644
--- a/Modules/clinic/_testclinic.c.h
+++ b/Modules/clinic/_testclinic.c.h
@@ -3142,25 +3142,81 @@ clone_with_conv_f2(PyObject *module, PyObject *const *args, Py_ssize_t nargs, Py
return return_value;
}
-PyDoc_STRVAR(_testclinic_TestClass_meth_method_no_params__doc__,
-"meth_method_no_params($self, /)\n"
+PyDoc_STRVAR(_testclinic_TestClass_get_defining_class__doc__,
+"get_defining_class($self, /)\n"
"--\n"
"\n");
-#define _TESTCLINIC_TESTCLASS_METH_METHOD_NO_PARAMS_METHODDEF \
- {"meth_method_no_params", _PyCFunction_CAST(_testclinic_TestClass_meth_method_no_params), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _testclinic_TestClass_meth_method_no_params__doc__},
+#define _TESTCLINIC_TESTCLASS_GET_DEFINING_CLASS_METHODDEF \
+ {"get_defining_class", _PyCFunction_CAST(_testclinic_TestClass_get_defining_class), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _testclinic_TestClass_get_defining_class__doc__},
static PyObject *
-_testclinic_TestClass_meth_method_no_params_impl(PyObject *self,
- PyTypeObject *cls);
+_testclinic_TestClass_get_defining_class_impl(PyObject *self,
+ PyTypeObject *cls);
static PyObject *
-_testclinic_TestClass_meth_method_no_params(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+_testclinic_TestClass_get_defining_class(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) {
- PyErr_SetString(PyExc_TypeError, "meth_method_no_params() takes no arguments");
+ PyErr_SetString(PyExc_TypeError, "get_defining_class() takes no arguments");
return NULL;
}
- return _testclinic_TestClass_meth_method_no_params_impl(self, cls);
+ return _testclinic_TestClass_get_defining_class_impl(self, cls);
}
-/*[clinic end generated code: output=6520c1ca5392a3f0 input=a9049054013a1b77]*/
+
+PyDoc_STRVAR(_testclinic_TestClass_get_defining_class_arg__doc__,
+"get_defining_class_arg($self, /, arg)\n"
+"--\n"
+"\n");
+
+#define _TESTCLINIC_TESTCLASS_GET_DEFINING_CLASS_ARG_METHODDEF \
+ {"get_defining_class_arg", _PyCFunction_CAST(_testclinic_TestClass_get_defining_class_arg), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _testclinic_TestClass_get_defining_class_arg__doc__},
+
+static PyObject *
+_testclinic_TestClass_get_defining_class_arg_impl(PyObject *self,
+ PyTypeObject *cls,
+ PyObject *arg);
+
+static PyObject *
+_testclinic_TestClass_get_defining_class_arg(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
+
+ #define NUM_KEYWORDS 1
+ static struct {
+ PyGC_Head _this_is_not_used;
+ PyObject_VAR_HEAD
+ PyObject *ob_item[NUM_KEYWORDS];
+ } _kwtuple = {
+ .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
+ .ob_item = { &_Py_ID(arg), },
+ };
+ #undef NUM_KEYWORDS
+ #define KWTUPLE (&_kwtuple.ob_base.ob_base)
+
+ #else // !Py_BUILD_CORE
+ # define KWTUPLE NULL
+ #endif // !Py_BUILD_CORE
+
+ static const char * const _keywords[] = {"arg", NULL};
+ static _PyArg_Parser _parser = {
+ .keywords = _keywords,
+ .fname = "get_defining_class_arg",
+ .kwtuple = KWTUPLE,
+ };
+ #undef KWTUPLE
+ PyObject *argsbuf[1];
+ PyObject *arg;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ arg = args[0];
+ return_value = _testclinic_TestClass_get_defining_class_arg_impl(self, cls, arg);
+
+exit:
+ return return_value;
+}
+/*[clinic end generated code: output=aa352c3a67300056 input=a9049054013a1b77]*/
diff --git a/Modules/clinic/_weakref.c.h b/Modules/clinic/_weakref.c.h
index 550b6c4d71a015..8d7bc5dc936610 100644
--- a/Modules/clinic/_weakref.c.h
+++ b/Modules/clinic/_weakref.c.h
@@ -2,7 +2,6 @@
preserve
[clinic start generated code]*/
-#include "pycore_critical_section.h"// Py_BEGIN_CRITICAL_SECTION()
#include "pycore_modsupport.h" // _PyArg_CheckPositional()
PyDoc_STRVAR(_weakref_getweakrefcount__doc__,
@@ -23,9 +22,7 @@ _weakref_getweakrefcount(PyObject *module, PyObject *object)
PyObject *return_value = NULL;
Py_ssize_t _return_value;
- Py_BEGIN_CRITICAL_SECTION(object);
_return_value = _weakref_getweakrefcount_impl(module, object);
- Py_END_CRITICAL_SECTION();
if ((_return_value == -1) && PyErr_Occurred()) {
goto exit;
}
@@ -79,21 +76,6 @@ PyDoc_STRVAR(_weakref_getweakrefs__doc__,
#define _WEAKREF_GETWEAKREFS_METHODDEF \
{"getweakrefs", (PyCFunction)_weakref_getweakrefs, METH_O, _weakref_getweakrefs__doc__},
-static PyObject *
-_weakref_getweakrefs_impl(PyObject *module, PyObject *object);
-
-static PyObject *
-_weakref_getweakrefs(PyObject *module, PyObject *object)
-{
- PyObject *return_value = NULL;
-
- Py_BEGIN_CRITICAL_SECTION(object);
- return_value = _weakref_getweakrefs_impl(module, object);
- Py_END_CRITICAL_SECTION();
-
- return return_value;
-}
-
PyDoc_STRVAR(_weakref_proxy__doc__,
"proxy($module, object, callback=None, /)\n"
"--\n"
@@ -130,4 +112,4 @@ _weakref_proxy(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
exit:
return return_value;
}
-/*[clinic end generated code: output=d5d30707212a9870 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=60f59adc1dc9eab8 input=a9049054013a1b77]*/
diff --git a/Modules/clinic/_winapi.c.h b/Modules/clinic/_winapi.c.h
index 468457e624c691..9acb2dc4fe7eba 100644
--- a/Modules/clinic/_winapi.c.h
+++ b/Modules/clinic/_winapi.c.h
@@ -741,6 +741,76 @@ _winapi_GetLastError(PyObject *module, PyObject *Py_UNUSED(ignored))
return return_value;
}
+PyDoc_STRVAR(_winapi_GetLongPathName__doc__,
+"GetLongPathName($module, /, path)\n"
+"--\n"
+"\n"
+"Return the long version of the provided path.\n"
+"\n"
+"If the path is already in its long form, returns the same value.\n"
+"\n"
+"The path must already be a \'str\'. If the type is not known, use\n"
+"os.fsdecode before calling this function.");
+
+#define _WINAPI_GETLONGPATHNAME_METHODDEF \
+ {"GetLongPathName", _PyCFunction_CAST(_winapi_GetLongPathName), METH_FASTCALL|METH_KEYWORDS, _winapi_GetLongPathName__doc__},
+
+static PyObject *
+_winapi_GetLongPathName_impl(PyObject *module, LPCWSTR path);
+
+static PyObject *
+_winapi_GetLongPathName(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
+
+ #define NUM_KEYWORDS 1
+ static struct {
+ PyGC_Head _this_is_not_used;
+ PyObject_VAR_HEAD
+ PyObject *ob_item[NUM_KEYWORDS];
+ } _kwtuple = {
+ .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
+ .ob_item = { &_Py_ID(path), },
+ };
+ #undef NUM_KEYWORDS
+ #define KWTUPLE (&_kwtuple.ob_base.ob_base)
+
+ #else // !Py_BUILD_CORE
+ # define KWTUPLE NULL
+ #endif // !Py_BUILD_CORE
+
+ static const char * const _keywords[] = {"path", NULL};
+ static _PyArg_Parser _parser = {
+ .keywords = _keywords,
+ .fname = "GetLongPathName",
+ .kwtuple = KWTUPLE,
+ };
+ #undef KWTUPLE
+ PyObject *argsbuf[1];
+ LPCWSTR path = NULL;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ if (!PyUnicode_Check(args[0])) {
+ _PyArg_BadArgument("GetLongPathName", "argument 'path'", "str", args[0]);
+ goto exit;
+ }
+ path = PyUnicode_AsWideCharString(args[0], NULL);
+ if (path == NULL) {
+ goto exit;
+ }
+ return_value = _winapi_GetLongPathName_impl(module, path);
+
+exit:
+ /* Cleanup for path */
+ PyMem_Free((void *)path);
+
+ return return_value;
+}
+
PyDoc_STRVAR(_winapi_GetModuleFileName__doc__,
"GetModuleFileName($module, module_handle, /)\n"
"--\n"
@@ -775,6 +845,76 @@ _winapi_GetModuleFileName(PyObject *module, PyObject *arg)
return return_value;
}
+PyDoc_STRVAR(_winapi_GetShortPathName__doc__,
+"GetShortPathName($module, /, path)\n"
+"--\n"
+"\n"
+"Return the short version of the provided path.\n"
+"\n"
+"If the path is already in its short form, returns the same value.\n"
+"\n"
+"The path must already be a \'str\'. If the type is not known, use\n"
+"os.fsdecode before calling this function.");
+
+#define _WINAPI_GETSHORTPATHNAME_METHODDEF \
+ {"GetShortPathName", _PyCFunction_CAST(_winapi_GetShortPathName), METH_FASTCALL|METH_KEYWORDS, _winapi_GetShortPathName__doc__},
+
+static PyObject *
+_winapi_GetShortPathName_impl(PyObject *module, LPCWSTR path);
+
+static PyObject *
+_winapi_GetShortPathName(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
+
+ #define NUM_KEYWORDS 1
+ static struct {
+ PyGC_Head _this_is_not_used;
+ PyObject_VAR_HEAD
+ PyObject *ob_item[NUM_KEYWORDS];
+ } _kwtuple = {
+ .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
+ .ob_item = { &_Py_ID(path), },
+ };
+ #undef NUM_KEYWORDS
+ #define KWTUPLE (&_kwtuple.ob_base.ob_base)
+
+ #else // !Py_BUILD_CORE
+ # define KWTUPLE NULL
+ #endif // !Py_BUILD_CORE
+
+ static const char * const _keywords[] = {"path", NULL};
+ static _PyArg_Parser _parser = {
+ .keywords = _keywords,
+ .fname = "GetShortPathName",
+ .kwtuple = KWTUPLE,
+ };
+ #undef KWTUPLE
+ PyObject *argsbuf[1];
+ LPCWSTR path = NULL;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ if (!PyUnicode_Check(args[0])) {
+ _PyArg_BadArgument("GetShortPathName", "argument 'path'", "str", args[0]);
+ goto exit;
+ }
+ path = PyUnicode_AsWideCharString(args[0], NULL);
+ if (path == NULL) {
+ goto exit;
+ }
+ return_value = _winapi_GetShortPathName_impl(module, path);
+
+exit:
+ /* Cleanup for path */
+ PyMem_Free((void *)path);
+
+ return return_value;
+}
+
PyDoc_STRVAR(_winapi_GetStdHandle__doc__,
"GetStdHandle($module, std_handle, /)\n"
"--\n"
@@ -1978,4 +2118,4 @@ _winapi_CopyFile2(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyO
return return_value;
}
-/*[clinic end generated code: output=1f5bbcfa8d1847c5 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=ed94a2482ede3744 input=a9049054013a1b77]*/
diff --git a/Modules/clinic/signalmodule.c.h b/Modules/clinic/signalmodule.c.h
index bc33e066654364..d074cc30d1e746 100644
--- a/Modules/clinic/signalmodule.c.h
+++ b/Modules/clinic/signalmodule.c.h
@@ -2,6 +2,10 @@
preserve
[clinic start generated code]*/
+#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
+# include "pycore_gc.h" // PyGC_Head
+# include "pycore_runtime.h" // _Py_ID()
+#endif
#include "pycore_modsupport.h" // _PyArg_CheckPositional()
PyDoc_STRVAR(signal_default_int_handler__doc__,
@@ -276,6 +280,77 @@ signal_siginterrupt(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
#endif /* defined(HAVE_SIGINTERRUPT) */
+PyDoc_STRVAR(signal_set_wakeup_fd__doc__,
+"set_wakeup_fd($module, fd, /, *, warn_on_full_buffer=True)\n"
+"--\n"
+"\n"
+"Sets the fd to be written to (with the signal number) when a signal comes in.\n"
+"\n"
+"A library can use this to wakeup select or poll.\n"
+"The previous fd or -1 is returned.\n"
+"\n"
+"The fd must be non-blocking.");
+
+#define SIGNAL_SET_WAKEUP_FD_METHODDEF \
+ {"set_wakeup_fd", _PyCFunction_CAST(signal_set_wakeup_fd), METH_FASTCALL|METH_KEYWORDS, signal_set_wakeup_fd__doc__},
+
+static PyObject *
+signal_set_wakeup_fd_impl(PyObject *module, PyObject *fdobj,
+ int warn_on_full_buffer);
+
+static PyObject *
+signal_set_wakeup_fd(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+{
+ PyObject *return_value = NULL;
+ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
+
+ #define NUM_KEYWORDS 1
+ static struct {
+ PyGC_Head _this_is_not_used;
+ PyObject_VAR_HEAD
+ PyObject *ob_item[NUM_KEYWORDS];
+ } _kwtuple = {
+ .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
+ .ob_item = { &_Py_ID(warn_on_full_buffer), },
+ };
+ #undef NUM_KEYWORDS
+ #define KWTUPLE (&_kwtuple.ob_base.ob_base)
+
+ #else // !Py_BUILD_CORE
+ # define KWTUPLE NULL
+ #endif // !Py_BUILD_CORE
+
+ static const char * const _keywords[] = {"", "warn_on_full_buffer", NULL};
+ static _PyArg_Parser _parser = {
+ .keywords = _keywords,
+ .fname = "set_wakeup_fd",
+ .kwtuple = KWTUPLE,
+ };
+ #undef KWTUPLE
+ PyObject *argsbuf[2];
+ Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1;
+ PyObject *fdobj;
+ int warn_on_full_buffer = 1;
+
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ fdobj = args[0];
+ if (!noptargs) {
+ goto skip_optional_kwonly;
+ }
+ warn_on_full_buffer = PyObject_IsTrue(args[1]);
+ if (warn_on_full_buffer < 0) {
+ goto exit;
+ }
+skip_optional_kwonly:
+ return_value = signal_set_wakeup_fd_impl(module, fdobj, warn_on_full_buffer);
+
+exit:
+ return return_value;
+}
+
#if defined(HAVE_SETITIMER)
PyDoc_STRVAR(signal_setitimer__doc__,
@@ -701,4 +776,4 @@ signal_pidfd_send_signal(PyObject *module, PyObject *const *args, Py_ssize_t nar
#ifndef SIGNAL_PIDFD_SEND_SIGNAL_METHODDEF
#define SIGNAL_PIDFD_SEND_SIGNAL_METHODDEF
#endif /* !defined(SIGNAL_PIDFD_SEND_SIGNAL_METHODDEF) */
-/*[clinic end generated code: output=5a9928cb2dc75b5f input=a9049054013a1b77]*/
+/*[clinic end generated code: output=1c11c1b6f12f26be input=a9049054013a1b77]*/
diff --git a/Modules/faulthandler.c b/Modules/faulthandler.c
index 02e94a21191483..c70d43a36b5cc7 100644
--- a/Modules/faulthandler.c
+++ b/Modules/faulthandler.c
@@ -1192,58 +1192,67 @@ PyDoc_STRVAR(module_doc,
static PyMethodDef module_methods[] = {
{"enable",
_PyCFunction_CAST(faulthandler_py_enable), METH_VARARGS|METH_KEYWORDS,
- PyDoc_STR("enable(file=sys.stderr, all_threads=True): "
- "enable the fault handler")},
+ PyDoc_STR("enable($module, /, file=sys.stderr, all_threads=True)\n--\n\n"
+ "Enable the fault handler.")},
{"disable", faulthandler_disable_py, METH_NOARGS,
- PyDoc_STR("disable(): disable the fault handler")},
+ PyDoc_STR("disable($module, /)\n--\n\n"
+ "Disable the fault handler.")},
{"is_enabled", faulthandler_is_enabled, METH_NOARGS,
- PyDoc_STR("is_enabled()->bool: check if the handler is enabled")},
+ PyDoc_STR("is_enabled($module, /)\n--\n\n"
+ "Check if the handler is enabled.")},
{"dump_traceback",
_PyCFunction_CAST(faulthandler_dump_traceback_py), METH_VARARGS|METH_KEYWORDS,
- PyDoc_STR("dump_traceback(file=sys.stderr, all_threads=True): "
- "dump the traceback of the current thread, or of all threads "
- "if all_threads is True, into file")},
+ PyDoc_STR("dump_traceback($module, /, file=sys.stderr, all_threads=True)\n--\n\n"
+ "Dump the traceback of the current thread, or of all threads "
+ "if all_threads is True, into file.")},
{"dump_traceback_later",
_PyCFunction_CAST(faulthandler_dump_traceback_later), METH_VARARGS|METH_KEYWORDS,
- PyDoc_STR("dump_traceback_later(timeout, repeat=False, file=sys.stderr, exit=False):\n"
- "dump the traceback of all threads in timeout seconds,\n"
+ PyDoc_STR("dump_traceback_later($module, /, timeout, repeat=False, file=sys.stderr, exit=False)\n--\n\n"
+ "Dump the traceback of all threads in timeout seconds,\n"
"or each timeout seconds if repeat is True. If exit is True, "
"call _exit(1) which is not safe.")},
{"cancel_dump_traceback_later",
faulthandler_cancel_dump_traceback_later_py, METH_NOARGS,
- PyDoc_STR("cancel_dump_traceback_later():\ncancel the previous call "
- "to dump_traceback_later().")},
+ PyDoc_STR("cancel_dump_traceback_later($module, /)\n--\n\n"
+ "Cancel the previous call to dump_traceback_later().")},
#ifdef FAULTHANDLER_USER
{"register",
_PyCFunction_CAST(faulthandler_register_py), METH_VARARGS|METH_KEYWORDS,
- PyDoc_STR("register(signum, file=sys.stderr, all_threads=True, chain=False): "
- "register a handler for the signal 'signum': dump the "
+ PyDoc_STR("register($module, /, signum, file=sys.stderr, all_threads=True, chain=False)\n--\n\n"
+ "Register a handler for the signal 'signum': dump the "
"traceback of the current thread, or of all threads if "
- "all_threads is True, into file")},
+ "all_threads is True, into file.")},
{"unregister",
- _PyCFunction_CAST(faulthandler_unregister_py), METH_VARARGS|METH_KEYWORDS,
- PyDoc_STR("unregister(signum): unregister the handler of the signal "
- "'signum' registered by register()")},
+ _PyCFunction_CAST(faulthandler_unregister_py), METH_VARARGS,
+ PyDoc_STR("unregister($module, signum, /)\n--\n\n"
+ "Unregister the handler of the signal "
+ "'signum' registered by register().")},
#endif
{"_read_null", faulthandler_read_null, METH_NOARGS,
- PyDoc_STR("_read_null(): read from NULL, raise "
- "a SIGSEGV or SIGBUS signal depending on the platform")},
+ PyDoc_STR("_read_null($module, /)\n--\n\n"
+ "Read from NULL, raise "
+ "a SIGSEGV or SIGBUS signal depending on the platform.")},
{"_sigsegv", faulthandler_sigsegv, METH_VARARGS,
- PyDoc_STR("_sigsegv(release_gil=False): raise a SIGSEGV signal")},
+ PyDoc_STR("_sigsegv($module, release_gil=False, /)\n--\n\n"
+ "Raise a SIGSEGV signal.")},
{"_fatal_error_c_thread", faulthandler_fatal_error_c_thread, METH_NOARGS,
- PyDoc_STR("fatal_error_c_thread(): "
- "call Py_FatalError() in a new C thread.")},
+ PyDoc_STR("_fatal_error_c_thread($module, /)\n--\n\n"
+ "Call Py_FatalError() in a new C thread.")},
{"_sigabrt", faulthandler_sigabrt, METH_NOARGS,
- PyDoc_STR("_sigabrt(): raise a SIGABRT signal")},
+ PyDoc_STR("_sigabrt($module, /)\n--\n\n"
+ "Raise a SIGABRT signal.")},
{"_sigfpe", (PyCFunction)faulthandler_sigfpe, METH_NOARGS,
- PyDoc_STR("_sigfpe(): raise a SIGFPE signal")},
+ PyDoc_STR("_sigfpe($module, /)\n--\n\n"
+ "Raise a SIGFPE signal.")},
#ifdef FAULTHANDLER_STACK_OVERFLOW
{"_stack_overflow", faulthandler_stack_overflow, METH_NOARGS,
- PyDoc_STR("_stack_overflow(): recursive call to raise a stack overflow")},
+ PyDoc_STR("_stack_overflow($module, /)\n--\n\n"
+ "Recursive call to raise a stack overflow.")},
#endif
#ifdef MS_WINDOWS
{"_raise_exception", faulthandler_raise_exception, METH_VARARGS,
- PyDoc_STR("raise_exception(code, flags=0): Call RaiseException(code, flags).")},
+ PyDoc_STR("_raise_exception($module, code, flags=0, /)\n--\n\n"
+ "Call RaiseException(code, flags).")},
#endif
{NULL, NULL} /* sentinel */
};
diff --git a/Modules/itertoolsmodule.c b/Modules/itertoolsmodule.c
index 44b92f8dcffe4d..6ee447ef6a8cd6 100644
--- a/Modules/itertoolsmodule.c
+++ b/Modules/itertoolsmodule.c
@@ -2189,7 +2189,8 @@ chain_setstate(chainobject *lz, PyObject *state)
}
PyDoc_STRVAR(chain_doc,
-"chain(*iterables) --> chain object\n\
+"chain(*iterables)\n\
+--\n\
\n\
Return a chain object whose .__next__() method returns elements from the\n\
first iterable until it is exhausted, then elements from the next\n\
@@ -2528,7 +2529,8 @@ static PyMethodDef product_methods[] = {
};
PyDoc_STRVAR(product_doc,
-"product(*iterables, repeat=1) --> product object\n\
+"product(*iterables, repeat=1)\n\
+--\n\
\n\
Cartesian product of input iterables. Equivalent to nested for-loops.\n\n\
For example, product(A, B) returns the same as: ((x,y) for x in A for y in B).\n\
@@ -4573,7 +4575,8 @@ static PyMethodDef zip_longest_methods[] = {
};
PyDoc_STRVAR(zip_longest_doc,
-"zip_longest(iter1 [,iter2 [...]], [fillvalue=None]) --> zip_longest object\n\
+"zip_longest(*iterables, fillvalue=None)\n\
+--\n\
\n\
Return a zip_longest object whose .__next__() method returns a tuple where\n\
the i-th element comes from the i-th iterable argument. The .__next__()\n\
diff --git a/Modules/signalmodule.c b/Modules/signalmodule.c
index 5804e30af1b426..08fedeacd96d28 100644
--- a/Modules/signalmodule.c
+++ b/Modules/signalmodule.c
@@ -706,35 +706,43 @@ signal_siginterrupt_impl(PyObject *module, int signalnum, int flag)
#endif
-static PyObject*
-signal_set_wakeup_fd(PyObject *self, PyObject *args, PyObject *kwds)
+/*[clinic input]
+signal.set_wakeup_fd
+
+ fd as fdobj: object
+ /
+ *
+ warn_on_full_buffer: bool = True
+
+Sets the fd to be written to (with the signal number) when a signal comes in.
+
+A library can use this to wakeup select or poll.
+The previous fd or -1 is returned.
+
+The fd must be non-blocking.
+[clinic start generated code]*/
+
+static PyObject *
+signal_set_wakeup_fd_impl(PyObject *module, PyObject *fdobj,
+ int warn_on_full_buffer)
+/*[clinic end generated code: output=2280d72dd2a54c4f input=5b545946a28b8339]*/
{
struct _Py_stat_struct status;
- static char *kwlist[] = {
- "", "warn_on_full_buffer", NULL,
- };
- int warn_on_full_buffer = 1;
#ifdef MS_WINDOWS
- PyObject *fdobj;
SOCKET_T sockfd, old_sockfd;
int res;
int res_size = sizeof res;
PyObject *mod;
int is_socket;
- if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|$p:set_wakeup_fd", kwlist,
- &fdobj, &warn_on_full_buffer))
- return NULL;
-
sockfd = PyLong_AsSocket_t(fdobj);
if (sockfd == (SOCKET_T)(-1) && PyErr_Occurred())
return NULL;
#else
- int fd;
-
- if (!PyArg_ParseTupleAndKeywords(args, kwds, "i|$p:set_wakeup_fd", kwlist,
- &fd, &warn_on_full_buffer))
+ int fd = PyLong_AsInt(fdobj);
+ if (fd == -1 && PyErr_Occurred()) {
return NULL;
+ }
#endif
PyThreadState *tstate = _PyThreadState_GET();
@@ -820,15 +828,6 @@ signal_set_wakeup_fd(PyObject *self, PyObject *args, PyObject *kwds)
#endif
}
-PyDoc_STRVAR(set_wakeup_fd_doc,
-"set_wakeup_fd(fd, *, warn_on_full_buffer=True) -> fd\n\
-\n\
-Sets the fd to be written to (with the signal number) when a signal\n\
-comes in. A library can use this to wakeup select or poll.\n\
-The previous fd or -1 is returned.\n\
-\n\
-The fd must be non-blocking.");
-
/* C API for the same, without all the error checking */
int
PySignal_SetWakeupFd(int fd)
@@ -1344,7 +1343,7 @@ static PyMethodDef signal_methods[] = {
SIGNAL_RAISE_SIGNAL_METHODDEF
SIGNAL_STRSIGNAL_METHODDEF
SIGNAL_GETSIGNAL_METHODDEF
- {"set_wakeup_fd", _PyCFunction_CAST(signal_set_wakeup_fd), METH_VARARGS | METH_KEYWORDS, set_wakeup_fd_doc},
+ SIGNAL_SET_WAKEUP_FD_METHODDEF
SIGNAL_SIGINTERRUPT_METHODDEF
SIGNAL_PAUSE_METHODDEF
SIGNAL_PIDFD_SEND_SIGNAL_METHODDEF
diff --git a/Objects/bytearrayobject.c b/Objects/bytearrayobject.c
index 8639496727536a..80679f93cd4c13 100644
--- a/Objects/bytearrayobject.c
+++ b/Objects/bytearrayobject.c
@@ -1121,16 +1121,44 @@ bytearray_dealloc(PyByteArrayObject *self)
#include "stringlib/transmogrify.h"
+/*[clinic input]
+@text_signature "($self, sub[, start[, end]], /)"
+bytearray.find
+
+ sub: object
+ start: slice_index(accept={int, NoneType}, c_default='0') = None
+ Optional start position. Default: start of the bytes.
+ end: slice_index(accept={int, NoneType}, c_default='PY_SSIZE_T_MAX') = None
+ Optional stop position. Default: end of the bytes.
+ /
+
+Return the lowest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start:end].
+
+Return -1 on failure.
+[clinic start generated code]*/
+
static PyObject *
-bytearray_find(PyByteArrayObject *self, PyObject *args)
+bytearray_find_impl(PyByteArrayObject *self, PyObject *sub, Py_ssize_t start,
+ Py_ssize_t end)
+/*[clinic end generated code: output=413e1cab2ae87da0 input=793dfad803e2952f]*/
{
- return _Py_bytes_find(PyByteArray_AS_STRING(self), PyByteArray_GET_SIZE(self), args);
+ return _Py_bytes_find(PyByteArray_AS_STRING(self), PyByteArray_GET_SIZE(self),
+ sub, start, end);
}
+/*[clinic input]
+bytearray.count = bytearray.find
+
+Return the number of non-overlapping occurrences of subsection 'sub' in bytes B[start:end].
+[clinic start generated code]*/
+
static PyObject *
-bytearray_count(PyByteArrayObject *self, PyObject *args)
+bytearray_count_impl(PyByteArrayObject *self, PyObject *sub,
+ Py_ssize_t start, Py_ssize_t end)
+/*[clinic end generated code: output=a21ee2692e4f1233 input=4deb529db38deda8]*/
{
- return _Py_bytes_count(PyByteArray_AS_STRING(self), PyByteArray_GET_SIZE(self), args);
+ return _Py_bytes_count(PyByteArray_AS_STRING(self), PyByteArray_GET_SIZE(self),
+ sub, start, end);
}
/*[clinic input]
@@ -1162,22 +1190,55 @@ bytearray_copy_impl(PyByteArrayObject *self)
PyByteArray_GET_SIZE(self));
}
+/*[clinic input]
+bytearray.index = bytearray.find
+
+Return the lowest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start:end].
+
+Raise ValueError if the subsection is not found.
+[clinic start generated code]*/
+
static PyObject *
-bytearray_index(PyByteArrayObject *self, PyObject *args)
+bytearray_index_impl(PyByteArrayObject *self, PyObject *sub,
+ Py_ssize_t start, Py_ssize_t end)
+/*[clinic end generated code: output=067a1e78efc672a7 input=8cbaf6836dbd2a9a]*/
{
- return _Py_bytes_index(PyByteArray_AS_STRING(self), PyByteArray_GET_SIZE(self), args);
+ return _Py_bytes_index(PyByteArray_AS_STRING(self), PyByteArray_GET_SIZE(self),
+ sub, start, end);
}
+/*[clinic input]
+bytearray.rfind = bytearray.find
+
+Return the highest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start:end].
+
+Return -1 on failure.
+[clinic start generated code]*/
+
static PyObject *
-bytearray_rfind(PyByteArrayObject *self, PyObject *args)
+bytearray_rfind_impl(PyByteArrayObject *self, PyObject *sub,
+ Py_ssize_t start, Py_ssize_t end)
+/*[clinic end generated code: output=51bf886f932b283c input=eaa107468a158423]*/
{
- return _Py_bytes_rfind(PyByteArray_AS_STRING(self), PyByteArray_GET_SIZE(self), args);
+ return _Py_bytes_rfind(PyByteArray_AS_STRING(self), PyByteArray_GET_SIZE(self),
+ sub, start, end);
}
+/*[clinic input]
+bytearray.rindex = bytearray.find
+
+Return the highest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start:end].
+
+Raise ValueError if the subsection is not found.
+[clinic start generated code]*/
+
static PyObject *
-bytearray_rindex(PyByteArrayObject *self, PyObject *args)
+bytearray_rindex_impl(PyByteArrayObject *self, PyObject *sub,
+ Py_ssize_t start, Py_ssize_t end)
+/*[clinic end generated code: output=38e1cf66bafb08b9 input=81cf49d0af4d5bd0]*/
{
- return _Py_bytes_rindex(PyByteArray_AS_STRING(self), PyByteArray_GET_SIZE(self), args);
+ return _Py_bytes_rindex(PyByteArray_AS_STRING(self), PyByteArray_GET_SIZE(self),
+ sub, start, end);
}
static int
@@ -2236,17 +2297,15 @@ bytearray_methods[] = {
STRINGLIB_CENTER_METHODDEF
BYTEARRAY_CLEAR_METHODDEF
BYTEARRAY_COPY_METHODDEF
- {"count", (PyCFunction)bytearray_count, METH_VARARGS,
- _Py_count__doc__},
+ BYTEARRAY_COUNT_METHODDEF
BYTEARRAY_DECODE_METHODDEF
BYTEARRAY_ENDSWITH_METHODDEF
STRINGLIB_EXPANDTABS_METHODDEF
BYTEARRAY_EXTEND_METHODDEF
- {"find", (PyCFunction)bytearray_find, METH_VARARGS,
- _Py_find__doc__},
+ BYTEARRAY_FIND_METHODDEF
BYTEARRAY_FROMHEX_METHODDEF
BYTEARRAY_HEX_METHODDEF
- {"index", (PyCFunction)bytearray_index, METH_VARARGS, _Py_index__doc__},
+ BYTEARRAY_INDEX_METHODDEF
BYTEARRAY_INSERT_METHODDEF
{"isalnum", stringlib_isalnum, METH_NOARGS,
_Py_isalnum__doc__},
@@ -2276,8 +2335,8 @@ bytearray_methods[] = {
BYTEARRAY_REMOVEPREFIX_METHODDEF
BYTEARRAY_REMOVESUFFIX_METHODDEF
BYTEARRAY_REVERSE_METHODDEF
- {"rfind", (PyCFunction)bytearray_rfind, METH_VARARGS, _Py_rfind__doc__},
- {"rindex", (PyCFunction)bytearray_rindex, METH_VARARGS, _Py_rindex__doc__},
+ BYTEARRAY_RFIND_METHODDEF
+ BYTEARRAY_RINDEX_METHODDEF
STRINGLIB_RJUST_METHODDEF
BYTEARRAY_RPARTITION_METHODDEF
BYTEARRAY_RSPLIT_METHODDEF
diff --git a/Objects/bytes_methods.c b/Objects/bytes_methods.c
index 21b6668171bf61..981aa57164385e 100644
--- a/Objects/bytes_methods.c
+++ b/Objects/bytes_methods.c
@@ -453,31 +453,21 @@ stringlib_parse_args_finds().
*/
Py_LOCAL_INLINE(int)
-parse_args_finds_byte(const char *function_name, PyObject *args,
- PyObject **subobj, char *byte,
- Py_ssize_t *start, Py_ssize_t *end)
+parse_args_finds_byte(const char *function_name, PyObject **subobj, char *byte)
{
- PyObject *tmp_subobj;
- Py_ssize_t ival;
-
- if(!stringlib_parse_args_finds(function_name, args, &tmp_subobj,
- start, end))
- return 0;
-
- if (PyObject_CheckBuffer(tmp_subobj)) {
- *subobj = tmp_subobj;
+ if (PyObject_CheckBuffer(*subobj)) {
return 1;
}
- if (!_PyIndex_Check(tmp_subobj)) {
+ if (!_PyIndex_Check(*subobj)) {
PyErr_Format(PyExc_TypeError,
"argument should be integer or bytes-like object, "
"not '%.200s'",
- Py_TYPE(tmp_subobj)->tp_name);
+ Py_TYPE(*subobj)->tp_name);
return 0;
}
- ival = PyNumber_AsSsize_t(tmp_subobj, NULL);
+ Py_ssize_t ival = PyNumber_AsSsize_t(*subobj, NULL);
if (ival == -1 && PyErr_Occurred()) {
return 0;
}
@@ -508,19 +498,19 @@ parse_args_finds_byte(const char *function_name, PyObject *args,
Py_LOCAL_INLINE(Py_ssize_t)
find_internal(const char *str, Py_ssize_t len,
- const char *function_name, PyObject *args, int dir)
+ const char *function_name, PyObject *subobj,
+ Py_ssize_t start, Py_ssize_t end,
+ int dir)
{
- PyObject *subobj;
char byte;
Py_buffer subbuf;
const char *sub;
Py_ssize_t sub_len;
- Py_ssize_t start = 0, end = PY_SSIZE_T_MAX;
Py_ssize_t res;
- if (!parse_args_finds_byte(function_name, args,
- &subobj, &byte, &start, &end))
+ if (!parse_args_finds_byte(function_name, &subobj, &byte)) {
return -2;
+ }
if (subobj) {
if (PyObject_GetBuffer(subobj, &subbuf, PyBUF_SIMPLE) != 0)
@@ -566,37 +556,21 @@ find_internal(const char *str, Py_ssize_t len,
return res;
}
-PyDoc_STRVAR_shared(_Py_find__doc__,
-"B.find(sub[, start[, end]]) -> int\n\
-\n\
-Return the lowest index in B where subsection sub is found,\n\
-such that sub is contained within B[start,end]. Optional\n\
-arguments start and end are interpreted as in slice notation.\n\
-\n\
-Return -1 on failure.");
-
PyObject *
-_Py_bytes_find(const char *str, Py_ssize_t len, PyObject *args)
+_Py_bytes_find(const char *str, Py_ssize_t len, PyObject *sub,
+ Py_ssize_t start, Py_ssize_t end)
{
- Py_ssize_t result = find_internal(str, len, "find", args, +1);
+ Py_ssize_t result = find_internal(str, len, "find", sub, start, end, +1);
if (result == -2)
return NULL;
return PyLong_FromSsize_t(result);
}
-PyDoc_STRVAR_shared(_Py_index__doc__,
-"B.index(sub[, start[, end]]) -> int\n\
-\n\
-Return the lowest index in B where subsection sub is found,\n\
-such that sub is contained within B[start,end]. Optional\n\
-arguments start and end are interpreted as in slice notation.\n\
-\n\
-Raises ValueError when the subsection is not found.");
-
PyObject *
-_Py_bytes_index(const char *str, Py_ssize_t len, PyObject *args)
+_Py_bytes_index(const char *str, Py_ssize_t len, PyObject *sub,
+ Py_ssize_t start, Py_ssize_t end)
{
- Py_ssize_t result = find_internal(str, len, "index", args, +1);
+ Py_ssize_t result = find_internal(str, len, "index", sub, start, end, +1);
if (result == -2)
return NULL;
if (result == -1) {
@@ -607,37 +581,21 @@ _Py_bytes_index(const char *str, Py_ssize_t len, PyObject *args)
return PyLong_FromSsize_t(result);
}
-PyDoc_STRVAR_shared(_Py_rfind__doc__,
-"B.rfind(sub[, start[, end]]) -> int\n\
-\n\
-Return the highest index in B where subsection sub is found,\n\
-such that sub is contained within B[start,end]. Optional\n\
-arguments start and end are interpreted as in slice notation.\n\
-\n\
-Return -1 on failure.");
-
PyObject *
-_Py_bytes_rfind(const char *str, Py_ssize_t len, PyObject *args)
+_Py_bytes_rfind(const char *str, Py_ssize_t len, PyObject *sub,
+ Py_ssize_t start, Py_ssize_t end)
{
- Py_ssize_t result = find_internal(str, len, "rfind", args, -1);
+ Py_ssize_t result = find_internal(str, len, "rfind", sub, start, end, -1);
if (result == -2)
return NULL;
return PyLong_FromSsize_t(result);
}
-PyDoc_STRVAR_shared(_Py_rindex__doc__,
-"B.rindex(sub[, start[, end]]) -> int\n\
-\n\
-Return the highest index in B where subsection sub is found,\n\
-such that sub is contained within B[start,end]. Optional\n\
-arguments start and end are interpreted as in slice notation.\n\
-\n\
-Raise ValueError when the subsection is not found.");
-
PyObject *
-_Py_bytes_rindex(const char *str, Py_ssize_t len, PyObject *args)
+_Py_bytes_rindex(const char *str, Py_ssize_t len, PyObject *sub,
+ Py_ssize_t start, Py_ssize_t end)
{
- Py_ssize_t result = find_internal(str, len, "rindex", args, -1);
+ Py_ssize_t result = find_internal(str, len, "rindex", sub, start, end, -1);
if (result == -2)
return NULL;
if (result == -1) {
@@ -648,28 +606,20 @@ _Py_bytes_rindex(const char *str, Py_ssize_t len, PyObject *args)
return PyLong_FromSsize_t(result);
}
-PyDoc_STRVAR_shared(_Py_count__doc__,
-"B.count(sub[, start[, end]]) -> int\n\
-\n\
-Return the number of non-overlapping occurrences of subsection sub in\n\
-bytes B[start:end]. Optional arguments start and end are interpreted\n\
-as in slice notation.");
-
PyObject *
-_Py_bytes_count(const char *str, Py_ssize_t len, PyObject *args)
+_Py_bytes_count(const char *str, Py_ssize_t len, PyObject *sub_obj,
+ Py_ssize_t start, Py_ssize_t end)
{
- PyObject *sub_obj;
const char *sub;
Py_ssize_t sub_len;
char byte;
- Py_ssize_t start = 0, end = PY_SSIZE_T_MAX;
Py_buffer vsub;
PyObject *count_obj;
- if (!parse_args_finds_byte("count", args,
- &sub_obj, &byte, &start, &end))
+ if (!parse_args_finds_byte("count", &sub_obj, &byte)) {
return NULL;
+ }
if (sub_obj) {
if (PyObject_GetBuffer(sub_obj, &vsub, PyBUF_SIMPLE) != 0)
diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c
index d7b0c6b7b01aa9..cd799a926ae63c 100644
--- a/Objects/bytesobject.c
+++ b/Objects/bytesobject.c
@@ -1863,30 +1863,80 @@ _PyBytes_Join(PyObject *sep, PyObject *x)
return bytes_join((PyBytesObject*)sep, x);
}
+/*[clinic input]
+@text_signature "($self, sub[, start[, end]], /)"
+bytes.find
+
+ sub: object
+ start: slice_index(accept={int, NoneType}, c_default='0') = None
+ Optional start position. Default: start of the bytes.
+ end: slice_index(accept={int, NoneType}, c_default='PY_SSIZE_T_MAX') = None
+ Optional stop position. Default: end of the bytes.
+ /
+
+Return the lowest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start,end].
+
+Return -1 on failure.
+[clinic start generated code]*/
+
static PyObject *
-bytes_find(PyBytesObject *self, PyObject *args)
+bytes_find_impl(PyBytesObject *self, PyObject *sub, Py_ssize_t start,
+ Py_ssize_t end)
+/*[clinic end generated code: output=d5961a1c77b472a1 input=3171e62a8ae7f240]*/
{
- return _Py_bytes_find(PyBytes_AS_STRING(self), PyBytes_GET_SIZE(self), args);
+ return _Py_bytes_find(PyBytes_AS_STRING(self), PyBytes_GET_SIZE(self),
+ sub, start, end);
}
+/*[clinic input]
+bytes.index = bytes.find
+
+Return the lowest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start,end].
+
+Raise ValueError if the subsection is not found.
+[clinic start generated code]*/
+
static PyObject *
-bytes_index(PyBytesObject *self, PyObject *args)
+bytes_index_impl(PyBytesObject *self, PyObject *sub, Py_ssize_t start,
+ Py_ssize_t end)
+/*[clinic end generated code: output=0da25cc74683ba42 input=aa34ad71ba0bafe3]*/
{
- return _Py_bytes_index(PyBytes_AS_STRING(self), PyBytes_GET_SIZE(self), args);
+ return _Py_bytes_index(PyBytes_AS_STRING(self), PyBytes_GET_SIZE(self),
+ sub, start, end);
}
+/*[clinic input]
+bytes.rfind = bytes.find
+
+Return the highest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start,end].
+
+Return -1 on failure.
+[clinic start generated code]*/
static PyObject *
-bytes_rfind(PyBytesObject *self, PyObject *args)
+bytes_rfind_impl(PyBytesObject *self, PyObject *sub, Py_ssize_t start,
+ Py_ssize_t end)
+/*[clinic end generated code: output=51b60fa4ad011c09 input=864c3e7f3010b33c]*/
{
- return _Py_bytes_rfind(PyBytes_AS_STRING(self), PyBytes_GET_SIZE(self), args);
+ return _Py_bytes_rfind(PyBytes_AS_STRING(self), PyBytes_GET_SIZE(self),
+ sub, start, end);
}
+/*[clinic input]
+bytes.rindex = bytes.find
+
+Return the highest index in B where subsection 'sub' is found, such that 'sub' is contained within B[start,end].
+
+Raise ValueError if the subsection is not found.
+[clinic start generated code]*/
static PyObject *
-bytes_rindex(PyBytesObject *self, PyObject *args)
+bytes_rindex_impl(PyBytesObject *self, PyObject *sub, Py_ssize_t start,
+ Py_ssize_t end)
+/*[clinic end generated code: output=42bf674e0a0aabf6 input=21051fc5cfeacf2c]*/
{
- return _Py_bytes_rindex(PyBytes_AS_STRING(self), PyBytes_GET_SIZE(self), args);
+ return _Py_bytes_rindex(PyBytes_AS_STRING(self), PyBytes_GET_SIZE(self),
+ sub, start, end);
}
@@ -2023,10 +2073,19 @@ bytes_rstrip_impl(PyBytesObject *self, PyObject *bytes)
}
+/*[clinic input]
+bytes.count = bytes.find
+
+Return the number of non-overlapping occurrences of subsection 'sub' in bytes B[start:end].
+[clinic start generated code]*/
+
static PyObject *
-bytes_count(PyBytesObject *self, PyObject *args)
+bytes_count_impl(PyBytesObject *self, PyObject *sub, Py_ssize_t start,
+ Py_ssize_t end)
+/*[clinic end generated code: output=9848140b9be17d0f input=b6e4a5ed515e1e59]*/
{
- return _Py_bytes_count(PyBytes_AS_STRING(self), PyBytes_GET_SIZE(self), args);
+ return _Py_bytes_count(PyBytes_AS_STRING(self), PyBytes_GET_SIZE(self),
+ sub, start, end);
}
@@ -2524,16 +2583,14 @@ bytes_methods[] = {
{"capitalize", stringlib_capitalize, METH_NOARGS,
_Py_capitalize__doc__},
STRINGLIB_CENTER_METHODDEF
- {"count", (PyCFunction)bytes_count, METH_VARARGS,
- _Py_count__doc__},
+ BYTES_COUNT_METHODDEF
BYTES_DECODE_METHODDEF
BYTES_ENDSWITH_METHODDEF
STRINGLIB_EXPANDTABS_METHODDEF
- {"find", (PyCFunction)bytes_find, METH_VARARGS,
- _Py_find__doc__},
+ BYTES_FIND_METHODDEF
BYTES_FROMHEX_METHODDEF
BYTES_HEX_METHODDEF
- {"index", (PyCFunction)bytes_index, METH_VARARGS, _Py_index__doc__},
+ BYTES_INDEX_METHODDEF
{"isalnum", stringlib_isalnum, METH_NOARGS,
_Py_isalnum__doc__},
{"isalpha", stringlib_isalpha, METH_NOARGS,
@@ -2559,8 +2616,8 @@ bytes_methods[] = {
BYTES_REPLACE_METHODDEF
BYTES_REMOVEPREFIX_METHODDEF
BYTES_REMOVESUFFIX_METHODDEF
- {"rfind", (PyCFunction)bytes_rfind, METH_VARARGS, _Py_rfind__doc__},
- {"rindex", (PyCFunction)bytes_rindex, METH_VARARGS, _Py_rindex__doc__},
+ BYTES_RFIND_METHODDEF
+ BYTES_RINDEX_METHODDEF
STRINGLIB_RJUST_METHODDEF
BYTES_RPARTITION_METHODDEF
BYTES_RSPLIT_METHODDEF
@@ -3118,7 +3175,7 @@ _PyBytes_Resize(PyObject **pv, Py_ssize_t newsize)
PyObject_Realloc(v, PyBytesObject_SIZE + newsize);
if (*pv == NULL) {
#ifdef Py_REF_DEBUG
- _Py_DecRefTotal(_PyInterpreterState_GET());
+ _Py_DecRefTotal(_PyThreadState_GET());
#endif
PyObject_Free(v);
PyErr_NoMemory();
diff --git a/Objects/clinic/bytearrayobject.c.h b/Objects/clinic/bytearrayobject.c.h
index dabc2b16c94fce..c748c53e1c0a75 100644
--- a/Objects/clinic/bytearrayobject.c.h
+++ b/Objects/clinic/bytearrayobject.c.h
@@ -101,6 +101,106 @@ bytearray___init__(PyObject *self, PyObject *args, PyObject *kwargs)
return return_value;
}
+PyDoc_STRVAR(bytearray_find__doc__,
+"find($self, sub[, start[, end]], /)\n"
+"--\n"
+"\n"
+"Return the lowest index in B where subsection \'sub\' is found, such that \'sub\' is contained within B[start:end].\n"
+"\n"
+" start\n"
+" Optional start position. Default: start of the bytes.\n"
+" end\n"
+" Optional stop position. Default: end of the bytes.\n"
+"\n"
+"Return -1 on failure.");
+
+#define BYTEARRAY_FIND_METHODDEF \
+ {"find", _PyCFunction_CAST(bytearray_find), METH_FASTCALL, bytearray_find__doc__},
+
+static PyObject *
+bytearray_find_impl(PyByteArrayObject *self, PyObject *sub, Py_ssize_t start,
+ Py_ssize_t end);
+
+static PyObject *
+bytearray_find(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs)
+{
+ PyObject *return_value = NULL;
+ PyObject *sub;
+ Py_ssize_t start = 0;
+ Py_ssize_t end = PY_SSIZE_T_MAX;
+
+ if (!_PyArg_CheckPositional("find", nargs, 1, 3)) {
+ goto exit;
+ }
+ sub = args[0];
+ if (nargs < 2) {
+ goto skip_optional;
+ }
+ if (!_PyEval_SliceIndex(args[1], &start)) {
+ goto exit;
+ }
+ if (nargs < 3) {
+ goto skip_optional;
+ }
+ if (!_PyEval_SliceIndex(args[2], &end)) {
+ goto exit;
+ }
+skip_optional:
+ return_value = bytearray_find_impl(self, sub, start, end);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(bytearray_count__doc__,
+"count($self, sub[, start[, end]], /)\n"
+"--\n"
+"\n"
+"Return the number of non-overlapping occurrences of subsection \'sub\' in bytes B[start:end].\n"
+"\n"
+" start\n"
+" Optional start position. Default: start of the bytes.\n"
+" end\n"
+" Optional stop position. Default: end of the bytes.");
+
+#define BYTEARRAY_COUNT_METHODDEF \
+ {"count", _PyCFunction_CAST(bytearray_count), METH_FASTCALL, bytearray_count__doc__},
+
+static PyObject *
+bytearray_count_impl(PyByteArrayObject *self, PyObject *sub,
+ Py_ssize_t start, Py_ssize_t end);
+
+static PyObject *
+bytearray_count(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs)
+{
+ PyObject *return_value = NULL;
+ PyObject *sub;
+ Py_ssize_t start = 0;
+ Py_ssize_t end = PY_SSIZE_T_MAX;
+
+ if (!_PyArg_CheckPositional("count", nargs, 1, 3)) {
+ goto exit;
+ }
+ sub = args[0];
+ if (nargs < 2) {
+ goto skip_optional;
+ }
+ if (!_PyEval_SliceIndex(args[1], &start)) {
+ goto exit;
+ }
+ if (nargs < 3) {
+ goto skip_optional;
+ }
+ if (!_PyEval_SliceIndex(args[2], &end)) {
+ goto exit;
+ }
+skip_optional:
+ return_value = bytearray_count_impl(self, sub, start, end);
+
+exit:
+ return return_value;
+}
+
PyDoc_STRVAR(bytearray_clear__doc__,
"clear($self, /)\n"
"--\n"
@@ -137,6 +237,159 @@ bytearray_copy(PyByteArrayObject *self, PyObject *Py_UNUSED(ignored))
return bytearray_copy_impl(self);
}
+PyDoc_STRVAR(bytearray_index__doc__,
+"index($self, sub[, start[, end]], /)\n"
+"--\n"
+"\n"
+"Return the lowest index in B where subsection \'sub\' is found, such that \'sub\' is contained within B[start:end].\n"
+"\n"
+" start\n"
+" Optional start position. Default: start of the bytes.\n"
+" end\n"
+" Optional stop position. Default: end of the bytes.\n"
+"\n"
+"Raise ValueError if the subsection is not found.");
+
+#define BYTEARRAY_INDEX_METHODDEF \
+ {"index", _PyCFunction_CAST(bytearray_index), METH_FASTCALL, bytearray_index__doc__},
+
+static PyObject *
+bytearray_index_impl(PyByteArrayObject *self, PyObject *sub,
+ Py_ssize_t start, Py_ssize_t end);
+
+static PyObject *
+bytearray_index(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs)
+{
+ PyObject *return_value = NULL;
+ PyObject *sub;
+ Py_ssize_t start = 0;
+ Py_ssize_t end = PY_SSIZE_T_MAX;
+
+ if (!_PyArg_CheckPositional("index", nargs, 1, 3)) {
+ goto exit;
+ }
+ sub = args[0];
+ if (nargs < 2) {
+ goto skip_optional;
+ }
+ if (!_PyEval_SliceIndex(args[1], &start)) {
+ goto exit;
+ }
+ if (nargs < 3) {
+ goto skip_optional;
+ }
+ if (!_PyEval_SliceIndex(args[2], &end)) {
+ goto exit;
+ }
+skip_optional:
+ return_value = bytearray_index_impl(self, sub, start, end);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(bytearray_rfind__doc__,
+"rfind($self, sub[, start[, end]], /)\n"
+"--\n"
+"\n"
+"Return the highest index in B where subsection \'sub\' is found, such that \'sub\' is contained within B[start:end].\n"
+"\n"
+" start\n"
+" Optional start position. Default: start of the bytes.\n"
+" end\n"
+" Optional stop position. Default: end of the bytes.\n"
+"\n"
+"Return -1 on failure.");
+
+#define BYTEARRAY_RFIND_METHODDEF \
+ {"rfind", _PyCFunction_CAST(bytearray_rfind), METH_FASTCALL, bytearray_rfind__doc__},
+
+static PyObject *
+bytearray_rfind_impl(PyByteArrayObject *self, PyObject *sub,
+ Py_ssize_t start, Py_ssize_t end);
+
+static PyObject *
+bytearray_rfind(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs)
+{
+ PyObject *return_value = NULL;
+ PyObject *sub;
+ Py_ssize_t start = 0;
+ Py_ssize_t end = PY_SSIZE_T_MAX;
+
+ if (!_PyArg_CheckPositional("rfind", nargs, 1, 3)) {
+ goto exit;
+ }
+ sub = args[0];
+ if (nargs < 2) {
+ goto skip_optional;
+ }
+ if (!_PyEval_SliceIndex(args[1], &start)) {
+ goto exit;
+ }
+ if (nargs < 3) {
+ goto skip_optional;
+ }
+ if (!_PyEval_SliceIndex(args[2], &end)) {
+ goto exit;
+ }
+skip_optional:
+ return_value = bytearray_rfind_impl(self, sub, start, end);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(bytearray_rindex__doc__,
+"rindex($self, sub[, start[, end]], /)\n"
+"--\n"
+"\n"
+"Return the highest index in B where subsection \'sub\' is found, such that \'sub\' is contained within B[start:end].\n"
+"\n"
+" start\n"
+" Optional start position. Default: start of the bytes.\n"
+" end\n"
+" Optional stop position. Default: end of the bytes.\n"
+"\n"
+"Raise ValueError if the subsection is not found.");
+
+#define BYTEARRAY_RINDEX_METHODDEF \
+ {"rindex", _PyCFunction_CAST(bytearray_rindex), METH_FASTCALL, bytearray_rindex__doc__},
+
+static PyObject *
+bytearray_rindex_impl(PyByteArrayObject *self, PyObject *sub,
+ Py_ssize_t start, Py_ssize_t end);
+
+static PyObject *
+bytearray_rindex(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs)
+{
+ PyObject *return_value = NULL;
+ PyObject *sub;
+ Py_ssize_t start = 0;
+ Py_ssize_t end = PY_SSIZE_T_MAX;
+
+ if (!_PyArg_CheckPositional("rindex", nargs, 1, 3)) {
+ goto exit;
+ }
+ sub = args[0];
+ if (nargs < 2) {
+ goto skip_optional;
+ }
+ if (!_PyEval_SliceIndex(args[1], &start)) {
+ goto exit;
+ }
+ if (nargs < 3) {
+ goto skip_optional;
+ }
+ if (!_PyEval_SliceIndex(args[2], &end)) {
+ goto exit;
+ }
+skip_optional:
+ return_value = bytearray_rindex_impl(self, sub, start, end);
+
+exit:
+ return return_value;
+}
+
PyDoc_STRVAR(bytearray_startswith__doc__,
"startswith($self, prefix[, start[, end]], /)\n"
"--\n"
@@ -1363,4 +1616,4 @@ bytearray_sizeof(PyByteArrayObject *self, PyObject *Py_UNUSED(ignored))
{
return bytearray_sizeof_impl(self);
}
-/*[clinic end generated code: output=0147908e97ebe882 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=5f861b02e3fa278b input=a9049054013a1b77]*/
diff --git a/Objects/clinic/bytesobject.c.h b/Objects/clinic/bytesobject.c.h
index 05e182778aece1..0b4b37501735c1 100644
--- a/Objects/clinic/bytesobject.c.h
+++ b/Objects/clinic/bytesobject.c.h
@@ -294,6 +294,210 @@ PyDoc_STRVAR(bytes_join__doc__,
#define BYTES_JOIN_METHODDEF \
{"join", (PyCFunction)bytes_join, METH_O, bytes_join__doc__},
+PyDoc_STRVAR(bytes_find__doc__,
+"find($self, sub[, start[, end]], /)\n"
+"--\n"
+"\n"
+"Return the lowest index in B where subsection \'sub\' is found, such that \'sub\' is contained within B[start,end].\n"
+"\n"
+" start\n"
+" Optional start position. Default: start of the bytes.\n"
+" end\n"
+" Optional stop position. Default: end of the bytes.\n"
+"\n"
+"Return -1 on failure.");
+
+#define BYTES_FIND_METHODDEF \
+ {"find", _PyCFunction_CAST(bytes_find), METH_FASTCALL, bytes_find__doc__},
+
+static PyObject *
+bytes_find_impl(PyBytesObject *self, PyObject *sub, Py_ssize_t start,
+ Py_ssize_t end);
+
+static PyObject *
+bytes_find(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs)
+{
+ PyObject *return_value = NULL;
+ PyObject *sub;
+ Py_ssize_t start = 0;
+ Py_ssize_t end = PY_SSIZE_T_MAX;
+
+ if (!_PyArg_CheckPositional("find", nargs, 1, 3)) {
+ goto exit;
+ }
+ sub = args[0];
+ if (nargs < 2) {
+ goto skip_optional;
+ }
+ if (!_PyEval_SliceIndex(args[1], &start)) {
+ goto exit;
+ }
+ if (nargs < 3) {
+ goto skip_optional;
+ }
+ if (!_PyEval_SliceIndex(args[2], &end)) {
+ goto exit;
+ }
+skip_optional:
+ return_value = bytes_find_impl(self, sub, start, end);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(bytes_index__doc__,
+"index($self, sub[, start[, end]], /)\n"
+"--\n"
+"\n"
+"Return the lowest index in B where subsection \'sub\' is found, such that \'sub\' is contained within B[start,end].\n"
+"\n"
+" start\n"
+" Optional start position. Default: start of the bytes.\n"
+" end\n"
+" Optional stop position. Default: end of the bytes.\n"
+"\n"
+"Raise ValueError if the subsection is not found.");
+
+#define BYTES_INDEX_METHODDEF \
+ {"index", _PyCFunction_CAST(bytes_index), METH_FASTCALL, bytes_index__doc__},
+
+static PyObject *
+bytes_index_impl(PyBytesObject *self, PyObject *sub, Py_ssize_t start,
+ Py_ssize_t end);
+
+static PyObject *
+bytes_index(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs)
+{
+ PyObject *return_value = NULL;
+ PyObject *sub;
+ Py_ssize_t start = 0;
+ Py_ssize_t end = PY_SSIZE_T_MAX;
+
+ if (!_PyArg_CheckPositional("index", nargs, 1, 3)) {
+ goto exit;
+ }
+ sub = args[0];
+ if (nargs < 2) {
+ goto skip_optional;
+ }
+ if (!_PyEval_SliceIndex(args[1], &start)) {
+ goto exit;
+ }
+ if (nargs < 3) {
+ goto skip_optional;
+ }
+ if (!_PyEval_SliceIndex(args[2], &end)) {
+ goto exit;
+ }
+skip_optional:
+ return_value = bytes_index_impl(self, sub, start, end);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(bytes_rfind__doc__,
+"rfind($self, sub[, start[, end]], /)\n"
+"--\n"
+"\n"
+"Return the highest index in B where subsection \'sub\' is found, such that \'sub\' is contained within B[start,end].\n"
+"\n"
+" start\n"
+" Optional start position. Default: start of the bytes.\n"
+" end\n"
+" Optional stop position. Default: end of the bytes.\n"
+"\n"
+"Return -1 on failure.");
+
+#define BYTES_RFIND_METHODDEF \
+ {"rfind", _PyCFunction_CAST(bytes_rfind), METH_FASTCALL, bytes_rfind__doc__},
+
+static PyObject *
+bytes_rfind_impl(PyBytesObject *self, PyObject *sub, Py_ssize_t start,
+ Py_ssize_t end);
+
+static PyObject *
+bytes_rfind(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs)
+{
+ PyObject *return_value = NULL;
+ PyObject *sub;
+ Py_ssize_t start = 0;
+ Py_ssize_t end = PY_SSIZE_T_MAX;
+
+ if (!_PyArg_CheckPositional("rfind", nargs, 1, 3)) {
+ goto exit;
+ }
+ sub = args[0];
+ if (nargs < 2) {
+ goto skip_optional;
+ }
+ if (!_PyEval_SliceIndex(args[1], &start)) {
+ goto exit;
+ }
+ if (nargs < 3) {
+ goto skip_optional;
+ }
+ if (!_PyEval_SliceIndex(args[2], &end)) {
+ goto exit;
+ }
+skip_optional:
+ return_value = bytes_rfind_impl(self, sub, start, end);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(bytes_rindex__doc__,
+"rindex($self, sub[, start[, end]], /)\n"
+"--\n"
+"\n"
+"Return the highest index in B where subsection \'sub\' is found, such that \'sub\' is contained within B[start,end].\n"
+"\n"
+" start\n"
+" Optional start position. Default: start of the bytes.\n"
+" end\n"
+" Optional stop position. Default: end of the bytes.\n"
+"\n"
+"Raise ValueError if the subsection is not found.");
+
+#define BYTES_RINDEX_METHODDEF \
+ {"rindex", _PyCFunction_CAST(bytes_rindex), METH_FASTCALL, bytes_rindex__doc__},
+
+static PyObject *
+bytes_rindex_impl(PyBytesObject *self, PyObject *sub, Py_ssize_t start,
+ Py_ssize_t end);
+
+static PyObject *
+bytes_rindex(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs)
+{
+ PyObject *return_value = NULL;
+ PyObject *sub;
+ Py_ssize_t start = 0;
+ Py_ssize_t end = PY_SSIZE_T_MAX;
+
+ if (!_PyArg_CheckPositional("rindex", nargs, 1, 3)) {
+ goto exit;
+ }
+ sub = args[0];
+ if (nargs < 2) {
+ goto skip_optional;
+ }
+ if (!_PyEval_SliceIndex(args[1], &start)) {
+ goto exit;
+ }
+ if (nargs < 3) {
+ goto skip_optional;
+ }
+ if (!_PyEval_SliceIndex(args[2], &end)) {
+ goto exit;
+ }
+skip_optional:
+ return_value = bytes_rindex_impl(self, sub, start, end);
+
+exit:
+ return return_value;
+}
+
PyDoc_STRVAR(bytes_strip__doc__,
"strip($self, bytes=None, /)\n"
"--\n"
@@ -396,6 +600,55 @@ bytes_rstrip(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs)
return return_value;
}
+PyDoc_STRVAR(bytes_count__doc__,
+"count($self, sub[, start[, end]], /)\n"
+"--\n"
+"\n"
+"Return the number of non-overlapping occurrences of subsection \'sub\' in bytes B[start:end].\n"
+"\n"
+" start\n"
+" Optional start position. Default: start of the bytes.\n"
+" end\n"
+" Optional stop position. Default: end of the bytes.");
+
+#define BYTES_COUNT_METHODDEF \
+ {"count", _PyCFunction_CAST(bytes_count), METH_FASTCALL, bytes_count__doc__},
+
+static PyObject *
+bytes_count_impl(PyBytesObject *self, PyObject *sub, Py_ssize_t start,
+ Py_ssize_t end);
+
+static PyObject *
+bytes_count(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs)
+{
+ PyObject *return_value = NULL;
+ PyObject *sub;
+ Py_ssize_t start = 0;
+ Py_ssize_t end = PY_SSIZE_T_MAX;
+
+ if (!_PyArg_CheckPositional("count", nargs, 1, 3)) {
+ goto exit;
+ }
+ sub = args[0];
+ if (nargs < 2) {
+ goto skip_optional;
+ }
+ if (!_PyEval_SliceIndex(args[1], &start)) {
+ goto exit;
+ }
+ if (nargs < 3) {
+ goto skip_optional;
+ }
+ if (!_PyEval_SliceIndex(args[2], &end)) {
+ goto exit;
+ }
+skip_optional:
+ return_value = bytes_count_impl(self, sub, start, end);
+
+exit:
+ return return_value;
+}
+
PyDoc_STRVAR(bytes_translate__doc__,
"translate($self, table, /, delete=b\'\')\n"
"--\n"
@@ -1131,4 +1384,4 @@ bytes_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
exit:
return return_value;
}
-/*[clinic end generated code: output=f2b10ccd2e3155c3 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=d6801c6001e57f91 input=a9049054013a1b77]*/
diff --git a/Objects/clinic/descrobject.c.h b/Objects/clinic/descrobject.c.h
index 02fb440d9c83af..d79be80d3ec165 100644
--- a/Objects/clinic/descrobject.c.h
+++ b/Objects/clinic/descrobject.c.h
@@ -8,6 +8,12 @@ preserve
#endif
#include "pycore_modsupport.h" // _PyArg_UnpackKeywords()
+PyDoc_STRVAR(mappingproxy_new__doc__,
+"mappingproxy(mapping)\n"
+"--\n"
+"\n"
+"Read-only proxy of a mapping.");
+
static PyObject *
mappingproxy_new_impl(PyTypeObject *type, PyObject *mapping);
@@ -167,4 +173,4 @@ property_init(PyObject *self, PyObject *args, PyObject *kwargs)
exit:
return return_value;
}
-/*[clinic end generated code: output=a4664ccf3da10f5a input=a9049054013a1b77]*/
+/*[clinic end generated code: output=050e331316a04207 input=a9049054013a1b77]*/
diff --git a/Objects/clinic/unicodeobject.c.h b/Objects/clinic/unicodeobject.c.h
index 01c40b90d9b4b8..78e14b0021d006 100644
--- a/Objects/clinic/unicodeobject.c.h
+++ b/Objects/clinic/unicodeobject.c.h
@@ -357,7 +357,7 @@ unicode_expandtabs(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyOb
}
PyDoc_STRVAR(unicode_find__doc__,
-"find($self, sub, start=None, end=None, /)\n"
+"find($self, sub[, start[, end]], /)\n"
"--\n"
"\n"
"Return the lowest index in S where substring sub is found, such that sub is contained within S[start:end].\n"
@@ -413,7 +413,7 @@ unicode_find(PyObject *str, PyObject *const *args, Py_ssize_t nargs)
}
PyDoc_STRVAR(unicode_index__doc__,
-"index($self, sub, start=None, end=None, /)\n"
+"index($self, sub[, start[, end]], /)\n"
"--\n"
"\n"
"Return the lowest index in S where substring sub is found, such that sub is contained within S[start:end].\n"
@@ -1060,7 +1060,7 @@ unicode_removesuffix(PyObject *self, PyObject *arg)
}
PyDoc_STRVAR(unicode_rfind__doc__,
-"rfind($self, sub, start=None, end=None, /)\n"
+"rfind($self, sub[, start[, end]], /)\n"
"--\n"
"\n"
"Return the highest index in S where substring sub is found, such that sub is contained within S[start:end].\n"
@@ -1116,7 +1116,7 @@ unicode_rfind(PyObject *str, PyObject *const *args, Py_ssize_t nargs)
}
PyDoc_STRVAR(unicode_rindex__doc__,
-"rindex($self, sub, start=None, end=None, /)\n"
+"rindex($self, sub[, start[, end]], /)\n"
"--\n"
"\n"
"Return the highest index in S where substring sub is found, such that sub is contained within S[start:end].\n"
@@ -1888,4 +1888,4 @@ unicode_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
exit:
return return_value;
}
-/*[clinic end generated code: output=3aa49013ffa3fa93 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=9fee62bd337f809b input=a9049054013a1b77]*/
diff --git a/Objects/codeobject.c b/Objects/codeobject.c
index f14ff73394b168..014632962bfcf3 100644
--- a/Objects/codeobject.c
+++ b/Objects/codeobject.c
@@ -6,6 +6,7 @@
#include "pycore_code.h" // _PyCodeConstructor
#include "pycore_frame.h" // FRAME_SPECIALS_SIZE
#include "pycore_interp.h" // PyInterpreterState.co_extra_freefuncs
+#include "pycore_object.h" // _PyObject_SetDeferredRefcount
#include "pycore_opcode_metadata.h" // _PyOpcode_Deopt, _PyOpcode_Caches
#include "pycore_opcode_utils.h" // RESUME_AT_FUNC_START
#include "pycore_pystate.h" // _PyInterpreterState_GET()
@@ -557,13 +558,22 @@ _PyCode_New(struct _PyCodeConstructor *con)
}
Py_ssize_t size = PyBytes_GET_SIZE(con->code) / sizeof(_Py_CODEUNIT);
- PyCodeObject *co = PyObject_NewVar(PyCodeObject, &PyCode_Type, size);
+ PyCodeObject *co;
+#ifdef Py_GIL_DISABLED
+ co = PyObject_GC_NewVar(PyCodeObject, &PyCode_Type, size);
+#else
+ co = PyObject_NewVar(PyCodeObject, &PyCode_Type, size);
+#endif
if (co == NULL) {
Py_XDECREF(replacement_locations);
PyErr_NoMemory();
return NULL;
}
init_code(co, con);
+#ifdef Py_GIL_DISABLED
+ _PyObject_SetDeferredRefcount((PyObject *)co);
+ _PyObject_GC_TRACK(co);
+#endif
Py_XDECREF(replacement_locations);
return co;
}
@@ -1710,6 +1720,10 @@ code_dealloc(PyCodeObject *co)
}
Py_SET_REFCNT(co, 0);
+#ifdef Py_GIL_DISABLED
+ PyObject_GC_UnTrack(co);
+#endif
+
_PyFunction_ClearCodeByVersion(co->co_version);
if (co->co_extra != NULL) {
PyInterpreterState *interp = _PyInterpreterState_GET();
@@ -1752,6 +1766,15 @@ code_dealloc(PyCodeObject *co)
PyObject_Free(co);
}
+#ifdef Py_GIL_DISABLED
+static int
+code_traverse(PyCodeObject *co, visitproc visit, void *arg)
+{
+ Py_VISIT(co->co_consts);
+ return 0;
+}
+#endif
+
static PyObject *
code_repr(PyCodeObject *co)
{
@@ -2170,7 +2193,8 @@ static struct PyMethodDef code_methods[] = {
{"co_positions", (PyCFunction)code_positionsiterator, METH_NOARGS},
CODE_REPLACE_METHODDEF
CODE__VARNAME_FROM_OPARG_METHODDEF
- {"__replace__", _PyCFunction_CAST(code_replace), METH_FASTCALL|METH_KEYWORDS},
+ {"__replace__", _PyCFunction_CAST(code_replace), METH_FASTCALL|METH_KEYWORDS,
+ PyDoc_STR("__replace__($self, /, **changes)\n--\n\nThe same as replace().")},
{NULL, NULL} /* sentinel */
};
@@ -2195,9 +2219,17 @@ PyTypeObject PyCode_Type = {
PyObject_GenericGetAttr, /* tp_getattro */
0, /* tp_setattro */
0, /* tp_as_buffer */
+#ifdef Py_GIL_DISABLED
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC, /* tp_flags */
+#else
Py_TPFLAGS_DEFAULT, /* tp_flags */
+#endif
code_new__doc__, /* tp_doc */
+#ifdef Py_GIL_DISABLED
+ (traverseproc)code_traverse, /* tp_traverse */
+#else
0, /* tp_traverse */
+#endif
0, /* tp_clear */
code_richcompare, /* tp_richcompare */
offsetof(PyCodeObject, co_weakreflist), /* tp_weaklistoffset */
diff --git a/Objects/descrobject.c b/Objects/descrobject.c
index 3423f152ce862d..1b7e2fde3ceccd 100644
--- a/Objects/descrobject.c
+++ b/Objects/descrobject.c
@@ -909,6 +909,7 @@ descr_new(PyTypeObject *descrtype, PyTypeObject *type, const char *name)
descr = (PyDescrObject *)PyType_GenericAlloc(descrtype, 0);
if (descr != NULL) {
+ _PyObject_SetDeferredRefcount((PyObject *)descr);
descr->d_type = (PyTypeObject*)Py_XNewRef(type);
descr->d_name = PyUnicode_InternFromString(name);
if (descr->d_name == NULL) {
@@ -1165,8 +1166,8 @@ mappingproxy_reversed(PyObject *self, PyObject *Py_UNUSED(ignored))
static PyMethodDef mappingproxy_methods[] = {
{"get", _PyCFunction_CAST(mappingproxy_get), METH_FASTCALL,
- PyDoc_STR("D.get(k[,d]) -> D[k] if k in D, else d."
- " d defaults to None.")},
+ PyDoc_STR("get($self, key, default=None, /)\n--\n\n"
+ "Return the value for key if key is in the mapping, else default.")},
{"keys", mappingproxy_keys, METH_NOARGS,
PyDoc_STR("D.keys() -> a set-like object providing a view on D's keys")},
{"values", mappingproxy_values, METH_NOARGS,
@@ -1254,11 +1255,12 @@ mappingproxy.__new__ as mappingproxy_new
mapping: object
+Read-only proxy of a mapping.
[clinic start generated code]*/
static PyObject *
mappingproxy_new_impl(PyTypeObject *type, PyObject *mapping)
-/*[clinic end generated code: output=65f27f02d5b68fa7 input=d2d620d4f598d4f8]*/
+/*[clinic end generated code: output=65f27f02d5b68fa7 input=c156df096ef7590c]*/
{
mappingproxyobject *mappingproxy;
@@ -2024,7 +2026,7 @@ PyTypeObject PyDictProxy_Type = {
0, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
Py_TPFLAGS_MAPPING, /* tp_flags */
- 0, /* tp_doc */
+ mappingproxy_new__doc__, /* tp_doc */
mappingproxy_traverse, /* tp_traverse */
0, /* tp_clear */
mappingproxy_richcompare, /* tp_richcompare */
diff --git a/Objects/dictobject.c b/Objects/dictobject.c
index b62d39ad6c5192..003a03fd741702 100644
--- a/Objects/dictobject.c
+++ b/Objects/dictobject.c
@@ -445,7 +445,7 @@ dictkeys_incref(PyDictKeysObject *dk)
return;
}
#ifdef Py_REF_DEBUG
- _Py_IncRefTotal(_PyInterpreterState_GET());
+ _Py_IncRefTotal(_PyThreadState_GET());
#endif
INCREF_KEYS(dk);
}
@@ -458,7 +458,7 @@ dictkeys_decref(PyInterpreterState *interp, PyDictKeysObject *dk, bool use_qsbr)
}
assert(dk->dk_refcnt > 0);
#ifdef Py_REF_DEBUG
- _Py_DecRefTotal(_PyInterpreterState_GET());
+ _Py_DecRefTotal(_PyThreadState_GET());
#endif
if (DECREF_KEYS(dk) == 1) {
if (DK_IS_UNICODE(dk)) {
@@ -790,7 +790,7 @@ new_keys_object(PyInterpreterState *interp, uint8_t log2_size, bool unicode)
}
}
#ifdef Py_REF_DEBUG
- _Py_IncRefTotal(_PyInterpreterState_GET());
+ _Py_IncRefTotal(_PyThreadState_GET());
#endif
dk->dk_refcnt = 1;
dk->dk_log2_size = log2_size;
@@ -978,7 +978,7 @@ clone_combined_dict_keys(PyDictObject *orig)
we have it now; calling dictkeys_incref would be an error as
keys->dk_refcnt is already set to 1 (after memcpy). */
#ifdef Py_REF_DEBUG
- _Py_IncRefTotal(_PyInterpreterState_GET());
+ _Py_IncRefTotal(_PyThreadState_GET());
#endif
return keys;
}
@@ -1286,7 +1286,7 @@ Py_ssize_t compare_unicode_generic_threadsafe(PyDictObject *mp, PyDictKeysObject
assert(!PyUnicode_CheckExact(key));
if (startkey != NULL) {
- if (!_Py_TryIncref(&ep->me_key, startkey)) {
+ if (!_Py_TryIncrefCompare(&ep->me_key, startkey)) {
return DKIX_KEY_CHANGED;
}
@@ -1334,7 +1334,7 @@ compare_unicode_unicode_threadsafe(PyDictObject *mp, PyDictKeysObject *dk,
return unicode_get_hash(startkey) == hash && unicode_eq(startkey, key);
}
else {
- if (!_Py_TryIncref(&ep->me_key, startkey)) {
+ if (!_Py_TryIncrefCompare(&ep->me_key, startkey)) {
return DKIX_KEY_CHANGED;
}
if (unicode_get_hash(startkey) == hash && unicode_eq(startkey, key)) {
@@ -1364,7 +1364,7 @@ Py_ssize_t compare_generic_threadsafe(PyDictObject *mp, PyDictKeysObject *dk,
}
Py_ssize_t ep_hash = _Py_atomic_load_ssize_relaxed(&ep->me_hash);
if (ep_hash == hash) {
- if (startkey == NULL || !_Py_TryIncref(&ep->me_key, startkey)) {
+ if (startkey == NULL || !_Py_TryIncrefCompare(&ep->me_key, startkey)) {
return DKIX_KEY_CHANGED;
}
int cmp = PyObject_RichCompareBool(startkey, key, Py_EQ);
@@ -2021,7 +2021,7 @@ dictresize(PyInterpreterState *interp, PyDictObject *mp,
if (oldkeys != Py_EMPTY_KEYS) {
#ifdef Py_REF_DEBUG
- _Py_DecRefTotal(_PyInterpreterState_GET());
+ _Py_DecRefTotal(_PyThreadState_GET());
#endif
assert(oldkeys->dk_kind != DICT_KEYS_SPLIT);
assert(oldkeys->dk_refcnt == 1);
@@ -2603,7 +2603,7 @@ static int
delitemif_lock_held(PyObject *op, PyObject *key,
int (*predicate)(PyObject *value))
{
- Py_ssize_t hashpos, ix;
+ Py_ssize_t ix;
PyDictObject *mp;
Py_hash_t hash;
PyObject *old_value;
@@ -2632,14 +2632,11 @@ delitemif_lock_held(PyObject *op, PyObject *key,
if (res == -1)
return -1;
- hashpos = lookdict_index(mp->ma_keys, hash, ix);
- assert(hashpos >= 0);
-
if (res > 0) {
PyInterpreterState *interp = _PyInterpreterState_GET();
uint64_t new_version = _PyDict_NotifyEvent(
interp, PyDict_EVENT_DELETED, mp, key, NULL);
- return delitem_common(mp, hashpos, ix, old_value, new_version);
+ return delitem_common(mp, hash, ix, old_value, new_version);
} else {
return 0;
}
@@ -2684,25 +2681,28 @@ clear_lock_held(PyObject *op)
interp, PyDict_EVENT_CLEARED, mp, NULL, NULL);
// We don't inc ref empty keys because they're immortal
ensure_shared_on_resize(mp);
-
- set_keys(mp, Py_EMPTY_KEYS);
- set_values(mp, NULL);
- mp->ma_used = 0;
mp->ma_version_tag = new_version;
- /* ...then clear the keys and values */
- if (oldvalues != NULL) {
- if (!oldvalues->embedded) {
- n = oldkeys->dk_nentries;
- for (i = 0; i < n; i++)
- Py_CLEAR(oldvalues->values[i]);
- free_values(oldvalues, IS_DICT_SHARED(mp));
- }
- dictkeys_decref(interp, oldkeys, false);
- }
- else {
+ mp->ma_used = 0;
+ if (oldvalues == NULL) {
+ set_keys(mp, Py_EMPTY_KEYS);
assert(oldkeys->dk_refcnt == 1);
dictkeys_decref(interp, oldkeys, IS_DICT_SHARED(mp));
}
+ else {
+ n = oldkeys->dk_nentries;
+ for (i = 0; i < n; i++) {
+ Py_CLEAR(oldvalues->values[i]);
+ }
+ if (oldvalues->embedded) {
+ oldvalues->size = 0;
+ }
+ else {
+ set_values(mp, NULL);
+ set_keys(mp, Py_EMPTY_KEYS);
+ free_values(oldvalues, IS_DICT_SHARED(mp));
+ dictkeys_decref(interp, oldkeys, false);
+ }
+ }
ASSERT_CONSISTENT(mp);
}
@@ -5308,7 +5308,7 @@ acquire_key_value(PyObject **key_loc, PyObject *value, PyObject **value_loc,
}
if (out_value) {
- if (!_Py_TryIncref(value_loc, value)) {
+ if (!_Py_TryIncrefCompare(value_loc, value)) {
if (out_key) {
Py_DECREF(*out_key);
}
diff --git a/Objects/funcobject.c b/Objects/funcobject.c
index a3c0800e7891d3..276b3db2970371 100644
--- a/Objects/funcobject.c
+++ b/Objects/funcobject.c
@@ -127,6 +127,9 @@ _PyFunction_FromConstructor(PyFrameConstructor *constr)
op->func_typeparams = NULL;
op->vectorcall = _PyFunction_Vectorcall;
op->func_version = 0;
+ // NOTE: functions created via FrameConstructor do not use deferred
+ // reference counting because they are typically not part of cycles
+ // nor accessed by multiple threads.
_PyObject_GC_TRACK(op);
handle_func_event(PyFunction_EVENT_CREATE, op, NULL);
return op;
@@ -202,6 +205,12 @@ PyFunction_NewWithQualName(PyObject *code, PyObject *globals, PyObject *qualname
op->func_typeparams = NULL;
op->vectorcall = _PyFunction_Vectorcall;
op->func_version = 0;
+ if ((code_obj->co_flags & CO_NESTED) == 0) {
+ // Use deferred reference counting for top-level functions, but not
+ // nested functions because they are more likely to capture variables,
+ // which makes prompt deallocation more important.
+ _PyObject_SetDeferredRefcount((PyObject *)op);
+ }
_PyObject_GC_TRACK(op);
handle_func_event(PyFunction_EVENT_CREATE, op, NULL);
return (PyObject *)op;
diff --git a/Objects/genericaliasobject.c b/Objects/genericaliasobject.c
index c045d495e85526..2779baf0bd1c61 100644
--- a/Objects/genericaliasobject.c
+++ b/Objects/genericaliasobject.c
@@ -537,6 +537,8 @@ _Py_subs_parameters(PyObject *self, PyObject *args, PyObject *parameters, PyObje
}
PyDoc_STRVAR(genericalias__doc__,
+"GenericAlias(origin, args, /)\n"
+"--\n\n"
"Represent a PEP 585 generic type\n"
"\n"
"E.g. for t = list[int], t.__origin__ is list and t.__args__ is (int,).");
diff --git a/Objects/memoryobject.c b/Objects/memoryobject.c
index 6a38952fdc1f3b..5caa6504272301 100644
--- a/Objects/memoryobject.c
+++ b/Objects/memoryobject.c
@@ -3255,6 +3255,9 @@ PyDoc_STRVAR(memory_f_contiguous_doc,
"A bool indicating whether the memory is Fortran contiguous.");
PyDoc_STRVAR(memory_contiguous_doc,
"A bool indicating whether the memory is contiguous.");
+PyDoc_STRVAR(memory_exit_doc,
+ "__exit__($self, /, *exc_info)\n--\n\n"
+ "Release the underlying buffer exposed by the memoryview object.");
static PyGetSetDef memory_getsetlist[] = {
@@ -3283,7 +3286,7 @@ static PyMethodDef memory_methods[] = {
MEMORYVIEW_TOREADONLY_METHODDEF
MEMORYVIEW__FROM_FLAGS_METHODDEF
{"__enter__", memory_enter, METH_NOARGS, NULL},
- {"__exit__", memory_exit, METH_VARARGS, NULL},
+ {"__exit__", memory_exit, METH_VARARGS, memory_exit_doc},
{NULL, NULL}
};
diff --git a/Objects/mimalloc/prim/unix/prim.c b/Objects/mimalloc/prim/unix/prim.c
index ec8447ab40d70c..c6ea05bbe7a2ac 100644
--- a/Objects/mimalloc/prim/unix/prim.c
+++ b/Objects/mimalloc/prim/unix/prim.c
@@ -50,7 +50,7 @@ terms of the MIT license. A copy of the license can be found in the file
#include
#endif
-#if !defined(__HAIKU__) && !defined(__APPLE__) && !defined(__CYGWIN__) && !defined(_AIX) && !defined(__FreeBSD__) && !defined(__sun)
+#if !defined(__HAIKU__) && !defined(__APPLE__) && !defined(__CYGWIN__) && !defined(_AIX) && !defined(__OpenBSD__) && !defined(__FreeBSD__) && !defined(__sun)
#define MI_HAS_SYSCALL_H
#include <sys/syscall.h>
#endif
@@ -76,7 +76,7 @@ static int mi_prim_access(const char *fpath, int mode) {
return syscall(SYS_access,fpath,mode);
}
-#elif !defined(__APPLE__) && !defined(_AIX) && !defined(__FreeBSD__) && !defined(__sun) // avoid unused warnings
+#elif !defined(__APPLE__) && !defined(_AIX) && !defined(__OpenBSD__) && !defined(__FreeBSD__) && !defined(__sun) // avoid unused warnings
static int mi_prim_open(const char* fpath, int open_flags) {
return open(fpath,open_flags);
diff --git a/Objects/moduleobject.c b/Objects/moduleobject.c
index 9cd98fb4345fdd..da6a276c41be1f 100644
--- a/Objects/moduleobject.c
+++ b/Objects/moduleobject.c
@@ -88,21 +88,31 @@ new_module_notrack(PyTypeObject *mt)
m->md_weaklist = NULL;
m->md_name = NULL;
m->md_dict = PyDict_New();
- if (m->md_dict != NULL) {
- return m;
+ if (m->md_dict == NULL) {
+ Py_DECREF(m);
+ return NULL;
}
- Py_DECREF(m);
- return NULL;
+ return m;
+}
+
+static void
+track_module(PyModuleObject *m)
+{
+ _PyObject_SetDeferredRefcount(m->md_dict);
+ PyObject_GC_Track(m->md_dict);
+
+ _PyObject_SetDeferredRefcount((PyObject *)m);
+ PyObject_GC_Track(m);
}
static PyObject *
new_module(PyTypeObject *mt, PyObject *args, PyObject *kws)
{
- PyObject *m = (PyObject *)new_module_notrack(mt);
+ PyModuleObject *m = new_module_notrack(mt);
if (m != NULL) {
- PyObject_GC_Track(m);
+ track_module(m);
}
- return m;
+ return (PyObject *)m;
}
PyObject *
@@ -113,7 +123,7 @@ PyModule_NewObject(PyObject *name)
return NULL;
if (module_init_dict(m, m->md_dict, name, NULL) != 0)
goto fail;
- PyObject_GC_Track(m);
+ track_module(m);
return (PyObject *)m;
fail:
@@ -705,16 +715,7 @@ static int
module___init___impl(PyModuleObject *self, PyObject *name, PyObject *doc)
/*[clinic end generated code: output=e7e721c26ce7aad7 input=57f9e177401e5e1e]*/
{
- PyObject *dict = self->md_dict;
- if (dict == NULL) {
- dict = PyDict_New();
- if (dict == NULL)
- return -1;
- self->md_dict = dict;
- }
- if (module_init_dict(self, dict, name, doc) < 0)
- return -1;
- return 0;
+ return module_init_dict(self, self->md_dict, name, doc);
}
static void
diff --git a/Objects/namespaceobject.c b/Objects/namespaceobject.c
index b975bcfeea2cdf..b2a224b9b2bda5 100644
--- a/Objects/namespaceobject.c
+++ b/Objects/namespaceobject.c
@@ -219,15 +219,17 @@ namespace_replace(PyObject *self, PyObject *args, PyObject *kwargs)
static PyMethodDef namespace_methods[] = {
{"__reduce__", (PyCFunction)namespace_reduce, METH_NOARGS,
namespace_reduce__doc__},
- {"__replace__", _PyCFunction_CAST(namespace_replace), METH_VARARGS|METH_KEYWORDS, NULL},
+ {"__replace__", _PyCFunction_CAST(namespace_replace), METH_VARARGS|METH_KEYWORDS,
+ PyDoc_STR("__replace__($self, /, **changes)\n--\n\n"
+ "Return a copy of the namespace object with new values for the specified attributes.")},
{NULL, NULL} // sentinel
};
PyDoc_STRVAR(namespace_doc,
-"A simple attribute-based namespace.\n\
-\n\
-SimpleNamespace(**kwargs)");
+"SimpleNamespace(**kwargs)\n\
+--\n\n\
+A simple attribute-based namespace.");
PyTypeObject _PyNamespace_Type = {
PyVarObject_HEAD_INIT(&PyType_Type, 0)
diff --git a/Objects/object.c b/Objects/object.c
index 60642d899bcafa..016d0e1ded92d8 100644
--- a/Objects/object.c
+++ b/Objects/object.c
@@ -73,21 +73,16 @@ get_legacy_reftotal(void)
interp->object_state.reftotal
static inline void
-reftotal_increment(PyInterpreterState *interp)
+reftotal_add(PyThreadState *tstate, Py_ssize_t n)
{
- REFTOTAL(interp)++;
-}
-
-static inline void
-reftotal_decrement(PyInterpreterState *interp)
-{
- REFTOTAL(interp)--;
-}
-
-static inline void
-reftotal_add(PyInterpreterState *interp, Py_ssize_t n)
-{
- REFTOTAL(interp) += n;
+#ifdef Py_GIL_DISABLED
+ _PyThreadStateImpl *tstate_impl = (_PyThreadStateImpl *)tstate;
+ // relaxed store to avoid data race with read in get_reftotal()
+ Py_ssize_t reftotal = tstate_impl->reftotal + n;
+ _Py_atomic_store_ssize_relaxed(&tstate_impl->reftotal, reftotal);
+#else
+ REFTOTAL(tstate->interp) += n;
+#endif
}
static inline Py_ssize_t get_global_reftotal(_PyRuntimeState *);
@@ -117,7 +112,15 @@ get_reftotal(PyInterpreterState *interp)
{
/* For a single interpreter, we ignore the legacy _Py_RefTotal,
since we can't determine which interpreter updated it. */
- return REFTOTAL(interp);
+ Py_ssize_t total = REFTOTAL(interp);
+#ifdef Py_GIL_DISABLED
+ for (PyThreadState *p = interp->threads.head; p != NULL; p = p->next) {
+ /* This may race with other threads modifications to their reftotal */
+ _PyThreadStateImpl *tstate_impl = (_PyThreadStateImpl *)p;
+ total += _Py_atomic_load_ssize_relaxed(&tstate_impl->reftotal);
+ }
+#endif
+ return total;
}
static inline Py_ssize_t
@@ -129,7 +132,7 @@ get_global_reftotal(_PyRuntimeState *runtime)
HEAD_LOCK(&_PyRuntime);
PyInterpreterState *interp = PyInterpreterState_Head();
for (; interp != NULL; interp = PyInterpreterState_Next(interp)) {
- total += REFTOTAL(interp);
+ total += get_reftotal(interp);
}
HEAD_UNLOCK(&_PyRuntime);
@@ -222,32 +225,32 @@ _Py_NegativeRefcount(const char *filename, int lineno, PyObject *op)
void
_Py_INCREF_IncRefTotal(void)
{
- reftotal_increment(_PyInterpreterState_GET());
+ reftotal_add(_PyThreadState_GET(), 1);
}
/* This is used strictly by Py_DECREF(). */
void
_Py_DECREF_DecRefTotal(void)
{
- reftotal_decrement(_PyInterpreterState_GET());
+ reftotal_add(_PyThreadState_GET(), -1);
}
void
-_Py_IncRefTotal(PyInterpreterState *interp)
+_Py_IncRefTotal(PyThreadState *tstate)
{
- reftotal_increment(interp);
+ reftotal_add(tstate, 1);
}
void
-_Py_DecRefTotal(PyInterpreterState *interp)
+_Py_DecRefTotal(PyThreadState *tstate)
{
- reftotal_decrement(interp);
+ reftotal_add(tstate, -1);
}
void
-_Py_AddRefTotal(PyInterpreterState *interp, Py_ssize_t n)
+_Py_AddRefTotal(PyThreadState *tstate, Py_ssize_t n)
{
- reftotal_add(interp, n);
+ reftotal_add(tstate, n);
}
/* This includes the legacy total
@@ -267,7 +270,10 @@ _Py_GetLegacyRefTotal(void)
Py_ssize_t
_PyInterpreterState_GetRefTotal(PyInterpreterState *interp)
{
- return get_reftotal(interp);
+ HEAD_LOCK(&_PyRuntime);
+ Py_ssize_t total = get_reftotal(interp);
+ HEAD_UNLOCK(&_PyRuntime);
+ return total;
}
#endif /* Py_REF_DEBUG */
@@ -345,7 +351,7 @@ _Py_DecRefSharedDebug(PyObject *o, const char *filename, int lineno)
if (should_queue) {
#ifdef Py_REF_DEBUG
- _Py_IncRefTotal(_PyInterpreterState_GET());
+ _Py_IncRefTotal(_PyThreadState_GET());
#endif
_Py_brc_queue_object(o);
}
@@ -405,7 +411,7 @@ _Py_ExplicitMergeRefcount(PyObject *op, Py_ssize_t extra)
&shared, new_shared));
#ifdef Py_REF_DEBUG
- _Py_AddRefTotal(_PyInterpreterState_GET(), extra);
+ _Py_AddRefTotal(_PyThreadState_GET(), extra);
#endif
_Py_atomic_store_uint32_relaxed(&op->ob_ref_local, 0);
@@ -2001,6 +2007,11 @@ static PyNumberMethods none_as_number = {
0, /* nb_index */
};
+PyDoc_STRVAR(none_doc,
+"NoneType()\n"
+"--\n\n"
+"The type of the None singleton.");
+
PyTypeObject _PyNone_Type = {
PyVarObject_HEAD_INIT(&PyType_Type, 0)
"NoneType",
@@ -2022,7 +2033,7 @@ PyTypeObject _PyNone_Type = {
0, /*tp_setattro */
0, /*tp_as_buffer */
Py_TPFLAGS_DEFAULT, /*tp_flags */
- 0, /*tp_doc */
+ none_doc, /*tp_doc */
0, /*tp_traverse */
0, /*tp_clear */
_Py_BaseObject_RichCompare, /*tp_richcompare */
@@ -2100,6 +2111,11 @@ static PyNumberMethods notimplemented_as_number = {
.nb_bool = notimplemented_bool,
};
+PyDoc_STRVAR(notimplemented_doc,
+"NotImplementedType()\n"
+"--\n\n"
+"The type of the NotImplemented singleton.");
+
PyTypeObject _PyNotImplemented_Type = {
PyVarObject_HEAD_INIT(&PyType_Type, 0)
"NotImplementedType",
@@ -2121,7 +2137,7 @@ PyTypeObject _PyNotImplemented_Type = {
0, /*tp_setattro */
0, /*tp_as_buffer */
Py_TPFLAGS_DEFAULT, /*tp_flags */
- 0, /*tp_doc */
+ notimplemented_doc, /*tp_doc */
0, /*tp_traverse */
0, /*tp_clear */
0, /*tp_richcompare */
@@ -2376,7 +2392,7 @@ void
_Py_NewReference(PyObject *op)
{
#ifdef Py_REF_DEBUG
- reftotal_increment(_PyInterpreterState_GET());
+ _Py_IncRefTotal(_PyThreadState_GET());
#endif
new_reference(op);
}
@@ -2408,6 +2424,19 @@ _Py_SetImmortal(PyObject *op)
_Py_SetImmortalUntracked(op);
}
+void
+_PyObject_SetDeferredRefcount(PyObject *op)
+{
+#ifdef Py_GIL_DISABLED
+ assert(PyType_IS_GC(Py_TYPE(op)));
+ assert(_Py_IsOwnedByCurrentThread(op));
+ assert(op->ob_ref_shared == 0);
+ op->ob_gc_bits |= _PyGC_BITS_DEFERRED;
+ op->ob_ref_local += 1;
+ op->ob_ref_shared = _Py_REF_QUEUED;
+#endif
+}
+
void
_Py_ResurrectReference(PyObject *op)
{
diff --git a/Objects/rangeobject.c b/Objects/rangeobject.c
index ce9eef69ad75a8..7da6162744ffd6 100644
--- a/Objects/rangeobject.c
+++ b/Objects/rangeobject.c
@@ -751,7 +751,7 @@ PyDoc_STRVAR(index_doc,
static PyMethodDef range_methods[] = {
{"__reversed__", range_reverse, METH_NOARGS, reverse_doc},
- {"__reduce__", (PyCFunction)range_reduce, METH_VARARGS},
+ {"__reduce__", (PyCFunction)range_reduce, METH_NOARGS},
{"count", (PyCFunction)range_count, METH_O, count_doc},
{"index", (PyCFunction)range_index, METH_O, index_doc},
{NULL, NULL} /* sentinel */
diff --git a/Objects/setobject.c b/Objects/setobject.c
index 592711f305cbaf..66ca80e8fc25f9 100644
--- a/Objects/setobject.c
+++ b/Objects/setobject.c
@@ -834,7 +834,7 @@ static PyMethodDef setiter_methods[] = {
static PyObject *setiter_iternext(setiterobject *si)
{
- PyObject *key;
+ PyObject *key = NULL;
Py_ssize_t i, mask;
setentry *entry;
PySetObject *so = si->si_set;
@@ -843,30 +843,35 @@ static PyObject *setiter_iternext(setiterobject *si)
return NULL;
assert (PyAnySet_Check(so));
- if (si->si_used != so->used) {
+ Py_ssize_t so_used = FT_ATOMIC_LOAD_SSIZE(so->used);
+ Py_ssize_t si_used = FT_ATOMIC_LOAD_SSIZE(si->si_used);
+ if (si_used != so_used) {
PyErr_SetString(PyExc_RuntimeError,
"Set changed size during iteration");
si->si_used = -1; /* Make this state sticky */
return NULL;
}
+ Py_BEGIN_CRITICAL_SECTION(so);
i = si->si_pos;
assert(i>=0);
entry = so->table;
mask = so->mask;
- while (i <= mask && (entry[i].key == NULL || entry[i].key == dummy))
+ while (i <= mask && (entry[i].key == NULL || entry[i].key == dummy)) {
i++;
+ }
+ if (i <= mask) {
+ key = Py_NewRef(entry[i].key);
+ }
+ Py_END_CRITICAL_SECTION();
si->si_pos = i+1;
- if (i > mask)
- goto fail;
+ if (key == NULL) {
+ si->si_set = NULL;
+ Py_DECREF(so);
+ return NULL;
+ }
si->len--;
- key = entry[i].key;
- return Py_NewRef(key);
-
-fail:
- si->si_set = NULL;
- Py_DECREF(so);
- return NULL;
+ return key;
}
PyTypeObject PySetIter_Type = {
diff --git a/Objects/sliceobject.c b/Objects/sliceobject.c
index 7333aea91e5648..245bea98d58509 100644
--- a/Objects/sliceobject.c
+++ b/Objects/sliceobject.c
@@ -57,6 +57,11 @@ static PyMethodDef ellipsis_methods[] = {
{NULL, NULL}
};
+PyDoc_STRVAR(ellipsis_doc,
+"ellipsis()\n"
+"--\n\n"
+"The type of the Ellipsis singleton.");
+
PyTypeObject PyEllipsis_Type = {
PyVarObject_HEAD_INIT(&PyType_Type, 0)
"ellipsis", /* tp_name */
@@ -78,7 +83,7 @@ PyTypeObject PyEllipsis_Type = {
0, /* tp_setattro */
0, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT, /* tp_flags */
- 0, /* tp_doc */
+ ellipsis_doc, /* tp_doc */
0, /* tp_traverse */
0, /* tp_clear */
0, /* tp_richcompare */
diff --git a/Objects/stringlib/find.h b/Objects/stringlib/find.h
index 509b9297396be8..c385718a5b2692 100644
--- a/Objects/stringlib/find.h
+++ b/Objects/stringlib/find.h
@@ -70,50 +70,3 @@ STRINGLIB(contains_obj)(PyObject* str, PyObject* sub)
}
#endif /* STRINGLIB_WANT_CONTAINS_OBJ */
-
-/*
-This function is a helper for the "find" family (find, rfind, index,
-rindex) and for count, startswith and endswith, because they all have
-the same behaviour for the arguments.
-
-It does not touch the variables received until it knows everything
-is ok.
-*/
-
-#define FORMAT_BUFFER_SIZE 50
-
-Py_LOCAL_INLINE(int)
-STRINGLIB(parse_args_finds)(const char * function_name, PyObject *args,
- PyObject **subobj,
- Py_ssize_t *start, Py_ssize_t *end)
-{
- PyObject *tmp_subobj;
- Py_ssize_t tmp_start = 0;
- Py_ssize_t tmp_end = PY_SSIZE_T_MAX;
- PyObject *obj_start=Py_None, *obj_end=Py_None;
- char format[FORMAT_BUFFER_SIZE] = "O|OO:";
- size_t len = strlen(format);
-
- strncpy(format + len, function_name, FORMAT_BUFFER_SIZE - len - 1);
- format[FORMAT_BUFFER_SIZE - 1] = '\0';
-
- if (!PyArg_ParseTuple(args, format, &tmp_subobj, &obj_start, &obj_end))
- return 0;
-
- /* To support None in "start" and "end" arguments, meaning
- the same as if they were not passed.
- */
- if (obj_start != Py_None)
- if (!_PyEval_SliceIndex(obj_start, &tmp_start))
- return 0;
- if (obj_end != Py_None)
- if (!_PyEval_SliceIndex(obj_end, &tmp_end))
- return 0;
-
- *start = tmp_start;
- *end = tmp_end;
- *subobj = tmp_subobj;
- return 1;
-}
-
-#undef FORMAT_BUFFER_SIZE
diff --git a/Objects/structseq.c b/Objects/structseq.c
index 661d96a968fb80..ec5c5ab45ba813 100644
--- a/Objects/structseq.c
+++ b/Objects/structseq.c
@@ -453,7 +453,9 @@ structseq_replace(PyStructSequence *self, PyObject *args, PyObject *kwargs)
static PyMethodDef structseq_methods[] = {
{"__reduce__", (PyCFunction)structseq_reduce, METH_NOARGS, NULL},
- {"__replace__", _PyCFunction_CAST(structseq_replace), METH_VARARGS | METH_KEYWORDS, NULL},
+ {"__replace__", _PyCFunction_CAST(structseq_replace), METH_VARARGS | METH_KEYWORDS,
+ PyDoc_STR("__replace__($self, /, **changes)\n--\n\n"
+ "Return a copy of the structure with new values for the specified fields.")},
{NULL, NULL} // sentinel
};
diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c
index d9dc00da368a84..5ae1ee9a89af84 100644
--- a/Objects/tupleobject.c
+++ b/Objects/tupleobject.c
@@ -946,7 +946,7 @@ _PyTuple_Resize(PyObject **pv, Py_ssize_t newsize)
if (sv == NULL) {
*pv = NULL;
#ifdef Py_REF_DEBUG
- _Py_DecRefTotal(_PyInterpreterState_GET());
+ _Py_DecRefTotal(_PyThreadState_GET());
#endif
PyObject_GC_Del(v);
return -1;
diff --git a/Objects/typeobject.c b/Objects/typeobject.c
index 51ceb7d7de1cb6..1cb53516a9ae76 100644
--- a/Objects/typeobject.c
+++ b/Objects/typeobject.c
@@ -162,9 +162,14 @@ _PyStaticType_GetState(PyInterpreterState *interp, PyTypeObject *self)
static void
static_builtin_state_init(PyInterpreterState *interp, PyTypeObject *self)
{
- if (!static_builtin_index_is_set(self)) {
+ if (_Py_IsMainInterpreter(interp)) {
+ assert(!static_builtin_index_is_set(self));
static_builtin_index_set(self, interp->types.num_builtins_initialized);
}
+ else {
+ assert(static_builtin_index_get(self) ==
+ interp->types.num_builtins_initialized);
+ }
static_builtin_state *state = static_builtin_state_get(interp, self);
/* It should only be called once for each builtin type. */
@@ -378,7 +383,7 @@ _PyType_GetMRO(PyTypeObject *self)
if (mro == NULL) {
return NULL;
}
- if (_Py_TryIncref(&self->tp_mro, mro)) {
+ if (_Py_TryIncrefCompare(&self->tp_mro, mro)) {
return mro;
}
@@ -2193,15 +2198,7 @@ subtype_dealloc(PyObject *self)
finalizers since they might rely on part of the object
being finalized that has already been destroyed. */
if (type->tp_weaklistoffset && !base->tp_weaklistoffset) {
- /* Modeled after GET_WEAKREFS_LISTPTR().
-
- This is never triggered for static types so we can avoid the
- (slightly) more costly _PyObject_GET_WEAKREFS_LISTPTR(). */
- PyWeakReference **list = \
- _PyObject_GET_WEAKREFS_LISTPTR_FROM_OFFSET(self);
- while (*list) {
- _PyWeakref_ClearRef(*list);
- }
+ _PyWeakref_ClearWeakRefsExceptCallbacks(self);
}
}
@@ -3589,6 +3586,8 @@ type_new_alloc(type_new_ctx *ctx)
et->ht_module = NULL;
et->_ht_tpname = NULL;
+ _PyObject_SetDeferredRefcount((PyObject *)et);
+
return type;
}
@@ -5125,6 +5124,52 @@ _PyType_LookupId(PyTypeObject *type, _Py_Identifier *name)
return _PyType_Lookup(type, oname);
}
+static void
+set_flags(PyTypeObject *self, unsigned long mask, unsigned long flags)
+{
+ ASSERT_TYPE_LOCK_HELD();
+ self->tp_flags = (self->tp_flags & ~mask) | flags;
+}
+
+void
+_PyType_SetFlags(PyTypeObject *self, unsigned long mask, unsigned long flags)
+{
+ BEGIN_TYPE_LOCK();
+ set_flags(self, mask, flags);
+ END_TYPE_LOCK();
+}
+
+static void
+set_flags_recursive(PyTypeObject *self, unsigned long mask, unsigned long flags)
+{
+ if (PyType_HasFeature(self, Py_TPFLAGS_IMMUTABLETYPE) ||
+ (self->tp_flags & mask) == flags)
+ {
+ return;
+ }
+
+ set_flags(self, mask, flags);
+
+ PyObject *children = _PyType_GetSubclasses(self);
+ if (children == NULL) {
+ return;
+ }
+
+ for (Py_ssize_t i = 0; i < PyList_GET_SIZE(children); i++) {
+ PyObject *child = PyList_GET_ITEM(children, i);
+ set_flags_recursive((PyTypeObject *)child, mask, flags);
+ }
+ Py_DECREF(children);
+}
+
+void
+_PyType_SetFlagsRecursive(PyTypeObject *self, unsigned long mask, unsigned long flags)
+{
+ BEGIN_TYPE_LOCK();
+ set_flags_recursive(self, mask, flags);
+ END_TYPE_LOCK();
+}
+
/* This is similar to PyObject_GenericGetAttr(),
but uses _PyType_Lookup() instead of just looking in type->tp_dict.
@@ -6879,7 +6924,7 @@ static PyMethodDef object_methods[] = {
OBJECT___REDUCE_EX___METHODDEF
OBJECT___REDUCE___METHODDEF
OBJECT___GETSTATE___METHODDEF
- {"__subclasshook__", object_subclasshook, METH_CLASS | METH_VARARGS,
+ {"__subclasshook__", object_subclasshook, METH_CLASS | METH_O,
object_subclasshook_doc},
{"__init_subclass__", object_init_subclass, METH_CLASS | METH_NOARGS,
object_init_subclass_doc},
@@ -9855,7 +9900,8 @@ static pytype_slotdef slotdefs[] = {
TPSLOT(__getattribute__, tp_getattro, _Py_slot_tp_getattr_hook,
wrap_binaryfunc,
"__getattribute__($self, name, /)\n--\n\nReturn getattr(self, name)."),
- TPSLOT(__getattr__, tp_getattro, _Py_slot_tp_getattr_hook, NULL, ""),
+ TPSLOT(__getattr__, tp_getattro, _Py_slot_tp_getattr_hook, NULL,
+ "__getattr__($self, name, /)\n--\n\nImplement getattr(self, name)."),
TPSLOT(__setattr__, tp_setattro, slot_tp_setattro, wrap_setattr,
"__setattr__($self, name, value, /)\n--\n\nImplement setattr(self, name, value)."),
TPSLOT(__delattr__, tp_setattro, slot_tp_setattro, wrap_delattr,
@@ -9890,7 +9936,9 @@ static pytype_slotdef slotdefs[] = {
TPSLOT(__new__, tp_new, slot_tp_new, NULL,
"__new__(type, /, *args, **kwargs)\n--\n\n"
"Create and return new object. See help(type) for accurate signature."),
- TPSLOT(__del__, tp_finalize, slot_tp_finalize, (wrapperfunc)wrap_del, ""),
+ TPSLOT(__del__, tp_finalize, slot_tp_finalize, (wrapperfunc)wrap_del,
+ "__del__($self, /)\n--\n\n"
+ "Called when the instance is about to be destroyed."),
BUFSLOT(__buffer__, bf_getbuffer, slot_bf_getbuffer, wrap_buffer,
"__buffer__($self, flags, /)\n--\n\n"
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c
index e135638c696fa4..2c259b7e869efe 100644
--- a/Objects/unicodeobject.c
+++ b/Objects/unicodeobject.c
@@ -2468,6 +2468,7 @@ unicode_fromformat_arg(_PyUnicodeWriter *writer,
switch (*f++) {
case '-': flags |= F_LJUST; continue;
case '0': flags |= F_ZERO; continue;
+ case '#': flags |= F_ALT; continue;
}
f--;
break;
@@ -2797,9 +2798,8 @@ unicode_fromformat_arg(_PyUnicodeWriter *writer,
PyTypeObject *type = (PyTypeObject *)Py_NewRef(Py_TYPE(obj));
PyObject *type_name;
- if (f[1] == '#') {
+ if (flags & F_ALT) {
type_name = _PyType_GetFullyQualifiedName(type, ':');
- f++;
}
else {
type_name = PyType_GetFullyQualifiedName(type);
@@ -2830,9 +2830,8 @@ unicode_fromformat_arg(_PyUnicodeWriter *writer,
PyTypeObject *type = (PyTypeObject*)type_raw;
PyObject *type_name;
- if (f[1] == '#') {
+ if (flags & F_ALT) {
type_name = _PyType_GetFullyQualifiedName(type, ':');
- f++;
}
else {
type_name = PyType_GetFullyQualifiedName(type);
@@ -14618,6 +14617,56 @@ unicode_new_impl(PyTypeObject *type, PyObject *x, const char *encoding,
return unicode;
}
+static const char *
+arg_as_utf8(PyObject *obj, const char *name)
+{
+ if (!PyUnicode_Check(obj)) {
+ PyErr_Format(PyExc_TypeError,
+ "str() argument '%s' must be str, not %T",
+ name, obj);
+ return NULL;
+ }
+ return _PyUnicode_AsUTF8NoNUL(obj);
+}
+
+static PyObject *
+unicode_vectorcall(PyObject *type, PyObject *const *args,
+ size_t nargsf, PyObject *kwnames)
+{
+ assert(Py_Is(_PyType_CAST(type), &PyUnicode_Type));
+
+ Py_ssize_t nargs = PyVectorcall_NARGS(nargsf);
+ if (kwnames != NULL && PyTuple_GET_SIZE(kwnames) != 0) {
+ // Fallback to unicode_new()
+ PyObject *tuple = _PyTuple_FromArray(args, nargs);
+ if (tuple == NULL) {
+ return NULL;
+ }
+ PyObject *dict = _PyStack_AsDict(args + nargs, kwnames);
+ if (dict == NULL) {
+ Py_DECREF(tuple);
+ return NULL;
+ }
+ PyObject *ret = unicode_new(_PyType_CAST(type), tuple, dict);
+ Py_DECREF(tuple);
+ Py_DECREF(dict);
+ return ret;
+ }
+ if (!_PyArg_CheckPositional("str", nargs, 0, 3)) {
+ return NULL;
+ }
+ if (nargs == 0) {
+ return unicode_get_empty();
+ }
+ PyObject *object = args[0];
+ if (nargs == 1) {
+ return PyObject_Str(object);
+ }
+ const char *encoding = arg_as_utf8(args[1], "encoding");
+ const char *errors = (nargs == 3) ? arg_as_utf8(args[2], "errors") : NULL;
+ return PyUnicode_FromEncodedObject(object, encoding, errors);
+}
+
static PyObject *
unicode_subtype_new(PyTypeObject *type, PyObject *unicode)
{
@@ -14759,6 +14808,7 @@ PyTypeObject PyUnicode_Type = {
0, /* tp_alloc */
unicode_new, /* tp_new */
PyObject_Del, /* tp_free */
+ .tp_vectorcall = unicode_vectorcall,
};
/* Initialize the Unicode implementation */
@@ -14916,7 +14966,7 @@ _PyUnicode_InternInPlace(PyInterpreterState *interp, PyObject **p)
decrements to these objects will not be registered so they
need to be accounted for in here. */
for (Py_ssize_t i = 0; i < Py_REFCNT(s) - 2; i++) {
- _Py_DecRefTotal(_PyInterpreterState_GET());
+ _Py_DecRefTotal(_PyThreadState_GET());
}
#endif
_Py_SetImmortal(s);
diff --git a/Objects/weakrefobject.c b/Objects/weakrefobject.c
index d8dd6aea3aff02..206107e8505dc7 100644
--- a/Objects/weakrefobject.c
+++ b/Objects/weakrefobject.c
@@ -1,24 +1,58 @@
#include "Python.h"
+#include "pycore_critical_section.h"
+#include "pycore_lock.h"
#include "pycore_modsupport.h" // _PyArg_NoKwnames()
#include "pycore_object.h" // _PyObject_GET_WEAKREFS_LISTPTR()
#include "pycore_pyerrors.h" // _PyErr_ChainExceptions1()
+#include "pycore_pystate.h"
#include "pycore_weakref.h" // _PyWeakref_GET_REF()
+#ifdef Py_GIL_DISABLED
+/*
+ * Thread-safety for free-threaded builds
+ * ======================================
+ *
+ * In free-threaded builds we need to protect mutable state of:
+ *
+ * - The weakref (wr_object, hash, wr_callback)
+ * - The referenced object (its head-of-list pointer)
+ * - The linked list of weakrefs
+ *
+ * For now we've chosen to address this in a straightforward way:
+ *
+ * - The weakref's hash is protected using the weakref's per-object lock.
+ * - The other mutable is protected by a striped lock keyed on the referenced
+ * object's address.
+ * - The striped lock must be locked using `_Py_LOCK_DONT_DETACH` in order to
+ * support atomic deletion from WeakValueDictionaries. As a result, we must
+ * be careful not to perform any operations that could suspend while the
+ * lock is held.
+ *
+ * Since the world is stopped when the GC runs, it is free to clear weakrefs
+ * without acquiring any locks.
+ */
+#endif
#define GET_WEAKREFS_LISTPTR(o) \
((PyWeakReference **) _PyObject_GET_WEAKREFS_LISTPTR(o))
Py_ssize_t
-_PyWeakref_GetWeakrefCount(PyWeakReference *head)
+_PyWeakref_GetWeakrefCount(PyObject *obj)
{
- Py_ssize_t count = 0;
+ if (!_PyType_SUPPORTS_WEAKREFS(Py_TYPE(obj))) {
+ return 0;
+ }
+ LOCK_WEAKREFS(obj);
+ Py_ssize_t count = 0;
+ PyWeakReference *head = *GET_WEAKREFS_LISTPTR(obj);
while (head != NULL) {
++count;
head = head->wr_next;
}
+ UNLOCK_WEAKREFS(obj);
return count;
}
@@ -33,54 +67,55 @@ init_weakref(PyWeakReference *self, PyObject *ob, PyObject *callback)
self->wr_next = NULL;
self->wr_callback = Py_XNewRef(callback);
self->vectorcall = weakref_vectorcall;
+#ifdef Py_GIL_DISABLED
+ self->weakrefs_lock = &WEAKREF_LIST_LOCK(ob);
+ _PyObject_SetMaybeWeakref(ob);
+ _PyObject_SetMaybeWeakref((PyObject *)self);
+#endif
}
-static PyWeakReference *
-new_weakref(PyObject *ob, PyObject *callback)
-{
- PyWeakReference *result;
-
- result = PyObject_GC_New(PyWeakReference, &_PyWeakref_RefType);
- if (result) {
- init_weakref(result, ob, callback);
- PyObject_GC_Track(result);
- }
- return result;
-}
-
-
-/* This function clears the passed-in reference and removes it from the
- * list of weak references for the referent. This is the only code that
- * removes an item from the doubly-linked list of weak references for an
- * object; it is also responsible for clearing the callback slot.
- */
+// Clear the weakref and steal its callback into `callback`, if provided.
static void
-clear_weakref(PyWeakReference *self)
+clear_weakref_lock_held(PyWeakReference *self, PyObject **callback)
{
- PyObject *callback = self->wr_callback;
-
if (self->wr_object != Py_None) {
PyWeakReference **list = GET_WEAKREFS_LISTPTR(self->wr_object);
-
- if (*list == self)
- /* If 'self' is the end of the list (and thus self->wr_next == NULL)
- then the weakref list itself (and thus the value of *list) will
- end up being set to NULL. */
- *list = self->wr_next;
- self->wr_object = Py_None;
- if (self->wr_prev != NULL)
+ if (*list == self) {
+ /* If 'self' is the end of the list (and thus self->wr_next ==
+ NULL) then the weakref list itself (and thus the value of *list)
+ will end up being set to NULL. */
+ FT_ATOMIC_STORE_PTR(*list, self->wr_next);
+ }
+ FT_ATOMIC_STORE_PTR(self->wr_object, Py_None);
+ if (self->wr_prev != NULL) {
self->wr_prev->wr_next = self->wr_next;
- if (self->wr_next != NULL)
+ }
+ if (self->wr_next != NULL) {
self->wr_next->wr_prev = self->wr_prev;
+ }
self->wr_prev = NULL;
self->wr_next = NULL;
}
if (callback != NULL) {
- Py_DECREF(callback);
+ *callback = self->wr_callback;
self->wr_callback = NULL;
}
}
+// Clear the weakref and its callback
+static void
+clear_weakref(PyWeakReference *self)
+{
+ PyObject *callback = NULL;
+ // self->wr_object may be Py_None if the GC cleared the weakref, so lock
+ // using the pointer in the weakref.
+ LOCK_WEAKREFS_FOR_WR(self);
+ clear_weakref_lock_held(self, &callback);
+ UNLOCK_WEAKREFS_FOR_WR(self);
+ Py_XDECREF(callback);
+}
+
+
/* Cyclic gc uses this to *just* clear the passed-in reference, leaving
* the callback intact and uncalled. It must be possible to call self's
* tp_dealloc() after calling this, so self has to be left in a sane enough
@@ -95,15 +130,9 @@ clear_weakref(PyWeakReference *self)
void
_PyWeakref_ClearRef(PyWeakReference *self)
{
- PyObject *callback;
-
assert(self != NULL);
assert(PyWeakref_Check(self));
- /* Preserve and restore the callback around clear_weakref. */
- callback = self->wr_callback;
- self->wr_callback = NULL;
- clear_weakref(self);
- self->wr_callback = callback;
+ clear_weakref_lock_held(self, NULL);
}
static void
@@ -126,7 +155,11 @@ gc_traverse(PyWeakReference *self, visitproc visit, void *arg)
static int
gc_clear(PyWeakReference *self)
{
- clear_weakref(self);
+ PyObject *callback;
+ // The world is stopped during GC in free-threaded builds. It's safe to
+ // call this without holding the lock.
+ clear_weakref_lock_held(self, &callback);
+ Py_XDECREF(callback);
return 0;
}
@@ -150,7 +183,7 @@ weakref_vectorcall(PyObject *self, PyObject *const *args,
}
static Py_hash_t
-weakref_hash(PyWeakReference *self)
+weakref_hash_lock_held(PyWeakReference *self)
{
if (self->hash != -1)
return self->hash;
@@ -164,6 +197,15 @@ weakref_hash(PyWeakReference *self)
return self->hash;
}
+static Py_hash_t
+weakref_hash(PyWeakReference *self)
+{
+ Py_hash_t hash;
+ Py_BEGIN_CRITICAL_SECTION(self);
+ hash = weakref_hash_lock_held(self);
+ Py_END_CRITICAL_SECTION();
+ return hash;
+}
static PyObject *
weakref_repr(PyObject *self)
@@ -276,6 +318,128 @@ insert_head(PyWeakReference *newref, PyWeakReference **list)
*list = newref;
}
+/* See if we can reuse either the basic ref or proxy in list instead of
+ * creating a new weakref
+ */
+static PyWeakReference *
+try_reuse_basic_ref(PyWeakReference *list, PyTypeObject *type,
+ PyObject *callback)
+{
+ if (callback != NULL) {
+ return NULL;
+ }
+
+ PyWeakReference *ref, *proxy;
+ get_basic_refs(list, &ref, &proxy);
+
+ PyWeakReference *cand = NULL;
+ if (type == &_PyWeakref_RefType) {
+ cand = ref;
+ }
+ if ((type == &_PyWeakref_ProxyType) ||
+ (type == &_PyWeakref_CallableProxyType)) {
+ cand = proxy;
+ }
+
+ if (cand != NULL && _Py_TryIncref((PyObject *) cand)) {
+ return cand;
+ }
+ return NULL;
+}
+
+static int
+is_basic_ref(PyWeakReference *ref)
+{
+ return (ref->wr_callback == NULL) && PyWeakref_CheckRefExact(ref);
+}
+
+static int
+is_basic_proxy(PyWeakReference *proxy)
+{
+ return (proxy->wr_callback == NULL) && PyWeakref_CheckProxy(proxy);
+}
+
+static int
+is_basic_ref_or_proxy(PyWeakReference *wr)
+{
+ return is_basic_ref(wr) || is_basic_proxy(wr);
+}
+
+/* Insert `newref` in the appropriate position in `list` */
+static void
+insert_weakref(PyWeakReference *newref, PyWeakReference **list)
+{
+ PyWeakReference *ref, *proxy;
+ get_basic_refs(*list, &ref, &proxy);
+
+ PyWeakReference *prev;
+ if (is_basic_ref(newref)) {
+ prev = NULL;
+ }
+ else if (is_basic_proxy(newref)) {
+ prev = ref;
+ }
+ else {
+ prev = (proxy == NULL) ? ref : proxy;
+ }
+
+ if (prev == NULL) {
+ insert_head(newref, list);
+ }
+ else {
+ insert_after(newref, prev);
+ }
+}
+
+static PyWeakReference *
+allocate_weakref(PyTypeObject *type, PyObject *obj, PyObject *callback)
+{
+ PyWeakReference *newref = (PyWeakReference *) type->tp_alloc(type, 0);
+ if (newref == NULL) {
+ return NULL;
+ }
+ init_weakref(newref, obj, callback);
+ return newref;
+}
+
+static PyWeakReference *
+get_or_create_weakref(PyTypeObject *type, PyObject *obj, PyObject *callback)
+{
+ if (!_PyType_SUPPORTS_WEAKREFS(Py_TYPE(obj))) {
+ PyErr_Format(PyExc_TypeError,
+ "cannot create weak reference to '%s' object",
+ Py_TYPE(obj)->tp_name);
+ return NULL;
+ }
+ if (callback == Py_None)
+ callback = NULL;
+
+ PyWeakReference **list = GET_WEAKREFS_LISTPTR(obj);
+ if ((type == &_PyWeakref_RefType) ||
+ (type == &_PyWeakref_ProxyType) ||
+ (type == &_PyWeakref_CallableProxyType))
+ {
+ LOCK_WEAKREFS(obj);
+ PyWeakReference *basic_ref = try_reuse_basic_ref(*list, type, callback);
+ if (basic_ref != NULL) {
+ UNLOCK_WEAKREFS(obj);
+ return basic_ref;
+ }
+ PyWeakReference *newref = allocate_weakref(type, obj, callback);
+        if (newref != NULL) { insert_weakref(newref, list); }
+ UNLOCK_WEAKREFS(obj);
+ return newref;
+ }
+ else {
+ // We may not be able to safely allocate inside the lock
+ PyWeakReference *newref = allocate_weakref(type, obj, callback);
+ LOCK_WEAKREFS(obj);
+        if (newref != NULL) { insert_weakref(newref, list); }
+ UNLOCK_WEAKREFS(obj);
+ return newref;
+ }
+}
+
static int
parse_weakref_init_args(const char *funcname, PyObject *args, PyObject *kwargs,
PyObject **obp, PyObject **callbackp)
@@ -286,54 +450,11 @@ parse_weakref_init_args(const char *funcname, PyObject *args, PyObject *kwargs,
static PyObject *
weakref___new__(PyTypeObject *type, PyObject *args, PyObject *kwargs)
{
- PyWeakReference *self = NULL;
PyObject *ob, *callback = NULL;
-
if (parse_weakref_init_args("__new__", args, kwargs, &ob, &callback)) {
- PyWeakReference *ref, *proxy;
- PyWeakReference **list;
-
- if (!_PyType_SUPPORTS_WEAKREFS(Py_TYPE(ob))) {
- PyErr_Format(PyExc_TypeError,
- "cannot create weak reference to '%s' object",
- Py_TYPE(ob)->tp_name);
- return NULL;
- }
- if (callback == Py_None)
- callback = NULL;
- list = GET_WEAKREFS_LISTPTR(ob);
- get_basic_refs(*list, &ref, &proxy);
- if (callback == NULL && type == &_PyWeakref_RefType) {
- if (ref != NULL) {
- /* We can re-use an existing reference. */
- return Py_NewRef(ref);
- }
- }
- /* We have to create a new reference. */
- /* Note: the tp_alloc() can trigger cyclic GC, so the weakref
- list on ob can be mutated. This means that the ref and
- proxy pointers we got back earlier may have been collected,
- so we need to compute these values again before we use
- them. */
- self = (PyWeakReference *) (type->tp_alloc(type, 0));
- if (self != NULL) {
- init_weakref(self, ob, callback);
- if (callback == NULL && type == &_PyWeakref_RefType) {
- insert_head(self, list);
- }
- else {
- PyWeakReference *prev;
-
- get_basic_refs(*list, &ref, &proxy);
- prev = (proxy == NULL) ? ref : proxy;
- if (prev == NULL)
- insert_head(self, list);
- else
- insert_after(self, prev);
- }
- }
+ return (PyObject *)get_or_create_weakref(type, ob, callback);
}
- return (PyObject *)self;
+ return NULL;
}
static int
@@ -562,8 +683,6 @@ static void
proxy_dealloc(PyWeakReference *self)
{
PyObject_GC_UnTrack(self);
- if (self->wr_callback != NULL)
- PyObject_GC_UnTrack((PyObject *)self);
clear_weakref(self);
PyObject_GC_Del(self);
}
@@ -784,104 +903,21 @@ _PyWeakref_CallableProxyType = {
proxy_iternext, /* tp_iternext */
};
-
-
PyObject *
PyWeakref_NewRef(PyObject *ob, PyObject *callback)
{
- PyWeakReference *result = NULL;
- PyWeakReference **list;
- PyWeakReference *ref, *proxy;
-
- if (!_PyType_SUPPORTS_WEAKREFS(Py_TYPE(ob))) {
- PyErr_Format(PyExc_TypeError,
- "cannot create weak reference to '%s' object",
- Py_TYPE(ob)->tp_name);
- return NULL;
- }
- list = GET_WEAKREFS_LISTPTR(ob);
- get_basic_refs(*list, &ref, &proxy);
- if (callback == Py_None)
- callback = NULL;
- if (callback == NULL)
- /* return existing weak reference if it exists */
- result = ref;
- if (result != NULL)
- Py_INCREF(result);
- else {
- /* We do not need to recompute ref/proxy; new_weakref() cannot
- trigger GC.
- */
- result = new_weakref(ob, callback);
- if (result != NULL) {
- if (callback == NULL) {
- assert(ref == NULL);
- insert_head(result, list);
- }
- else {
- PyWeakReference *prev;
-
- prev = (proxy == NULL) ? ref : proxy;
- if (prev == NULL)
- insert_head(result, list);
- else
- insert_after(result, prev);
- }
- }
- }
- return (PyObject *) result;
+ return (PyObject *)get_or_create_weakref(&_PyWeakref_RefType, ob,
+ callback);
}
-
PyObject *
PyWeakref_NewProxy(PyObject *ob, PyObject *callback)
{
- PyWeakReference *result = NULL;
- PyWeakReference **list;
- PyWeakReference *ref, *proxy;
-
- if (!_PyType_SUPPORTS_WEAKREFS(Py_TYPE(ob))) {
- PyErr_Format(PyExc_TypeError,
- "cannot create weak reference to '%s' object",
- Py_TYPE(ob)->tp_name);
- return NULL;
- }
- list = GET_WEAKREFS_LISTPTR(ob);
- get_basic_refs(*list, &ref, &proxy);
- if (callback == Py_None)
- callback = NULL;
- if (callback == NULL)
- /* attempt to return an existing weak reference if it exists */
- result = proxy;
- if (result != NULL)
- Py_INCREF(result);
- else {
- /* We do not need to recompute ref/proxy; new_weakref cannot
- trigger GC.
- */
- result = new_weakref(ob, callback);
- if (result != NULL) {
- PyWeakReference *prev;
-
- if (PyCallable_Check(ob)) {
- Py_SET_TYPE(result, &_PyWeakref_CallableProxyType);
- }
- else {
- Py_SET_TYPE(result, &_PyWeakref_ProxyType);
- }
- if (callback == NULL) {
- prev = ref;
- }
- else
- prev = (proxy == NULL) ? ref : proxy;
-
- if (prev == NULL)
- insert_head(result, list);
- else
- insert_after(result, prev);
- }
+ PyTypeObject *type = &_PyWeakref_ProxyType;
+ if (PyCallable_Check(ob)) {
+ type = &_PyWeakref_CallableProxyType;
}
- return (PyObject *) result;
+ return (PyObject *)get_or_create_weakref(type, ob, callback);
}
@@ -950,68 +986,73 @@ PyObject_ClearWeakRefs(PyObject *object)
PyErr_BadInternalCall();
return;
}
+
list = GET_WEAKREFS_LISTPTR(object);
- /* Remove the callback-less basic and proxy references */
- if (*list != NULL && (*list)->wr_callback == NULL) {
- clear_weakref(*list);
- if (*list != NULL && (*list)->wr_callback == NULL)
- clear_weakref(*list);
+    if (FT_ATOMIC_LOAD_PTR(*list) == NULL) {
+ // Fast path for the common case
+ return;
}
- if (*list != NULL) {
- PyWeakReference *current = *list;
- Py_ssize_t count = _PyWeakref_GetWeakrefCount(current);
- PyObject *exc = PyErr_GetRaisedException();
-
- if (count == 1) {
- PyObject *callback = current->wr_callback;
-
- current->wr_callback = NULL;
- clear_weakref(current);
- if (callback != NULL) {
- if (Py_REFCNT((PyObject *)current) > 0) {
- handle_callback(current, callback);
- }
- Py_DECREF(callback);
- }
+
+ /* Remove the callback-less basic and proxy references, which always appear
+ at the head of the list.
+ */
+ for (int done = 0; !done;) {
+ LOCK_WEAKREFS(object);
+ if (*list != NULL && is_basic_ref_or_proxy(*list)) {
+ PyObject *callback;
+ clear_weakref_lock_held(*list, &callback);
+ assert(callback == NULL);
}
- else {
- PyObject *tuple;
- Py_ssize_t i = 0;
-
- tuple = PyTuple_New(count * 2);
- if (tuple == NULL) {
- _PyErr_ChainExceptions1(exc);
- return;
- }
+ done = (*list == NULL) || !is_basic_ref_or_proxy(*list);
+ UNLOCK_WEAKREFS(object);
+ }
- for (i = 0; i < count; ++i) {
- PyWeakReference *next = current->wr_next;
-
- if (Py_REFCNT((PyObject *)current) > 0) {
- PyTuple_SET_ITEM(tuple, i * 2, Py_NewRef(current));
- PyTuple_SET_ITEM(tuple, i * 2 + 1, current->wr_callback);
- }
- else {
- Py_DECREF(current->wr_callback);
- }
- current->wr_callback = NULL;
- clear_weakref(current);
- current = next;
- }
- for (i = 0; i < count; ++i) {
- PyObject *callback = PyTuple_GET_ITEM(tuple, i * 2 + 1);
-
- /* The tuple may have slots left to NULL */
- if (callback != NULL) {
- PyObject *item = PyTuple_GET_ITEM(tuple, i * 2);
- handle_callback((PyWeakReference *)item, callback);
- }
+ /* Deal with non-canonical (subtypes or refs with callbacks) references. */
+ Py_ssize_t num_weakrefs = _PyWeakref_GetWeakrefCount(object);
+ if (num_weakrefs == 0) {
+ return;
+ }
+
+ PyObject *exc = PyErr_GetRaisedException();
+ PyObject *tuple = PyTuple_New(num_weakrefs * 2);
+ if (tuple == NULL) {
+ _PyErr_ChainExceptions1(exc);
+ return;
+ }
+
+ Py_ssize_t num_items = 0;
+ for (int done = 0; !done;) {
+ PyObject *callback = NULL;
+ LOCK_WEAKREFS(object);
+ PyWeakReference *cur = *list;
+ if (cur != NULL) {
+ clear_weakref_lock_held(cur, &callback);
+ if (_Py_TryIncref((PyObject *) cur)) {
+ assert(num_items / 2 < num_weakrefs);
+ PyTuple_SET_ITEM(tuple, num_items, (PyObject *) cur);
+ PyTuple_SET_ITEM(tuple, num_items + 1, callback);
+ num_items += 2;
+ callback = NULL;
}
- Py_DECREF(tuple);
}
- assert(!PyErr_Occurred());
- PyErr_SetRaisedException(exc);
+ done = (*list == NULL);
+ UNLOCK_WEAKREFS(object);
+
+ Py_XDECREF(callback);
}
+
+ for (Py_ssize_t i = 0; i < num_items; i += 2) {
+ PyObject *callback = PyTuple_GET_ITEM(tuple, i + 1);
+ if (callback != NULL) {
+ PyObject *weakref = PyTuple_GET_ITEM(tuple, i);
+ handle_callback((PyWeakReference *)weakref, callback);
+ }
+ }
+
+ Py_DECREF(tuple);
+
+ assert(!PyErr_Occurred());
+ PyErr_SetRaisedException(exc);
}
/* This function is called by _PyStaticType_Dealloc() to clear weak references.
@@ -1025,10 +1066,30 @@ _PyStaticType_ClearWeakRefs(PyInterpreterState *interp, PyTypeObject *type)
{
static_builtin_state *state = _PyStaticType_GetState(interp, type);
PyObject **list = _PyStaticType_GET_WEAKREFS_LISTPTR(state);
- while (*list != NULL) {
- /* Note that clear_weakref() pops the first ref off the type's
- weaklist before clearing its wr_object and wr_callback.
- That is how we're able to loop over the list. */
- clear_weakref((PyWeakReference *)*list);
+ // This is safe to do without holding the lock in free-threaded builds;
+ // there is only one thread running and no new threads can be created.
+ while (*list) {
+ _PyWeakref_ClearRef((PyWeakReference *)*list);
+ }
+}
+
+void
+_PyWeakref_ClearWeakRefsExceptCallbacks(PyObject *obj)
+{
+ /* Modeled after GET_WEAKREFS_LISTPTR().
+
+ This is never triggered for static types so we can avoid the
+ (slightly) more costly _PyObject_GET_WEAKREFS_LISTPTR(). */
+ PyWeakReference **list = _PyObject_GET_WEAKREFS_LISTPTR_FROM_OFFSET(obj);
+ LOCK_WEAKREFS(obj);
+ while (*list) {
+ _PyWeakref_ClearRef(*list);
}
+ UNLOCK_WEAKREFS(obj);
+}
+
+int
+_PyWeakref_IsDead(PyObject *weakref)
+{
+ return _PyWeakref_IS_DEAD(weakref);
}
diff --git a/PC/_wmimodule.cpp b/PC/_wmimodule.cpp
index 5ab6dcb032550b..22ed05276e6f07 100644
--- a/PC/_wmimodule.cpp
+++ b/PC/_wmimodule.cpp
@@ -279,9 +279,11 @@ _wmi_exec_query_impl(PyObject *module, PyObject *query)
// a timeout. The initEvent will be set after COM initialization, it will
// take a longer time when first initialized. The connectEvent will be set
// after connected to WMI.
- err = wait_event(data.initEvent, 1000);
if (!err) {
- err = wait_event(data.connectEvent, 100);
+ err = wait_event(data.initEvent, 1000);
+ if (!err) {
+ err = wait_event(data.connectEvent, 100);
+ }
}
while (!err) {
@@ -305,28 +307,33 @@ _wmi_exec_query_impl(PyObject *module, PyObject *query)
CloseHandle(data.readPipe);
}
- // Allow the thread some time to clean up
- switch (WaitForSingleObject(hThread, 100)) {
- case WAIT_OBJECT_0:
- // Thread ended cleanly
- if (!GetExitCodeThread(hThread, (LPDWORD)&err)) {
- err = GetLastError();
- }
- break;
- case WAIT_TIMEOUT:
- // Probably stuck - there's not much we can do, unfortunately
- if (err == 0 || err == ERROR_BROKEN_PIPE) {
- err = WAIT_TIMEOUT;
+ if (hThread) {
+ // Allow the thread some time to clean up
+ int thread_err;
+ switch (WaitForSingleObject(hThread, 100)) {
+ case WAIT_OBJECT_0:
+ // Thread ended cleanly
+ if (!GetExitCodeThread(hThread, (LPDWORD)&thread_err)) {
+ thread_err = GetLastError();
+ }
+ break;
+ case WAIT_TIMEOUT:
+ // Probably stuck - there's not much we can do, unfortunately
+ thread_err = WAIT_TIMEOUT;
+ break;
+ default:
+ thread_err = GetLastError();
+ break;
}
- break;
- default:
+ // An error on our side is more likely to be relevant than one from
+ // the thread, but if we don't have one on our side we'll take theirs.
if (err == 0 || err == ERROR_BROKEN_PIPE) {
- err = GetLastError();
+ err = thread_err;
}
- break;
+
+ CloseHandle(hThread);
}
- CloseHandle(hThread);
CloseHandle(data.initEvent);
CloseHandle(data.connectEvent);
hThread = NULL;
diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c
index f66a8c07c6f872..7af3ac9c5158d6 100644
--- a/Python/bltinmodule.c
+++ b/Python/bltinmodule.c
@@ -475,7 +475,7 @@ builtin_breakpoint(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyOb
}
PyDoc_STRVAR(breakpoint_doc,
-"breakpoint(*args, **kws)\n\
+"breakpoint($module, /, *args, **kws)\n\
--\n\
\n\
Call sys.breakpointhook(*args, **kws). sys.breakpointhook() must accept\n\
@@ -1703,16 +1703,16 @@ anext as builtin_anext
default: object = NULL
/
-async anext(aiterator[, default])
+Return the next item from the async iterator.
-Return the next item from the async iterator. If default is given and the async
-iterator is exhausted, it is returned instead of raising StopAsyncIteration.
+If default is given and the async iterator is exhausted,
+it is returned instead of raising StopAsyncIteration.
[clinic start generated code]*/
static PyObject *
builtin_anext_impl(PyObject *module, PyObject *aiterator,
PyObject *default_value)
-/*[clinic end generated code: output=f02c060c163a81fa input=8f63f4f78590bb4c]*/
+/*[clinic end generated code: output=f02c060c163a81fa input=2900e4a370d39550]*/
{
PyTypeObject *t;
PyObject *awaitable;
diff --git a/Python/ceval.c b/Python/ceval.c
index f718a77fb029cb..c0783f7377a8ee 100644
--- a/Python/ceval.c
+++ b/Python/ceval.c
@@ -995,6 +995,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int
; // dummy statement after a label, before a declaration
uint16_t uopcode;
#ifdef Py_STATS
+ int lastuop = 0;
uint64_t trace_uop_execution_counter = 0;
#endif
@@ -1018,6 +1019,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int
next_uop++;
OPT_STAT_INC(uops_executed);
UOP_STAT_INC(uopcode, execution_count);
+ UOP_PAIR_INC(uopcode, lastuop);
#ifdef Py_STATS
trace_uop_execution_counter++;
#endif
diff --git a/Python/clinic/bltinmodule.c.h b/Python/clinic/bltinmodule.c.h
index 3898f987cd61ea..3f005bcbfb6a1a 100644
--- a/Python/clinic/bltinmodule.c.h
+++ b/Python/clinic/bltinmodule.c.h
@@ -693,10 +693,10 @@ PyDoc_STRVAR(builtin_anext__doc__,
"anext($module, aiterator, default=, /)\n"
"--\n"
"\n"
-"async anext(aiterator[, default])\n"
+"Return the next item from the async iterator.\n"
"\n"
-"Return the next item from the async iterator. If default is given and the async\n"
-"iterator is exhausted, it is returned instead of raising StopAsyncIteration.");
+"If default is given and the async iterator is exhausted,\n"
+"it is returned instead of raising StopAsyncIteration.");
#define BUILTIN_ANEXT_METHODDEF \
{"anext", _PyCFunction_CAST(builtin_anext), METH_FASTCALL, builtin_anext__doc__},
@@ -1193,4 +1193,4 @@ builtin_issubclass(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
exit:
return return_value;
}
-/*[clinic end generated code: output=643a8d5f900e0c36 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=6d15edfc194b2c08 input=a9049054013a1b77]*/
diff --git a/Python/clinic/sysmodule.c.h b/Python/clinic/sysmodule.c.h
index 13f4ea81eb8984..31f66e807a8547 100644
--- a/Python/clinic/sysmodule.c.h
+++ b/Python/clinic/sysmodule.c.h
@@ -323,8 +323,20 @@ sys__is_interned(PyObject *module, PyObject *arg)
return return_value;
}
+PyDoc_STRVAR(sys_settrace__doc__,
+"settrace($module, function, /)\n"
+"--\n"
+"\n"
+"Set the global debug tracing function.\n"
+"\n"
+"It will be called on each function call. See the debugger chapter\n"
+"in the library manual.");
+
+#define SYS_SETTRACE_METHODDEF \
+ {"settrace", (PyCFunction)sys_settrace, METH_O, sys_settrace__doc__},
+
PyDoc_STRVAR(sys__settraceallthreads__doc__,
-"_settraceallthreads($module, arg, /)\n"
+"_settraceallthreads($module, function, /)\n"
"--\n"
"\n"
"Set the global debug tracing function in all running threads belonging to the current interpreter.\n"
@@ -355,14 +367,26 @@ sys_gettrace(PyObject *module, PyObject *Py_UNUSED(ignored))
return sys_gettrace_impl(module);
}
+PyDoc_STRVAR(sys_setprofile__doc__,
+"setprofile($module, function, /)\n"
+"--\n"
+"\n"
+"Set the profiling function.\n"
+"\n"
+"It will be called on each function call and return. See the profiler\n"
+"chapter in the library manual.");
+
+#define SYS_SETPROFILE_METHODDEF \
+ {"setprofile", (PyCFunction)sys_setprofile, METH_O, sys_setprofile__doc__},
+
PyDoc_STRVAR(sys__setprofileallthreads__doc__,
-"_setprofileallthreads($module, arg, /)\n"
+"_setprofileallthreads($module, function, /)\n"
"--\n"
"\n"
"Set the profiling function in all running threads belonging to the current interpreter.\n"
"\n"
-"It will be called on each function call and return. See the profiler chapter\n"
-"in the library manual.");
+"It will be called on each function call and return. See the profiler\n"
+"chapter in the library manual.");
#define SYS__SETPROFILEALLTHREADS_METHODDEF \
{"_setprofileallthreads", (PyCFunction)sys__setprofileallthreads, METH_O, sys__setprofileallthreads__doc__},
@@ -1504,4 +1528,4 @@ sys__get_cpu_count_config(PyObject *module, PyObject *Py_UNUSED(ignored))
#ifndef SYS_GETANDROIDAPILEVEL_METHODDEF
#define SYS_GETANDROIDAPILEVEL_METHODDEF
#endif /* !defined(SYS_GETANDROIDAPILEVEL_METHODDEF) */
-/*[clinic end generated code: output=b8b1c53e04c3b20c input=a9049054013a1b77]*/
+/*[clinic end generated code: output=518424ee03e353b0 input=a9049054013a1b77]*/
diff --git a/Python/clinic/traceback.c.h b/Python/clinic/traceback.c.h
index aee08d6ad97047..fe53a2786d1ad6 100644
--- a/Python/clinic/traceback.c.h
+++ b/Python/clinic/traceback.c.h
@@ -9,7 +9,7 @@ preserve
#include "pycore_modsupport.h" // _PyArg_UnpackKeywords()
PyDoc_STRVAR(tb_new__doc__,
-"TracebackType(tb_next, tb_frame, tb_lasti, tb_lineno)\n"
+"traceback(tb_next, tb_frame, tb_lasti, tb_lineno)\n"
"--\n"
"\n"
"Create a new traceback object.");
@@ -43,7 +43,7 @@ tb_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
static const char * const _keywords[] = {"tb_next", "tb_frame", "tb_lasti", "tb_lineno", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
- .fname = "TracebackType",
+ .fname = "traceback",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
@@ -61,7 +61,7 @@ tb_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
}
tb_next = fastargs[0];
if (!PyObject_TypeCheck(fastargs[1], &PyFrame_Type)) {
- _PyArg_BadArgument("TracebackType", "argument 'tb_frame'", (&PyFrame_Type)->tp_name, fastargs[1]);
+ _PyArg_BadArgument("traceback", "argument 'tb_frame'", (&PyFrame_Type)->tp_name, fastargs[1]);
goto exit;
}
tb_frame = (PyFrameObject *)fastargs[1];
@@ -78,4 +78,4 @@ tb_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
exit:
return return_value;
}
-/*[clinic end generated code: output=4e2f6b935841b09c input=a9049054013a1b77]*/
+/*[clinic end generated code: output=916a759875507c5a input=a9049054013a1b77]*/
diff --git a/Python/crossinterp.c b/Python/crossinterp.c
index 16efe9c3958f87..367e29d40d895a 100644
--- a/Python/crossinterp.c
+++ b/Python/crossinterp.c
@@ -468,7 +468,7 @@ _release_xid_data(_PyCrossInterpreterData *data, int rawfree)
/***********************/
static int
-_excinfo_init_type(struct _excinfo_type *info, PyObject *exc)
+_excinfo_init_type_from_exception(struct _excinfo_type *info, PyObject *exc)
{
/* Note that this copies directly rather than into an intermediate
struct and does not clear on error. If we need that then we
@@ -504,7 +504,7 @@ _excinfo_init_type(struct _excinfo_type *info, PyObject *exc)
}
info->qualname = _copy_string_obj_raw(strobj, NULL);
Py_DECREF(strobj);
- if (info->name == NULL) {
+ if (info->qualname == NULL) {
return -1;
}
@@ -515,10 +515,51 @@ _excinfo_init_type(struct _excinfo_type *info, PyObject *exc)
}
info->module = _copy_string_obj_raw(strobj, NULL);
Py_DECREF(strobj);
+ if (info->module == NULL) {
+ return -1;
+ }
+
+ return 0;
+}
+
+static int
+_excinfo_init_type_from_object(struct _excinfo_type *info, PyObject *exctype)
+{
+ PyObject *strobj = NULL;
+
+ // __name__
+ strobj = PyObject_GetAttrString(exctype, "__name__");
+ if (strobj == NULL) {
+ return -1;
+ }
+ info->name = _copy_string_obj_raw(strobj, NULL);
+ Py_DECREF(strobj);
if (info->name == NULL) {
return -1;
}
+ // __qualname__
+ strobj = PyObject_GetAttrString(exctype, "__qualname__");
+ if (strobj == NULL) {
+ return -1;
+ }
+ info->qualname = _copy_string_obj_raw(strobj, NULL);
+ Py_DECREF(strobj);
+ if (info->qualname == NULL) {
+ return -1;
+ }
+
+ // __module__
+ strobj = PyObject_GetAttrString(exctype, "__module__");
+ if (strobj == NULL) {
+ return -1;
+ }
+ info->module = _copy_string_obj_raw(strobj, NULL);
+ Py_DECREF(strobj);
+ if (info->module == NULL) {
+ return -1;
+ }
+
return 0;
}
@@ -584,7 +625,7 @@ _PyXI_excinfo_Clear(_PyXI_excinfo *info)
*info = (_PyXI_excinfo){{NULL}};
}
-static PyObject *
+PyObject *
_PyXI_excinfo_format(_PyXI_excinfo *info)
{
const char *module, *qualname;
@@ -627,7 +668,7 @@ _PyXI_excinfo_InitFromException(_PyXI_excinfo *info, PyObject *exc)
}
const char *failure = NULL;
- if (_excinfo_init_type(&info->type, exc) < 0) {
+ if (_excinfo_init_type_from_exception(&info->type, exc) < 0) {
failure = "error while initializing exception type snapshot";
goto error;
}
@@ -672,6 +713,57 @@ _PyXI_excinfo_InitFromException(_PyXI_excinfo *info, PyObject *exc)
return failure;
}
+static const char *
+_PyXI_excinfo_InitFromObject(_PyXI_excinfo *info, PyObject *obj)
+{
+ const char *failure = NULL;
+
+ PyObject *exctype = PyObject_GetAttrString(obj, "type");
+ if (exctype == NULL) {
+ failure = "exception snapshot missing 'type' attribute";
+ goto error;
+ }
+ int res = _excinfo_init_type_from_object(&info->type, exctype);
+ Py_DECREF(exctype);
+ if (res < 0) {
+ failure = "error while initializing exception type snapshot";
+ goto error;
+ }
+
+ // Extract the exception message.
+ PyObject *msgobj = PyObject_GetAttrString(obj, "msg");
+ if (msgobj == NULL) {
+ failure = "exception snapshot missing 'msg' attribute";
+ goto error;
+ }
+ info->msg = _copy_string_obj_raw(msgobj, NULL);
+ Py_DECREF(msgobj);
+ if (info->msg == NULL) {
+ failure = "error while copying exception message";
+ goto error;
+ }
+
+ // Pickle a traceback.TracebackException.
+ PyObject *errdisplay = PyObject_GetAttrString(obj, "errdisplay");
+ if (errdisplay == NULL) {
+ failure = "exception snapshot missing 'errdisplay' attribute";
+ goto error;
+ }
+ info->errdisplay = _copy_string_obj_raw(errdisplay, NULL);
+ Py_DECREF(errdisplay);
+ if (info->errdisplay == NULL) {
+ failure = "error while copying exception error display";
+ goto error;
+ }
+
+ return NULL;
+
+error:
+ assert(failure != NULL);
+ _PyXI_excinfo_Clear(info);
+ return failure;
+}
+
static void
_PyXI_excinfo_Apply(_PyXI_excinfo *info, PyObject *exctype)
{
@@ -825,6 +917,47 @@ _PyXI_excinfo_AsObject(_PyXI_excinfo *info)
}
+int
+_PyXI_InitExcInfo(_PyXI_excinfo *info, PyObject *exc)
+{
+ assert(!PyErr_Occurred());
+ if (exc == NULL || exc == Py_None) {
+ PyErr_SetString(PyExc_ValueError, "missing exc");
+ return -1;
+ }
+ const char *failure;
+ if (PyExceptionInstance_Check(exc) || PyExceptionClass_Check(exc)) {
+ failure = _PyXI_excinfo_InitFromException(info, exc);
+ }
+ else {
+ failure = _PyXI_excinfo_InitFromObject(info, exc);
+ }
+ if (failure != NULL) {
+ PyErr_SetString(PyExc_Exception, failure);
+ return -1;
+ }
+ return 0;
+}
+
+PyObject *
+_PyXI_FormatExcInfo(_PyXI_excinfo *info)
+{
+ return _PyXI_excinfo_format(info);
+}
+
+PyObject *
+_PyXI_ExcInfoAsObject(_PyXI_excinfo *info)
+{
+ return _PyXI_excinfo_AsObject(info);
+}
+
+void
+_PyXI_ClearExcInfo(_PyXI_excinfo *info)
+{
+ _PyXI_excinfo_Clear(info);
+}
+
+
/***************************/
/* short-term data sharing */
/***************************/
@@ -1682,3 +1815,104 @@ _PyXI_FiniTypes(PyInterpreterState *interp)
{
fini_exceptions(interp);
}
+
+
+/*************/
+/* other API */
+/*************/
+
+PyInterpreterState *
+_PyXI_NewInterpreter(PyInterpreterConfig *config, long *maybe_whence,
+ PyThreadState **p_tstate, PyThreadState **p_save_tstate)
+{
+ PyThreadState *save_tstate = PyThreadState_Swap(NULL);
+ assert(save_tstate != NULL);
+
+ PyThreadState *tstate;
+ PyStatus status = Py_NewInterpreterFromConfig(&tstate, config);
+ if (PyStatus_Exception(status)) {
+ // Since no new thread state was created, there is no exception
+ // to propagate; raise a fresh one after swapping back in the
+ // old thread state.
+ PyThreadState_Swap(save_tstate);
+ _PyErr_SetFromPyStatus(status);
+ PyObject *exc = PyErr_GetRaisedException();
+ PyErr_SetString(PyExc_InterpreterError,
+ "sub-interpreter creation failed");
+ _PyErr_ChainExceptions1(exc);
+ return NULL;
+ }
+ assert(tstate != NULL);
+ PyInterpreterState *interp = PyThreadState_GetInterpreter(tstate);
+
+ long whence = _PyInterpreterState_WHENCE_XI;
+ if (maybe_whence != NULL) {
+ whence = *maybe_whence;
+ }
+ _PyInterpreterState_SetWhence(interp, whence);
+
+ if (p_tstate != NULL) {
+ // We leave the new thread state as the current one.
+ *p_tstate = tstate;
+ }
+ else {
+ // Throw away the initial tstate.
+ PyThreadState_Clear(tstate);
+ PyThreadState_Swap(save_tstate);
+ PyThreadState_Delete(tstate);
+ save_tstate = NULL;
+ }
+ if (p_save_tstate != NULL) {
+ *p_save_tstate = save_tstate;
+ }
+ return interp;
+}
+
+void
+_PyXI_EndInterpreter(PyInterpreterState *interp,
+ PyThreadState *tstate, PyThreadState **p_save_tstate)
+{
+#ifndef NDEBUG
+ long whence = _PyInterpreterState_GetWhence(interp);
+#endif
+ assert(whence != _PyInterpreterState_WHENCE_RUNTIME);
+
+ if (!_PyInterpreterState_IsReady(interp)) {
+ assert(whence == _PyInterpreterState_WHENCE_UNKNOWN);
+ // PyInterpreterState_Clear() requires the GIL,
+        // which a not-ready interpreter does not have, so we don't clear it.
+ // That means there may be leaks here until clearing the
+ // interpreter is fixed.
+ PyInterpreterState_Delete(interp);
+ return;
+ }
+ assert(whence != _PyInterpreterState_WHENCE_UNKNOWN);
+
+ PyThreadState *save_tstate = NULL;
+ PyThreadState *cur_tstate = PyThreadState_GET();
+ if (tstate == NULL) {
+ if (PyThreadState_GetInterpreter(cur_tstate) == interp) {
+ tstate = cur_tstate;
+ }
+ else {
+ tstate = PyThreadState_New(interp);
+ _PyThreadState_SetWhence(tstate, _PyThreadState_WHENCE_INTERP);
+ assert(tstate != NULL);
+ save_tstate = PyThreadState_Swap(tstate);
+ }
+ }
+ else {
+ assert(PyThreadState_GetInterpreter(tstate) == interp);
+ if (tstate != cur_tstate) {
+ assert(PyThreadState_GetInterpreter(cur_tstate) != interp);
+ save_tstate = PyThreadState_Swap(tstate);
+ }
+ }
+
+ Py_EndInterpreter(tstate);
+
+ if (p_save_tstate != NULL) {
+ save_tstate = *p_save_tstate;
+ }
+ PyThreadState_Swap(save_tstate);
+}
diff --git a/Python/crossinterp_exceptions.h b/Python/crossinterp_exceptions.h
index 0f324bac48a2d8..6ecc10c7955fd8 100644
--- a/Python/crossinterp_exceptions.h
+++ b/Python/crossinterp_exceptions.h
@@ -6,9 +6,9 @@ static PyTypeObject _PyExc_InterpreterError = {
.tp_name = "interpreters.InterpreterError",
.tp_doc = PyDoc_STR("A cross-interpreter operation failed"),
.tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC,
- //.tp_traverse = ((PyTypeObject *)PyExc_BaseException)->tp_traverse,
- //.tp_clear = ((PyTypeObject *)PyExc_BaseException)->tp_clear,
- //.tp_base = (PyTypeObject *)PyExc_BaseException,
+ //.tp_traverse = ((PyTypeObject *)PyExc_Exception)->tp_traverse,
+ //.tp_clear = ((PyTypeObject *)PyExc_Exception)->tp_clear,
+ //.tp_base = (PyTypeObject *)PyExc_Exception,
};
PyObject *PyExc_InterpreterError = (PyObject *)&_PyExc_InterpreterError;
@@ -19,8 +19,8 @@ static PyTypeObject _PyExc_InterpreterNotFoundError = {
.tp_name = "interpreters.InterpreterNotFoundError",
.tp_doc = PyDoc_STR("An interpreter was not found"),
.tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC,
- //.tp_traverse = ((PyTypeObject *)PyExc_BaseException)->tp_traverse,
- //.tp_clear = ((PyTypeObject *)PyExc_BaseException)->tp_clear,
+ //.tp_traverse = ((PyTypeObject *)PyExc_Exception)->tp_traverse,
+ //.tp_clear = ((PyTypeObject *)PyExc_Exception)->tp_clear,
.tp_base = &_PyExc_InterpreterError,
};
PyObject *PyExc_InterpreterNotFoundError = (PyObject *)&_PyExc_InterpreterNotFoundError;
@@ -61,7 +61,7 @@ _get_not_shareable_error_type(PyInterpreterState *interp)
static int
init_exceptions(PyInterpreterState *interp)
{
- PyTypeObject *base = (PyTypeObject *)PyExc_BaseException;
+ PyTypeObject *base = (PyTypeObject *)PyExc_Exception;
// builtin static types
diff --git a/Python/gc_free_threading.c b/Python/gc_free_threading.c
index 7e4137a8e342b1..9cf0e989d0993f 100644
--- a/Python/gc_free_threading.c
+++ b/Python/gc_free_threading.c
@@ -159,6 +159,15 @@ gc_decref(PyObject *op)
op->ob_tid -= 1;
}
+static void
+disable_deferred_refcounting(PyObject *op)
+{
+ if (_PyObject_HasDeferredRefcount(op)) {
+ op->ob_gc_bits &= ~_PyGC_BITS_DEFERRED;
+ op->ob_ref_shared -= (1 << _Py_REF_SHARED_SHIFT);
+ }
+}
+
static Py_ssize_t
merge_refcount(PyObject *op, Py_ssize_t extra)
{
@@ -168,7 +177,7 @@ merge_refcount(PyObject *op, Py_ssize_t extra)
refcount += extra;
#ifdef Py_REF_DEBUG
- _Py_AddRefTotal(_PyInterpreterState_GET(), extra);
+ _Py_AddRefTotal(_PyThreadState_GET(), extra);
#endif
// No atomics necessary; all other threads in this interpreter are paused.
@@ -307,7 +316,7 @@ merge_queued_objects(_PyThreadStateImpl *tstate, struct collection_state *state)
// decref and deallocate the object once we start the world again.
op->ob_ref_shared += (1 << _Py_REF_SHARED_SHIFT);
#ifdef Py_REF_DEBUG
- _Py_IncRefTotal(_PyInterpreterState_GET());
+ _Py_IncRefTotal(_PyThreadState_GET());
#endif
worklist_push(&state->objs_to_decref, op);
}
@@ -375,9 +384,10 @@ update_refs(const mi_heap_t *heap, const mi_heap_area_t *area,
}
Py_ssize_t refcount = Py_REFCNT(op);
+ refcount -= _PyObject_HasDeferredRefcount(op);
_PyObject_ASSERT(op, refcount >= 0);
- if (refcount > 0) {
+ if (refcount > 0 && !_PyObject_HasDeferredRefcount(op)) {
// Untrack tuples and dicts as necessary in this pass, but not objects
// with zero refcount, which we will want to collect.
if (PyTuple_CheckExact(op)) {
@@ -466,6 +476,9 @@ mark_heap_visitor(const mi_heap_t *heap, const mi_heap_area_t *area,
return true;
}
+ _PyObject_ASSERT_WITH_MSG(op, gc_get_refs(op) >= 0,
+ "refcount is too small");
+
if (gc_is_unreachable(op) && gc_get_refs(op) != 0) {
// Object is reachable but currently marked as unreachable.
// Mark it as reachable and traverse its pointers to find
@@ -499,6 +512,10 @@ scan_heap_visitor(const mi_heap_t *heap, const mi_heap_area_t *area,
struct collection_state *state = (struct collection_state *)args;
if (gc_is_unreachable(op)) {
+ // Disable deferred refcounting for unreachable objects so that they
+ // are collected immediately after finalization.
+ disable_deferred_refcounting(op);
+
// Merge and add one to the refcount to prevent deallocation while we
// are holding on to it in a worklist.
merge_refcount(op, 1);
diff --git a/Python/import.c b/Python/import.c
index 6544a84d895d4a..b040c7d5c0f7f5 100644
--- a/Python/import.c
+++ b/Python/import.c
@@ -3696,9 +3696,16 @@ _imp__override_multi_interp_extensions_check_impl(PyObject *module,
"cannot be used in the main interpreter");
return NULL;
}
+#ifdef Py_GIL_DISABLED
+ PyErr_SetString(PyExc_RuntimeError,
+ "_imp._override_multi_interp_extensions_check() "
+ "cannot be used in the free-threaded build");
+ return NULL;
+#else
int oldvalue = OVERRIDE_MULTI_INTERP_EXTENSIONS_CHECK(interp);
OVERRIDE_MULTI_INTERP_EXTENSIONS_CHECK(interp) = override;
return PyLong_FromLong(oldvalue);
+#endif
}
#ifdef HAVE_DYNAMIC_LOADING
diff --git a/Python/instrumentation.c b/Python/instrumentation.c
index 0f60290865000c..3866144a19bf74 100644
--- a/Python/instrumentation.c
+++ b/Python/instrumentation.c
@@ -1197,7 +1197,7 @@ _Py_call_instrumentation_line(PyThreadState *tstate, _PyInterpreterFrame* frame,
/* Special case sys.settrace to avoid boxing the line number,
* only to immediately unbox it. */
if (tools & (1 << PY_MONITORING_SYS_TRACE_ID)) {
- if (tstate->c_tracefunc != NULL && line >= 0) {
+ if (tstate->c_tracefunc != NULL) {
PyFrameObject *frame_obj = _PyFrame_GetFrameObject(frame);
if (frame_obj == NULL) {
return -1;
diff --git a/Python/jit.c b/Python/jit.c
index 03bcf1142715f3..8782adb847cfd6 100644
--- a/Python/jit.c
+++ b/Python/jit.c
@@ -149,12 +149,12 @@ set_bits(uint32_t *loc, uint8_t loc_start, uint64_t value, uint8_t value_start,
// Fill all of stencil's holes in the memory pointed to by base, using the
// values in patches.
static void
-patch(unsigned char *base, const Stencil *stencil, uint64_t *patches)
+patch(unsigned char *base, const Stencil *stencil, uintptr_t patches[])
{
- for (uint64_t i = 0; i < stencil->holes_size; i++) {
+ for (size_t i = 0; i < stencil->holes_size; i++) {
const Hole *hole = &stencil->holes[i];
unsigned char *location = base + hole->offset;
- uint64_t value = patches[hole->value] + (uint64_t)hole->symbol + hole->addend;
+ uint64_t value = patches[hole->value] + (uintptr_t)hole->symbol + hole->addend;
uint8_t *loc8 = (uint8_t *)location;
uint32_t *loc32 = (uint32_t *)location;
uint64_t *loc64 = (uint64_t *)location;
@@ -228,7 +228,7 @@ patch(unsigned char *base, const Stencil *stencil, uint64_t *patches)
case HoleKind_X86_64_RELOC_SIGNED:
case HoleKind_X86_64_RELOC_BRANCH:
// 32-bit relative address.
- value -= (uint64_t)location;
+ value -= (uintptr_t)location;
// Check that we're not out of range of 32 signed bits:
assert((int64_t)value >= -(1LL << 31));
assert((int64_t)value < (1LL << 31));
@@ -239,7 +239,7 @@ patch(unsigned char *base, const Stencil *stencil, uint64_t *patches)
case HoleKind_R_AARCH64_JUMP26:
// 28-bit relative branch.
assert(IS_AARCH64_BRANCH(*loc32));
- value -= (uint64_t)location;
+ value -= (uintptr_t)location;
// Check that we're not out of range of 28 signed bits:
assert((int64_t)value >= -(1 << 27));
assert((int64_t)value < (1 << 27));
@@ -313,7 +313,7 @@ patch(unsigned char *base, const Stencil *stencil, uint64_t *patches)
i++;
continue;
}
- relaxed = (uint64_t)value - (uint64_t)location;
+ relaxed = value - (uintptr_t)location;
if ((relaxed & 0x3) == 0 &&
(int64_t)relaxed >= -(1L << 19) &&
(int64_t)relaxed < (1L << 19))
@@ -328,7 +328,7 @@ patch(unsigned char *base, const Stencil *stencil, uint64_t *patches)
// Fall through...
case HoleKind_ARM64_RELOC_PAGE21:
// Number of pages between this page and the value's page:
- value = (value >> 12) - ((uint64_t)location >> 12);
+ value = (value >> 12) - ((uintptr_t)location >> 12);
// Check that we're not out of range of 21 signed bits:
assert((int64_t)value >= -(1 << 20));
assert((int64_t)value < (1 << 20));
@@ -363,14 +363,14 @@ patch(unsigned char *base, const Stencil *stencil, uint64_t *patches)
}
static void
-copy_and_patch(unsigned char *base, const Stencil *stencil, uint64_t *patches)
+copy_and_patch(unsigned char *base, const Stencil *stencil, uintptr_t patches[])
{
memcpy(base, stencil->body, stencil->body_size);
patch(base, stencil, patches);
}
static void
-emit(const StencilGroup *group, uint64_t patches[])
+emit(const StencilGroup *group, uintptr_t patches[])
{
copy_and_patch((unsigned char *)patches[HoleValue_DATA], &group->data, patches);
copy_and_patch((unsigned char *)patches[HoleValue_CODE], &group->code, patches);
@@ -381,9 +381,9 @@ int
_PyJIT_Compile(_PyExecutorObject *executor, const _PyUOpInstruction *trace, size_t length)
{
// Loop once to find the total compiled size:
- uint32_t instruction_starts[UOP_MAX_TRACE_LENGTH];
- uint32_t code_size = 0;
- uint32_t data_size = 0;
+ size_t instruction_starts[UOP_MAX_TRACE_LENGTH];
+ size_t code_size = 0;
+ size_t data_size = 0;
for (size_t i = 0; i < length; i++) {
_PyUOpInstruction *instruction = (_PyUOpInstruction *)&trace[i];
const StencilGroup *group = &stencil_groups[instruction->opcode];
@@ -409,14 +409,20 @@ _PyJIT_Compile(_PyExecutorObject *executor, const _PyUOpInstruction *trace, size
for (size_t i = 0; i < length; i++) {
_PyUOpInstruction *instruction = (_PyUOpInstruction *)&trace[i];
const StencilGroup *group = &stencil_groups[instruction->opcode];
- // Think of patches as a dictionary mapping HoleValue to uint64_t:
- uint64_t patches[] = GET_PATCHES();
- patches[HoleValue_CODE] = (uint64_t)code;
- patches[HoleValue_CONTINUE] = (uint64_t)code + group->code.body_size;
- patches[HoleValue_DATA] = (uint64_t)data;
- patches[HoleValue_EXECUTOR] = (uint64_t)executor;
+ // Think of patches as a dictionary mapping HoleValue to uintptr_t:
+ uintptr_t patches[] = GET_PATCHES();
+ patches[HoleValue_CODE] = (uintptr_t)code;
+ patches[HoleValue_CONTINUE] = (uintptr_t)code + group->code.body_size;
+ patches[HoleValue_DATA] = (uintptr_t)data;
+ patches[HoleValue_EXECUTOR] = (uintptr_t)executor;
patches[HoleValue_OPARG] = instruction->oparg;
+ #if SIZEOF_VOID_P == 8
patches[HoleValue_OPERAND] = instruction->operand;
+ #else
+ assert(SIZEOF_VOID_P == 4);
+ patches[HoleValue_OPERAND_HI] = instruction->operand >> 32;
+ patches[HoleValue_OPERAND_LO] = instruction->operand & UINT32_MAX;
+ #endif
switch (instruction->format) {
case UOP_FORMAT_TARGET:
patches[HoleValue_TARGET] = instruction->target;
@@ -425,21 +431,21 @@ _PyJIT_Compile(_PyExecutorObject *executor, const _PyUOpInstruction *trace, size
assert(instruction->exit_index < executor->exit_count);
patches[HoleValue_EXIT_INDEX] = instruction->exit_index;
if (instruction->error_target < length) {
- patches[HoleValue_ERROR_TARGET] = (uint64_t)memory + instruction_starts[instruction->error_target];
+ patches[HoleValue_ERROR_TARGET] = (uintptr_t)memory + instruction_starts[instruction->error_target];
}
break;
case UOP_FORMAT_JUMP:
assert(instruction->jump_target < length);
- patches[HoleValue_JUMP_TARGET] = (uint64_t)memory + instruction_starts[instruction->jump_target];
+ patches[HoleValue_JUMP_TARGET] = (uintptr_t)memory + instruction_starts[instruction->jump_target];
if (instruction->error_target < length) {
- patches[HoleValue_ERROR_TARGET] = (uint64_t)memory + instruction_starts[instruction->error_target];
+ patches[HoleValue_ERROR_TARGET] = (uintptr_t)memory + instruction_starts[instruction->error_target];
}
break;
default:
assert(0);
Py_FatalError("Illegal instruction format");
}
- patches[HoleValue_TOP] = (uint64_t)memory + instruction_starts[1];
+ patches[HoleValue_TOP] = (uintptr_t)memory + instruction_starts[1];
patches[HoleValue_ZERO] = 0;
emit(group, patches);
code += group->code.body_size;
@@ -447,12 +453,12 @@ _PyJIT_Compile(_PyExecutorObject *executor, const _PyUOpInstruction *trace, size
}
// Protect against accidental buffer overrun into data:
const StencilGroup *group = &stencil_groups[_FATAL_ERROR];
- uint64_t patches[] = GET_PATCHES();
- patches[HoleValue_CODE] = (uint64_t)code;
- patches[HoleValue_CONTINUE] = (uint64_t)code;
- patches[HoleValue_DATA] = (uint64_t)data;
- patches[HoleValue_EXECUTOR] = (uint64_t)executor;
- patches[HoleValue_TOP] = (uint64_t)code;
+ uintptr_t patches[] = GET_PATCHES();
+ patches[HoleValue_CODE] = (uintptr_t)code;
+ patches[HoleValue_CONTINUE] = (uintptr_t)code;
+ patches[HoleValue_DATA] = (uintptr_t)data;
+ patches[HoleValue_EXECUTOR] = (uintptr_t)executor;
+ patches[HoleValue_TOP] = (uintptr_t)code;
patches[HoleValue_ZERO] = 0;
emit(group, patches);
code += group->code.body_size;
diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c
index 1d315b80d88ce0..efb25878312d85 100644
--- a/Python/pylifecycle.c
+++ b/Python/pylifecycle.c
@@ -477,6 +477,7 @@ pyinit_core_reconfigure(_PyRuntimeState *runtime,
if (interp == NULL) {
return _PyStatus_ERR("can't make main interpreter");
}
+ assert(interp->_ready);
status = _PyConfig_Write(config, runtime);
if (_PyStatus_EXCEPTION(status)) {
@@ -558,6 +559,15 @@ init_interp_settings(PyInterpreterState *interp,
return _PyStatus_ERR("per-interpreter obmalloc does not support "
"single-phase init extension modules");
}
+#ifdef Py_GIL_DISABLED
+ if (!_Py_IsMainInterpreter(interp) &&
+ !config->check_multi_interp_extensions)
+ {
+ return _PyStatus_ERR("The free-threaded build does not support "
+ "single-phase init extension modules in "
+ "subinterpreters");
+ }
+#endif
if (config->allow_fork) {
interp->feature_flags |= Py_RTFLAGS_FORK;
@@ -631,6 +641,8 @@ pycore_create_interpreter(_PyRuntimeState *runtime,
}
assert(interp != NULL);
assert(_Py_IsMainInterpreter(interp));
+ _PyInterpreterState_SetWhence(interp, _PyInterpreterState_WHENCE_RUNTIME);
+ interp->_ready = 1;
status = _PyConfig_Copy(&interp->config, src_config);
if (_PyStatus_EXCEPTION(status)) {
@@ -644,8 +656,10 @@ pycore_create_interpreter(_PyRuntimeState *runtime,
}
PyInterpreterConfig config = _PyInterpreterConfig_LEGACY_INIT;
- // The main interpreter always has its own GIL.
+ // The main interpreter always has its own GIL and supports single-phase
+ // init extensions.
config.gil = PyInterpreterConfig_OWN_GIL;
+ config.check_multi_interp_extensions = 0;
status = init_interp_settings(interp, &config);
if (_PyStatus_EXCEPTION(status)) {
return status;
@@ -2120,7 +2134,8 @@ Py_Finalize(void)
*/
static PyStatus
-new_interpreter(PyThreadState **tstate_p, const PyInterpreterConfig *config)
+new_interpreter(PyThreadState **tstate_p,
+ const PyInterpreterConfig *config, long whence)
{
PyStatus status;
@@ -2143,6 +2158,8 @@ new_interpreter(PyThreadState **tstate_p, const PyInterpreterConfig *config)
*tstate_p = NULL;
return _PyStatus_OK();
}
+ _PyInterpreterState_SetWhence(interp, whence);
+ interp->_ready = 1;
// XXX Might new_interpreter() have been called without the GIL held?
PyThreadState *save_tstate = _PyThreadState_GET();
@@ -2231,15 +2248,17 @@ PyStatus
Py_NewInterpreterFromConfig(PyThreadState **tstate_p,
const PyInterpreterConfig *config)
{
- return new_interpreter(tstate_p, config);
+ long whence = _PyInterpreterState_WHENCE_CAPI;
+ return new_interpreter(tstate_p, config, whence);
}
PyThreadState *
Py_NewInterpreter(void)
{
PyThreadState *tstate = NULL;
+ long whence = _PyInterpreterState_WHENCE_LEGACY_CAPI;
const PyInterpreterConfig config = _PyInterpreterConfig_LEGACY_INIT;
- PyStatus status = new_interpreter(&tstate, &config);
+ PyStatus status = new_interpreter(&tstate, &config, whence);
if (_PyStatus_EXCEPTION(status)) {
Py_ExitStatusException(status);
}
diff --git a/Python/pystate.c b/Python/pystate.c
index 892e740493cdfd..ac38866d301d59 100644
--- a/Python/pystate.c
+++ b/Python/pystate.c
@@ -506,6 +506,15 @@ _PyRuntimeState_ReInitThreads(_PyRuntimeState *runtime)
for (size_t i = 0; i < Py_ARRAY_LENGTH(locks); i++) {
_PyMutex_at_fork_reinit(locks[i]);
}
+#ifdef Py_GIL_DISABLED
+ for (PyInterpreterState *interp = runtime->interpreters.head;
+ interp != NULL; interp = interp->next)
+ {
+ for (int i = 0; i < NUM_WEAKREF_LIST_LOCKS; i++) {
+ _PyMutex_at_fork_reinit(&interp->weakref_locks[i]);
+ }
+ }
+#endif
_PyTypes_AfterFork();
@@ -574,6 +583,8 @@ free_interpreter(PyInterpreterState *interp)
}
}
+static inline int check_interpreter_whence(long);
+
/* Get the interpreter state to a minimal consistent state.
Further init happens in pylifecycle.c before it can be used.
All fields not initialized here are expected to be zeroed out,
@@ -596,12 +607,17 @@ free_interpreter(PyInterpreterState *interp)
static PyStatus
init_interpreter(PyInterpreterState *interp,
_PyRuntimeState *runtime, int64_t id,
- PyInterpreterState *next)
+ PyInterpreterState *next,
+ long whence)
{
if (interp->_initialized) {
return _PyStatus_ERR("interpreter already initialized");
}
+ assert(interp->_whence == _PyInterpreterState_WHENCE_NOTSET);
+ assert(check_interpreter_whence(whence) == 0);
+ interp->_whence = whence;
+
assert(runtime != NULL);
interp->runtime = runtime;
@@ -709,8 +725,9 @@ _PyInterpreterState_New(PyThreadState *tstate, PyInterpreterState **pinterp)
}
interpreters->head = interp;
+ long whence = _PyInterpreterState_WHENCE_UNKNOWN;
status = init_interpreter(interp, runtime,
- id, old_head);
+ id, old_head, whence);
if (_PyStatus_EXCEPTION(status)) {
goto error;
}
@@ -1094,6 +1111,41 @@ _PyInterpreterState_ReinitRunningMain(PyThreadState *tstate)
// accessors
//----------
+int
+_PyInterpreterState_IsReady(PyInterpreterState *interp)
+{
+ return interp->_ready;
+}
+
+
+static inline int
+check_interpreter_whence(long whence)
+{
+    if (whence < 0) {
+ return -1;
+ }
+ if (whence > _PyInterpreterState_WHENCE_MAX) {
+ return -1;
+ }
+ return 0;
+}
+
+long
+_PyInterpreterState_GetWhence(PyInterpreterState *interp)
+{
+ assert(check_interpreter_whence(interp->_whence) == 0);
+ return interp->_whence;
+}
+
+void
+_PyInterpreterState_SetWhence(PyInterpreterState *interp, long whence)
+{
+ assert(interp->_whence != _PyInterpreterState_WHENCE_NOTSET);
+ assert(check_interpreter_whence(whence) == 0);
+ interp->_whence = whence;
+}
+
+
PyObject *
PyUnstable_InterpreterState_GetMainModule(PyInterpreterState *interp)
{
@@ -1105,6 +1157,7 @@ PyUnstable_InterpreterState_GetMainModule(PyInterpreterState *interp)
return PyMapping_GetItemString(modules, "__main__");
}
+
PyObject *
PyInterpreterState_GetDict(PyInterpreterState *interp)
{
@@ -1167,6 +1220,20 @@ PyInterpreterState_GetID(PyInterpreterState *interp)
return interp->id;
}
+PyObject *
+_PyInterpreterState_GetIDObject(PyInterpreterState *interp)
+{
+ if (_PyInterpreterState_IDInitref(interp) != 0) {
+ return NULL;
+ };
+ int64_t interpid = interp->id;
+ if (interpid < 0) {
+ return NULL;
+ }
+ assert(interpid < LLONG_MAX);
+ return PyLong_FromLongLong(interpid);
+}
+
int
_PyInterpreterState_IDInitref(PyInterpreterState *interp)
@@ -1689,6 +1756,14 @@ tstate_delete_common(PyThreadState *tstate)
decrement_stoptheworld_countdown(&runtime->stoptheworld);
}
}
+
+#if defined(Py_REF_DEBUG) && defined(Py_GIL_DISABLED)
+ // Add our portion of the total refcount to the interpreter's total.
+ _PyThreadStateImpl *tstate_impl = (_PyThreadStateImpl *)tstate;
+ tstate->interp->object_state.reftotal += tstate_impl->reftotal;
+ tstate_impl->reftotal = 0;
+#endif
+
HEAD_UNLOCK(runtime);
#ifdef Py_GIL_DISABLED
@@ -1928,7 +2003,7 @@ tstate_try_attach(PyThreadState *tstate)
static void
tstate_set_detached(PyThreadState *tstate, int detached_state)
{
- assert(tstate->state == _Py_THREAD_ATTACHED);
+ assert(_Py_atomic_load_int_relaxed(&tstate->state) == _Py_THREAD_ATTACHED);
#ifdef Py_GIL_DISABLED
_Py_atomic_store_int(&tstate->state, detached_state);
#else
@@ -1993,7 +2068,7 @@ static void
detach_thread(PyThreadState *tstate, int detached_state)
{
// XXX assert(tstate_is_alive(tstate) && tstate_is_bound(tstate));
- assert(tstate->state == _Py_THREAD_ATTACHED);
+ assert(_Py_atomic_load_int_relaxed(&tstate->state) == _Py_THREAD_ATTACHED);
assert(tstate == current_fast_get());
if (tstate->critical_section != 0) {
_PyCriticalSection_SuspendAll(tstate);
@@ -2173,7 +2248,8 @@ start_the_world(struct _stoptheworld_state *stw)
PyThreadState *t;
_Py_FOR_EACH_THREAD(stw, i, t) {
if (t != stw->requester) {
- assert(t->state == _Py_THREAD_SUSPENDED);
+ assert(_Py_atomic_load_int_relaxed(&t->state) ==
+ _Py_THREAD_SUSPENDED);
_Py_atomic_store_int(&t->state, _Py_THREAD_DETACHED);
_PyParkingLot_UnparkAll(&t->state);
}
diff --git a/Python/specialize.c b/Python/specialize.c
index 0b4b199a23e297..5e14bb56b30036 100644
--- a/Python/specialize.c
+++ b/Python/specialize.c
@@ -11,6 +11,7 @@
#include "pycore_object.h"
#include "pycore_opcode_metadata.h" // _PyOpcode_Caches
#include "pycore_uop_metadata.h" // _PyOpcode_uop_name
+#include "pycore_uop_ids.h" // MAX_UOP_ID
#include "pycore_opcode_utils.h" // RESUME_AT_FUNC_START
#include "pycore_pylifecycle.h" // _PyOS_URandomNonblock()
#include "pycore_runtime.h" // _Py_ID()
@@ -269,6 +270,14 @@ print_optimization_stats(FILE *out, OptimizationStats *stats)
}
}
+    for (int i = 1; i <= MAX_UOP_ID; i++) {
+ for (int j = 1; j <= MAX_UOP_ID; j++) {
+ if (stats->opcode[i].pair_count[j]) {
+ fprintf(out, "uop[%s].pair_count[%s] : %" PRIu64 "\n",
+ _PyOpcode_uop_name[i], _PyOpcode_uop_name[j], stats->opcode[i].pair_count[j]);
+ }
+ }
+ }
for (int i = 0; i < MAX_UOP_ID; i++) {
if (stats->error_in_opcode[i]) {
fprintf(
diff --git a/Python/sysmodule.c b/Python/sysmodule.c
index cd193c1581c679..7b4a643bccd1dd 100644
--- a/Python/sysmodule.c
+++ b/Python/sysmodule.c
@@ -500,7 +500,8 @@ sys_addaudithook_impl(PyObject *module, PyObject *hook)
}
PyDoc_STRVAR(audit_doc,
-"audit(event, *args)\n\
+"audit($module, event, /, *args)\n\
+--\n\
\n\
Passes the event to any audit hooks that are attached.");
@@ -644,7 +645,8 @@ sys_breakpointhook(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyOb
}
PyDoc_STRVAR(breakpointhook_doc,
-"breakpointhook(*args, **kws)\n"
+"breakpointhook($module, /, *args, **kwargs)\n"
+"--\n"
"\n"
"This hook function is called by built-in breakpoint().\n"
);
@@ -1085,34 +1087,40 @@ trace_trampoline(PyObject *self, PyFrameObject *frame,
return 0;
}
+/*[clinic input]
+sys.settrace
+
+ function: object
+ /
+
+Set the global debug tracing function.
+
+It will be called on each function call. See the debugger chapter
+in the library manual.
+[clinic start generated code]*/
+
static PyObject *
-sys_settrace(PyObject *self, PyObject *args)
+sys_settrace(PyObject *module, PyObject *function)
+/*[clinic end generated code: output=999d12e9d6ec4678 input=8107feb01c5f1c4e]*/
{
PyThreadState *tstate = _PyThreadState_GET();
- if (args == Py_None) {
+ if (function == Py_None) {
if (_PyEval_SetTrace(tstate, NULL, NULL) < 0) {
return NULL;
}
}
else {
- if (_PyEval_SetTrace(tstate, trace_trampoline, args) < 0) {
+ if (_PyEval_SetTrace(tstate, trace_trampoline, function) < 0) {
return NULL;
}
}
Py_RETURN_NONE;
}
-PyDoc_STRVAR(settrace_doc,
-"settrace(function)\n\
-\n\
-Set the global debug tracing function. It will be called on each\n\
-function call. See the debugger chapter in the library manual."
-);
-
/*[clinic input]
sys._settraceallthreads
- arg: object
+ function as arg: object
/
Set the global debug tracing function in all running threads belonging to the current interpreter.
@@ -1123,7 +1131,7 @@ in the library manual.
static PyObject *
sys__settraceallthreads(PyObject *module, PyObject *arg)
-/*[clinic end generated code: output=161cca30207bf3ca input=5906aa1485a50289]*/
+/*[clinic end generated code: output=161cca30207bf3ca input=d4bde1f810d73675]*/
{
PyObject* argument = NULL;
Py_tracefunc func = NULL;
@@ -1159,45 +1167,51 @@ sys_gettrace_impl(PyObject *module)
return Py_NewRef(temp);
}
+/*[clinic input]
+sys.setprofile
+
+ function: object
+ /
+
+Set the profiling function.
+
+It will be called on each function call and return. See the profiler
+chapter in the library manual.
+[clinic start generated code]*/
+
static PyObject *
-sys_setprofile(PyObject *self, PyObject *args)
+sys_setprofile(PyObject *module, PyObject *function)
+/*[clinic end generated code: output=1c3503105939db9c input=055d0d7961413a62]*/
{
PyThreadState *tstate = _PyThreadState_GET();
- if (args == Py_None) {
+ if (function == Py_None) {
if (_PyEval_SetProfile(tstate, NULL, NULL) < 0) {
return NULL;
}
}
else {
- if (_PyEval_SetProfile(tstate, profile_trampoline, args) < 0) {
+ if (_PyEval_SetProfile(tstate, profile_trampoline, function) < 0) {
return NULL;
}
}
Py_RETURN_NONE;
}
-PyDoc_STRVAR(setprofile_doc,
-"setprofile(function)\n\
-\n\
-Set the profiling function. It will be called on each function call\n\
-and return. See the profiler chapter in the library manual."
-);
-
/*[clinic input]
sys._setprofileallthreads
- arg: object
+ function as arg: object
/
Set the profiling function in all running threads belonging to the current interpreter.
-It will be called on each function call and return. See the profiler chapter
-in the library manual.
+It will be called on each function call and return. See the profiler
+chapter in the library manual.
[clinic start generated code]*/
static PyObject *
sys__setprofileallthreads(PyObject *module, PyObject *arg)
-/*[clinic end generated code: output=2d61319e27b309fe input=d1a356d3f4f9060a]*/
+/*[clinic end generated code: output=2d61319e27b309fe input=a10589439ba20cee]*/
{
PyObject* argument = NULL;
Py_tracefunc func = NULL;
@@ -2525,11 +2539,11 @@ static PyMethodDef sys_methods[] = {
SYS_SETSWITCHINTERVAL_METHODDEF
SYS_GETSWITCHINTERVAL_METHODDEF
SYS_SETDLOPENFLAGS_METHODDEF
- {"setprofile", sys_setprofile, METH_O, setprofile_doc},
+ SYS_SETPROFILE_METHODDEF
SYS__SETPROFILEALLTHREADS_METHODDEF
SYS_GETPROFILE_METHODDEF
SYS_SETRECURSIONLIMIT_METHODDEF
- {"settrace", sys_settrace, METH_O, settrace_doc},
+ SYS_SETTRACE_METHODDEF
SYS__SETTRACEALLTHREADS_METHODDEF
SYS_GETTRACE_METHODDEF
SYS_CALL_TRACING_METHODDEF
diff --git a/Python/traceback.c b/Python/traceback.c
index 2564a7db5dcfec..47b77c9108dd9a 100644
--- a/Python/traceback.c
+++ b/Python/traceback.c
@@ -34,9 +34,9 @@
extern char* _PyTokenizer_FindEncodingFilename(int, PyObject *);
/*[clinic input]
-class TracebackType "PyTracebackObject *" "&PyTraceback_Type"
+class traceback "PyTracebackObject *" "&PyTraceback_Type"
[clinic start generated code]*/
-/*[clinic end generated code: output=da39a3ee5e6b4b0d input=928fa06c10151120]*/
+/*[clinic end generated code: output=da39a3ee5e6b4b0d input=cf96294b2bebc811]*/
#include "clinic/traceback.c.h"
@@ -63,7 +63,7 @@ tb_create_raw(PyTracebackObject *next, PyFrameObject *frame, int lasti,
/*[clinic input]
@classmethod
-TracebackType.__new__ as tb_new
+traceback.__new__ as tb_new
tb_next: object
tb_frame: object(type='PyFrameObject *', subclass_of='&PyFrame_Type')
@@ -76,7 +76,7 @@ Create a new traceback object.
static PyObject *
tb_new_impl(PyTypeObject *type, PyObject *tb_next, PyFrameObject *tb_frame,
int tb_lasti, int tb_lineno)
-/*[clinic end generated code: output=fa077debd72d861a input=01cbe8ec8783fca7]*/
+/*[clinic end generated code: output=fa077debd72d861a input=b88143145454cb59]*/
{
if (tb_next == Py_None) {
tb_next = NULL;
diff --git a/README.rst b/README.rst
index 46167b38eab566..cab9519bd7a76c 100644
--- a/README.rst
+++ b/README.rst
@@ -1,4 +1,4 @@
-This is Python version 3.13.0 alpha 5
+This is Python version 3.13.0 alpha 6
=====================================
.. image:: https://github.com/python/cpython/workflows/Tests/badge.svg
diff --git a/Tools/c-analyzer/cpython/ignored.tsv b/Tools/c-analyzer/cpython/ignored.tsv
index 965346b9b04a32..e0ae39036c128d 100644
--- a/Tools/c-analyzer/cpython/ignored.tsv
+++ b/Tools/c-analyzer/cpython/ignored.tsv
@@ -198,6 +198,7 @@ Python/pystate.c - _Py_tss_tstate -
Include/internal/pycore_blocks_output_buffer.h - BUFFER_BLOCK_SIZE -
Modules/_csv.c - quote_styles -
+Modules/_ctypes/_ctypes.c - _ctypesmodule -
Modules/_ctypes/cfield.c - ffi_type_double -
Modules/_ctypes/cfield.c - ffi_type_float -
Modules/_ctypes/cfield.c - ffi_type_longdouble -
diff --git a/Tools/cases_generator/analyzer.py b/Tools/cases_generator/analyzer.py
index 4261378d459107..e38ab3c9047039 100644
--- a/Tools/cases_generator/analyzer.py
+++ b/Tools/cases_generator/analyzer.py
@@ -24,7 +24,6 @@ class Properties:
has_free: bool
side_exit: bool
pure: bool
- passthrough: bool
tier: int | None = None
oparg_and_1: bool = False
const_oparg: int = -1
@@ -54,7 +53,6 @@ def from_list(properties: list["Properties"]) -> "Properties":
has_free=any(p.has_free for p in properties),
side_exit=any(p.side_exit for p in properties),
pure=all(p.pure for p in properties),
- passthrough=all(p.passthrough for p in properties),
)
@property
@@ -81,7 +79,6 @@ def infallible(self) -> bool:
has_free=False,
side_exit=False,
pure=False,
- passthrough=False,
)
@@ -106,9 +103,6 @@ class StackItem:
condition: str | None
size: str
peek: bool = False
- type_prop: None | tuple[str, None | str] = field(
- default_factory=lambda: None, init=True, compare=False, hash=False
- )
def __str__(self) -> str:
cond = f" if ({self.condition})" if self.condition else ""
@@ -536,8 +530,6 @@ def compute_properties(op: parser.InstDef) -> Properties:
)
error_with_pop = has_error_with_pop(op)
error_without_pop = has_error_without_pop(op)
- infallible = not error_with_pop and not error_without_pop
- passthrough = stack_effect_only_peeks(op) and infallible
return Properties(
escapes=makes_escaping_api_call(op),
error_with_pop=error_with_pop,
@@ -557,7 +549,6 @@ def compute_properties(op: parser.InstDef) -> Properties:
and not has_free,
has_free=has_free,
pure="pure" in op.annotations,
- passthrough=passthrough,
tier=tier_variable(op),
)
diff --git a/Tools/cases_generator/generators_common.py b/Tools/cases_generator/generators_common.py
index 0addcf0ab570f6..cc9eb8a0e90eeb 100644
--- a/Tools/cases_generator/generators_common.py
+++ b/Tools/cases_generator/generators_common.py
@@ -234,8 +234,6 @@ def cflags(p: Properties) -> str:
flags.append("HAS_ESCAPES_FLAG")
if p.pure:
flags.append("HAS_PURE_FLAG")
- if p.passthrough:
- flags.append("HAS_PASSTHROUGH_FLAG")
if p.oparg_and_1:
flags.append("HAS_OPARG_AND_1_FLAG")
if flags:
diff --git a/Tools/clinic/libclinic/app.py b/Tools/clinic/libclinic/app.py
index 47a897712d053e..632bed3ce53dde 100644
--- a/Tools/clinic/libclinic/app.py
+++ b/Tools/clinic/libclinic/app.py
@@ -9,8 +9,7 @@
from libclinic import fail, warn
from libclinic.function import Class
from libclinic.block_parser import Block, BlockParser
-from libclinic.crenderdata import Include
-from libclinic.codegen import BlockPrinter, Destination
+from libclinic.codegen import BlockPrinter, Destination, CodeGen
from libclinic.parser import Parser, PythonParser
from libclinic.dsl_parser import DSLParser
if TYPE_CHECKING:
@@ -102,8 +101,7 @@ def __init__(
self.modules: ModuleDict = {}
self.classes: ClassDict = {}
self.functions: list[Function] = []
- # dict: include name => Include instance
- self.includes: dict[str, Include] = {}
+ self.codegen = CodeGen(self.limited_capi)
self.line_prefix = self.line_suffix = ''
@@ -132,7 +130,6 @@ def __init__(
DestBufferList = list[DestBufferType]
self.destination_buffers_stack: DestBufferList = []
- self.ifndef_symbols: set[str] = set()
self.presets: dict[str, dict[Any, Any]] = {}
preset = None
@@ -159,24 +156,6 @@ def __init__(
assert name in self.destination_buffers
preset[name] = buffer
- def add_include(self, name: str, reason: str,
- *, condition: str | None = None) -> None:
- try:
- existing = self.includes[name]
- except KeyError:
- pass
- else:
- if existing.condition and not condition:
- # If the previous include has a condition and the new one is
- # unconditional, override the include.
- pass
- else:
- # Already included, do nothing. Only mention a single reason,
- # no need to list all of them.
- return
-
- self.includes[name] = Include(name, reason, condition)
-
def add_destination(
self,
name: str,
@@ -212,9 +191,7 @@ def parse(self, input: str) -> str:
self.parsers[dsl_name] = parsers[dsl_name](self)
parser = self.parsers[dsl_name]
parser.parse(block)
- printer.print_block(block,
- limited_capi=self.limited_capi,
- header_includes=self.includes)
+ printer.print_block(block)
# these are destinations not buffers
for name, destination in self.destinations.items():
@@ -229,9 +206,7 @@ def parse(self, input: str) -> str:
block.input = "dump " + name + "\n"
warn("Destination buffer " + repr(name) + " not empty at end of file, emptying.")
printer.write("\n")
- printer.print_block(block,
- limited_capi=self.limited_capi,
- header_includes=self.includes)
+ printer.print_block(block)
continue
if destination.type == 'file':
@@ -255,11 +230,10 @@ def parse(self, input: str) -> str:
pass
block.input = 'preserve\n'
+ includes = self.codegen.get_includes()
+
printer_2 = BlockPrinter(self.language)
- printer_2.print_block(block,
- core_includes=True,
- limited_capi=self.limited_capi,
- header_includes=self.includes)
+ printer_2.print_block(block, header_includes=includes)
libclinic.write_file(destination.filename,
printer_2.f.getvalue())
continue
diff --git a/Tools/clinic/libclinic/clanguage.py b/Tools/clinic/libclinic/clanguage.py
index ed08d12d8bfb29..10efedd5cb9cea 100644
--- a/Tools/clinic/libclinic/clanguage.py
+++ b/Tools/clinic/libclinic/clanguage.py
@@ -8,93 +8,19 @@
import libclinic
from libclinic import (
- unspecified, fail, warn, Sentinels, VersionTuple)
-from libclinic.function import (
- GETTER, SETTER, METHOD_INIT, METHOD_NEW)
-from libclinic.crenderdata import CRenderData, TemplateDict
+ unspecified, fail, Sentinels, VersionTuple)
+from libclinic.codegen import CRenderData, TemplateDict, CodeGen
from libclinic.language import Language
from libclinic.function import (
Module, Class, Function, Parameter,
- permute_optional_groups)
-from libclinic.converters import (
- defining_class_converter, object_converter, self_converter)
+ permute_optional_groups,
+ GETTER, SETTER, METHOD_INIT)
+from libclinic.converters import self_converter
+from libclinic.parse_args import ParseArgsCodeGen
if TYPE_CHECKING:
from libclinic.app import Clinic
-def declare_parser(
- f: Function,
- *,
- hasformat: bool = False,
- clinic: Clinic,
- limited_capi: bool,
-) -> str:
- """
- Generates the code template for a static local PyArg_Parser variable,
- with an initializer. For core code (incl. builtin modules) the
- kwtuple field is also statically initialized. Otherwise
- it is initialized at runtime.
- """
- if hasformat:
- fname = ''
- format_ = '.format = "{format_units}:{name}",'
- else:
- fname = '.fname = "{name}",'
- format_ = ''
-
- num_keywords = len([
- p for p in f.parameters.values()
- if not p.is_positional_only() and not p.is_vararg()
- ])
- if limited_capi:
- declarations = """
- #define KWTUPLE NULL
- """
- elif num_keywords == 0:
- declarations = """
- #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- # define KWTUPLE (PyObject *)&_Py_SINGLETON(tuple_empty)
- #else
- # define KWTUPLE NULL
- #endif
- """
- else:
- declarations = """
- #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
-
- #define NUM_KEYWORDS %d
- static struct {{
- PyGC_Head _this_is_not_used;
- PyObject_VAR_HEAD
- PyObject *ob_item[NUM_KEYWORDS];
- }} _kwtuple = {{
- .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
- .ob_item = {{ {keywords_py} }},
- }};
- #undef NUM_KEYWORDS
- #define KWTUPLE (&_kwtuple.ob_base.ob_base)
-
- #else // !Py_BUILD_CORE
- # define KWTUPLE NULL
- #endif // !Py_BUILD_CORE
- """ % num_keywords
-
- condition = '#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)'
- clinic.add_include('pycore_gc.h', 'PyGC_Head', condition=condition)
- clinic.add_include('pycore_runtime.h', '_Py_ID()', condition=condition)
-
- declarations += """
- static const char * const _keywords[] = {{{keywords_c} NULL}};
- static _PyArg_Parser _parser = {{
- .keywords = _keywords,
- %s
- .kwtuple = KWTUPLE,
- }};
- #undef KWTUPLE
- """ % (format_ or fname)
- return libclinic.normalize_snippet(declarations)
-
-
class CLanguage(Language):
body_prefix = "#"
@@ -104,99 +30,6 @@ class CLanguage(Language):
stop_line = "[{dsl_name} start generated code]*/"
checksum_line = "/*[{dsl_name} end generated code: {arguments}]*/"
- NO_VARARG: Final[str] = "PY_SSIZE_T_MAX"
-
- PARSER_PROTOTYPE_KEYWORD: Final[str] = libclinic.normalize_snippet("""
- static PyObject *
- {c_basename}({self_type}{self_name}, PyObject *args, PyObject *kwargs)
- """)
- PARSER_PROTOTYPE_KEYWORD___INIT__: Final[str] = libclinic.normalize_snippet("""
- static int
- {c_basename}({self_type}{self_name}, PyObject *args, PyObject *kwargs)
- """)
- PARSER_PROTOTYPE_VARARGS: Final[str] = libclinic.normalize_snippet("""
- static PyObject *
- {c_basename}({self_type}{self_name}, PyObject *args)
- """)
- PARSER_PROTOTYPE_FASTCALL: Final[str] = libclinic.normalize_snippet("""
- static PyObject *
- {c_basename}({self_type}{self_name}, PyObject *const *args, Py_ssize_t nargs)
- """)
- PARSER_PROTOTYPE_FASTCALL_KEYWORDS: Final[str] = libclinic.normalize_snippet("""
- static PyObject *
- {c_basename}({self_type}{self_name}, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
- """)
- PARSER_PROTOTYPE_DEF_CLASS: Final[str] = libclinic.normalize_snippet("""
- static PyObject *
- {c_basename}({self_type}{self_name}, PyTypeObject *{defining_class_name}, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
- """)
- PARSER_PROTOTYPE_NOARGS: Final[str] = libclinic.normalize_snippet("""
- static PyObject *
- {c_basename}({self_type}{self_name}, PyObject *Py_UNUSED(ignored))
- """)
- PARSER_PROTOTYPE_GETTER: Final[str] = libclinic.normalize_snippet("""
- static PyObject *
- {c_basename}({self_type}{self_name}, void *Py_UNUSED(context))
- """)
- PARSER_PROTOTYPE_SETTER: Final[str] = libclinic.normalize_snippet("""
- static int
- {c_basename}({self_type}{self_name}, PyObject *value, void *Py_UNUSED(context))
- """)
- METH_O_PROTOTYPE: Final[str] = libclinic.normalize_snippet("""
- static PyObject *
- {c_basename}({impl_parameters})
- """)
- DOCSTRING_PROTOTYPE_VAR: Final[str] = libclinic.normalize_snippet("""
- PyDoc_VAR({c_basename}__doc__);
- """)
- DOCSTRING_PROTOTYPE_STRVAR: Final[str] = libclinic.normalize_snippet("""
- PyDoc_STRVAR({c_basename}__doc__,
- {docstring});
- """)
- GETSET_DOCSTRING_PROTOTYPE_STRVAR: Final[str] = libclinic.normalize_snippet("""
- PyDoc_STRVAR({getset_basename}__doc__,
- {docstring});
- #define {getset_basename}_HAS_DOCSTR
- """)
- IMPL_DEFINITION_PROTOTYPE: Final[str] = libclinic.normalize_snippet("""
- static {impl_return_type}
- {c_basename}_impl({impl_parameters})
- """)
- METHODDEF_PROTOTYPE_DEFINE: Final[str] = libclinic.normalize_snippet(r"""
- #define {methoddef_name} \
- {{"{name}", {methoddef_cast}{c_basename}{methoddef_cast_end}, {methoddef_flags}, {c_basename}__doc__}},
- """)
- GETTERDEF_PROTOTYPE_DEFINE: Final[str] = libclinic.normalize_snippet(r"""
- #if defined({getset_basename}_HAS_DOCSTR)
- # define {getset_basename}_DOCSTR {getset_basename}__doc__
- #else
- # define {getset_basename}_DOCSTR NULL
- #endif
- #if defined({getset_name}_GETSETDEF)
- # undef {getset_name}_GETSETDEF
- # define {getset_name}_GETSETDEF {{"{name}", (getter){getset_basename}_get, (setter){getset_basename}_set, {getset_basename}_DOCSTR}},
- #else
- # define {getset_name}_GETSETDEF {{"{name}", (getter){getset_basename}_get, NULL, {getset_basename}_DOCSTR}},
- #endif
- """)
- SETTERDEF_PROTOTYPE_DEFINE: Final[str] = libclinic.normalize_snippet(r"""
- #if defined({getset_name}_HAS_DOCSTR)
- # define {getset_basename}_DOCSTR {getset_basename}__doc__
- #else
- # define {getset_basename}_DOCSTR NULL
- #endif
- #if defined({getset_name}_GETSETDEF)
- # undef {getset_name}_GETSETDEF
- # define {getset_name}_GETSETDEF {{"{name}", (getter){getset_basename}_get, (setter){getset_basename}_set, {getset_basename}_DOCSTR}},
- #else
- # define {getset_name}_GETSETDEF {{"{name}", NULL, (setter){getset_basename}_set, NULL}},
- #endif
- """)
- METHODDEF_PROTOTYPE_IFNDEF: Final[str] = libclinic.normalize_snippet("""
- #ifndef {methoddef_name}
- #define {methoddef_name}
- #endif /* !defined({methoddef_name}) */
- """)
COMPILER_DEPRECATION_WARNING_PROTOTYPE: Final[str] = r"""
// Emit compiler warnings when we get to Python {major}.{minor}.
#if PY_VERSION_HEX >= 0x{major:02x}{minor:02x}00C0
@@ -317,14 +150,14 @@ def deprecate_keyword_use(
self,
func: Function,
params: dict[int, Parameter],
- argname_fmt: str | None,
+ argname_fmt: str | None = None,
*,
fastcall: bool,
- limited_capi: bool,
- clinic: Clinic,
+ codegen: CodeGen,
) -> str:
assert len(params) > 0
last_param = next(reversed(params.values()))
+ limited_capi = codegen.limited_capi
# Format the deprecation message.
containscheck = ""
@@ -336,11 +169,11 @@ def deprecate_keyword_use(
elif fastcall:
conditions.append(f"nargs < {i+1} && PySequence_Contains(kwnames, &_Py_ID({p.name}))")
containscheck = "PySequence_Contains"
- clinic.add_include('pycore_runtime.h', '_Py_ID()')
+ codegen.add_include('pycore_runtime.h', '_Py_ID()')
else:
conditions.append(f"nargs < {i+1} && PyDict_Contains(kwargs, &_Py_ID({p.name}))")
containscheck = "PyDict_Contains"
- clinic.add_include('pycore_runtime.h', '_Py_ID()')
+ codegen.add_include('pycore_runtime.h', '_Py_ID()')
else:
conditions = [f"nargs < {i+1}"]
condition = ") || (".join(conditions)
@@ -399,681 +232,10 @@ def deprecate_keyword_use(
def output_templates(
self,
f: Function,
- clinic: Clinic
+ codegen: CodeGen,
) -> dict[str, str]:
- parameters = list(f.parameters.values())
- assert parameters
- first_param = parameters.pop(0)
- assert isinstance(first_param.converter, self_converter)
- requires_defining_class = False
- if parameters and isinstance(parameters[0].converter, defining_class_converter):
- requires_defining_class = True
- del parameters[0]
- converters = [p.converter for p in parameters]
-
- if f.critical_section:
- clinic.add_include('pycore_critical_section.h', 'Py_BEGIN_CRITICAL_SECTION()')
- has_option_groups = parameters and (parameters[0].group or parameters[-1].group)
- simple_return = (f.return_converter.type == 'PyObject *'
- and not f.critical_section)
- new_or_init = f.kind.new_or_init
-
- vararg: int | str = self.NO_VARARG
- pos_only = min_pos = max_pos = min_kw_only = pseudo_args = 0
- for i, p in enumerate(parameters, 1):
- if p.is_keyword_only():
- assert not p.is_positional_only()
- if not p.is_optional():
- min_kw_only = i - max_pos
- elif p.is_vararg():
- pseudo_args += 1
- vararg = i - 1
- else:
- if vararg == self.NO_VARARG:
- max_pos = i
- if p.is_positional_only():
- pos_only = i
- if not p.is_optional():
- min_pos = i
-
- meth_o = (len(parameters) == 1 and
- parameters[0].is_positional_only() and
- not converters[0].is_optional() and
- not requires_defining_class and
- not new_or_init)
-
- # we have to set these things before we're done:
- #
- # docstring_prototype
- # docstring_definition
- # impl_prototype
- # methoddef_define
- # parser_prototype
- # parser_definition
- # impl_definition
- # cpp_if
- # cpp_endif
- # methoddef_ifndef
-
- return_value_declaration = "PyObject *return_value = NULL;"
- methoddef_define = self.METHODDEF_PROTOTYPE_DEFINE
- if new_or_init and not f.docstring:
- docstring_prototype = docstring_definition = ''
- elif f.kind is GETTER:
- methoddef_define = self.GETTERDEF_PROTOTYPE_DEFINE
- if f.docstring:
- docstring_prototype = ''
- docstring_definition = self.GETSET_DOCSTRING_PROTOTYPE_STRVAR
- else:
- docstring_prototype = docstring_definition = ''
- elif f.kind is SETTER:
- if f.docstring:
- fail("docstrings are only supported for @getter, not @setter")
- return_value_declaration = "int {return_value};"
- methoddef_define = self.SETTERDEF_PROTOTYPE_DEFINE
- docstring_prototype = docstring_definition = ''
- else:
- docstring_prototype = self.DOCSTRING_PROTOTYPE_VAR
- docstring_definition = self.DOCSTRING_PROTOTYPE_STRVAR
- impl_definition = self.IMPL_DEFINITION_PROTOTYPE
- impl_prototype = parser_prototype = parser_definition = None
-
- # parser_body_fields remembers the fields passed in to the
- # previous call to parser_body. this is used for an awful hack.
- parser_body_fields: tuple[str, ...] = ()
- def parser_body(
- prototype: str,
- *fields: str,
- declarations: str = ''
- ) -> str:
- nonlocal parser_body_fields
- lines = []
- lines.append(prototype)
- parser_body_fields = fields
-
- preamble = libclinic.normalize_snippet("""
- {{
- {return_value_declaration}
- {parser_declarations}
- {declarations}
- {initializers}
- """) + "\n"
- finale = libclinic.normalize_snippet("""
- {modifications}
- {lock}
- {return_value} = {c_basename}_impl({impl_arguments});
- {unlock}
- {return_conversion}
- {post_parsing}
-
- {exit_label}
- {cleanup}
- return return_value;
- }}
- """)
- for field in preamble, *fields, finale:
- lines.append(field)
- return libclinic.linear_format("\n".join(lines),
- parser_declarations=declarations)
-
- fastcall = not new_or_init
- limited_capi = clinic.limited_capi
- if limited_capi and (pseudo_args or
- (any(p.is_optional() for p in parameters) and
- any(p.is_keyword_only() and not p.is_optional() for p in parameters)) or
- any(c.broken_limited_capi for c in converters)):
- warn(f"Function {f.full_name} cannot use limited C API")
- limited_capi = False
-
- parsearg: str | None
- if not parameters:
- parser_code: list[str] | None
- if f.kind is GETTER:
- flags = "" # This should end up unused
- parser_prototype = self.PARSER_PROTOTYPE_GETTER
- parser_code = []
- elif f.kind is SETTER:
- flags = ""
- parser_prototype = self.PARSER_PROTOTYPE_SETTER
- parser_code = []
- elif not requires_defining_class:
- # no parameters, METH_NOARGS
- flags = "METH_NOARGS"
- parser_prototype = self.PARSER_PROTOTYPE_NOARGS
- parser_code = []
- else:
- assert fastcall
-
- flags = "METH_METHOD|METH_FASTCALL|METH_KEYWORDS"
- parser_prototype = self.PARSER_PROTOTYPE_DEF_CLASS
- return_error = ('return NULL;' if simple_return
- else 'goto exit;')
- parser_code = [libclinic.normalize_snippet("""
- if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) {{
- PyErr_SetString(PyExc_TypeError, "{name}() takes no arguments");
- %s
- }}
- """ % return_error, indent=4)]
-
- if simple_return:
- parser_definition = '\n'.join([
- parser_prototype,
- '{{',
- *parser_code,
- ' return {c_basename}_impl({impl_arguments});',
- '}}'])
- else:
- parser_definition = parser_body(parser_prototype, *parser_code)
-
- elif meth_o:
- flags = "METH_O"
-
- if (isinstance(converters[0], object_converter) and
- converters[0].format_unit == 'O'):
- meth_o_prototype = self.METH_O_PROTOTYPE
-
- if simple_return:
- # maps perfectly to METH_O, doesn't need a return converter.
- # so we skip making a parse function
- # and call directly into the impl function.
- impl_prototype = parser_prototype = parser_definition = ''
- impl_definition = meth_o_prototype
- else:
- # SLIGHT HACK
- # use impl_parameters for the parser here!
- parser_prototype = meth_o_prototype
- parser_definition = parser_body(parser_prototype)
-
- else:
- argname = 'arg'
- if parameters[0].name == argname:
- argname += '_'
- parser_prototype = libclinic.normalize_snippet("""
- static PyObject *
- {c_basename}({self_type}{self_name}, PyObject *%s)
- """ % argname)
-
- displayname = parameters[0].get_displayname(0)
- parsearg = converters[0].parse_arg(argname, displayname, limited_capi=limited_capi)
- if parsearg is None:
- converters[0].use_converter()
- parsearg = """
- if (!PyArg_Parse(%s, "{format_units}:{name}", {parse_arguments})) {{
- goto exit;
- }}
- """ % argname
- parser_definition = parser_body(parser_prototype,
- libclinic.normalize_snippet(parsearg, indent=4))
-
- elif has_option_groups:
- # positional parameters with option groups
- # (we have to generate lots of PyArg_ParseTuple calls
- # in a big switch statement)
-
- flags = "METH_VARARGS"
- parser_prototype = self.PARSER_PROTOTYPE_VARARGS
- parser_definition = parser_body(parser_prototype, ' {option_group_parsing}')
-
- elif not requires_defining_class and pos_only == len(parameters) - pseudo_args:
- if fastcall:
- # positional-only, but no option groups
- # we only need one call to _PyArg_ParseStack
-
- flags = "METH_FASTCALL"
- parser_prototype = self.PARSER_PROTOTYPE_FASTCALL
- nargs = 'nargs'
- argname_fmt = 'args[%d]'
- else:
- # positional-only, but no option groups
- # we only need one call to PyArg_ParseTuple
-
- flags = "METH_VARARGS"
- parser_prototype = self.PARSER_PROTOTYPE_VARARGS
- if limited_capi:
- nargs = 'PyTuple_Size(args)'
- argname_fmt = 'PyTuple_GetItem(args, %d)'
- else:
- nargs = 'PyTuple_GET_SIZE(args)'
- argname_fmt = 'PyTuple_GET_ITEM(args, %d)'
-
- left_args = f"{nargs} - {max_pos}"
- max_args = self.NO_VARARG if (vararg != self.NO_VARARG) else max_pos
- if limited_capi:
- parser_code = []
- if nargs != 'nargs':
- nargs_def = f'Py_ssize_t nargs = {nargs};'
- parser_code.append(libclinic.normalize_snippet(nargs_def, indent=4))
- nargs = 'nargs'
- if min_pos == max_args:
- pl = '' if min_pos == 1 else 's'
- parser_code.append(libclinic.normalize_snippet(f"""
- if ({nargs} != {min_pos}) {{{{
- PyErr_Format(PyExc_TypeError, "{{name}} expected {min_pos} argument{pl}, got %zd", {nargs});
- goto exit;
- }}}}
- """,
- indent=4))
- else:
- if min_pos:
- pl = '' if min_pos == 1 else 's'
- parser_code.append(libclinic.normalize_snippet(f"""
- if ({nargs} < {min_pos}) {{{{
- PyErr_Format(PyExc_TypeError, "{{name}} expected at least {min_pos} argument{pl}, got %zd", {nargs});
- goto exit;
- }}}}
- """,
- indent=4))
- if max_args != self.NO_VARARG:
- pl = '' if max_args == 1 else 's'
- parser_code.append(libclinic.normalize_snippet(f"""
- if ({nargs} > {max_args}) {{{{
- PyErr_Format(PyExc_TypeError, "{{name}} expected at most {max_args} argument{pl}, got %zd", {nargs});
- goto exit;
- }}}}
- """,
- indent=4))
- else:
- clinic.add_include('pycore_modsupport.h',
- '_PyArg_CheckPositional()')
- parser_code = [libclinic.normalize_snippet(f"""
- if (!_PyArg_CheckPositional("{{name}}", {nargs}, {min_pos}, {max_args})) {{{{
- goto exit;
- }}}}
- """, indent=4)]
-
- has_optional = False
- for i, p in enumerate(parameters):
- if p.is_vararg():
- if fastcall:
- parser_code.append(libclinic.normalize_snippet("""
- %s = PyTuple_New(%s);
- if (!%s) {{
- goto exit;
- }}
- for (Py_ssize_t i = 0; i < %s; ++i) {{
- PyTuple_SET_ITEM(%s, i, Py_NewRef(args[%d + i]));
- }}
- """ % (
- p.converter.parser_name,
- left_args,
- p.converter.parser_name,
- left_args,
- p.converter.parser_name,
- max_pos
- ), indent=4))
- else:
- parser_code.append(libclinic.normalize_snippet("""
- %s = PyTuple_GetSlice(%d, -1);
- """ % (
- p.converter.parser_name,
- max_pos
- ), indent=4))
- continue
-
- displayname = p.get_displayname(i+1)
- argname = argname_fmt % i
- parsearg = p.converter.parse_arg(argname, displayname, limited_capi=limited_capi)
- if parsearg is None:
- parser_code = None
- break
- if has_optional or p.is_optional():
- has_optional = True
- parser_code.append(libclinic.normalize_snippet("""
- if (%s < %d) {{
- goto skip_optional;
- }}
- """, indent=4) % (nargs, i + 1))
- parser_code.append(libclinic.normalize_snippet(parsearg, indent=4))
-
- if parser_code is not None:
- if has_optional:
- parser_code.append("skip_optional:")
- else:
- for parameter in parameters:
- parameter.converter.use_converter()
-
- if limited_capi:
- fastcall = False
- if fastcall:
- clinic.add_include('pycore_modsupport.h',
- '_PyArg_ParseStack()')
- parser_code = [libclinic.normalize_snippet("""
- if (!_PyArg_ParseStack(args, nargs, "{format_units}:{name}",
- {parse_arguments})) {{
- goto exit;
- }}
- """, indent=4)]
- else:
- flags = "METH_VARARGS"
- parser_prototype = self.PARSER_PROTOTYPE_VARARGS
- parser_code = [libclinic.normalize_snippet("""
- if (!PyArg_ParseTuple(args, "{format_units}:{name}",
- {parse_arguments})) {{
- goto exit;
- }}
- """, indent=4)]
- parser_definition = parser_body(parser_prototype, *parser_code)
-
- else:
- deprecated_positionals: dict[int, Parameter] = {}
- deprecated_keywords: dict[int, Parameter] = {}
- for i, p in enumerate(parameters):
- if p.deprecated_positional:
- deprecated_positionals[i] = p
- if p.deprecated_keyword:
- deprecated_keywords[i] = p
-
- has_optional_kw = (
- max(pos_only, min_pos) + min_kw_only
- < len(converters) - int(vararg != self.NO_VARARG)
- )
-
- if limited_capi:
- parser_code = None
- fastcall = False
- else:
- if vararg == self.NO_VARARG:
- clinic.add_include('pycore_modsupport.h',
- '_PyArg_UnpackKeywords()')
- args_declaration = "_PyArg_UnpackKeywords", "%s, %s, %s" % (
- min_pos,
- max_pos,
- min_kw_only
- )
- nargs = "nargs"
- else:
- clinic.add_include('pycore_modsupport.h',
- '_PyArg_UnpackKeywordsWithVararg()')
- args_declaration = "_PyArg_UnpackKeywordsWithVararg", "%s, %s, %s, %s" % (
- min_pos,
- max_pos,
- min_kw_only,
- vararg
- )
- nargs = f"Py_MIN(nargs, {max_pos})" if max_pos else "0"
-
- if fastcall:
- flags = "METH_FASTCALL|METH_KEYWORDS"
- parser_prototype = self.PARSER_PROTOTYPE_FASTCALL_KEYWORDS
- argname_fmt = 'args[%d]'
- declarations = declare_parser(f, clinic=clinic,
- limited_capi=clinic.limited_capi)
- declarations += "\nPyObject *argsbuf[%s];" % len(converters)
- if has_optional_kw:
- declarations += "\nPy_ssize_t noptargs = %s + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - %d;" % (nargs, min_pos + min_kw_only)
- parser_code = [libclinic.normalize_snippet("""
- args = %s(args, nargs, NULL, kwnames, &_parser, %s, argsbuf);
- if (!args) {{
- goto exit;
- }}
- """ % args_declaration, indent=4)]
- else:
- # positional-or-keyword arguments
- flags = "METH_VARARGS|METH_KEYWORDS"
- parser_prototype = self.PARSER_PROTOTYPE_KEYWORD
- argname_fmt = 'fastargs[%d]'
- declarations = declare_parser(f, clinic=clinic,
- limited_capi=clinic.limited_capi)
- declarations += "\nPyObject *argsbuf[%s];" % len(converters)
- declarations += "\nPyObject * const *fastargs;"
- declarations += "\nPy_ssize_t nargs = PyTuple_GET_SIZE(args);"
- if has_optional_kw:
- declarations += "\nPy_ssize_t noptargs = %s + (kwargs ? PyDict_GET_SIZE(kwargs) : 0) - %d;" % (nargs, min_pos + min_kw_only)
- parser_code = [libclinic.normalize_snippet("""
- fastargs = %s(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser, %s, argsbuf);
- if (!fastargs) {{
- goto exit;
- }}
- """ % args_declaration, indent=4)]
-
- if requires_defining_class:
- flags = 'METH_METHOD|' + flags
- parser_prototype = self.PARSER_PROTOTYPE_DEF_CLASS
-
- if parser_code is not None:
- if deprecated_keywords:
- code = self.deprecate_keyword_use(f, deprecated_keywords, argname_fmt,
- clinic=clinic,
- fastcall=fastcall,
- limited_capi=limited_capi)
- parser_code.append(code)
-
- add_label: str | None = None
- for i, p in enumerate(parameters):
- if isinstance(p.converter, defining_class_converter):
- raise ValueError("defining_class should be the first "
- "parameter (after self)")
- displayname = p.get_displayname(i+1)
- parsearg = p.converter.parse_arg(argname_fmt % i, displayname, limited_capi=limited_capi)
- if parsearg is None:
- parser_code = None
- break
- if add_label and (i == pos_only or i == max_pos):
- parser_code.append("%s:" % add_label)
- add_label = None
- if not p.is_optional():
- parser_code.append(libclinic.normalize_snippet(parsearg, indent=4))
- elif i < pos_only:
- add_label = 'skip_optional_posonly'
- parser_code.append(libclinic.normalize_snippet("""
- if (nargs < %d) {{
- goto %s;
- }}
- """ % (i + 1, add_label), indent=4))
- if has_optional_kw:
- parser_code.append(libclinic.normalize_snippet("""
- noptargs--;
- """, indent=4))
- parser_code.append(libclinic.normalize_snippet(parsearg, indent=4))
- else:
- if i < max_pos:
- label = 'skip_optional_pos'
- first_opt = max(min_pos, pos_only)
- else:
- label = 'skip_optional_kwonly'
- first_opt = max_pos + min_kw_only
- if vararg != self.NO_VARARG:
- first_opt += 1
- if i == first_opt:
- add_label = label
- parser_code.append(libclinic.normalize_snippet("""
- if (!noptargs) {{
- goto %s;
- }}
- """ % add_label, indent=4))
- if i + 1 == len(parameters):
- parser_code.append(libclinic.normalize_snippet(parsearg, indent=4))
- else:
- add_label = label
- parser_code.append(libclinic.normalize_snippet("""
- if (%s) {{
- """ % (argname_fmt % i), indent=4))
- parser_code.append(libclinic.normalize_snippet(parsearg, indent=8))
- parser_code.append(libclinic.normalize_snippet("""
- if (!--noptargs) {{
- goto %s;
- }}
- }}
- """ % add_label, indent=4))
-
- if parser_code is not None:
- if add_label:
- parser_code.append("%s:" % add_label)
- else:
- for parameter in parameters:
- parameter.converter.use_converter()
-
- declarations = declare_parser(f, clinic=clinic,
- hasformat=True,
- limited_capi=limited_capi)
- if limited_capi:
- # positional-or-keyword arguments
- assert not fastcall
- flags = "METH_VARARGS|METH_KEYWORDS"
- parser_prototype = self.PARSER_PROTOTYPE_KEYWORD
- parser_code = [libclinic.normalize_snippet("""
- if (!PyArg_ParseTupleAndKeywords(args, kwargs, "{format_units}:{name}", _keywords,
- {parse_arguments}))
- goto exit;
- """, indent=4)]
- declarations = "static char *_keywords[] = {{{keywords_c} NULL}};"
- if deprecated_positionals or deprecated_keywords:
- declarations += "\nPy_ssize_t nargs = PyTuple_Size(args);"
-
- elif fastcall:
- clinic.add_include('pycore_modsupport.h',
- '_PyArg_ParseStackAndKeywords()')
- parser_code = [libclinic.normalize_snippet("""
- if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser{parse_arguments_comma}
- {parse_arguments})) {{
- goto exit;
- }}
- """, indent=4)]
- else:
- clinic.add_include('pycore_modsupport.h',
- '_PyArg_ParseTupleAndKeywordsFast()')
- parser_code = [libclinic.normalize_snippet("""
- if (!_PyArg_ParseTupleAndKeywordsFast(args, kwargs, &_parser,
- {parse_arguments})) {{
- goto exit;
- }}
- """, indent=4)]
- if deprecated_positionals or deprecated_keywords:
- declarations += "\nPy_ssize_t nargs = PyTuple_GET_SIZE(args);"
- if deprecated_keywords:
- code = self.deprecate_keyword_use(f, deprecated_keywords, None,
- clinic=clinic,
- fastcall=fastcall,
- limited_capi=limited_capi)
- parser_code.append(code)
-
- if deprecated_positionals:
- code = self.deprecate_positional_use(f, deprecated_positionals)
- # Insert the deprecation code before parameter parsing.
- parser_code.insert(0, code)
-
- assert parser_prototype is not None
- parser_definition = parser_body(parser_prototype, *parser_code,
- declarations=declarations)
-
-
- # Copy includes from parameters to Clinic after parse_arg() has been
- # called above.
- for converter in converters:
- for include in converter.includes:
- clinic.add_include(include.filename, include.reason,
- condition=include.condition)
-
- if new_or_init:
- methoddef_define = ''
-
- if f.kind is METHOD_NEW:
- parser_prototype = self.PARSER_PROTOTYPE_KEYWORD
- else:
- return_value_declaration = "int return_value = -1;"
- parser_prototype = self.PARSER_PROTOTYPE_KEYWORD___INIT__
-
- fields = list(parser_body_fields)
- parses_positional = 'METH_NOARGS' not in flags
- parses_keywords = 'METH_KEYWORDS' in flags
- if parses_keywords:
- assert parses_positional
-
- if requires_defining_class:
- raise ValueError("Slot methods cannot access their defining class.")
-
- if not parses_keywords:
- declarations = '{base_type_ptr}'
- clinic.add_include('pycore_modsupport.h',
- '_PyArg_NoKeywords()')
- fields.insert(0, libclinic.normalize_snippet("""
- if ({self_type_check}!_PyArg_NoKeywords("{name}", kwargs)) {{
- goto exit;
- }}
- """, indent=4))
- if not parses_positional:
- clinic.add_include('pycore_modsupport.h',
- '_PyArg_NoPositional()')
- fields.insert(0, libclinic.normalize_snippet("""
- if ({self_type_check}!_PyArg_NoPositional("{name}", args)) {{
- goto exit;
- }}
- """, indent=4))
-
- parser_definition = parser_body(parser_prototype, *fields,
- declarations=declarations)
-
-
- methoddef_cast_end = ""
- if flags in ('METH_NOARGS', 'METH_O', 'METH_VARARGS'):
- methoddef_cast = "(PyCFunction)"
- elif f.kind is GETTER:
- methoddef_cast = "" # This should end up unused
- elif limited_capi:
- methoddef_cast = "(PyCFunction)(void(*)(void))"
- else:
- methoddef_cast = "_PyCFunction_CAST("
- methoddef_cast_end = ")"
-
- if f.methoddef_flags:
- flags += '|' + f.methoddef_flags
-
- methoddef_define = methoddef_define.replace('{methoddef_flags}', flags)
- methoddef_define = methoddef_define.replace('{methoddef_cast}', methoddef_cast)
- methoddef_define = methoddef_define.replace('{methoddef_cast_end}', methoddef_cast_end)
-
- methoddef_ifndef = ''
- conditional = self.cpp.condition()
- if not conditional:
- cpp_if = cpp_endif = ''
- else:
- cpp_if = "#if " + conditional
- cpp_endif = "#endif /* " + conditional + " */"
-
- if methoddef_define and f.full_name not in clinic.ifndef_symbols:
- clinic.ifndef_symbols.add(f.full_name)
- methoddef_ifndef = self.METHODDEF_PROTOTYPE_IFNDEF
-
- # add ';' to the end of parser_prototype and impl_prototype
- # (they mustn't be None, but they could be an empty string.)
- assert parser_prototype is not None
- if parser_prototype:
- assert not parser_prototype.endswith(';')
- parser_prototype += ';'
-
- if impl_prototype is None:
- impl_prototype = impl_definition
- if impl_prototype:
- impl_prototype += ";"
-
- parser_definition = parser_definition.replace("{return_value_declaration}", return_value_declaration)
-
- compiler_warning = self.compiler_deprecated_warning(f, parameters)
- if compiler_warning:
- parser_definition = compiler_warning + "\n\n" + parser_definition
-
- d = {
- "docstring_prototype" : docstring_prototype,
- "docstring_definition" : docstring_definition,
- "impl_prototype" : impl_prototype,
- "methoddef_define" : methoddef_define,
- "parser_prototype" : parser_prototype,
- "parser_definition" : parser_definition,
- "impl_definition" : impl_definition,
- "cpp_if" : cpp_if,
- "cpp_endif" : cpp_endif,
- "methoddef_ifndef" : methoddef_ifndef,
- }
-
- # make sure we didn't forget to assign something,
- # and wrap each non-empty value in \n's
- d2 = {}
- for name, value in d.items():
- assert value is not None, "got a None value for template " + repr(name)
- if value:
- value = '\n' + value + '\n'
- d2[name] = value
- return d2
+ args = ParseArgsCodeGen(f, codegen)
+ return args.parse_args(self)
@staticmethod
def group_to_variable_name(group: int) -> str:
@@ -1190,16 +352,17 @@ def render_function(
clinic: Clinic,
f: Function | None
) -> str:
- if f is None or clinic is None:
+ if f is None:
return ""
+ codegen = clinic.codegen
data = CRenderData()
assert f.parameters, "We should always have a 'self' at this point!"
parameters = f.render_parameters
converters = [p.converter for p in parameters]
- templates = self.output_templates(f, clinic)
+ templates = self.output_templates(f, codegen)
f_self = parameters[0]
selfless = parameters[1:]
@@ -1323,7 +486,7 @@ def render_function(
if has_option_groups:
self.render_option_group_parsing(f, template_dict,
- limited_capi=clinic.limited_capi)
+ limited_capi=codegen.limited_capi)
# buffers, not destination
for name, destination in clinic.destination_buffers.items():
diff --git a/Tools/clinic/libclinic/codegen.py b/Tools/clinic/libclinic/codegen.py
index ad08e22e2e1c2c..b2f1db6f8ef8da 100644
--- a/Tools/clinic/libclinic/codegen.py
+++ b/Tools/clinic/libclinic/codegen.py
@@ -6,13 +6,92 @@
import libclinic
from libclinic import fail
-from libclinic.crenderdata import Include
from libclinic.language import Language
from libclinic.block_parser import Block
if TYPE_CHECKING:
from libclinic.app import Clinic
+TemplateDict = dict[str, str]
+
+
+class CRenderData:
+ def __init__(self) -> None:
+
+ # The C statements to declare variables.
+ # Should be full lines with \n eol characters.
+ self.declarations: list[str] = []
+
+ # The C statements required to initialize the variables before the parse call.
+ # Should be full lines with \n eol characters.
+ self.initializers: list[str] = []
+
+ # The C statements needed to dynamically modify the values
+ # parsed by the parse call, before calling the impl.
+ self.modifications: list[str] = []
+
+ # The entries for the "keywords" array for PyArg_ParseTuple.
+ # Should be individual strings representing the names.
+ self.keywords: list[str] = []
+
+ # The "format units" for PyArg_ParseTuple.
+ # Should be individual strings that will get
+ self.format_units: list[str] = []
+
+ # The varargs arguments for PyArg_ParseTuple.
+ self.parse_arguments: list[str] = []
+
+ # The parameter declarations for the impl function.
+ self.impl_parameters: list[str] = []
+
+ # The arguments to the impl function at the time it's called.
+ self.impl_arguments: list[str] = []
+
+ # For return converters: the name of the variable that
+ # should receive the value returned by the impl.
+ self.return_value = "return_value"
+
+ # For return converters: the code to convert the return
+ # value from the parse function. This is also where
+ # you should check the _return_value for errors, and
+ # "goto exit" if there are any.
+ self.return_conversion: list[str] = []
+ self.converter_retval = "_return_value"
+
+ # The C statements required to do some operations
+ # after the end of parsing but before cleaning up.
+ # These operations may be, for example, memory deallocations which
+ # can only be done without any error happening during argument parsing.
+ self.post_parsing: list[str] = []
+
+ # The C statements required to clean up after the impl call.
+ self.cleanup: list[str] = []
+
+ # The C statements to generate critical sections (per-object locking).
+ self.lock: list[str] = []
+ self.unlock: list[str] = []
+
+
+@dc.dataclass(slots=True, frozen=True)
+class Include:
+ """
+ An include like: #include "pycore_long.h" // _Py_ID()
+ """
+ # Example: "pycore_long.h".
+ filename: str
+
+ # Example: "_Py_ID()".
+ reason: str
+
+ # None means unconditional include.
+ # Example: "#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)".
+ condition: str | None
+
+ def sort_key(self) -> tuple[str, str]:
+ # order: '#if' comes before 'NO_CONDITION'
+ return (self.condition or 'NO_CONDITION', self.filename)
+
+
@dc.dataclass(slots=True)
class BlockPrinter:
language: Language
@@ -25,9 +104,7 @@ def print_block(
self,
block: Block,
*,
- core_includes: bool = False,
- limited_capi: bool,
- header_includes: dict[str, Include],
+ header_includes: list[Include] | None = None,
) -> None:
input = block.input
output = block.output
@@ -56,13 +133,12 @@ def print_block(
write("\n")
output = ''
- if core_includes and header_includes:
+ if header_includes:
# Emit optional "#include" directives for C headers
output += '\n'
current_condition: str | None = None
- includes = sorted(header_includes.values(), key=Include.sort_key)
- for include in includes:
+ for include in header_includes:
if include.condition != current_condition:
if current_condition:
output += '#endif\n'
@@ -188,3 +264,39 @@ def dump(self) -> str:
DestinationDict = dict[str, Destination]
+
+
+class CodeGen:
+ def __init__(self, limited_capi: bool) -> None:
+ self.limited_capi = limited_capi
+ self._ifndef_symbols: set[str] = set()
+ # dict: include name => Include instance
+ self._includes: dict[str, Include] = {}
+
+ def add_ifndef_symbol(self, name: str) -> bool:
+ if name in self._ifndef_symbols:
+ return False
+ self._ifndef_symbols.add(name)
+ return True
+
+ def add_include(self, name: str, reason: str,
+ *, condition: str | None = None) -> None:
+ try:
+ existing = self._includes[name]
+ except KeyError:
+ pass
+ else:
+ if existing.condition and not condition:
+ # If the previous include has a condition and the new one is
+ # unconditional, override the include.
+ pass
+ else:
+ # Already included, do nothing. Only mention a single reason,
+ # no need to list all of them.
+ return
+
+ self._includes[name] = Include(name, reason, condition)
+
+ def get_includes(self) -> list[Include]:
+ return sorted(self._includes.values(),
+ key=Include.sort_key)
diff --git a/Tools/clinic/libclinic/converter.py b/Tools/clinic/libclinic/converter.py
index ac78be3f7958da..86853bb4fba253 100644
--- a/Tools/clinic/libclinic/converter.py
+++ b/Tools/clinic/libclinic/converter.py
@@ -7,7 +7,7 @@
import libclinic
from libclinic import fail
from libclinic import Sentinels, unspecified, unknown
-from libclinic.crenderdata import CRenderData, Include, TemplateDict
+from libclinic.codegen import CRenderData, Include, TemplateDict
from libclinic.function import Function, Parameter
@@ -180,7 +180,7 @@ def __init__(self,
self.name = libclinic.ensure_legal_c_identifier(name)
self.py_name = py_name
self.unused = unused
- self.includes: list[Include] = []
+ self._includes: list[Include] = []
if default is not unspecified:
if (self.default_type
@@ -513,7 +513,10 @@ def parser_name(self) -> str:
def add_include(self, name: str, reason: str,
*, condition: str | None = None) -> None:
include = Include(name, reason, condition)
- self.includes.append(include)
+ self._includes.append(include)
+
+ def get_includes(self) -> list[Include]:
+ return self._includes
ConverterType = Callable[..., CConverter]
diff --git a/Tools/clinic/libclinic/converters.py b/Tools/clinic/libclinic/converters.py
index 7fc16f17450aaa..0778961f5b5875 100644
--- a/Tools/clinic/libclinic/converters.py
+++ b/Tools/clinic/libclinic/converters.py
@@ -9,7 +9,7 @@
Function, Parameter,
CALLABLE, STATIC_METHOD, CLASS_METHOD, METHOD_INIT, METHOD_NEW,
GETTER, SETTER)
-from libclinic.crenderdata import CRenderData, TemplateDict
+from libclinic.codegen import CRenderData, TemplateDict
from libclinic.converter import (
CConverter, legacy_converters, add_legacy_c_converter)
diff --git a/Tools/clinic/libclinic/crenderdata.py b/Tools/clinic/libclinic/crenderdata.py
deleted file mode 100644
index 58976b8185ebae..00000000000000
--- a/Tools/clinic/libclinic/crenderdata.py
+++ /dev/null
@@ -1,81 +0,0 @@
-import dataclasses as dc
-
-
-TemplateDict = dict[str, str]
-
-
-class CRenderData:
- def __init__(self) -> None:
-
- # The C statements to declare variables.
- # Should be full lines with \n eol characters.
- self.declarations: list[str] = []
-
- # The C statements required to initialize the variables before the parse call.
- # Should be full lines with \n eol characters.
- self.initializers: list[str] = []
-
- # The C statements needed to dynamically modify the values
- # parsed by the parse call, before calling the impl.
- self.modifications: list[str] = []
-
- # The entries for the "keywords" array for PyArg_ParseTuple.
- # Should be individual strings representing the names.
- self.keywords: list[str] = []
-
- # The "format units" for PyArg_ParseTuple.
- # Should be individual strings that will get
- self.format_units: list[str] = []
-
- # The varargs arguments for PyArg_ParseTuple.
- self.parse_arguments: list[str] = []
-
- # The parameter declarations for the impl function.
- self.impl_parameters: list[str] = []
-
- # The arguments to the impl function at the time it's called.
- self.impl_arguments: list[str] = []
-
- # For return converters: the name of the variable that
- # should receive the value returned by the impl.
- self.return_value = "return_value"
-
- # For return converters: the code to convert the return
- # value from the parse function. This is also where
- # you should check the _return_value for errors, and
- # "goto exit" if there are any.
- self.return_conversion: list[str] = []
- self.converter_retval = "_return_value"
-
- # The C statements required to do some operations
- # after the end of parsing but before cleaning up.
- # These operations may be, for example, memory deallocations which
- # can only be done without any error happening during argument parsing.
- self.post_parsing: list[str] = []
-
- # The C statements required to clean up after the impl call.
- self.cleanup: list[str] = []
-
- # The C statements to generate critical sections (per-object locking).
- self.lock: list[str] = []
- self.unlock: list[str] = []
-
-
-@dc.dataclass(slots=True, frozen=True)
-class Include:
- """
- An include like: #include "pycore_long.h" // _Py_ID()
- """
- # Example: "pycore_long.h".
- filename: str
-
- # Example: "_Py_ID()".
- reason: str
-
- # None means unconditional include.
- # Example: "#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)".
- condition: str | None
-
- def sort_key(self) -> tuple[str, str]:
- # order: '#if' comes before 'NO_CONDITION'
- return (self.condition or 'NO_CONDITION', self.filename)
diff --git a/Tools/clinic/libclinic/dsl_parser.py b/Tools/clinic/libclinic/dsl_parser.py
index 4c739efe1066e4..56c6dca3db3d1d 100644
--- a/Tools/clinic/libclinic/dsl_parser.py
+++ b/Tools/clinic/libclinic/dsl_parser.py
@@ -666,6 +666,8 @@ def state_modulename_name(self, line: str) -> None:
if equals:
existing = existing.strip()
if libclinic.is_legal_py_identifier(existing):
+ if self.forced_text_signature:
+ fail("Cannot use @text_signature when cloning a function")
# we're cloning!
names = self.parse_function_names(before)
return self.parse_cloned_function(names, existing)
@@ -689,7 +691,8 @@ def state_modulename_name(self, line: str) -> None:
kind=self.kind,
coexist=self.coexist,
critical_section=self.critical_section,
- target_critical_section=self.target_critical_section
+ target_critical_section=self.target_critical_section,
+ forced_text_signature=self.forced_text_signature
)
self.add_function(func)
@@ -1099,6 +1102,7 @@ def bad_node(self, node: ast.AST) -> None:
fail("A 'defining_class' parameter cannot have a default value.")
if self.group:
fail("A 'defining_class' parameter cannot be in an optional group.")
+ kind = inspect.Parameter.POSITIONAL_ONLY
else:
fail("A 'defining_class' parameter, if specified, must either "
"be the first thing in the parameter block, or come just "
@@ -1324,13 +1328,14 @@ def state_function_docstring(self, line: str) -> None:
self.docstring_append(self.function, line)
+ @staticmethod
def format_docstring_signature(
- self, f: Function, parameters: list[Parameter]
+ f: Function, parameters: list[Parameter]
) -> str:
lines = []
lines.append(f.displayname)
- if self.forced_text_signature:
- lines.append(self.forced_text_signature)
+ if f.forced_text_signature:
+ lines.append(f.forced_text_signature)
elif f.kind in {GETTER, SETTER}:
# @getter and @setter do not need signatures like a method or a function.
return ''
diff --git a/Tools/clinic/libclinic/function.py b/Tools/clinic/libclinic/function.py
index 1563fdf9065b7e..93901263e44c04 100644
--- a/Tools/clinic/libclinic/function.py
+++ b/Tools/clinic/libclinic/function.py
@@ -53,7 +53,6 @@ def __repr__(self) -> str:
class FunctionKind(enum.Enum):
- INVALID = enum.auto()
CALLABLE = enum.auto()
STATIC_METHOD = enum.auto()
CLASS_METHOD = enum.auto()
@@ -70,7 +69,6 @@ def __repr__(self) -> str:
return f""
-INVALID: Final = FunctionKind.INVALID
CALLABLE: Final = FunctionKind.CALLABLE
STATIC_METHOD: Final = FunctionKind.STATIC_METHOD
CLASS_METHOD: Final = FunctionKind.CLASS_METHOD
@@ -109,6 +107,7 @@ class Function:
# functions with optional groups because we can't represent
# those accurately with inspect.Signature in 3.4.
docstring_only: bool = False
+ forced_text_signature: str | None = None
critical_section: bool = False
target_critical_section: list[str] = dc.field(default_factory=list)
diff --git a/Tools/clinic/libclinic/parse_args.py b/Tools/clinic/libclinic/parse_args.py
new file mode 100644
index 00000000000000..905f2a0ba94f4c
--- /dev/null
+++ b/Tools/clinic/libclinic/parse_args.py
@@ -0,0 +1,940 @@
+from __future__ import annotations
+from typing import TYPE_CHECKING, Final
+
+import libclinic
+from libclinic import fail, warn
+from libclinic.function import (
+ Function, Parameter,
+ GETTER, SETTER, METHOD_NEW)
+from libclinic.converter import CConverter
+from libclinic.converters import (
+ defining_class_converter, object_converter, self_converter)
+if TYPE_CHECKING:
+ from libclinic.clanguage import CLanguage
+ from libclinic.codegen import CodeGen
+
+
+def declare_parser(
+ f: Function,
+ *,
+ hasformat: bool = False,
+ codegen: CodeGen,
+) -> str:
+ """
+ Generates the code template for a static local PyArg_Parser variable,
+ with an initializer. For core code (incl. builtin modules) the
+ kwtuple field is also statically initialized. Otherwise
+ it is initialized at runtime.
+ """
+ limited_capi = codegen.limited_capi
+ if hasformat:
+ fname = ''
+ format_ = '.format = "{format_units}:{name}",'
+ else:
+ fname = '.fname = "{name}",'
+ format_ = ''
+
+ num_keywords = len([
+ p for p in f.parameters.values()
+ if not p.is_positional_only() and not p.is_vararg()
+ ])
+ if limited_capi:
+ declarations = """
+ #define KWTUPLE NULL
+ """
+ elif num_keywords == 0:
+ declarations = """
+ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
+ # define KWTUPLE (PyObject *)&_Py_SINGLETON(tuple_empty)
+ #else
+ # define KWTUPLE NULL
+ #endif
+ """
+ else:
+ declarations = """
+ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
+
+ #define NUM_KEYWORDS %d
+ static struct {{
+ PyGC_Head _this_is_not_used;
+ PyObject_VAR_HEAD
+ PyObject *ob_item[NUM_KEYWORDS];
+ }} _kwtuple = {{
+ .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
+ .ob_item = {{ {keywords_py} }},
+ }};
+ #undef NUM_KEYWORDS
+ #define KWTUPLE (&_kwtuple.ob_base.ob_base)
+
+ #else // !Py_BUILD_CORE
+ # define KWTUPLE NULL
+ #endif // !Py_BUILD_CORE
+ """ % num_keywords
+
+ condition = '#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)'
+ codegen.add_include('pycore_gc.h', 'PyGC_Head', condition=condition)
+ codegen.add_include('pycore_runtime.h', '_Py_ID()', condition=condition)
+
+ declarations += """
+ static const char * const _keywords[] = {{{keywords_c} NULL}};
+ static _PyArg_Parser _parser = {{
+ .keywords = _keywords,
+ %s
+ .kwtuple = KWTUPLE,
+ }};
+ #undef KWTUPLE
+ """ % (format_ or fname)
+ return libclinic.normalize_snippet(declarations)
+
+
+NO_VARARG: Final[str] = "PY_SSIZE_T_MAX"
+PARSER_PROTOTYPE_KEYWORD: Final[str] = libclinic.normalize_snippet("""
+ static PyObject *
+ {c_basename}({self_type}{self_name}, PyObject *args, PyObject *kwargs)
+""")
+PARSER_PROTOTYPE_KEYWORD___INIT__: Final[str] = libclinic.normalize_snippet("""
+ static int
+ {c_basename}({self_type}{self_name}, PyObject *args, PyObject *kwargs)
+""")
+PARSER_PROTOTYPE_VARARGS: Final[str] = libclinic.normalize_snippet("""
+ static PyObject *
+ {c_basename}({self_type}{self_name}, PyObject *args)
+""")
+PARSER_PROTOTYPE_FASTCALL: Final[str] = libclinic.normalize_snippet("""
+ static PyObject *
+ {c_basename}({self_type}{self_name}, PyObject *const *args, Py_ssize_t nargs)
+""")
+PARSER_PROTOTYPE_FASTCALL_KEYWORDS: Final[str] = libclinic.normalize_snippet("""
+ static PyObject *
+ {c_basename}({self_type}{self_name}, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+""")
+PARSER_PROTOTYPE_DEF_CLASS: Final[str] = libclinic.normalize_snippet("""
+ static PyObject *
+ {c_basename}({self_type}{self_name}, PyTypeObject *{defining_class_name}, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+""")
+PARSER_PROTOTYPE_NOARGS: Final[str] = libclinic.normalize_snippet("""
+ static PyObject *
+ {c_basename}({self_type}{self_name}, PyObject *Py_UNUSED(ignored))
+""")
+PARSER_PROTOTYPE_GETTER: Final[str] = libclinic.normalize_snippet("""
+ static PyObject *
+ {c_basename}({self_type}{self_name}, void *Py_UNUSED(context))
+""")
+PARSER_PROTOTYPE_SETTER: Final[str] = libclinic.normalize_snippet("""
+ static int
+ {c_basename}({self_type}{self_name}, PyObject *value, void *Py_UNUSED(context))
+""")
+METH_O_PROTOTYPE: Final[str] = libclinic.normalize_snippet("""
+ static PyObject *
+ {c_basename}({impl_parameters})
+""")
+DOCSTRING_PROTOTYPE_VAR: Final[str] = libclinic.normalize_snippet("""
+ PyDoc_VAR({c_basename}__doc__);
+""")
+DOCSTRING_PROTOTYPE_STRVAR: Final[str] = libclinic.normalize_snippet("""
+ PyDoc_STRVAR({c_basename}__doc__,
+ {docstring});
+""")
+GETSET_DOCSTRING_PROTOTYPE_STRVAR: Final[str] = libclinic.normalize_snippet("""
+ PyDoc_STRVAR({getset_basename}__doc__,
+ {docstring});
+ #define {getset_basename}_HAS_DOCSTR
+""")
+IMPL_DEFINITION_PROTOTYPE: Final[str] = libclinic.normalize_snippet("""
+ static {impl_return_type}
+ {c_basename}_impl({impl_parameters})
+""")
+METHODDEF_PROTOTYPE_DEFINE: Final[str] = libclinic.normalize_snippet(r"""
+ #define {methoddef_name} \
+ {{"{name}", {methoddef_cast}{c_basename}{methoddef_cast_end}, {methoddef_flags}, {c_basename}__doc__}},
+""")
+GETTERDEF_PROTOTYPE_DEFINE: Final[str] = libclinic.normalize_snippet(r"""
+ #if defined({getset_basename}_HAS_DOCSTR)
+ # define {getset_basename}_DOCSTR {getset_basename}__doc__
+ #else
+ # define {getset_basename}_DOCSTR NULL
+ #endif
+ #if defined({getset_name}_GETSETDEF)
+ # undef {getset_name}_GETSETDEF
+ # define {getset_name}_GETSETDEF {{"{name}", (getter){getset_basename}_get, (setter){getset_basename}_set, {getset_basename}_DOCSTR}},
+ #else
+ # define {getset_name}_GETSETDEF {{"{name}", (getter){getset_basename}_get, NULL, {getset_basename}_DOCSTR}},
+ #endif
+""")
+SETTERDEF_PROTOTYPE_DEFINE: Final[str] = libclinic.normalize_snippet(r"""
+ #if defined({getset_name}_HAS_DOCSTR)
+ # define {getset_basename}_DOCSTR {getset_basename}__doc__
+ #else
+ # define {getset_basename}_DOCSTR NULL
+ #endif
+ #if defined({getset_name}_GETSETDEF)
+ # undef {getset_name}_GETSETDEF
+ # define {getset_name}_GETSETDEF {{"{name}", (getter){getset_basename}_get, (setter){getset_basename}_set, {getset_basename}_DOCSTR}},
+ #else
+ # define {getset_name}_GETSETDEF {{"{name}", NULL, (setter){getset_basename}_set, NULL}},
+ #endif
+""")
+METHODDEF_PROTOTYPE_IFNDEF: Final[str] = libclinic.normalize_snippet("""
+ #ifndef {methoddef_name}
+ #define {methoddef_name}
+ #endif /* !defined({methoddef_name}) */
+""")
+
+
+class ParseArgsCodeGen:
+ func: Function
+ codegen: CodeGen
+ limited_capi: bool = False
+
+ # Function parameters
+ parameters: list[Parameter]
+ converters: list[CConverter]
+
+ # Is 'defining_class' used for the first parameter?
+ requires_defining_class: bool
+
+ # Use METH_FASTCALL calling convention?
+ fastcall: bool
+
+ # Declaration of the return variable (ex: "int return_value;")
+ return_value_declaration: str
+
+ # Calling convention (ex: "METH_NOARGS")
+ flags: str
+
+ # Variables declarations
+ declarations: str
+
+ pos_only: int = 0
+ min_pos: int = 0
+ max_pos: int = 0
+ min_kw_only: int = 0
+ pseudo_args: int = 0
+ vararg: int | str = NO_VARARG
+
+ docstring_prototype: str
+ docstring_definition: str
+ impl_prototype: str | None
+ impl_definition: str
+ methoddef_define: str
+ parser_prototype: str
+ parser_definition: str
+ cpp_if: str
+ cpp_endif: str
+ methoddef_ifndef: str
+
+ parser_body_fields: tuple[str, ...]
+
+ def __init__(self, func: Function, codegen: CodeGen) -> None:
+ self.func = func
+ self.codegen = codegen
+
+ self.parameters = list(self.func.parameters.values())
+ first_param = self.parameters.pop(0)
+ if not isinstance(first_param.converter, self_converter):
+ raise ValueError("the first parameter must use self_converter")
+
+ self.requires_defining_class = False
+ if self.parameters and isinstance(self.parameters[0].converter, defining_class_converter):
+ self.requires_defining_class = True
+ del self.parameters[0]
+ self.converters = [p.converter for p in self.parameters]
+
+ if self.func.critical_section:
+ self.codegen.add_include('pycore_critical_section.h',
+ 'Py_BEGIN_CRITICAL_SECTION()')
+ self.fastcall = not self.is_new_or_init()
+
+ self.pos_only = 0
+ self.min_pos = 0
+ self.max_pos = 0
+ self.min_kw_only = 0
+ self.pseudo_args = 0
+ for i, p in enumerate(self.parameters, 1):
+ if p.is_keyword_only():
+ assert not p.is_positional_only()
+ if not p.is_optional():
+ self.min_kw_only = i - self.max_pos
+ elif p.is_vararg():
+ self.pseudo_args += 1
+ self.vararg = i - 1
+ else:
+ if self.vararg == NO_VARARG:
+ self.max_pos = i
+ if p.is_positional_only():
+ self.pos_only = i
+ if not p.is_optional():
+ self.min_pos = i
+
+ def is_new_or_init(self) -> bool:
+ return self.func.kind.new_or_init
+
+ def has_option_groups(self) -> bool:
+ return (bool(self.parameters
+ and (self.parameters[0].group or self.parameters[-1].group)))
+
+ def use_meth_o(self) -> bool:
+ return (len(self.parameters) == 1
+ and self.parameters[0].is_positional_only()
+ and not self.converters[0].is_optional()
+ and not self.requires_defining_class
+ and not self.is_new_or_init())
+
+ def use_simple_return(self) -> bool:
+ return (self.func.return_converter.type == 'PyObject *'
+ and not self.func.critical_section)
+
+ def select_prototypes(self) -> None:
+ self.docstring_prototype = ''
+ self.docstring_definition = ''
+ self.methoddef_define = METHODDEF_PROTOTYPE_DEFINE
+ self.return_value_declaration = "PyObject *return_value = NULL;"
+
+ if self.is_new_or_init() and not self.func.docstring:
+ pass
+ elif self.func.kind is GETTER:
+ self.methoddef_define = GETTERDEF_PROTOTYPE_DEFINE
+ if self.func.docstring:
+ self.docstring_definition = GETSET_DOCSTRING_PROTOTYPE_STRVAR
+ elif self.func.kind is SETTER:
+ if self.func.docstring:
+ fail("docstrings are only supported for @getter, not @setter")
+ self.return_value_declaration = "int {return_value};"
+ self.methoddef_define = SETTERDEF_PROTOTYPE_DEFINE
+ else:
+ self.docstring_prototype = DOCSTRING_PROTOTYPE_VAR
+ self.docstring_definition = DOCSTRING_PROTOTYPE_STRVAR
+
+ def init_limited_capi(self) -> None:
+ self.limited_capi = self.codegen.limited_capi
+ if self.limited_capi and (self.pseudo_args or
+ (any(p.is_optional() for p in self.parameters) and
+ any(p.is_keyword_only() and not p.is_optional() for p in self.parameters)) or
+ any(c.broken_limited_capi for c in self.converters)):
+ warn(f"Function {self.func.full_name} cannot use limited C API")
+ self.limited_capi = False
+
+ def parser_body(
+ self,
+ *fields: str,
+ declarations: str = ''
+ ) -> None:
+ lines = [self.parser_prototype]
+ self.parser_body_fields = fields
+
+ preamble = libclinic.normalize_snippet("""
+ {{
+ {return_value_declaration}
+ {parser_declarations}
+ {declarations}
+ {initializers}
+ """) + "\n"
+ finale = libclinic.normalize_snippet("""
+ {modifications}
+ {lock}
+ {return_value} = {c_basename}_impl({impl_arguments});
+ {unlock}
+ {return_conversion}
+ {post_parsing}
+
+ {exit_label}
+ {cleanup}
+ return return_value;
+ }}
+ """)
+ for field in preamble, *fields, finale:
+ lines.append(field)
+ code = libclinic.linear_format("\n".join(lines),
+ parser_declarations=self.declarations)
+ self.parser_definition = code
+
+ def parse_no_args(self) -> None:
+ parser_code: list[str] | None
+ simple_return = self.use_simple_return()
+ if self.func.kind is GETTER:
+ self.parser_prototype = PARSER_PROTOTYPE_GETTER
+ parser_code = []
+ elif self.func.kind is SETTER:
+ self.parser_prototype = PARSER_PROTOTYPE_SETTER
+ parser_code = []
+ elif not self.requires_defining_class:
+ # no self.parameters, METH_NOARGS
+ self.flags = "METH_NOARGS"
+ self.parser_prototype = PARSER_PROTOTYPE_NOARGS
+ parser_code = []
+ else:
+ assert self.fastcall
+
+ self.flags = "METH_METHOD|METH_FASTCALL|METH_KEYWORDS"
+ self.parser_prototype = PARSER_PROTOTYPE_DEF_CLASS
+ return_error = ('return NULL;' if simple_return
+ else 'goto exit;')
+ parser_code = [libclinic.normalize_snippet("""
+ if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) {{
+ PyErr_SetString(PyExc_TypeError, "{name}() takes no arguments");
+ %s
+ }}
+ """ % return_error, indent=4)]
+
+ if simple_return:
+ self.parser_definition = '\n'.join([
+ self.parser_prototype,
+ '{{',
+ *parser_code,
+ ' return {c_basename}_impl({impl_arguments});',
+ '}}'])
+ else:
+ self.parser_body(*parser_code)
+
+ def parse_one_arg(self) -> None:
+ self.flags = "METH_O"
+
+ if (isinstance(self.converters[0], object_converter) and
+ self.converters[0].format_unit == 'O'):
+ meth_o_prototype = METH_O_PROTOTYPE
+
+ if self.use_simple_return():
+ # maps perfectly to METH_O, doesn't need a return converter.
+ # so we skip making a parse function
+ # and call directly into the impl function.
+ self.impl_prototype = ''
+ self.impl_definition = meth_o_prototype
+ else:
+ # SLIGHT HACK
+ # use impl_parameters for the parser here!
+ self.parser_prototype = meth_o_prototype
+ self.parser_body()
+
+ else:
+ argname = 'arg'
+ if self.parameters[0].name == argname:
+ argname += '_'
+ self.parser_prototype = libclinic.normalize_snippet("""
+ static PyObject *
+ {c_basename}({self_type}{self_name}, PyObject *%s)
+ """ % argname)
+
+ displayname = self.parameters[0].get_displayname(0)
+ parsearg: str | None
+ parsearg = self.converters[0].parse_arg(argname, displayname,
+ limited_capi=self.limited_capi)
+ if parsearg is None:
+ self.converters[0].use_converter()
+ parsearg = """
+ if (!PyArg_Parse(%s, "{format_units}:{name}", {parse_arguments})) {{
+ goto exit;
+ }}
+ """ % argname
+
+ parser_code = libclinic.normalize_snippet(parsearg, indent=4)
+ self.parser_body(parser_code)
+
+ def parse_option_groups(self) -> None:
+ # positional parameters with option groups
+ # (we have to generate lots of PyArg_ParseTuple calls
+ # in a big switch statement)
+
+ self.flags = "METH_VARARGS"
+ self.parser_prototype = PARSER_PROTOTYPE_VARARGS
+ parser_code = ' {option_group_parsing}'
+ self.parser_body(parser_code)
+
+ def parse_pos_only(self) -> None:
+ if self.fastcall:
+ # positional-only, but no option groups
+ # we only need one call to _PyArg_ParseStack
+
+ self.flags = "METH_FASTCALL"
+ self.parser_prototype = PARSER_PROTOTYPE_FASTCALL
+ nargs = 'nargs'
+ argname_fmt = 'args[%d]'
+ else:
+ # positional-only, but no option groups
+ # we only need one call to PyArg_ParseTuple
+
+ self.flags = "METH_VARARGS"
+ self.parser_prototype = PARSER_PROTOTYPE_VARARGS
+ if self.limited_capi:
+ nargs = 'PyTuple_Size(args)'
+ argname_fmt = 'PyTuple_GetItem(args, %d)'
+ else:
+ nargs = 'PyTuple_GET_SIZE(args)'
+ argname_fmt = 'PyTuple_GET_ITEM(args, %d)'
+
+ left_args = f"{nargs} - {self.max_pos}"
+ max_args = NO_VARARG if (self.vararg != NO_VARARG) else self.max_pos
+ if self.limited_capi:
+ parser_code = []
+ if nargs != 'nargs':
+ nargs_def = f'Py_ssize_t nargs = {nargs};'
+ parser_code.append(libclinic.normalize_snippet(nargs_def, indent=4))
+ nargs = 'nargs'
+ if self.min_pos == max_args:
+ pl = '' if self.min_pos == 1 else 's'
+ parser_code.append(libclinic.normalize_snippet(f"""
+ if ({nargs} != {self.min_pos}) {{{{
+ PyErr_Format(PyExc_TypeError, "{{name}} expected {self.min_pos} argument{pl}, got %zd", {nargs});
+ goto exit;
+ }}}}
+ """,
+ indent=4))
+ else:
+ if self.min_pos:
+ pl = '' if self.min_pos == 1 else 's'
+ parser_code.append(libclinic.normalize_snippet(f"""
+ if ({nargs} < {self.min_pos}) {{{{
+ PyErr_Format(PyExc_TypeError, "{{name}} expected at least {self.min_pos} argument{pl}, got %zd", {nargs});
+ goto exit;
+ }}}}
+ """,
+ indent=4))
+ if max_args != NO_VARARG:
+ pl = '' if max_args == 1 else 's'
+ parser_code.append(libclinic.normalize_snippet(f"""
+ if ({nargs} > {max_args}) {{{{
+ PyErr_Format(PyExc_TypeError, "{{name}} expected at most {max_args} argument{pl}, got %zd", {nargs});
+ goto exit;
+ }}}}
+ """,
+ indent=4))
+ else:
+ self.codegen.add_include('pycore_modsupport.h',
+ '_PyArg_CheckPositional()')
+ parser_code = [libclinic.normalize_snippet(f"""
+ if (!_PyArg_CheckPositional("{{name}}", {nargs}, {self.min_pos}, {max_args})) {{{{
+ goto exit;
+ }}}}
+ """, indent=4)]
+
+ has_optional = False
+ use_parser_code = True
+ for i, p in enumerate(self.parameters):
+ if p.is_vararg():
+ if self.fastcall:
+ parser_code.append(libclinic.normalize_snippet("""
+ %s = PyTuple_New(%s);
+ if (!%s) {{
+ goto exit;
+ }}
+ for (Py_ssize_t i = 0; i < %s; ++i) {{
+ PyTuple_SET_ITEM(%s, i, Py_NewRef(args[%d + i]));
+ }}
+ """ % (
+ p.converter.parser_name,
+ left_args,
+ p.converter.parser_name,
+ left_args,
+ p.converter.parser_name,
+ self.max_pos
+ ), indent=4))
+ else:
+ parser_code.append(libclinic.normalize_snippet("""
+ %s = PyTuple_GetSlice(%d, -1);
+ """ % (
+ p.converter.parser_name,
+ self.max_pos
+ ), indent=4))
+ continue
+
+ displayname = p.get_displayname(i+1)
+ argname = argname_fmt % i
+ parsearg: str | None
+ parsearg = p.converter.parse_arg(argname, displayname, limited_capi=self.limited_capi)
+ if parsearg is None:
+ use_parser_code = False
+ parser_code = []
+ break
+ if has_optional or p.is_optional():
+ has_optional = True
+ parser_code.append(libclinic.normalize_snippet("""
+ if (%s < %d) {{
+ goto skip_optional;
+ }}
+ """, indent=4) % (nargs, i + 1))
+ parser_code.append(libclinic.normalize_snippet(parsearg, indent=4))
+
+ if use_parser_code:
+ if has_optional:
+ parser_code.append("skip_optional:")
+ else:
+ for parameter in self.parameters:
+ parameter.converter.use_converter()
+
+ if self.limited_capi:
+ self.fastcall = False
+ if self.fastcall:
+ self.codegen.add_include('pycore_modsupport.h',
+ '_PyArg_ParseStack()')
+ parser_code = [libclinic.normalize_snippet("""
+ if (!_PyArg_ParseStack(args, nargs, "{format_units}:{name}",
+ {parse_arguments})) {{
+ goto exit;
+ }}
+ """, indent=4)]
+ else:
+ self.flags = "METH_VARARGS"
+ self.parser_prototype = PARSER_PROTOTYPE_VARARGS
+ parser_code = [libclinic.normalize_snippet("""
+ if (!PyArg_ParseTuple(args, "{format_units}:{name}",
+ {parse_arguments})) {{
+ goto exit;
+ }}
+ """, indent=4)]
+ self.parser_body(*parser_code)
+
+ def parse_general(self, clang: CLanguage) -> None:
+ parsearg: str | None
+ deprecated_positionals: dict[int, Parameter] = {}
+ deprecated_keywords: dict[int, Parameter] = {}
+ for i, p in enumerate(self.parameters):
+ if p.deprecated_positional:
+ deprecated_positionals[i] = p
+ if p.deprecated_keyword:
+ deprecated_keywords[i] = p
+
+ has_optional_kw = (
+ max(self.pos_only, self.min_pos) + self.min_kw_only
+ < len(self.converters) - int(self.vararg != NO_VARARG)
+ )
+
+ use_parser_code = True
+ if self.limited_capi:
+ parser_code = []
+ use_parser_code = False
+ self.fastcall = False
+ else:
+ if self.vararg == NO_VARARG:
+ self.codegen.add_include('pycore_modsupport.h',
+ '_PyArg_UnpackKeywords()')
+ args_declaration = "_PyArg_UnpackKeywords", "%s, %s, %s" % (
+ self.min_pos,
+ self.max_pos,
+ self.min_kw_only
+ )
+ nargs = "nargs"
+ else:
+ self.codegen.add_include('pycore_modsupport.h',
+ '_PyArg_UnpackKeywordsWithVararg()')
+ args_declaration = "_PyArg_UnpackKeywordsWithVararg", "%s, %s, %s, %s" % (
+ self.min_pos,
+ self.max_pos,
+ self.min_kw_only,
+ self.vararg
+ )
+ nargs = f"Py_MIN(nargs, {self.max_pos})" if self.max_pos else "0"
+
+ if self.fastcall:
+ self.flags = "METH_FASTCALL|METH_KEYWORDS"
+ self.parser_prototype = PARSER_PROTOTYPE_FASTCALL_KEYWORDS
+ argname_fmt = 'args[%d]'
+ self.declarations = declare_parser(self.func, codegen=self.codegen)
+ self.declarations += "\nPyObject *argsbuf[%s];" % len(self.converters)
+ if has_optional_kw:
+ self.declarations += "\nPy_ssize_t noptargs = %s + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - %d;" % (nargs, self.min_pos + self.min_kw_only)
+ parser_code = [libclinic.normalize_snippet("""
+ args = %s(args, nargs, NULL, kwnames, &_parser, %s, argsbuf);
+ if (!args) {{
+ goto exit;
+ }}
+ """ % args_declaration, indent=4)]
+ else:
+ # positional-or-keyword arguments
+ self.flags = "METH_VARARGS|METH_KEYWORDS"
+ self.parser_prototype = PARSER_PROTOTYPE_KEYWORD
+ argname_fmt = 'fastargs[%d]'
+ self.declarations = declare_parser(self.func, codegen=self.codegen)
+ self.declarations += "\nPyObject *argsbuf[%s];" % len(self.converters)
+ self.declarations += "\nPyObject * const *fastargs;"
+ self.declarations += "\nPy_ssize_t nargs = PyTuple_GET_SIZE(args);"
+ if has_optional_kw:
+ self.declarations += "\nPy_ssize_t noptargs = %s + (kwargs ? PyDict_GET_SIZE(kwargs) : 0) - %d;" % (nargs, self.min_pos + self.min_kw_only)
+ parser_code = [libclinic.normalize_snippet("""
+ fastargs = %s(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser, %s, argsbuf);
+ if (!fastargs) {{
+ goto exit;
+ }}
+ """ % args_declaration, indent=4)]
+
+ if self.requires_defining_class:
+ self.flags = 'METH_METHOD|' + self.flags
+ self.parser_prototype = PARSER_PROTOTYPE_DEF_CLASS
+
+ if use_parser_code:
+ if deprecated_keywords:
+ code = clang.deprecate_keyword_use(self.func, deprecated_keywords,
+ argname_fmt,
+ codegen=self.codegen,
+ fastcall=self.fastcall)
+ parser_code.append(code)
+
+ add_label: str | None = None
+ for i, p in enumerate(self.parameters):
+ if isinstance(p.converter, defining_class_converter):
+ raise ValueError("defining_class should be the first "
+ "parameter (after clang)")
+ displayname = p.get_displayname(i+1)
+ parsearg = p.converter.parse_arg(argname_fmt % i, displayname, limited_capi=self.limited_capi)
+ if parsearg is None:
+ parser_code = []
+ use_parser_code = False
+ break
+ if add_label and (i == self.pos_only or i == self.max_pos):
+ parser_code.append("%s:" % add_label)
+ add_label = None
+ if not p.is_optional():
+ parser_code.append(libclinic.normalize_snippet(parsearg, indent=4))
+ elif i < self.pos_only:
+ add_label = 'skip_optional_posonly'
+ parser_code.append(libclinic.normalize_snippet("""
+ if (nargs < %d) {{
+ goto %s;
+ }}
+ """ % (i + 1, add_label), indent=4))
+ if has_optional_kw:
+ parser_code.append(libclinic.normalize_snippet("""
+ noptargs--;
+ """, indent=4))
+ parser_code.append(libclinic.normalize_snippet(parsearg, indent=4))
+ else:
+ if i < self.max_pos:
+ label = 'skip_optional_pos'
+ first_opt = max(self.min_pos, self.pos_only)
+ else:
+ label = 'skip_optional_kwonly'
+ first_opt = self.max_pos + self.min_kw_only
+ if self.vararg != NO_VARARG:
+ first_opt += 1
+ if i == first_opt:
+ add_label = label
+ parser_code.append(libclinic.normalize_snippet("""
+ if (!noptargs) {{
+ goto %s;
+ }}
+ """ % add_label, indent=4))
+ if i + 1 == len(self.parameters):
+ parser_code.append(libclinic.normalize_snippet(parsearg, indent=4))
+ else:
+ add_label = label
+ parser_code.append(libclinic.normalize_snippet("""
+ if (%s) {{
+ """ % (argname_fmt % i), indent=4))
+ parser_code.append(libclinic.normalize_snippet(parsearg, indent=8))
+ parser_code.append(libclinic.normalize_snippet("""
+ if (!--noptargs) {{
+ goto %s;
+ }}
+ }}
+ """ % add_label, indent=4))
+
+ if use_parser_code:
+ if add_label:
+ parser_code.append("%s:" % add_label)
+ else:
+ for parameter in self.parameters:
+ parameter.converter.use_converter()
+
+ self.declarations = declare_parser(self.func, codegen=self.codegen,
+ hasformat=True)
+ if self.limited_capi:
+ # positional-or-keyword arguments
+ assert not self.fastcall
+ self.flags = "METH_VARARGS|METH_KEYWORDS"
+ self.parser_prototype = PARSER_PROTOTYPE_KEYWORD
+ parser_code = [libclinic.normalize_snippet("""
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs, "{format_units}:{name}", _keywords,
+ {parse_arguments}))
+ goto exit;
+ """, indent=4)]
+ self.declarations = "static char *_keywords[] = {{{keywords_c} NULL}};"
+ if deprecated_positionals or deprecated_keywords:
+ self.declarations += "\nPy_ssize_t nargs = PyTuple_Size(args);"
+
+ elif self.fastcall:
+ self.codegen.add_include('pycore_modsupport.h',
+ '_PyArg_ParseStackAndKeywords()')
+ parser_code = [libclinic.normalize_snippet("""
+ if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser{parse_arguments_comma}
+ {parse_arguments})) {{
+ goto exit;
+ }}
+ """, indent=4)]
+ else:
+ self.codegen.add_include('pycore_modsupport.h',
+ '_PyArg_ParseTupleAndKeywordsFast()')
+ parser_code = [libclinic.normalize_snippet("""
+ if (!_PyArg_ParseTupleAndKeywordsFast(args, kwargs, &_parser,
+ {parse_arguments})) {{
+ goto exit;
+ }}
+ """, indent=4)]
+ if deprecated_positionals or deprecated_keywords:
+ self.declarations += "\nPy_ssize_t nargs = PyTuple_GET_SIZE(args);"
+ if deprecated_keywords:
+ code = clang.deprecate_keyword_use(self.func, deprecated_keywords,
+ codegen=self.codegen,
+ fastcall=self.fastcall)
+ parser_code.append(code)
+
+ if deprecated_positionals:
+ code = clang.deprecate_positional_use(self.func, deprecated_positionals)
+ # Insert the deprecation code before parameter parsing.
+ parser_code.insert(0, code)
+
+ assert self.parser_prototype is not None
+ self.parser_body(*parser_code, declarations=self.declarations)
+
+ def copy_includes(self) -> None:
+ # Copy includes from parameters to Clinic after parse_arg()
+ # has been called above.
+ for converter in self.converters:
+ for include in converter.get_includes():
+ self.codegen.add_include(
+ include.filename,
+ include.reason,
+ condition=include.condition)
+
+ def handle_new_or_init(self) -> None:
+ self.methoddef_define = ''
+
+ if self.func.kind is METHOD_NEW:
+ self.parser_prototype = PARSER_PROTOTYPE_KEYWORD
+ else:
+ self.return_value_declaration = "int return_value = -1;"
+ self.parser_prototype = PARSER_PROTOTYPE_KEYWORD___INIT__
+
+ fields: list[str] = list(self.parser_body_fields)
+ parses_positional = 'METH_NOARGS' not in self.flags
+ parses_keywords = 'METH_KEYWORDS' in self.flags
+ if parses_keywords:
+ assert parses_positional
+
+ if self.requires_defining_class:
+ raise ValueError("Slot methods cannot access their defining class.")
+
+ if not parses_keywords:
+ self.declarations = '{base_type_ptr}'
+ self.codegen.add_include('pycore_modsupport.h',
+ '_PyArg_NoKeywords()')
+ fields.insert(0, libclinic.normalize_snippet("""
+ if ({self_type_check}!_PyArg_NoKeywords("{name}", kwargs)) {{
+ goto exit;
+ }}
+ """, indent=4))
+ if not parses_positional:
+ self.codegen.add_include('pycore_modsupport.h',
+ '_PyArg_NoPositional()')
+ fields.insert(0, libclinic.normalize_snippet("""
+ if ({self_type_check}!_PyArg_NoPositional("{name}", args)) {{
+ goto exit;
+ }}
+ """, indent=4))
+
+ self.parser_body(*fields, declarations=self.declarations)
+
+ def process_methoddef(self, clang: CLanguage) -> None:
+ methoddef_cast_end = ""
+ if self.flags in ('METH_NOARGS', 'METH_O', 'METH_VARARGS'):
+ methoddef_cast = "(PyCFunction)"
+ elif self.func.kind is GETTER:
+ methoddef_cast = "" # This should end up unused
+ elif self.limited_capi:
+ methoddef_cast = "(PyCFunction)(void(*)(void))"
+ else:
+ methoddef_cast = "_PyCFunction_CAST("
+ methoddef_cast_end = ")"
+
+ if self.func.methoddef_flags:
+ self.flags += '|' + self.func.methoddef_flags
+
+ self.methoddef_define = self.methoddef_define.replace('{methoddef_flags}', self.flags)
+ self.methoddef_define = self.methoddef_define.replace('{methoddef_cast}', methoddef_cast)
+ self.methoddef_define = self.methoddef_define.replace('{methoddef_cast_end}', methoddef_cast_end)
+
+ self.methoddef_ifndef = ''
+ conditional = clang.cpp.condition()
+ if not conditional:
+ self.cpp_if = self.cpp_endif = ''
+ else:
+ self.cpp_if = "#if " + conditional
+ self.cpp_endif = "#endif /* " + conditional + " */"
+
+ if self.methoddef_define and self.codegen.add_ifndef_symbol(self.func.full_name):
+ self.methoddef_ifndef = METHODDEF_PROTOTYPE_IFNDEF
+
+ def finalize(self, clang: CLanguage) -> None:
+ # add ';' to the end of self.parser_prototype and self.impl_prototype
+ # (they mustn't be None, but they could be an empty string.)
+ assert self.parser_prototype is not None
+ if self.parser_prototype:
+ assert not self.parser_prototype.endswith(';')
+ self.parser_prototype += ';'
+
+ if self.impl_prototype is None:
+ self.impl_prototype = self.impl_definition
+ if self.impl_prototype:
+ self.impl_prototype += ";"
+
+ self.parser_definition = self.parser_definition.replace("{return_value_declaration}", self.return_value_declaration)
+
+ compiler_warning = clang.compiler_deprecated_warning(self.func, self.parameters)
+ if compiler_warning:
+ self.parser_definition = compiler_warning + "\n\n" + self.parser_definition
+
+ def create_template_dict(self) -> dict[str, str]:
+ d = {
+ "docstring_prototype" : self.docstring_prototype,
+ "docstring_definition" : self.docstring_definition,
+ "impl_prototype" : self.impl_prototype,
+ "methoddef_define" : self.methoddef_define,
+ "parser_prototype" : self.parser_prototype,
+ "parser_definition" : self.parser_definition,
+ "impl_definition" : self.impl_definition,
+ "cpp_if" : self.cpp_if,
+ "cpp_endif" : self.cpp_endif,
+ "methoddef_ifndef" : self.methoddef_ifndef,
+ }
+
+ # make sure we didn't forget to assign something,
+ # and wrap each non-empty value in \n's
+ d2 = {}
+ for name, value in d.items():
+ assert value is not None, "got a None value for template " + repr(name)
+ if value:
+ value = '\n' + value + '\n'
+ d2[name] = value
+ return d2
+
+ def parse_args(self, clang: CLanguage) -> dict[str, str]:
+ self.select_prototypes()
+ self.init_limited_capi()
+
+ self.flags = ""
+ self.declarations = ""
+ self.parser_prototype = ""
+ self.parser_definition = ""
+ self.impl_prototype = None
+ self.impl_definition = IMPL_DEFINITION_PROTOTYPE
+
+ # parser_body_fields remembers the fields passed in to the
+ # previous call to parser_body. this is used for an awful hack.
+ self.parser_body_fields: tuple[str, ...] = ()
+
+ if not self.parameters:
+ self.parse_no_args()
+ elif self.use_meth_o():
+ self.parse_one_arg()
+ elif self.has_option_groups():
+ self.parse_option_groups()
+ elif (not self.requires_defining_class
+ and self.pos_only == len(self.parameters) - self.pseudo_args):
+ self.parse_pos_only()
+ else:
+ self.parse_general(clang)
+
+ self.copy_includes()
+ if self.is_new_or_init():
+ self.handle_new_or_init()
+ self.process_methoddef(clang)
+ self.finalize(clang)
+
+ return self.create_template_dict()
diff --git a/Tools/clinic/libclinic/return_converters.py b/Tools/clinic/libclinic/return_converters.py
index 7bdd257cfa3443..b41e053bae5f3a 100644
--- a/Tools/clinic/libclinic/return_converters.py
+++ b/Tools/clinic/libclinic/return_converters.py
@@ -1,6 +1,6 @@
import sys
from collections.abc import Callable
-from libclinic.crenderdata import CRenderData
+from libclinic.codegen import CRenderData
from libclinic.function import Function
from typing import Any
diff --git a/Tools/jit/_stencils.py b/Tools/jit/_stencils.py
index 601ea0b70701a5..243bb3dd134f70 100644
--- a/Tools/jit/_stencils.py
+++ b/Tools/jit/_stencils.py
@@ -27,8 +27,11 @@ class HoleValue(enum.Enum):
GOT = enum.auto()
# The current uop's oparg (exposed as _JIT_OPARG):
OPARG = enum.auto()
- # The current uop's operand (exposed as _JIT_OPERAND):
+ # The current uop's operand on 64-bit platforms (exposed as _JIT_OPERAND):
OPERAND = enum.auto()
+ # The current uop's operand on 32-bit platforms (exposed as _JIT_OPERAND_HI and _JIT_OPERAND_LO):
+ OPERAND_HI = enum.auto()
+ OPERAND_LO = enum.auto()
# The current uop's target (exposed as _JIT_TARGET):
TARGET = enum.auto()
# The base address of the machine code for the jump target (exposed as _JIT_JUMP_TARGET):
diff --git a/Tools/jit/_writer.py b/Tools/jit/_writer.py
index 8a2a42e75cfb9b..cbc1ed2fa6543a 100644
--- a/Tools/jit/_writer.py
+++ b/Tools/jit/_writer.py
@@ -17,7 +17,7 @@ def _dump_header() -> typing.Iterator[str]:
yield "} HoleValue;"
yield ""
yield "typedef struct {"
- yield " const uint64_t offset;"
+ yield " const size_t offset;"
yield " const HoleKind kind;"
yield " const HoleValue value;"
yield " const void *symbol;"
@@ -58,7 +58,7 @@ def _dump_footer(opnames: typing.Iterable[str]) -> typing.Iterator[str]:
yield ""
yield "#define GET_PATCHES() { \\"
for value in _stencils.HoleValue:
- yield f" [HoleValue_{value.name}] = (uint64_t)0xBADBADBADBADBADB, \\"
+ yield f" [HoleValue_{value.name}] = (uintptr_t)0xBADBADBADBADBADB, \\"
yield "}"
diff --git a/Tools/jit/template.c b/Tools/jit/template.c
index 2300bd0f1f31ec..b195aff377b3b5 100644
--- a/Tools/jit/template.c
+++ b/Tools/jit/template.c
@@ -88,7 +88,14 @@ _JIT_ENTRY(_PyInterpreterFrame *frame, PyObject **stack_pointer, PyThreadState *
int uopcode = _JIT_OPCODE;
// Other stuff we need handy:
PATCH_VALUE(uint16_t, _oparg, _JIT_OPARG)
+#if SIZEOF_VOID_P == 8
PATCH_VALUE(uint64_t, _operand, _JIT_OPERAND)
+#else
+ assert(SIZEOF_VOID_P == 4);
+ PATCH_VALUE(uint32_t, _operand_hi, _JIT_OPERAND_HI)
+ PATCH_VALUE(uint32_t, _operand_lo, _JIT_OPERAND_LO)
+ uint64_t _operand = ((uint64_t)_operand_hi << 32) | _operand_lo;
+#endif
PATCH_VALUE(uint32_t, _target, _JIT_TARGET)
PATCH_VALUE(uint16_t, _exit_index, _JIT_EXIT_INDEX)
diff --git a/Tools/scripts/summarize_stats.py b/Tools/scripts/summarize_stats.py
index f7ed98ff6045ab..ffbc40e6a37f3d 100644
--- a/Tools/scripts/summarize_stats.py
+++ b/Tools/scripts/summarize_stats.py
@@ -736,9 +736,9 @@ def execution_count_section() -> Section:
)
-def pair_count_section() -> Section:
+def pair_count_section(prefix: str, title=None) -> Section:
def calc_pair_count_table(stats: Stats) -> Rows:
- opcode_stats = stats.get_opcode_stats("opcode")
+ opcode_stats = stats.get_opcode_stats(prefix)
pair_counts = opcode_stats.get_pair_counts()
total = opcode_stats.get_total_execution_count()
@@ -760,7 +760,7 @@ def calc_pair_count_table(stats: Stats) -> Rows:
return Section(
"Pair counts",
- "Pair counts for top 100 Tier 1 instructions",
+ f"Pair counts for top 100 {title if title else prefix} pairs",
[
Table(
("Pair", "Count:", "Self:", "Cumulative:"),
@@ -1232,6 +1232,7 @@ def iter_optimization_tables(base_stats: Stats, head_stats: Stats | None = None)
)
],
)
+ yield pair_count_section(prefix="uop", title="Non-JIT uop")
yield Section(
"Unsupported opcodes",
"",
@@ -1292,7 +1293,7 @@ def calc_rows(stats: Stats) -> Rows:
LAYOUT = [
execution_count_section(),
- pair_count_section(),
+ pair_count_section("opcode"),
pre_succ_pairs_section(),
specialization_section(),
specialization_effectiveness_section(),
diff --git a/Tools/tsan/suppressions_free_threading.txt b/Tools/tsan/suppressions_free_threading.txt
new file mode 100644
index 00000000000000..889b62e59b14a6
--- /dev/null
+++ b/Tools/tsan/suppressions_free_threading.txt
@@ -0,0 +1,51 @@
+# This file contains suppressions for the free-threaded build. It contains the
+# suppressions for the default build and additional suppressions needed only in
+# the free-threaded build.
+#
+# reference: https://github.com/google/sanitizers/wiki/ThreadSanitizerSuppressions
+
+## Default build suppressions
+
+race:get_allocator_unlocked
+race:set_allocator_unlocked
+
+## Free-threaded suppressions
+
+race:_add_to_weak_set
+race:_in_weak_set
+race:_mi_heap_delayed_free_partial
+race:_Py_IsImmortal
+race:_Py_IsOwnedByCurrentThread
+race:_PyEval_EvalFrameDefault
+race:_PyFunction_SetVersion
+race:_PyImport_AcquireLock
+race:_PyImport_ReleaseLock
+race:_PyInterpreterState_SetNotRunningMain
+race:_PyInterpreterState_IsRunningMain
+race:_PyObject_GC_IS_SHARED
+race:_PyObject_GC_SET_SHARED
+race:_PyObject_GC_TRACK
+race:_PyType_HasFeature
+race:_PyType_Lookup
+race:assign_version_tag
+race:compare_unicode_unicode
+race:delitem_common
+race:dictkeys_decref
+race:dictkeys_incref
+race:dictresize
+race:gc_collect_main
+race:gc_restore_tid
+race:initialize_new_array
+race:insertdict
+race:lookup_tp_dict
+race:mi_heap_visit_pages
+race:PyMember_GetOne
+race:PyMember_SetOne
+race:new_reference
+race:set_contains_key
+race:set_inheritable
+race:start_the_world
+race:tstate_set_detached
+race:unicode_hash
+race:update_cache
+race:update_cache_gil_disabled
diff --git a/Tools/tsan/supressions.txt b/Tools/tsan/supressions.txt
index 448dfac8005c79..c778c791eacce8 100644
--- a/Tools/tsan/supressions.txt
+++ b/Tools/tsan/supressions.txt
@@ -1,5 +1,4 @@
-## reference: https://github.com/google/sanitizers/wiki/ThreadSanitizerSuppressions
+# This file contains suppressions for the default (with GIL) build.
+# reference: https://github.com/google/sanitizers/wiki/ThreadSanitizerSuppressions
race:get_allocator_unlocked
race:set_allocator_unlocked
-race:mi_heap_visit_pages
-race:_mi_heap_delayed_free_partial
diff --git a/configure b/configure
index f9647566636e4c..80403255a814af 100755
--- a/configure
+++ b/configure
@@ -6110,7 +6110,9 @@ then :
else $as_nop
cat > conftest.c <conftest.out 2>/dev/null; then
ac_cv_cc_name=`grep -v '^#' conftest.out | grep -v '^ *$' | tr -d ' '`
+ if test $(expr "//$CC" : '.*/\(.*\)') = "mpicc"; then
+ ac_cv_cc_name="mpicc"
+ fi
else
ac_cv_cc_name="unknown"
fi
@@ -6312,6 +6317,34 @@ fi
+{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for GCC compatible compiler" >&5
+printf %s "checking for GCC compatible compiler... " >&6; }
+if test ${ac_cv_gcc_compat+y}
+then :
+ printf %s "(cached) " >&6
+else $as_nop
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+
+ #if !defined(__GNUC__)
+ #error "not GCC compatible"
+ #else
+ /* GCC compatible! */
+ #endif
+
+_ACEOF
+if ac_fn_c_try_cpp "$LINENO"
+then :
+ ac_cv_gcc_compat=yes
+else $as_nop
+ ac_cv_gcc_compat=no
+fi
+rm -f conftest.err conftest.i conftest.$ac_ext
+fi
+{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ac_cv_gcc_compat" >&5
+printf "%s\n" "$ac_cv_gcc_compat" >&6; }
+
+
preset_cxx="$CXX"
if test -z "$CXX"
@@ -6946,14 +6979,14 @@ case $host/$ac_cv_cc_name in #(
PY_SUPPORT_TIER=1 ;; #(
x86_64-apple-darwin*/clang) :
PY_SUPPORT_TIER=1 ;; #(
+ aarch64-apple-darwin*/clang) :
+ PY_SUPPORT_TIER=1 ;; #(
i686-pc-windows-msvc/msvc) :
PY_SUPPORT_TIER=1 ;; #(
x86_64-pc-windows-msvc/msvc) :
PY_SUPPORT_TIER=1 ;; #(
- aarch64-apple-darwin*/clang) :
- PY_SUPPORT_TIER=2 ;; #(
- aarch64-*-linux-gnu/gcc) :
+ aarch64-*-linux-gnu/gcc) :
PY_SUPPORT_TIER=2 ;; #(
aarch64-*-linux-gnu/clang) :
PY_SUPPORT_TIER=2 ;; #(
@@ -8241,13 +8274,13 @@ if test "$Py_OPT" = 'true' ; then
*gcc*)
{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether C compiler accepts -fno-semantic-interposition" >&5
printf %s "checking whether C compiler accepts -fno-semantic-interposition... " >&6; }
-if test ${ax_cv_check_cflags___fno_semantic_interposition+y}
+if test ${ax_cv_check_cflags__Werror__fno_semantic_interposition+y}
then :
printf %s "(cached) " >&6
else $as_nop
ax_check_save_flags=$CFLAGS
- CFLAGS="$CFLAGS -fno-semantic-interposition"
+ CFLAGS="$CFLAGS -Werror -fno-semantic-interposition"
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
@@ -8261,16 +8294,16 @@ main (void)
_ACEOF
if ac_fn_c_try_compile "$LINENO"
then :
- ax_cv_check_cflags___fno_semantic_interposition=yes
+ ax_cv_check_cflags__Werror__fno_semantic_interposition=yes
else $as_nop
- ax_cv_check_cflags___fno_semantic_interposition=no
+ ax_cv_check_cflags__Werror__fno_semantic_interposition=no
fi
rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext
CFLAGS=$ax_check_save_flags
fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ax_cv_check_cflags___fno_semantic_interposition" >&5
-printf "%s\n" "$ax_cv_check_cflags___fno_semantic_interposition" >&6; }
-if test "x$ax_cv_check_cflags___fno_semantic_interposition" = xyes
+{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ax_cv_check_cflags__Werror__fno_semantic_interposition" >&5
+printf "%s\n" "$ax_cv_check_cflags__Werror__fno_semantic_interposition" >&6; }
+if test "x$ax_cv_check_cflags__Werror__fno_semantic_interposition" = xyes
then :
CFLAGS_NODIST="$CFLAGS_NODIST -fno-semantic-interposition"
@@ -8826,9 +8859,9 @@ case "$CC_BASENAME" in
*clang*)
# Any changes made here should be reflected in the GCC+Darwin case below
PGO_PROF_GEN_FLAG="-fprofile-instr-generate"
- PGO_PROF_USE_FLAG="-fprofile-instr-use=code.profclangd"
- LLVM_PROF_MERGER="${LLVM_PROFDATA} merge -output=code.profclangd *.profclangr"
- LLVM_PROF_FILE="LLVM_PROFILE_FILE=\"code-%p.profclangr\""
+ PGO_PROF_USE_FLAG="-fprofile-instr-use=\"\$(shell pwd)/code.profclangd\""
+ LLVM_PROF_MERGER=" ${LLVM_PROFDATA} merge -output=\"\$(shell pwd)/code.profclangd\" \"\$(shell pwd)\"/*.profclangr "
+ LLVM_PROF_FILE="LLVM_PROFILE_FILE=\"\$(shell pwd)/code-%p.profclangr\""
if test $LLVM_PROF_FOUND = not-found
then
LLVM_PROF_ERR=yes
@@ -8842,9 +8875,9 @@ case "$CC_BASENAME" in
case $ac_sys_system in
Darwin*)
PGO_PROF_GEN_FLAG="-fprofile-instr-generate"
- PGO_PROF_USE_FLAG="-fprofile-instr-use=code.profclangd"
- LLVM_PROF_MERGER="${LLVM_PROFDATA} merge -output=code.profclangd *.profclangr"
- LLVM_PROF_FILE="LLVM_PROFILE_FILE=\"code-%p.profclangr\""
+ PGO_PROF_USE_FLAG="-fprofile-instr-use=\"\$(shell pwd)/code.profclangd\""
+ LLVM_PROF_MERGER=" ${LLVM_PROFDATA} merge -output=\"\$(shell pwd)/code.profclangd\" \"\$(shell pwd)\"/*.profclangr "
+ LLVM_PROF_FILE="LLVM_PROFILE_FILE=\"\$(shell pwd)/code-%p.profclangr\""
if test "${LLVM_PROF_FOUND}" = "not-found"
then
LLVM_PROF_ERR=yes
@@ -11574,7 +11607,6 @@ printf "%s\n" "#define size_t unsigned int" >>confdefs.h
fi
-
{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for uid_t in sys/types.h" >&5
printf %s "checking for uid_t in sys/types.h... " >&6; }
if test ${ac_cv_type_uid_t+y}
@@ -24334,7 +24366,7 @@ if test "$PY_ENABLE_SHARED" = "1" && ( test -n "$ANDROID_API_LEVEL" || test "$MA
fi
# On iOS the shared libraries must be linked with the Python framework
-if test "$ac_sys_system" == "iOS"; then
+if test "$ac_sys_system" = "iOS"; then
MODULE_DEPS_SHARED="$MODULE_DEPS_SHARED \$(PYTHONFRAMEWORKDIR)/\$(PYTHONFRAMEWORK)"
fi
diff --git a/configure.ac b/configure.ac
index e195e15b39ed21..ec925d4d4a0a5a 100644
--- a/configure.ac
+++ b/configure.ac
@@ -992,7 +992,9 @@ dnl check for GCC last, other compilers set __GNUC__, too.
dnl msvc is listed for completeness.
AC_CACHE_CHECK([for CC compiler name], [ac_cv_cc_name], [
cat > conftest.c <conftest.out 2>/dev/null; then
ac_cv_cc_name=`grep -v '^#' conftest.out | grep -v '^ *$' | tr -d ' '`
+ if test $(expr "//$CC" : '.*/\(.*\)') = "mpicc"; then
+ ac_cv_cc_name="mpicc"
+ fi
else
ac_cv_cc_name="unknown"
fi
@@ -1020,6 +1025,18 @@ rm -f conftest.c conftest.out
# _POSIX_SOURCE, _POSIX_1_SOURCE, and more
AC_USE_SYSTEM_EXTENSIONS
+AC_CACHE_CHECK([for GCC compatible compiler],
+ [ac_cv_gcc_compat],
+ [AC_PREPROC_IFELSE([AC_LANG_SOURCE([
+ #if !defined(__GNUC__)
+ #error "not GCC compatible"
+ #else
+ /* GCC compatible! */
+ #endif
+ ], [])],
+ [ac_cv_gcc_compat=yes],
+ [ac_cv_gcc_compat=no])])
+
AC_SUBST([CXX])
preset_cxx="$CXX"
@@ -1114,10 +1131,10 @@ AC_MSG_CHECKING([for PEP 11 support tier])
AS_CASE([$host/$ac_cv_cc_name],
[x86_64-*-linux-gnu/gcc], [PY_SUPPORT_TIER=1], dnl Linux on AMD64, any vendor, glibc, gcc
[x86_64-apple-darwin*/clang], [PY_SUPPORT_TIER=1], dnl macOS on Intel, any version
+ [aarch64-apple-darwin*/clang], [PY_SUPPORT_TIER=1], dnl macOS on M1, any version
[i686-pc-windows-msvc/msvc], [PY_SUPPORT_TIER=1], dnl 32bit Windows on Intel, MSVC
[x86_64-pc-windows-msvc/msvc], [PY_SUPPORT_TIER=1], dnl 64bit Windows on AMD64, MSVC
- [aarch64-apple-darwin*/clang], [PY_SUPPORT_TIER=2], dnl macOS on M1, any version
[aarch64-*-linux-gnu/gcc], [PY_SUPPORT_TIER=2], dnl Linux ARM64, glibc, gcc+clang
[aarch64-*-linux-gnu/clang], [PY_SUPPORT_TIER=2],
[powerpc64le-*-linux-gnu/gcc], [PY_SUPPORT_TIER=2], dnl Linux on PPC64 little endian, glibc, gcc
@@ -1799,7 +1816,7 @@ if test "$Py_OPT" = 'true' ; then
AX_CHECK_COMPILE_FLAG([-fno-semantic-interposition],[
CFLAGS_NODIST="$CFLAGS_NODIST -fno-semantic-interposition"
LDFLAGS_NODIST="$LDFLAGS_NODIST -fno-semantic-interposition"
- ])
+ ], [], [-Werror])
;;
esac
elif test "$ac_sys_system" = "Emscripten" -o "$ac_sys_system" = "WASI"; then
@@ -2011,9 +2028,13 @@ case "$CC_BASENAME" in
*clang*)
# Any changes made here should be reflected in the GCC+Darwin case below
PGO_PROF_GEN_FLAG="-fprofile-instr-generate"
- PGO_PROF_USE_FLAG="-fprofile-instr-use=code.profclangd"
- LLVM_PROF_MERGER="${LLVM_PROFDATA} merge -output=code.profclangd *.profclangr"
- LLVM_PROF_FILE="LLVM_PROFILE_FILE=\"code-%p.profclangr\""
+ PGO_PROF_USE_FLAG="-fprofile-instr-use=\"\$(shell pwd)/code.profclangd\""
+ LLVM_PROF_MERGER=m4_normalize("
+ ${LLVM_PROFDATA} merge
+ -output=\"\$(shell pwd)/code.profclangd\"
+ \"\$(shell pwd)\"/*.profclangr
+ ")
+ LLVM_PROF_FILE="LLVM_PROFILE_FILE=\"\$(shell pwd)/code-%p.profclangr\""
if test $LLVM_PROF_FOUND = not-found
then
LLVM_PROF_ERR=yes
@@ -2027,9 +2048,13 @@ case "$CC_BASENAME" in
case $ac_sys_system in
Darwin*)
PGO_PROF_GEN_FLAG="-fprofile-instr-generate"
- PGO_PROF_USE_FLAG="-fprofile-instr-use=code.profclangd"
- LLVM_PROF_MERGER="${LLVM_PROFDATA} merge -output=code.profclangd *.profclangr"
- LLVM_PROF_FILE="LLVM_PROFILE_FILE=\"code-%p.profclangr\""
+ PGO_PROF_USE_FLAG="-fprofile-instr-use=\"\$(shell pwd)/code.profclangd\""
+ LLVM_PROF_MERGER=m4_normalize("
+ ${LLVM_PROFDATA} merge
+ -output=\"\$(shell pwd)/code.profclangd\"
+ \"\$(shell pwd)\"/*.profclangr
+ ")
+ LLVM_PROF_FILE="LLVM_PROFILE_FILE=\"\$(shell pwd)/code-%p.profclangr\""
if test "${LLVM_PROF_FOUND}" = "not-found"
then
LLVM_PROF_ERR=yes
@@ -6053,7 +6078,7 @@ if test "$PY_ENABLE_SHARED" = "1" && ( test -n "$ANDROID_API_LEVEL" || test "$MA
fi
# On iOS the shared libraries must be linked with the Python framework
-if test "$ac_sys_system" == "iOS"; then
+if test "$ac_sys_system" = "iOS"; then
MODULE_DEPS_SHARED="$MODULE_DEPS_SHARED \$(PYTHONFRAMEWORKDIR)/\$(PYTHONFRAMEWORK)"
fi