diff --git a/.cirrus-DISABLED.yml b/.cirrus-DISABLED.yml deleted file mode 100644 index f20835cb6cac2a..00000000000000 --- a/.cirrus-DISABLED.yml +++ /dev/null @@ -1,29 +0,0 @@ -# gh-91960: Job disabled since Python is out of free credit (September 2023): -# https://discuss.python.org/t/freebsd-gets-a-new-cirrus-ci-github-action-job-and-a-new-buildbot/33122/26 - -freebsd_task: - freebsd_instance: - matrix: - - image: freebsd-13-2-release-amd64 - # Turn off TCP and UDP blackhole. It is not enabled by default in FreeBSD, - # but it is in the FreeBSD GCE images as used by Cirrus-CI. It causes even - # local local connections to fail with ETIMEDOUT instead of ECONNREFUSED. - # For more information see https://reviews.freebsd.org/D41751 and - # https://github.com/cirruslabs/cirrus-ci-docs/issues/483. - sysctl_script: - - sysctl net.inet.tcp.blackhole=0 - - sysctl net.inet.udp.blackhole=0 - configure_script: - - mkdir build - - cd build - - ../configure --with-pydebug - build_script: - - cd build - - make -j$(sysctl -n hw.ncpu) - pythoninfo_script: - - cd build - - make pythoninfo - test_script: - - cd build - # dtrace fails to build on FreeBSD - see gh-73263 - - make buildbottest TESTOPTS="-j0 -x test_dtrace --timeout=600" diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 2168ec101cf3d9..cc5ecc09fbc592 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -142,6 +142,8 @@ jobs: run: echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV - name: Configure ccache action uses: hendrikmuhs/ccache-action@v1.2 + with: + save: false - name: Check Autoconf and aclocal versions run: | grep "Generated by GNU Autoconf 2.71" configure @@ -284,6 +286,8 @@ jobs: echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV - name: Configure ccache action uses: hendrikmuhs/ccache-action@v1.2 + with: + save: false - name: Configure CPython run: ./configure --config-cache --with-pydebug --with-openssl=$OPENSSL_DIR - name: Build CPython @@ -327,6 +331,8 @@ jobs: echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV - name: Configure ccache action uses: hendrikmuhs/ccache-action@v1.2 + with: + save: false - name: Setup directory envs for out-of-tree builds run: | echo "CPYTHON_RO_SRCDIR=$(realpath -m ${GITHUB_WORKSPACE}/../cpython-ro-srcdir)" >> $GITHUB_ENV @@ -446,6 +452,9 @@ jobs: echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV - name: Configure ccache action uses: hendrikmuhs/ccache-action@v1.2 + with: + save: ${{ github.event_name == 'push' }} + max-size: "200M" - name: Configure CPython run: ./configure --config-cache --with-address-sanitizer --without-pymalloc - name: Build CPython diff --git a/.github/workflows/build_msi.yml b/.github/workflows/build_msi.yml index 29282dffa37ec0..65d32c734e7745 100644 --- a/.github/workflows/build_msi.yml +++ b/.github/workflows/build_msi.yml @@ -32,6 +32,8 @@ jobs: strategy: matrix: type: [x86, x64, arm64] + env: + IncludeFreethreaded: true steps: - uses: actions/checkout@v4 - name: Build CPython installer diff --git a/.github/workflows/reusable-ubuntu.yml b/.github/workflows/reusable-ubuntu.yml index 819b45bda7f980..c2194280c0a50f 100644 --- a/.github/workflows/reusable-ubuntu.yml +++ b/.github/workflows/reusable-ubuntu.yml @@ -41,6 +41,9 @@ jobs: echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV - name: Configure ccache action uses: hendrikmuhs/ccache-action@v1.2 + with: + save: ${{ github.event_name == 'push' }} + max-size: "200M" - name: Setup directory envs for out-of-tree builds run: | echo "CPYTHON_RO_SRCDIR=$(realpath -m 
${GITHUB_WORKSPACE}/../cpython-ro-srcdir)" >> $GITHUB_ENV diff --git a/.github/workflows/reusable-windows.yml b/.github/workflows/reusable-windows.yml index ae27c108d8368c..c0209e0e1c92e9 100644 --- a/.github/workflows/reusable-windows.yml +++ b/.github/workflows/reusable-windows.yml @@ -20,7 +20,7 @@ jobs: - name: Display build info run: .\python.bat -m test.pythoninfo - name: Tests - run: .\PCbuild\rt.bat -p Win32 -d -q --fast-ci + run: .\PCbuild\rt.bat -p Win32 -d -q --fast-ci ${{ inputs.free-threading && '--disable-gil' || '' }} build_win_amd64: name: 'build and test (x64)' @@ -37,7 +37,7 @@ jobs: - name: Display build info run: .\python.bat -m test.pythoninfo - name: Tests - run: .\PCbuild\rt.bat -p x64 -d -q --fast-ci + run: .\PCbuild\rt.bat -p x64 -d -q --fast-ci ${{ inputs.free-threading && '--disable-gil' || '' }} build_win_arm64: name: 'build (arm64)' diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9bd9c59a1ddc74..19033ce243d9d3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -28,7 +28,7 @@ repos: hooks: - id: sphinx-lint args: [--enable=default-role] - files: ^Doc/|^Misc/NEWS.d/next/ + files: ^Doc/|^Misc/NEWS.d/ - repo: meta hooks: diff --git a/Doc/c-api/complex.rst b/Doc/c-api/complex.rst index e3fd001c599c80..5a0474869071d9 100644 --- a/Doc/c-api/complex.rst +++ b/Doc/c-api/complex.rst @@ -117,11 +117,29 @@ Complex Numbers as Python Objects Return the real part of *op* as a C :c:expr:`double`. + If *op* is not a Python complex number object but has a + :meth:`~object.__complex__` method, this method will first be called to + convert *op* to a Python complex number object. If :meth:`!__complex__` is + not defined then it falls back to call :c:func:`PyFloat_AsDouble` and + returns its result. Upon failure, this method returns ``-1.0``, so one + should call :c:func:`PyErr_Occurred` to check for errors. + + .. versionchanged:: 3.13 + Use :meth:`~object.__complex__` if available. .. c:function:: double PyComplex_ImagAsDouble(PyObject *op) Return the imaginary part of *op* as a C :c:expr:`double`. + If *op* is not a Python complex number object but has a + :meth:`~object.__complex__` method, this method will first be called to + convert *op* to a Python complex number object. If :meth:`!__complex__` is + not defined then it falls back to call :c:func:`PyFloat_AsDouble` and + returns ``0.0`` on success. Upon failure, this method returns ``-1.0``, so + one should call :c:func:`PyErr_Occurred` to check for errors. + + .. versionchanged:: 3.13 + Use :meth:`~object.__complex__` if available. .. c:function:: Py_complex PyComplex_AsCComplex(PyObject *op) diff --git a/Doc/c-api/object.rst b/Doc/c-api/object.rst index a4e3e74861a315..8a179690d048e3 100644 --- a/Doc/c-api/object.rst +++ b/Doc/c-api/object.rst @@ -47,9 +47,8 @@ Object Protocol .. c:function:: int PyObject_HasAttr(PyObject *o, PyObject *attr_name) - Returns ``1`` if *o* has the attribute *attr_name*, and ``0`` otherwise. This - is equivalent to the Python expression ``hasattr(o, attr_name)``. This function - always succeeds. + Returns ``1`` if *o* has the attribute *attr_name*, and ``0`` otherwise. + This function always succeeds. .. 
note:: diff --git a/Doc/c-api/stable.rst b/Doc/c-api/stable.rst index 63a100a6f26f24..5b9e43874c7f2b 100644 --- a/Doc/c-api/stable.rst +++ b/Doc/c-api/stable.rst @@ -16,7 +16,7 @@ CPython's Application Binary Interface (ABI) is forward- and backwards-compatible across a minor release (if these are compiled the same way; see :ref:`stable-abi-platform` below). So, code compiled for Python 3.10.0 will work on 3.10.8 and vice versa, -but will need to be compiled separately for 3.9.x and 3.10.x. +but will need to be compiled separately for 3.9.x and 3.11.x. There are two tiers of C API with different stability expectations: diff --git a/Doc/c-api/structures.rst b/Doc/c-api/structures.rst index 7d82f7839dfcd7..86c779472fd244 100644 --- a/Doc/c-api/structures.rst +++ b/Doc/c-api/structures.rst @@ -399,6 +399,40 @@ definition with the same method name. slot. This is helpful because calls to PyCFunctions are optimized more than wrapper object calls. +.. c:function:: PyObject * PyCMethod_New(PyMethodDef *ml, PyObject *self, PyObject *module, PyTypeObject *cls) + + Turn *ml* into a Python :term:`callable` object. + The caller must ensure that *ml* outlives the :term:`callable`. + Typically, *ml* is defined as a static variable. + + The *self* parameter will be passed as the *self* argument + to the C function in ``ml->ml_meth`` when invoked. + *self* can be ``NULL``. + + The :term:`callable` object's ``__module__`` attribute + can be set from the given *module* argument. + *module* should be a Python string, + which will be used as name of the module the function is defined in. + If unavailable, it can be set to :const:`None` or ``NULL``. + + .. seealso:: :attr:`function.__module__` + + The *cls* parameter will be passed as the *defining_class* + argument to the C function. + Must be set if :c:macro:`METH_METHOD` is set on ``ml->ml_flags``. + + .. versionadded:: 3.9 + + +.. c:function:: PyObject * PyCFunction_NewEx(PyMethodDef *ml, PyObject *self, PyObject *module) + + Equivalent to ``PyCMethod_New(ml, self, module, NULL)``. + + +.. c:function:: PyObject * PyCFunction_New(PyMethodDef *ml, PyObject *self) + + Equivalent to ``PyCMethod_New(ml, self, NULL, NULL)``. + Accessing attributes of extension types --------------------------------------- diff --git a/Doc/conf.py b/Doc/conf.py index dc09b0b51ca84c..458954370debe2 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -74,6 +74,10 @@ # Minimum version of sphinx required needs_sphinx = '4.2' +# Create table of contents entries for domain objects (e.g. functions, classes, +# attributes, etc.). Default is True. +toc_object_entries = False + # Ignore any .rst files in the includes/ directory; # they're embedded in pages but not rendered individually. # Ignore any .rst files in the venv/ directory. diff --git a/Doc/copyright.rst b/Doc/copyright.rst index 9b71683155eebe..8629ed1fc38009 100644 --- a/Doc/copyright.rst +++ b/Doc/copyright.rst @@ -4,7 +4,7 @@ Copyright Python and this documentation is: -Copyright © 2001-2023 Python Software Foundation. All rights reserved. +Copyright © 2001-2024 Python Software Foundation. All rights reserved. Copyright © 2000 BeOpen.com. All rights reserved. 
diff --git a/Doc/data/refcounts.dat b/Doc/data/refcounts.dat index 0b48512083ced4..f719ce153b239a 100644 --- a/Doc/data/refcounts.dat +++ b/Doc/data/refcounts.dat @@ -402,6 +402,21 @@ PyContextVar_Reset:int::: PyContextVar_Reset:PyObject*:var:0: PyContextVar_Reset:PyObject*:token:-1: +PyCFunction_New:PyObject*::+1: +PyCFunction_New:PyMethodDef*:ml:: +PyCFunction_New:PyObject*:self:+1: + +PyCFunction_NewEx:PyObject*::+1: +PyCFunction_NewEx:PyMethodDef*:ml:: +PyCFunction_NewEx:PyObject*:self:+1: +PyCFunction_NewEx:PyObject*:module:+1: + +PyCMethod_New:PyObject*::+1: +PyCMethod_New:PyMethodDef*:ml:: +PyCMethod_New:PyObject*:self:+1: +PyCMethod_New:PyObject*:module:+1: +PyCMethod_New:PyObject*:cls:+1: + PyDate_Check:int::: PyDate_Check:PyObject*:ob:0: diff --git a/Doc/faq/design.rst b/Doc/faq/design.rst index ae02c443e5938b..300e1b6cc40a58 100644 --- a/Doc/faq/design.rst +++ b/Doc/faq/design.rst @@ -451,7 +451,7 @@ on the key and a per-process seed; for example, ``'Python'`` could hash to to ``1142331976``. The hash code is then used to calculate a location in an internal array where the value will be stored. Assuming that you're storing keys that all have different hash values, this means that dictionaries take -constant time -- O(1), in Big-O notation -- to retrieve a key. +constant time -- *O*\ (1), in Big-O notation -- to retrieve a key. Why must dictionary keys be immutable? diff --git a/Doc/glossary.rst b/Doc/glossary.rst index 601443d5aade94..098bfffb104ef6 100644 --- a/Doc/glossary.rst +++ b/Doc/glossary.rst @@ -742,7 +742,7 @@ Glossary list A built-in Python :term:`sequence`. Despite its name it is more akin to an array in other languages than to a linked list since access to - elements is O(1). + elements is *O*\ (1). list comprehension A compact way to process all or part of the elements in a sequence and diff --git a/Doc/howto/descriptor.rst b/Doc/howto/descriptor.rst index 87274a5133d1cf..75346f2c7618c2 100644 --- a/Doc/howto/descriptor.rst +++ b/Doc/howto/descriptor.rst @@ -1250,7 +1250,7 @@ instance:: >>> d.f.__self__ - <__main__.D object at 0x1012e1f98> + <__main__.D object at 0x00B18C90> If you have ever wondered where *self* comes from in regular methods or where *cls* comes from in class methods, this is it! diff --git a/Doc/howto/urllib2.rst b/Doc/howto/urllib2.rst index 570435d48866d3..7f54a410881514 100644 --- a/Doc/howto/urllib2.rst +++ b/Doc/howto/urllib2.rst @@ -392,16 +392,16 @@ info and geturl =============== The response returned by urlopen (or the :exc:`~urllib.error.HTTPError` instance) has two -useful methods :meth:`info` and :meth:`geturl` and is defined in the module -:mod:`urllib.response`.. +useful methods :meth:`!info` and :meth:`!geturl` and is defined in the module +:mod:`urllib.response`. -**geturl** - this returns the real URL of the page fetched. This is useful -because ``urlopen`` (or the opener object used) may have followed a -redirect. The URL of the page fetched may not be the same as the URL requested. +* **geturl** - this returns the real URL of the page fetched. This is useful + because ``urlopen`` (or the opener object used) may have followed a + redirect. The URL of the page fetched may not be the same as the URL requested. -**info** - this returns a dictionary-like object that describes the page -fetched, particularly the headers sent by the server. It is currently an -:class:`http.client.HTTPMessage` instance. +* **info** - this returns a dictionary-like object that describes the page + fetched, particularly the headers sent by the server. 
It is currently an + :class:`http.client.HTTPMessage` instance. Typical headers include 'Content-length', 'Content-type', and so on. See the `Quick Reference to HTTP Headers `_ @@ -507,7 +507,7 @@ than the URL you pass to .add_password() will also match. :: In the above example we only supplied our ``HTTPBasicAuthHandler`` to ``build_opener``. By default openers have the handlers for normal situations - -- ``ProxyHandler`` (if a proxy setting such as an :envvar:`http_proxy` + -- ``ProxyHandler`` (if a proxy setting such as an :envvar:`!http_proxy` environment variable is set), ``UnknownHandler``, ``HTTPHandler``, ``HTTPDefaultErrorHandler``, ``HTTPRedirectHandler``, ``FTPHandler``, ``FileHandler``, ``DataHandler``, ``HTTPErrorProcessor``. diff --git a/Doc/library/asyncio-policy.rst b/Doc/library/asyncio-policy.rst index 0d7821e608ec98..346b740a8f757a 100644 --- a/Doc/library/asyncio-policy.rst +++ b/Doc/library/asyncio-policy.rst @@ -237,7 +237,7 @@ implementation used by the asyncio event loop: It works reliably even when the asyncio event loop is run in a non-main OS thread. - There is no noticeable overhead when handling a big number of children (*O(1)* each + There is no noticeable overhead when handling a big number of children (*O*\ (1) each time a child terminates), but starting a thread per process requires extra memory. This watcher is used by default. @@ -257,7 +257,7 @@ implementation used by the asyncio event loop: watcher is installed. The solution is safe but it has a significant overhead when - handling a big number of processes (*O(n)* each time a + handling a big number of processes (*O*\ (*n*) each time a :py:data:`SIGCHLD` is received). .. versionadded:: 3.8 @@ -273,7 +273,7 @@ implementation used by the asyncio event loop: The watcher avoids disrupting other code spawning processes by polling every process explicitly on a :py:data:`SIGCHLD` signal. - This solution is as safe as :class:`MultiLoopChildWatcher` and has the same *O(N)* + This solution is as safe as :class:`MultiLoopChildWatcher` and has the same *O*\ (*n*) complexity but requires a running event loop in the main thread to work. .. deprecated:: 3.12 @@ -285,7 +285,7 @@ implementation used by the asyncio event loop: processes and waiting for their termination. There is no noticeable overhead when handling a big number of - children (*O(1)* each time a child terminates). + children (*O*\ (1) each time a child terminates). This solution requires a running event loop in the main thread to work, as :class:`SafeChildWatcher`. diff --git a/Doc/library/bisect.rst b/Doc/library/bisect.rst index c0923093c1cb06..31c79b91061591 100644 --- a/Doc/library/bisect.rst +++ b/Doc/library/bisect.rst @@ -79,7 +79,7 @@ The following functions are provided: To support inserting records in a table, the *key* function (if any) is applied to *x* for the search step but not for the insertion step. - Keep in mind that the ``O(log n)`` search is dominated by the slow O(n) + Keep in mind that the *O*\ (log *n*) search is dominated by the slow *O*\ (*n*) insertion step. .. versionchanged:: 3.10 @@ -99,7 +99,7 @@ The following functions are provided: To support inserting records in a table, the *key* function (if any) is applied to *x* for the search step but not for the insertion step. - Keep in mind that the ``O(log n)`` search is dominated by the slow O(n) + Keep in mind that the *O*\ (log *n*) search is dominated by the slow *O*\ (*n*) insertion step. .. 
versionchanged:: 3.10 @@ -115,7 +115,7 @@ thoughts in mind: * Bisection is effective for searching ranges of values. For locating specific values, dictionaries are more performant. -* The *insort()* functions are ``O(n)`` because the logarithmic search step +* The *insort()* functions are *O*\ (*n*) because the logarithmic search step is dominated by the linear time insertion step. * The search functions are stateless and discard key function results after diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst index 233b2c6a771f4a..c246173c1bbf53 100644 --- a/Doc/library/collections.rst +++ b/Doc/library/collections.rst @@ -458,10 +458,10 @@ or subtracting from an empty counter. Deques are a generalization of stacks and queues (the name is pronounced "deck" and is short for "double-ended queue"). Deques support thread-safe, memory efficient appends and pops from either side of the deque with approximately the - same O(1) performance in either direction. + same *O*\ (1) performance in either direction. Though :class:`list` objects support similar operations, they are optimized for - fast fixed-length operations and incur O(n) memory movement costs for + fast fixed-length operations and incur *O*\ (*n*) memory movement costs for ``pop(0)`` and ``insert(0, v)`` operations which change both the size and position of the underlying data representation. @@ -585,7 +585,7 @@ or subtracting from an empty counter. In addition to the above, deques support iteration, pickling, ``len(d)``, ``reversed(d)``, ``copy.copy(d)``, ``copy.deepcopy(d)``, membership testing with the :keyword:`in` operator, and subscript references such as ``d[0]`` to access -the first element. Indexed access is O(1) at both ends but slows to O(n) in +the first element. Indexed access is *O*\ (1) at both ends but slows to *O*\ (*n*) in the middle. For fast random access, use lists instead. Starting in version 3.5, deques support ``__add__()``, ``__mul__()``, diff --git a/Doc/library/concurrent.futures.rst b/Doc/library/concurrent.futures.rst index deefb8606ead84..760e1196d7cf9a 100644 --- a/Doc/library/concurrent.futures.rst +++ b/Doc/library/concurrent.futures.rst @@ -39,14 +39,14 @@ Executor Objects future = executor.submit(pow, 323, 1235) print(future.result()) - .. method:: map(func, *iterables, timeout=None, chunksize=1) + .. method:: map(fn, *iterables, timeout=None, chunksize=1) - Similar to :func:`map(func, *iterables) ` except: + Similar to :func:`map(fn, *iterables) ` except: * the *iterables* are collected immediately rather than lazily; - * *func* is executed asynchronously and several calls to - *func* may be made concurrently. + * *fn* is executed asynchronously and several calls to + *fn* may be made concurrently. The returned iterator raises a :exc:`TimeoutError` if :meth:`~iterator.__next__` is called and the result isn't available @@ -54,7 +54,7 @@ Executor Objects *timeout* can be an int or a float. If *timeout* is not specified or ``None``, there is no limit to the wait time. - If a *func* call raises an exception, then that exception will be + If a *fn* call raises an exception, then that exception will be raised when its value is retrieved from the iterator. 
When using :class:`ProcessPoolExecutor`, this method chops *iterables* diff --git a/Doc/library/contextvars.rst b/Doc/library/contextvars.rst index 0ac2f3d85749b7..647832447de946 100644 --- a/Doc/library/contextvars.rst +++ b/Doc/library/contextvars.rst @@ -131,7 +131,7 @@ Manual Context Management ctx: Context = copy_context() print(list(ctx.items())) - The function has an O(1) complexity, i.e. works equally fast for + The function has an *O*\ (1) complexity, i.e. works equally fast for contexts with a few context variables and for contexts that have a lot of them. diff --git a/Doc/library/dataclasses.rst b/Doc/library/dataclasses.rst index bbbbcb00d8fef8..cde147d1cbb501 100644 --- a/Doc/library/dataclasses.rst +++ b/Doc/library/dataclasses.rst @@ -536,10 +536,10 @@ class :meth:`~object.__init__` methods. If the base class has an :meth:`~object. that has to be called, it is common to call this method in a :meth:`__post_init__` method:: - @dataclass class Rectangle: - height: float - width: float + def __init__(self, height, width): + self.height = height + self.width = width @dataclass class Square(Rectangle): diff --git a/Doc/library/doctest.rst b/Doc/library/doctest.rst index 8c28e4478bb70e..1bfcd69f72df2e 100644 --- a/Doc/library/doctest.rst +++ b/Doc/library/doctest.rst @@ -134,7 +134,7 @@ That's all you need to know to start making productive use of :mod:`doctest`! Jump in. The following sections provide full details. Note that there are many examples of doctests in the standard Python test suite and libraries. Especially useful examples can be found in the standard test file -:file:`Lib/test/test_doctest.py`. +:file:`Lib/test/test_doctest/test_doctest.py`. .. _doctest-simple-testmod: @@ -280,7 +280,7 @@ searched. Objects imported into the module are not searched. In addition, there are cases when you want tests to be part of a module but not part of the help text, which requires that the tests not be included in the docstring. Doctest looks for a module-level variable called ``__test__`` and uses it to locate other -tests. If ``M.__test__`` exists and is truthy, it must be a dict, and each +tests. If ``M.__test__`` exists, it must be a dict, and each entry maps a (string) name to a function object, class object, or string. Function and class object docstrings found from ``M.__test__`` are searched, and strings are treated as if they were docstrings. In output, a key ``K`` in @@ -944,8 +944,8 @@ and :ref:`doctest-simple-testfile`. (or module :mod:`__main__` if *m* is not supplied or is ``None``), starting with ``m.__doc__``. - Also test examples reachable from dict ``m.__test__``, if it exists and is not - ``None``. ``m.__test__`` maps names (strings) to functions, classes and + Also test examples reachable from dict ``m.__test__``, if it exists. + ``m.__test__`` maps names (strings) to functions, classes and strings; function and class docstrings are searched for examples; strings are searched directly, as if they were docstrings. diff --git a/Doc/library/email.message.rst b/Doc/library/email.message.rst index f58d93da6ed687..adea067e082615 100644 --- a/Doc/library/email.message.rst +++ b/Doc/library/email.message.rst @@ -40,9 +40,9 @@ over the object tree. The :class:`EmailMessage` dictionary-like interface is indexed by the header names, which must be ASCII values. The values of the dictionary are strings with some extra methods. Headers are stored and returned in case-preserving -form, but field names are matched case-insensitively. 
Unlike a real dict, -there is an ordering to the keys, and there can be duplicate keys. Additional -methods are provided for working with headers that have duplicate keys. +form, but field names are matched case-insensitively. The keys are ordered, +but unlike a real dict, there can be duplicates. Additional methods are +provided for working with headers that have duplicate keys. The *payload* is either a string or bytes object, in the case of simple message objects, or a list of :class:`EmailMessage` objects, for MIME container diff --git a/Doc/library/enum.rst b/Doc/library/enum.rst index 20222bfb3611ab..07b15e23b2c10a 100644 --- a/Doc/library/enum.rst +++ b/Doc/library/enum.rst @@ -838,7 +838,7 @@ Utilities and Decorators * ``FIRST = auto()`` will work (auto() is replaced with ``1``); * ``SECOND = auto(), -2`` will work (auto is replaced with ``2``, so ``2, -2`` is - used to create the ``SECOND`` enum member; + used to create the ``SECOND`` enum member; * ``THREE = [auto(), -3]`` will *not* work (``, -3`` is used to create the ``THREE`` enum member) diff --git a/Doc/library/fnmatch.rst b/Doc/library/fnmatch.rst index aed8991d44772f..7cddecd5e80887 100644 --- a/Doc/library/fnmatch.rst +++ b/Doc/library/fnmatch.rst @@ -50,10 +50,10 @@ Also note that :func:`functools.lru_cache` with the *maxsize* of 32768 is used t cache the compiled regex patterns in the following functions: :func:`fnmatch`, :func:`fnmatchcase`, :func:`.filter`. -.. function:: fnmatch(filename, pattern) +.. function:: fnmatch(name, pat) - Test whether the *filename* string matches the *pattern* string, returning - :const:`True` or :const:`False`. Both parameters are case-normalized + Test whether the filename string *name* matches the pattern string *pat*, + returning ``True`` or ``False``. Both parameters are case-normalized using :func:`os.path.normcase`. :func:`fnmatchcase` can be used to perform a case-sensitive comparison, regardless of whether that's standard for the operating system. @@ -69,22 +69,24 @@ cache the compiled regex patterns in the following functions: :func:`fnmatch`, print(file) -.. function:: fnmatchcase(filename, pattern) +.. function:: fnmatchcase(name, pat) - Test whether *filename* matches *pattern*, returning :const:`True` or - :const:`False`; the comparison is case-sensitive and does not apply - :func:`os.path.normcase`. + Test whether the filename string *name* matches the pattern string *pat*, + returning ``True`` or ``False``; + the comparison is case-sensitive and does not apply :func:`os.path.normcase`. -.. function:: filter(names, pattern) +.. function:: filter(names, pat) - Construct a list from those elements of the iterable *names* that match *pattern*. It is the same as - ``[n for n in names if fnmatch(n, pattern)]``, but implemented more efficiently. + Construct a list from those elements of the :term:`iterable` *names* + that match pattern *pat*. + It is the same as ``[n for n in names if fnmatch(n, pat)]``, + but implemented more efficiently. -.. function:: translate(pattern) +.. function:: translate(pat) - Return the shell-style *pattern* converted to a regular expression for + Return the shell-style pattern *pat* converted to a regular expression for using with :func:`re.match`. Example: diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst index 4682ec9c924757..37234f7a5a2485 100644 --- a/Doc/library/functions.rst +++ b/Doc/library/functions.rst @@ -668,16 +668,15 @@ are always available. They are listed here in alphabetical order. 
sign: "+" | "-" infinity: "Infinity" | "inf" nan: "nan" - digitpart: `!digit` (["_"] `!digit`)* + digit: + digitpart: `digit` (["_"] `digit`)* number: [`digitpart`] "." `digitpart` | `digitpart` ["."] exponent: ("e" | "E") ["+" | "-"] `digitpart` floatnumber: number [`exponent`] floatvalue: [`sign`] (`floatnumber` | `infinity` | `nan`) - Here ``digit`` is a Unicode decimal digit (character in the Unicode general - category ``Nd``). Case is not significant, so, for example, "inf", "Inf", - "INFINITY", and "iNfINity" are all acceptable spellings for positive - infinity. + Case is not significant, so, for example, "inf", "Inf", "INFINITY", and + "iNfINity" are all acceptable spellings for positive infinity. Otherwise, if the argument is an integer or a floating point number, a floating point number with the same value (within Python's floating point diff --git a/Doc/library/gc.rst b/Doc/library/gc.rst index 82277aa52aee01..e36a71af2b64ab 100644 --- a/Doc/library/gc.rst +++ b/Doc/library/gc.rst @@ -96,7 +96,7 @@ The :mod:`gc` module provides the following functions: .. versionadded:: 3.4 -.. function:: set_threshold(threshold0[, threshold1[, threshold2]]) +.. function:: set_threshold(threshold0, [threshold1, [threshold2]]) Set the garbage collection thresholds (the collection frequency). Setting *threshold0* to zero disables collection. diff --git a/Doc/library/heapq.rst b/Doc/library/heapq.rst index 8b00f7b27959b6..ddbada13bddf5b 100644 --- a/Doc/library/heapq.rst +++ b/Doc/library/heapq.rst @@ -270,7 +270,7 @@ winner. The simplest algorithmic way to remove it and find the "next" winner is to move some loser (let's say cell 30 in the diagram above) into the 0 position, and then percolate this new 0 down the tree, exchanging values, until the invariant is re-established. This is clearly logarithmic on the total number of -items in the tree. By iterating over all items, you get an O(n log n) sort. +items in the tree. By iterating over all items, you get an *O*\ (*n* log *n*) sort. A nice feature of this sort is that you can efficiently insert new items while the sort is going on, provided that the inserted items are not "better" than the diff --git a/Doc/library/http.client.rst b/Doc/library/http.client.rst index c46314fc5e253b..95b6c1f364bcc5 100644 --- a/Doc/library/http.client.rst +++ b/Doc/library/http.client.rst @@ -92,7 +92,7 @@ The module provides the following classes: .. versionchanged:: 3.4.3 This class now performs all the necessary certificate and hostname checks by default. To revert to the previous, unverified, behavior - :func:`ssl._create_unverified_context` can be passed to the *context* + :func:`!ssl._create_unverified_context` can be passed to the *context* parameter. .. versionchanged:: 3.8 @@ -103,7 +103,7 @@ The module provides the following classes: .. versionchanged:: 3.10 This class now sends an ALPN extension with protocol indicator ``http/1.1`` when no *context* is given. Custom *context* should set - ALPN protocols with :meth:`~ssl.SSLContext.set_alpn_protocol`. + ALPN protocols with :meth:`~ssl.SSLContext.set_alpn_protocols`. .. versionchanged:: 3.12 The deprecated *key_file*, *cert_file* and *check_hostname* parameters @@ -124,7 +124,7 @@ This module provides the following function: .. function:: parse_headers(fp) Parse the headers from a file pointer *fp* representing a HTTP - request/response. The file has to be a :class:`BufferedIOBase` reader + request/response. The file has to be a :class:`~io.BufferedIOBase` reader (i.e. 
not text) and must provide a valid :rfc:`2822` style header. This function returns an instance of :class:`http.client.HTTPMessage` @@ -416,7 +416,7 @@ HTTPConnection Objects .. versionadded:: 3.7 -As an alternative to using the :meth:`request` method described above, you can +As an alternative to using the :meth:`~HTTPConnection.request` method described above, you can also send your request step by step, by using the four functions below. @@ -648,6 +648,8 @@ method attribute. Here is an example session that uses the ``PUT`` method:: HTTPMessage Objects ------------------- +.. class:: HTTPMessage(email.message.Message) + An :class:`http.client.HTTPMessage` instance holds the headers from an HTTP response. It is implemented using the :class:`email.message.Message` class. diff --git a/Doc/library/idle.rst b/Doc/library/idle.rst index e710d0bacf3fee..249dc0ea6ba735 100644 --- a/Doc/library/idle.rst +++ b/Doc/library/idle.rst @@ -18,8 +18,6 @@ IDLE is Python's Integrated Development and Learning Environment. IDLE has the following features: -* coded in 100% pure Python, using the :mod:`tkinter` GUI toolkit - * cross-platform: works mostly the same on Windows, Unix, and macOS * Python shell window (interactive interpreter) with colorizing @@ -422,41 +420,34 @@ and that other files do not. Run Python code with the Run menu. Key bindings ^^^^^^^^^^^^ -In this section, 'C' refers to the :kbd:`Control` key on Windows and Unix and -the :kbd:`Command` key on macOS. - -* :kbd:`Backspace` deletes to the left; :kbd:`Del` deletes to the right - -* :kbd:`C-Backspace` delete word left; :kbd:`C-Del` delete word to the right - -* Arrow keys and :kbd:`Page Up`/:kbd:`Page Down` to move around - -* :kbd:`C-LeftArrow` and :kbd:`C-RightArrow` moves by words +The IDLE insertion cursor is a thin vertical bar between character +positions. When characters are entered, the insertion cursor and +everything to its right move right one character and +the new character is entered in the new space. -* :kbd:`Home`/:kbd:`End` go to begin/end of line +Several non-character keys move the cursor and possibly +delete characters. Deletion does not put text on the clipboard, +but IDLE has an undo list. Wherever this doc discusses keys, +'C' refers to the :kbd:`Control` key on Windows and +Unix and the :kbd:`Command` key on macOS. (And all such discussions +assume that the keys have not been re-bound to something else.) -* :kbd:`C-Home`/:kbd:`C-End` go to begin/end of file +* Arrow keys move the cursor one character or line. -* Some useful Emacs bindings are inherited from Tcl/Tk: +* :kbd:`C-LeftArrow` and :kbd:`C-RightArrow` move left or right one word. - * :kbd:`C-a` beginning of line +* :kbd:`Home` and :kbd:`End` go to the beginning or end of the line. - * :kbd:`C-e` end of line +* :kbd:`Page Up` and :kbd:`Page Down` go up or down one screen. - * :kbd:`C-k` kill line (but doesn't put it in clipboard) +* :kbd:`C-Home` and :kbd:`C-End` go to beginning or end of the file. - * :kbd:`C-l` center window around the insertion point +* :kbd:`Backspace` and :kbd:`Del` (or :kbd:`C-d`) delete the previous + or next character. - * :kbd:`C-b` go backward one character without deleting (usually you can - also use the cursor key for this) +* :kbd:`C-Backspace` and :kbd:`C-Del` delete one word left or right. 
- * :kbd:`C-f` go forward one character without deleting (usually you can - also use the cursor key for this) - - * :kbd:`C-p` go up one line (usually you can also use the cursor key for - this) - - * :kbd:`C-d` delete next character +* :kbd:`C-k` deletes ('kills') everything to the right. Standard keybindings (like :kbd:`C-c` to copy and :kbd:`C-v` to paste) may work. Keybindings are selected in the Configure IDLE dialog. @@ -611,23 +602,18 @@ when one requests a restart on the Shell menu, or when one runs code in an editor window. The editing features described in previous subsections work when entering -code interactively. IDLE's Shell window also responds to the following keys. - -* :kbd:`C-c` interrupts executing command - -* :kbd:`C-d` sends end-of-file; closes window if typed at a ``>>>`` prompt - -* :kbd:`Alt-/` (Expand word) is also useful to reduce typing +code interactively. IDLE's Shell window also responds to the following: - Command history +* :kbd:`C-c` attempts to interrupt statement execution (but may fail). - * :kbd:`Alt-p` retrieves previous command matching what you have typed. On - macOS use :kbd:`C-p`. +* :kbd:`C-d` closes Shell if typed at a ``>>>`` prompt. - * :kbd:`Alt-n` retrieves next. On macOS use :kbd:`C-n`. +* :kbd:`Alt-p` and :kbd:`Alt-n` (:kbd:`C-p` and :kbd:`C-n` on macOS) + retrieve to the current prompt the previous or next previously + entered statement that matches anything already typed. - * :kbd:`Return` while the cursor is on any previous command - retrieves that command +* :kbd:`Return` while the cursor is on any previous statement + appends the latter to anything already typed at the prompt. Text colors ^^^^^^^^^^^ diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst index 5c8cc982a89a2c..338a5f9615aae3 100644 --- a/Doc/library/itertools.rst +++ b/Doc/library/itertools.rst @@ -1150,7 +1150,7 @@ The following recipes have a more mathematical flavor: # https://mathworld.wolfram.com/TotientFunction.html # totient(12) --> 4 because len([1, 5, 7, 11]) == 4 for p in unique_justseen(factor(n)): - n = n // p * (p - 1) + n -= n // p return n diff --git a/Doc/library/kde_example.png b/Doc/library/kde_example.png index f4504895699974..4c26f26292faa5 100644 Binary files a/Doc/library/kde_example.png and b/Doc/library/kde_example.png differ diff --git a/Doc/library/marshal.rst b/Doc/library/marshal.rst index 0556f19699dc15..c6a006b7b4028a 100644 --- a/Doc/library/marshal.rst +++ b/Doc/library/marshal.rst @@ -23,7 +23,11 @@ transfer of Python objects through RPC calls, see the modules :mod:`pickle` and :mod:`shelve`. The :mod:`marshal` module exists mainly to support reading and writing the "pseudo-compiled" code for Python modules of :file:`.pyc` files. Therefore, the Python maintainers reserve the right to modify the marshal format -in backward incompatible ways should the need arise. If you're serializing and +in backward incompatible ways should the need arise. +The format of code objects is not compatible between Python versions, +even if the version of the format is the same. +De-serializing a code object in the incorrect Python version has undefined behavior. +If you're serializing and de-serializing Python objects, use the :mod:`pickle` module instead -- the performance is comparable, version independence is guaranteed, and pickle supports a substantially wider range of objects than marshal. 
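Not part of the patch: a minimal sketch of the *allow_code* switch documented in the surrounding marshal hunks, assuming Python 3.13+ where the keyword-only parameter exists (the sample data and compiled expression are illustrative only)::

    import marshal

    data = {"answer": 42, "names": ("spam", "eggs")}

    # Plain data round-trips whether or not code objects are allowed.
    blob = marshal.dumps(data, allow_code=False)
    assert marshal.loads(blob, allow_code=False) == data

    # Code objects round-trip with the default allow_code=True,
    # but only within the same interpreter version.
    code = compile("x + 1", "<demo>", "eval")
    restored = marshal.loads(marshal.dumps(code))
    assert eval(restored, {"x": 41}) == 42

    # With allow_code=False, a contained code object is an unsupported type.
    try:
        marshal.dumps(code, allow_code=False)
    except ValueError:
        print("code objects are rejected when allow_code=False")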
@@ -40,7 +44,8 @@ Not all Python object types are supported; in general, only objects whose value is independent from a particular invocation of Python can be written and read by this module. The following types are supported: booleans, integers, floating point numbers, complex numbers, strings, bytes, bytearrays, tuples, lists, sets, -frozensets, dictionaries, and code objects, where it should be understood that +frozensets, dictionaries, and code objects (if *allow_code* is true), +where it should be understood that tuples, lists, sets, frozensets and dictionaries are only supported as long as the values contained therein are themselves supported. The singletons :const:`None`, :const:`Ellipsis` and :exc:`StopIteration` can also be @@ -54,7 +59,7 @@ bytes-like objects. The module defines these functions: -.. function:: dump(value, file[, version]) +.. function:: dump(value, file, version=version, /, *, allow_code=True) Write the value on the open file. The value must be a supported type. The file must be a writeable :term:`binary file`. @@ -62,19 +67,24 @@ The module defines these functions: If the value has (or contains an object that has) an unsupported type, a :exc:`ValueError` exception is raised --- but garbage data will also be written to the file. The object will not be properly read back by :func:`load`. + :ref:`Code objects ` are only supported if *allow_code* is true. The *version* argument indicates the data format that ``dump`` should use (see below). .. audit-event:: marshal.dumps value,version marshal.dump + .. versionchanged:: 3.13 + Added the *allow_code* parameter. -.. function:: load(file) + +.. function:: load(file, /, *, allow_code=True) Read one value from the open file and return it. If no valid value is read (e.g. because the data has a different Python version's incompatible marshal - format), raise :exc:`EOFError`, :exc:`ValueError` or :exc:`TypeError`. The - file must be a readable :term:`binary file`. + format), raise :exc:`EOFError`, :exc:`ValueError` or :exc:`TypeError`. + :ref:`Code objects ` are only supported if *allow_code* is true. + The file must be a readable :term:`binary file`. .. audit-event:: marshal.load "" marshal.load @@ -88,24 +98,32 @@ The module defines these functions: This call used to raise a ``code.__new__`` audit event for each code object. Now it raises a single ``marshal.load`` event for the entire load operation. + .. versionchanged:: 3.13 + Added the *allow_code* parameter. + -.. function:: dumps(value[, version]) +.. function:: dumps(value, version=version, /, *, allow_code=True) Return the bytes object that would be written to a file by ``dump(value, file)``. The value must be a supported type. Raise a :exc:`ValueError` exception if value has (or contains an object that has) an unsupported type. + :ref:`Code objects ` are only supported if *allow_code* is true. The *version* argument indicates the data format that ``dumps`` should use (see below). .. audit-event:: marshal.dumps value,version marshal.dump + .. versionchanged:: 3.13 + Added the *allow_code* parameter. -.. function:: loads(bytes) + +.. function:: loads(bytes, /, *, allow_code=True) Convert the :term:`bytes-like object` to a value. If no valid value is found, raise - :exc:`EOFError`, :exc:`ValueError` or :exc:`TypeError`. Extra bytes in the - input are ignored. + :exc:`EOFError`, :exc:`ValueError` or :exc:`TypeError`. + :ref:`Code objects ` are only supported if *allow_code* is true. + Extra bytes in the input are ignored. .. 
audit-event:: marshal.loads bytes marshal.load @@ -114,6 +132,9 @@ The module defines these functions: This call used to raise a ``code.__new__`` audit event for each code object. Now it raises a single ``marshal.loads`` event for the entire load operation. + .. versionchanged:: 3.13 + Added the *allow_code* parameter. + In addition, the following constants are defined: diff --git a/Doc/library/mmap.rst b/Doc/library/mmap.rst index 3fa69e717329e4..758721433f77de 100644 --- a/Doc/library/mmap.rst +++ b/Doc/library/mmap.rst @@ -48,7 +48,7 @@ update the underlying file. To map anonymous memory, -1 should be passed as the fileno along with the length. -.. class:: mmap(fileno, length, tagname=None, access=ACCESS_DEFAULT[, offset]) +.. class:: mmap(fileno, length, tagname=None, access=ACCESS_DEFAULT, offset=0) **(Windows version)** Maps *length* bytes from the file specified by the file handle *fileno*, and creates a mmap object. If *length* is larger @@ -62,8 +62,8 @@ To map anonymous memory, -1 should be passed as the fileno along with the length the same file. If you specify the name of an existing tag, that tag is opened, otherwise a new tag of this name is created. If this parameter is omitted or ``None``, the mapping is created without a name. Avoiding the - use of the tag parameter will assist in keeping your code portable between - Unix and Windows. + use of the *tagname* parameter will assist in keeping your code portable + between Unix and Windows. *offset* may be specified as a non-negative integer offset. mmap references will be relative to the offset from the beginning of the file. *offset* @@ -71,7 +71,8 @@ To map anonymous memory, -1 should be passed as the fileno along with the length .. audit-event:: mmap.__new__ fileno,length,access,offset mmap.mmap -.. class:: mmap(fileno, length, flags=MAP_SHARED, prot=PROT_WRITE|PROT_READ, access=ACCESS_DEFAULT[, offset]) +.. class:: mmap(fileno, length, flags=MAP_SHARED, prot=PROT_WRITE|PROT_READ, \ + access=ACCESS_DEFAULT, offset=0, *, trackfd=True) :noindex: **(Unix version)** Maps *length* bytes from the file specified by the file @@ -102,10 +103,20 @@ To map anonymous memory, -1 should be passed as the fileno along with the length defaults to 0. *offset* must be a multiple of :const:`ALLOCATIONGRANULARITY` which is equal to :const:`PAGESIZE` on Unix systems. + If *trackfd* is ``False``, the file descriptor specified by *fileno* will + not be duplicated, and the resulting :class:`!mmap` object will not + be associated with the map's underlying file. + This means that the :meth:`~mmap.mmap.size` and :meth:`~mmap.mmap.resize` + methods will fail. + This mode is useful to limit the number of open file descriptors. + To ensure validity of the created memory mapping the file specified by the descriptor *fileno* is internally automatically synchronized with the physical backing store on macOS. + .. versionchanged:: 3.13 + The *trackfd* parameter was added. + This example shows a simple way of using :class:`~mmap.mmap`:: import mmap @@ -254,9 +265,12 @@ To map anonymous memory, -1 should be passed as the fileno along with the length .. method:: resize(newsize) - Resizes the map and the underlying file, if any. If the mmap was created - with :const:`ACCESS_READ` or :const:`ACCESS_COPY`, resizing the map will - raise a :exc:`TypeError` exception. + Resizes the map and the underlying file, if any. + + Resizing a map created with *access* of :const:`ACCESS_READ` or + :const:`ACCESS_COPY`, will raise a :exc:`TypeError` exception. 
+ Resizing a map created with *trackfd* set to ``False`` + will raise a :exc:`ValueError` exception. **On Windows**: Resizing the map will raise an :exc:`OSError` if there are other maps against the same named file. Resizing an anonymous map (ie against the @@ -372,14 +386,25 @@ MAP_* Constants .. data:: MAP_SHARED MAP_PRIVATE - MAP_DENYWRITE - MAP_EXECUTABLE + MAP_32BIT + MAP_ALIGNED_SUPER MAP_ANON MAP_ANONYMOUS + MAP_CONCEAL + MAP_DENYWRITE + MAP_EXECUTABLE + MAP_HASSEMAPHORE + MAP_JIT + MAP_NOCACHE + MAP_NOEXTEND + MAP_NORESERVE MAP_POPULATE + MAP_RESILIENT_CODESIGN + MAP_RESILIENT_MEDIA MAP_STACK - MAP_ALIGNED_SUPER - MAP_CONCEAL + MAP_TPRO + MAP_TRANSLATED_ALLOW_EXECUTE + MAP_UNIX03 These are the various flags that can be passed to :meth:`mmap.mmap`. :data:`MAP_ALIGNED_SUPER` is only available at FreeBSD and :data:`MAP_CONCEAL` is only available at OpenBSD. Note @@ -392,5 +417,12 @@ MAP_* Constants Added :data:`MAP_STACK` constant. .. versionadded:: 3.12 - Added :data:`MAP_ALIGNED_SUPER` constant. - Added :data:`MAP_CONCEAL` constant. + Added :data:`MAP_ALIGNED_SUPER` and :data:`MAP_CONCEAL` constants. + + .. versionadded:: 3.13 + Added :data:`MAP_32BIT`, :data:`MAP_HASSEMAPHORE`, :data:`MAP_JIT`, + :data:`MAP_NOCACHE`, :data:`MAP_NOEXTEND`, :data:`MAP_NORESERVE`, + :data:`MAP_RESILIENT_CODESIGN`, :data:`MAP_RESILIENT_MEDIA`, + :data:`MAP_TPRO`, :data:`MAP_TRANSLATED_ALLOW_EXECUTE`, and + :data:`MAP_UNIX03` constants. + diff --git a/Doc/library/msvcrt.rst b/Doc/library/msvcrt.rst index 0b059e746c61af..2a6d980ab78a60 100644 --- a/Doc/library/msvcrt.rst +++ b/Doc/library/msvcrt.rst @@ -75,10 +75,14 @@ File Operations .. function:: open_osfhandle(handle, flags) Create a C runtime file descriptor from the file handle *handle*. The *flags* - parameter should be a bitwise OR of :const:`os.O_APPEND`, :const:`os.O_RDONLY`, - and :const:`os.O_TEXT`. The returned file descriptor may be used as a parameter + parameter should be a bitwise OR of :const:`os.O_APPEND`, + :const:`os.O_RDONLY`, :const:`os.O_TEXT` and :const:`os.O_NOINHERIT`. + The returned file descriptor may be used as a parameter to :func:`os.fdopen` to create a file object. + The file descriptor is inheritable by default. Pass the :const:`os.O_NOINHERIT` + flag to make it non-inheritable. + .. audit-event:: msvcrt.open_osfhandle handle,flags msvcrt.open_osfhandle diff --git a/Doc/library/multiprocessing.shared_memory.rst b/Doc/library/multiprocessing.shared_memory.rst index 671130d9b29fc0..2e3f5be4dd2335 100644 --- a/Doc/library/multiprocessing.shared_memory.rst +++ b/Doc/library/multiprocessing.shared_memory.rst @@ -20,8 +20,8 @@ and management of shared memory to be accessed by one or more processes on a multicore or symmetric multiprocessor (SMP) machine. To assist with the life-cycle management of shared memory especially across distinct processes, a :class:`~multiprocessing.managers.BaseManager` subclass, -:class:`SharedMemoryManager`, is also provided in the -``multiprocessing.managers`` module. +:class:`~multiprocessing.managers.SharedMemoryManager`, is also provided in the +:mod:`multiprocessing.managers` module. In this module, shared memory refers to "System V style" shared memory blocks (though is not necessarily implemented explicitly as such) and does not refer @@ -38,7 +38,8 @@ copying of data. .. 
class:: SharedMemory(name=None, create=False, size=0, *, track=True) - Creates a new shared memory block or attaches to an existing shared + Create an instance of the :class:`!SharedMemory` class for either + creating a new shared memory block or attaching to an existing shared memory block. Each shared memory block is assigned a unique name. In this way, one process can create a shared memory block with a particular name and a different process can attach to that same shared @@ -47,58 +48,65 @@ copying of data. As a resource for sharing data across processes, shared memory blocks may outlive the original process that created them. When one process no longer needs access to a shared memory block that might still be - needed by other processes, the :meth:`close()` method should be called. + needed by other processes, the :meth:`close` method should be called. When a shared memory block is no longer needed by any process, the - :meth:`unlink()` method should be called to ensure proper cleanup. - - *name* is the unique name for the requested shared memory, specified as - a string. When creating a new shared memory block, if ``None`` (the - default) is supplied for the name, a novel name will be generated. - - *create* controls whether a new shared memory block is created (``True``) - or an existing shared memory block is attached (``False``). - - *size* specifies the requested number of bytes when creating a new shared - memory block. Because some platforms choose to allocate chunks of memory - based upon that platform's memory page size, the exact size of the shared - memory block may be larger or equal to the size requested. When attaching - to an existing shared memory block, the ``size`` parameter is ignored. - - *track*, when enabled, registers the shared memory block with a resource - tracker process on platforms where the OS does not do this automatically. - The resource tracker ensures proper cleanup of the shared memory even - if all other processes with access to the memory exit without doing so. - Python processes created from a common ancestor using :mod:`multiprocessing` - facilities share a single resource tracker process, and the lifetime of - shared memory segments is handled automatically among these processes. - Python processes created in any other way will receive their own - resource tracker when accessing shared memory with *track* enabled. - This will cause the shared memory to be deleted by the resource tracker - of the first process that terminates. - To avoid this issue, users of :mod:`subprocess` or standalone Python - processes should set *track* to ``False`` when there is already another - process in place that does the bookkeeping. - *track* is ignored on Windows, which has its own tracking and - automatically deletes shared memory when all handles to it have been closed. - - .. versionchanged:: 3.13 Added *track* parameter. + :meth:`unlink` method should be called to ensure proper cleanup. + + :param name: + The unique name for the requested shared memory, specified as a string. + When creating a new shared memory block, if ``None`` (the default) + is supplied for the name, a novel name will be generated. + :type name: str | None + + :param bool create: + Control whether a new shared memory block is created (``True``) + or an existing shared memory block is attached (``False``). + + :param int size: + The requested number of bytes when creating a new shared memory block. 
+ Because some platforms choose to allocate chunks of memory + based upon that platform's memory page size, the exact size of the shared + memory block may be larger or equal to the size requested. + When attaching to an existing shared memory block, + the *size* parameter is ignored. + + :param bool track: + When ``True``, register the shared memory block with a resource + tracker process on platforms where the OS does not do this automatically. + The resource tracker ensures proper cleanup of the shared memory even + if all other processes with access to the memory exit without doing so. + Python processes created from a common ancestor using :mod:`multiprocessing` + facilities share a single resource tracker process, and the lifetime of + shared memory segments is handled automatically among these processes. + Python processes created in any other way will receive their own + resource tracker when accessing shared memory with *track* enabled. + This will cause the shared memory to be deleted by the resource tracker + of the first process that terminates. + To avoid this issue, users of :mod:`subprocess` or standalone Python + processes should set *track* to ``False`` when there is already another + process in place that does the bookkeeping. + *track* is ignored on Windows, which has its own tracking and + automatically deletes shared memory when all handles to it have been closed. + + .. versionadded:: 3.13 + The *track* parameter. .. method:: close() - Closes the file descriptor/handle to the shared memory from this - instance. :meth:`close()` should be called once access to the shared + Close the file descriptor/handle to the shared memory from this + instance. :meth:`close` should be called once access to the shared memory block from this instance is no longer needed. Depending on operating system, the underlying memory may or may not be freed even if all handles to it have been closed. To ensure proper cleanup, - use the :meth:`unlink()` method. + use the :meth:`unlink` method. .. method:: unlink() - Deletes the underlying shared memory block. This should be called only + Delete the underlying shared memory block. This should be called only once per shared memory block regardless of the number of handles to it, even in other processes. - :meth:`unlink()` and :meth:`close()` can be called in any order, but - trying to access data inside a shared memory block after :meth:`unlink()` + :meth:`unlink` and :meth:`close` can be called in any order, but + trying to access data inside a shared memory block after :meth:`unlink` may result in memory access errors, depending on platform. This method has no effect on Windows, where the only way to delete a @@ -145,7 +153,7 @@ instances:: The following example demonstrates a practical use of the :class:`SharedMemory` class with `NumPy arrays `_, accessing the -same ``numpy.ndarray`` from two distinct Python shells: +same :class:`!numpy.ndarray` from two distinct Python shells: .. doctest:: :options: +SKIP @@ -197,43 +205,43 @@ same ``numpy.ndarray`` from two distinct Python shells: .. class:: SharedMemoryManager([address[, authkey]]) :module: multiprocessing.managers - A subclass of :class:`~multiprocessing.managers.BaseManager` which can be + A subclass of :class:`multiprocessing.managers.BaseManager` which can be used for the management of shared memory blocks across processes. A call to :meth:`~multiprocessing.managers.BaseManager.start` on a - :class:`SharedMemoryManager` instance causes a new process to be started. 
+ :class:`!SharedMemoryManager` instance causes a new process to be started. This new process's sole purpose is to manage the life cycle of all shared memory blocks created through it. To trigger the release of all shared memory blocks managed by that process, call - :meth:`~multiprocessing.managers.BaseManager.shutdown()` on the instance. - This triggers a :meth:`SharedMemory.unlink()` call on all of the - :class:`SharedMemory` objects managed by that process and then - stops the process itself. By creating ``SharedMemory`` instances - through a ``SharedMemoryManager``, we avoid the need to manually track + :meth:`~multiprocessing.managers.BaseManager.shutdown` on the instance. + This triggers a :meth:`~multiprocessing.shared_memory.SharedMemory.unlink` call + on all of the :class:`SharedMemory` objects managed by that process and then + stops the process itself. By creating :class:`!SharedMemory` instances + through a :class:`!SharedMemoryManager`, we avoid the need to manually track and trigger the freeing of shared memory resources. This class provides methods for creating and returning :class:`SharedMemory` instances and for creating a list-like object (:class:`ShareableList`) backed by shared memory. - Refer to :class:`multiprocessing.managers.BaseManager` for a description + Refer to :class:`~multiprocessing.managers.BaseManager` for a description of the inherited *address* and *authkey* optional input arguments and how - they may be used to connect to an existing ``SharedMemoryManager`` service + they may be used to connect to an existing :class:`!SharedMemoryManager` service from other processes. .. method:: SharedMemory(size) Create and return a new :class:`SharedMemory` object with the - specified ``size`` in bytes. + specified *size* in bytes. .. method:: ShareableList(sequence) Create and return a new :class:`ShareableList` object, initialized - by the values from the input ``sequence``. + by the values from the input *sequence*. The following example demonstrates the basic mechanisms of a -:class:`SharedMemoryManager`: +:class:`~multiprocessing.managers.SharedMemoryManager`: .. doctest:: :options: +SKIP @@ -251,9 +259,9 @@ The following example demonstrates the basic mechanisms of a >>> smm.shutdown() # Calls unlink() on sl, raw_shm, and another_sl The following example depicts a potentially more convenient pattern for using -:class:`SharedMemoryManager` objects via the :keyword:`with` statement to -ensure that all shared memory blocks are released after they are no longer -needed: +:class:`~multiprocessing.managers.SharedMemoryManager` objects via the +:keyword:`with` statement to ensure that all shared memory blocks are released +after they are no longer needed: .. doctest:: :options: +SKIP @@ -269,38 +277,46 @@ needed: ... p2.join() # Wait for all work to complete in both processes ... total_result = sum(sl) # Consolidate the partial results now in sl -When using a :class:`SharedMemoryManager` in a :keyword:`with` statement, the -shared memory blocks created using that manager are all released when the -:keyword:`with` statement's code block finishes execution. +When using a :class:`~multiprocessing.managers.SharedMemoryManager` +in a :keyword:`with` statement, the shared memory blocks created using that +manager are all released when the :keyword:`!with` statement's code block +finishes execution. -.. class:: ShareableList(sequence=None, \*, name=None) +.. 
class:: ShareableList(sequence=None, *, name=None) - Provides a mutable list-like object where all values stored within are - stored in a shared memory block. This constrains storable values to - only the ``int`` (signed 64-bit), ``float``, ``bool``, ``str`` (less - than 10M bytes each when encoded as utf-8), ``bytes`` (less than 10M - bytes each), and ``None`` built-in data types. It also notably - differs from the built-in ``list`` type in that these lists can not - change their overall length (i.e. no append, insert, etc.) and do not - support the dynamic creation of new :class:`ShareableList` instances + Provide a mutable list-like object where all values stored within are + stored in a shared memory block. + This constrains storable values to the following built-in data types: + + * :class:`int` (signed 64-bit) + * :class:`float` + * :class:`bool` + * :class:`str` (less than 10M bytes each when encoded as UTF-8) + * :class:`bytes` (less than 10M bytes each) + * ``None`` + + It also notably differs from the built-in :class:`list` type + in that these lists can not change their overall length + (i.e. no :meth:`!append`, :meth:`!insert`, etc.) and do not + support the dynamic creation of new :class:`!ShareableList` instances via slicing. - *sequence* is used in populating a new ``ShareableList`` full of values. + *sequence* is used in populating a new :class:`!ShareableList` full of values. Set to ``None`` to instead attach to an already existing - ``ShareableList`` by its unique shared memory name. + :class:`!ShareableList` by its unique shared memory name. *name* is the unique name for the requested shared memory, as described in the definition for :class:`SharedMemory`. When attaching to an - existing ``ShareableList``, specify its shared memory block's unique - name while leaving ``sequence`` set to ``None``. + existing :class:`!ShareableList`, specify its shared memory block's unique + name while leaving *sequence* set to ``None``. .. note:: A known issue exists for :class:`bytes` and :class:`str` values. If they end with ``\x00`` nul bytes or characters, those may be *silently stripped* when fetching them by index from the - :class:`ShareableList`. This ``.rstrip(b'\x00')`` behavior is + :class:`!ShareableList`. This ``.rstrip(b'\x00')`` behavior is considered a bug and may go away in the future. See :gh:`106939`. For applications where rstripping of trailing nulls is a problem, @@ -326,12 +342,12 @@ shared memory blocks created using that manager are all released when the .. method:: count(value) - Returns the number of occurrences of ``value``. + Return the number of occurrences of *value*. .. method:: index(value) - Returns first index position of ``value``. Raises :exc:`ValueError` if - ``value`` is not present. + Return first index position of *value*. + Raise :exc:`ValueError` if *value* is not present. .. attribute:: format @@ -391,8 +407,8 @@ behind it: >>> c.shm.close() >>> c.shm.unlink() -The following examples demonstrates that ``ShareableList`` -(and underlying ``SharedMemory``) objects +The following examples demonstrates that :class:`ShareableList` +(and underlying :class:`SharedMemory`) objects can be pickled and unpickled if needed. Note, that it will still be the same shared object. This happens, because the deserialized object has diff --git a/Doc/library/os.path.rst b/Doc/library/os.path.rst index 95933f56d50542..3cab7a260df008 100644 --- a/Doc/library/os.path.rst +++ b/Doc/library/os.path.rst @@ -239,12 +239,16 @@ the :mod:`glob` module.) .. 
function:: isabs(path) Return ``True`` if *path* is an absolute pathname. On Unix, that means it - begins with a slash, on Windows that it begins with a (back)slash after chopping - off a potential drive letter. + begins with a slash, on Windows that it begins with two (back)slashes, or a + drive letter, colon, and (back)slash together. .. versionchanged:: 3.6 Accepts a :term:`path-like object`. + .. versionchanged:: 3.13 + On Windows, returns ``False`` if the given path starts with exactly one + (back)slash. + .. function:: isfile(path) diff --git a/Doc/library/pathlib.rst b/Doc/library/pathlib.rst index 60791725c2323d..b924f470e0be04 100644 --- a/Doc/library/pathlib.rst +++ b/Doc/library/pathlib.rst @@ -485,19 +485,6 @@ Pure paths provide the following methods and properties: 'c:/windows' -.. method:: PurePath.as_uri() - - Represent the path as a ``file`` URI. :exc:`ValueError` is raised if - the path isn't absolute. - - >>> p = PurePosixPath('/etc/passwd') - >>> p.as_uri() - 'file:///etc/passwd' - >>> p = PureWindowsPath('c:/Windows') - >>> p.as_uri() - 'file:///c:/Windows' - - .. method:: PurePath.is_absolute() Return whether the path is absolute or not. A path is considered absolute @@ -813,6 +800,67 @@ bugs or failures in your application):: UnsupportedOperation: cannot instantiate 'WindowsPath' on your system +File URIs +^^^^^^^^^ + +Concrete path objects can be created from, and represented as, 'file' URIs +conforming to :rfc:`8089`. + +.. note:: + + File URIs are not portable across machines with different + :ref:`filesystem encodings `. + +.. classmethod:: Path.from_uri(uri) + + Return a new path object from parsing a 'file' URI. For example:: + + >>> p = Path.from_uri('file:///etc/hosts') + PosixPath('/etc/hosts') + + On Windows, DOS device and UNC paths may be parsed from URIs:: + + >>> p = Path.from_uri('file:///c:/windows') + WindowsPath('c:/windows') + >>> p = Path.from_uri('file://server/share') + WindowsPath('//server/share') + + Several variant forms are supported:: + + >>> p = Path.from_uri('file:////server/share') + WindowsPath('//server/share') + >>> p = Path.from_uri('file://///server/share') + WindowsPath('//server/share') + >>> p = Path.from_uri('file:c:/windows') + WindowsPath('c:/windows') + >>> p = Path.from_uri('file:/c|/windows') + WindowsPath('c:/windows') + + :exc:`ValueError` is raised if the URI does not start with ``file:``, or + the parsed path isn't absolute. + + .. versionadded:: 3.13 + + +.. method:: Path.as_uri() + + Represent the path as a 'file' URI. :exc:`ValueError` is raised if + the path isn't absolute. + + .. code-block:: pycon + + >>> p = PosixPath('/etc/passwd') + >>> p.as_uri() + 'file:///etc/passwd' + >>> p = WindowsPath('c:/Windows') + >>> p.as_uri() + 'file:///c:/Windows' + + For historical reasons, this method is also available from + :class:`PurePath` objects. However, its use of :func:`os.fsencode` makes + it strictly impure. + + Methods ^^^^^^^ @@ -853,42 +901,6 @@ call fails (for example because the path doesn't exist). .. versionadded:: 3.5 -.. classmethod:: Path.from_uri(uri) - - Return a new path object from parsing a 'file' URI conforming to - :rfc:`8089`. 
For example:: - - >>> p = Path.from_uri('file:///etc/hosts') - PosixPath('/etc/hosts') - - On Windows, DOS device and UNC paths may be parsed from URIs:: - - >>> p = Path.from_uri('file:///c:/windows') - WindowsPath('c:/windows') - >>> p = Path.from_uri('file://server/share') - WindowsPath('//server/share') - - Several variant forms are supported:: - - >>> p = Path.from_uri('file:////server/share') - WindowsPath('//server/share') - >>> p = Path.from_uri('file://///server/share') - WindowsPath('//server/share') - >>> p = Path.from_uri('file:c:/windows') - WindowsPath('c:/windows') - >>> p = Path.from_uri('file:/c|/windows') - WindowsPath('c:/windows') - - :exc:`ValueError` is raised if the URI does not start with ``file:``, or - the parsed path isn't absolute. - - :func:`os.fsdecode` is used to decode percent-escaped byte sequences, and - so file URIs are not portable across machines with different - :ref:`filesystem encodings `. - - .. versionadded:: 3.13 - - .. method:: Path.stat(*, follow_symlinks=True) Return a :class:`os.stat_result` object containing information about this path, like :func:`os.stat`. @@ -993,6 +1005,10 @@ call fails (for example because the path doesn't exist). Set *follow_symlinks* to ``True`` or ``False`` to improve performance of recursive globbing. + This method calls :meth:`Path.is_dir` on the top-level directory and + propagates any :exc:`OSError` exception that is raised. Subsequent + :exc:`OSError` exceptions from scanning directories are suppressed. + By default, or when the *case_sensitive* keyword-only argument is set to ``None``, this method matches paths using platform-specific casing rules: typically, case-sensitive on POSIX, and case-insensitive on Windows. @@ -1020,6 +1036,9 @@ call fails (for example because the path doesn't exist). future Python release, patterns with this ending will match both files and directories. Add a trailing slash to match only directories. + .. versionchanged:: 3.13 + The *pattern* parameter accepts a :term:`path-like object`. + .. method:: Path.group(*, follow_symlinks=True) Return the name of the group owning the file. :exc:`KeyError` is raised @@ -1482,6 +1501,9 @@ call fails (for example because the path doesn't exist). .. versionchanged:: 3.13 The *follow_symlinks* parameter was added. + .. versionchanged:: 3.13 + The *pattern* parameter accepts a :term:`path-like object`. + .. method:: Path.rmdir() Remove this directory. The directory must be empty. diff --git a/Doc/library/pickle.rst b/Doc/library/pickle.rst index 93387fb0b45038..cfb251fca5c7cd 100644 --- a/Doc/library/pickle.rst +++ b/Doc/library/pickle.rst @@ -345,6 +345,10 @@ The :mod:`pickle` module exports three classes, :class:`Pickler`, See :ref:`pickle-persistent` for details and examples of uses. + .. versionchanged:: 3.13 + Add the default implementation of this method in the C implementation + of :class:`!Pickler`. + .. attribute:: dispatch_table A pickler object's dispatch table is a registry of *reduction @@ -446,6 +450,10 @@ The :mod:`pickle` module exports three classes, :class:`Pickler`, See :ref:`pickle-persistent` for details and examples of uses. + .. versionchanged:: 3.13 + Add the default implementation of this method in the C implementation + of :class:`!Unpickler`. + .. 
method:: find_class(module, name) Import *module* if necessary and return the object called *name* from it, diff --git a/Doc/library/plistlib.rst b/Doc/library/plistlib.rst index 10f1a48fc70a72..7416ca2650bab4 100644 --- a/Doc/library/plistlib.rst +++ b/Doc/library/plistlib.rst @@ -27,7 +27,7 @@ top level object is a dictionary. To write out and to parse a plist file, use the :func:`dump` and :func:`load` functions. -To work with plist data in bytes objects, use :func:`dumps` +To work with plist data in bytes or string objects, use :func:`dumps` and :func:`loads`. Values can be strings, integers, floats, booleans, tuples, lists, dictionaries @@ -89,11 +89,13 @@ This module defines the following functions: .. function:: loads(data, *, fmt=None, dict_type=dict, aware_datetime=False) - Load a plist from a bytes object. See :func:`load` for an explanation of - the keyword arguments. + Load a plist from a bytes or string object. See :func:`load` for an + explanation of the keyword arguments. .. versionadded:: 3.4 + .. versionchanged:: 3.13 + *data* can be a string when *fmt* equals :data:`FMT_XML`. .. function:: dump(value, fp, *, fmt=FMT_XML, sort_keys=True, skipkeys=False, aware_datetime=False) diff --git a/Doc/library/pyclbr.rst b/Doc/library/pyclbr.rst index 1c40ba4838ca75..1e9876849b02f3 100644 --- a/Doc/library/pyclbr.rst +++ b/Doc/library/pyclbr.rst @@ -58,106 +58,115 @@ of these classes. Function Objects ---------------- -Class :class:`Function` instances describe functions defined by def -statements. They have the following attributes: +.. class:: Function -.. attribute:: Function.file + Class :class:`!Function` instances describe functions defined by def + statements. They have the following attributes: - Name of the file in which the function is defined. + .. attribute:: file -.. attribute:: Function.module + Name of the file in which the function is defined. - The name of the module defining the function described. + .. attribute:: module -.. attribute:: Function.name + The name of the module defining the function described. - The name of the function. + .. attribute:: name -.. attribute:: Function.lineno + The name of the function. - The line number in the file where the definition starts. + .. attribute:: lineno -.. attribute:: Function.parent + The line number in the file where the definition starts. - For top-level functions, None. For nested functions, the parent. - .. versionadded:: 3.7 + .. attribute:: parent + For top-level functions, ``None``. For nested functions, the parent. -.. attribute:: Function.children + .. versionadded:: 3.7 - A dictionary mapping names to descriptors for nested functions and - classes. - .. versionadded:: 3.7 + .. attribute:: children + A :class:`dictionary ` mapping names to descriptors for nested functions and + classes. -.. attribute:: Function.is_async + .. versionadded:: 3.7 - ``True`` for functions that are defined with the ``async`` prefix, ``False`` otherwise. - .. versionadded:: 3.10 + .. attribute:: is_async + + ``True`` for functions that are defined with the + :keyword:`async ` prefix, ``False`` otherwise. + + .. versionadded:: 3.10 .. _pyclbr-class-objects: Class Objects ------------- -Class :class:`Class` instances describe classes defined by class -statements. They have the same attributes as Functions and two more. + +.. class:: Class + + Class :class:`!Class` instances describe classes defined by class + statements. They have the same attributes as :class:`Functions ` + and two more. -.. attribute:: Class.file + .. 
attribute:: file - Name of the file in which the class is defined. + Name of the file in which the class is defined. -.. attribute:: Class.module + .. attribute:: module - The name of the module defining the class described. + The name of the module defining the class described. -.. attribute:: Class.name + .. attribute:: name - The name of the class. + The name of the class. -.. attribute:: Class.lineno + .. attribute:: lineno - The line number in the file where the definition starts. + The line number in the file where the definition starts. -.. attribute:: Class.parent + .. attribute:: parent - For top-level classes, None. For nested classes, the parent. + For top-level classes, None. For nested classes, the parent. - .. versionadded:: 3.7 + .. versionadded:: 3.7 -.. attribute:: Class.children + .. attribute:: children - A dictionary mapping names to descriptors for nested functions and - classes. + A dictionary mapping names to descriptors for nested functions and + classes. - .. versionadded:: 3.7 + .. versionadded:: 3.7 -.. attribute:: Class.super + .. attribute:: super - A list of :class:`Class` objects which describe the immediate base - classes of the class being described. Classes which are named as - superclasses but which are not discoverable by :func:`readmodule_ex` - are listed as a string with the class name instead of as - :class:`Class` objects. + A list of :class:`!Class` objects which describe the immediate base + classes of the class being described. Classes which are named as + superclasses but which are not discoverable by :func:`readmodule_ex` + are listed as a string with the class name instead of as + :class:`!Class` objects. -.. attribute:: Class.methods + .. attribute:: methods - A dictionary mapping method names to line numbers. This can be - derived from the newer children dictionary, but remains for - back-compatibility. + A :class:`dictionary ` mapping method names to line numbers. + This can be derived from the newer :attr:`children` dictionary, + but remains for + back-compatibility. diff --git a/Doc/library/re.rst b/Doc/library/re.rst index 302f7224de4a7a..5bb93390aa5f79 100644 --- a/Doc/library/re.rst +++ b/Doc/library/re.rst @@ -17,7 +17,7 @@ those found in Perl. Both patterns and strings to be searched can be Unicode strings (:class:`str`) as well as 8-bit strings (:class:`bytes`). However, Unicode strings and 8-bit strings cannot be mixed: -that is, you cannot match a Unicode string with a byte pattern or +that is, you cannot match a Unicode string with a bytes pattern or vice-versa; similarly, when asking for a substitution, the replacement string must be of the same type as both the pattern and the search string. @@ -257,8 +257,7 @@ The special characters are: .. index:: single: \ (backslash); in regular expressions * Character classes such as ``\w`` or ``\S`` (defined below) are also accepted - inside a set, although the characters they match depends on whether - :const:`ASCII` or :const:`LOCALE` mode is in force. + inside a set, although the characters they match depend on the flags_ used. .. index:: single: ^ (caret); in regular expressions @@ -326,18 +325,24 @@ The special characters are: currently supported extensions. ``(?aiLmsux)`` - (One or more letters from the set ``'a'``, ``'i'``, ``'L'``, ``'m'``, - ``'s'``, ``'u'``, ``'x'``.) 
The group matches the empty string; the - letters set the corresponding flags: :const:`re.A` (ASCII-only matching), - :const:`re.I` (ignore case), :const:`re.L` (locale dependent), - :const:`re.M` (multi-line), :const:`re.S` (dot matches all), - :const:`re.U` (Unicode matching), and :const:`re.X` (verbose), - for the entire regular expression. + (One or more letters from the set + ``'a'``, ``'i'``, ``'L'``, ``'m'``, ``'s'``, ``'u'``, ``'x'``.) + The group matches the empty string; + the letters set the corresponding flags for the entire regular expression: + + * :const:`re.A` (ASCII-only matching) + * :const:`re.I` (ignore case) + * :const:`re.L` (locale dependent) + * :const:`re.M` (multi-line) + * :const:`re.S` (dot matches all) + * :const:`re.U` (Unicode matching) + * :const:`re.X` (verbose) + (The flags are described in :ref:`contents-of-module-re`.) This is useful if you wish to include the flags as part of the regular expression, instead of passing a *flag* argument to the - :func:`re.compile` function. Flags should be used first in the - expression string. + :func:`re.compile` function. + Flags should be used first in the expression string. .. versionchanged:: 3.11 This construction can only be used at the start of the expression. @@ -351,14 +356,20 @@ The special characters are: pattern. ``(?aiLmsux-imsx:...)`` - (Zero or more letters from the set ``'a'``, ``'i'``, ``'L'``, ``'m'``, - ``'s'``, ``'u'``, ``'x'``, optionally followed by ``'-'`` followed by + (Zero or more letters from the set + ``'a'``, ``'i'``, ``'L'``, ``'m'``, ``'s'``, ``'u'``, ``'x'``, + optionally followed by ``'-'`` followed by one or more letters from the ``'i'``, ``'m'``, ``'s'``, ``'x'``.) - The letters set or remove the corresponding flags: - :const:`re.A` (ASCII-only matching), :const:`re.I` (ignore case), - :const:`re.L` (locale dependent), :const:`re.M` (multi-line), - :const:`re.S` (dot matches all), :const:`re.U` (Unicode matching), - and :const:`re.X` (verbose), for the part of the expression. + The letters set or remove the corresponding flags for the part of the expression: + + * :const:`re.A` (ASCII-only matching) + * :const:`re.I` (ignore case) + * :const:`re.L` (locale dependent) + * :const:`re.M` (multi-line) + * :const:`re.S` (dot matches all) + * :const:`re.U` (Unicode matching) + * :const:`re.X` (verbose) + (The flags are described in :ref:`contents-of-module-re`.) The letters ``'a'``, ``'L'`` and ``'u'`` are mutually exclusive when used @@ -366,7 +377,7 @@ The special characters are: when one of them appears in an inline group, it overrides the matching mode in the enclosing group. In Unicode patterns ``(?a:...)`` switches to ASCII-only matching, and ``(?u:...)`` switches to Unicode matching - (default). In byte pattern ``(?L:...)`` switches to locale depending + (default). In bytes patterns ``(?L:...)`` switches to locale dependent matching, and ``(?a:...)`` switches to ASCII-only matching (default). This override is only in effect for the narrow inline group, and the original matching mode is restored outside of the group. @@ -529,47 +540,61 @@ character ``'$'``. ``\b`` Matches the empty string, but only at the beginning or end of a word. - A word is defined as a sequence of word characters. Note that formally, - ``\b`` is defined as the boundary between a ``\w`` and a ``\W`` character - (or vice versa), or between ``\w`` and the beginning/end of the string. - This means that ``r'\bfoo\b'`` matches ``'foo'``, ``'foo.'``, ``'(foo)'``, - ``'bar foo baz'`` but not ``'foobar'`` or ``'foo3'``. 
- - By default Unicode alphanumerics are the ones used in Unicode patterns, but - this can be changed by using the :const:`ASCII` flag. Word boundaries are - determined by the current locale if the :const:`LOCALE` flag is used. - Inside a character range, ``\b`` represents the backspace character, for - compatibility with Python's string literals. + A word is defined as a sequence of word characters. + Note that formally, ``\b`` is defined as the boundary + between a ``\w`` and a ``\W`` character (or vice versa), + or between ``\w`` and the beginning or end of the string. + This means that ``r'\bat\b'`` matches ``'at'``, ``'at.'``, ``'(at)'``, + and ``'as at ay'`` but not ``'attempt'`` or ``'atlas'``. + + The default word characters in Unicode (str) patterns + are Unicode alphanumerics and the underscore, + but this can be changed by using the :py:const:`~re.ASCII` flag. + Word boundaries are determined by the current locale + if the :py:const:`~re.LOCALE` flag is used. + + .. note:: + + Inside a character range, ``\b`` represents the backspace character, + for compatibility with Python's string literals. .. index:: single: \B; in regular expressions ``\B`` - Matches the empty string, but only when it is *not* at the beginning or end - of a word. This means that ``r'py\B'`` matches ``'python'``, ``'py3'``, - ``'py2'``, but not ``'py'``, ``'py.'``, or ``'py!'``. - ``\B`` is just the opposite of ``\b``, so word characters in Unicode - patterns are Unicode alphanumerics or the underscore, although this can - be changed by using the :const:`ASCII` flag. Word boundaries are - determined by the current locale if the :const:`LOCALE` flag is used. + Matches the empty string, + but only when it is *not* at the beginning or end of a word. + This means that ``r'at\B'`` matches ``'athens'``, ``'atom'``, + ``'attorney'``, but not ``'at'``, ``'at.'``, or ``'at!'``. + ``\B`` is the opposite of ``\b``, + so word characters in Unicode (str) patterns + are Unicode alphanumerics or the underscore, + although this can be changed by using the :py:const:`~re.ASCII` flag. + Word boundaries are determined by the current locale + if the :py:const:`~re.LOCALE` flag is used. .. index:: single: \d; in regular expressions ``\d`` For Unicode (str) patterns: - Matches any Unicode decimal digit (that is, any character in - Unicode character category [Nd]). This includes ``[0-9]``, and - also many other digit characters. If the :const:`ASCII` flag is - used only ``[0-9]`` is matched. + Matches any Unicode decimal digit + (that is, any character in Unicode character category `[Nd]`__). + This includes ``[0-9]``, and also many other digit characters. + + Matches ``[0-9]`` if the :py:const:`~re.ASCII` flag is used. + + __ https://www.unicode.org/versions/Unicode15.0.0/ch04.pdf#G134153 For 8-bit (bytes) patterns: - Matches any decimal digit; this is equivalent to ``[0-9]``. + Matches any decimal digit in the ASCII character set; + this is equivalent to ``[0-9]``. .. index:: single: \D; in regular expressions ``\D`` - Matches any character which is not a decimal digit. This is - the opposite of ``\d``. If the :const:`ASCII` flag is used this - becomes the equivalent of ``[^0-9]``. + Matches any character which is not a decimal digit. + This is the opposite of ``\d``. + + Matches ``[^0-9]`` if the :py:const:`~re.ASCII` flag is used. .. index:: single: \s; in regular expressions @@ -578,8 +603,9 @@ character ``'$'``. 
Matches Unicode whitespace characters (which includes ``[ \t\n\r\f\v]``, and also many other characters, for example the non-breaking spaces mandated by typography rules in many - languages). If the :const:`ASCII` flag is used, only - ``[ \t\n\r\f\v]`` is matched. + languages). + + Matches ``[ \t\n\r\f\v]`` if the :py:const:`~re.ASCII` flag is used. For 8-bit (bytes) patterns: Matches characters considered whitespace in the ASCII character set; @@ -589,30 +615,39 @@ character ``'$'``. ``\S`` Matches any character which is not a whitespace character. This is - the opposite of ``\s``. If the :const:`ASCII` flag is used this - becomes the equivalent of ``[^ \t\n\r\f\v]``. + the opposite of ``\s``. + + Matches ``[^ \t\n\r\f\v]`` if the :py:const:`~re.ASCII` flag is used. .. index:: single: \w; in regular expressions ``\w`` For Unicode (str) patterns: - Matches Unicode word characters; this includes alphanumeric characters (as defined by :meth:`str.isalnum`) + Matches Unicode word characters; + this includes all Unicode alphanumeric characters + (as defined by :py:meth:`str.isalnum`), as well as the underscore (``_``). - If the :const:`ASCII` flag is used, only ``[a-zA-Z0-9_]`` is matched. + + Matches ``[a-zA-Z0-9_]`` if the :py:const:`~re.ASCII` flag is used. For 8-bit (bytes) patterns: Matches characters considered alphanumeric in the ASCII character set; - this is equivalent to ``[a-zA-Z0-9_]``. If the :const:`LOCALE` flag is - used, matches characters considered alphanumeric in the current locale - and the underscore. + this is equivalent to ``[a-zA-Z0-9_]``. + If the :py:const:`~re.LOCALE` flag is used, + matches characters considered alphanumeric in the current locale and the underscore. .. index:: single: \W; in regular expressions ``\W`` - Matches any character which is not a word character. This is - the opposite of ``\w``. If the :const:`ASCII` flag is used this - becomes the equivalent of ``[^a-zA-Z0-9_]``. If the :const:`LOCALE` flag is - used, matches characters which are neither alphanumeric in the current locale + Matches any character which is not a word character. + This is the opposite of ``\w``. + By default, matches non-underscore (``_``) characters + for which :py:meth:`str.isalnum` returns ``False``. + + Matches ``[^a-zA-Z0-9_]`` if the :py:const:`~re.ASCII` flag is used. + + If the :py:const:`~re.LOCALE` flag is used, + matches characters which are neither alphanumeric in the current locale nor the underscore. .. index:: single: \Z; in regular expressions @@ -644,9 +679,11 @@ string literals are also accepted by the regular expression parser:: (Note that ``\b`` is used to represent word boundaries, and means "backspace" only inside character classes.) -``'\u'``, ``'\U'``, and ``'\N'`` escape sequences are only recognized in Unicode -patterns. In bytes patterns they are errors. Unknown escapes of ASCII -letters are reserved for future use and treated as errors. +``'\u'``, ``'\U'``, and ``'\N'`` escape sequences are +only recognized in Unicode (str) patterns. +In bytes patterns they are errors. +Unknown escapes of ASCII letters are reserved +for future use and treated as errors. Octal escapes are included in a limited form. If the first digit is a 0, or if there are three octal digits, it is considered an octal escape. Otherwise, it is @@ -694,30 +731,37 @@ Flags Make ``\w``, ``\W``, ``\b``, ``\B``, ``\d``, ``\D``, ``\s`` and ``\S`` perform ASCII-only matching instead of full Unicode matching. This is only - meaningful for Unicode patterns, and is ignored for byte patterns. 
+ meaningful for Unicode (str) patterns, and is ignored for bytes patterns. + Corresponds to the inline flag ``(?a)``. - Note that for backward compatibility, the :const:`re.U` flag still - exists (as well as its synonym :const:`re.UNICODE` and its embedded - counterpart ``(?u)``), but these are redundant in Python 3 since - matches are Unicode by default for strings (and Unicode matching - isn't allowed for bytes). + .. note:: + + The :py:const:`~re.U` flag still exists for backward compatibility, + but is redundant in Python 3 since + matches are Unicode by default for ``str`` patterns, + and Unicode matching isn't allowed for bytes patterns. + :py:const:`~re.UNICODE` and the inline flag ``(?u)`` are similarly redundant. .. data:: DEBUG Display debug information about compiled expression. + No corresponding inline flag. .. data:: I IGNORECASE - Perform case-insensitive matching; expressions like ``[A-Z]`` will also - match lowercase letters. Full Unicode matching (such as ``Ü`` matching - ``ü``) also works unless the :const:`re.ASCII` flag is used to disable - non-ASCII matches. The current locale does not change the effect of this - flag unless the :const:`re.LOCALE` flag is also used. + Perform case-insensitive matching; + expressions like ``[A-Z]`` will also match lowercase letters. + Full Unicode matching (such as ``Ü`` matching ``ü``) + also works unless the :py:const:`~re.ASCII` flag + is used to disable non-ASCII matches. + The current locale does not change the effect of this flag + unless the :py:const:`~re.LOCALE` flag is also used. + Corresponds to the inline flag ``(?i)``. Note that when the Unicode patterns ``[a-z]`` or ``[A-Z]`` are used in @@ -725,29 +769,35 @@ Flags letters and 4 additional non-ASCII letters: 'İ' (U+0130, Latin capital letter I with dot above), 'ı' (U+0131, Latin small letter dotless i), 'ſ' (U+017F, Latin small letter long s) and 'K' (U+212A, Kelvin sign). - If the :const:`ASCII` flag is used, only letters 'a' to 'z' + If the :py:const:`~re.ASCII` flag is used, only letters 'a' to 'z' and 'A' to 'Z' are matched. .. data:: L LOCALE Make ``\w``, ``\W``, ``\b``, ``\B`` and case-insensitive matching - dependent on the current locale. This flag can be used only with bytes - patterns. The use of this flag is discouraged as the locale mechanism - is very unreliable, it only handles one "culture" at a time, and it only - works with 8-bit locales. Unicode matching is already enabled by default - in Python 3 for Unicode (str) patterns, and it is able to handle different - locales/languages. + dependent on the current locale. + This flag can be used only with bytes patterns. + Corresponds to the inline flag ``(?L)``. + .. warning:: + + This flag is discouraged; consider Unicode matching instead. + The locale mechanism is very unreliable + as it only handles one "culture" at a time + and only works with 8-bit locales. + Unicode matching is enabled by default for Unicode (str) patterns + and it is able to handle different locales and languages. + .. versionchanged:: 3.6 - :const:`re.LOCALE` can be used only with bytes patterns and is - not compatible with :const:`re.ASCII`. + :py:const:`~re.LOCALE` can be used only with bytes patterns + and is not compatible with :py:const:`~re.ASCII`. .. versionchanged:: 3.7 - Compiled regular expression objects with the :const:`re.LOCALE` flag no - longer depend on the locale at compile time. Only the locale at - matching time affects the result of matching. 
+ Compiled regular expression objects with the :py:const:`~re.LOCALE` flag + no longer depend on the locale at compile time. + Only the locale at matching time affects the result of matching. .. data:: M @@ -759,6 +809,7 @@ Flags end of each line (immediately preceding each newline). By default, ``'^'`` matches only at the beginning of the string, and ``'$'`` only at the end of the string and immediately before the newline (if any) at the end of the string. + Corresponds to the inline flag ``(?m)``. .. data:: NOFLAG @@ -778,19 +829,19 @@ Flags Make the ``'.'`` special character match any character at all, including a newline; without this flag, ``'.'`` will match anything *except* a newline. + Corresponds to the inline flag ``(?s)``. .. data:: U UNICODE - In Python 2, this flag made :ref:`special sequences ` - include Unicode characters in matches. Since Python 3, Unicode characters - are matched by default. - - See :const:`A` for restricting matching on ASCII characters instead. + In Python 3, Unicode characters are matched by default + for ``str`` patterns. + This flag is therefore redundant with **no effect** + and is only kept for backward compatibility. - This flag is only kept for backward compatibility. + See :py:const:`~re.ASCII` to restrict matching to ASCII characters instead. .. data:: X VERBOSE @@ -914,6 +965,8 @@ Functions Empty matches for the pattern split the string only when not adjacent to a previous empty match. + .. code:: pycon + >>> re.split(r'\b', 'Words, words, words.') ['', 'Words', ', ', 'words', ', ', 'words', '.'] >>> re.split(r'\W*', '...words...') @@ -1237,7 +1290,7 @@ Regular Expression Objects The regex matching flags. This is a combination of the flags given to :func:`.compile`, any ``(?...)`` inline flags in the pattern, and implicit - flags such as :data:`UNICODE` if the pattern is a Unicode string. + flags such as :py:const:`~re.UNICODE` if the pattern is a Unicode string. .. attribute:: Pattern.groups diff --git a/Doc/library/select.rst b/Doc/library/select.rst index c2941e628d9d78..a0058046d0ce4c 100644 --- a/Doc/library/select.rst +++ b/Doc/library/select.rst @@ -185,8 +185,8 @@ The module defines the following: ----------------------------- Solaris and derivatives have ``/dev/poll``. While :c:func:`!select` is -O(highest file descriptor) and :c:func:`!poll` is O(number of file -descriptors), ``/dev/poll`` is O(active file descriptors). +*O*\ (*highest file descriptor*) and :c:func:`!poll` is *O*\ (*number of file +descriptors*), ``/dev/poll`` is *O*\ (*active file descriptors*). ``/dev/poll`` behaviour is very close to the standard :c:func:`!poll` object. @@ -381,8 +381,8 @@ scalability for network servers that service many, many clients at the same time. :c:func:`!poll` scales better because the system call only requires listing the file descriptors of interest, while :c:func:`!select` builds a bitmap, turns on bits for the fds of interest, and then afterward the whole bitmap has to be -linearly scanned again. :c:func:`!select` is O(highest file descriptor), while -:c:func:`!poll` is O(number of file descriptors). +linearly scanned again. :c:func:`!select` is *O*\ (*highest file descriptor*), while +:c:func:`!poll` is *O*\ (*number of file descriptors*). .. 
method:: poll.register(fd[, eventmask]) diff --git a/Doc/library/stat.rst b/Doc/library/stat.rst index 77538514598a50..c941d5557e31b5 100644 --- a/Doc/library/stat.rst +++ b/Doc/library/stat.rst @@ -350,6 +350,12 @@ The following flags can also be used in the *mode* argument of :func:`os.chmod`: The following flags can be used in the *flags* argument of :func:`os.chflags`: +.. data:: UF_SETTABLE + + All user settable flags. + + .. versionadded: 3.13 + .. data:: UF_NODUMP Do not dump the file. @@ -374,10 +380,44 @@ The following flags can be used in the *flags* argument of :func:`os.chflags`: The file is stored compressed (macOS 10.6+). +.. data:: UF_TRACKED + + Used for handling document IDs (macOS) + + .. versionadded: 3.13 + +.. data:: UF_DATAVAULT + + The file needs an entitlement for reading or writing (macOS 10.13+) + + .. versionadded: 3.13 + .. data:: UF_HIDDEN The file should not be displayed in a GUI (macOS 10.5+). +.. data:: SF_SETTABLE + + All super-user changeable flags + + .. versionadded: 3.13 + +.. data:: SF_SUPPORTED + + All super-user supported flags + + .. availability:: macOS + + .. versionadded: 3.13 + +.. data:: SF_SYNTHETIC + + All super-user read-only synthetic flags + + .. availability:: macOS + + .. versionadded: 3.13 + .. data:: SF_ARCHIVED The file may be archived. @@ -390,6 +430,12 @@ The following flags can be used in the *flags* argument of :func:`os.chflags`: The file may only be appended to. +.. data:: SF_RESTRICTED + + The file needs an entitlement to write to (macOS 10.13+) + + .. versionadded: 3.13 + .. data:: SF_NOUNLINK The file may not be renamed or deleted. @@ -398,6 +444,18 @@ The following flags can be used in the *flags* argument of :func:`os.chflags`: The file is a snapshot file. +.. data:: SF_FIRMLINK + + The file is a firmlink (macOS 10.15+) + + .. versionadded: 3.13 + +.. data:: SF_DATALESS + + The file is a dataless object (macOS 10.15+) + + .. versionadded: 3.13 + See the \*BSD or macOS systems man page :manpage:`chflags(2)` for more information. On Windows, the following file attribute constants are available for use when diff --git a/Doc/library/statistics.rst b/Doc/library/statistics.rst index 5c8ad3a7dd7380..0417b3f38a9807 100644 --- a/Doc/library/statistics.rst +++ b/Doc/library/statistics.rst @@ -1026,19 +1026,16 @@ probability that the Python room will stay within its capacity limits? >>> round(NormalDist(mu=n*p, sigma=sqrt(n*p*q)).cdf(k + 0.5), 4) 0.8402 - >>> # Solution using the cumulative binomial distribution + >>> # Exact solution using the cumulative binomial distribution >>> from math import comb, fsum >>> round(fsum(comb(n, r) * p**r * q**(n-r) for r in range(k+1)), 4) 0.8402 >>> # Approximation using a simulation - >>> from random import seed, choices + >>> from random import seed, binomialvariate >>> seed(8675309) - >>> def trial(): - ... return choices(('Python', 'Ruby'), (p, q), k=n).count('Python') - ... - >>> mean(trial() <= k for i in range(10_000)) - 0.8398 + >>> mean(binomialvariate(n, p) <= k for i in range(10_000)) + 0.8406 Naive bayesian classifier @@ -1107,17 +1104,15 @@ from a fixed number of discrete samples. The basic idea is to smooth the data using `a kernel function such as a normal distribution, triangular distribution, or uniform distribution `_. -The degree of smoothing is controlled by a single -parameter, ``h``, representing the variance of the kernel function. +The degree of smoothing is controlled by a scaling parameter, ``h``, +which is called the *bandwidth*. .. 
testcode:: - import math - def kde_normal(sample, h): "Create a continuous probability density function from a sample." - # Smooth the sample with a normal distribution of variance h. - kernel_h = NormalDist(0.0, math.sqrt(h)).pdf + # Smooth the sample with a normal distribution kernel scaled by h. + kernel_h = NormalDist(0.0, h).pdf n = len(sample) def pdf(x): return sum(kernel_h(x - x_i) for x_i in sample) / n @@ -1131,7 +1126,7 @@ a probability density function estimated from a small sample: .. doctest:: >>> sample = [-2.1, -1.3, -0.4, 1.9, 5.1, 6.2] - >>> f_hat = kde_normal(sample, h=2.25) + >>> f_hat = kde_normal(sample, h=1.5) >>> xarr = [i/100 for i in range(-750, 1100)] >>> yarr = [f_hat(x) for x in xarr] diff --git a/Doc/library/string.rst b/Doc/library/string.rst index 262b785bbcbfc1..1867678b2077fc 100644 --- a/Doc/library/string.rst +++ b/Doc/library/string.rst @@ -208,13 +208,13 @@ The grammar for a replacement field is as follows: .. productionlist:: format-string replacement_field: "{" [`field_name`] ["!" `conversion`] [":" `format_spec`] "}" - field_name: arg_name ("." `attribute_name` | "[" `element_index` "]")* - arg_name: [`identifier` | `digit`+] - attribute_name: `identifier` - element_index: `digit`+ | `index_string` + field_name: `arg_name` ("." `attribute_name` | "[" `element_index` "]")* + arg_name: [`~python-grammar:identifier` | `~python-grammar:digit`+] + attribute_name: `~python-grammar:identifier` + element_index: `~python-grammar:digit`+ | `index_string` index_string: + conversion: "r" | "s" | "a" - format_spec: + format_spec: `format-spec:format_spec` In less formal terms, the replacement field can start with a *field_name* that specifies the object whose value is to be formatted and inserted @@ -316,9 +316,9 @@ The general form of a *standard format specifier* is: fill: align: "<" | ">" | "=" | "^" sign: "+" | "-" | " " - width: `digit`+ + width: `~python-grammar:digit`+ grouping_option: "_" | "," - precision: `digit`+ + precision: `~python-grammar:digit`+ type: "b" | "c" | "d" | "e" | "E" | "f" | "F" | "g" | "G" | "n" | "o" | "s" | "x" | "X" | "%" If a valid *align* value is specified, it can be preceded by a *fill* diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst index d6b892a7ed957d..91e9fcf0263d8d 100644 --- a/Doc/library/subprocess.rst +++ b/Doc/library/subprocess.rst @@ -1461,8 +1461,8 @@ Return code handling translates as follows:: print("There were some errors") -Replacing functions from the :mod:`popen2` module -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Replacing functions from the :mod:`!popen2` module +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. note:: diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst index 2426c37ccb1e0f..c371663934314a 100644 --- a/Doc/library/sys.rst +++ b/Doc/library/sys.rst @@ -1268,10 +1268,13 @@ always available. .. versionchanged:: 3.4 :term:`Module specs ` were introduced in Python 3.4, by - :pep:`451`. Earlier versions of Python looked for a method called - :meth:`!find_module`. - This is still called as a fallback if a :data:`meta_path` entry doesn't - have a :meth:`~importlib.abc.MetaPathFinder.find_spec` method. + :pep:`451`. + + .. versionchanged:: 3.12 + + Removed the fallback that looked for a :meth:`!find_module` method + if a :data:`meta_path` entry didn't have a + :meth:`~importlib.abc.MetaPathFinder.find_spec` method. .. 
data:: modules diff --git a/Doc/library/tarfile.rst b/Doc/library/tarfile.rst index 7ba29d4a40dedb..34a738a7f1c41f 100644 --- a/Doc/library/tarfile.rst +++ b/Doc/library/tarfile.rst @@ -837,6 +837,36 @@ A ``TarInfo`` object has the following public data attributes: :meth:`~TarFile.extractall`, causing extraction to skip applying this attribute. +.. attribute:: TarInfo.chksum + + Header checksum. + + +.. attribute:: TarInfo.devmajor + + Device major number. + + +.. attribute:: TarInfo.devminor + + Device minor number. + + +.. attribute:: TarInfo.offset + + The tar header starts here. + + +.. attribute:: TarInfo.offset_data + + The file's data starts here. + + +.. attribute:: TarInfo.sparse + + Sparse member information. + + .. attribute:: TarInfo.pax_headers :type: dict diff --git a/Doc/library/time.rst b/Doc/library/time.rst index 577600881676b3..2782a961363666 100644 --- a/Doc/library/time.rst +++ b/Doc/library/time.rst @@ -840,6 +840,15 @@ These constants are used as parameters for :func:`clock_getres` and .. versionadded:: 3.3 +.. data:: CLOCK_MONOTONIC_RAW_APPROX + + Similar to :data:`CLOCK_MONOTONIC_RAW`, but reads a value cached by + the system at context switch and hence has less accuracy. + + .. availability:: macOS >= 10.12. + + .. versionadded:: 3.13 + .. data:: CLOCK_PROCESS_CPUTIME_ID @@ -899,6 +908,15 @@ These constants are used as parameters for :func:`clock_getres` and .. versionadded:: 3.8 +.. data:: CLOCK_UPTIME_RAW_APPROX + + Like :data:`CLOCK_UPTIME_RAW`, but the value is cached by the system + at context switches and therefore has less accuracy. + + .. availability:: macOS >= 10.12. + + .. versionadded:: 3.13 + The following constant is the only parameter that can be sent to :func:`clock_settime`. diff --git a/Doc/library/tkinter.ttk.rst b/Doc/library/tkinter.ttk.rst index 1609dc2ce9218e..bd0d8b3799a0f1 100644 --- a/Doc/library/tkinter.ttk.rst +++ b/Doc/library/tkinter.ttk.rst @@ -1118,7 +1118,7 @@ ttk.Treeview as the item identifier; *iid* must not already exist in the tree. Otherwise, a new unique identifier is generated. - See `Item Options`_ for the list of available points. + See `Item Options`_ for the list of available options. .. method:: item(item, option=None, **kw) diff --git a/Doc/library/unittest.mock.rst b/Doc/library/unittest.mock.rst index f1cc482c5cfe2a..eca20b94ec8e74 100644 --- a/Doc/library/unittest.mock.rst +++ b/Doc/library/unittest.mock.rst @@ -2009,8 +2009,8 @@ Mocking Magic Methods ~~~~~~~~~~~~~~~~~~~~~ :class:`Mock` supports mocking the Python protocol methods, also known as -"magic methods". This allows mock objects to replace containers or other -objects that implement Python protocols. +:term:`"magic methods" `. This allows mock objects to replace +containers or other objects that implement Python protocols. Because magic methods are looked up differently from normal methods [#]_, this support has been specially implemented. This means that only specific magic @@ -2108,8 +2108,8 @@ There are two ``MagicMock`` variants: :class:`MagicMock` and :class:`NonCallable .. class:: MagicMock(*args, **kw) ``MagicMock`` is a subclass of :class:`Mock` with default implementations - of most of the magic methods. You can use ``MagicMock`` without having to - configure the magic methods yourself. + of most of the :term:`magic methods `. You can use + ``MagicMock`` without having to configure the magic methods yourself. The constructor parameters have the same meaning as for :class:`Mock`. 
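The :class:`MagicMock` hunk above documents the preconfigured magic methods only in prose; a minimal sketch of the behaviour it describes, not taken from the patch itself, could look like this::

    from unittest import mock

    # MagicMock ships default implementations of most magic methods, so a
    # mock can stand in for a container without extra configuration.
    m = mock.MagicMock()
    m.__len__.return_value = 3
    m.__iter__.return_value = iter(["a", "b", "c"])

    assert len(m) == 3
    assert list(m) == ["a", "b", "c"]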
diff --git a/Doc/library/unittest.rst b/Doc/library/unittest.rst index 70b4c84c05f818..491009769f5aa6 100644 --- a/Doc/library/unittest.rst +++ b/Doc/library/unittest.rst @@ -2290,7 +2290,7 @@ Loading and running tests The *testRunner* argument can either be a test runner class or an already created instance of it. By default ``main`` calls :func:`sys.exit` with an exit code indicating success (0) or failure (1) of the tests run. - An exit code of 5 indicates that no tests were run. + An exit code of 5 indicates that no tests were run or skipped. The *testLoader* argument has to be a :class:`TestLoader` instance, and defaults to :data:`defaultTestLoader`. diff --git a/Doc/library/urllib.request.rst b/Doc/library/urllib.request.rst index 0e18db73280a63..affdce144cd5fc 100644 --- a/Doc/library/urllib.request.rst +++ b/Doc/library/urllib.request.rst @@ -78,7 +78,7 @@ The :mod:`urllib.request` module defines the following functions: :class:`UnknownHandler` to ensure this never happens). In addition, if proxy settings are detected (for example, when a ``*_proxy`` - environment variable like :envvar:`http_proxy` is set), + environment variable like :envvar:`!http_proxy` is set), :class:`ProxyHandler` is default installed and makes sure the requests are handled through the proxy. @@ -113,7 +113,7 @@ The :mod:`urllib.request` module defines the following functions: .. versionchanged:: 3.10 HTTPS connection now send an ALPN extension with protocol indicator ``http/1.1`` when no *context* is given. Custom *context* should set - ALPN protocols with :meth:`~ssl.SSLContext.set_alpn_protocol`. + ALPN protocols with :meth:`~ssl.SSLContext.set_alpn_protocols`. .. versionchanged:: 3.13 Remove *cafile*, *capath* and *cadefault* parameters: use the *context* @@ -618,25 +618,25 @@ OpenerDirector Objects the actual HTTP code, for example :meth:`http_error_404` would handle HTTP 404 errors. - * :meth:`_open` --- signal that the handler knows how to open *protocol* + * :meth:`!_open` --- signal that the handler knows how to open *protocol* URLs. See |protocol_open|_ for more information. - * :meth:`http_error_\` --- signal that the handler knows how to handle HTTP + * :meth:`!http_error_\` --- signal that the handler knows how to handle HTTP errors with HTTP error code *type*. See |http_error_nnn|_ for more information. - * :meth:`_error` --- signal that the handler knows how to handle errors + * :meth:`!_error` --- signal that the handler knows how to handle errors from (non-\ ``http``) *protocol*. - * :meth:`_request` --- signal that the handler knows how to pre-process + * :meth:`!_request` --- signal that the handler knows how to pre-process *protocol* requests. See |protocol_request|_ for more information. - * :meth:`_response` --- signal that the handler knows how to + * :meth:`!_response` --- signal that the handler knows how to post-process *protocol* responses. See |protocol_response|_ for more information. @@ -663,7 +663,7 @@ OpenerDirector Objects Handle an error of the given protocol. This will call the registered error handlers for the given protocol with the given arguments (which are protocol specific). The HTTP protocol is a special case which uses the HTTP response - code to determine the specific error handler; refer to the :meth:`http_error_\` + code to determine the specific error handler; refer to the :meth:`!http_error_\` methods of the handler classes. Return values and exceptions raised are the same as those of :func:`urlopen`. 
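The handler-naming convention spelled out in the hunk above (``<protocol>_open``, ``<protocol>_request``, ``<protocol>_response`` and so on) can be illustrated with a short sketch; the ``LoggingHandler`` class and the header it adds are invented for the example and are not part of the patch::

    import urllib.request

    class LoggingHandler(urllib.request.BaseHandler):
        # Pre-processing hook: called as <protocol>_request for every
        # outgoing HTTP request.
        def http_request(self, req):
            req.add_header("X-Trace", "1")
            return req

        # Post-processing hook: called as <protocol>_response with the
        # request and the response object.
        def http_response(self, req, response):
            print(response.status, req.full_url)
            return response

    opener = urllib.request.build_opener(LoggingHandler)
    # opener.open("https://example.com/")  # needs network access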
@@ -673,25 +673,25 @@ OpenerDirector objects open URLs in three stages: The order in which these methods are called within each stage is determined by sorting the handler instances. -#. Every handler with a method named like :meth:`_request` has that +#. Every handler with a method named like :meth:`!_request` has that method called to pre-process the request. -#. Handlers with a method named like :meth:`_open` are called to handle +#. Handlers with a method named like :meth:`!_open` are called to handle the request. This stage ends when a handler either returns a non-\ :const:`None` value (ie. a response), or raises an exception (usually :exc:`~urllib.error.URLError`). Exceptions are allowed to propagate. In fact, the above algorithm is first tried for methods named - :meth:`default_open`. If all such methods return :const:`None`, the algorithm - is repeated for methods named like :meth:`_open`. If all such methods + :meth:`~BaseHandler.default_open`. If all such methods return :const:`None`, the algorithm + is repeated for methods named like :meth:`!_open`. If all such methods return :const:`None`, the algorithm is repeated for methods named - :meth:`unknown_open`. + :meth:`~BaseHandler.unknown_open`. Note that the implementation of these methods may involve calls of the parent :class:`OpenerDirector` instance's :meth:`~OpenerDirector.open` and :meth:`~OpenerDirector.error` methods. -#. Every handler with a method named like :meth:`_response` has that +#. Every handler with a method named like :meth:`!_response` has that method called to post-process the response. @@ -740,7 +740,7 @@ The following attribute and methods should only be used by classes derived from the return value of the :meth:`~OpenerDirector.open` method of :class:`OpenerDirector`, or ``None``. It should raise :exc:`~urllib.error.URLError`, unless a truly exceptional thing happens (for example, :exc:`MemoryError` should not be mapped to - :exc:`URLError`). + :exc:`~urllib.error.URLError`). This method will be called before any protocol-specific open method. @@ -753,7 +753,7 @@ The following attribute and methods should only be used by classes derived from define it if they want to handle URLs with the given protocol. This method, if defined, will be called by the parent :class:`OpenerDirector`. - Return values should be the same as for :meth:`default_open`. + Return values should be the same as for :meth:`~BaseHandler.default_open`. .. method:: BaseHandler.unknown_open(req) @@ -793,7 +793,7 @@ The following attribute and methods should only be used by classes derived from Subclasses should override this method to handle specific HTTP errors. Arguments, return values and exceptions raised should be the same as for - :meth:`http_error_default`. + :meth:`~BaseHandler.http_error_default`. .. _protocol_request: @@ -833,7 +833,7 @@ HTTPRedirectHandler Objects is the case, :exc:`~urllib.error.HTTPError` is raised. See :rfc:`2616` for details of the precise meanings of the various redirection codes. - An :class:`HTTPError` exception raised as a security consideration if the + An :exc:`~urllib.error.HTTPError` exception raised as a security consideration if the HTTPRedirectHandler is presented with a redirected URL which is not an HTTP, HTTPS or FTP URL. @@ -910,7 +910,7 @@ ProxyHandler Objects .. 
method:: ProxyHandler._open(request) :noindex: - The :class:`ProxyHandler` will have a method :meth:`_open` for every + The :class:`ProxyHandler` will have a method :meth:`!_open` for every *protocol* which has a proxy in the *proxies* dictionary given in the constructor. The method will modify requests to go through the proxy, by calling ``request.set_proxy()``, and call the next handler in the chain to @@ -1166,7 +1166,7 @@ HTTPErrorProcessor Objects For 200 error codes, the response object is returned immediately. For non-200 error codes, this simply passes the job on to the - :meth:`http_error_\` handler methods, via :meth:`OpenerDirector.error`. + :meth:`!http_error_\` handler methods, via :meth:`OpenerDirector.error`. Eventually, :class:`HTTPDefaultErrorHandler` will raise an :exc:`~urllib.error.HTTPError` if no other handler handles the error. @@ -1273,7 +1273,7 @@ Use of Basic HTTP Authentication:: :func:`build_opener` provides many handlers by default, including a :class:`ProxyHandler`. By default, :class:`ProxyHandler` uses the environment variables named ``_proxy``, where ```` is the URL scheme -involved. For example, the :envvar:`http_proxy` environment variable is read to +involved. For example, the :envvar:`!http_proxy` environment variable is read to obtain the HTTP proxy's URL. This example replaces the default :class:`ProxyHandler` with one that uses @@ -1368,7 +1368,7 @@ some point in the future. points to a local file, the object will not be copied unless filename is supplied. Return a tuple ``(filename, headers)`` where *filename* is the local file name under which the object can be found, and *headers* is whatever - the :meth:`info` method of the object returned by :func:`urlopen` returned (for + the :meth:`!info` method of the object returned by :func:`urlopen` returned (for a remote object). Exceptions are the same as for :func:`urlopen`. The second argument, if present, specifies the file location to copy to (if @@ -1393,7 +1393,7 @@ some point in the future. :mimetype:`application/x-www-form-urlencoded` format; see the :func:`urllib.parse.urlencode` function. - :func:`urlretrieve` will raise :exc:`ContentTooShortError` when it detects that + :func:`urlretrieve` will raise :exc:`~urllib.error.ContentTooShortError` when it detects that the amount of data available was less than the expected amount (which is the size reported by a *Content-Length* header). This can occur, for example, when the download is interrupted. @@ -1402,8 +1402,8 @@ some point in the future. urlretrieve reads more data, but if less data is available, it raises the exception. - You can still retrieve the downloaded data in this case, it is stored in the - :attr:`content` attribute of the exception instance. + You can still retrieve the downloaded data in this case, it is stored in the + :attr:`!content` attribute of the exception instance. If no *Content-Length* header was supplied, urlretrieve can not check the size of the data it has downloaded, and just returns it. In this case you just have @@ -1497,7 +1497,7 @@ some point in the future. authentication is performed. For the 30x response codes, recursion is bounded by the value of the *maxtries* attribute, which defaults to 10. - For all other response codes, the method :meth:`http_error_default` is called + For all other response codes, the method :meth:`~BaseHandler.http_error_default` is called which you can override in subclasses to handle the error appropriately. .. 
note:: diff --git a/Doc/library/zipfile.rst b/Doc/library/zipfile.rst index a77e49a7643826..c70f2ec561de8f 100644 --- a/Doc/library/zipfile.rst +++ b/Doc/library/zipfile.rst @@ -79,6 +79,11 @@ The module defines the following items: of the last modification to the file; the fields are described in section :ref:`zipinfo-objects`. + .. versionadded:: 3.13 + A public ``.compress_level`` attribute has been added to expose the + formerly protected ``._compresslevel``. The older protected name + continues to work as a property for backwards compatibility. + .. function:: is_zipfile(filename) Returns ``True`` if *filename* is a valid ZIP file based on its magic number, diff --git a/Doc/license.rst b/Doc/license.rst index 8aad93062a5a88..9fc0ff7161a591 100644 --- a/Doc/license.rst +++ b/Doc/license.rst @@ -100,7 +100,7 @@ PSF LICENSE AGREEMENT FOR PYTHON |release| analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python |release| alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of - copyright, i.e., "Copyright © 2001-2023 Python Software Foundation; All Rights + copyright, i.e., "Copyright © 2001-2024 Python Software Foundation; All Rights Reserved" are retained in Python |release| alone or in any derivative version prepared by Licensee. @@ -1066,3 +1066,32 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +asyncio +---------- + +Parts of the :mod:`asyncio` module are incorporated from +`uvloop 0.16 `_, +which is distributed under the MIT license:: + + Copyright (c) 2015-2021 MagicStack Inc. http://magic.io + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
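The zipfile hunk above introduces the public ``compress_level`` attribute; a small sketch of how the two spellings relate, assuming Python 3.13 and an invented archive name, might be::

    import zipfile

    with zipfile.ZipFile("example.zip", "w",
                         compression=zipfile.ZIP_DEFLATED,
                         compresslevel=9) as zf:
        zf.writestr("hello.txt", "hello world")
        info = zf.getinfo("hello.txt")
        # On 3.13 the public attribute and the old protected alias refer to
        # the same value; the alias is kept as a property for compatibility.
        print(info.compress_level, info._compresslevel)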
diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst index d611bda298b509..ca29a3712dfa38 100644 --- a/Doc/reference/datamodel.rst +++ b/Doc/reference/datamodel.rst @@ -1263,20 +1263,20 @@ Methods on code objects * ``start`` (an :class:`int`) represents the offset (inclusive) of the start of the :term:`bytecode` range - * ``end`` (an :class:`int`) represents the offset (inclusive) of the end of + * ``end`` (an :class:`int`) represents the offset (exclusive) of the end of the :term:`bytecode` range * ``lineno`` is an :class:`int` representing the line number of the :term:`bytecode` range, or ``None`` if the bytecodes in the given range have no line number - The items yielded generated will have the following properties: + The items yielded will have the following properties: * The first range yielded will have a ``start`` of 0. * The ``(start, end)`` ranges will be non-decreasing and consecutive. That is, for any pair of :class:`tuple`\s, the ``start`` of the second will be equal to the ``end`` of the first. * No range will be backwards: ``end >= start`` for all triples. - * The :class:`tuple` yielded will have ``end`` equal to the size of the + * The last :class:`tuple` yielded will have ``end`` equal to the size of the :term:`bytecode`. Zero-width ranges, where ``start == end``, are allowed. Zero-width ranges @@ -1876,7 +1876,7 @@ Basic customization This is intended to provide protection against a denial-of-service caused by carefully chosen inputs that exploit the worst case performance of a - dict insertion, O(n\ :sup:`2`) complexity. See + dict insertion, *O*\ (*n*\ :sup:`2`) complexity. See http://ocert.org/advisories/ocert-2011-003.html for details. Changing hash values affects the iteration order of sets. @@ -2308,7 +2308,7 @@ class defining the method. this method is implicitly converted to a class method. Keyword arguments which are given to a new class are passed to - the parent's class ``__init_subclass__``. For compatibility with + the parent class's ``__init_subclass__``. For compatibility with other classes using ``__init_subclass__``, one should take out the needed keyword arguments and pass the others over to the base class, as in:: diff --git a/Doc/reference/expressions.rst b/Doc/reference/expressions.rst index 3f6d5bfafee9d1..e543c1228d4d19 100644 --- a/Doc/reference/expressions.rst +++ b/Doc/reference/expressions.rst @@ -14,7 +14,7 @@ be used to describe syntax, not lexical analysis. When (one alternative of) a syntax rule has the form .. productionlist:: python-grammar - name: `othername` + name: othername and no semantics are given, the semantics of this form of ``name`` are the same as for ``othername``. @@ -422,7 +422,8 @@ Yield expressions .. 
productionlist:: python-grammar yield_atom: "(" `yield_expression` ")" - yield_expression: "yield" [`expression_list` | "from" `expression`] + yield_from: "yield" "from" `expression` + yield_expression: "yield" `expression_list` | `yield_from` The yield expression is used when defining a :term:`generator` function or an :term:`asynchronous generator` function and diff --git a/Doc/tools/.nitignore b/Doc/tools/.nitignore index 05df332fa7c9a8..5935d059877a91 100644 --- a/Doc/tools/.nitignore +++ b/Doc/tools/.nitignore @@ -24,7 +24,6 @@ Doc/glossary.rst Doc/howto/descriptor.rst Doc/howto/enum.rst Doc/howto/logging.rst -Doc/howto/urllib2.rst Doc/library/ast.rst Doc/library/asyncio-extending.rst Doc/library/asyncio-policy.rst @@ -49,7 +48,6 @@ Doc/library/faulthandler.rst Doc/library/fcntl.rst Doc/library/ftplib.rst Doc/library/functools.rst -Doc/library/http.client.rst Doc/library/http.cookiejar.rst Doc/library/http.server.rst Doc/library/importlib.rst @@ -59,7 +57,6 @@ Doc/library/logging.handlers.rst Doc/library/lzma.rst Doc/library/mmap.rst Doc/library/multiprocessing.rst -Doc/library/multiprocessing.shared_memory.rst Doc/library/optparse.rst Doc/library/os.rst Doc/library/pickle.rst @@ -67,7 +64,6 @@ Doc/library/pickletools.rst Doc/library/platform.rst Doc/library/plistlib.rst Doc/library/profile.rst -Doc/library/pyclbr.rst Doc/library/pydoc.rst Doc/library/pyexpat.rst Doc/library/readline.rst @@ -78,7 +74,6 @@ Doc/library/smtplib.rst Doc/library/socket.rst Doc/library/ssl.rst Doc/library/stdtypes.rst -Doc/library/string.rst Doc/library/subprocess.rst Doc/library/termios.rst Doc/library/test.rst diff --git a/Doc/tools/extensions/c_annotations.py b/Doc/tools/extensions/c_annotations.py index ba37634545c2cf..a8b6d8995e3f40 100644 --- a/Doc/tools/extensions/c_annotations.py +++ b/Doc/tools/extensions/c_annotations.py @@ -42,7 +42,7 @@ } -# Monkeypatch nodes.Node.findall for forwards compatability +# Monkeypatch nodes.Node.findall for forwards compatibility # This patch can be dropped when the minimum Sphinx version is 4.4.0 # or the minimum Docutils version is 0.18.1. if docutils.__version_info__ < (0, 18, 1): diff --git a/Doc/tools/extensions/pyspecific.py b/Doc/tools/extensions/pyspecific.py index 31c2544caf601c..cd441836f62bde 100644 --- a/Doc/tools/extensions/pyspecific.py +++ b/Doc/tools/extensions/pyspecific.py @@ -48,6 +48,11 @@ Body.enum.converters['lowerroman'] = \ Body.enum.converters['upperroman'] = lambda x: None +# monkey-patch the productionlist directive to allow hyphens in group names +# https://github.com/sphinx-doc/sphinx/issues/11854 +from sphinx.domains import std + +std.token_re = re.compile(r'`((~?[\w-]*:)?\w+)`') # Support for marking up and linking to bugs.python.org issues diff --git a/Doc/tutorial/appendix.rst b/Doc/tutorial/appendix.rst index 588591fcdb726f..4bea0d8a49ce20 100644 --- a/Doc/tutorial/appendix.rst +++ b/Doc/tutorial/appendix.rst @@ -20,7 +20,7 @@ In interactive mode, it then returns to the primary prompt; when input came from a file, it exits with a nonzero exit status after printing the stack trace. (Exceptions handled by an :keyword:`except` clause in a :keyword:`try` statement are not errors in this context.) Some errors are unconditionally fatal and -cause an exit with a nonzero exit; this applies to internal inconsistencies and +cause an exit with a nonzero exit status; this applies to internal inconsistencies and some cases of running out of memory. 
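As a quick illustration of the ``Doc/tools/extensions/pyspecific.py`` monkey-patch a few hunks above: a small sketch of what the widened ``token_re`` pattern accepts, assuming the only relevant change is permitting hyphens in the optional group-name prefix of a grammar token cross-reference::

    import re

    # The pattern installed by the patch above; "python-grammar:" style
    # prefixes now match because hyphens are allowed in the group name.
    token_re = re.compile(r'`((~?[\w-]*:)?\w+)`')

    for ref in ("`python-grammar:yield_atom`",
                "`~python-grammar:expression`",
                "`othername`"):
        m = token_re.match(ref)
        print(ref, "->", m.group(1) if m else "no match")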
All error messages are written to the standard error stream; normal output from executed commands is written to standard output. diff --git a/Doc/tutorial/classes.rst b/Doc/tutorial/classes.rst index 3bf138ca225ee5..d1c303ef037027 100644 --- a/Doc/tutorial/classes.rst +++ b/Doc/tutorial/classes.rst @@ -386,12 +386,11 @@ general, calling a method with a list of *n* arguments is equivalent to calling the corresponding function with an argument list that is created by inserting the method's instance object before the first argument. -If you still don't understand how methods work, a look at the implementation can -perhaps clarify matters. When a non-data attribute of an instance is -referenced, the instance's class is searched. If the name denotes a valid class -attribute that is a function object, a method object is created by packing -(pointers to) the instance object and the function object just found together in -an abstract object: this is the method object. When the method object is called +In general, methods work as follows. When a non-data attribute +of an instance is referenced, the instance's class is searched. +If the name denotes a valid class attribute that is a function object, +references to both the instance object and the function object +are packed into a method object. When the method object is called with an argument list, a new argument list is constructed from the instance object and the argument list, and the function object is called with this new argument list. diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst index e032a1971bc6d6..df8b07c6118599 100644 --- a/Doc/using/cmdline.rst +++ b/Doc/using/cmdline.rst @@ -369,7 +369,7 @@ Miscellaneous options Hash randomization is intended to provide protection against a denial-of-service caused by carefully chosen inputs that exploit the worst - case performance of a dict construction, O(n\ :sup:`2`) complexity. See + case performance of a dict construction, *O*\ (*n*\ :sup:`2`) complexity. See http://ocert.org/advisories/ocert-2011-003.html for details. :envvar:`PYTHONHASHSEED` allows you to set a fixed value for the hash @@ -1139,6 +1139,14 @@ conflict. .. versionadded:: 3.13 +.. envvar:: PYTHON_HISTORY + + This environment variable can be used to set the location of a + ``.python_history`` file (by default, it is ``.python_history`` in the + user's home directory). + + .. versionadded:: 3.13 + Debug-mode variables ~~~~~~~~~~~~~~~~~~~~ diff --git a/Doc/using/win_install_freethreaded.png b/Doc/using/win_install_freethreaded.png new file mode 100644 index 00000000000000..0aa01c1df6e051 Binary files /dev/null and b/Doc/using/win_install_freethreaded.png differ diff --git a/Doc/using/windows.rst b/Doc/using/windows.rst index 598bf3ca9bcc04..2a0e7b4b06f586 100644 --- a/Doc/using/windows.rst +++ b/Doc/using/windows.rst @@ -307,6 +307,46 @@ settings and replace any that have been removed or modified. "Uninstall" will remove Python entirely, with the exception of the :ref:`launcher`, which has its own entry in Programs and Features. +.. _install-freethreaded-windows: + +Installing Free-threaded Binaries +--------------------------------- + +.. versionadded:: 3.13 (Experimental) + +.. note:: + + Everything described in this section is considered experimental, + and should be expected to change in future releases. + +To install pre-built binaries with free-threading enabled (see :pep:`703`), you +should select "Customize installation". The second page of options includes the +"Download free-threaded binaries" checkbox. + +.. 
image:: win_install_freethreaded.png + +Selecting this option will download and install additional binaries to the same +location as the main Python install. The main executable is called +``python3.13t.exe``, and other binaries either receive a ``t`` suffix or a full +ABI suffix. Python source files and bundled third-party dependencies are shared +with the main install. + +The free-threaded version is registered as a regular Python install with the +tag ``3.13t`` (with a ``-32`` or ``-arm64`` suffix as normal for those +platforms). This allows tools to discover it, and for the :ref:`launcher` to +support ``py.exe -3.13t``. Note that the launcher will interpret ``py.exe -3`` +(or a ``python3`` shebang) as "the latest 3.x install", which will prefer the +free-threaded binaries over the regular ones, while ``py.exe -3.13`` will not. +If you use the short style of option, you may prefer to not install the +free-threaded binaries at this time. + +To specify the install option at the command line, use +``Include_freethreaded=1``. See :ref:`install-layout-option` for instructions on +pre-emptively downloading the additional binaries for offline install. The +options to include debug symbols and binaries also apply to the free-threaded +builds. + +Free-threaded binaries are also available :ref:`on nuget.org `. .. _windows-store: @@ -450,9 +490,29 @@ automatically use the headers and import libraries in your build. The package information pages on nuget.org are `www.nuget.org/packages/python `_ -for the 64-bit version and `www.nuget.org/packages/pythonx86 -`_ for the 32-bit version. +for the 64-bit version, `www.nuget.org/packages/pythonx86 +`_ for the 32-bit version, and +`www.nuget.org/packages/pythonarm64 +`_ for the ARM64 version + +Free-threaded packages +---------------------- + +.. versionadded:: 3.13 (Experimental) + +.. note:: + Everything described in this section is considered experimental, + and should be expected to change in future releases. + +Packages containing free-threaded binaries are named +`python-freethreaded `_ +for the 64-bit version, `pythonx86-freethreaded +`_ for the 32-bit +version, and `pythonarm64-freethreaded +`_ for the ARM64 +version. These packages contain both the ``python3.13t.exe`` and +``python.exe`` entry points, both of which run free threaded. .. _windows-embeddable: diff --git a/Doc/whatsnew/2.0.rst b/Doc/whatsnew/2.0.rst index b0e495b0651789..f4a9d23699de53 100644 --- a/Doc/whatsnew/2.0.rst +++ b/Doc/whatsnew/2.0.rst @@ -130,7 +130,7 @@ Guidelines": Read the rest of :pep:`1` for the details of the PEP editorial process, style, and format. PEPs are kept in the Python CVS tree on SourceForge, though they're not part of the Python 2.0 distribution, and are also available in HTML form from -https://peps.python.org/. As of September 2000, there are 25 PEPS, ranging +https://peps.python.org/. As of September 2000, there are 25 PEPs, ranging from :pep:`201`, "Lockstep Iteration", to PEP 225, "Elementwise/Objectwise Operators". @@ -1095,8 +1095,8 @@ module. GNU gettext message catalog library. (Integrated by Barry Warsaw, from separate contributions by Martin von Löwis, Peter Funk, and James Henstridge.) -* :mod:`linuxaudiodev`: Support for the :file:`/dev/audio` device on Linux, a - twin to the existing :mod:`sunaudiodev` module. (Contributed by Peter Bosch, +* :mod:`!linuxaudiodev`: Support for the :file:`/dev/audio` device on Linux, a + twin to the existing :mod:`!sunaudiodev` module. (Contributed by Peter Bosch, with fixes by Jeremy Hylton.) 
* :mod:`mmap`: An interface to memory-mapped files on both Windows and Unix. A @@ -1139,8 +1139,8 @@ module. Unix, not to be confused with :program:`gzip`\ -format files (which are supported by the :mod:`gzip` module) (Contributed by James C. Ahlstrom.) -* :mod:`imputil`: A module that provides a simpler way for writing customized - import hooks, in comparison to the existing :mod:`ihooks` module. (Implemented +* :mod:`!imputil`: A module that provides a simpler way for writing customized + import hooks, in comparison to the existing :mod:`!ihooks` module. (Implemented by Greg Stein, with much discussion on python-dev along the way.) .. ====================================================================== diff --git a/Doc/whatsnew/2.2.rst b/Doc/whatsnew/2.2.rst index 6dfe79cef00987..6efc23a82de923 100644 --- a/Doc/whatsnew/2.2.rst +++ b/Doc/whatsnew/2.2.rst @@ -143,8 +143,8 @@ To make the set of types complete, new type objects such as :func:`dict` and return fcntl.lockf(self.fileno(), operation, length, start, whence) -The now-obsolete :mod:`posixfile` module contained a class that emulated all of -a file object's methods and also added a :meth:`lock` method, but this class +The now-obsolete :mod:`!posixfile` module contained a class that emulated all of +a file object's methods and also added a :meth:`!lock` method, but this class couldn't be passed to internal functions that expected a built-in file, something which is possible with our new :class:`LockableFile`. diff --git a/Doc/whatsnew/2.3.rst b/Doc/whatsnew/2.3.rst index 8ebcbfaf248551..37cd41add8132c 100644 --- a/Doc/whatsnew/2.3.rst +++ b/Doc/whatsnew/2.3.rst @@ -1196,7 +1196,7 @@ Optimizations * Multiplication of large long integers is now much faster thanks to an implementation of Karatsuba multiplication, an algorithm that scales better than - the O(n\*n) required for the grade-school multiplication algorithm. (Original + the *O*\ (*n*\ :sup:`2`) required for the grade-school multiplication algorithm. (Original patch by Christopher A. Craig, and significantly reworked by Tim Peters.) * The ``SET_LINENO`` opcode is now gone. This may provide a small speed @@ -1308,7 +1308,7 @@ complete list of changes, or look through the CVS logs for all the details. partially sorted order such that, for every index *k*, ``heap[k] <= heap[2*k+1]`` and ``heap[k] <= heap[2*k+2]``. This makes it quick to remove the smallest item, and inserting a new item while maintaining the heap property is - O(lg n). (See https://xlinux.nist.gov/dads//HTML/priorityque.html for more + *O*\ (log *n*). (See https://xlinux.nist.gov/dads//HTML/priorityque.html for more information about the priority queue data structure.) The :mod:`heapq` module provides :func:`~heapq.heappush` and :func:`~heapq.heappop` functions diff --git a/Doc/whatsnew/2.4.rst b/Doc/whatsnew/2.4.rst index e9a59f4a62551a..15d4003622c506 100644 --- a/Doc/whatsnew/2.4.rst +++ b/Doc/whatsnew/2.4.rst @@ -387,13 +387,13 @@ The standard library provides a number of ways to execute a subprocess, offering different features and different levels of complexity. ``os.system(command)`` is easy to use, but slow (it runs a shell process which executes the command) and dangerous (you have to be careful about escaping -the shell's metacharacters). The :mod:`popen2` module offers classes that can +the shell's metacharacters). The :mod:`!popen2` module offers classes that can capture standard output and standard error from the subprocess, but the naming is confusing. 
The :mod:`subprocess` module cleans this up, providing a unified interface that offers all the features you might need. -Instead of :mod:`popen2`'s collection of classes, :mod:`subprocess` contains a -single class called :class:`Popen` whose constructor supports a number of +Instead of :mod:`!popen2`'s collection of classes, :mod:`subprocess` contains a +single class called :class:`subprocess.Popen` whose constructor supports a number of different keyword arguments. :: class Popen(args, bufsize=0, executable=None, @@ -1529,7 +1529,7 @@ code: will now always be unequal, and relative comparisons (``<``, ``>``) will raise a :exc:`TypeError`. -* :func:`dircache.listdir` now passes exceptions to the caller instead of +* :func:`!dircache.listdir` now passes exceptions to the caller instead of returning empty lists. * :func:`LexicalHandler.startDTD` used to receive the public and system IDs in diff --git a/Doc/whatsnew/2.5.rst b/Doc/whatsnew/2.5.rst index 627c918dd6d8b4..f45d70ea5a19a0 100644 --- a/Doc/whatsnew/2.5.rst +++ b/Doc/whatsnew/2.5.rst @@ -1680,7 +1680,7 @@ The ctypes package The :mod:`ctypes` package, written by Thomas Heller, has been added to the standard library. :mod:`ctypes` lets you call arbitrary functions in shared -libraries or DLLs. Long-time users may remember the :mod:`dl` module, which +libraries or DLLs. Long-time users may remember the :mod:`!dl` module, which provides functions for loading shared libraries and calling functions in them. The :mod:`ctypes` package is much fancier. @@ -1877,12 +1877,12 @@ The hashlib package ------------------- A new :mod:`hashlib` module, written by Gregory P. Smith, has been added to -replace the :mod:`md5` and :mod:`sha` modules. :mod:`hashlib` adds support for +replace the :mod:`!md5` and :mod:`!sha` modules. :mod:`hashlib` adds support for additional secure hashes (SHA-224, SHA-256, SHA-384, and SHA-512). When available, the module uses OpenSSL for fast platform optimized implementations of algorithms. -The old :mod:`md5` and :mod:`sha` modules still exist as wrappers around hashlib +The old :mod:`!md5` and :mod:`!sha` modules still exist as wrappers around hashlib to preserve backwards compatibility. The new module's interface is very close to that of the old modules, but not identical. The most significant difference is that the constructor functions for creating new hashing objects are named diff --git a/Doc/whatsnew/2.6.rst b/Doc/whatsnew/2.6.rst index d947f61b50cfe0..c6bab93b7efdda 100644 --- a/Doc/whatsnew/2.6.rst +++ b/Doc/whatsnew/2.6.rst @@ -4,8 +4,6 @@ What's New in Python 2.6 **************************** -.. XXX add trademark info for Apple, Microsoft, SourceForge. - :Author: A.M. Kuchling (amk at amk.ca) .. $Id$ @@ -128,7 +126,7 @@ and to C extension code as :c:data:`!Py_Py3kWarningFlag`. The 3\ *xxx* series of PEPs, which contains proposals for Python 3.0. :pep:`3000` describes the development process for Python 3.0. Start with :pep:`3100` that describes the general goals for Python - 3.0, and then explore the higher-numbered PEPS that propose + 3.0, and then explore the higher-numbered PEPs that propose specific features. @@ -1051,8 +1049,6 @@ the :mod:`io` module: sockets, but Python 2.6 hasn't restructured its file and socket objects in this way. - .. XXX should 2.6 register them in io.py? - * :class:`BufferedIOBase` is an abstract base class that buffers data in memory to reduce the number of system calls used, making I/O processing more efficient. 
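The whatsnew/2.4 hunk above describes :mod:`subprocess` as the unified replacement for the old ``popen2`` classes; here is a minimal present-day sketch of that interface (``text=True`` assumes a modern Python 3 interpreter rather than anything in the 2.4 text)::

    import subprocess
    import sys

    # Capture stdout and stderr of a child process through Popen keyword
    # arguments, which is what the old popen2 classes were used for.
    proc = subprocess.Popen(
        [sys.executable, "-c", "print('hello')"],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        text=True,
    )
    out, err = proc.communicate()
    print(out.strip(), proc.returncode)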
@@ -1133,8 +1129,6 @@ while an external caller could be modifying the contents, so there's a corresponding ``PyBuffer_Release(Py_buffer *view)`` to indicate that the external caller is done. -.. XXX PyObject_GetBuffer not documented in c-api - The *flags* argument to :c:func:`PyObject_GetBuffer` specifies constraints upon the memory returned. Some examples are: @@ -2916,8 +2910,8 @@ Deprecations and Removals * Changes to the :class:`Exception` interface as dictated by :pep:`352` continue to be made. For 2.6, - the :attr:`message` attribute is being deprecated in favor of the - :attr:`args` attribute. + the :attr:`!message` attribute is being deprecated in favor of the + :attr:`~BaseException.args` attribute. * (3.0-warning mode) Python 3.0 will feature a reorganized standard library that will drop many outdated modules and rename others. @@ -2925,51 +2919,51 @@ Deprecations and Removals when they are imported. The list of deprecated modules is: - :mod:`audiodev`, - :mod:`bgenlocations`, - :mod:`buildtools`, - :mod:`bundlebuilder`, - :mod:`Canvas`, - :mod:`compiler`, - :mod:`dircache`, - :mod:`dl`, - :mod:`fpformat`, - :mod:`gensuitemodule`, - :mod:`ihooks`, - :mod:`imageop`, - :mod:`imgfile`, - :mod:`linuxaudiodev`, - :mod:`mhlib`, - :mod:`mimetools`, - :mod:`multifile`, - :mod:`new`, - :mod:`pure`, - :mod:`statvfs`, - :mod:`sunaudiodev`, - :mod:`test.testall`, and - :mod:`toaiff`. - -* The :mod:`gopherlib` module has been removed. - -* The :mod:`MimeWriter` module and :mod:`mimify` module + :mod:`!audiodev`, + :mod:`!bgenlocations`, + :mod:`!buildtools`, + :mod:`!bundlebuilder`, + :mod:`!Canvas`, + :mod:`!compiler`, + :mod:`!dircache`, + :mod:`!dl`, + :mod:`!fpformat`, + :mod:`!gensuitemodule`, + :mod:`!ihooks`, + :mod:`!imageop`, + :mod:`!imgfile`, + :mod:`!linuxaudiodev`, + :mod:`!mhlib`, + :mod:`!mimetools`, + :mod:`!multifile`, + :mod:`!new`, + :mod:`!pure`, + :mod:`!statvfs`, + :mod:`!sunaudiodev`, + :mod:`!test.testall`, and + :mod:`!toaiff`. + +* The :mod:`!gopherlib` module has been removed. + +* The :mod:`!MimeWriter` module and :mod:`!mimify` module have been deprecated; use the :mod:`email` package instead. -* The :mod:`md5` module has been deprecated; use the :mod:`hashlib` module +* The :mod:`!md5` module has been deprecated; use the :mod:`hashlib` module instead. -* The :mod:`posixfile` module has been deprecated; :func:`fcntl.lockf` +* The :mod:`!posixfile` module has been deprecated; :func:`fcntl.lockf` provides better locking. -* The :mod:`popen2` module has been deprecated; use the :mod:`subprocess` +* The :mod:`!popen2` module has been deprecated; use the :mod:`subprocess` module. -* The :mod:`rgbimg` module has been removed. +* The :mod:`!rgbimg` module has been removed. -* The :mod:`sets` module has been deprecated; it's better to +* The :mod:`!sets` module has been deprecated; it's better to use the built-in :class:`set` and :class:`frozenset` types. -* The :mod:`sha` module has been deprecated; use the :mod:`hashlib` module +* The :mod:`!sha` module has been deprecated; use the :mod:`hashlib` module instead. @@ -3110,8 +3104,8 @@ Port-Specific Changes: Windows * The :mod:`msvcrt` module now supports both the normal and wide char variants of the console I/O - API. The :func:`getwch` function reads a keypress and returns a Unicode - value, as does the :func:`getwche` function. The :func:`putwch` function + API. The :func:`~msvcrt.getwch` function reads a keypress and returns a Unicode + value, as does the :func:`~msvcrt.getwche` function. 
The :func:`~msvcrt.putwch` function takes a Unicode character and writes it to the console. (Contributed by Christian Heimes.) @@ -3120,24 +3114,24 @@ Port-Specific Changes: Windows directory path. (Contributed by Josiah Carlson; :issue:`957650`.) * The :mod:`socket` module's socket objects now have an - :meth:`ioctl` method that provides a limited interface to the + :meth:`~socket.socket.ioctl` method that provides a limited interface to the :c:func:`WSAIoctl` system interface. -* The :mod:`_winreg` module now has a function, - :func:`ExpandEnvironmentStrings`, +* The :mod:`_winreg ` module now has a function, + :func:`~winreg.ExpandEnvironmentStrings`, that expands environment variable references such as ``%NAME%`` in an input string. The handle objects provided by this module now support the context protocol, so they can be used in :keyword:`with` statements. (Contributed by Christian Heimes.) - :mod:`_winreg` also has better support for x64 systems, - exposing the :func:`DisableReflectionKey`, :func:`EnableReflectionKey`, - and :func:`QueryReflectionKey` functions, which enable and disable + :mod:`_winreg ` also has better support for x64 systems, + exposing the :func:`~winreg.DisableReflectionKey`, :func:`~winreg.EnableReflectionKey`, + and :func:`~winreg.QueryReflectionKey` functions, which enable and disable registry reflection for 32-bit processes running on 64-bit systems. (:issue:`1753245`) -* The :mod:`!msilib` module's :class:`Record` object - gained :meth:`GetInteger` and :meth:`GetString` methods that +* The :mod:`!msilib` module's :class:`!Record` object + gained :meth:`!GetInteger` and :meth:`!GetString` methods that return field values as an integer or a string. (Contributed by Floris Bruynooghe; :issue:`2125`.) @@ -3151,49 +3145,49 @@ Port-Specific Changes: Mac OS X :option:`!--with-framework-name=` option to the :program:`configure` script. -* The :mod:`macfs` module has been removed. This in turn required the - :func:`macostools.touched` function to be removed because it depended on the - :mod:`macfs` module. (:issue:`1490190`) +* The :mod:`!macfs` module has been removed. This in turn required the + :func:`!macostools.touched` function to be removed because it depended on the + :mod:`!macfs` module. (:issue:`1490190`) * Many other Mac OS modules have been deprecated and will be removed in Python 3.0: - :mod:`_builtinSuites`, - :mod:`aepack`, - :mod:`aetools`, - :mod:`aetypes`, - :mod:`applesingle`, - :mod:`appletrawmain`, - :mod:`appletrunner`, - :mod:`argvemulator`, - :mod:`Audio_mac`, - :mod:`autoGIL`, - :mod:`Carbon`, - :mod:`cfmfile`, - :mod:`CodeWarrior`, - :mod:`ColorPicker`, - :mod:`EasyDialogs`, - :mod:`Explorer`, - :mod:`Finder`, - :mod:`FrameWork`, - :mod:`findertools`, - :mod:`ic`, - :mod:`icglue`, - :mod:`icopen`, - :mod:`macerrors`, - :mod:`MacOS`, - :mod:`macfs`, - :mod:`macostools`, - :mod:`macresource`, - :mod:`MiniAEFrame`, - :mod:`Nav`, - :mod:`Netscape`, - :mod:`OSATerminology`, - :mod:`pimp`, - :mod:`PixMapWrapper`, - :mod:`StdSuites`, - :mod:`SystemEvents`, - :mod:`Terminal`, and - :mod:`terminalcommand`. 
+ :mod:`!_builtinSuites`, + :mod:`!aepack`, + :mod:`!aetools`, + :mod:`!aetypes`, + :mod:`!applesingle`, + :mod:`!appletrawmain`, + :mod:`!appletrunner`, + :mod:`!argvemulator`, + :mod:`!Audio_mac`, + :mod:`!autoGIL`, + :mod:`!Carbon`, + :mod:`!cfmfile`, + :mod:`!CodeWarrior`, + :mod:`!ColorPicker`, + :mod:`!EasyDialogs`, + :mod:`!Explorer`, + :mod:`!Finder`, + :mod:`!FrameWork`, + :mod:`!findertools`, + :mod:`!ic`, + :mod:`!icglue`, + :mod:`!icopen`, + :mod:`!macerrors`, + :mod:`!MacOS`, + :mod:`!macfs`, + :mod:`!macostools`, + :mod:`!macresource`, + :mod:`!MiniAEFrame`, + :mod:`!Nav`, + :mod:`!Netscape`, + :mod:`!OSATerminology`, + :mod:`!pimp`, + :mod:`!PixMapWrapper`, + :mod:`!StdSuites`, + :mod:`!SystemEvents`, + :mod:`!Terminal`, and + :mod:`!terminalcommand`. .. ====================================================================== @@ -3202,29 +3196,29 @@ Port-Specific Changes: IRIX A number of old IRIX-specific modules were deprecated and will be removed in Python 3.0: -:mod:`al` and :mod:`AL`, -:mod:`cd`, -:mod:`cddb`, -:mod:`cdplayer`, -:mod:`CL` and :mod:`cl`, -:mod:`DEVICE`, -:mod:`ERRNO`, -:mod:`FILE`, -:mod:`FL` and :mod:`fl`, -:mod:`flp`, -:mod:`fm`, -:mod:`GET`, -:mod:`GLWS`, -:mod:`GL` and :mod:`gl`, -:mod:`IN`, -:mod:`IOCTL`, -:mod:`jpeg`, -:mod:`panelparser`, -:mod:`readcd`, -:mod:`SV` and :mod:`sv`, -:mod:`torgb`, -:mod:`videoreader`, and -:mod:`WAIT`. +:mod:`!al` and :mod:`!AL`, +:mod:`!cd`, +:mod:`!cddb`, +:mod:`!cdplayer`, +:mod:`!CL` and :mod:`!cl`, +:mod:`!DEVICE`, +:mod:`!ERRNO`, +:mod:`!FILE`, +:mod:`!FL` and :mod:`!fl`, +:mod:`!flp`, +:mod:`!fm`, +:mod:`!GET`, +:mod:`!GLWS`, +:mod:`!GL` and :mod:`!gl`, +:mod:`!IN`, +:mod:`!IOCTL`, +:mod:`!jpeg`, +:mod:`!panelparser`, +:mod:`!readcd`, +:mod:`!SV` and :mod:`!sv`, +:mod:`!torgb`, +:mod:`!videoreader`, and +:mod:`!WAIT`. .. ====================================================================== diff --git a/Doc/whatsnew/2.7.rst b/Doc/whatsnew/2.7.rst index 81fe132d50e1f1..241d58720399af 100644 --- a/Doc/whatsnew/2.7.rst +++ b/Doc/whatsnew/2.7.rst @@ -282,7 +282,7 @@ How does the :class:`~collections.OrderedDict` work? It maintains a doubly linked list of keys, appending new keys to the list as they're inserted. A secondary dictionary maps keys to their corresponding list node, so deletion doesn't have to traverse the entire linked list and therefore -remains O(1). +remains *O*\ (1). The standard library now supports use of ordered dictionaries in several modules. @@ -1315,9 +1315,9 @@ changes, or look through the Subversion logs for all the details. giving the source address that will be used for the connection. (Contributed by Eldon Ziegler; :issue:`3972`.) -* The :mod:`ihooks` module now supports relative imports. Note that - :mod:`ihooks` is an older module for customizing imports, - superseded by the :mod:`imputil` module added in Python 2.0. +* The :mod:`!ihooks` module now supports relative imports. Note that + :mod:`!ihooks` is an older module for customizing imports, + superseded by the :mod:`!imputil` module added in Python 2.0. (Relative import support added by Neil Schemenauer.) .. revision 75423 diff --git a/Doc/whatsnew/3.0.rst b/Doc/whatsnew/3.0.rst index 89e12062abaddd..1df5209f22c6a5 100644 --- a/Doc/whatsnew/3.0.rst +++ b/Doc/whatsnew/3.0.rst @@ -555,8 +555,8 @@ very extensive changes to the standard library. :pep:`3108` is the reference for the major changes to the library. Here's a capsule review: -* Many old modules were removed. 
Some, like :mod:`gopherlib` (no - longer used) and :mod:`md5` (replaced by :mod:`hashlib`), were +* Many old modules were removed. Some, like :mod:`!gopherlib` (no + longer used) and :mod:`!md5` (replaced by :mod:`hashlib`), were already deprecated by :pep:`4`. Others were removed as a result of the removal of support for various platforms such as Irix, BeOS and Mac OS 9 (see :pep:`11`). Some modules were also selected for @@ -626,7 +626,7 @@ review: Some other changes to standard library modules, not covered by :pep:`3108`: -* Killed :mod:`sets`. Use the built-in :func:`set` class. +* Killed :mod:`!sets`. Use the built-in :func:`set` class. * Cleanup of the :mod:`sys` module: removed :func:`sys.exitfunc`, :func:`sys.exc_clear`, :data:`sys.exc_type`, :data:`sys.exc_value`, @@ -648,7 +648,7 @@ Some other changes to standard library modules, not covered by * Cleanup of the :mod:`random` module: removed the :func:`jumpahead` API. -* The :mod:`new` module is gone. +* The :mod:`!new` module is gone. * The functions :func:`os.tmpnam`, :func:`os.tempnam` and :func:`os.tmpfile` have been removed in favor of the :mod:`tempfile` diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index cd86c82caffc56..7dc06e9af694d9 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -1316,8 +1316,8 @@ pyclbr ------ Add an ``end_lineno`` attribute to the ``Function`` and ``Class`` -objects in the tree returned by :func:`pyclbr.readline` and -:func:`pyclbr.readline_ex`. It matches the existing (start) ``lineno``. +objects in the tree returned by :func:`pyclbr.readmodule` and +:func:`pyclbr.readmodule_ex`. It matches the existing (start) ``lineno``. (Contributed by Aviral Srivastava in :issue:`38307`.) shelve diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst index 9a2ccf7ebc6a68..77b12f9284ba0d 100644 --- a/Doc/whatsnew/3.12.rst +++ b/Doc/whatsnew/3.12.rst @@ -1374,6 +1374,18 @@ APIs: * :meth:`!unittest.TestProgram.usageExit` (:gh:`67048`) * :class:`!webbrowser.MacOSX` (:gh:`86421`) * :class:`classmethod` descriptor chaining (:gh:`89519`) +* :mod:`importlib.resources` deprecated methods: + + * ``contents()`` + * ``is_resource()`` + * ``open_binary()`` + * ``open_text()`` + * ``path()`` + * ``read_binary()`` + * ``read_text()`` + + Use :func:`importlib.resources.files()` instead. Refer to `importlib-resources: Migrating from Legacy + `_ (:gh:`106531`) Pending Removal in Python 3.14 ------------------------------ diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst index 3ab6d1ddc6ef21..40f0cd37fe9318 100644 --- a/Doc/whatsnew/3.13.rst +++ b/Doc/whatsnew/3.13.rst @@ -132,6 +132,11 @@ Other Language Changes equivalent of the :option:`-X frozen_modules <-X>` command-line option. (Contributed by Yilei Yang in :gh:`111374`.) +* The new :envvar:`PYTHON_HISTORY` environment variable can be used to change + the location of a ``.python_history`` file. + (Contributed by Levi Sabah, Zackery Spytz and Hugo van Kemenade in + :gh:`73965`.) + New Modules =========== @@ -242,6 +247,14 @@ ipaddress * Add the :attr:`ipaddress.IPv4Address.ipv6_mapped` property, which returns the IPv4-mapped IPv6 address. (Contributed by Charles Machalow in :gh:`109466`.) +marshal +------- + +* Add the *allow_code* parameter in module functions. + Passing ``allow_code=False`` prevents serialization and de-serialization of + code objects which are incompatible between Python versions. + (Contributed by Serhiy Storchaka in :gh:`113626`.) 
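A short sketch of the *allow_code* parameter described in the marshal entry above, assuming Python 3.13 where passing ``allow_code=False`` makes code objects unserializable::

    import marshal

    code = compile("print('hi')", "<example>", "exec")

    data = marshal.dumps(code)            # code objects allowed by default
    assert marshal.loads(data) is not None

    try:
        marshal.dumps(code, allow_code=False)
    except ValueError as exc:             # expected: code objects are refused
        print("rejected:", exc)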
+ mmap ---- @@ -249,6 +262,9 @@ mmap that can be used where it requires a file-like object with seekable and the :meth:`~mmap.mmap.seek` method return the new absolute position. (Contributed by Donghee Na and Sylvie Liberman in :gh:`111835`.) +* :class:`mmap.mmap` now has a *trackfd* parameter on Unix; if it is ``False``, + the file descriptor specified by *fileno* will not be duplicated. + (Contributed by Zackery Spytz and Petr Viktorin in :gh:`78502`.) opcode ------ @@ -302,6 +318,13 @@ os :c:func:`!posix_spawn_file_actions_addclosefrom_np`. (Contributed by Jakub Kulik in :gh:`113117`.) +os.path +------- + +* On Windows, :func:`os.path.isabs` no longer considers paths starting with + exactly one (back)slash to be absolute. + (Contributed by Barney Gale and Jon Foster in :gh:`44626`.) + pathlib ------- diff --git a/Doc/whatsnew/3.3.rst b/Doc/whatsnew/3.3.rst index 760324ae66a3af..29b4034e32821c 100644 --- a/Doc/whatsnew/3.3.rst +++ b/Doc/whatsnew/3.3.rst @@ -174,7 +174,7 @@ Features b or c are now hashable. (Contributed by Antoine Pitrou in :issue:`13411`.) * Arbitrary slicing of any 1-D arrays type is supported. For example, it - is now possible to reverse a memoryview in O(1) by using a negative step. + is now possible to reverse a memoryview in *O*\ (1) by using a negative step. API changes ----------- diff --git a/Grammar/python.gram b/Grammar/python.gram index c9269b058bc7f6..174b4dbb6f7842 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -968,6 +968,8 @@ for_if_clause[comprehension_ty]: CHECK_VERSION(comprehension_ty, 6, "Async comprehensions are", _PyAST_comprehension(a, b, c, 1, p->arena)) } | 'for' a=star_targets 'in' ~ b=disjunction c[asdl_expr_seq*]=('if' z=disjunction { z })* { _PyAST_comprehension(a, b, c, 0, p->arena) } + | 'async'? 'for' (bitwise_or (',' bitwise_or)* [',']) !'in' { + RAISE_SYNTAX_ERROR("'in' expected after for-loop variables") } | invalid_for_target listcomp[expr_ty]: diff --git a/Include/cpython/code.h b/Include/cpython/code.h index cf715c55a2b3b8..1f47d99fb60443 100644 --- a/Include/cpython/code.h +++ b/Include/cpython/code.h @@ -208,6 +208,8 @@ struct PyCodeObject _PyCode_DEF(1); #define CO_FUTURE_GENERATOR_STOP 0x800000 #define CO_FUTURE_ANNOTATIONS 0x1000000 +#define CO_NO_MONITORING_EVENTS 0x2000000 + /* This should be defined if a future statement modifies the syntax. For example, when a keyword is added. */ diff --git a/Include/cpython/object.h b/Include/cpython/object.h index d6482f4bc689a1..c93931634fee05 100644 --- a/Include/cpython/object.h +++ b/Include/cpython/object.h @@ -4,6 +4,7 @@ PyAPI_FUNC(void) _Py_NewReference(PyObject *op); PyAPI_FUNC(void) _Py_NewReferenceNoTotal(PyObject *op); +PyAPI_FUNC(void) _Py_ResurrectReference(PyObject *op); #ifdef Py_REF_DEBUG /* These are useful as debugging aids when chasing down refleaks. */ diff --git a/Include/cpython/optimizer.h b/Include/cpython/optimizer.h index d521eac79d1b97..96e829f8fbe97d 100644 --- a/Include/cpython/optimizer.h +++ b/Include/cpython/optimizer.h @@ -29,12 +29,17 @@ typedef struct { _PyExecutorLinkListNode links; } _PyVMData; +typedef struct { + uint16_t opcode; + uint16_t oparg; + uint32_t target; + uint64_t operand; // A cache entry +} _PyUOpInstruction; + typedef struct _PyExecutorObject { PyObject_VAR_HEAD - /* WARNING: execute consumes a reference to self. This is necessary to allow executors to tail call into each other. 
*/ - _Py_CODEUNIT *(*execute)(struct _PyExecutorObject *self, struct _PyInterpreterFrame *frame, PyObject **stack_pointer); _PyVMData vm_data; /* Used by the VM, but opaque to the optimizer */ - /* Data needed by the executor goes here, but is opaque to the VM */ + _PyUOpInstruction trace[1]; } _PyExecutorObject; typedef struct _PyOptimizerObject _PyOptimizerObject; @@ -52,6 +57,12 @@ typedef struct _PyOptimizerObject { /* Data needed by the optimizer goes here, but is opaque to the VM */ } _PyOptimizerObject; +/** Test support **/ +typedef struct { + _PyOptimizerObject base; + int64_t count; +} _PyCounterOptimizerObject; + PyAPI_FUNC(int) PyUnstable_Replace_Executor(PyCodeObject *code, _Py_CODEUNIT *instr, _PyExecutorObject *executor); PyAPI_FUNC(void) PyUnstable_SetOptimizer(_PyOptimizerObject* optimizer); @@ -61,7 +72,7 @@ PyAPI_FUNC(_PyOptimizerObject *) PyUnstable_GetOptimizer(void); PyAPI_FUNC(_PyExecutorObject *) PyUnstable_GetExecutor(PyCodeObject *code, int offset); int -_PyOptimizer_BackEdge(struct _PyInterpreterFrame *frame, _Py_CODEUNIT *src, _Py_CODEUNIT *dest, PyObject **stack_pointer); +_PyOptimizer_Optimize(struct _PyInterpreterFrame *frame, _Py_CODEUNIT *start, PyObject **stack_pointer); extern _PyOptimizerObject _PyOptimizer_Default; diff --git a/Include/cpython/pystate.h b/Include/cpython/pystate.h index ed7dd829d4b6f0..10913943c1140d 100644 --- a/Include/cpython/pystate.h +++ b/Include/cpython/pystate.h @@ -224,10 +224,14 @@ struct _ts { // recursions, sometimes less. 500 is a more conservative limit. # define Py_C_RECURSION_LIMIT 500 #elif defined(__s390x__) -# define Py_C_RECURSION_LIMIT 1200 +# define Py_C_RECURSION_LIMIT 800 +#elif defined(_WIN32) +# define Py_C_RECURSION_LIMIT 4000 +#elif defined(_Py_ADDRESS_SANITIZER) +# define Py_C_RECURSION_LIMIT 4000 #else // This value is duplicated in Lib/test/support/__init__.py -# define Py_C_RECURSION_LIMIT 8000 +# define Py_C_RECURSION_LIMIT 10000 #endif diff --git a/Include/internal/mimalloc/mimalloc/internal.h b/Include/internal/mimalloc/mimalloc/internal.h index cb6e211de5bb63..887bf26c956982 100644 --- a/Include/internal/mimalloc/mimalloc/internal.h +++ b/Include/internal/mimalloc/mimalloc/internal.h @@ -23,23 +23,6 @@ terms of the MIT license. A copy of the license can be found in the file #define mi_trace_message(...) 
#endif -#define MI_CACHE_LINE 64 -#if defined(_MSC_VER) -#pragma warning(disable:4127) // suppress constant conditional warning (due to MI_SECURE paths) -#pragma warning(disable:26812) // unscoped enum warning -#define mi_decl_noinline __declspec(noinline) -#define mi_decl_thread __declspec(thread) -#define mi_decl_cache_align __declspec(align(MI_CACHE_LINE)) -#elif (defined(__GNUC__) && (__GNUC__ >= 3)) || defined(__clang__) // includes clang and icc -#define mi_decl_noinline __attribute__((noinline)) -#define mi_decl_thread __thread -#define mi_decl_cache_align __attribute__((aligned(MI_CACHE_LINE))) -#else -#define mi_decl_noinline -#define mi_decl_thread __thread // hope for the best :-) -#define mi_decl_cache_align -#endif - #if defined(__EMSCRIPTEN__) && !defined(__wasi__) #define __wasi__ #endif @@ -131,6 +114,7 @@ void _mi_segment_map_allocated_at(const mi_segment_t* segment); void _mi_segment_map_freed_at(const mi_segment_t* segment); // "segment.c" +extern mi_abandoned_pool_t _mi_abandoned_default; // global abandoned pool mi_page_t* _mi_segment_page_alloc(mi_heap_t* heap, size_t block_size, size_t page_alignment, mi_segments_tld_t* tld, mi_os_tld_t* os_tld); void _mi_segment_page_free(mi_page_t* page, bool force, mi_segments_tld_t* tld); void _mi_segment_page_abandon(mi_page_t* page, mi_segments_tld_t* tld); @@ -145,7 +129,7 @@ void _mi_segment_huge_page_reset(mi_segment_t* segment, mi_page_t* page, m uint8_t* _mi_segment_page_start(const mi_segment_t* segment, const mi_page_t* page, size_t* page_size); // page start for any page void _mi_abandoned_reclaim_all(mi_heap_t* heap, mi_segments_tld_t* tld); -void _mi_abandoned_await_readers(void); +void _mi_abandoned_await_readers(mi_abandoned_pool_t *pool); void _mi_abandoned_collect(mi_heap_t* heap, bool force, mi_segments_tld_t* tld); // "page.c" @@ -171,7 +155,7 @@ size_t _mi_bin_size(uint8_t bin); // for stats uint8_t _mi_bin(size_t size); // for stats // "heap.c" -void _mi_heap_init_ex(mi_heap_t* heap, mi_tld_t* tld, mi_arena_id_t arena_id); +void _mi_heap_init_ex(mi_heap_t* heap, mi_tld_t* tld, mi_arena_id_t arena_id, bool no_reclaim, uint8_t tag); void _mi_heap_destroy_pages(mi_heap_t* heap); void _mi_heap_collect_abandon(mi_heap_t* heap); void _mi_heap_set_default_direct(mi_heap_t* heap); diff --git a/Include/internal/mimalloc/mimalloc/types.h b/Include/internal/mimalloc/mimalloc/types.h index 7616f37e4b978f..b8cae24507fc5e 100644 --- a/Include/internal/mimalloc/mimalloc/types.h +++ b/Include/internal/mimalloc/mimalloc/types.h @@ -33,6 +33,23 @@ terms of the MIT license. 
A copy of the license can be found in the file #define MI_MAX_ALIGN_SIZE 16 // sizeof(max_align_t) #endif +#define MI_CACHE_LINE 64 +#if defined(_MSC_VER) +#pragma warning(disable:4127) // suppress constant conditional warning (due to MI_SECURE paths) +#pragma warning(disable:26812) // unscoped enum warning +#define mi_decl_noinline __declspec(noinline) +#define mi_decl_thread __declspec(thread) +#define mi_decl_cache_align __declspec(align(MI_CACHE_LINE)) +#elif (defined(__GNUC__) && (__GNUC__ >= 3)) || defined(__clang__) // includes clang and icc +#define mi_decl_noinline __attribute__((noinline)) +#define mi_decl_thread __thread +#define mi_decl_cache_align __attribute__((aligned(MI_CACHE_LINE))) +#else +#define mi_decl_noinline +#define mi_decl_thread __thread // hope for the best :-) +#define mi_decl_cache_align +#endif + // ------------------------------------------------------ // Variants // ------------------------------------------------------ @@ -294,6 +311,7 @@ typedef struct mi_page_s { uint32_t slice_offset; // distance from the actual page data slice (0 if a page) uint8_t is_committed : 1; // `true` if the page virtual memory is committed uint8_t is_zero_init : 1; // `true` if the page was initially zero initialized + uint8_t tag : 4; // tag from the owning heap // layout like this to optimize access in `mi_malloc` and `mi_free` uint16_t capacity; // number of blocks committed, must be the first field, see `segment.c:page_clear` @@ -445,6 +463,28 @@ typedef struct mi_segment_s { mi_slice_t slices[MI_SLICES_PER_SEGMENT+1]; // one more for huge blocks with large alignment } mi_segment_t; +typedef uintptr_t mi_tagged_segment_t; + +// Segments unowned by any thread are put in a shared pool +typedef struct mi_abandoned_pool_s { + // This is a list of visited abandoned pages that were full at the time. + // this list migrates to `abandoned` when that becomes NULL. The use of + // this list reduces contention and the rate at which segments are visited. + mi_decl_cache_align _Atomic(mi_segment_t*) abandoned_visited; // = NULL + + // The abandoned page list (tagged as it supports pop) + mi_decl_cache_align _Atomic(mi_tagged_segment_t) abandoned; // = NULL + + // Maintain these for debug purposes (these counts may be a bit off) + mi_decl_cache_align _Atomic(size_t) abandoned_count; + mi_decl_cache_align _Atomic(size_t) abandoned_visited_count; + + // We also maintain a count of current readers of the abandoned list + // in order to prevent resetting/decommitting segment memory if it might + // still be read. + mi_decl_cache_align _Atomic(size_t) abandoned_readers; // = 0 +} mi_abandoned_pool_t; + // ------------------------------------------------------ // Heaps @@ -512,6 +552,7 @@ struct mi_heap_s { size_t page_retired_max; // largest retired index into the `pages` array. 
mi_heap_t* next; // list of heaps per thread bool no_reclaim; // `true` if this heap should not reclaim abandoned pages + uint8_t tag; // custom identifier for this heap }; @@ -654,6 +695,7 @@ typedef struct mi_segments_tld_s { size_t peak_size; // peak size of all segments mi_stats_t* stats; // points to tld stats mi_os_tld_t* os; // points to os stats + mi_abandoned_pool_t* abandoned; // pool of abandoned segments } mi_segments_tld_t; // Thread local data diff --git a/Include/internal/pycore_context.h b/Include/internal/pycore_context.h index ec884e9e0f55a9..3284efba2b6f4c 100644 --- a/Include/internal/pycore_context.h +++ b/Include/internal/pycore_context.h @@ -5,6 +5,7 @@ # error "this header requires Py_BUILD_CORE define" #endif +#include "pycore_freelist.h" // _PyFreeListState #include "pycore_hamt.h" // PyHamtObject @@ -13,7 +14,7 @@ extern PyTypeObject _PyContextTokenMissing_Type; /* runtime lifecycle */ PyStatus _PyContext_Init(PyInterpreterState *); -void _PyContext_Fini(PyInterpreterState *); +void _PyContext_Fini(_PyFreeListState *); /* other API */ @@ -22,23 +23,6 @@ typedef struct { PyObject_HEAD } _PyContextTokenMissing; -#ifndef WITH_FREELISTS -// without freelists -# define PyContext_MAXFREELIST 0 -#endif - -#ifndef PyContext_MAXFREELIST -# define PyContext_MAXFREELIST 255 -#endif - -struct _Py_context_state { -#if PyContext_MAXFREELIST > 0 - // List of free PyContext objects - PyContext *freelist; - int numfree; -#endif -}; - struct _pycontextobject { PyObject_HEAD PyContext *ctx_prev; diff --git a/Include/internal/pycore_floatobject.h b/Include/internal/pycore_floatobject.h index 4e5474841bc25d..038578e1f9680a 100644 --- a/Include/internal/pycore_floatobject.h +++ b/Include/internal/pycore_floatobject.h @@ -8,14 +8,14 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif - +#include "pycore_freelist.h" // _PyFreeListState #include "pycore_unicodeobject.h" // _PyUnicodeWriter /* runtime lifecycle */ extern void _PyFloat_InitState(PyInterpreterState *); extern PyStatus _PyFloat_InitTypes(PyInterpreterState *); -extern void _PyFloat_Fini(PyInterpreterState *); +extern void _PyFloat_Fini(_PyFreeListState *); extern void _PyFloat_FiniType(PyInterpreterState *); @@ -33,24 +33,7 @@ struct _Py_float_runtime_state { }; -#ifndef WITH_FREELISTS -// without freelists -# define PyFloat_MAXFREELIST 0 -#endif - -#ifndef PyFloat_MAXFREELIST -# define PyFloat_MAXFREELIST 100 -#endif -struct _Py_float_state { -#if PyFloat_MAXFREELIST > 0 - /* Special free list - free_list is a singly-linked list of available PyFloatObjects, - linked via abuse of their ob_type members. 
*/ - int numfree; - PyFloatObject *free_list; -#endif -}; void _PyFloat_ExactDealloc(PyObject *op); diff --git a/Include/internal/pycore_freelist.h b/Include/internal/pycore_freelist.h new file mode 100644 index 00000000000000..4ab93ee2bf6c32 --- /dev/null +++ b/Include/internal/pycore_freelist.h @@ -0,0 +1,108 @@ +#ifndef Py_INTERNAL_FREELIST_H +#define Py_INTERNAL_FREELIST_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + +// PyTuple_MAXSAVESIZE - largest tuple to save on free list +// PyTuple_MAXFREELIST - maximum number of tuples of each size to save + +#ifdef WITH_FREELISTS +// with freelists +# define PyTuple_MAXSAVESIZE 20 +# define PyTuple_NFREELISTS PyTuple_MAXSAVESIZE +# define PyTuple_MAXFREELIST 2000 +# define PyList_MAXFREELIST 80 +# define PyFloat_MAXFREELIST 100 +# define PyContext_MAXFREELIST 255 +# define _PyAsyncGen_MAXFREELIST 80 +#else +# define PyTuple_NFREELISTS 0 +# define PyTuple_MAXFREELIST 0 +# define PyList_MAXFREELIST 0 +# define PyFloat_MAXFREELIST 0 +# define PyContext_MAXFREELIST 0 +# define _PyAsyncGen_MAXFREELIST 0 +#endif + +struct _Py_list_state { +#ifdef WITH_FREELISTS + PyListObject *free_list[PyList_MAXFREELIST]; + int numfree; +#endif +}; + +struct _Py_tuple_state { +#if WITH_FREELISTS + /* There is one freelist for each size from 1 to PyTuple_MAXSAVESIZE. + The empty tuple is handled separately. + + Each tuple stored in the array is the head of the linked list + (and the next available tuple) for that size. The actual tuple + object is used as the linked list node, with its first item + (ob_item[0]) pointing to the next node (i.e. the previous head). + Each linked list is initially NULL. */ + PyTupleObject *free_list[PyTuple_NFREELISTS]; + int numfree[PyTuple_NFREELISTS]; +#else + char _unused; // Empty structs are not allowed. +#endif +}; + +struct _Py_float_state { +#ifdef WITH_FREELISTS + /* Special free list + free_list is a singly-linked list of available PyFloatObjects, + linked via abuse of their ob_type members. */ + int numfree; + PyFloatObject *free_list; +#endif +}; + +struct _Py_slice_state { +#ifdef WITH_FREELISTS + /* Using a cache is very effective since typically only a single slice is + created and then deleted again. */ + PySliceObject *slice_cache; +#endif +}; + +struct _Py_context_state { +#ifdef WITH_FREELISTS + // List of free PyContext objects + PyContext *freelist; + int numfree; +#endif +}; + +struct _Py_async_gen_state { +#ifdef WITH_FREELISTS + /* Freelists boost performance 6-10%; they also reduce memory + fragmentation, as _PyAsyncGenWrappedValue and PyAsyncGenASend + are short-living objects that are instantiated for every + __anext__() call. 
*/ + struct _PyAsyncGenWrappedValue* value_freelist[_PyAsyncGen_MAXFREELIST]; + int value_numfree; + + struct PyAsyncGenASend* asend_freelist[_PyAsyncGen_MAXFREELIST]; + int asend_numfree; +#endif +}; + +typedef struct _Py_freelist_state { + struct _Py_float_state float_state; + struct _Py_tuple_state tuple_state; + struct _Py_list_state list_state; + struct _Py_slice_state slice_state; + struct _Py_context_state context_state; + struct _Py_async_gen_state async_gen_state; +} _PyFreeListState; + +#ifdef __cplusplus +} +#endif +#endif /* !Py_INTERNAL_FREELIST_H */ diff --git a/Include/internal/pycore_gc.h b/Include/internal/pycore_gc.h index 2d33aa76d78229..d53de97709a782 100644 --- a/Include/internal/pycore_gc.h +++ b/Include/internal/pycore_gc.h @@ -8,6 +8,8 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif +#include "pycore_freelist.h" // _PyFreeListState + /* GC information is stored BEFORE the object structure. */ typedef struct { // Pointer to next object in the list. @@ -64,6 +66,26 @@ static inline int _PyObject_GC_MAY_BE_TRACKED(PyObject *obj) { #define _PyGC_PREV_SHIFT (2) #define _PyGC_PREV_MASK (((uintptr_t) -1) << _PyGC_PREV_SHIFT) +/* set for debugging information */ +#define _PyGC_DEBUG_STATS (1<<0) /* print collection statistics */ +#define _PyGC_DEBUG_COLLECTABLE (1<<1) /* print collectable objects */ +#define _PyGC_DEBUG_UNCOLLECTABLE (1<<2) /* print uncollectable objects */ +#define _PyGC_DEBUG_SAVEALL (1<<5) /* save all garbage in gc.garbage */ +#define _PyGC_DEBUG_LEAK _PyGC_DEBUG_COLLECTABLE | \ + _PyGC_DEBUG_UNCOLLECTABLE | \ + _PyGC_DEBUG_SAVEALL + +typedef enum { + // GC was triggered by heap allocation + _Py_GC_REASON_HEAP, + + // GC was called during shutdown + _Py_GC_REASON_SHUTDOWN, + + // GC was called by gc.collect() or PyGC_Collect() + _Py_GC_REASON_MANUAL +} _PyGC_Reason; + // Lowest bit of _gc_next is used for flags only in GC. // But it is always 0 for normal code. static inline PyGC_Head* _PyGCHead_NEXT(PyGC_Head *gc) { @@ -100,6 +122,10 @@ static inline void _PyGC_SET_FINALIZED(PyObject *op) { PyGC_Head *gc = _Py_AS_GC(op); _PyGCHead_SET_FINALIZED(gc); } +static inline void _PyGC_CLEAR_FINALIZED(PyObject *op) { + PyGC_Head *gc = _Py_AS_GC(op); + gc->_gc_prev &= ~_PyGC_PREV_MASK_FINALIZED; +} /* GC runtime state */ @@ -203,16 +229,30 @@ struct _gc_runtime_state { extern void _PyGC_InitState(struct _gc_runtime_state *); +extern Py_ssize_t _PyGC_Collect(PyThreadState *tstate, int generation, + _PyGC_Reason reason); extern Py_ssize_t _PyGC_CollectNoFail(PyThreadState *tstate); +/* Freeze objects tracked by the GC and ignore them in future collections. 
*/ +extern void _PyGC_Freeze(PyInterpreterState *interp); +/* Unfreezes objects placing them in the oldest generation */ +extern void _PyGC_Unfreeze(PyInterpreterState *interp); +/* Number of frozen objects */ +extern Py_ssize_t _PyGC_GetFreezeCount(PyInterpreterState *interp); + +extern PyObject *_PyGC_GetObjects(PyInterpreterState *interp, Py_ssize_t generation); +extern PyObject *_PyGC_GetReferrers(PyInterpreterState *interp, PyObject *objs); // Functions to clear types free lists -extern void _PyTuple_ClearFreeList(PyInterpreterState *interp); -extern void _PyFloat_ClearFreeList(PyInterpreterState *interp); -extern void _PyList_ClearFreeList(PyInterpreterState *interp); +extern void _PyGC_ClearAllFreeLists(PyInterpreterState *interp); +extern void _Py_ClearFreeLists(_PyFreeListState *state, int is_finalization); +extern void _PyTuple_ClearFreeList(_PyFreeListState *state, int is_finalization); +extern void _PyFloat_ClearFreeList(_PyFreeListState *state, int is_finalization); +extern void _PyList_ClearFreeList(_PyFreeListState *state, int is_finalization); +extern void _PySlice_ClearCache(_PyFreeListState *state); extern void _PyDict_ClearFreeList(PyInterpreterState *interp); -extern void _PyAsyncGen_ClearFreeLists(PyInterpreterState *interp); -extern void _PyContext_ClearFreeList(PyInterpreterState *interp); +extern void _PyAsyncGen_ClearFreeLists(_PyFreeListState *state, int is_finalization); +extern void _PyContext_ClearFreeList(_PyFreeListState *state, int is_finalization); extern void _Py_ScheduleGC(PyInterpreterState *interp); extern void _Py_RunGC(PyThreadState *tstate); diff --git a/Include/internal/pycore_genobject.h b/Include/internal/pycore_genobject.h index cf58a2750a31f9..5ad63658051e86 100644 --- a/Include/internal/pycore_genobject.h +++ b/Include/internal/pycore_genobject.h @@ -8,6 +8,8 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif +#include "pycore_freelist.h" + extern PyObject *_PyGen_yf(PyGenObject *); extern void _PyGen_Finalize(PyObject *self); @@ -26,34 +28,7 @@ extern PyTypeObject _PyAsyncGenAThrow_Type; /* runtime lifecycle */ -extern void _PyAsyncGen_Fini(PyInterpreterState *); - - -/* other API */ - -#ifndef WITH_FREELISTS -// without freelists -# define _PyAsyncGen_MAXFREELIST 0 -#endif - -#ifndef _PyAsyncGen_MAXFREELIST -# define _PyAsyncGen_MAXFREELIST 80 -#endif - -struct _Py_async_gen_state { -#if _PyAsyncGen_MAXFREELIST > 0 - /* Freelists boost performance 6-10%; they also reduce memory - fragmentation, as _PyAsyncGenWrappedValue and PyAsyncGenASend - are short-living objects that are instantiated for every - __anext__() call. 
*/ - struct _PyAsyncGenWrappedValue* value_freelist[_PyAsyncGen_MAXFREELIST]; - int value_numfree; - - struct PyAsyncGenASend* asend_freelist[_PyAsyncGen_MAXFREELIST]; - int asend_numfree; -#endif -}; - +extern void _PyAsyncGen_Fini(_PyFreeListState *); #ifdef __cplusplus } diff --git a/Include/internal/pycore_global_objects_fini_generated.h b/Include/internal/pycore_global_objects_fini_generated.h index 89ec8cbbbcd649..0a24b127192c9b 100644 --- a/Include/internal/pycore_global_objects_fini_generated.h +++ b/Include/internal/pycore_global_objects_fini_generated.h @@ -787,6 +787,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(after_in_child)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(after_in_parent)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(aggregate_class)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(allow_code)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(append)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(argdefs)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(arguments)); @@ -1015,6 +1016,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(kw)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(kw1)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(kw2)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(kwdefaults)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(lambda)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(last)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(last_exc)); @@ -1164,7 +1166,6 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(seek)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(seekable)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(selectors)); - _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(self)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(send)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(sep)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(sequence)); diff --git a/Include/internal/pycore_global_strings.h b/Include/internal/pycore_global_strings.h index 62c3ee3ae2a0bd..efb659c5806e6e 100644 --- a/Include/internal/pycore_global_strings.h +++ b/Include/internal/pycore_global_strings.h @@ -276,6 +276,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(after_in_child) STRUCT_FOR_ID(after_in_parent) STRUCT_FOR_ID(aggregate_class) + STRUCT_FOR_ID(allow_code) STRUCT_FOR_ID(append) STRUCT_FOR_ID(argdefs) STRUCT_FOR_ID(arguments) @@ -504,6 +505,7 @@ struct _Py_global_strings { STRUCT_FOR_ID(kw) STRUCT_FOR_ID(kw1) STRUCT_FOR_ID(kw2) + STRUCT_FOR_ID(kwdefaults) STRUCT_FOR_ID(lambda) STRUCT_FOR_ID(last) STRUCT_FOR_ID(last_exc) @@ -653,7 +655,6 @@ struct _Py_global_strings { STRUCT_FOR_ID(seek) STRUCT_FOR_ID(seekable) STRUCT_FOR_ID(selectors) - STRUCT_FOR_ID(self) STRUCT_FOR_ID(send) STRUCT_FOR_ID(sep) STRUCT_FOR_ID(sequence) diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 04d7a6a615e370..922c84543a1393 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -27,6 +27,7 @@ extern "C" { #include "pycore_import.h" // struct _import_state #include "pycore_instruments.h" // _PY_MONITORING_EVENTS #include "pycore_list.h" // struct _Py_list_state +#include "pycore_mimalloc.h" // struct _mimalloc_interp_state #include "pycore_object_state.h" // struct _py_object_state #include "pycore_obmalloc.h" // struct _obmalloc_state #include "pycore_tstate.h" // _PyThreadStateImpl 
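For orientation on the ``pycore_gc.h`` changes a few hunks above: the internal ``_PyGC_Freeze``/``_PyGC_Unfreeze``/``_PyGC_GetFreezeCount`` declarations back the long-standing public :mod:`gc` functions. A sketch using only that public API (not the internal declarations)::

    import gc

    class Node:
        pass

    objs = [Node() for _ in range(10)]

    gc.freeze()                      # move tracked objects to the permanent set
    print(gc.get_freeze_count())     # number of frozen objects
    gc.unfreeze()                    # return them to the oldest generation
    gc.collect()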
@@ -166,6 +167,10 @@ struct _is { struct _warnings_runtime_state warnings; struct atexit_state atexit; +#if defined(Py_GIL_DISABLED) + struct _mimalloc_interp_state mimalloc; +#endif + struct _obmalloc_state obmalloc; PyObject *audit_hooks; @@ -174,21 +179,17 @@ struct _is { // One bit is set for each non-NULL entry in code_watchers uint8_t active_code_watchers; +#if !defined(Py_GIL_DISABLED) + struct _Py_freelist_state freelist_state; +#endif struct _py_object_state object_state; struct _Py_unicode_state unicode; - struct _Py_float_state float_state; struct _Py_long_state long_state; struct _dtoa_state dtoa; struct _py_func_state func_state; - /* Using a cache is very effective since typically only a single slice is - created and then deleted again. */ - PySliceObject *slice_cache; struct _Py_tuple_state tuple; - struct _Py_list_state list; struct _Py_dict_state dict_state; - struct _Py_async_gen_state async_gen; - struct _Py_context_state context; struct _Py_exc_state exc_state; struct ast_state ast; diff --git a/Include/internal/pycore_list.h b/Include/internal/pycore_list.h index 55d67b32bc8a63..6c29d882335512 100644 --- a/Include/internal/pycore_list.h +++ b/Include/internal/pycore_list.h @@ -8,6 +8,7 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif +#include "pycore_freelist.h" // _PyFreeListState extern PyObject* _PyList_Extend(PyListObject *, PyObject *); extern void _PyList_DebugMallocStats(FILE *out); @@ -15,28 +16,9 @@ extern void _PyList_DebugMallocStats(FILE *out); /* runtime lifecycle */ -extern void _PyList_Fini(PyInterpreterState *); +extern void _PyList_Fini(_PyFreeListState *); -/* other API */ - -#ifndef WITH_FREELISTS -// without freelists -# define PyList_MAXFREELIST 0 -#endif - -/* Empty list reuse scheme to save calls to malloc and free */ -#ifndef PyList_MAXFREELIST -# define PyList_MAXFREELIST 80 -#endif - -struct _Py_list_state { -#if PyList_MAXFREELIST > 0 - PyListObject *free_list[PyList_MAXFREELIST]; - int numfree; -#endif -}; - #define _PyList_ITEMS(op) _Py_RVALUE(_PyList_CAST(op)->ob_item) extern int diff --git a/Include/internal/pycore_mimalloc.h b/Include/internal/pycore_mimalloc.h index adebb559dae658..1e7ed5a4ca62e2 100644 --- a/Include/internal/pycore_mimalloc.h +++ b/Include/internal/pycore_mimalloc.h @@ -35,6 +35,12 @@ typedef enum { #endif #ifdef Py_GIL_DISABLED +struct _mimalloc_interp_state { + // When exiting, threads place any segments with live blocks in this + // shared pool for other threads to claim and reuse. 
+ mi_abandoned_pool_t abandoned_pool; +}; + struct _mimalloc_thread_state { mi_heap_t *current_object_heap; mi_heap_t heaps[_Py_MIMALLOC_HEAP_COUNT]; diff --git a/Include/internal/pycore_opcode_metadata.h b/Include/internal/pycore_opcode_metadata.h index 7d39e4bc03099c..fbb448f663369a 100644 --- a/Include/internal/pycore_opcode_metadata.h +++ b/Include/internal/pycore_opcode_metadata.h @@ -909,6 +909,8 @@ enum InstructionFormat { #define HAS_DEOPT_FLAG (128) #define HAS_ERROR_FLAG (256) #define HAS_ESCAPES_FLAG (512) +#define HAS_PURE_FLAG (1024) +#define HAS_PASSTHROUGH_FLAG (2048) #define OPCODE_HAS_ARG(OP) (_PyOpcode_opcode_metadata[OP].flags & (HAS_ARG_FLAG)) #define OPCODE_HAS_CONST(OP) (_PyOpcode_opcode_metadata[OP].flags & (HAS_CONST_FLAG)) #define OPCODE_HAS_NAME(OP) (_PyOpcode_opcode_metadata[OP].flags & (HAS_NAME_FLAG)) @@ -919,6 +921,8 @@ enum InstructionFormat { #define OPCODE_HAS_DEOPT(OP) (_PyOpcode_opcode_metadata[OP].flags & (HAS_DEOPT_FLAG)) #define OPCODE_HAS_ERROR(OP) (_PyOpcode_opcode_metadata[OP].flags & (HAS_ERROR_FLAG)) #define OPCODE_HAS_ESCAPES(OP) (_PyOpcode_opcode_metadata[OP].flags & (HAS_ESCAPES_FLAG)) +#define OPCODE_HAS_PURE(OP) (_PyOpcode_opcode_metadata[OP].flags & (HAS_PURE_FLAG)) +#define OPCODE_HAS_PASSTHROUGH(OP) (_PyOpcode_opcode_metadata[OP].flags & (HAS_PASSTHROUGH_FLAG)) #define OPARG_FULL 0 #define OPARG_CACHE_1 1 @@ -996,7 +1000,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[268] = { [COMPARE_OP_STR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, [CONTAINS_OP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [CONVERT_VALUE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG }, - [COPY] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, + [COPY] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_PURE_FLAG }, [COPY_FREE_VARS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, [DELETE_ATTR] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [DELETE_DEREF] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, @@ -1007,9 +1011,9 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[268] = { [DICT_MERGE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [DICT_UPDATE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [END_ASYNC_FOR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [END_FOR] = { true, INSTR_FMT_IX, 0 }, - [END_SEND] = { true, INSTR_FMT_IX, 0 }, - [ENTER_EXECUTOR] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [END_FOR] = { true, INSTR_FMT_IX, HAS_PURE_FLAG }, + [END_SEND] = { true, INSTR_FMT_IX, HAS_PURE_FLAG }, + [ENTER_EXECUTOR] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG }, [EXIT_INIT_CHECK] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [EXTENDED_ARG] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, [FORMAT_SIMPLE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, @@ -1067,9 +1071,9 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[268] = { [LOAD_ATTR_SLOT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, [LOAD_ATTR_WITH_HINT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, [LOAD_BUILD_CLASS] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [LOAD_CONST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_CONST_FLAG }, + [LOAD_CONST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | 
HAS_CONST_FLAG | HAS_PURE_FLAG }, [LOAD_DEREF] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [LOAD_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, + [LOAD_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_PURE_FLAG }, [LOAD_FAST_AND_CLEAR] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, [LOAD_FAST_CHECK] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_ERROR_FLAG }, [LOAD_FAST_LOAD_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, @@ -1096,9 +1100,9 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[268] = { [POP_JUMP_IF_NONE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG }, [POP_JUMP_IF_NOT_NONE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG }, [POP_JUMP_IF_TRUE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG }, - [POP_TOP] = { true, INSTR_FMT_IX, 0 }, + [POP_TOP] = { true, INSTR_FMT_IX, HAS_PURE_FLAG }, [PUSH_EXC_INFO] = { true, INSTR_FMT_IX, 0 }, - [PUSH_NULL] = { true, INSTR_FMT_IX, 0 }, + [PUSH_NULL] = { true, INSTR_FMT_IX, HAS_PURE_FLAG }, [RAISE_VARARGS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [RERAISE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [RESERVED] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG }, @@ -1127,7 +1131,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[268] = { [STORE_SUBSCR] = { true, INSTR_FMT_IXC, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [STORE_SUBSCR_DICT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [STORE_SUBSCR_LIST_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, - [SWAP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG }, + [SWAP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_PURE_FLAG }, [TO_BOOL] = { true, INSTR_FMT_IXC00, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [TO_BOOL_ALWAYS_TRUE] = { true, INSTR_FMT_IXC00, HAS_DEOPT_FLAG }, [TO_BOOL_BOOL] = { true, INSTR_FMT_IXC00, HAS_DEOPT_FLAG }, @@ -1137,7 +1141,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[268] = { [TO_BOOL_STR] = { true, INSTR_FMT_IXC00, HAS_DEOPT_FLAG }, [UNARY_INVERT] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [UNARY_NEGATIVE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [UNARY_NOT] = { true, INSTR_FMT_IX, 0 }, + [UNARY_NOT] = { true, INSTR_FMT_IX, HAS_PURE_FLAG }, [UNPACK_EX] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [UNPACK_SEQUENCE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [UNPACK_SEQUENCE_LIST] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, @@ -1147,7 +1151,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[268] = { [YIELD_VALUE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ESCAPES_FLAG }, [JUMP] = { true, -1, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [JUMP_NO_INTERRUPT] = { true, -1, HAS_ARG_FLAG | HAS_JUMP_FLAG }, - [LOAD_CLOSURE] = { true, -1, HAS_ARG_FLAG | HAS_LOCAL_FLAG }, + [LOAD_CLOSURE] = { true, -1, HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_PURE_FLAG }, [LOAD_METHOD] = { true, -1, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [LOAD_SUPER_METHOD] = { true, -1, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [LOAD_ZERO_SUPER_ATTR] = { true, -1, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, diff --git a/Include/internal/pycore_optimizer.h b/Include/internal/pycore_optimizer.h index 
b052460b44b791..31f30c673f207a 100644 --- a/Include/internal/pycore_optimizer.h +++ b/Include/internal/pycore_optimizer.h @@ -8,8 +8,6 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif -#include "pycore_uops.h" // _PyUOpInstruction - int _Py_uop_analyze_and_optimize(PyCodeObject *code, _PyUOpInstruction *trace, int trace_len, int curr_stackentries); diff --git a/Include/internal/pycore_pymem.h b/Include/internal/pycore_pymem.h index 8631ca34a5e616..1a72d07b50b738 100644 --- a/Include/internal/pycore_pymem.h +++ b/Include/internal/pycore_pymem.h @@ -44,6 +44,7 @@ struct _pymem_allocators { debug_alloc_api_t mem; debug_alloc_api_t obj; } debug; + int is_debug_enabled; PyObjectArenaAllocator obj_arena; }; @@ -64,7 +65,7 @@ extern int _PyMem_SetDefaultAllocator( - PYMEM_FORBIDDENBYTE: untouchable bytes at each end of a block Byte patterns 0xCB, 0xDB and 0xFB have been replaced with 0xCD, 0xDD and - 0xFD to use the same values than Windows CRT debug malloc() and free(). + 0xFD to use the same values as Windows CRT debug malloc() and free(). If modified, _PyMem_IsPtrFreed() should be updated as well. */ #define PYMEM_CLEANBYTE 0xCD #define PYMEM_DEADBYTE 0xDD @@ -106,6 +107,8 @@ extern int _PyMem_GetAllocatorName( PYMEM_ALLOCATOR_NOT_SET does nothing. */ extern int _PyMem_SetupAllocators(PyMemAllocatorName allocator); +/* Is the debug allocator enabled? */ +extern int _PyMem_DebugEnabled(void); #ifdef __cplusplus } diff --git a/Include/internal/pycore_pymem_init.h b/Include/internal/pycore_pymem_init.h index 360fb9218a9cda..96c49ed7338d6d 100644 --- a/Include/internal/pycore_pymem_init.h +++ b/Include/internal/pycore_pymem_init.h @@ -70,6 +70,7 @@ extern void _PyMem_ArenaFree(void *, void *, size_t); PYDBGMEM_ALLOC(runtime), \ PYDBGOBJ_ALLOC(runtime), \ } +# define _pymem_is_debug_enabled_INIT 1 #else # define _pymem_allocators_standard_INIT(runtime) \ { \ @@ -77,6 +78,7 @@ extern void _PyMem_ArenaFree(void *, void *, size_t); PYMEM_ALLOC, \ PYOBJ_ALLOC, \ } +# define _pymem_is_debug_enabled_INIT 0 #endif #define _pymem_allocators_debug_INIT \ diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h index 37b45faf8a74a0..348c5c634284b0 100644 --- a/Include/internal/pycore_pystate.h +++ b/Include/internal/pycore_pystate.h @@ -8,7 +8,9 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif +#include "pycore_freelist.h" // _PyFreeListState #include "pycore_runtime.h" // _PyRuntime +#include "pycore_tstate.h" // _PyThreadStateImpl // Values for PyThreadState.state. A thread must be in the "attached" state @@ -239,6 +241,20 @@ PyAPI_FUNC(const PyConfig*) _Py_GetConfig(void); // See also PyInterpreterState_Get() and _PyInterpreterState_GET(). 
extern PyInterpreterState* _PyGILState_GetInterpreterStateUnsafe(void); +static inline _PyFreeListState* _PyFreeListState_GET(void) +{ + PyThreadState *tstate = _PyThreadState_GET(); +#ifdef Py_DEBUG + _Py_EnsureTstateNotNULL(tstate); +#endif + +#ifdef Py_GIL_DISABLED + return &((_PyThreadStateImpl*)tstate)->freelist_state; +#else + return &tstate->interp->freelist_state; +#endif +} + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_runtime_init.h b/Include/internal/pycore_runtime_init.h index d324a94278839c..5f47d60de37825 100644 --- a/Include/internal/pycore_runtime_init.h +++ b/Include/internal/pycore_runtime_init.h @@ -86,6 +86,7 @@ extern PyTypeObject _PyExc_MemoryError; .standard = _pymem_allocators_standard_INIT(runtime), \ .debug = _pymem_allocators_debug_INIT, \ .obj_arena = _pymem_allocators_obj_arena_INIT, \ + .is_debug_enabled = _pymem_is_debug_enabled_INIT, \ }, \ .obmalloc = _obmalloc_global_state_INIT, \ .pyhash_state = pyhash_state_INIT, \ diff --git a/Include/internal/pycore_runtime_init_generated.h b/Include/internal/pycore_runtime_init_generated.h index 1defa39f816e78..e3ebd80745e610 100644 --- a/Include/internal/pycore_runtime_init_generated.h +++ b/Include/internal/pycore_runtime_init_generated.h @@ -785,6 +785,7 @@ extern "C" { INIT_ID(after_in_child), \ INIT_ID(after_in_parent), \ INIT_ID(aggregate_class), \ + INIT_ID(allow_code), \ INIT_ID(append), \ INIT_ID(argdefs), \ INIT_ID(arguments), \ @@ -1013,6 +1014,7 @@ extern "C" { INIT_ID(kw), \ INIT_ID(kw1), \ INIT_ID(kw2), \ + INIT_ID(kwdefaults), \ INIT_ID(lambda), \ INIT_ID(last), \ INIT_ID(last_exc), \ @@ -1162,7 +1164,6 @@ extern "C" { INIT_ID(seek), \ INIT_ID(seekable), \ INIT_ID(selectors), \ - INIT_ID(self), \ INIT_ID(send), \ INIT_ID(sep), \ INIT_ID(sequence), \ diff --git a/Include/internal/pycore_sliceobject.h b/Include/internal/pycore_sliceobject.h index 98665c3859d574..0c72d3ee6225c5 100644 --- a/Include/internal/pycore_sliceobject.h +++ b/Include/internal/pycore_sliceobject.h @@ -11,7 +11,7 @@ extern "C" { /* runtime lifecycle */ -extern void _PySlice_Fini(PyInterpreterState *); +extern void _PySlice_Fini(_PyFreeListState *); extern PyObject * _PyBuildSlice_ConsumeRefs(PyObject *start, PyObject *stop); diff --git a/Include/internal/pycore_tstate.h b/Include/internal/pycore_tstate.h index 856ddd5e7e5ff0..472fa08154e8f9 100644 --- a/Include/internal/pycore_tstate.h +++ b/Include/internal/pycore_tstate.h @@ -8,6 +8,7 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif +#include "pycore_freelist.h" // struct _Py_freelist_state #include "pycore_mimalloc.h" // struct _mimalloc_thread_state @@ -20,6 +21,7 @@ typedef struct _PyThreadStateImpl { #ifdef Py_GIL_DISABLED struct _mimalloc_thread_state mimalloc; + struct _Py_freelist_state freelist_state; #endif } _PyThreadStateImpl; diff --git a/Include/internal/pycore_tuple.h b/Include/internal/pycore_tuple.h index 4fa7a12206bcb2..b348339a505b0f 100644 --- a/Include/internal/pycore_tuple.h +++ b/Include/internal/pycore_tuple.h @@ -14,59 +14,16 @@ extern void _PyTuple_DebugMallocStats(FILE *out); /* runtime lifecycle */ extern PyStatus _PyTuple_InitGlobalObjects(PyInterpreterState *); -extern void _PyTuple_Fini(PyInterpreterState *); +extern void _PyTuple_Fini(_PyFreeListState *); /* other API */ -// PyTuple_MAXSAVESIZE - largest tuple to save on free list -// PyTuple_MAXFREELIST - maximum number of tuples of each size to save - -#if defined(PyTuple_MAXSAVESIZE) && PyTuple_MAXSAVESIZE <= 0 - // A build indicated that tuple 
freelists should not be used. -# define PyTuple_NFREELISTS 0 -# undef PyTuple_MAXSAVESIZE -# undef PyTuple_MAXFREELIST - -#elif !defined(WITH_FREELISTS) -# define PyTuple_NFREELISTS 0 -# undef PyTuple_MAXSAVESIZE -# undef PyTuple_MAXFREELIST - -#else - // We are using a freelist for tuples. -# ifndef PyTuple_MAXSAVESIZE -# define PyTuple_MAXSAVESIZE 20 -# endif -# define PyTuple_NFREELISTS PyTuple_MAXSAVESIZE -# ifndef PyTuple_MAXFREELIST -# define PyTuple_MAXFREELIST 2000 -# endif -#endif - -struct _Py_tuple_state { -#if PyTuple_NFREELISTS > 0 - /* There is one freelist for each size from 1 to PyTuple_MAXSAVESIZE. - The empty tuple is handled separately. - - Each tuple stored in the array is the head of the linked list - (and the next available tuple) for that size. The actual tuple - object is used as the linked list node, with its first item - (ob_item[0]) pointing to the next node (i.e. the previous head). - Each linked list is initially NULL. */ - PyTupleObject *free_list[PyTuple_NFREELISTS]; - int numfree[PyTuple_NFREELISTS]; -#else - char _unused; // Empty structs are not allowed. -#endif -}; - #define _PyTuple_ITEMS(op) _Py_RVALUE(_PyTuple_CAST(op)->ob_item) extern PyObject *_PyTuple_FromArray(PyObject *const *, Py_ssize_t); extern PyObject *_PyTuple_FromArraySteal(PyObject *const *, Py_ssize_t); - typedef struct { PyObject_HEAD Py_ssize_t it_index; diff --git a/Include/internal/pycore_unicodeobject_generated.h b/Include/internal/pycore_unicodeobject_generated.h index be9baa3eebecfc..9fa6c896c1a328 100644 --- a/Include/internal/pycore_unicodeobject_generated.h +++ b/Include/internal/pycore_unicodeobject_generated.h @@ -669,6 +669,9 @@ _PyUnicode_InitStaticStrings(PyInterpreterState *interp) { string = &_Py_ID(aggregate_class); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); + string = &_Py_ID(allow_code); + assert(_PyUnicode_CheckConsistency(string, 1)); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(append); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); @@ -1353,6 +1356,9 @@ _PyUnicode_InitStaticStrings(PyInterpreterState *interp) { string = &_Py_ID(kw2); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); + string = &_Py_ID(kwdefaults); + assert(_PyUnicode_CheckConsistency(string, 1)); + _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(lambda); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); @@ -1800,9 +1806,6 @@ _PyUnicode_InitStaticStrings(PyInterpreterState *interp) { string = &_Py_ID(selectors); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); - string = &_Py_ID(self); - assert(_PyUnicode_CheckConsistency(string, 1)); - _PyUnicode_InternInPlace(interp, &string); string = &_Py_ID(send); assert(_PyUnicode_CheckConsistency(string, 1)); _PyUnicode_InternInPlace(interp, &string); diff --git a/Include/internal/pycore_uop_ids.h b/Include/internal/pycore_uop_ids.h index 4a9a00ba352d33..8ee90d79a13c2f 100644 --- a/Include/internal/pycore_uop_ids.h +++ b/Include/internal/pycore_uop_ids.h @@ -229,9 +229,10 @@ extern "C" { #define _GUARD_IS_NOT_NONE_POP 376 #define _JUMP_TO_TOP 377 #define _SAVE_RETURN_OFFSET 378 -#define _INSERT 379 -#define _CHECK_VALIDITY 380 -#define MAX_UOP_ID 380 +#define _CHECK_VALIDITY 379 +#define _LOAD_CONST_INLINE_BORROW 380 +#define _INTERNAL_INCREMENT_OPT_COUNTER 381 +#define MAX_UOP_ID 381 #ifdef __cplusplus } diff --git 
a/Include/internal/pycore_uop_metadata.h b/Include/internal/pycore_uop_metadata.h index 300bd3baa7b377..9bfb4f4f3a4dea 100644 --- a/Include/internal/pycore_uop_metadata.h +++ b/Include/internal/pycore_uop_metadata.h @@ -19,36 +19,36 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_NOP] = 0, [_RESUME_CHECK] = HAS_DEOPT_FLAG, [_LOAD_FAST_CHECK] = HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_ERROR_FLAG, - [_LOAD_FAST] = HAS_ARG_FLAG | HAS_LOCAL_FLAG, + [_LOAD_FAST] = HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_PURE_FLAG, [_LOAD_FAST_AND_CLEAR] = HAS_ARG_FLAG | HAS_LOCAL_FLAG, [_LOAD_FAST_LOAD_FAST] = HAS_ARG_FLAG | HAS_LOCAL_FLAG, - [_LOAD_CONST] = HAS_ARG_FLAG | HAS_CONST_FLAG, + [_LOAD_CONST] = HAS_ARG_FLAG | HAS_CONST_FLAG | HAS_PURE_FLAG, [_STORE_FAST] = HAS_ARG_FLAG | HAS_LOCAL_FLAG, [_STORE_FAST_LOAD_FAST] = HAS_ARG_FLAG | HAS_LOCAL_FLAG, [_STORE_FAST_STORE_FAST] = HAS_ARG_FLAG | HAS_LOCAL_FLAG, - [_POP_TOP] = 0, - [_PUSH_NULL] = 0, - [_END_SEND] = 0, + [_POP_TOP] = HAS_PURE_FLAG, + [_PUSH_NULL] = HAS_PURE_FLAG, + [_END_SEND] = HAS_PURE_FLAG, [_UNARY_NEGATIVE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_UNARY_NOT] = 0, + [_UNARY_NOT] = HAS_PURE_FLAG, [_TO_BOOL] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_TO_BOOL_BOOL] = HAS_DEOPT_FLAG, + [_TO_BOOL_BOOL] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, [_TO_BOOL_INT] = HAS_DEOPT_FLAG, [_TO_BOOL_LIST] = HAS_DEOPT_FLAG, [_TO_BOOL_NONE] = HAS_DEOPT_FLAG, [_TO_BOOL_STR] = HAS_DEOPT_FLAG, [_TO_BOOL_ALWAYS_TRUE] = HAS_DEOPT_FLAG, [_UNARY_INVERT] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_GUARD_BOTH_INT] = HAS_DEOPT_FLAG, - [_BINARY_OP_MULTIPLY_INT] = HAS_ERROR_FLAG, - [_BINARY_OP_ADD_INT] = HAS_ERROR_FLAG, - [_BINARY_OP_SUBTRACT_INT] = HAS_ERROR_FLAG, - [_GUARD_BOTH_FLOAT] = HAS_DEOPT_FLAG, - [_BINARY_OP_MULTIPLY_FLOAT] = 0, - [_BINARY_OP_ADD_FLOAT] = 0, - [_BINARY_OP_SUBTRACT_FLOAT] = 0, - [_GUARD_BOTH_UNICODE] = HAS_DEOPT_FLAG, - [_BINARY_OP_ADD_UNICODE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, + [_GUARD_BOTH_INT] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, + [_BINARY_OP_MULTIPLY_INT] = HAS_ERROR_FLAG | HAS_PURE_FLAG, + [_BINARY_OP_ADD_INT] = HAS_ERROR_FLAG | HAS_PURE_FLAG, + [_BINARY_OP_SUBTRACT_INT] = HAS_ERROR_FLAG | HAS_PURE_FLAG, + [_GUARD_BOTH_FLOAT] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, + [_BINARY_OP_MULTIPLY_FLOAT] = HAS_PURE_FLAG, + [_BINARY_OP_ADD_FLOAT] = HAS_PURE_FLAG, + [_BINARY_OP_SUBTRACT_FLOAT] = HAS_PURE_FLAG, + [_GUARD_BOTH_UNICODE] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, + [_BINARY_OP_ADD_UNICODE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG | HAS_PURE_FLAG, [_BINARY_SUBSCR] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_BINARY_SLICE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_STORE_SLICE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, @@ -112,17 +112,17 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_LOAD_SUPER_ATTR_ATTR] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_LOAD_SUPER_ATTR_METHOD] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_LOAD_ATTR] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_GUARD_TYPE_VERSION] = HAS_DEOPT_FLAG, - [_CHECK_MANAGED_OBJECT_HAS_VALUES] = HAS_DEOPT_FLAG, + [_GUARD_TYPE_VERSION] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, + [_CHECK_MANAGED_OBJECT_HAS_VALUES] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, [_LOAD_ATTR_INSTANCE_VALUE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, - [_CHECK_ATTR_MODULE] = HAS_DEOPT_FLAG, + [_CHECK_ATTR_MODULE] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, [_LOAD_ATTR_MODULE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, - [_CHECK_ATTR_WITH_HINT] = 
HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG, + [_CHECK_ATTR_WITH_HINT] = HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG | HAS_PASSTHROUGH_FLAG, [_LOAD_ATTR_WITH_HINT] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG, [_LOAD_ATTR_SLOT] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, - [_CHECK_ATTR_CLASS] = HAS_DEOPT_FLAG, + [_CHECK_ATTR_CLASS] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, [_LOAD_ATTR_CLASS] = HAS_ARG_FLAG, - [_GUARD_DORV_VALUES] = HAS_DEOPT_FLAG, + [_GUARD_DORV_VALUES] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, [_STORE_ATTR_INSTANCE_VALUE] = HAS_ESCAPES_FLAG, [_STORE_ATTR_SLOT] = HAS_ESCAPES_FLAG, [_COMPARE_OP] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, @@ -142,34 +142,34 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_GET_ITER] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_GET_YIELD_FROM_ITER] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_FOR_ITER_TIER_TWO] = HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_ITER_CHECK_LIST] = HAS_DEOPT_FLAG, - [_GUARD_NOT_EXHAUSTED_LIST] = HAS_DEOPT_FLAG, + [_ITER_CHECK_LIST] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, + [_GUARD_NOT_EXHAUSTED_LIST] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, [_ITER_NEXT_LIST] = 0, - [_ITER_CHECK_TUPLE] = HAS_DEOPT_FLAG, - [_GUARD_NOT_EXHAUSTED_TUPLE] = HAS_DEOPT_FLAG, + [_ITER_CHECK_TUPLE] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, + [_GUARD_NOT_EXHAUSTED_TUPLE] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, [_ITER_NEXT_TUPLE] = 0, - [_ITER_CHECK_RANGE] = HAS_DEOPT_FLAG, - [_GUARD_NOT_EXHAUSTED_RANGE] = HAS_DEOPT_FLAG, + [_ITER_CHECK_RANGE] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, + [_GUARD_NOT_EXHAUSTED_RANGE] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, [_ITER_NEXT_RANGE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_BEFORE_ASYNC_WITH] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_BEFORE_WITH] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_WITH_EXCEPT_START] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_PUSH_EXC_INFO] = 0, - [_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT] = HAS_DEOPT_FLAG, - [_GUARD_KEYS_VERSION] = HAS_DEOPT_FLAG, + [_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, + [_GUARD_KEYS_VERSION] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, [_LOAD_ATTR_METHOD_WITH_VALUES] = HAS_ARG_FLAG | HAS_ESCAPES_FLAG, [_LOAD_ATTR_METHOD_NO_DICT] = HAS_ARG_FLAG | HAS_ESCAPES_FLAG, [_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = HAS_ARG_FLAG, [_LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = HAS_ARG_FLAG, - [_CHECK_ATTR_METHOD_LAZY_DICT] = HAS_DEOPT_FLAG, + [_CHECK_ATTR_METHOD_LAZY_DICT] = HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, [_LOAD_ATTR_METHOD_LAZY_DICT] = HAS_ARG_FLAG | HAS_ESCAPES_FLAG, - [_CHECK_CALL_BOUND_METHOD_EXACT_ARGS] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, + [_CHECK_CALL_BOUND_METHOD_EXACT_ARGS] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, [_INIT_CALL_BOUND_METHOD_EXACT_ARGS] = HAS_ARG_FLAG, [_CHECK_PEP_523] = HAS_DEOPT_FLAG, - [_CHECK_FUNCTION_EXACT_ARGS] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, - [_CHECK_STACK_SPACE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, - [_INIT_CALL_PY_EXACT_ARGS] = HAS_ARG_FLAG | HAS_ESCAPES_FLAG, - [_PUSH_FRAME] = 0, + [_CHECK_FUNCTION_EXACT_ARGS] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, + [_CHECK_STACK_SPACE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_PASSTHROUGH_FLAG, + [_INIT_CALL_PY_EXACT_ARGS] = HAS_ARG_FLAG | HAS_ESCAPES_FLAG | HAS_PURE_FLAG, + [_PUSH_FRAME] = HAS_ESCAPES_FLAG, [_CALL_TYPE_1] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, [_CALL_STR_1] = HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_CALL_TUPLE_1] = HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, 
@@ -190,9 +190,9 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_CONVERT_VALUE] = HAS_ARG_FLAG | HAS_ERROR_FLAG, [_FORMAT_SIMPLE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_FORMAT_WITH_SPEC] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, - [_COPY] = HAS_ARG_FLAG, + [_COPY] = HAS_ARG_FLAG | HAS_PURE_FLAG, [_BINARY_OP] = HAS_ARG_FLAG | HAS_ERROR_FLAG, - [_SWAP] = HAS_ARG_FLAG, + [_SWAP] = HAS_ARG_FLAG | HAS_PURE_FLAG, [_GUARD_IS_TRUE_POP] = HAS_DEOPT_FLAG, [_GUARD_IS_FALSE_POP] = HAS_DEOPT_FLAG, [_GUARD_IS_NONE_POP] = HAS_DEOPT_FLAG, @@ -201,8 +201,9 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_SET_IP] = HAS_ARG_FLAG | HAS_ESCAPES_FLAG, [_SAVE_RETURN_OFFSET] = HAS_ARG_FLAG, [_EXIT_TRACE] = HAS_DEOPT_FLAG, - [_INSERT] = HAS_ARG_FLAG, [_CHECK_VALIDITY] = HAS_DEOPT_FLAG, + [_LOAD_CONST_INLINE_BORROW] = 0, + [_INTERNAL_INCREMENT_OPT_COUNTER] = 0, }; const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = { @@ -302,7 +303,7 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = { [_GUARD_TYPE_VERSION] = "_GUARD_TYPE_VERSION", [_INIT_CALL_BOUND_METHOD_EXACT_ARGS] = "_INIT_CALL_BOUND_METHOD_EXACT_ARGS", [_INIT_CALL_PY_EXACT_ARGS] = "_INIT_CALL_PY_EXACT_ARGS", - [_INSERT] = "_INSERT", + [_INTERNAL_INCREMENT_OPT_COUNTER] = "_INTERNAL_INCREMENT_OPT_COUNTER", [_IS_NONE] = "_IS_NONE", [_IS_OP] = "_IS_OP", [_ITER_CHECK_LIST] = "_ITER_CHECK_LIST", @@ -328,6 +329,7 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = { [_LOAD_ATTR_WITH_HINT] = "_LOAD_ATTR_WITH_HINT", [_LOAD_BUILD_CLASS] = "_LOAD_BUILD_CLASS", [_LOAD_CONST] = "_LOAD_CONST", + [_LOAD_CONST_INLINE_BORROW] = "_LOAD_CONST_INLINE_BORROW", [_LOAD_DEREF] = "_LOAD_DEREF", [_LOAD_FAST] = "_LOAD_FAST", [_LOAD_FAST_AND_CLEAR] = "_LOAD_FAST_AND_CLEAR", diff --git a/Include/internal/pycore_uops.h b/Include/internal/pycore_uops.h deleted file mode 100644 index 153884f4bd2902..00000000000000 --- a/Include/internal/pycore_uops.h +++ /dev/null @@ -1,35 +0,0 @@ -#ifndef Py_INTERNAL_UOPS_H -#define Py_INTERNAL_UOPS_H -#ifdef __cplusplus -extern "C" { -#endif - -#ifndef Py_BUILD_CORE -# error "this header requires Py_BUILD_CORE define" -#endif - -#include "pycore_frame.h" // _PyInterpreterFrame - -#define _Py_UOP_MAX_TRACE_LENGTH 512 - -typedef struct { - uint16_t opcode; - uint16_t oparg; - uint32_t target; - uint64_t operand; // A cache entry -} _PyUOpInstruction; - -typedef struct { - _PyExecutorObject base; - _PyUOpInstruction trace[1]; -} _PyUOpExecutorObject; - -_Py_CODEUNIT *_PyUOpExecute( - _PyExecutorObject *executor, - _PyInterpreterFrame *frame, - PyObject **stack_pointer); - -#ifdef __cplusplus -} -#endif -#endif /* !Py_INTERNAL_UOPS_H */ diff --git a/Include/patchlevel.h b/Include/patchlevel.h index fad79ecfda7b28..5f9b720f8b1671 100644 --- a/Include/patchlevel.h +++ b/Include/patchlevel.h @@ -20,10 +20,10 @@ #define PY_MINOR_VERSION 13 #define PY_MICRO_VERSION 0 #define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_ALPHA -#define PY_RELEASE_SERIAL 2 +#define PY_RELEASE_SERIAL 3 /* Version as a string */ -#define PY_VERSION "3.13.0a2+" +#define PY_VERSION "3.13.0a3+" /*--end constants--*/ /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2. 
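As the patchlevel.h comment above notes, the version is also exposed as a single 4-byte hex number; a tiny sketch of how 1.5.2b2 packs into 0x010502B2 (illustrative only, not part of the patch):

    # major, minor, micro, release-level nibble (0xB == beta) and serial,
    # packed from the high byte down.
    major, minor, micro, level, serial = 1, 5, 2, 0xB, 2
    version_hex = (major << 24) | (minor << 16) | (micro << 8) | (level << 4) | serial
    print(hex(version_hex))  # 0x10502b2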
diff --git a/LICENSE b/LICENSE index f26bcf4d2de6eb..14603b95c2e23b 100644 --- a/LICENSE +++ b/LICENSE @@ -83,10 +83,8 @@ grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of copyright, -i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation; -All Rights Reserved" are retained in Python alone or in any derivative version -prepared by Licensee. +i.e., "Copyright (c) 2001-2024 Python Software Foundation; All Rights Reserved" +are retained in Python alone or in any derivative version prepared by Licensee. 3. In the event Licensee prepares a derivative work that is based on or incorporates Python or any part thereof, and wants to make diff --git a/Lib/_pyio.py b/Lib/_pyio.py index 32698abac78d25..df2c29bfa9caee 100644 --- a/Lib/_pyio.py +++ b/Lib/_pyio.py @@ -2198,8 +2198,9 @@ def write(self, s): self.buffer.write(b) if self._line_buffering and (haslf or "\r" in s): self.flush() - self._set_decoded_chars('') - self._snapshot = None + if self._snapshot is not None: + self._set_decoded_chars('') + self._snapshot = None if self._decoder: self._decoder.reset() return length @@ -2513,8 +2514,9 @@ def read(self, size=None): # Read everything. result = (self._get_decoded_chars() + decoder.decode(self.buffer.read(), final=True)) - self._set_decoded_chars('') - self._snapshot = None + if self._snapshot is not None: + self._set_decoded_chars('') + self._snapshot = None return result else: # Keep reading chunks until we have size characters to return. 
diff --git a/Lib/ast.py b/Lib/ast.py index f7888d18859ae4..43703a8325cc5e 100644 --- a/Lib/ast.py +++ b/Lib/ast.py @@ -728,12 +728,11 @@ class _Unparser(NodeVisitor): output source code for the abstract syntax; original formatting is disregarded.""" - def __init__(self, *, _avoid_backslashes=False): + def __init__(self): self._source = [] self._precedences = {} self._type_ignores = {} self._indent = 0 - self._avoid_backslashes = _avoid_backslashes self._in_try_star = False def interleave(self, inter, f, seq): @@ -1270,14 +1269,14 @@ def visit_JoinedStr(self, node): quote_type = quote_types[0] self.write(f"{quote_type}{value}{quote_type}") - def _write_fstring_inner(self, node, scape_newlines=False): + def _write_fstring_inner(self, node, escape_newlines=False): if isinstance(node, JoinedStr): # for both the f-string itself, and format_spec for value in node.values: - self._write_fstring_inner(value, scape_newlines=scape_newlines) + self._write_fstring_inner(value, escape_newlines=escape_newlines) elif isinstance(node, Constant) and isinstance(node.value, str): value = node.value.replace("{", "{{").replace("}", "}}") - if scape_newlines: + if escape_newlines: value = value.replace("\n", "\\n") self.write(value) elif isinstance(node, FormattedValue): @@ -1303,7 +1302,7 @@ def unparse_inner(inner): self.write(":") self._write_fstring_inner( node.format_spec, - scape_newlines=True + escape_newlines=True ) def visit_Name(self, node): @@ -1324,8 +1323,6 @@ def _write_constant(self, value): .replace("inf", _INFSTR) .replace("nan", f"({_INFSTR}-{_INFSTR})") ) - elif self._avoid_backslashes and isinstance(value, str): - self._write_str_avoiding_backslashes(value) else: self.write(repr(value)) @@ -1812,8 +1809,7 @@ def main(): import argparse parser = argparse.ArgumentParser(prog='python -m ast') - parser.add_argument('infile', type=argparse.FileType(mode='rb'), nargs='?', - default='-', + parser.add_argument('infile', nargs='?', default='-', help='the file to parse; defaults to stdin') parser.add_argument('-m', '--mode', default='exec', choices=('exec', 'single', 'eval', 'func_type'), @@ -1827,9 +1823,14 @@ def main(): help='indentation of nodes (number of spaces)') args = parser.parse_args() - with args.infile as infile: - source = infile.read() - tree = parse(source, args.infile.name, args.mode, type_comments=args.no_type_comments) + if args.infile == '-': + name = '<stdin>' + source = sys.stdin.buffer.read() + else: + name = args.infile + with open(args.infile, 'rb') as infile: + source = infile.read() + tree = parse(source, name, args.mode, type_comments=args.no_type_comments) print(dump(tree, include_attributes=args.include_attributes, indent=args.indent)) if __name__ == '__main__': diff --git a/Lib/asyncio/constants.py b/Lib/asyncio/constants.py index f0ce0433a7a8a6..b60c1e4236af1f 100644 --- a/Lib/asyncio/constants.py +++ b/Lib/asyncio/constants.py @@ -1,3 +1,7 @@ +# Contains code from https://github.com/MagicStack/uvloop/tree/v0.16.0 +# SPDX-License-Identifier: PSF-2.0 AND (MIT OR Apache-2.0) +# SPDX-FileCopyrightText: Copyright (c) 2015-2021 MagicStack Inc. http://magic.io + import enum # After the connection is lost, log warnings after this many write()s.
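Not part of the patch: a minimal sketch of the library-level equivalent of the '-' handling that "python -m ast" gains above, assuming a hypothetical file path on the command line and mirroring the parse()/dump() calls in main():

    import ast
    import sys

    path = sys.argv[1] if len(sys.argv) > 1 else '-'
    if path == '-':
        name = '<stdin>'                  # same placeholder name used above
        source = sys.stdin.buffer.read()
    else:
        name = path
        with open(path, 'rb') as infile:
            source = infile.read()
    tree = ast.parse(source, name, 'exec', type_comments=False)
    print(ast.dump(tree, indent=3))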
diff --git a/Lib/asyncio/events.py b/Lib/asyncio/events.py index ebc3836bdc0c4d..072a99fee123c3 100644 --- a/Lib/asyncio/events.py +++ b/Lib/asyncio/events.py @@ -1,5 +1,9 @@ """Event loop and event loop policy.""" +# Contains code from https://github.com/MagicStack/uvloop/tree/v0.16.0 +# SPDX-License-Identifier: PSF-2.0 AND (MIT OR Apache-2.0) +# SPDX-FileCopyrightText: Copyright (c) 2015-2021 MagicStack Inc. http://magic.io + __all__ = ( 'AbstractEventLoopPolicy', 'AbstractEventLoop', 'AbstractServer', diff --git a/Lib/asyncio/futures.py b/Lib/asyncio/futures.py index 97fc4e3fcb60ee..5d35321db7943b 100644 --- a/Lib/asyncio/futures.py +++ b/Lib/asyncio/futures.py @@ -138,9 +138,6 @@ def _make_cancelled_error(self): exc = exceptions.CancelledError() else: exc = exceptions.CancelledError(self._cancel_message) - exc.__context__ = self._cancelled_exc - # Remove the reference since we don't need this anymore. - self._cancelled_exc = None return exc def cancel(self, msg=None): @@ -272,9 +269,13 @@ def set_exception(self, exception): raise exceptions.InvalidStateError(f'{self._state}: {self!r}') if isinstance(exception, type): exception = exception() - if type(exception) is StopIteration: - raise TypeError("StopIteration interacts badly with generators " - "and cannot be raised into a Future") + if isinstance(exception, StopIteration): + new_exc = RuntimeError("StopIteration interacts badly with " + "generators and cannot be raised into a " + "Future") + new_exc.__cause__ = exception + new_exc.__context__ = exception + exception = new_exc self._exception = exception self._exception_tb = exception.__traceback__ self._state = _FINISHED diff --git a/Lib/asyncio/locks.py b/Lib/asyncio/locks.py index ce5d8d5bfb2e81..04158e667a895f 100644 --- a/Lib/asyncio/locks.py +++ b/Lib/asyncio/locks.py @@ -95,6 +95,8 @@ async def acquire(self): This method blocks until the lock is unlocked, then sets it to locked and returns True. """ + # Implement fair scheduling, where thread always waits + # its turn. Jumping the queue if all are cancelled is an optimization. if (not self._locked and (self._waiters is None or all(w.cancelled() for w in self._waiters))): self._locked = True @@ -105,19 +107,22 @@ async def acquire(self): fut = self._get_loop().create_future() self._waiters.append(fut) - # Finally block should be called before the CancelledError - # handling as we don't want CancelledError to call - # _wake_up_first() and attempt to wake up itself. try: try: await fut finally: self._waiters.remove(fut) except exceptions.CancelledError: + # Currently the only exception designed be able to occur here. + + # Ensure the lock invariant: If lock is not claimed (or about + # to be claimed by us) and there is a Task in waiters, + # ensure that the Task at the head will run. if not self._locked: self._wake_up_first() raise + # assert self._locked is False self._locked = True return True @@ -139,7 +144,7 @@ def release(self): raise RuntimeError('Lock is not acquired.') def _wake_up_first(self): - """Wake up the first waiter if it isn't done.""" + """Ensure that the first waiter will wake up.""" if not self._waiters: return try: @@ -147,9 +152,7 @@ def _wake_up_first(self): except StopIteration: return - # .done() necessarily means that a waiter will wake up later on and - # either take the lock, or, if it was cancelled and lock wasn't - # taken already, will hit this again and wake up a new waiter. + # .done() means that the waiter is already set to wake up. 
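The comments added to Lock.acquire() above describe fair FIFO scheduling of waiters. A hedged sketch of the ordering this is meant to guarantee (illustrative only, not part of the patch; the task indices are arbitrary):

    import asyncio

    async def worker(i, lock, order):
        async with lock:
            order.append(i)            # record acquisition order
            await asyncio.sleep(0)     # hold the lock across a context switch

    async def main():
        lock = asyncio.Lock()
        order = []
        # Tasks are created in index order; with fair scheduling they are
        # expected to acquire the lock in that same order.
        await asyncio.gather(*(worker(i, lock, order) for i in range(5)))
        print(order)                   # expected: [0, 1, 2, 3, 4]

    asyncio.run(main())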
if not fut.done(): fut.set_result(True) @@ -269,17 +272,22 @@ async def wait(self): self._waiters.remove(fut) finally: - # Must reacquire lock even if wait is cancelled - cancelled = False + # Must re-acquire lock even if wait is cancelled. + # We only catch CancelledError here, since we don't want any + # other (fatal) errors with the future to cause us to spin. + err = None while True: try: await self.acquire() break - except exceptions.CancelledError: - cancelled = True + except exceptions.CancelledError as e: + err = e - if cancelled: - raise exceptions.CancelledError + if err: + try: + raise err # Re-raise most recent exception instance. + finally: + err = None # Break reference cycles. async def wait_for(self, predicate): """Wait until a predicate becomes true. @@ -357,6 +365,7 @@ def __repr__(self): def locked(self): """Returns True if semaphore cannot be acquired immediately.""" + # Due to state, or FIFO rules (must allow others to run first). return self._value == 0 or ( any(not w.cancelled() for w in (self._waiters or ()))) @@ -370,6 +379,7 @@ async def acquire(self): True. """ if not self.locked(): + # Maintain FIFO, wait for others to start even if _value > 0. self._value -= 1 return True @@ -378,22 +388,27 @@ async def acquire(self): fut = self._get_loop().create_future() self._waiters.append(fut) - # Finally block should be called before the CancelledError - # handling as we don't want CancelledError to call - # _wake_up_first() and attempt to wake up itself. try: try: await fut finally: self._waiters.remove(fut) except exceptions.CancelledError: - if not fut.cancelled(): + # Currently the only exception designed be able to occur here. + if fut.done() and not fut.cancelled(): + # Our Future was successfully set to True via _wake_up_next(), + # but we are not about to successfully acquire(). Therefore we + # must undo the bookkeeping already done and attempt to wake + # up someone else. self._value += 1 - self._wake_up_next() raise - if self._value > 0: - self._wake_up_next() + finally: + # New waiters may have arrived but had to wait due to FIFO. + # Wake up as many as are allowed. + while self._value > 0: + if not self._wake_up_next(): + break # There was no-one to wake up. return True def release(self): @@ -408,13 +423,15 @@ def release(self): def _wake_up_next(self): """Wake up the first waiter that isn't done.""" if not self._waiters: - return + return False for fut in self._waiters: if not fut.done(): self._value -= 1 fut.set_result(True) - return + # `fut` is now `done()` and not `cancelled()`. + return True + return False class BoundedSemaphore(Semaphore): diff --git a/Lib/asyncio/sslproto.py b/Lib/asyncio/sslproto.py index cbb6527d0b28e0..599e91ba0003d1 100644 --- a/Lib/asyncio/sslproto.py +++ b/Lib/asyncio/sslproto.py @@ -1,3 +1,7 @@ +# Contains code from https://github.com/MagicStack/uvloop/tree/v0.16.0 +# SPDX-License-Identifier: PSF-2.0 AND (MIT OR Apache-2.0) +# SPDX-FileCopyrightText: Copyright (c) 2015-2021 MagicStack Inc. 
http://magic.io + import collections import enum import warnings diff --git a/Lib/asyncio/taskgroups.py b/Lib/asyncio/taskgroups.py index cb9c1ce4d7d1d2..e1c56d140bef7d 100644 --- a/Lib/asyncio/taskgroups.py +++ b/Lib/asyncio/taskgroups.py @@ -73,8 +73,10 @@ async def __aexit__(self, et, exc, tb): self._base_error is None): self._base_error = exc - propagate_cancellation_error = \ - exc if et is exceptions.CancelledError else None + if et is not None and issubclass(et, exceptions.CancelledError): + propagate_cancellation_error = exc + else: + propagate_cancellation_error = None if self._parent_cancel_requested: # If this flag is set we *must* call uncancel(). if self._parent_task.uncancel() == 0: @@ -133,7 +135,7 @@ async def __aexit__(self, et, exc, tb): if propagate_cancellation_error and not self._errors: raise propagate_cancellation_error - if et is not None and et is not exceptions.CancelledError: + if et is not None and not issubclass(et, exceptions.CancelledError): self._errors.append(exc) if self._errors: diff --git a/Lib/asyncio/timeouts.py b/Lib/asyncio/timeouts.py index 30042abb3ad804..e6f5100691d362 100644 --- a/Lib/asyncio/timeouts.py +++ b/Lib/asyncio/timeouts.py @@ -109,10 +109,16 @@ async def __aexit__( if self._state is _State.EXPIRING: self._state = _State.EXPIRED - if self._task.uncancel() <= self._cancelling and exc_type is exceptions.CancelledError: + if self._task.uncancel() <= self._cancelling and exc_type is not None: # Since there are no new cancel requests, we're # handling this. - raise TimeoutError from exc_val + if issubclass(exc_type, exceptions.CancelledError): + raise TimeoutError from exc_val + elif exc_val is not None: + self._insert_timeout_error(exc_val) + if isinstance(exc_val, ExceptionGroup): + for exc in exc_val.exceptions: + self._insert_timeout_error(exc) elif self._state is _State.ENTERED: self._state = _State.EXITED @@ -125,6 +131,16 @@ def _on_timeout(self) -> None: # drop the reference early self._timeout_handler = None + @staticmethod + def _insert_timeout_error(exc_val: BaseException) -> None: + while exc_val.__context__ is not None: + if isinstance(exc_val.__context__, exceptions.CancelledError): + te = TimeoutError() + te.__context__ = te.__cause__ = exc_val.__context__ + exc_val.__context__ = te + break + exc_val = exc_val.__context__ + def timeout(delay: Optional[float]) -> Timeout: """Timeout async context manager. diff --git a/Lib/csv.py b/Lib/csv.py index 77f30c8d2b1f61..a079279b8b8cbc 100644 --- a/Lib/csv.py +++ b/Lib/csv.py @@ -1,28 +1,90 @@ -""" -csv.py - read/write/investigate CSV files +r""" +CSV parsing and writing. + +This module provides classes that assist in the reading and writing +of Comma Separated Value (CSV) files, and implements the interface +described by PEP 305. Although many CSV files are simple to parse, +the format is not formally defined by a stable specification and +is subtle enough that parsing lines of a CSV file with something +like line.split(",") is bound to fail. The module supports three +basic APIs: reading, writing, and registration of dialects. + + +DIALECT REGISTRATION: + +Readers and writers support a dialect argument, which is a convenient +handle on a group of settings. When the dialect argument is a string, +it identifies one of the dialects previously registered with the module. 
+If it is a class or instance, the attributes of the argument are used as +the settings for the reader or writer: + + class excel: + delimiter = ',' + quotechar = '"' + escapechar = None + doublequote = True + skipinitialspace = False + lineterminator = '\r\n' + quoting = QUOTE_MINIMAL + +SETTINGS: + + * quotechar - specifies a one-character string to use as the + quoting character. It defaults to '"'. + * delimiter - specifies a one-character string to use as the + field separator. It defaults to ','. + * skipinitialspace - specifies how to interpret spaces which + immediately follow a delimiter. It defaults to False, which + means that spaces immediately following a delimiter is part + of the following field. + * lineterminator - specifies the character sequence which should + terminate rows. + * quoting - controls when quotes should be generated by the writer. + It can take on any of the following module constants: + + csv.QUOTE_MINIMAL means only when required, for example, when a + field contains either the quotechar or the delimiter + csv.QUOTE_ALL means that quotes are always placed around fields. + csv.QUOTE_NONNUMERIC means that quotes are always placed around + fields which do not parse as integers or floating point + numbers. + csv.QUOTE_STRINGS means that quotes are always placed around + fields which are strings. Note that the Python value None + is not a string. + csv.QUOTE_NOTNULL means that quotes are only placed around fields + that are not the Python value None. + csv.QUOTE_NONE means that quotes are never placed around fields. + * escapechar - specifies a one-character string used to escape + the delimiter when quoting is set to QUOTE_NONE. + * doublequote - controls the handling of quotes inside fields. When + True, two consecutive quotes are interpreted as one during read, + and when writing, each quote character embedded in the data is + written as two quotes """ import re import types -from _csv import Error, __version__, writer, reader, register_dialect, \ +from _csv import Error, writer, reader, register_dialect, \ unregister_dialect, get_dialect, list_dialects, \ field_size_limit, \ QUOTE_MINIMAL, QUOTE_ALL, QUOTE_NONNUMERIC, QUOTE_NONE, \ - QUOTE_STRINGS, QUOTE_NOTNULL, \ - __doc__ + QUOTE_STRINGS, QUOTE_NOTNULL from _csv import Dialect as _Dialect from io import StringIO __all__ = ["QUOTE_MINIMAL", "QUOTE_ALL", "QUOTE_NONNUMERIC", "QUOTE_NONE", "QUOTE_STRINGS", "QUOTE_NOTNULL", - "Error", "Dialect", "__doc__", "excel", "excel_tab", + "Error", "Dialect", "excel", "excel_tab", "field_size_limit", "reader", "writer", "register_dialect", "get_dialect", "list_dialects", "Sniffer", - "unregister_dialect", "__version__", "DictReader", "DictWriter", + "unregister_dialect", "DictReader", "DictWriter", "unix_dialect"] +__version__ = "1.0" + + class Dialect: """Describe a CSV dialect. 
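The restored csv module docstring above walks through dialect registration and the dialect settings; a short usage sketch follows (the dialect name and file name are made up for illustration, not part of the patch):

    import csv

    csv.register_dialect('semicolon', delimiter=';', quotechar='"',
                         quoting=csv.QUOTE_MINIMAL)

    with open('example.csv', 'w', newline='') as f:
        w = csv.writer(f, dialect='semicolon')
        w.writerow(['name', 'value'])
        w.writerow(['pi; approx', 3.14159])   # embedded delimiter gets quoted

    with open('example.csv', newline='') as f:
        for row in csv.reader(f, dialect='semicolon'):
            print(row)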
diff --git a/Lib/ctypes/_endian.py b/Lib/ctypes/_endian.py index 3febb3118b8230..6382dd22b8acc8 100644 --- a/Lib/ctypes/_endian.py +++ b/Lib/ctypes/_endian.py @@ -15,8 +15,8 @@ def _other_endian(typ): # if typ is array if isinstance(typ, _array_type): return _other_endian(typ._type_) * typ._length_ - # if typ is structure - if issubclass(typ, Structure): + # if typ is structure or union + if issubclass(typ, (Structure, Union)): return typ raise TypeError("This type does not support other endian: %s" % typ) diff --git a/Lib/dataclasses.py b/Lib/dataclasses.py index 2fba32b5ffbc1e..3335821f3f32e9 100644 --- a/Lib/dataclasses.py +++ b/Lib/dataclasses.py @@ -1332,58 +1332,69 @@ class C: def _asdict_inner(obj, dict_factory): - if type(obj) in _ATOMIC_TYPES: + obj_type = type(obj) + if obj_type in _ATOMIC_TYPES: return obj - elif _is_dataclass_instance(obj): - # fast path for the common case + elif hasattr(obj_type, _FIELDS): + # dataclass instance: fast path for the common case if dict_factory is dict: return { f.name: _asdict_inner(getattr(obj, f.name), dict) for f in fields(obj) } else: - result = [] - for f in fields(obj): - value = _asdict_inner(getattr(obj, f.name), dict_factory) - result.append((f.name, value)) - return dict_factory(result) - elif isinstance(obj, tuple) and hasattr(obj, '_fields'): - # obj is a namedtuple. Recurse into it, but the returned - # object is another namedtuple of the same type. This is - # similar to how other list- or tuple-derived classes are - # treated (see below), but we just need to create them - # differently because a namedtuple's __init__ needs to be - # called differently (see bpo-34363). - - # I'm not using namedtuple's _asdict() - # method, because: - # - it does not recurse in to the namedtuple fields and - # convert them to dicts (using dict_factory). - # - I don't actually want to return a dict here. The main - # use case here is json.dumps, and it handles converting - # namedtuples to lists. Admittedly we're losing some - # information here when we produce a json list instead of a - # dict. Note that if we returned dicts here instead of - # namedtuples, we could no longer call asdict() on a data - # structure where a namedtuple was used as a dict key. - - return type(obj)(*[_asdict_inner(v, dict_factory) for v in obj]) - elif isinstance(obj, (list, tuple)): - # Assume we can create an object of this type by passing in a - # generator (which is not true for namedtuples, handled - # above). - return type(obj)(_asdict_inner(v, dict_factory) for v in obj) - elif isinstance(obj, dict): - if hasattr(type(obj), 'default_factory'): + return dict_factory([ + (f.name, _asdict_inner(getattr(obj, f.name), dict_factory)) + for f in fields(obj) + ]) + # handle the builtin types first for speed; subclasses handled below + elif obj_type is list: + return [_asdict_inner(v, dict_factory) for v in obj] + elif obj_type is dict: + return { + _asdict_inner(k, dict_factory): _asdict_inner(v, dict_factory) + for k, v in obj.items() + } + elif obj_type is tuple: + return tuple([_asdict_inner(v, dict_factory) for v in obj]) + elif issubclass(obj_type, tuple): + if hasattr(obj, '_fields'): + # obj is a namedtuple. Recurse into it, but the returned + # object is another namedtuple of the same type. This is + # similar to how other list- or tuple-derived classes are + # treated (see below), but we just need to create them + # differently because a namedtuple's __init__ needs to be + # called differently (see bpo-34363). 
+ + # I'm not using namedtuple's _asdict() + # method, because: + # - it does not recurse in to the namedtuple fields and + # convert them to dicts (using dict_factory). + # - I don't actually want to return a dict here. The main + # use case here is json.dumps, and it handles converting + # namedtuples to lists. Admittedly we're losing some + # information here when we produce a json list instead of a + # dict. Note that if we returned dicts here instead of + # namedtuples, we could no longer call asdict() on a data + # structure where a namedtuple was used as a dict key. + return obj_type(*[_asdict_inner(v, dict_factory) for v in obj]) + else: + return obj_type(_asdict_inner(v, dict_factory) for v in obj) + elif issubclass(obj_type, dict): + if hasattr(obj_type, 'default_factory'): # obj is a defaultdict, which has a different constructor from # dict as it requires the default_factory as its first arg. - result = type(obj)(getattr(obj, 'default_factory')) + result = obj_type(obj.default_factory) for k, v in obj.items(): result[_asdict_inner(k, dict_factory)] = _asdict_inner(v, dict_factory) return result - return type(obj)((_asdict_inner(k, dict_factory), - _asdict_inner(v, dict_factory)) - for k, v in obj.items()) + return obj_type((_asdict_inner(k, dict_factory), + _asdict_inner(v, dict_factory)) + for k, v in obj.items()) + elif issubclass(obj_type, list): + # Assume we can create an object of this type by passing in a + # generator + return obj_type(_asdict_inner(v, dict_factory) for v in obj) else: return copy.deepcopy(obj) @@ -1416,11 +1427,10 @@ def _astuple_inner(obj, tuple_factory): if type(obj) in _ATOMIC_TYPES: return obj elif _is_dataclass_instance(obj): - result = [] - for f in fields(obj): - value = _astuple_inner(getattr(obj, f.name), tuple_factory) - result.append(value) - return tuple_factory(result) + return tuple_factory([ + _astuple_inner(getattr(obj, f.name), tuple_factory) + for f in fields(obj) + ]) elif isinstance(obj, tuple) and hasattr(obj, '_fields'): # obj is a namedtuple. Recurse into it, but the returned # object is another namedtuple of the same type. This is @@ -1558,14 +1568,14 @@ class C: return _replace(obj, **changes) -def _replace(obj, /, **changes): +def _replace(self, /, **changes): # We're going to mutate 'changes', but that's okay because it's a - # new dict, even if called with 'replace(obj, **my_changes)'. + # new dict, even if called with 'replace(self, **my_changes)'. # It's an error to have init=False fields in 'changes'. - # If a field is not in 'changes', read its value from the provided obj. + # If a field is not in 'changes', read its value from the provided 'self'. - for f in getattr(obj, _FIELDS).values(): + for f in getattr(self, _FIELDS).values(): # Only consider normal fields or InitVars. if f._field_type is _FIELD_CLASSVAR: continue @@ -1582,11 +1592,11 @@ def _replace(obj, /, **changes): if f._field_type is _FIELD_INITVAR and f.default is MISSING: raise TypeError(f"InitVar {f.name!r} " f'must be specified with replace()') - changes[f.name] = getattr(obj, f.name) + changes[f.name] = getattr(self, f.name) # Create the new object, which calls __init__() and # __post_init__() (if defined), using all of the init fields we've # added and/or left in 'changes'. If there are values supplied in # changes that aren't fields, this will correctly raise a # TypeError. 
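The comments above spell out how asdict() treats namedtuples (rebuilt as the same namedtuple type rather than converted to dicts) and how replace() fills unchanged fields from the original instance; a brief hedged sketch of that documented behaviour (the Point and Segment types are made up for illustration):

    from dataclasses import dataclass, asdict, replace
    from collections import namedtuple

    Point = namedtuple('Point', 'x y')

    @dataclass
    class Segment:
        start: Point
        end: Point
        tags: list

    s = Segment(Point(0, 0), Point(1, 1), ['a', 'b'])

    print(asdict(s))
    # {'start': Point(x=0, y=0), 'end': Point(x=1, y=1), 'tags': ['a', 'b']}

    print(replace(s, end=Point(2, 2)))   # start and tags are read from s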
- return obj.__class__(**changes) + return self.__class__(**changes) diff --git a/Lib/dis.py b/Lib/dis.py index 1a2f1032d500af..f05ea1a24f45a7 100644 --- a/Lib/dis.py +++ b/Lib/dis.py @@ -1032,11 +1032,16 @@ def main(): help='show inline caches') parser.add_argument('-O', '--show-offsets', action='store_true', help='show instruction offsets') - parser.add_argument('infile', type=argparse.FileType('rb'), nargs='?', default='-') + parser.add_argument('infile', nargs='?', default='-') args = parser.parse_args() - with args.infile as infile: - source = infile.read() - code = compile(source, args.infile.name, "exec") + if args.infile == '-': + name = '<stdin>' + source = sys.stdin.buffer.read() + else: + name = args.infile + with open(args.infile, 'rb') as infile: + source = infile.read() + code = compile(source, name, "exec") dis(code, show_caches=args.show_caches, show_offsets=args.show_offsets) if __name__ == "__main__": diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py index 0d6bd812475eea..5b653f66c18554 100644 --- a/Lib/email/_header_value_parser.py +++ b/Lib/email/_header_value_parser.py @@ -2766,6 +2766,7 @@ def _refold_parse_tree(parse_tree, *, policy): encoding = 'utf-8' if policy.utf8 else 'us-ascii' lines = [''] last_ew = None + last_charset = None wrap_as_ew_blocked = 0 want_encoding = False end_ew_not_allowed = Terminal('', 'wrap_as_ew_blocked') @@ -2820,8 +2821,14 @@ def _refold_parse_tree(parse_tree, *, policy): else: # It's a terminal, wrap it as an encoded word, possibly # combining it with previously encoded words if allowed. + if (last_ew is not None and + charset != last_charset and + (last_charset == 'unknown-8bit' or + last_charset == 'utf-8' and charset != 'us-ascii')): + last_ew = None last_ew = _fold_as_ew(tstr, lines, maxlen, last_ew, part.ew_combine_allowed, charset) + last_charset = charset want_encoding = False continue if len(tstr) <= maxlen - len(lines[-1]): diff --git a/Lib/fractions.py b/Lib/fractions.py index 6532d5d54e3c35..389ab386b6a8a4 100644 --- a/Lib/fractions.py +++ b/Lib/fractions.py @@ -55,17 +55,17 @@ def _hash_algorithm(numerator, denominator): return -2 if result == -1 else result _RATIONAL_FORMAT = re.compile(r""" - \A\s* # optional whitespace at the start, - (?P<sign>[-+]?) # an optional sign, then - (?=\d|\.\d) # lookahead for digit or .digit - (?P<num>\d*|\d+(_\d+)*) # numerator (possibly empty) - (?: # followed by - (?:\s*/\s*(?P<denom>\d+(_\d+)*))? # an optional denominator - | # or - (?:\.(?P<decimal>d*|\d+(_\d+)*))? # an optional fractional part - (?:E(?P<exp>[-+]?\d+(_\d+)*))? # and optional exponent + \A\s* # optional whitespace at the start, + (?P<sign>[-+]?) # an optional sign, then + (?=\d|\.\d) # lookahead for digit or .digit + (?P<num>\d*|\d+(_\d+)*) # numerator (possibly empty) + (?: # followed by + (?:\s*/\s*(?P<denom>\d+(_\d+)*))? # an optional denominator + | # or + (?:\.(?P<decimal>\d*|\d+(_\d+)*))? # an optional fractional part + (?:E(?P<exp>[-+]?\d+(_\d+)*))? # and optional exponent ) - \s*\Z # and optional whitespace to finish + \s*\Z # and optional whitespace to finish """, re.VERBOSE | re.IGNORECASE) diff --git a/Lib/idlelib/News3.txt b/Lib/idlelib/News3.txt index f38cc96eceb766..241b1f48e5c1d8 100644 --- a/Lib/idlelib/News3.txt +++ b/Lib/idlelib/News3.txt @@ -4,6 +4,15 @@ Released on 2024-10-xx ========================= +gh-96905: In idlelib code, stop redefining built-ins 'dict' and 'object'. + +gh-72284: Improve the lists of features, editor key bindings, +and shell key bindings in the IDLE doc.
+ +gh-113903: Fix rare failure of test.test_idle, in test_configdialog. + +gh-113729: Fix the "Help -> IDLE Doc" menu bug in 3.11.7 and 3.12.1. + gh-57795: Enter selected text into the Find box when opening a Replace dialog. Patch by Roger Serwy and Zackery Spytz. diff --git a/Lib/idlelib/debugger.py b/Lib/idlelib/debugger.py index f487b4c4b16a60..d90dbcd11f9f61 100644 --- a/Lib/idlelib/debugger.py +++ b/Lib/idlelib/debugger.py @@ -508,11 +508,11 @@ def show_source(self, index): class NamespaceViewer: "Global/local namespace viewer for debugger GUI." - def __init__(self, master, title, dict=None): + def __init__(self, master, title, odict=None): # XXX odict never passed. width = 0 height = 40 - if dict: - height = 20*len(dict) # XXX 20 == observed height of Entry widget + if odict: + height = 20*len(odict) # XXX 20 == observed height of Entry widget self.master = master self.title = title import reprlib @@ -533,24 +533,24 @@ def __init__(self, master, title, dict=None): canvas["yscrollcommand"] = vbar.set self.subframe = subframe = Frame(canvas) self.sfid = canvas.create_window(0, 0, window=subframe, anchor="nw") - self.load_dict(dict) + self.load_dict(odict) - dict = -1 + prev_odict = -1 # Needed for initial comparison below. - def load_dict(self, dict, force=0, rpc_client=None): - if dict is self.dict and not force: + def load_dict(self, odict, force=0, rpc_client=None): + if odict is self.prev_odict and not force: return subframe = self.subframe frame = self.frame for c in list(subframe.children.values()): c.destroy() - self.dict = None - if not dict: + self.prev_odict = None + if not odict: l = Label(subframe, text="None") l.grid(row=0, column=0) else: #names = sorted(dict) - ### + # # Because of (temporary) limitations on the dict_keys type (not yet # public or pickleable), have the subprocess to send a list of # keys, not a dict_keys object. sorted() will take a dict_keys @@ -560,12 +560,12 @@ def load_dict(self, dict, force=0, rpc_client=None): # interpreter gets into a loop requesting non-existing dict[0], # dict[1], dict[2], etc from the debugger_r.DictProxy. # TODO recheck above; see debugger_r 159ff, debugobj 60. - keys_list = dict.keys() + keys_list = odict.keys() names = sorted(keys_list) - ### + row = 0 for name in names: - value = dict[name] + value = odict[name] svalue = self.repr.repr(value) # repr(value) # Strip extra quotes caused by calling repr on the (already) # repr'd value sent across the RPC interface: @@ -577,7 +577,7 @@ def load_dict(self, dict, force=0, rpc_client=None): l.insert(0, svalue) l.grid(row=row, column=1, sticky="nw") row = row+1 - self.dict = dict + self.prev_odict = odict # XXX Could we use a callback for the following? subframe.update_idletasks() # Alas! 
width = subframe.winfo_reqwidth() diff --git a/Lib/idlelib/debugger_r.py b/Lib/idlelib/debugger_r.py index 26204438858d8a..ad3355d9f82765 100644 --- a/Lib/idlelib/debugger_r.py +++ b/Lib/idlelib/debugger_r.py @@ -125,16 +125,16 @@ def frame_attr(self, fid, name): def frame_globals(self, fid): frame = frametable[fid] - dict = frame.f_globals - did = id(dict) - dicttable[did] = dict + gdict = frame.f_globals + did = id(gdict) + dicttable[did] = gdict return did def frame_locals(self, fid): frame = frametable[fid] - dict = frame.f_locals - did = id(dict) - dicttable[did] = dict + ldict = frame.f_locals + did = id(ldict) + dicttable[did] = ldict return did def frame_code(self, fid): @@ -158,20 +158,17 @@ def code_filename(self, cid): def dict_keys(self, did): raise NotImplementedError("dict_keys not public or pickleable") -## dict = dicttable[did] -## return dict.keys() +## return dicttable[did].keys() - ### Needed until dict_keys is type is finished and pickealable. + ### Needed until dict_keys type is finished and pickleable. + # xxx finished. pickleable? ### Will probably need to extend rpc.py:SocketIO._proxify at that time. def dict_keys_list(self, did): - dict = dicttable[did] - return list(dict.keys()) + return list(dicttable[did].keys()) def dict_item(self, did, key): - dict = dicttable[did] - value = dict[key] - value = reprlib.repr(value) ### can't pickle module 'builtins' - return value + value = dicttable[did][key] + return reprlib.repr(value) # Can't pickle module 'builtins'. #----------end class IdbAdapter---------- diff --git a/Lib/idlelib/debugobj.py b/Lib/idlelib/debugobj.py index 156377f8ed26ac..fb448ece2fa25e 100644 --- a/Lib/idlelib/debugobj.py +++ b/Lib/idlelib/debugobj.py @@ -1,3 +1,5 @@ +"""Define tree items for debug stackviewer, which is only user. 
+""" # XXX TO DO: # - popup menu # - support partial or total redisplay @@ -17,9 +19,9 @@ myrepr.maxother = 100 class ObjectTreeItem(TreeItem): - def __init__(self, labeltext, object, setfunction=None): + def __init__(self, labeltext, object_, setfunction=None): self.labeltext = labeltext - self.object = object + self.object = object_ self.setfunction = setfunction def GetLabelText(self): return self.labeltext @@ -51,8 +53,8 @@ def GetSubList(self): item = make_objecttreeitem( str(key) + " =", value, - lambda value, key=key, object=self.object: - setattr(object, key, value)) + lambda value, key=key, object_=self.object: + setattr(object_, key, value)) sublist.append(item) return sublist @@ -85,8 +87,8 @@ def GetSubList(self): value = self.object[key] except KeyError: continue - def setfunction(value, key=key, object=self.object): - object[key] = value + def setfunction(value, key=key, object_=self.object): + object_[key] = value item = make_objecttreeitem(f"{key!r}:", value, setfunction) sublist.append(item) return sublist @@ -111,13 +113,13 @@ def keys(self): type: ClassTreeItem, } -def make_objecttreeitem(labeltext, object, setfunction=None): - t = type(object) +def make_objecttreeitem(labeltext, object_, setfunction=None): + t = type(object_) if t in dispatch: c = dispatch[t] else: c = ObjectTreeItem - return c(labeltext, object, setfunction) + return c(labeltext, object_, setfunction) def _debug_object_browser(parent): # htest # diff --git a/Lib/idlelib/help.html b/Lib/idlelib/help.html index 722406b81a8ae6..827d230b54e159 100644 --- a/Lib/idlelib/help.html +++ b/Lib/idlelib/help.html @@ -1,33 +1,31 @@ - - + - IDLE — Python 3.12.0a0 documentation + IDLE — Python 3.13.0a2 documentation - + + - - - + - + @@ -41,35 +39,48 @@ } } - + + + +
[Remaining Lib/idlelib/help.html hunks: regenerated navigation/footer markup whose HTML tags did not survive extraction; the only substantive change visible is the version string "3.12.0a0 Documentation »" becoming "3.13.0a2 Documentation »".]
  • diff --git a/Lib/idlelib/help.py b/Lib/idlelib/help.py index 3cc7e36e35555b..bdf4b2b29f11a2 100644 --- a/Lib/idlelib/help.py +++ b/Lib/idlelib/help.py @@ -102,7 +102,7 @@ def handle_starttag(self, tag, attrs): if self.level > 0: self.nested_dl = True elif tag == 'li': - s = '\n* ' if self.simplelist else '\n\n* ' + s = '\n* ' elif tag == 'dt': s = '\n\n' if not self.nested_dl else '\n' # Avoid extra line. self.nested_dl = False @@ -241,12 +241,13 @@ def __init__(self, parent, filename, title): Toplevel.__init__(self, parent) self.wm_title(title) self.protocol("WM_DELETE_WINDOW", self.destroy) - HelpFrame(self, filename).grid(column=0, row=0, sticky='nsew') + self.frame = HelpFrame(self, filename) + self.frame.grid(column=0, row=0, sticky='nsew') self.grid_columnconfigure(0, weight=1) self.grid_rowconfigure(0, weight=1) -def copy_strip(): +def copy_strip(): # pragma: no cover """Copy idle.html to idlelib/help.html, stripping trailing whitespace. Files with trailing whitespace cannot be pushed to the git cpython @@ -279,13 +280,13 @@ def copy_strip(): print(f'{src} copied to {dst}') -def _helpwindow(parent): +def show_idlehelp(parent): "Create HelpWindow; called from Idle Help event handler." filename = join(abspath(dirname(__file__)), 'help.html') - if not isfile(filename): + if not isfile(filename): # pragma: no cover # Try copy_strip, present message. return - HelpWindow(parent, filename, 'IDLE Help (%s)' % python_version()) + return HelpWindow(parent, filename, 'IDLE Doc (%s)' % python_version()) if __name__ == '__main__': @@ -293,4 +294,4 @@ def _helpwindow(parent): main('idlelib.idle_test.test_help', verbosity=2, exit=False) from idlelib.idle_test.htest import run - run(_helpwindow) + run(show_idlehelp) diff --git a/Lib/idlelib/idle_test/htest.py b/Lib/idlelib/idle_test/htest.py index 997f85ff5a78b2..a7293774eecaeb 100644 --- a/Lib/idlelib/idle_test/htest.py +++ b/Lib/idlelib/idle_test/htest.py @@ -190,13 +190,6 @@ ", [Cancel], or [X] prints None to shell" } -_helpwindow_spec = { - 'file': 'help', - 'kwds': {}, - 'msg': "If the help text displays, this works.\n" - "Text is selectable. Window is scrollable." - } - _io_binding_spec = { 'file': 'iomenu', 'kwds': {}, @@ -299,6 +292,13 @@ "Its only action is to close." } +show_idlehelp_spec = { + 'file': 'help', + 'kwds': {}, + 'msg': "If the help text displays, this works.\n" + "Text is selectable. Window is scrollable." + } + _sidebar_number_scrolling_spec = { 'file': 'sidebar', 'kwds': {}, diff --git a/Lib/idlelib/idle_test/test_calltip.py b/Lib/idlelib/idle_test/test_calltip.py index 15e1ff3f3cf717..28c196a42672fc 100644 --- a/Lib/idlelib/idle_test/test_calltip.py +++ b/Lib/idlelib/idle_test/test_calltip.py @@ -79,6 +79,7 @@ class SB: __call__ = None tiptest(list.append, '(self, object, /)' + append_doc) tiptest(List.append, '(self, object, /)' + append_doc) tiptest([].append, '(object, /)' + append_doc) + # The use of 'object' above matches the signature text. tiptest(types.MethodType, '(function, instance, /)\n' diff --git a/Lib/idlelib/idle_test/test_configdialog.py b/Lib/idlelib/idle_test/test_configdialog.py index 6f8518a9bb19d0..5099d093382445 100644 --- a/Lib/idlelib/idle_test/test_configdialog.py +++ b/Lib/idlelib/idle_test/test_configdialog.py @@ -420,20 +420,14 @@ def test_highlight_target_text_mouse(self): # Set highlight_target through clicking highlight_sample. 
eq = self.assertEqual d = self.page - - elem = {} - count = 0 hs = d.highlight_sample hs.focus_force() - hs.see(1.0) - hs.update_idletasks() - def tag_to_element(elem): - for element, tag in d.theme_elements.items(): - elem[tag] = element - - def click_it(start): - x, y, dx, dy = hs.bbox(start) + def click_char(index): + "Simulate click on character at *index*." + hs.see(index) + hs.update_idletasks() + x, y, dx, dy = hs.bbox(index) x += dx // 2 y += dy // 2 hs.event_generate('', x=0, y=0) @@ -441,17 +435,20 @@ def click_it(start): hs.event_generate('', x=x, y=y) hs.event_generate('', x=x, y=y) - # Flip theme_elements to make the tag the key. - tag_to_element(elem) + # Reverse theme_elements to make the tag the key. + elem = {tag: element for element, tag in d.theme_elements.items()} # If highlight_sample has a tag that isn't in theme_elements, there # will be a KeyError in the test run. + count = 0 for tag in hs.tag_names(): - for start_index in hs.tag_ranges(tag)[0::2]: - count += 1 - click_it(start_index) + try: + click_char(hs.tag_nextrange(tag, "1.0")[0]) eq(d.highlight_target.get(), elem[tag]) + count += 1 eq(d.set_highlight_target.called, count) + except IndexError: + pass # Skip unused theme_elements tag, like 'sel'. def test_highlight_sample_double_click(self): # Test double click on highlight_sample. diff --git a/Lib/idlelib/idle_test/test_help.py b/Lib/idlelib/idle_test/test_help.py index b542659981894d..c528d4e77f8ca7 100644 --- a/Lib/idlelib/idle_test/test_help.py +++ b/Lib/idlelib/idle_test/test_help.py @@ -1,4 +1,4 @@ -"Test help, coverage 87%." +"Test help, coverage 94%." from idlelib import help import unittest @@ -8,25 +8,27 @@ from tkinter import Tk -class HelpFrameTest(unittest.TestCase): +class IdleDocTest(unittest.TestCase): @classmethod def setUpClass(cls): "By itself, this tests that file parsed without exception." 
cls.root = root = Tk() root.withdraw() - helpfile = join(dirname(dirname(abspath(__file__))), 'help.html') - cls.frame = help.HelpFrame(root, helpfile) + cls.window = help.show_idlehelp(root) @classmethod def tearDownClass(cls): - del cls.frame + del cls.window cls.root.update_idletasks() cls.root.destroy() del cls.root - def test_line1(self): - text = self.frame.text + def test_1window(self): + self.assertIn('IDLE Doc', self.window.wm_title()) + + def test_4text(self): + text = self.window.frame.text self.assertEqual(text.get('1.0', '1.end'), ' IDLE ') diff --git a/Lib/idlelib/rpc.py b/Lib/idlelib/rpc.py index b08b80c9004551..3f0b2230dd185d 100644 --- a/Lib/idlelib/rpc.py +++ b/Lib/idlelib/rpc.py @@ -158,8 +158,8 @@ def debug(self, *args): s = s + " " + str(a) print(s, file=sys.__stderr__) - def register(self, oid, object): - self.objtable[oid] = object + def register(self, oid, object_): + self.objtable[oid] = object_ def unregister(self, oid): try: diff --git a/Lib/idlelib/stackviewer.py b/Lib/idlelib/stackviewer.py index 977c56ef15f2ae..95042d4debdc03 100644 --- a/Lib/idlelib/stackviewer.py +++ b/Lib/idlelib/stackviewer.py @@ -106,8 +106,8 @@ def GetSubList(self): value = self.object[key] except KeyError: continue - def setfunction(value, key=key, object=self.object): - object[key] = value + def setfunction(value, key=key, object_=self.object): + object_[key] = value item = make_objecttreeitem(key + " =", value, setfunction) sublist.append(item) return sublist diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py index d537598ac16433..a4d2b7e0184409 100644 --- a/Lib/importlib/_bootstrap_external.py +++ b/Lib/importlib/_bootstrap_external.py @@ -462,6 +462,8 @@ def _write_atomic(path, data, mode=0o666): # Python 3.13a1 3563 (Add CALL_KW and remove KW_NAMES) # Python 3.13a1 3564 (Removed oparg from YIELD_VALUE, changed oparg values of RESUME) # Python 3.13a1 3565 (Oparg of YIELD_VALUE indicates whether it is in a yield-from) +# Python 3.13a1 3566 (Emit JUMP_NO_INTERRUPT instead of JUMP for non-loop no-lineno cases) +# Python 3.13a1 3567 (Reimplement line number propagation by the compiler) # Python 3.14 will start with 3600 @@ -478,7 +480,7 @@ def _write_atomic(path, data, mode=0o666): # Whenever MAGIC_NUMBER is changed, the ranges in the magic_values array # in PC/launcher.c must also be updated. 
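For readers tracing the bytecode magic-number bump just below: the comment lines added above (3566, 3567) correspond to the constant change that follows. A small worked illustration of the encoding, not part of the patch:

    MAGIC_INT = 3567                                    # new value introduced below
    MAGIC_NUMBER = MAGIC_INT.to_bytes(2, 'little') + b'\r\n'
    assert MAGIC_NUMBER == b'\xef\r\r\n'                # 3567 == 0x0DEF, little-endian
    # _RAW_MAGIC_NUMBER is the same four bytes read back as a little-endian int.
    assert int.from_bytes(MAGIC_NUMBER, 'little') == 0x0A0D0DEF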
-MAGIC_NUMBER = (3565).to_bytes(2, 'little') + b'\r\n' +MAGIC_NUMBER = (3567).to_bytes(2, 'little') + b'\r\n' _RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little') # For import.c diff --git a/Lib/importlib/resources/__init__.py b/Lib/importlib/resources/__init__.py index e6b60c18caa052..ae83cd07c4d4fb 100644 --- a/Lib/importlib/resources/__init__.py +++ b/Lib/importlib/resources/__init__.py @@ -4,6 +4,7 @@ as_file, files, Package, + Anchor, ) from .abc import ResourceReader @@ -11,6 +12,7 @@ __all__ = [ 'Package', + 'Anchor', 'ResourceReader', 'as_file', 'files', diff --git a/Lib/json/tool.py b/Lib/json/tool.py index 0490b8c0be11df..fdfc3372bcca02 100644 --- a/Lib/json/tool.py +++ b/Lib/json/tool.py @@ -13,7 +13,6 @@ import argparse import json import sys -from pathlib import Path def main(): @@ -22,11 +21,9 @@ def main(): 'to validate and pretty-print JSON objects.') parser = argparse.ArgumentParser(prog=prog, description=description) parser.add_argument('infile', nargs='?', - type=argparse.FileType(encoding="utf-8"), help='a JSON file to be validated or pretty-printed', - default=sys.stdin) + default='-') parser.add_argument('outfile', nargs='?', - type=Path, help='write the output of infile to outfile', default=None) parser.add_argument('--sort-keys', action='store_true', default=False, @@ -59,23 +56,30 @@ def main(): dump_args['indent'] = None dump_args['separators'] = ',', ':' - with options.infile as infile: + try: + if options.infile == '-': + infile = sys.stdin + else: + infile = open(options.infile, encoding='utf-8') try: if options.json_lines: objs = (json.loads(line) for line in infile) else: objs = (json.load(infile),) + finally: + if infile is not sys.stdin: + infile.close() - if options.outfile is None: - out = sys.stdout - else: - out = options.outfile.open('w', encoding='utf-8') - with out as outfile: - for obj in objs: - json.dump(obj, outfile, **dump_args) - outfile.write('\n') - except ValueError as e: - raise SystemExit(e) + if options.outfile is None: + outfile = sys.stdout + else: + outfile = open(options.outfile, 'w', encoding='utf-8') + with outfile: + for obj in objs: + json.dump(obj, outfile, **dump_args) + outfile.write('\n') + except ValueError as e: + raise SystemExit(e) if __name__ == '__main__': diff --git a/Lib/mailbox.py b/Lib/mailbox.py index 0e1d49b399d077..81ea210cf815a4 100644 --- a/Lib/mailbox.py +++ b/Lib/mailbox.py @@ -1141,10 +1141,24 @@ def __len__(self): """Return a count of messages in the mailbox.""" return len(list(self.iterkeys())) + def _open_mh_sequences_file(self, text): + mode = '' if text else 'b' + kwargs = {'encoding': 'ASCII'} if text else {} + path = os.path.join(self._path, '.mh_sequences') + while True: + try: + return open(path, 'r+' + mode, **kwargs) + except FileNotFoundError: + pass + try: + return open(path, 'x+' + mode, **kwargs) + except FileExistsError: + pass + def lock(self): """Lock the mailbox.""" if not self._locked: - self._file = open(os.path.join(self._path, '.mh_sequences'), 'rb+') + self._file = self._open_mh_sequences_file(text=False) _lock_file(self._file) self._locked = True @@ -1225,8 +1239,9 @@ def get_sequences(self): def set_sequences(self, sequences): """Set sequences using the given name-to-key-list dictionary.""" - f = open(os.path.join(self._path, '.mh_sequences'), 'w', encoding='ASCII') + f = self._open_mh_sequences_file(text=True) try: + os.close(os.open(f.name, os.O_WRONLY | os.O_TRUNC)) for name, keys in sequences.items(): if len(keys) == 0: continue diff --git a/Lib/multiprocessing/managers.py 
b/Lib/multiprocessing/managers.py index 96cebc6eabec89..76b915de74d94e 100644 --- a/Lib/multiprocessing/managers.py +++ b/Lib/multiprocessing/managers.py @@ -156,7 +156,7 @@ def __init__(self, registry, address, authkey, serializer): Listener, Client = listener_client[serializer] # do authentication later - self.listener = Listener(address=address, backlog=16) + self.listener = Listener(address=address, backlog=128) self.address = self.listener.address self.id_to_obj = {'0': (None, ())} diff --git a/Lib/multiprocessing/resource_sharer.py b/Lib/multiprocessing/resource_sharer.py index 66076509a1202e..b8afb0fbed3a3c 100644 --- a/Lib/multiprocessing/resource_sharer.py +++ b/Lib/multiprocessing/resource_sharer.py @@ -123,7 +123,7 @@ def _start(self): from .connection import Listener assert self._listener is None, "Already have Listener" util.debug('starting listener and thread for sending handles') - self._listener = Listener(authkey=process.current_process().authkey) + self._listener = Listener(authkey=process.current_process().authkey, backlog=128) self._address = self._listener.address t = threading.Thread(target=self._serve) t.daemon = True diff --git a/Lib/ntpath.py b/Lib/ntpath.py index 3061a4a5ef4c56..aa0e018eb668c2 100644 --- a/Lib/ntpath.py +++ b/Lib/ntpath.py @@ -77,12 +77,6 @@ def normcase(s): return s.replace('/', '\\').lower() -# Return whether a path is absolute. -# Trivial in Posix, harder on Windows. -# For Windows it is absolute if it starts with a slash or backslash (current -# volume), or if a pathname after the volume-letter-and-colon or UNC-resource -# starts with a slash or backslash. - def isabs(s): """Test whether a path is absolute""" s = os.fspath(s) @@ -90,16 +84,15 @@ def isabs(s): sep = b'\\' altsep = b'/' colon_sep = b':\\' + double_sep = b'\\\\' else: sep = '\\' altsep = '/' colon_sep = ':\\' + double_sep = '\\\\' s = s[:3].replace(altsep, sep) # Absolute: UNC, device, and paths with a drive and root. - # LEGACY BUG: isabs("/x") should be false since the path has no drive. - if s.startswith(sep) or s.startswith(colon_sep, 1): - return True - return False + return s.startswith(colon_sep, 1) or s.startswith(double_sep) # Join two (or more) paths. diff --git a/Lib/pathlib/__init__.py b/Lib/pathlib/__init__.py index 79b8b4917f6cc4..b043aed12b3849 100644 --- a/Lib/pathlib/__init__.py +++ b/Lib/pathlib/__init__.py @@ -9,6 +9,10 @@ import ntpath import os import posixpath +import sys +import warnings +from itertools import chain +from _collections_abc import Sequence try: import pwd @@ -29,6 +33,44 @@ ] +# Reference for Windows paths can be found at +# https://learn.microsoft.com/en-gb/windows/win32/fileio/naming-a-file . +_WIN_RESERVED_NAMES = frozenset( + {'CON', 'PRN', 'AUX', 'NUL', 'CONIN$', 'CONOUT$'} | + {f'COM{c}' for c in '123456789\xb9\xb2\xb3'} | + {f'LPT{c}' for c in '123456789\xb9\xb2\xb3'} +) + + +class _PathParents(Sequence): + """This object provides sequence-like access to the logical ancestors + of a path. 
Don't try to construct it yourself.""" + __slots__ = ('_path', '_drv', '_root', '_tail') + + def __init__(self, path): + self._path = path + self._drv = path.drive + self._root = path.root + self._tail = path._tail + + def __len__(self): + return len(self._tail) + + def __getitem__(self, idx): + if isinstance(idx, slice): + return tuple(self[i] for i in range(*idx.indices(len(self)))) + + if idx >= len(self) or idx < -len(self): + raise IndexError(idx) + if idx < 0: + idx += len(self) + return self._path._from_parsed_parts(self._drv, self._root, + self._tail[:-idx - 1]) + + def __repr__(self): + return "<{}.parents>".format(type(self._path).__name__) + + UnsupportedOperation = _abc.UnsupportedOperation @@ -43,6 +85,24 @@ class PurePath(_abc.PurePathBase): """ __slots__ = ( + # The `_raw_paths` slot stores unnormalized string paths. This is set + # in the `__init__()` method. + '_raw_paths', + + # The `_drv`, `_root` and `_tail_cached` slots store parsed and + # normalized parts of the path. They are set when any of the `drive`, + # `root` or `_tail` properties are accessed for the first time. The + # three-part division corresponds to the result of + # `os.path.splitroot()`, except that the tail is further split on path + # separators (i.e. it is a list of strings), and that the root and + # tail are normalized. + '_drv', '_root', '_tail_cached', + + # The `_str` slot stores the string representation of the path, + # computed from the drive, root and tail when `__str__()` is called + # for the first time. It's used to implement `_str_normcase` + '_str', + # The `_str_normcase_cached` slot stores the string path with # normalized case. It is set when the `_str_normcase` property is # accessed for the first time. It's used to implement `__eq__()` @@ -93,7 +153,26 @@ def __init__(self, *args): paths.append(path) # Avoid calling super().__init__, as an optimisation self._raw_paths = paths - self._resolving = False + + def joinpath(self, *pathsegments): + """Combine this path with one or several arguments, and return a + new path representing either a subpath (if all arguments are relative + paths) or a totally different path (if one of the arguments is + anchored). + """ + return self.with_segments(self, *pathsegments) + + def __truediv__(self, key): + try: + return self.with_segments(self, key) + except TypeError: + return NotImplemented + + def __rtruediv__(self, key): + try: + return self.with_segments(key, self) + except TypeError: + return NotImplemented def __reduce__(self): # Using the parts tuple helps share interned path parts @@ -164,6 +243,209 @@ def __ge__(self, other): return NotImplemented return self._parts_normcase >= other._parts_normcase + def __str__(self): + """Return the string representation of the path, suitable for + passing to system calls.""" + try: + return self._str + except AttributeError: + self._str = self._format_parsed_parts(self.drive, self.root, + self._tail) or '.' + return self._str + + @classmethod + def _format_parsed_parts(cls, drv, root, tail): + if drv or root: + return drv + root + cls.pathmod.sep.join(tail) + elif tail and cls.pathmod.splitdrive(tail[0])[0]: + tail = ['.'] + tail + return cls.pathmod.sep.join(tail) + + def _from_parsed_parts(self, drv, root, tail): + path_str = self._format_parsed_parts(drv, root, tail) + path = self.with_segments(path_str) + path._str = path_str or '.' 
+ path._drv = drv + path._root = root + path._tail_cached = tail + return path + + @classmethod + def _parse_path(cls, path): + if not path: + return '', '', [] + sep = cls.pathmod.sep + altsep = cls.pathmod.altsep + if altsep: + path = path.replace(altsep, sep) + drv, root, rel = cls.pathmod.splitroot(path) + if not root and drv.startswith(sep) and not drv.endswith(sep): + drv_parts = drv.split(sep) + if len(drv_parts) == 4 and drv_parts[2] not in '?.': + # e.g. //server/share + root = sep + elif len(drv_parts) == 6: + # e.g. //?/unc/server/share + root = sep + parsed = [sys.intern(str(x)) for x in rel.split(sep) if x and x != '.'] + return drv, root, parsed + + @property + def _raw_path(self): + """The joined but unnormalized path.""" + paths = self._raw_paths + if len(paths) == 0: + path = '' + elif len(paths) == 1: + path = paths[0] + else: + path = self.pathmod.join(*paths) + return path + + @property + def drive(self): + """The drive prefix (letter or UNC path), if any.""" + try: + return self._drv + except AttributeError: + self._drv, self._root, self._tail_cached = self._parse_path(self._raw_path) + return self._drv + + @property + def root(self): + """The root of the path, if any.""" + try: + return self._root + except AttributeError: + self._drv, self._root, self._tail_cached = self._parse_path(self._raw_path) + return self._root + + @property + def _tail(self): + try: + return self._tail_cached + except AttributeError: + self._drv, self._root, self._tail_cached = self._parse_path(self._raw_path) + return self._tail_cached + + @property + def anchor(self): + """The concatenation of the drive and root, or ''.""" + return self.drive + self.root + + @property + def parts(self): + """An object providing sequence-like access to the + components in the filesystem path.""" + if self.drive or self.root: + return (self.drive + self.root,) + tuple(self._tail) + else: + return tuple(self._tail) + + @property + def parent(self): + """The logical parent of the path.""" + drv = self.drive + root = self.root + tail = self._tail + if not tail: + return self + return self._from_parsed_parts(drv, root, tail[:-1]) + + @property + def parents(self): + """A sequence of this path's logical parents.""" + # The value of this property should not be cached on the path object, + # as doing so would introduce a reference cycle. + return _PathParents(self) + + @property + def name(self): + """The final path component, if any.""" + tail = self._tail + if not tail: + return '' + return tail[-1] + + def with_name(self, name): + """Return a new path with the file name changed.""" + m = self.pathmod + if not name or m.sep in name or (m.altsep and m.altsep in name) or name == '.': + raise ValueError(f"Invalid name {name!r}") + tail = self._tail.copy() + if not tail: + raise ValueError(f"{self!r} has an empty name") + tail[-1] = name + return self._from_parsed_parts(self.drive, self.root, tail) + + def relative_to(self, other, /, *_deprecated, walk_up=False): + """Return the relative path to another path identified by the passed + arguments. If the operation is not possible (because this is not + related to the other path), raise ValueError. + + The *walk_up* parameter controls whether `..` may be used to resolve + the path. 
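The *walk_up* parameter documented above is easiest to see with a concrete case; a minimal usage sketch (not from the patch), with the implementation following below:

    from pathlib import PurePosixPath

    p = PurePosixPath('/etc/ssl/certs')
    assert p.relative_to('/etc') == PurePosixPath('ssl/certs')
    assert p.relative_to('/usr', walk_up=True) == PurePosixPath('../etc/ssl/certs')
    try:
        p.relative_to('/usr')        # not a subpath and walk_up is False
    except ValueError:
        pass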
+ """ + if _deprecated: + msg = ("support for supplying more than one positional argument " + "to pathlib.PurePath.relative_to() is deprecated and " + "scheduled for removal in Python 3.14") + warnings.warn(msg, DeprecationWarning, stacklevel=2) + other = self.with_segments(other, *_deprecated) + elif not isinstance(other, PurePath): + other = self.with_segments(other) + for step, path in enumerate(chain([other], other.parents)): + if path == self or path in self.parents: + break + elif not walk_up: + raise ValueError(f"{str(self)!r} is not in the subpath of {str(other)!r}") + elif path.name == '..': + raise ValueError(f"'..' segment in {str(other)!r} cannot be walked") + else: + raise ValueError(f"{str(self)!r} and {str(other)!r} have different anchors") + parts = ['..'] * step + self._tail[len(path._tail):] + return self._from_parsed_parts('', '', parts) + + def is_relative_to(self, other, /, *_deprecated): + """Return True if the path is relative to another path or False. + """ + if _deprecated: + msg = ("support for supplying more than one argument to " + "pathlib.PurePath.is_relative_to() is deprecated and " + "scheduled for removal in Python 3.14") + warnings.warn(msg, DeprecationWarning, stacklevel=2) + other = self.with_segments(other, *_deprecated) + elif not isinstance(other, PurePath): + other = self.with_segments(other) + return other == self or other in self.parents + + def is_absolute(self): + """True if the path is absolute (has both a root and, if applicable, + a drive).""" + if self.pathmod is posixpath: + # Optimization: work with raw paths on POSIX. + for path in self._raw_paths: + if path.startswith('/'): + return True + return False + return self.pathmod.isabs(self) + + def is_reserved(self): + """Return True if the path contains one of the special names reserved + by the system, if any.""" + if self.pathmod is not ntpath or not self.name: + return False + + # NOTE: the rules for reserved names seem somewhat complicated + # (e.g. r"..\NUL" is reserved but not r"foo\NUL" if "foo" does not + # exist). We err on the side of caution and return True for paths + # which are not considered reserved by Windows. + if self.drive.startswith('\\\\'): + # UNC paths are never reserved. + return False + name = self.name.partition('.')[0].partition(':')[0].rstrip(' ') + return name.upper() in _WIN_RESERVED_NAMES + def as_uri(self): """Return the path as a URI.""" if not self.is_absolute(): @@ -185,6 +467,29 @@ def as_uri(self): from urllib.parse import quote_from_bytes return prefix + quote_from_bytes(os.fsencode(path)) + @property + def _pattern_stack(self): + """Stack of path components, to be used with patterns in glob().""" + parts = self._tail.copy() + pattern = self._raw_path + if self.anchor: + raise NotImplementedError("Non-relative patterns are unsupported") + elif not parts: + raise ValueError("Unacceptable pattern: {!r}".format(pattern)) + elif pattern[-1] in (self.pathmod.sep, self.pathmod.altsep): + # GH-65238: pathlib doesn't preserve trailing slash. Add it back. + parts.append('') + elif parts[-1] == '**': + # GH-70303: '**' only matches directories. Add trailing slash. + warnings.warn( + "Pattern ending '**' will match files and directories in a " + "future Python release. Add a trailing slash to match only " + "directories and remove this warning.", + FutureWarning, 4) + parts.append('') + parts.reverse() + return parts + # Subclassing os.PathLike makes isinstance() checks slower, # which in turn makes Path construction slower. Register instead! 
@@ -230,7 +535,6 @@ def _unsupported(cls, method_name): def __init__(self, *args, **kwargs): if kwargs: - import warnings msg = ("support for supplying keyword arguments to pathlib.PurePath " "is deprecated and scheduled for removal in Python {remove}") warnings._deprecated("pathlib.PurePath(**kwargs)", msg, remove=(3, 14)) @@ -309,6 +613,52 @@ def _make_child_entry(self, entry): path._tail_cached = self._tail + [entry.name] return path + def _make_child_relpath(self, name): + if not name: + return self + path_str = str(self) + tail = self._tail + if tail: + path_str = f'{path_str}{self.pathmod.sep}{name}' + elif path_str != '.': + path_str = f'{path_str}{name}' + else: + path_str = name + path = self.with_segments(path_str) + path._str = path_str + path._drv = self.drive + path._root = self.root + path._tail_cached = tail + [name] + return path + + def glob(self, pattern, *, case_sensitive=None, follow_symlinks=None): + """Iterate over this subtree and yield all existing files (of any + kind, including directories) matching the given relative pattern. + """ + sys.audit("pathlib.Path.glob", self, pattern) + if not isinstance(pattern, PurePath): + pattern = self.with_segments(pattern) + return _abc.PathBase.glob( + self, pattern, case_sensitive=case_sensitive, follow_symlinks=follow_symlinks) + + def rglob(self, pattern, *, case_sensitive=None, follow_symlinks=None): + """Recursively yield all existing files (of any kind, including + directories) matching the given relative pattern, anywhere in + this subtree. + """ + sys.audit("pathlib.Path.rglob", self, pattern) + if not isinstance(pattern, PurePath): + pattern = self.with_segments(pattern) + pattern = '**' / pattern + return _abc.PathBase.glob( + self, pattern, case_sensitive=case_sensitive, follow_symlinks=follow_symlinks) + + def walk(self, top_down=True, on_error=None, follow_symlinks=False): + """Walk the directory tree from this directory, similar to os.walk().""" + sys.audit("pathlib.Path.walk", self, on_error, follow_symlinks) + return _abc.PathBase.walk( + self, top_down=top_down, on_error=on_error, follow_symlinks=follow_symlinks) + def absolute(self): """Return an absolute version of this path No normalization or symlink resolution is performed. diff --git a/Lib/pathlib/_abc.py b/Lib/pathlib/_abc.py index d76bc054fe498d..553e1a399061d3 100644 --- a/Lib/pathlib/_abc.py +++ b/Lib/pathlib/_abc.py @@ -1,28 +1,24 @@ +""" +Abstract base classes for rich path objects. + +This module is published as a PyPI package called "pathlib-abc". + +This module is also a *PRIVATE* part of the Python standard library, where +it's developed alongside pathlib. If it finds success and maturity as a PyPI +package, it could become a public part of the standard library. + +Two base classes are defined here -- PurePathBase and PathBase -- that +resemble pathlib's PurePath and Path respectively. +""" + import functools -import ntpath -import posixpath -import sys -import warnings -from _collections_abc import Sequence from errno import ENOENT, ENOTDIR, EBADF, ELOOP, EINVAL -from itertools import chain from stat import S_ISDIR, S_ISLNK, S_ISREG, S_ISSOCK, S_ISBLK, S_ISCHR, S_ISFIFO # # Internals # -# Maximum number of symlinks to follow in PathBase.resolve() -_MAX_SYMLINKS = 40 - -# Reference for Windows paths can be found at -# https://learn.microsoft.com/en-gb/windows/win32/fileio/naming-a-file . 
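To make the intent of the new private ABC concrete, here is a rough sketch (an assumption about this development snapshot, not an official example): a PurePathBase subclass only needs to point 'pathmod' at a module implementing the small os.path subset described below.

    import posixpath
    from pathlib._abc import PurePathBase   # private module; name and API may change

    class MyPurePath(PurePathBase):
        pathmod = posixpath                 # reuse the posixpath implementation

    p = MyPurePath('/srv', 'data', 'file.txt')
    assert str(p) == '/srv/data/file.txt'
    assert p.name == 'file.txt'
    assert str(p.parent) == '/srv/data'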
-_WIN_RESERVED_NAMES = frozenset( - {'CON', 'PRN', 'AUX', 'NUL', 'CONIN$', 'CONOUT$'} | - {f'COM{c}' for c in '123456789\xb9\xb2\xb3'} | - {f'LPT{c}' for c in '123456789\xb9\xb2\xb3'} -) - _WINERROR_NOT_READY = 21 # drive exists but is not accessible _WINERROR_INVALID_NAME = 123 # fix for bpo-35306 _WINERROR_CANT_RESOLVE_FILENAME = 1921 # broken symlink pointing to itself @@ -67,6 +63,12 @@ def _compile_pattern(pat, sep, case_sensitive): return re.compile(regex, flags=flags).match +def _select_special(paths, part): + """Yield special literal children of the given paths.""" + for path in paths: + yield path._make_child_relpath(part) + + def _select_children(parent_paths, dir_only, follow_symlinks, match): """Yield direct children of given paths, filtering by name and type.""" if follow_symlinks is None: @@ -139,33 +141,50 @@ class UnsupportedOperation(NotImplementedError): pass -class _PathParents(Sequence): - """This object provides sequence-like access to the logical ancestors - of a path. Don't try to construct it yourself.""" - __slots__ = ('_path', '_drv', '_root', '_tail') +class PathModuleBase: + """Base class for path modules, which do low-level path manipulation. - def __init__(self, path): - self._path = path - self._drv = path.drive - self._root = path.root - self._tail = path._tail + Path modules provide a subset of the os.path API, specifically those + functions needed to provide PurePathBase functionality. Each PurePathBase + subclass references its path module via a 'pathmod' class attribute. - def __len__(self): - return len(self._tail) + Every method in this base class raises an UnsupportedOperation exception. + """ - def __getitem__(self, idx): - if isinstance(idx, slice): - return tuple(self[i] for i in range(*idx.indices(len(self)))) + @classmethod + def _unsupported(cls, attr): + raise UnsupportedOperation(f"{cls.__name__}.{attr} is unsupported") - if idx >= len(self) or idx < -len(self): - raise IndexError(idx) - if idx < 0: - idx += len(self) - return self._path._from_parsed_parts(self._drv, self._root, - self._tail[:-idx - 1]) + @property + def sep(self): + """The character used to separate path components.""" + self._unsupported('sep') - def __repr__(self): - return "<{}.parents>".format(type(self._path).__name__) + def join(self, path, *paths): + """Join path segments.""" + self._unsupported('join()') + + def split(self, path): + """Split the path into a pair (head, tail), where *head* is everything + before the final path separator, and *tail* is everything after. + Either part may be empty. + """ + self._unsupported('split()') + + def splitdrive(self, path): + """Split the path into a 2-item tuple (drive, tail), where *drive* is + a device name or mount point, and *tail* is everything after the + drive. Either part may be empty.""" + self._unsupported('splitdrive()') + + def normcase(self, path): + """Normalize the case of the path.""" + self._unsupported('normcase()') + + def isabs(self, path): + """Returns whether the path is absolute, i.e. unaffected by the + current directory or drive.""" + self._unsupported('isabs()') class PurePathBase: @@ -178,33 +197,19 @@ class PurePathBase: """ __slots__ = ( - # The `_raw_paths` slot stores unnormalized string paths. This is set - # in the `__init__()` method. - '_raw_paths', - - # The `_drv`, `_root` and `_tail_cached` slots store parsed and - # normalized parts of the path. They are set when any of the `drive`, - # `root` or `_tail` properties are accessed for the first time. 
The - # three-part division corresponds to the result of - # `os.path.splitroot()`, except that the tail is further split on path - # separators (i.e. it is a list of strings), and that the root and - # tail are normalized. - '_drv', '_root', '_tail_cached', - - # The `_str` slot stores the string representation of the path, - # computed from the drive, root and tail when `__str__()` is called - # for the first time. It's used to implement `_str_normcase` - '_str', + # The `_raw_path` slot store a joined string path. This is set in the + # `__init__()` method. + '_raw_path', # The '_resolving' slot stores a boolean indicating whether the path # is being processed by `PathBase.resolve()`. This prevents duplicate # work from occurring when `resolve()` calls `stat()` or `readlink()`. '_resolving', ) - pathmod = posixpath + pathmod = PathModuleBase() - def __init__(self, *paths): - self._raw_paths = paths + def __init__(self, path, *paths): + self._raw_path = self.pathmod.join(path, *paths) if paths else path self._resolving = False def with_segments(self, *pathsegments): @@ -214,65 +219,10 @@ def with_segments(self, *pathsegments): """ return type(self)(*pathsegments) - @classmethod - def _parse_path(cls, path): - if not path: - return '', '', [] - sep = cls.pathmod.sep - altsep = cls.pathmod.altsep - if altsep: - path = path.replace(altsep, sep) - drv, root, rel = cls.pathmod.splitroot(path) - if not root and drv.startswith(sep) and not drv.endswith(sep): - drv_parts = drv.split(sep) - if len(drv_parts) == 4 and drv_parts[2] not in '?.': - # e.g. //server/share - root = sep - elif len(drv_parts) == 6: - # e.g. //?/unc/server/share - root = sep - parsed = [sys.intern(str(x)) for x in rel.split(sep) if x and x != '.'] - return drv, root, parsed - - def _load_parts(self): - paths = self._raw_paths - if len(paths) == 0: - path = '' - elif len(paths) == 1: - path = paths[0] - else: - path = self.pathmod.join(*paths) - drv, root, tail = self._parse_path(path) - self._drv = drv - self._root = root - self._tail_cached = tail - - def _from_parsed_parts(self, drv, root, tail): - path_str = self._format_parsed_parts(drv, root, tail) - path = self.with_segments(path_str) - path._str = path_str or '.' - path._drv = drv - path._root = root - path._tail_cached = tail - return path - - @classmethod - def _format_parsed_parts(cls, drv, root, tail): - if drv or root: - return drv + root + cls.pathmod.sep.join(tail) - elif tail and cls.pathmod.splitdrive(tail[0])[0]: - tail = ['.'] + tail - return cls.pathmod.sep.join(tail) - def __str__(self): """Return the string representation of the path, suitable for passing to system calls.""" - try: - return self._str - except AttributeError: - self._str = self._format_parsed_parts(self.drive, self.root, - self._tail) or '.' 
- return self._str + return self._raw_path def as_posix(self): """Return the string representation of the path with forward (/) @@ -282,42 +232,22 @@ def as_posix(self): @property def drive(self): """The drive prefix (letter or UNC path), if any.""" - try: - return self._drv - except AttributeError: - self._load_parts() - return self._drv + return self.pathmod.splitdrive(self.anchor)[0] @property def root(self): """The root of the path, if any.""" - try: - return self._root - except AttributeError: - self._load_parts() - return self._root - - @property - def _tail(self): - try: - return self._tail_cached - except AttributeError: - self._load_parts() - return self._tail_cached + return self.pathmod.splitdrive(self.anchor)[1] @property def anchor(self): """The concatenation of the drive and root, or ''.""" - anchor = self.drive + self.root - return anchor + return self._stack[0] @property def name(self): """The final path component, if any.""" - tail = self._tail - if not tail: - return '' - return tail[-1] + return self.pathmod.split(self._raw_path)[1] @property def suffix(self): @@ -358,14 +288,10 @@ def stem(self): def with_name(self, name): """Return a new path with the file name changed.""" - m = self.pathmod - if not name or m.sep in name or (m.altsep and m.altsep in name) or name == '.': + split = self.pathmod.split + if split(name)[0]: raise ValueError(f"Invalid name {name!r}") - tail = self._tail.copy() - if not tail: - raise ValueError(f"{self!r} has an empty name") - tail[-1] = name - return self._from_parsed_parts(self.drive, self.root, tail) + return self.with_segments(split(self._raw_path)[0], name) def with_stem(self, stem): """Return a new path with the stem changed.""" @@ -383,7 +309,7 @@ def with_suffix(self, suffix): else: raise ValueError(f"Invalid suffix {suffix!r}") - def relative_to(self, other, /, *_deprecated, walk_up=False): + def relative_to(self, other, *, walk_up=False): """Return the relative path to another path identified by the passed arguments. If the operation is not possible (because this is not related to the other path), raise ValueError. @@ -391,47 +317,55 @@ def relative_to(self, other, /, *_deprecated, walk_up=False): The *walk_up* parameter controls whether `..` may be used to resolve the path. """ - if _deprecated: - msg = ("support for supplying more than one positional argument " - "to pathlib.PurePath.relative_to() is deprecated and " - "scheduled for removal in Python 3.14") - warnings.warn(msg, DeprecationWarning, stacklevel=2) - other = self.with_segments(other, *_deprecated) - elif not isinstance(other, PurePathBase): + if not isinstance(other, PurePathBase): other = self.with_segments(other) - for step, path in enumerate(chain([other], other.parents)): - if path == self or path in self.parents: - break + anchor0, parts0 = self._stack + anchor1, parts1 = other._stack + if isinstance(anchor0, str) != isinstance(anchor1, str): + raise TypeError(f"{self._raw_path!r} and {other._raw_path!r} have different types") + if anchor0 != anchor1: + raise ValueError(f"{self._raw_path!r} and {other._raw_path!r} have different anchors") + while parts0 and parts1 and parts0[-1] == parts1[-1]: + parts0.pop() + parts1.pop() + for part in parts1: + if not part or part == '.': + pass elif not walk_up: - raise ValueError(f"{str(self)!r} is not in the subpath of {str(other)!r}") - elif path.name == '..': - raise ValueError(f"'..' 
segment in {str(other)!r} cannot be walked") - else: - raise ValueError(f"{str(self)!r} and {str(other)!r} have different anchors") - parts = ['..'] * step + self._tail[len(path._tail):] - return self._from_parsed_parts('', '', parts) + raise ValueError(f"{self._raw_path!r} is not in the subpath of {other._raw_path!r}") + elif part == '..': + raise ValueError(f"'..' segment in {other._raw_path!r} cannot be walked") + else: + parts0.append('..') + return self.with_segments('', *reversed(parts0)) - def is_relative_to(self, other, /, *_deprecated): + def is_relative_to(self, other): """Return True if the path is relative to another path or False. """ - if _deprecated: - msg = ("support for supplying more than one argument to " - "pathlib.PurePath.is_relative_to() is deprecated and " - "scheduled for removal in Python 3.14") - warnings.warn(msg, DeprecationWarning, stacklevel=2) - other = self.with_segments(other, *_deprecated) - elif not isinstance(other, PurePathBase): + if not isinstance(other, PurePathBase): other = self.with_segments(other) - return other == self or other in self.parents + anchor0, parts0 = self._stack + anchor1, parts1 = other._stack + if isinstance(anchor0, str) != isinstance(anchor1, str): + raise TypeError(f"{self._raw_path!r} and {other._raw_path!r} have different types") + if anchor0 != anchor1: + return False + while parts0 and parts1 and parts0[-1] == parts1[-1]: + parts0.pop() + parts1.pop() + for part in parts1: + if part and part != '.': + return False + return True @property def parts(self): """An object providing sequence-like access to the components in the filesystem path.""" - if self.drive or self.root: - return (self.drive + self.root,) + tuple(self._tail) - else: - return tuple(self._tail) + anchor, parts = self._stack + if anchor: + parts.append(anchor) + return tuple(reversed(parts)) def joinpath(self, *pathsegments): """Combine this path with one or several arguments, and return a @@ -439,69 +373,73 @@ def joinpath(self, *pathsegments): paths) or a totally different path (if one of the arguments is anchored). """ - return self.with_segments(*self._raw_paths, *pathsegments) + return self.with_segments(self._raw_path, *pathsegments) def __truediv__(self, key): try: - return self.joinpath(key) + return self.with_segments(self._raw_path, key) except TypeError: return NotImplemented def __rtruediv__(self, key): try: - return self.with_segments(key, *self._raw_paths) + return self.with_segments(key, self._raw_path) except TypeError: return NotImplemented + @property + def _stack(self): + """ + Split the path into a 2-tuple (anchor, parts), where *anchor* is the + uppermost parent of the path (equivalent to path.parents[-1]), and + *parts* is a reversed list of parts following the anchor. 
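The (anchor, parts) decomposition that the _stack docstring above describes can be reproduced with posixpath alone; a small standalone sketch (not the patch's code):

    import posixpath

    def stack(path):
        """Return (anchor, reversed parts) the way _stack describes."""
        parent, name = posixpath.split(path)
        names = []
        while path != parent:
            names.append(name)
            path = parent
            parent, name = posixpath.split(path)
        return path, names

    assert stack('/etc/ssl/certs') == ('/', ['certs', 'ssl', 'etc'])
    assert stack('a/b') == ('', ['b', 'a'])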
+ """ + split = self.pathmod.split + path = self._raw_path + parent, name = split(path) + names = [] + while path != parent: + names.append(name) + path = parent + parent, name = split(path) + return path, names + @property def parent(self): """The logical parent of the path.""" - drv = self.drive - root = self.root - tail = self._tail - if not tail: - return self - path = self._from_parsed_parts(drv, root, tail[:-1]) - path._resolving = self._resolving - return path + path = self._raw_path + parent = self.pathmod.split(path)[0] + if path != parent: + parent = self.with_segments(parent) + parent._resolving = self._resolving + return parent + return self @property def parents(self): """A sequence of this path's logical parents.""" - # The value of this property should not be cached on the path object, - # as doing so would introduce a reference cycle. - return _PathParents(self) + split = self.pathmod.split + path = self._raw_path + parent = split(path)[0] + parents = [] + while path != parent: + parents.append(self.with_segments(parent)) + path = parent + parent = split(path)[0] + return tuple(parents) def is_absolute(self): """True if the path is absolute (has both a root and, if applicable, a drive).""" - if self.pathmod is ntpath: - # ntpath.isabs() is defective - see GH-44626. - return bool(self.drive and self.root) - elif self.pathmod is posixpath: - # Optimization: work with raw paths on POSIX. - for path in self._raw_paths: - if path.startswith('/'): - return True - return False - else: - return self.pathmod.isabs(str(self)) - - def is_reserved(self): - """Return True if the path contains one of the special names reserved - by the system, if any.""" - if self.pathmod is posixpath or not self._tail: - return False + return self.pathmod.isabs(self._raw_path) - # NOTE: the rules for reserved names seem somewhat complicated - # (e.g. r"..\NUL" is reserved but not r"foo\NUL" if "foo" does not - # exist). We err on the side of caution and return True for paths - # which are not considered reserved by Windows. - if self.drive.startswith('\\\\'): - # UNC paths are never reserved. 
- return False - name = self._tail[-1].partition('.')[0].partition(':')[0].rstrip(' ') - return name.upper() in _WIN_RESERVED_NAMES + @property + def _pattern_stack(self): + """Stack of path components, to be used with patterns in glob().""" + anchor, parts = self._stack + if anchor: + raise NotImplementedError("Non-relative patterns are unsupported") + return parts def match(self, path_pattern, *, case_sensitive=None): """ @@ -512,11 +450,10 @@ def match(self, path_pattern, *, case_sensitive=None): if case_sensitive is None: case_sensitive = _is_case_sensitive(self.pathmod) sep = path_pattern.pathmod.sep - pattern_str = str(path_pattern) - if path_pattern.drive or path_pattern.root: - pass - elif path_pattern._tail: - pattern_str = f'**{sep}{pattern_str}' + if path_pattern.anchor: + pattern_str = str(path_pattern) + elif path_pattern.parts: + pattern_str = str('**' / path_pattern) else: raise ValueError("empty pattern") match = _compile_pattern(pattern_str, sep, case_sensitive) @@ -539,6 +476,9 @@ class PathBase(PurePathBase): """ __slots__ = () + # Maximum number of symlinks to follow in resolve() + _max_symlinks = 40 + @classmethod def _unsupported(cls, method_name): msg = f"{cls.__name__}.{method_name}() is unsupported" @@ -792,96 +732,41 @@ def _make_child_entry(self, entry): return entry def _make_child_relpath(self, name): - path_str = str(self) - tail = self._tail - if tail: - path_str = f'{path_str}{self.pathmod.sep}{name}' - elif path_str != '.': - path_str = f'{path_str}{name}' - else: - path_str = name - path = self.with_segments(path_str) - path._str = path_str - path._drv = self.drive - path._root = self.root - path._tail_cached = tail + [name] - return path + return self.joinpath(name) def glob(self, pattern, *, case_sensitive=None, follow_symlinks=None): """Iterate over this subtree and yield all existing files (of any kind, including directories) matching the given relative pattern. """ - sys.audit("pathlib.Path.glob", self, pattern) - return self._glob(pattern, case_sensitive, follow_symlinks) - - def rglob(self, pattern, *, case_sensitive=None, follow_symlinks=None): - """Recursively yield all existing files (of any kind, including - directories) matching the given relative pattern, anywhere in - this subtree. - """ - sys.audit("pathlib.Path.rglob", self, pattern) - return self._glob(f'**/{pattern}', case_sensitive, follow_symlinks) - - def _glob(self, pattern, case_sensitive, follow_symlinks): - path_pattern = self.with_segments(pattern) - if path_pattern.drive or path_pattern.root: - raise NotImplementedError("Non-relative patterns are unsupported") - elif not path_pattern._tail: - raise ValueError("Unacceptable pattern: {!r}".format(pattern)) - - pattern_parts = path_pattern._tail.copy() - if pattern[-1] in (self.pathmod.sep, self.pathmod.altsep): - # GH-65238: pathlib doesn't preserve trailing slash. Add it back. - pattern_parts.append('') - if pattern_parts[-1] == '**': - # GH-70303: '**' only matches directories. Add trailing slash. - warnings.warn( - "Pattern ending '**' will match files and directories in a " - "future Python release. Add a trailing slash to match only " - "directories and remove this warning.", - FutureWarning, 3) - pattern_parts.append('') - + if not isinstance(pattern, PurePathBase): + pattern = self.with_segments(pattern) if case_sensitive is None: # TODO: evaluate case-sensitivity of each directory in _select_children(). 
case_sensitive = _is_case_sensitive(self.pathmod) - # If symlinks are handled consistently, and the pattern does not - # contain '..' components, then we can use a 'walk-and-match' strategy - # when expanding '**' wildcards. When a '**' wildcard is encountered, - # all following pattern parts are immediately consumed and used to - # build a `re.Pattern` object. This pattern is used to filter the - # recursive walk. As a result, pattern parts following a '**' wildcard - # do not perform any filesystem access, which can be much faster! - filter_paths = follow_symlinks is not None and '..' not in pattern_parts + stack = pattern._pattern_stack + specials = ('', '.', '..') + filter_paths = False deduplicate_paths = False sep = self.pathmod.sep - paths = iter([self] if self.is_dir() else []) - part_idx = 0 - while part_idx < len(pattern_parts): - part = pattern_parts[part_idx] - part_idx += 1 - if part == '': - # Trailing slash. - pass - elif part == '..': - paths = (path._make_child_relpath('..') for path in paths) + paths = iter([self.joinpath('')] if self.is_dir() else []) + while stack: + part = stack.pop() + if part in specials: + paths = _select_special(paths, part) elif part == '**': # Consume adjacent '**' components. - while part_idx < len(pattern_parts) and pattern_parts[part_idx] == '**': - part_idx += 1 - - if filter_paths and part_idx < len(pattern_parts) and pattern_parts[part_idx] != '': - dir_only = pattern_parts[-1] == '' - paths = _select_recursive(paths, dir_only, follow_symlinks) + while stack and stack[-1] == '**': + stack.pop() - # Filter out paths that don't match pattern. - prefix_len = len(str(self._make_child_relpath('_'))) - 1 - match = _compile_pattern(str(path_pattern), sep, case_sensitive) - paths = (path for path in paths if match(str(path), prefix_len)) - return paths + # Consume adjacent non-special components and enable post-walk + # regex filtering, provided we're treating symlinks consistently. + if follow_symlinks is not None: + while stack and stack[-1] not in specials: + filter_paths = True + stack.pop() - dir_only = part_idx < len(pattern_parts) + dir_only = bool(stack) paths = _select_recursive(paths, dir_only, follow_symlinks) if deduplicate_paths: # De-duplicate if we've already seen a '**' component. @@ -890,14 +775,28 @@ def _glob(self, pattern, case_sensitive, follow_symlinks): elif '**' in part: raise ValueError("Invalid pattern: '**' can only be an entire path component") else: - dir_only = part_idx < len(pattern_parts) + dir_only = bool(stack) match = _compile_pattern(part, sep, case_sensitive) paths = _select_children(paths, dir_only, follow_symlinks, match) + if filter_paths: + # Filter out paths that don't match pattern. + prefix_len = len(str(self._make_child_relpath('_'))) - 1 + match = _compile_pattern(str(pattern), sep, case_sensitive) + paths = (path for path in paths if match(str(path), prefix_len)) return paths + def rglob(self, pattern, *, case_sensitive=None, follow_symlinks=None): + """Recursively yield all existing files (of any kind, including + directories) matching the given relative pattern, anywhere in + this subtree. 
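A usage sketch (not part of the patch) of the glob()/rglob() behaviour the rewritten matching loop above is meant to preserve:

    import tempfile
    from pathlib import Path

    with tempfile.TemporaryDirectory() as tmp:
        root = Path(tmp)
        (root / "pkg").mkdir()
        (root / "pkg" / "mod.py").write_text("x = 1\n")
        (root / "top.py").write_text("y = 2\n")

        assert sorted(p.name for p in root.glob("*.py")) == ["top.py"]
        assert sorted(p.name for p in root.glob("**/*.py")) == ["mod.py", "top.py"]
        # rglob(pattern) behaves like glob("**/" + pattern)
        assert sorted(p.name for p in root.rglob("*.py")) == ["mod.py", "top.py"]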
+ """ + if not isinstance(pattern, PurePathBase): + pattern = self.with_segments(pattern) + pattern = '**' / pattern + return self.glob(pattern, case_sensitive=case_sensitive, follow_symlinks=follow_symlinks) + def walk(self, top_down=True, on_error=None, follow_symlinks=False): """Walk the directory tree from this directory, similar to os.walk().""" - sys.audit("pathlib.Path.walk", self, on_error, follow_symlinks) paths = [self] while paths: @@ -956,7 +855,7 @@ def cwd(cls): # enable users to replace the implementation of 'absolute()' in a # subclass and benefit from the new behaviour here. This works because # os.path.abspath('.') == os.getcwd(). - return cls().absolute() + return cls('').absolute() def expanduser(self): """ Return a new path with expanded ~ and ~user constructs @@ -977,16 +876,6 @@ def readlink(self): self._unsupported("readlink") readlink._supported = False - def _split_stack(self): - """ - Split the path into a 2-tuple (anchor, parts), where *anchor* is the - uppermost parent of the path (equivalent to path.parents[-1]), and - *parts* is a reversed list of parts following the anchor. - """ - if not self._tail: - return self, [] - return self._from_parsed_parts(self.drive, self.root, []), self._tail[::-1] - def resolve(self, strict=False): """ Make the path absolute, resolving all symlinks on the way and also @@ -994,11 +883,15 @@ def resolve(self, strict=False): """ if self._resolving: return self - path, parts = self._split_stack() + path_root, parts = self._stack + path = self.with_segments(path_root) try: path = path.absolute() except UnsupportedOperation: - pass + path_tail = [] + else: + path_root, path_tail = path._stack + path_tail.reverse() # If the user has *not* overridden the `readlink()` method, then symlinks are unsupported # and (in non-strict mode) we can improve performance by not calling `stat()`. @@ -1006,45 +899,49 @@ def resolve(self, strict=False): link_count = 0 while parts: part = parts.pop() + if not part or part == '.': + continue if part == '..': - if not path._tail: - if path.root: + if not path_tail: + if path_root: # Delete '..' segment immediately following root continue - elif path._tail[-1] != '..': + elif path_tail[-1] != '..': # Delete '..' segment and its predecessor - path = path.parent + path_tail.pop() continue - next_path = path._make_child_relpath(part) + path_tail.append(part) if querying and part != '..': - next_path._resolving = True + path = self.with_segments(path_root + self.pathmod.sep.join(path_tail)) + path._resolving = True try: - st = next_path.stat(follow_symlinks=False) + st = path.stat(follow_symlinks=False) if S_ISLNK(st.st_mode): # Like Linux and macOS, raise OSError(errno.ELOOP) if too many symlinks are # encountered during resolution. link_count += 1 - if link_count >= _MAX_SYMLINKS: - raise OSError(ELOOP, "Too many symbolic links in path", str(self)) - target, target_parts = next_path.readlink()._split_stack() + if link_count >= self._max_symlinks: + raise OSError(ELOOP, "Too many symbolic links in path", self._raw_path) + target_root, target_parts = path.readlink()._stack # If the symlink target is absolute (like '/etc/hosts'), set the current # path to its uppermost parent (like '/'). - if target.root: - path = target + if target_root: + path_root = target_root + path_tail.clear() + else: + path_tail.pop() # Add the symlink target's reversed tail parts (like ['hosts', 'etc']) to # the stack of unresolved path parts. 
parts.extend(target_parts) continue elif parts and not S_ISDIR(st.st_mode): - raise NotADirectoryError(ENOTDIR, "Not a directory", str(self)) + raise NotADirectoryError(ENOTDIR, "Not a directory", self._raw_path) except OSError: if strict: raise else: querying = False - next_path._resolving = False - path = next_path - return path + return self.with_segments(path_root + self.pathmod.sep.join(path_tail)) def symlink_to(self, target, target_is_directory=False): """ diff --git a/Lib/pdb.py b/Lib/pdb.py index 83b7fefec63636..68f810620f8826 100755 --- a/Lib/pdb.py +++ b/Lib/pdb.py @@ -76,6 +76,7 @@ import dis import code import glob +import token import codeop import pprint import signal @@ -601,6 +602,39 @@ def default(self, line): except: self._error_exc() + def _replace_convenience_variables(self, line): + """Replace the convenience variables in 'line' with their values. + e.g. $foo is replaced by __pdb_convenience_variables["foo"]. + Note: such pattern in string literals will be skipped""" + + if "$" not in line: + return line + + dollar_start = dollar_end = -1 + replace_variables = [] + try: + for t in tokenize.generate_tokens(io.StringIO(line).readline): + token_type, token_string, start, end, _ = t + if token_type == token.OP and token_string == '$': + dollar_start, dollar_end = start, end + elif start == dollar_end and token_type == token.NAME: + # line is a one-line command so we only care about column + replace_variables.append((dollar_start[1], end[1], token_string)) + except tokenize.TokenError: + return line + + if not replace_variables: + return line + + last_end = 0 + line_pieces = [] + for start, end, name in replace_variables: + line_pieces.append(line[last_end:start] + f'__pdb_convenience_variables["{name}"]') + last_end = end + line_pieces.append(line[last_end:]) + + return ''.join(line_pieces) + def precmd(self, line): """Handle alias expansion and ';;' separator.""" if not line.strip(): @@ -635,7 +669,7 @@ def precmd(self, line): line = line[:marker].rstrip() # Replace all the convenience variables - line = re.sub(r'\$([a-zA-Z_][a-zA-Z0-9_]*)', r'__pdb_convenience_variables["\1"]', line) + line = self._replace_convenience_variables(line) return line diff --git a/Lib/pickle.py b/Lib/pickle.py index 988c0887341310..33c97c8c5efb28 100644 --- a/Lib/pickle.py +++ b/Lib/pickle.py @@ -1793,7 +1793,7 @@ def _test(): parser = argparse.ArgumentParser( description='display contents of the pickle files') parser.add_argument( - 'pickle_file', type=argparse.FileType('br'), + 'pickle_file', nargs='*', help='the pickle file') parser.add_argument( '-t', '--test', action='store_true', @@ -1809,6 +1809,10 @@ def _test(): parser.print_help() else: import pprint - for f in args.pickle_file: - obj = load(f) + for fn in args.pickle_file: + if fn == '-': + obj = load(sys.stdin.buffer) + else: + with open(fn, 'rb') as f: + obj = load(f) pprint.pprint(obj) diff --git a/Lib/pickletools.py b/Lib/pickletools.py index 95706e746c9870..95a77aeb2afe2a 100644 --- a/Lib/pickletools.py +++ b/Lib/pickletools.py @@ -2848,10 +2848,10 @@ def _test(): parser = argparse.ArgumentParser( description='disassemble one or more pickle files') parser.add_argument( - 'pickle_file', type=argparse.FileType('br'), + 'pickle_file', nargs='*', help='the pickle file') parser.add_argument( - '-o', '--output', default=sys.stdout, type=argparse.FileType('w'), + '-o', '--output', help='the file where the output should be written') parser.add_argument( '-m', '--memo', action='store_true', @@ -2876,15 +2876,26 @@ def _test(): if 
args.test: _test() else: - annotate = 30 if args.annotate else 0 if not args.pickle_file: parser.print_help() - elif len(args.pickle_file) == 1: - dis(args.pickle_file[0], args.output, None, - args.indentlevel, annotate) else: + annotate = 30 if args.annotate else 0 memo = {} if args.memo else None - for f in args.pickle_file: - preamble = args.preamble.format(name=f.name) - args.output.write(preamble + '\n') - dis(f, args.output, memo, args.indentlevel, annotate) + if args.output is None: + output = sys.stdout + else: + output = open(args.output, 'w') + try: + for arg in args.pickle_file: + if len(args.pickle_file) > 1: + name = '' if arg == '-' else arg + preamble = args.preamble.format(name=name) + output.write(preamble + '\n') + if arg == '-': + dis(sys.stdin.buffer, output, memo, args.indentlevel, annotate) + else: + with open(arg, 'rb') as f: + dis(f, output, memo, args.indentlevel, annotate) + finally: + if output is not sys.stdout: + output.close() diff --git a/Lib/plistlib.py b/Lib/plistlib.py index 0fc1b5cbfa8c49..67e832db217319 100644 --- a/Lib/plistlib.py +++ b/Lib/plistlib.py @@ -600,7 +600,8 @@ def _read_object(self, ref): obj_refs = self._read_refs(s) result = [] self._objects[ref] = result - result.extend(self._read_object(x) for x in obj_refs) + for x in obj_refs: + result.append(self._read_object(x)) # tokenH == 0xB0 is documented as 'ordset', but is not actually # implemented in the Apple reference code. @@ -906,6 +907,11 @@ def loads(value, *, fmt=None, dict_type=dict, aware_datetime=False): """Read a .plist file from a bytes object. Return the unpacked root object (which usually is a dictionary). """ + if isinstance(value, str): + if fmt == FMT_BINARY: + raise TypeError("value must be bytes-like object when fmt is " + "FMT_BINARY") + value = value.encode() fp = BytesIO(value) return load(fp, fmt=fmt, dict_type=dict_type, aware_datetime=aware_datetime) diff --git a/Lib/pydoc_data/topics.py b/Lib/pydoc_data/topics.py index 7c1bdc4dff2ec4..d453c1f038ef07 100644 --- a/Lib/pydoc_data/topics.py +++ b/Lib/pydoc_data/topics.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Autogenerated by Sphinx on Wed Nov 22 11:44:32 2023 +# Autogenerated by Sphinx on Wed Jan 17 13:09:41 2024 # as part of the release process. topics = {'assert': 'The "assert" statement\n' '**********************\n' @@ -864,19 +864,25 @@ '*instance* of the\n' ' owner class.\n' '\n' - 'The attribute "__objclass__" is interpreted by the ' - '"inspect" module as\n' - 'specifying the class where this object was defined ' + 'Instances of descriptors may also have the ' + '"__objclass__" attribute\n' + 'present:\n' + '\n' + 'object.__objclass__\n' + '\n' + ' The attribute "__objclass__" is interpreted by the ' + '"inspect" module\n' + ' as specifying the class where this object was defined ' '(setting this\n' - 'appropriately can assist in runtime introspection of ' + ' appropriately can assist in runtime introspection of ' 'dynamic class\n' - 'attributes). For callables, it may indicate that an ' + ' attributes). 
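# === Editor's note: illustrative sketch, not part of this patch =============
# The "__objclass__" attribute documented above, observed on a built-in
# method descriptor; the inspect module uses it to locate the defining class.
import inspect

assert str.upper.__objclass__ is str      # defined by the str type
assert inspect.isroutine(str.upper)       # still treated as a routine
# =============================================================================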
For callables, it may indicate that an ' 'instance of the\n' - 'given type (or a subclass) is expected or required as ' + ' given type (or a subclass) is expected or required as ' 'the first\n' - 'positional argument (for example, CPython sets this ' + ' positional argument (for example, CPython sets this ' 'attribute for\n' - 'unbound methods that are implemented in C).\n' + ' unbound methods that are implemented in C).\n' '\n' '\n' 'Invoking Descriptors\n' @@ -1111,16 +1117,23 @@ 'attribute references, which most objects do. This ' 'object is then\n' 'asked to produce the attribute whose name is the ' - 'identifier. This\n' - 'production can be customized by overriding the ' - '"__getattr__()" method.\n' - 'If this attribute is not available, the exception ' - '"AttributeError" is\n' - 'raised. Otherwise, the type and value of the object ' - 'produced is\n' - 'determined by the object. Multiple evaluations of ' - 'the same attribute\n' - 'reference may yield different objects.\n', + 'identifier. The type\n' + 'and value produced is determined by the object. ' + 'Multiple evaluations\n' + 'of the same attribute reference may yield different ' + 'objects.\n' + '\n' + 'This production can be customized by overriding the\n' + '"__getattribute__()" method or the "__getattr__()" ' + 'method. The\n' + '"__getattribute__()" method is called first and ' + 'either returns a value\n' + 'or raises "AttributeError" if the attribute is not ' + 'available.\n' + '\n' + 'If an "AttributeError" is raised and the object has ' + 'a "__getattr__()"\n' + 'method, that method is called as a fallback.\n', 'augassign': 'Augmented assignment statements\n' '*******************************\n' '\n' @@ -3045,8 +3058,7 @@ 'standard\n' 'Python grammar. Triple-quoted strings are supported. Raw ' 'strings and\n' - 'byte strings are supported. Formatted string literals are not\n' - 'supported.\n' + 'byte strings are supported. f-strings are not supported.\n' '\n' 'The forms "signed_number \'+\' NUMBER" and "signed_number \'-\' ' 'NUMBER"\n' @@ -3700,7 +3712,7 @@ ' **PEP 526** - Syntax for Variable Annotations\n' ' Ability to type hint variable declarations, including ' 'class\n' - ' variables and instance variables\n' + ' variables and instance variables.\n' '\n' ' **PEP 563** - Postponed Evaluation of Annotations\n' ' Support for forward references within annotations by ' @@ -3708,6 +3720,11 @@ ' annotations in a string form at runtime instead of eager\n' ' evaluation.\n' '\n' + ' **PEP 318** - Decorators for Functions and Methods\n' + ' Function and method decorators were introduced. Class ' + 'decorators\n' + ' were introduced in **PEP 3129**.\n' + '\n' '\n' 'Class definitions\n' '=================\n' @@ -4824,8 +4841,8 @@ 'denial-of-service caused\n' ' by carefully chosen inputs that exploit the worst ' 'case\n' - ' performance of a dict insertion, O(n^2) complexity. ' - 'See\n' + ' performance of a dict insertion, *O*(*n*^2) ' + 'complexity. See\n' ' http://ocert.org/advisories/ocert-2011-003.html for\n' ' details.Changing hash values affects the iteration ' 'order of sets.\n' @@ -5565,10 +5582,31 @@ 'whose\n' ' global namespace contains all the (global and local) names ' 'found in\n' - ' the current scope.\n' + ' the current scope. 
Use "exit()" or "quit()" to exit the ' + 'interpreter\n' + ' and return to the debugger.\n' + '\n' + ' Note:\n' + '\n' + ' Because interact creates a new global namespace with the ' + 'current\n' + ' global and local namespace for execution, assignment to ' + 'variables\n' + ' will not affect the original namespaces. However, ' + 'modification to\n' + ' the mutable objects will be reflected in the original ' + 'namespaces.\n' '\n' ' New in version 3.2.\n' '\n' + ' New in version 3.13: "exit()" and "quit()" can be used to ' + 'exit\n' + ' "interact" command.\n' + '\n' + ' Changed in version 3.13: "interact" directs its output to ' + 'the\n' + ' debugger’s output channel rather than "sys.stderr".\n' + '\n' 'alias [name [command]]\n' '\n' ' Create an alias called *name* that executes *command*. The\n' @@ -6554,7 +6592,7 @@ ' index_string ::= ' '+\n' ' conversion ::= "r" | "s" | "a"\n' - ' format_spec ::= \n' + ' format_spec ::= format-spec:format_spec\n' '\n' 'In less formal terms, the replacement field can start with ' 'a\n' @@ -6689,12 +6727,11 @@ 'contained\n' 'within a format string to define how individual values are ' 'presented\n' - '(see Format String Syntax and Formatted string literals). ' - 'They can\n' - 'also be passed directly to the built-in "format()" ' - 'function. Each\n' - 'formattable type may define how the format specification is ' - 'to be\n' + '(see Format String Syntax and f-strings). They can also be ' + 'passed\n' + 'directly to the built-in "format()" function. Each ' + 'formattable type\n' + 'may define how the format specification is to be ' 'interpreted.\n' '\n' 'Most built-in types implement the following options for ' @@ -7509,13 +7546,18 @@ ' **PEP 526** - Syntax for Variable Annotations\n' ' Ability to type hint variable declarations, including ' 'class\n' - ' variables and instance variables\n' + ' variables and instance variables.\n' '\n' ' **PEP 563** - Postponed Evaluation of Annotations\n' ' Support for forward references within annotations by ' 'preserving\n' ' annotations in a string form at runtime instead of eager\n' - ' evaluation.\n', + ' evaluation.\n' + '\n' + ' **PEP 318** - Decorators for Functions and Methods\n' + ' Function and method decorators were introduced. Class ' + 'decorators\n' + ' were introduced in **PEP 3129**.\n', 'global': 'The "global" statement\n' '**********************\n' '\n' @@ -9212,15 +9254,13 @@ '\n' 'A traceback object is normally created automatically when an ' 'exception\n' - 'is raised and attached to it as the "__traceback__" attribute, ' - 'which\n' - 'is writable. You can create an exception and set your own traceback ' - 'in\n' - 'one step using the "with_traceback()" exception method (which ' - 'returns\n' - 'the same exception instance, with its traceback set to its ' - 'argument),\n' - 'like so:\n' + 'is raised and attached to it as the "__traceback__" attribute. 
You ' + 'can\n' + 'create an exception and set your own traceback in one step using ' + 'the\n' + '"with_traceback()" exception method (which returns the same ' + 'exception\n' + 'instance, with its traceback set to its argument), like so:\n' '\n' ' raise Exception("foo occurred").with_traceback(tracebackobj)\n' '\n' @@ -9246,6 +9286,8 @@ ' ...\n' ' Traceback (most recent call last):\n' ' File "", line 2, in \n' + ' print(1 / 0)\n' + ' ~~^~~\n' ' ZeroDivisionError: division by zero\n' '\n' ' The above exception was the direct cause of the following ' @@ -9253,6 +9295,7 @@ '\n' ' Traceback (most recent call last):\n' ' File "", line 4, in \n' + ' raise RuntimeError("Something bad happened") from exc\n' ' RuntimeError: Something bad happened\n' '\n' 'A similar mechanism works implicitly if a new exception is raised ' @@ -9271,6 +9314,8 @@ ' ...\n' ' Traceback (most recent call last):\n' ' File "", line 2, in \n' + ' print(1 / 0)\n' + ' ~~^~~\n' ' ZeroDivisionError: division by zero\n' '\n' ' During handling of the above exception, another exception ' @@ -9278,6 +9323,7 @@ '\n' ' Traceback (most recent call last):\n' ' File "", line 4, in \n' + ' raise RuntimeError("Something bad happened")\n' ' RuntimeError: Something bad happened\n' '\n' 'Exception chaining can be explicitly suppressed by specifying ' @@ -9466,23 +9512,20 @@ '\n' ' Called to implement evaluation of "self[key]". For ' '*sequence*\n' - ' types, the accepted keys should be integers and slice ' - 'objects.\n' - ' Note that the special interpretation of negative ' - 'indexes (if the\n' - ' class wishes to emulate a *sequence* type) is up to ' - 'the\n' - ' "__getitem__()" method. If *key* is of an inappropriate ' - 'type,\n' - ' "TypeError" may be raised; if of a value outside the ' - 'set of indexes\n' - ' for the sequence (after any special interpretation of ' - 'negative\n' - ' values), "IndexError" should be raised. For *mapping* ' - 'types, if\n' - ' *key* is missing (not in the container), "KeyError" ' - 'should be\n' - ' raised.\n' + ' types, the accepted keys should be integers. ' + 'Optionally, they may\n' + ' support "slice" objects as well. Negative index ' + 'support is also\n' + ' optional. If *key* is of an inappropriate type, ' + '"TypeError" may be\n' + ' raised; if *key* is a value outside the set of indexes ' + 'for the\n' + ' sequence (after any special interpretation of negative ' + 'values),\n' + ' "IndexError" should be raised. For *mapping* types, if ' + '*key* is\n' + ' missing (not in the container), "KeyError" should be ' + 'raised.\n' '\n' ' Note:\n' '\n' @@ -10204,8 +10247,8 @@ ' intended to provide protection against a ' 'denial-of-service caused\n' ' by carefully chosen inputs that exploit the worst case\n' - ' performance of a dict insertion, O(n^2) complexity. ' - 'See\n' + ' performance of a dict insertion, *O*(*n*^2) ' + 'complexity. See\n' ' http://ocert.org/advisories/ocert-2011-003.html for\n' ' details.Changing hash values affects the iteration ' 'order of sets.\n' @@ -10483,19 +10526,25 @@ 'of the\n' ' owner class.\n' '\n' - 'The attribute "__objclass__" is interpreted by the "inspect" ' - 'module as\n' - 'specifying the class where this object was defined (setting ' - 'this\n' - 'appropriately can assist in runtime introspection of dynamic ' - 'class\n' - 'attributes). 
For callables, it may indicate that an instance ' - 'of the\n' - 'given type (or a subclass) is expected or required as the ' + 'Instances of descriptors may also have the "__objclass__" ' + 'attribute\n' + 'present:\n' + '\n' + 'object.__objclass__\n' + '\n' + ' The attribute "__objclass__" is interpreted by the ' + '"inspect" module\n' + ' as specifying the class where this object was defined ' + '(setting this\n' + ' appropriately can assist in runtime introspection of ' + 'dynamic class\n' + ' attributes). For callables, it may indicate that an ' + 'instance of the\n' + ' given type (or a subclass) is expected or required as the ' 'first\n' - 'positional argument (for example, CPython sets this ' + ' positional argument (for example, CPython sets this ' 'attribute for\n' - 'unbound methods that are implemented in C).\n' + ' unbound methods that are implemented in C).\n' '\n' '\n' 'Invoking Descriptors\n' @@ -10742,7 +10791,7 @@ '\n' ' Keyword arguments which are given to a new class are ' 'passed to the\n' - ' parent’s class "__init_subclass__". For compatibility ' + ' parent class’s "__init_subclass__". For compatibility ' 'with other\n' ' classes using "__init_subclass__", one should take out ' 'the needed\n' @@ -11429,22 +11478,20 @@ '\n' ' Called to implement evaluation of "self[key]". For ' '*sequence*\n' - ' types, the accepted keys should be integers and slice ' - 'objects.\n' - ' Note that the special interpretation of negative indexes ' - '(if the\n' - ' class wishes to emulate a *sequence* type) is up to the\n' - ' "__getitem__()" method. If *key* is of an inappropriate ' - 'type,\n' - ' "TypeError" may be raised; if of a value outside the set ' - 'of indexes\n' - ' for the sequence (after any special interpretation of ' - 'negative\n' - ' values), "IndexError" should be raised. For *mapping* ' - 'types, if\n' - ' *key* is missing (not in the container), "KeyError" ' - 'should be\n' - ' raised.\n' + ' types, the accepted keys should be integers. Optionally, ' + 'they may\n' + ' support "slice" objects as well. Negative index support ' + 'is also\n' + ' optional. If *key* is of an inappropriate type, ' + '"TypeError" may be\n' + ' raised; if *key* is a value outside the set of indexes ' + 'for the\n' + ' sequence (after any special interpretation of negative ' + 'values),\n' + ' "IndexError" should be raised. For *mapping* types, if ' + '*key* is\n' + ' missing (not in the container), "KeyError" should be ' + 'raised.\n' '\n' ' Note:\n' '\n' @@ -13043,12 +13090,11 @@ '\n' 'A string literal with "\'f\'" or "\'F\'" in its prefix is a ' '*formatted\n' - 'string literal*; see Formatted string literals. The "\'f\'" may ' - 'be\n' - 'combined with "\'r\'", but not with "\'b\'" or "\'u\'", therefore ' - 'raw\n' - 'formatted strings are possible, but formatted bytes literals are ' - 'not.\n' + 'string literal*; see f-strings. 
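# === Editor's note: illustrative sketch, not part of this patch =============
# The prefix rules restated above: 'f' combines with 'r' (raw f-strings),
# but not with 'b' or 'u', so formatted bytes literals do not exist.
name = "world"
raw_formatted = rf"C:\tmp\{name}"     # raw + formatted: the backslash is kept
assert raw_formatted == "C:\\tmp\\world"
# bf"..." or fb"..." would be a SyntaxError.
# =============================================================================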
The "\'f\'" may be combined with ' + '"\'r\'",\n' + 'but not with "\'b\'" or "\'u\'", therefore raw formatted strings ' + 'are\n' + 'possible, but formatted bytes literals are not.\n' '\n' 'In triple-quoted literals, unescaped newlines and quotes are ' 'allowed\n' @@ -13952,130 +13998,117 @@ 'function’s\n' 'formal parameter list.\n' '\n' - 'Special attributes:\n' '\n' - '+---------------------------+---------------------------------+-------------+\n' - '| Attribute | Meaning ' - '| |\n' - '|===========================|=================================|=============|\n' - '| "__doc__" | The function’s documentation | ' - 'Writable |\n' - '| | string, or "None" if ' - '| |\n' - '| | unavailable; not inherited by ' - '| |\n' - '| | subclasses. ' - '| |\n' - '+---------------------------+---------------------------------+-------------+\n' - '| "__name__" | The function’s name. | ' - 'Writable |\n' - '+---------------------------+---------------------------------+-------------+\n' - '| "__qualname__" | The function’s *qualified | ' - 'Writable |\n' - '| | name*. New in version 3.3. ' - '| |\n' - '+---------------------------+---------------------------------+-------------+\n' - '| "__module__" | The name of the module the | ' - 'Writable |\n' - '| | function was defined in, or ' - '| |\n' - '| | "None" if unavailable. ' - '| |\n' - '+---------------------------+---------------------------------+-------------+\n' - '| "__defaults__" | A tuple containing default | ' - 'Writable |\n' - '| | argument values for those ' - '| |\n' - '| | arguments that have defaults, ' - '| |\n' - '| | or "None" if no arguments have ' - '| |\n' - '| | a default value. ' - '| |\n' - '+---------------------------+---------------------------------+-------------+\n' - '| "__code__" | The code object representing | ' - 'Writable |\n' - '| | the compiled function body. ' - '| |\n' - '+---------------------------+---------------------------------+-------------+\n' - '| "__globals__" | A reference to the dictionary | ' - 'Read-only |\n' - '| | that holds the function’s ' - '| |\n' - '| | global variables — the global ' - '| |\n' - '| | namespace of the module in ' - '| |\n' - '| | which the function was defined. ' - '| |\n' - '+---------------------------+---------------------------------+-------------+\n' - '| "__dict__" | The namespace supporting | ' - 'Writable |\n' - '| | arbitrary function attributes. ' - '| |\n' - '+---------------------------+---------------------------------+-------------+\n' - '| "__closure__" | "None" or a tuple of cells that | ' - 'Read-only |\n' - '| | contain bindings for the ' - '| |\n' - '| | function’s free variables. See ' - '| |\n' - '| | below for information on the ' - '| |\n' - '| | "cell_contents" attribute. ' - '| |\n' - '+---------------------------+---------------------------------+-------------+\n' - '| "__annotations__" | A dict containing annotations | ' - 'Writable |\n' - '| | of parameters. The keys of the ' - '| |\n' - '| | dict are the parameter names, ' - '| |\n' - '| | and "\'return\'" for the return ' - '| |\n' - '| | annotation, if provided. For ' - '| |\n' - '| | more information on working ' - '| |\n' - '| | with this attribute, see ' - '| |\n' - '| | Annotations Best Practices. ' - '| |\n' - '+---------------------------+---------------------------------+-------------+\n' - '| "__kwdefaults__" | A dict containing defaults for | ' - 'Writable |\n' - '| | keyword-only parameters. 
' - '| |\n' - '+---------------------------+---------------------------------+-------------+\n' - '| "__type_params__" | A tuple containing the type | ' - 'Writable |\n' - '| | parameters of a generic ' - '| |\n' - '| | function. ' - '| |\n' - '+---------------------------+---------------------------------+-------------+\n' - '\n' - 'Most of the attributes labelled “Writable” check the type of the\n' - 'assigned value.\n' + 'Special read-only attributes\n' + '~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n' + '\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| Attribute | ' + 'Meaning |\n' + '|====================================================|====================================================|\n' + '| function.__globals__ | A reference ' + 'to the "dictionary" that holds the |\n' + '| | function’s ' + 'global variables – the global namespace |\n' + '| | of the ' + 'module in which the function was defined. |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| function.__closure__ | "None" or a ' + '"tuple" of cells that contain bindings |\n' + '| | for the ' + 'function’s free variables. A cell object |\n' + '| | has the ' + 'attribute "cell_contents". This can be |\n' + '| | used to get ' + 'the value of the cell, as well as set |\n' + '| | the ' + 'value. |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '\n' + '\n' + 'Special writable attributes\n' + '~~~~~~~~~~~~~~~~~~~~~~~~~~~\n' + '\n' + 'Most of these attributes check the type of the assigned value:\n' + '\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| Attribute | ' + 'Meaning |\n' + '|====================================================|====================================================|\n' + '| function.__doc__ | The ' + 'function’s documentation string, or "None" if |\n' + '| | unavailable. ' + 'Not inherited by subclasses. |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| function.__name__ | The ' + 'function’s name. See also: "__name__ |\n' + '| | ' + 'attributes". |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| function.__qualname__ | The ' + 'function’s *qualified name*. See also: |\n' + '| | ' + '"__qualname__ attributes". New in version 3.3. |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| function.__module__ | The name of ' + 'the module the function was defined |\n' + '| | in, or ' + '"None" if unavailable. |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| function.__defaults__ | A "tuple" ' + 'containing default *parameter* values |\n' + '| | for those ' + 'parameters that have defaults, or "None" |\n' + '| | if no ' + 'parameters have a default value. |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| function.__code__ | The code ' + 'object representing the compiled function |\n' + '| | ' + 'body. 
|\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| function.__dict__ | The ' + 'namespace supporting arbitrary function |\n' + '| | attributes. ' + 'See also: "__dict__ attributes". |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| function.__annotations__ | A ' + '"dictionary" containing annotations of |\n' + '| | ' + '*parameters*. The keys of the dictionary are the |\n' + '| | parameter ' + 'names, and "\'return\'" for the return |\n' + '| | annotation, ' + 'if provided. See also: Annotations |\n' + '| | Best ' + 'Practices. |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| function.__kwdefaults__ | A ' + '"dictionary" containing defaults for keyword- |\n' + '| | only ' + '*parameters*. |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| function.__type_params__ | A "tuple" ' + 'containing the type parameters of a |\n' + '| | generic ' + 'function. New in version 3.12. |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' '\n' 'Function objects also support getting and setting arbitrary\n' 'attributes, which can be used, for example, to attach metadata to\n' 'functions. Regular attribute dot-notation is used to get and set ' 'such\n' - 'attributes. *Note that the current implementation only supports\n' - 'function attributes on user-defined functions. Function attributes ' - 'on\n' - 'built-in functions may be supported in the future.*\n' + 'attributes.\n' '\n' - 'A cell object has the attribute "cell_contents". This can be used ' - 'to\n' - 'get the value of the cell, as well as set the value.\n' + '**CPython implementation detail:** CPython’s current ' + 'implementation\n' + 'only supports function attributes on user-defined functions. ' + 'Function\n' + 'attributes on built-in functions may be supported in the future.\n' '\n' 'Additional information about a function’s definition can be ' 'retrieved\n' - 'from its code object; see the description of internal types below. 
' - 'The\n' - '"cell" type can be accessed in the "types" module.\n' + 'from its code object (accessible via the "__code__" attribute).\n' '\n' '\n' 'Instance methods\n' @@ -14085,14 +14118,34 @@ 'any\n' 'callable object (normally a user-defined function).\n' '\n' - 'Special read-only attributes: "__self__" is the class instance ' - 'object,\n' - '"__func__" is the function object; "__doc__" is the method’s\n' - 'documentation (same as "__func__.__doc__"); "__name__" is the ' - 'method\n' - 'name (same as "__func__.__name__"); "__module__" is the name of ' - 'the\n' - 'module the method was defined in, or "None" if unavailable.\n' + 'Special read-only attributes:\n' + '\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| method.__self__ | Refers to ' + 'the class instance object to which the |\n' + '| | method is ' + 'bound |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| method.__func__ | Refers to ' + 'the original function object |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| method.__doc__ | The method’s ' + 'documentation (same as |\n' + '| | ' + '"method.__func__.__doc__"). A "string" if the |\n' + '| | original ' + 'function had a docstring, else "None". |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| method.__name__ | The name of ' + 'the method (same as |\n' + '| | ' + '"method.__func__.__name__") |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| method.__module__ | The name of ' + 'the module the method was defined in, |\n' + '| | or "None" if ' + 'unavailable. |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' '\n' 'Methods also support accessing (but not setting) the arbitrary\n' 'function attributes on the underlying function object.\n' @@ -14101,24 +14154,20 @@ 'attribute\n' 'of a class (perhaps via an instance of that class), if that ' 'attribute\n' - 'is a user-defined function object or a class method object.\n' + 'is a user-defined function object or a "classmethod" object.\n' '\n' 'When an instance method object is created by retrieving a ' 'user-defined\n' 'function object from a class via one of its instances, its ' '"__self__"\n' - 'attribute is the instance, and the method object is said to be ' - 'bound.\n' - 'The new method’s "__func__" attribute is the original function ' - 'object.\n' + 'attribute is the instance, and the method object is said to be\n' + '*bound*. 
The new method’s "__func__" attribute is the original\n' + 'function object.\n' '\n' - 'When an instance method object is created by retrieving a class ' - 'method\n' - 'object from a class or instance, its "__self__" attribute is the ' - 'class\n' - 'itself, and its "__func__" attribute is the function object ' - 'underlying\n' - 'the class method.\n' + 'When an instance method object is created by retrieving a\n' + '"classmethod" object from a class or instance, its "__self__"\n' + 'attribute is the class itself, and its "__func__" attribute is the\n' + 'function object underlying the class method.\n' '\n' 'When an instance method object is called, the underlying function\n' '("__func__") is called, inserting the class instance ("__self__") ' @@ -14129,7 +14178,7 @@ 'of\n' '"C", calling "x.f(1)" is equivalent to calling "C.f(x, 1)".\n' '\n' - 'When an instance method object is derived from a class method ' + 'When an instance method object is derived from a "classmethod" ' 'object,\n' 'the “class instance” stored in "__self__" will actually be the ' 'class\n' @@ -14217,13 +14266,18 @@ 'of built-in functions are "len()" and "math.sin()" ("math" is a\n' 'standard built-in module). The number and type of the arguments ' 'are\n' - 'determined by the C function. Special read-only attributes: ' - '"__doc__"\n' - 'is the function’s documentation string, or "None" if unavailable;\n' - '"__name__" is the function’s name; "__self__" is set to "None" ' - '(but\n' - 'see the next item); "__module__" is the name of the module the\n' - 'function was defined in or "None" if unavailable.\n' + 'determined by the C function. Special read-only attributes:\n' + '\n' + '* "__doc__" is the function’s documentation string, or "None" if\n' + ' unavailable. See "function.__doc__".\n' + '\n' + '* "__name__" is the function’s name. See "function.__name__".\n' + '\n' + '* "__self__" is set to "None" (but see the next item).\n' + '\n' + '* "__module__" is the name of the module the function was defined ' + 'in\n' + ' or "None" if unavailable. See "function.__module__".\n' '\n' '\n' 'Built-in methods\n' @@ -14235,7 +14289,9 @@ 'argument. An example of a built-in method is "alist.append()",\n' 'assuming *alist* is a list object. In this case, the special ' 'read-only\n' - 'attribute "__self__" is set to the object denoted by *alist*.\n' + 'attribute "__self__" is set to the object denoted by *alist*. (The\n' + 'attribute has the same semantics as it does with "other instance\n' + 'methods".)\n' '\n' '\n' 'Classes\n' @@ -14267,16 +14323,15 @@ 'statement, or by calling functions such as ' '"importlib.import_module()"\n' 'and built-in "__import__()". A module object has a namespace\n' - 'implemented by a dictionary object (this is the dictionary ' - 'referenced\n' - 'by the "__globals__" attribute of functions defined in the ' - 'module).\n' - 'Attribute references are translated to lookups in this dictionary,\n' - 'e.g., "m.x" is equivalent to "m.__dict__["x"]". A module object ' - 'does\n' - 'not contain the code object used to initialize the module (since ' - 'it\n' - 'isn’t needed once the initialization is done).\n' + 'implemented by a "dictionary" object (this is the dictionary\n' + 'referenced by the "__globals__" attribute of functions defined in ' + 'the\n' + 'module). Attribute references are translated to lookups in this\n' + 'dictionary, e.g., "m.x" is equivalent to "m.__dict__["x"]". 
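# === Editor's note: illustrative sketch, not part of this patch =============
# Module attribute access as dictionary lookup, per the paragraph above;
# "demo_mod" is a throwaway module object created only for this check.
import types

mod = types.ModuleType("demo_mod")
mod.x = 41
assert mod.x == mod.__dict__["x"] == 41
mod.__dict__["y"] = 1                 # assignment updates the same dictionary
assert mod.y == 1
# =============================================================================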
A ' + 'module\n' + 'object does not contain the code object used to initialize the ' + 'module\n' + '(since it isn’t needed once the initialization is done).\n' '\n' 'Attribute assignment updates the module’s namespace dictionary, ' 'e.g.,\n' @@ -14350,14 +14405,13 @@ 'a\n' 'class method object, it is transformed into an instance method ' 'object\n' - 'whose "__self__" attribute is "C". When it would yield a static\n' - 'method object, it is transformed into the object wrapped by the ' - 'static\n' - 'method object. See section Implementing Descriptors for another way ' - 'in\n' - 'which attributes retrieved from a class may differ from those ' - 'actually\n' - 'contained in its "__dict__".\n' + 'whose "__self__" attribute is "C". When it would yield a\n' + '"staticmethod" object, it is transformed into the object wrapped ' + 'by\n' + 'the static method object. See section Implementing Descriptors for\n' + 'another way in which attributes retrieved from a class may differ ' + 'from\n' + 'those actually contained in its "__dict__".\n' '\n' 'Class attribute assignments update the class’s dictionary, never ' 'the\n' @@ -14480,43 +14534,106 @@ 'code objects are immutable and contain no references (directly or\n' 'indirectly) to mutable objects.\n' '\n' - 'Special read-only attributes: "co_name" gives the function name;\n' - '"co_qualname" gives the fully qualified function name; ' - '"co_argcount"\n' - 'is the total number of positional arguments (including ' - 'positional-only\n' - 'arguments and arguments with default values); "co_posonlyargcount" ' - 'is\n' - 'the number of positional-only arguments (including arguments with\n' - 'default values); "co_kwonlyargcount" is the number of keyword-only\n' - 'arguments (including arguments with default values); "co_nlocals" ' - 'is\n' - 'the number of local variables used by the function (including\n' - 'arguments); "co_varnames" is a tuple containing the names of the ' - 'local\n' - 'variables (starting with the argument names); "co_cellvars" is a ' - 'tuple\n' - 'containing the names of local variables that are referenced by ' - 'nested\n' - 'functions; "co_freevars" is a tuple containing the names of free\n' - 'variables; "co_code" is a string representing the sequence of ' - 'bytecode\n' - 'instructions; "co_consts" is a tuple containing the literals used ' - 'by\n' - 'the bytecode; "co_names" is a tuple containing the names used by ' - 'the\n' - 'bytecode; "co_filename" is the filename from which the code was\n' - 'compiled; "co_firstlineno" is the first line number of the ' - 'function;\n' - '"co_lnotab" is a string encoding the mapping from bytecode offsets ' - 'to\n' - 'line numbers (for details see the source code of the interpreter, ' - 'is\n' - 'deprecated since 3.12 and may be removed in 3.14); "co_stacksize" ' - 'is\n' - 'the required stack size; "co_flags" is an integer encoding a number ' - 'of\n' - 'flags for the interpreter.\n' + '\n' + 'Special read-only attributes\n' + '~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n' + '\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| codeobject.co_name | The function ' + 'name |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| codeobject.co_qualname | The fully ' + 'qualified function name |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| codeobject.co_argcount | The total ' + 'number of 
positional *parameters* |\n' + '| | (including ' + 'positional-only parameters and |\n' + '| | parameters ' + 'with default values) that the function |\n' + '| | ' + 'has |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| codeobject.co_posonlyargcount | The number ' + 'of positional-only *parameters* |\n' + '| | (including ' + 'arguments with default values) that the |\n' + '| | function ' + 'has |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| codeobject.co_kwonlyargcount | The number ' + 'of keyword-only *parameters* (including |\n' + '| | arguments ' + 'with default values) that the function |\n' + '| | ' + 'has |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| codeobject.co_nlocals | The number ' + 'of local variables used by the function |\n' + '| | (including ' + 'parameters) |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| codeobject.co_varnames | A "tuple" ' + 'containing the names of the local |\n' + '| | variables in ' + 'the function (starting with the |\n' + '| | parameter ' + 'names) |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| codeobject.co_cellvars | A "tuple" ' + 'containing the names of local variables |\n' + '| | that are ' + 'referenced by nested functions inside the |\n' + '| | ' + 'function |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| codeobject.co_freevars | A "tuple" ' + 'containing the names of free variables |\n' + '| | in the ' + 'function |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| codeobject.co_code | A string ' + 'representing the sequence of *bytecode* |\n' + '| | instructions ' + 'in the function |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| codeobject.co_consts | A "tuple" ' + 'containing the literals used by the |\n' + '| | *bytecode* ' + 'in the function |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| codeobject.co_names | A "tuple" ' + 'containing the names used by the |\n' + '| | *bytecode* ' + 'in the function |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| codeobject.co_filename | The name of ' + 'the file from which the code was |\n' + '| | ' + 'compiled |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| codeobject.co_firstlineno | The line ' + 'number of the first line of the function |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| codeobject.co_lnotab | A string ' + 'encoding the mapping from *bytecode* |\n' + '| | offsets to ' + 'line numbers. For details, see the |\n' + '| | source code ' + 'of the interpreter. Deprecated since |\n' + '| | version ' + '3.12: This attribute of code objects is |\n' + '| | deprecated, ' + 'and may be removed in Python 3.14. 
|\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| codeobject.co_stacksize | The required ' + 'stack size of the code object |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| codeobject.co_flags | An "integer" ' + 'encoding a number of flags for the |\n' + '| | ' + 'interpreter. |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' '\n' 'The following flag bits are defined for "co_flags": bit "0x04" is ' 'set\n' @@ -14525,7 +14642,9 @@ 'number of positional arguments; bit "0x08" is set if the function ' 'uses\n' 'the "**keywords" syntax to accept arbitrary keyword arguments; bit\n' - '"0x20" is set if the function is a generator.\n' + '"0x20" is set if the function is a generator. See Code Objects Bit\n' + 'Flags for details on the semantics of each flags that might be\n' + 'present.\n' '\n' 'Future feature declarations ("from __future__ import division") ' 'also\n' @@ -14544,16 +14663,19 @@ 'is the documentation string of the function, or "None" if ' 'undefined.\n' '\n' + '\n' + 'Methods on code objects\n' + '~~~~~~~~~~~~~~~~~~~~~~~\n' + '\n' 'codeobject.co_positions()\n' '\n' - ' Returns an iterable over the source code positions of each ' - 'bytecode\n' - ' instruction in the code object.\n' + ' Returns an iterable over the source code positions of each\n' + ' *bytecode* instruction in the code object.\n' '\n' - ' The iterator returns tuples containing the "(start_line, ' - 'end_line,\n' - ' start_column, end_column)". The *i-th* tuple corresponds to the\n' - ' position of the source code that compiled to the *i-th*\n' + ' The iterator returns "tuple"s containing the "(start_line,\n' + ' end_line, start_column, end_column)". The *i-th* tuple ' + 'corresponds\n' + ' to the position of the source code that compiled to the *i-th*\n' ' instruction. Column information is 0-indexed utf-8 byte offsets ' 'on\n' ' the given source line.\n' @@ -14591,51 +14713,153 @@ 'the\n' ' "PYTHONNODEBUGRANGES" environment variable can be used.\n' '\n' + 'codeobject.co_lines()\n' '\n' - 'Frame objects\n' - '-------------\n' + ' Returns an iterator that yields information about successive ' + 'ranges\n' + ' of *bytecode*s. Each item yielded is a "(start, end, lineno)"\n' + ' "tuple":\n' '\n' - 'Frame objects represent execution frames. 
They may occur in ' - 'traceback\n' - 'objects (see below), and are also passed to registered trace\n' - 'functions.\n' + ' * "start" (an "int") represents the offset (inclusive) of the ' + 'start\n' + ' of the *bytecode* range\n' '\n' - 'Special read-only attributes: "f_back" is to the previous stack ' - 'frame\n' - '(towards the caller), or "None" if this is the bottom stack frame;\n' - '"f_code" is the code object being executed in this frame; ' - '"f_locals"\n' - 'is the dictionary used to look up local variables; "f_globals" is ' - 'used\n' - 'for global variables; "f_builtins" is used for built-in ' - '(intrinsic)\n' - 'names; "f_lasti" gives the precise instruction (this is an index ' - 'into\n' - 'the bytecode string of the code object).\n' + ' * "end" (an "int") represents the offset (exclusive) of the end ' + 'of\n' + ' the *bytecode* range\n' '\n' - 'Accessing "f_code" raises an auditing event "object.__getattr__" ' - 'with\n' - 'arguments "obj" and ""f_code"".\n' + ' * "lineno" is an "int" representing the line number of the\n' + ' *bytecode* range, or "None" if the bytecodes in the given ' + 'range\n' + ' have no line number\n' '\n' - 'Special writable attributes: "f_trace", if not "None", is a ' - 'function\n' - 'called for various events during code execution (this is used by ' + ' The items yielded will have the following properties:\n' + '\n' + ' * The first range yielded will have a "start" of 0.\n' + '\n' + ' * The "(start, end)" ranges will be non-decreasing and ' + 'consecutive.\n' + ' That is, for any pair of "tuple"s, the "start" of the second ' + 'will\n' + ' be equal to the "end" of the first.\n' + '\n' + ' * No range will be backwards: "end >= start" for all triples.\n' + '\n' + ' * The last "tuple" yielded will have "end" equal to the size of ' 'the\n' - 'debugger). Normally an event is triggered for each new source line ' - '-\n' - 'this can be disabled by setting "f_trace_lines" to "False".\n' - '\n' - 'Implementations *may* allow per-opcode events to be requested by\n' - 'setting "f_trace_opcodes" to "True". Note that this may lead to\n' - 'undefined interpreter behaviour if exceptions raised by the trace\n' - 'function escape to the function being traced.\n' - '\n' - '"f_lineno" is the current line number of the frame — writing to ' - 'this\n' - 'from within a trace function jumps to the given line (only for the\n' - 'bottom-most frame). A debugger can implement a Jump command (aka ' - 'Set\n' - 'Next Statement) by writing to f_lineno.\n' + ' *bytecode*.\n' + '\n' + ' Zero-width ranges, where "start == end", are allowed. ' + 'Zero-width\n' + ' ranges are used for lines that are present in the source code, ' + 'but\n' + ' have been eliminated by the *bytecode* compiler.\n' + '\n' + ' New in version 3.10.\n' + '\n' + ' See also:\n' + '\n' + ' **PEP 626** - Precise line numbers for debugging and other ' + 'tools.\n' + ' The PEP that introduced the "co_lines()" method.\n' + '\n' + '\n' + 'Frame objects\n' + '-------------\n' + '\n' + 'Frame objects represent execution frames. 
They may occur in ' + 'traceback\n' + 'objects, and are also passed to registered trace functions.\n' + '\n' + '\n' + 'Special read-only attributes\n' + '~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n' + '\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| frame.f_back | Points to ' + 'the previous stack frame (towards the |\n' + '| | caller), or ' + '"None" if this is the bottom stack |\n' + '| | ' + 'frame |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| frame.f_code | The code ' + 'object being executed in this frame. |\n' + '| | Accessing ' + 'this attribute raises an auditing event |\n' + '| | ' + '"object.__getattr__" with arguments "obj" and |\n' + '| | ' + '""f_code"". |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| frame.f_locals | The ' + 'dictionary used by the frame to look up local |\n' + '| | ' + 'variables |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| frame.f_globals | The ' + 'dictionary used by the frame to look up global |\n' + '| | ' + 'variables |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| frame.f_builtins | The ' + 'dictionary used by the frame to look up built- |\n' + '| | in ' + '(intrinsic) names |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| frame.f_lasti | The “precise ' + 'instruction” of the frame object |\n' + '| | (this is an ' + 'index into the *bytecode* string of |\n' + '| | the code ' + 'object) |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '\n' + '\n' + 'Special writable attributes\n' + '~~~~~~~~~~~~~~~~~~~~~~~~~~~\n' + '\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| frame.f_trace | If not ' + '"None", this is a function called for |\n' + '| | various ' + 'events during code execution (this is used |\n' + '| | by ' + 'debuggers). Normally an event is triggered for |\n' + '| | each new ' + 'source line (see "f_trace_lines"). |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| frame.f_trace_lines | Set this ' + 'attribute to "False" to disable |\n' + '| | triggering a ' + 'tracing event for each source line. |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| frame.f_trace_opcodes | Set this ' + 'attribute to "True" to allow per-opcode |\n' + '| | events to be ' + 'requested. Note that this may lead to |\n' + '| | undefined ' + 'interpreter behaviour if exceptions |\n' + '| | raised by ' + 'the trace function escape to the |\n' + '| | function ' + 'being traced. |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| frame.f_lineno | The current ' + 'line number of the frame – writing to |\n' + '| | this from ' + 'within a trace function jumps to the |\n' + '| | given line ' + '(only for the bottom-most frame). A |\n' + '| | debugger can ' + 'implement a Jump command (aka Set |\n' + '| | Next ' + 'Statement) by writing to this attribute. 
|\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '\n' + '\n' + 'Frame object methods\n' + '~~~~~~~~~~~~~~~~~~~~\n' '\n' 'Frame objects support one method:\n' '\n' @@ -14643,7 +14867,7 @@ '\n' ' This method clears all references to local variables held by ' 'the\n' - ' frame. Also, if the frame belonged to a generator, the ' + ' frame. Also, if the frame belonged to a *generator*, the ' 'generator\n' ' is finalized. This helps break reference cycles involving ' 'frame\n' @@ -14664,11 +14888,14 @@ 'Traceback objects\n' '-----------------\n' '\n' - 'Traceback objects represent a stack trace of an exception. A\n' + 'Traceback objects represent the stack trace of an exception. A\n' 'traceback object is implicitly created when an exception occurs, ' 'and\n' 'may also be explicitly created by calling "types.TracebackType".\n' '\n' + 'Changed in version 3.7: Traceback objects can now be explicitly\n' + 'instantiated from Python code.\n' + '\n' 'For implicitly created tracebacks, when the search for an ' 'exception\n' 'handler unwinds the execution stack, at each unwound level a ' @@ -14691,30 +14918,40 @@ 'linked\n' 'to form a full stack trace.\n' '\n' - 'Special read-only attributes: "tb_frame" points to the execution ' - 'frame\n' - 'of the current level; "tb_lineno" gives the line number where the\n' - 'exception occurred; "tb_lasti" indicates the precise instruction. ' - 'The\n' - 'line number and last instruction in the traceback may differ from ' + 'Special read-only attributes:\n' + '\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| traceback.tb_frame | Points to ' + 'the execution frame of the current |\n' + '| | level. ' + 'Accessing this attribute raises an |\n' + '| | auditing ' + 'event "object.__getattr__" with arguments |\n' + '| | "obj" and ' + '""tb_frame"". |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| traceback.tb_lineno | Gives the ' + 'line number where the exception occurred |\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '| traceback.tb_lasti | Indicates ' + 'the “precise instruction”. 
|\n' + '+----------------------------------------------------+----------------------------------------------------+\n' + '\n' + 'The line number and last instruction in the traceback may differ ' + 'from\n' + 'the line number of its frame object if the exception occurred in a\n' + '"try" statement with no matching except clause or with a "finally"\n' + 'clause.\n' + '\n' + 'traceback.tb_next\n' + '\n' + ' The special writable attribute "tb_next" is the next level in ' 'the\n' - 'line number of its frame object if the exception occurred in a ' - '"try"\n' - 'statement with no matching except clause or with a finally clause.\n' - '\n' - 'Accessing "tb_frame" raises an auditing event "object.__getattr__"\n' - 'with arguments "obj" and ""tb_frame"".\n' - '\n' - 'Special writable attribute: "tb_next" is the next level in the ' - 'stack\n' - 'trace (towards the frame where the exception occurred), or "None" ' - 'if\n' - 'there is no next level.\n' + ' stack trace (towards the frame where the exception occurred), ' + 'or\n' + ' "None" if there is no next level.\n' '\n' - 'Changed in version 3.7: Traceback objects can now be explicitly\n' - 'instantiated from Python code, and the "tb_next" attribute of ' - 'existing\n' - 'instances can be updated.\n' + ' Changed in version 3.7: This attribute is now writable\n' '\n' '\n' 'Slice objects\n' @@ -15291,7 +15528,7 @@ 'notation.\n' 'There are two flavors: built-in methods (such as "append()" ' 'on lists)\n' - 'and class instance methods. Built-in methods are described ' + 'and class instance method. Built-in methods are described ' 'with the\n' 'types that support them.\n' '\n' @@ -15299,8 +15536,8 @@ 'namespace)\n' 'through an instance, you get a special object: a *bound ' 'method* (also\n' - 'called *instance method*) object. When called, it will add ' - 'the "self"\n' + 'called instance method) object. When called, it will add the ' + '"self"\n' 'argument to the argument list. Bound methods have two ' 'special read-\n' 'only attributes: "m.__self__" is the object on which the ' @@ -15315,7 +15552,7 @@ 'arbitrary\n' 'attributes. However, since method attributes are actually ' 'stored on\n' - 'the underlying function object ("meth.__func__"), setting ' + 'the underlying function object ("method.__func__"), setting ' 'method\n' 'attributes on bound methods is disallowed. Attempting to ' 'set an\n' @@ -15340,7 +15577,7 @@ ' >>> c.method.whoami\n' " 'my name is method'\n" '\n' - 'See The standard type hierarchy for more information.\n', + 'See Instance methods for more information.\n', 'typesmodules': 'Modules\n' '*******\n' '\n' diff --git a/Lib/runpy.py b/Lib/runpy.py index 42f896c9cd5094..ef54d3282eee06 100644 --- a/Lib/runpy.py +++ b/Lib/runpy.py @@ -247,17 +247,17 @@ def _get_main_module_details(error=ImportError): sys.modules[main_name] = saved_main -def _get_code_from_file(run_name, fname): +def _get_code_from_file(fname): # Check for a compiled file first from pkgutil import read_code - decoded_path = os.path.abspath(os.fsdecode(fname)) - with io.open_code(decoded_path) as f: + code_path = os.path.abspath(fname) + with io.open_code(code_path) as f: code = read_code(f) if code is None: # That didn't work, so try it as normal source code - with io.open_code(decoded_path) as f: + with io.open_code(code_path) as f: code = compile(f.read(), fname, 'exec') - return code, fname + return code def run_path(path_name, init_globals=None, run_name=None): """Execute code located at the specified filesystem location. 
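# === Editor's note: illustrative sketch, not part of this patch =============
# run_path() (whose path handling is adjusted in the next hunk via
# os.fsdecode()) runs a script and returns the resulting module globals.
# A self-contained demo using a temporary script file:
import os
import runpy
import tempfile

def demo_run_path():
    with tempfile.NamedTemporaryFile("w", suffix=".py", delete=False) as f:
        f.write("ANSWER = 6 * 7\n")
        script = f.name
    try:
        return runpy.run_path(script)["ANSWER"]   # -> 42
    finally:
        os.unlink(script)
# =============================================================================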
@@ -279,12 +279,13 @@ def run_path(path_name, init_globals=None, run_name=None): pkg_name = run_name.rpartition(".")[0] from pkgutil import get_importer importer = get_importer(path_name) + path_name = os.fsdecode(path_name) if isinstance(importer, type(None)): # Not a valid sys.path entry, so run the code directly # execfile() doesn't help as we want to allow compiled files - code, fname = _get_code_from_file(run_name, path_name) + code = _get_code_from_file(path_name) return _run_module_code(code, init_globals, run_name, - pkg_name=pkg_name, script_name=fname) + pkg_name=pkg_name, script_name=path_name) else: # Finder is defined for path, so add it to # the start of sys.path diff --git a/Lib/site.py b/Lib/site.py index 6f5738b02cb23b..0631f3f6115ec0 100644 --- a/Lib/site.py +++ b/Lib/site.py @@ -74,6 +74,7 @@ import builtins import _sitebuiltins import io +import stat # Prefixes for site-packages; add additional prefixes like /usr/local here PREFIXES = [sys.prefix, sys.exec_prefix] @@ -168,6 +169,14 @@ def addpackage(sitedir, name, known_paths): else: reset = False fullname = os.path.join(sitedir, name) + try: + st = os.lstat(fullname) + except OSError: + return + if ((getattr(st, 'st_flags', 0) & stat.UF_HIDDEN) or + (getattr(st, 'st_file_attributes', 0) & stat.FILE_ATTRIBUTE_HIDDEN)): + _trace(f"Skipping hidden .pth file: {fullname!r}") + return _trace(f"Processing .pth file: {fullname!r}") try: # locale encoding is not ideal especially on Windows. But we have used @@ -221,7 +230,8 @@ def addsitedir(sitedir, known_paths=None): names = os.listdir(sitedir) except OSError: return - names = [name for name in names if name.endswith(".pth")] + names = [name for name in names + if name.endswith(".pth") and not name.startswith(".")] for name in sorted(names): addpackage(sitedir, name, known_paths) if reset: @@ -433,6 +443,20 @@ def setcopyright(): def sethelper(): builtins.help = _sitebuiltins._Helper() + +def gethistoryfile(): + """Check if the PYTHON_HISTORY environment variable is set and define + it as the .python_history file. If PYTHON_HISTORY is not set, use the + default .python_history file. + """ + if not sys.flags.ignore_environment: + history = os.environ.get("PYTHON_HISTORY") + if history: + return history + return os.path.join(os.path.expanduser('~'), + '.python_history') + + def enablerlcompleter(): """Enable default readline configuration on interactive prompts, by registering a sys.__interactivehook__. @@ -467,13 +491,13 @@ def register_readline(): pass if readline.get_current_history_length() == 0: - # If no history was loaded, default to .python_history. + # If no history was loaded, default to .python_history, + # or PYTHON_HISTORY. 
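# === Editor's note: illustrative sketch, not part of this patch =============
# The lookup order implemented by gethistoryfile() above, restated as a
# stand-alone helper: PYTHON_HISTORY wins unless environment isolation
# (-E or -I) is in effect, otherwise ~/.python_history is used.
import os
import sys

def history_file():
    if not sys.flags.ignore_environment:
        history = os.environ.get("PYTHON_HISTORY")
        if history:
            return history
    return os.path.join(os.path.expanduser("~"), ".python_history")
# e.g. `PYTHON_HISTORY=/tmp/hist python` makes history_file() return "/tmp/hist".
# =============================================================================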
# The guard is necessary to avoid doubling history size at # each interpreter exit when readline was already configured # through a PYTHONSTARTUP hook, see: # http://bugs.python.org/issue5845#msg198636 - history = os.path.join(os.path.expanduser('~'), - '.python_history') + history = gethistoryfile() try: readline.read_history_file(history) except OSError: diff --git a/Lib/socket.py b/Lib/socket.py index 5f0a1f40e25b94..77986fc2e48099 100644 --- a/Lib/socket.py +++ b/Lib/socket.py @@ -382,7 +382,7 @@ def _sendfile_use_sendfile(self, file, offset=0, count=None): if timeout and not selector_select(timeout): raise TimeoutError('timed out') if count: - blocksize = count - total_sent + blocksize = min(count - total_sent, blocksize) if blocksize <= 0: break try: diff --git a/Lib/sqlite3/dump.py b/Lib/sqlite3/dump.py index ead3360ce67608..719dfc8947697d 100644 --- a/Lib/sqlite3/dump.py +++ b/Lib/sqlite3/dump.py @@ -26,6 +26,10 @@ def _iterdump(connection): writeable_schema = False cu = connection.cursor() + # Disable foreign key constraints, if there is any foreign key violation. + violations = cu.execute("PRAGMA foreign_key_check").fetchall() + if violations: + yield('PRAGMA foreign_keys=OFF;') yield('BEGIN TRANSACTION;') # sqlite_master table contains the SQL CREATE statements for the database. diff --git a/Lib/ssl.py b/Lib/ssl.py index d01484964b6895..74a9d2d8fd4fb0 100644 --- a/Lib/ssl.py +++ b/Lib/ssl.py @@ -110,7 +110,7 @@ try: from _ssl import RAND_egd except ImportError: - # LibreSSL does not provide RAND_egd + # RAND_egd is not supported on some platforms pass diff --git a/Lib/stat.py b/Lib/stat.py index 52cadbf04f6c88..9167ab185944fb 100644 --- a/Lib/stat.py +++ b/Lib/stat.py @@ -2,6 +2,7 @@ Suggested usage: from stat import * """ +import sys # Indices for stat struct members in the tuple returned by os.stat() @@ -110,19 +111,25 @@ def S_ISWHT(mode): S_IXOTH = 0o0001 # execute by others # Names for file flags - +UF_SETTABLE = 0x0000ffff # owner settable flags UF_NODUMP = 0x00000001 # do not dump file UF_IMMUTABLE = 0x00000002 # file may not be changed UF_APPEND = 0x00000004 # file may only be appended to UF_OPAQUE = 0x00000008 # directory is opaque when viewed through a union stack UF_NOUNLINK = 0x00000010 # file may not be renamed or deleted -UF_COMPRESSED = 0x00000020 # OS X: file is hfs-compressed -UF_HIDDEN = 0x00008000 # OS X: file should not be displayed +UF_COMPRESSED = 0x00000020 # macOS: file is compressed +UF_TRACKED = 0x00000040 # macOS: used for handling document IDs +UF_DATAVAULT = 0x00000080 # macOS: entitlement needed for I/O +UF_HIDDEN = 0x00008000 # macOS: file should not be displayed +SF_SETTABLE = 0xffff0000 # superuser settable flags SF_ARCHIVED = 0x00010000 # file may be archived SF_IMMUTABLE = 0x00020000 # file may not be changed SF_APPEND = 0x00040000 # file may only be appended to +SF_RESTRICTED = 0x00080000 # macOS: entitlement needed for writing SF_NOUNLINK = 0x00100000 # file may not be renamed or deleted SF_SNAPSHOT = 0x00200000 # file is a snapshot file +SF_FIRMLINK = 0x00800000 # macOS: file is a firmlink +SF_DATALESS = 0x40000000 # macOS: file is a dataless object _filemode_table = ( diff --git a/Lib/subprocess.py b/Lib/subprocess.py index d5bd9a9e31aa04..20db7747d5db13 100644 --- a/Lib/subprocess.py +++ b/Lib/subprocess.py @@ -1944,16 +1944,21 @@ def _execute_child(self, args, executable, preexec_fn, close_fds, SubprocessError) if issubclass(child_exception_type, OSError) and hex_errno: errno_num = int(hex_errno, 16) - child_exec_never_called = (err_msg 
== "noexec") - if child_exec_never_called: + if err_msg == "noexec:chdir": err_msg = "" # The error must be from chdir(cwd). err_filename = cwd + elif err_msg == "noexec": + err_msg = "" + err_filename = None else: err_filename = orig_executable if errno_num != 0: err_msg = os.strerror(errno_num) - raise child_exception_type(errno_num, err_msg, err_filename) + if err_filename is not None: + raise child_exception_type(errno_num, err_msg, err_filename) + else: + raise child_exception_type(errno_num, err_msg) raise child_exception_type(err_msg) diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py index 8e4e0765d46809..6a050fa541db1e 100644 --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -2693,12 +2693,17 @@ def test_make_pool(self): p.join() def test_terminate(self): + # Simulate slow tasks which take "forever" to complete + sleep_time = support.LONG_TIMEOUT + if self.TYPE == 'threads': - self.skipTest("Threads cannot be terminated") + # Thread pool workers can't be forced to quit, so if the first + # task starts early enough, we will end up waiting for it. + # Sleep for a shorter time, so the test doesn't block. + sleep_time = 1 - # Simulate slow tasks which take "forever" to complete p = self.Pool(3) - args = [support.LONG_TIMEOUT for i in range(10_000)] + args = [sleep_time for i in range(10_000)] result = p.map_async(time.sleep, args, chunksize=1) p.terminate() p.join() diff --git a/Lib/test/libregrtest/findtests.py b/Lib/test/libregrtest/findtests.py index 78343775bc5b99..ee890b5b1db4cd 100644 --- a/Lib/test/libregrtest/findtests.py +++ b/Lib/test/libregrtest/findtests.py @@ -19,6 +19,7 @@ SPLITTESTDIRS: set[TestName] = { "test_asyncio", "test_concurrent_futures", + "test_doctests", "test_future_stmt", "test_gdb", "test_inspect", diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py index 74b82caf742f20..93e7dbbd103934 100644 --- a/Lib/test/pickletester.py +++ b/Lib/test/pickletester.py @@ -2437,7 +2437,7 @@ def test_bad_getattr(self): # Issue #3514: crash when there is an infinite loop in __getattr__ x = BadGetattr() for proto in range(2): - with support.infinite_recursion(): + with support.infinite_recursion(25): self.assertRaises(RuntimeError, self.dumps, x, proto) for proto in range(2, pickle.HIGHEST_PROTOCOL + 1): s = self.dumps(x, proto) diff --git a/Lib/test/pythoninfo.py b/Lib/test/pythoninfo.py index 6dfb7f37e406a5..814358746d6d8a 100644 --- a/Lib/test/pythoninfo.py +++ b/Lib/test/pythoninfo.py @@ -865,26 +865,36 @@ def collect_subprocess(info_add): def collect_windows(info_add): - try: - import ctypes - except ImportError: - return - - if not hasattr(ctypes, 'WinDLL'): + if sys.platform != "win32": + # Code specific to Windows return - ntdll = ctypes.WinDLL('ntdll') - BOOLEAN = ctypes.c_ubyte - + # windows.RtlAreLongPathsEnabled: RtlAreLongPathsEnabled() + # windows.is_admin: IsUserAnAdmin() try: - RtlAreLongPathsEnabled = ntdll.RtlAreLongPathsEnabled - except AttributeError: - res = '' + import ctypes + if not hasattr(ctypes, 'WinDLL'): + raise ImportError + except ImportError: + pass else: - RtlAreLongPathsEnabled.restype = BOOLEAN - RtlAreLongPathsEnabled.argtypes = () - res = bool(RtlAreLongPathsEnabled()) - info_add('windows.RtlAreLongPathsEnabled', res) + ntdll = ctypes.WinDLL('ntdll') + BOOLEAN = ctypes.c_ubyte + try: + RtlAreLongPathsEnabled = ntdll.RtlAreLongPathsEnabled + except AttributeError: + res = '' + else: + RtlAreLongPathsEnabled.restype = BOOLEAN + RtlAreLongPathsEnabled.argtypes = () 
+ res = bool(RtlAreLongPathsEnabled()) + info_add('windows.RtlAreLongPathsEnabled', res) + + shell32 = ctypes.windll.shell32 + IsUserAnAdmin = shell32.IsUserAnAdmin + IsUserAnAdmin.restype = BOOLEAN + IsUserAnAdmin.argtypes = () + info_add('windows.is_admin', IsUserAnAdmin()) try: import _winapi @@ -893,6 +903,7 @@ def collect_windows(info_add): except (ImportError, AttributeError): pass + # windows.version_caption: "wmic os get Caption,Version /value" command import subprocess try: # When wmic.exe output is redirected to a pipe, @@ -919,6 +930,7 @@ def collect_windows(info_add): if line: info_add('windows.version', line) + # windows.ver: "ver" command try: proc = subprocess.Popen(["ver"], shell=True, stdout=subprocess.PIPE, @@ -937,6 +949,22 @@ def collect_windows(info_add): if line: info_add('windows.ver', line) + # windows.developer_mode: get AllowDevelopmentWithoutDevLicense registry + import winreg + try: + key = winreg.OpenKey( + winreg.HKEY_LOCAL_MACHINE, + r"SOFTWARE\Microsoft\Windows\CurrentVersion\AppModelUnlock") + subkey = "AllowDevelopmentWithoutDevLicense" + try: + value, value_type = winreg.QueryValueEx(key, subkey) + finally: + winreg.CloseKey(key) + except OSError: + pass + else: + info_add('windows.developer_mode', "enabled" if value else "disabled") + def collect_fips(info_add): try: diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index e5fb725a30b5b8..8344dd1849c61d 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -2377,7 +2377,10 @@ def _get_c_recursion_limit(): return _testcapi.Py_C_RECURSION_LIMIT except (ImportError, AttributeError): # Originally taken from Include/cpython/pystate.h . - return 8000 + if sys.platform == 'win32': + return 4000 + else: + return 10000 # The default C recursion limit. Py_C_RECURSION_LIMIT = _get_c_recursion_limit() diff --git a/Lib/test/support/pty_helper.py b/Lib/test/support/pty_helper.py index 11037d22516448..6587fd40333c51 100644 --- a/Lib/test/support/pty_helper.py +++ b/Lib/test/support/pty_helper.py @@ -58,3 +58,23 @@ def terminate(proc): input = b"" # Stop writing if not input: sel.modify(master, selectors.EVENT_READ) + + +###################################################################### +## Fake stdin (for testing interactive debugging) +###################################################################### + +class FakeInput: + """ + A fake input stream for pdb's interactive debugger. Whenever a + line is read, print it (to simulate the user typing it), and then + return it. The set of lines to return is specified in the + constructor; they should not have trailing newlines. 
+ """ + def __init__(self, lines): + self.lines = lines + + def readline(self): + line = self.lines.pop(0) + print(line) + return line + '\n' diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py index 64fcb02309de77..3789ac22e3899c 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -1126,7 +1126,7 @@ def next(self): def test_ast_recursion_limit(self): fail_depth = support.EXCEEDS_RECURSION_LIMIT crash_depth = 100_000 - success_depth = 1200 + success_depth = int(support.Py_C_RECURSION_LIMIT * 0.8) if _testinternalcapi is not None: remaining = _testinternalcapi.get_c_recursion_remaining() success_depth = min(success_depth, remaining) diff --git a/Lib/test/test_asyncgen.py b/Lib/test/test_asyncgen.py index a49630112af510..39605dca3886c8 100644 --- a/Lib/test/test_asyncgen.py +++ b/Lib/test/test_asyncgen.py @@ -379,7 +379,10 @@ async def async_gen_wrapper(): def test_async_gen_exception_12(self): async def gen(): - await anext(me) + with self.assertWarnsRegex(RuntimeWarning, + f"coroutine method 'asend' of '{gen.__qualname__}' " + f"was never awaited"): + await anext(me) yield 123 me = gen() @@ -395,7 +398,12 @@ async def gen(): yield 123 with self.assertWarns(DeprecationWarning): - gen().athrow(GeneratorExit, GeneratorExit(), None) + x = gen().athrow(GeneratorExit, GeneratorExit(), None) + with self.assertWarnsRegex(RuntimeWarning, + f"coroutine method 'athrow' of '{gen.__qualname__}' " + f"was never awaited"): + del x + gc_collect() def test_async_gen_api_01(self): async def gen(): @@ -1564,6 +1572,11 @@ async def main(): self.assertIsInstance(message['exception'], ZeroDivisionError) self.assertIn('unhandled exception during asyncio.run() shutdown', message['message']) + with self.assertWarnsRegex(RuntimeWarning, + f"coroutine method 'aclose' of '{async_iterate.__qualname__}' " + f"was never awaited"): + del message, messages + gc_collect() def test_async_gen_expression_01(self): async def arange(n): @@ -1617,6 +1630,10 @@ async def main(): asyncio.run(main()) self.assertEqual([], messages) + with self.assertWarnsRegex(RuntimeWarning, + f"coroutine method 'aclose' of '{async_iterate.__qualname__}' " + f"was never awaited"): + gc_collect() def test_async_gen_await_same_anext_coro_twice(self): async def async_iterate(): @@ -1701,6 +1718,14 @@ def test_asend(self): async def gen(): yield 1 + # gh-113753: asend objects allocated from a free-list should warn. + # Ensure there is a finalized 'asend' object ready to be reused. 
+ try: + g = gen() + g.asend(None).send(None) + except StopIteration: + pass + msg = f"coroutine method 'asend' of '{gen.__qualname__}' was never awaited" with self.assertWarnsRegex(RuntimeWarning, msg): g = gen() diff --git a/Lib/test/test_asyncio/test_futures.py b/Lib/test/test_asyncio/test_futures.py index 2184b2091f84ee..d3e8efec1c04c2 100644 --- a/Lib/test/test_asyncio/test_futures.py +++ b/Lib/test/test_asyncio/test_futures.py @@ -270,10 +270,6 @@ def test_exception(self): f = self._new_future(loop=self.loop) self.assertRaises(asyncio.InvalidStateError, f.exception) - # StopIteration cannot be raised into a Future - CPython issue26221 - self.assertRaisesRegex(TypeError, "StopIteration .* cannot be raised", - f.set_exception, StopIteration) - f.set_exception(exc) self.assertFalse(f.cancelled()) self.assertTrue(f.done()) @@ -283,6 +279,25 @@ def test_exception(self): self.assertRaises(asyncio.InvalidStateError, f.set_exception, None) self.assertFalse(f.cancel()) + def test_stop_iteration_exception(self, stop_iteration_class=StopIteration): + exc = stop_iteration_class() + f = self._new_future(loop=self.loop) + f.set_exception(exc) + self.assertFalse(f.cancelled()) + self.assertTrue(f.done()) + self.assertRaises(RuntimeError, f.result) + exc = f.exception() + cause = exc.__cause__ + self.assertIsInstance(exc, RuntimeError) + self.assertRegex(str(exc), 'StopIteration .* cannot be raised') + self.assertIsInstance(cause, stop_iteration_class) + + def test_stop_iteration_subclass_exception(self): + class MyStopIteration(StopIteration): + pass + + self.test_stop_iteration_exception(MyStopIteration) + def test_exception_class(self): f = self._new_future(loop=self.loop) f.set_exception(RuntimeError) diff --git a/Lib/test/test_asyncio/test_locks.py b/Lib/test/test_asyncio/test_locks.py index f6c6a282429a21..9029efd2355b46 100644 --- a/Lib/test/test_asyncio/test_locks.py +++ b/Lib/test/test_asyncio/test_locks.py @@ -758,6 +758,63 @@ async def test_timeout_in_block(self): with self.assertRaises(asyncio.TimeoutError): await asyncio.wait_for(condition.wait(), timeout=0.5) + async def test_cancelled_error_wakeup(self): + # Test that a cancelled error, received when awaiting wakeup, + # will be re-raised un-modified. + wake = False + raised = None + cond = asyncio.Condition() + + async def func(): + nonlocal raised + async with cond: + with self.assertRaises(asyncio.CancelledError) as err: + await cond.wait_for(lambda: wake) + raised = err.exception + raise raised + + task = asyncio.create_task(func()) + await asyncio.sleep(0) + # Task is waiting on the condition, cancel it there. + task.cancel(msg="foo") + with self.assertRaises(asyncio.CancelledError) as err: + await task + self.assertEqual(err.exception.args, ("foo",)) + # We should have got the _same_ exception instance as the one + # originally raised. + self.assertIs(err.exception, raised) + + async def test_cancelled_error_re_aquire(self): + # Test that a cancelled error, received when re-aquiring lock, + # will be re-raised un-modified. + wake = False + raised = None + cond = asyncio.Condition() + + async def func(): + nonlocal raised + async with cond: + with self.assertRaises(asyncio.CancelledError) as err: + await cond.wait_for(lambda: wake) + raised = err.exception + raise raised + + task = asyncio.create_task(func()) + await asyncio.sleep(0) + # Task is waiting on the condition + await cond.acquire() + wake = True + cond.notify() + await asyncio.sleep(0) + # Task is now trying to re-acquire the lock, cancel it there. 
+ task.cancel(msg="foo") + cond.release() + with self.assertRaises(asyncio.CancelledError) as err: + await task + self.assertEqual(err.exception.args, ("foo",)) + # We should have got the _same_ exception instance as the one + # originally raised. + self.assertIs(err.exception, raised) class SemaphoreTests(unittest.IsolatedAsyncioTestCase): @@ -1044,6 +1101,62 @@ async def c3(result): await asyncio.gather(*tasks, return_exceptions=True) self.assertEqual([2, 3], result) + async def test_acquire_fifo_order_4(self): + # Test that a successful `acquire()` will wake up multiple Tasks + # that were waiting in the Semaphore queue due to FIFO rules. + sem = asyncio.Semaphore(0) + result = [] + count = 0 + + async def c1(result): + # First task immediately waits for semaphore. It will be awoken by c2. + self.assertEqual(sem._value, 0) + await sem.acquire() + # We should have woken up all waiting tasks now. + self.assertEqual(sem._value, 0) + # Create a fourth task. It should run after c3, not c2. + nonlocal t4 + t4 = asyncio.create_task(c4(result)) + result.append(1) + return True + + async def c2(result): + # The second task begins by releasing semaphore three times, + # for c1, c2, and c3. + sem.release() + sem.release() + sem.release() + self.assertEqual(sem._value, 2) + # It is locked, because c1 hasn't woken up yet. + self.assertTrue(sem.locked()) + await sem.acquire() + result.append(2) + return True + + async def c3(result): + await sem.acquire() + self.assertTrue(sem.locked()) + result.append(3) + return True + + async def c4(result): + result.append(4) + return True + + t1 = asyncio.create_task(c1(result)) + t2 = asyncio.create_task(c2(result)) + t3 = asyncio.create_task(c3(result)) + t4 = None + + await asyncio.sleep(0) + # Three tasks are in the queue, the first hasn't woken up yet. 
+ self.assertEqual(sem._value, 2) + self.assertEqual(len(sem._waiters), 3) + await asyncio.sleep(0) + + tasks = [t1, t2, t3, t4] + await asyncio.gather(*tasks) + self.assertEqual([1, 2, 3, 4], result) class BarrierTests(unittest.IsolatedAsyncioTestCase): diff --git a/Lib/test/test_asyncio/test_server.py b/Lib/test/test_asyncio/test_server.py index f22cf3026e244b..918faac909b9bf 100644 --- a/Lib/test/test_asyncio/test_server.py +++ b/Lib/test/test_asyncio/test_server.py @@ -200,13 +200,13 @@ async def test_unix_server_sock_cleanup(self): async def serve(*args): pass - sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - sock.bind(addr) + with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock: + sock.bind(addr) - srv = await asyncio.start_unix_server(serve, sock=sock) + srv = await asyncio.start_unix_server(serve, sock=sock) - srv.close() - self.assertFalse(os.path.exists(addr)) + srv.close() + self.assertFalse(os.path.exists(addr)) @socket_helper.skip_unless_bind_unix_socket async def test_unix_server_cleanup_gone(self): @@ -215,14 +215,14 @@ async def test_unix_server_cleanup_gone(self): async def serve(*args): pass - sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - sock.bind(addr) + with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock: + sock.bind(addr) - srv = await asyncio.start_unix_server(serve, sock=sock) + srv = await asyncio.start_unix_server(serve, sock=sock) - os.unlink(addr) + os.unlink(addr) - srv.close() + srv.close() @socket_helper.skip_unless_bind_unix_socket async def test_unix_server_cleanup_replaced(self): @@ -234,11 +234,11 @@ async def serve(*args): srv = await asyncio.start_unix_server(serve, addr) os.unlink(addr) - sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - sock.bind(addr) + with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock: + sock.bind(addr) - srv.close() - self.assertTrue(os.path.exists(addr)) + srv.close() + self.assertTrue(os.path.exists(addr)) @socket_helper.skip_unless_bind_unix_socket async def test_unix_server_cleanup_prevented(self): diff --git a/Lib/test/test_asyncio/test_ssl.py b/Lib/test/test_asyncio/test_ssl.py index e9cc735613fb8e..e072ede29ee3c7 100644 --- a/Lib/test/test_asyncio/test_ssl.py +++ b/Lib/test/test_asyncio/test_ssl.py @@ -1,3 +1,7 @@ +# Contains code from https://github.com/MagicStack/uvloop/tree/v0.16.0 +# SPDX-License-Identifier: PSF-2.0 AND (MIT OR Apache-2.0) +# SPDX-FileCopyrightText: Copyright (c) 2015-2021 MagicStack Inc. http://magic.io + import asyncio import contextlib import gc diff --git a/Lib/test/test_asyncio/test_subprocess.py b/Lib/test/test_asyncio/test_subprocess.py index 179c8cb8cc17cf..859d2932c33fed 100644 --- a/Lib/test/test_asyncio/test_subprocess.py +++ b/Lib/test/test_asyncio/test_subprocess.py @@ -975,8 +975,13 @@ async def in_thread(): async def main(): # asyncio.Runner did not call asyncio.set_event_loop() - with self.assertRaises(RuntimeError): - asyncio.get_event_loop_policy().get_event_loop() + with warnings.catch_warnings(): + warnings.simplefilter('error', DeprecationWarning) + # get_event_loop() raises DeprecationWarning if + # set_event_loop() was never called and RuntimeError if + # it was called at least once. 
+ with self.assertRaises((RuntimeError, DeprecationWarning)): + asyncio.get_event_loop_policy().get_event_loop() return await asyncio.to_thread(asyncio.run, in_thread()) with self.assertWarns(DeprecationWarning): asyncio.set_child_watcher(asyncio.PidfdChildWatcher()) diff --git a/Lib/test/test_asyncio/test_timeouts.py b/Lib/test/test_asyncio/test_timeouts.py index f54e79e4d8e600..1f7f9ee696a525 100644 --- a/Lib/test/test_asyncio/test_timeouts.py +++ b/Lib/test/test_asyncio/test_timeouts.py @@ -116,15 +116,68 @@ async def test_foreign_exception_passed(self): raise KeyError self.assertFalse(cm.expired()) + async def test_timeout_exception_context(self): + with self.assertRaises(TimeoutError) as cm: + async with asyncio.timeout(0.01): + try: + 1/0 + finally: + await asyncio.sleep(1) + e = cm.exception + # Expect TimeoutError caused by CancelledError raised during handling + # of ZeroDivisionError. + e2 = e.__cause__ + self.assertIsInstance(e2, asyncio.CancelledError) + self.assertIs(e.__context__, e2) + self.assertIsNone(e2.__cause__) + self.assertIsInstance(e2.__context__, ZeroDivisionError) + async def test_foreign_exception_on_timeout(self): async def crash(): try: await asyncio.sleep(1) finally: 1/0 - with self.assertRaises(ZeroDivisionError): + with self.assertRaises(ZeroDivisionError) as cm: async with asyncio.timeout(0.01): await crash() + e = cm.exception + # Expect ZeroDivisionError raised during handling of TimeoutError + # caused by CancelledError. + self.assertIsNone(e.__cause__) + e2 = e.__context__ + self.assertIsInstance(e2, TimeoutError) + e3 = e2.__cause__ + self.assertIsInstance(e3, asyncio.CancelledError) + self.assertIs(e2.__context__, e3) + + async def test_foreign_exception_on_timeout_2(self): + with self.assertRaises(ZeroDivisionError) as cm: + async with asyncio.timeout(0.01): + try: + try: + raise ValueError + finally: + await asyncio.sleep(1) + finally: + try: + raise KeyError + finally: + 1/0 + e = cm.exception + # Expect ZeroDivisionError raised during handling of KeyError + # raised during handling of TimeoutError caused by CancelledError. + self.assertIsNone(e.__cause__) + e2 = e.__context__ + self.assertIsInstance(e2, KeyError) + self.assertIsNone(e2.__cause__) + e3 = e2.__context__ + self.assertIsInstance(e3, TimeoutError) + e4 = e3.__cause__ + self.assertIsInstance(e4, asyncio.CancelledError) + self.assertIsNone(e4.__cause__) + self.assertIsInstance(e4.__context__, ValueError) + self.assertIs(e3.__context__, e4) async def test_foreign_cancel_doesnt_timeout_if_not_expired(self): with self.assertRaises(asyncio.CancelledError): @@ -219,14 +272,30 @@ async def test_repr_disabled(self): self.assertEqual(repr(cm), r"") async def test_nested_timeout_in_finally(self): - with self.assertRaises(TimeoutError): + with self.assertRaises(TimeoutError) as cm1: async with asyncio.timeout(0.01): try: await asyncio.sleep(1) finally: - with self.assertRaises(TimeoutError): + with self.assertRaises(TimeoutError) as cm2: async with asyncio.timeout(0.01): await asyncio.sleep(10) + e1 = cm1.exception + # Expect TimeoutError caused by CancelledError. + e12 = e1.__cause__ + self.assertIsInstance(e12, asyncio.CancelledError) + self.assertIsNone(e12.__cause__) + self.assertIsNone(e12.__context__) + self.assertIs(e1.__context__, e12) + e2 = cm2.exception + # Expect TimeoutError caused by CancelledError raised during + # handling of other CancelledError (which is the same as in + # the above chain). 
+ e22 = e2.__cause__ + self.assertIsInstance(e22, asyncio.CancelledError) + self.assertIsNone(e22.__cause__) + self.assertIs(e22.__context__, e12) + self.assertIs(e2.__context__, e22) async def test_timeout_after_cancellation(self): try: @@ -235,7 +304,7 @@ async def test_timeout_after_cancellation(self): except asyncio.CancelledError: pass finally: - with self.assertRaises(TimeoutError): + with self.assertRaises(TimeoutError) as cm: async with asyncio.timeout(0.0): await asyncio.sleep(1) # some cleanup @@ -251,13 +320,6 @@ async def test_cancel_in_timeout_after_cancellation(self): asyncio.current_task().cancel() await asyncio.sleep(2) # some cleanup - async def test_timeout_exception_cause (self): - with self.assertRaises(asyncio.TimeoutError) as exc: - async with asyncio.timeout(0): - await asyncio.sleep(1) - cause = exc.exception.__cause__ - assert isinstance(cause, asyncio.CancelledError) - async def test_timeout_already_entered(self): async with asyncio.timeout(0.01) as cm: with self.assertRaisesRegex(RuntimeError, "has already been entered"): @@ -303,6 +365,47 @@ async def test_timeout_without_task(self): with self.assertRaisesRegex(RuntimeError, "has not been entered"): cm.reschedule(0.02) + async def test_timeout_taskgroup(self): + async def task(): + try: + await asyncio.sleep(2) # Will be interrupted after 0.01 second + finally: + 1/0 # Crash in cleanup + + with self.assertRaises(ExceptionGroup) as cm: + async with asyncio.timeout(0.01): + async with asyncio.TaskGroup() as tg: + tg.create_task(task()) + try: + raise ValueError + finally: + await asyncio.sleep(1) + eg = cm.exception + # Expect ExceptionGroup raised during handling of TimeoutError caused + # by CancelledError raised during handling of ValueError. + self.assertIsNone(eg.__cause__) + e_1 = eg.__context__ + self.assertIsInstance(e_1, TimeoutError) + e_2 = e_1.__cause__ + self.assertIsInstance(e_2, asyncio.CancelledError) + self.assertIsNone(e_2.__cause__) + self.assertIsInstance(e_2.__context__, ValueError) + self.assertIs(e_1.__context__, e_2) + + self.assertEqual(len(eg.exceptions), 1, eg) + e1 = eg.exceptions[0] + # Expect ZeroDivisionError raised during handling of TimeoutError + # caused by CancelledError (it is a different CancelledError). 
+ self.assertIsInstance(e1, ZeroDivisionError) + self.assertIsNone(e1.__cause__) + e2 = e1.__context__ + self.assertIsInstance(e2, TimeoutError) + e3 = e2.__cause__ + self.assertIsInstance(e3, asyncio.CancelledError) + self.assertIsNone(e3.__context__) + self.assertIsNone(e3.__cause__) + self.assertIs(e2.__context__, e3) + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_base64.py b/Lib/test/test_base64.py index fa03fa1d61ceab..f6171d3ed4efd7 100644 --- a/Lib/test/test_base64.py +++ b/Lib/test/test_base64.py @@ -586,6 +586,7 @@ def test_a85decode(self): eq(base64.a85decode(b'y+', b"www.python.org") @@ -689,6 +690,8 @@ def test_a85decode_errors(self): self.assertRaises(ValueError, base64.a85decode, b's8W', adobe=False) self.assertRaises(ValueError, base64.a85decode, b's8W-', adobe=False) self.assertRaises(ValueError, base64.a85decode, b's8W-"', adobe=False) + self.assertRaises(ValueError, base64.a85decode, b'aaaay', + foldspaces=True) def test_b85decode_errors(self): illegal = list(range(33)) + \ diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py index e15492783aeec1..fcddd147bac63e 100644 --- a/Lib/test/test_builtin.py +++ b/Lib/test/test_builtin.py @@ -629,8 +629,8 @@ def __dir__(self): def test___ne__(self): self.assertFalse(None.__ne__(None)) - self.assertTrue(None.__ne__(0)) - self.assertTrue(None.__ne__("abc")) + self.assertIs(None.__ne__(0), NotImplemented) + self.assertIs(None.__ne__("abc"), NotImplemented) def test_divmod(self): self.assertEqual(divmod(12, 7), (1, 5)) diff --git a/Lib/test/test_call.py b/Lib/test/test_call.py index b1c78d7136fc9b..3c8fc35e3c116d 100644 --- a/Lib/test/test_call.py +++ b/Lib/test/test_call.py @@ -1,5 +1,6 @@ import unittest -from test.support import cpython_only, requires_limited_api, skip_on_s390x +from test.support import (cpython_only, is_wasi, requires_limited_api, Py_DEBUG, + set_recursion_limit, skip_on_s390x) try: import _testcapi except ImportError: @@ -990,6 +991,7 @@ def case_change_over_substitution(BLuch=None, Luch = None, fluch = None): class TestRecursion(unittest.TestCase): @skip_on_s390x + @unittest.skipIf(is_wasi and Py_DEBUG, "requires deep stack") def test_super_deep(self): def recurse(n): @@ -1010,9 +1012,7 @@ def c_py_recurse(m): if m: _testcapi.pyobject_vectorcall(py_recurse, (1000, m), ()) - depth = sys.getrecursionlimit() - sys.setrecursionlimit(100_000) - try: + with set_recursion_limit(100_000): recurse(90_000) with self.assertRaises(RecursionError): recurse(101_000) @@ -1022,8 +1022,6 @@ def c_py_recurse(m): c_py_recurse(90) with self.assertRaises(RecursionError): c_py_recurse(100_000) - finally: - sys.setrecursionlimit(depth) class TestFunctionWithManyArgs(unittest.TestCase): diff --git a/Lib/test/test_capi/test_complex.py b/Lib/test/test_capi/test_complex.py index d6fc1f077c40aa..a5b59558e7f851 100644 --- a/Lib/test/test_capi/test_complex.py +++ b/Lib/test/test_capi/test_complex.py @@ -77,8 +77,14 @@ def test_realasdouble(self): self.assertEqual(realasdouble(FloatSubclass(4.25)), 4.25) # Test types with __complex__ dunder method - # Function doesn't support classes with __complex__ dunder, see #109598 - self.assertRaises(TypeError, realasdouble, Complex()) + self.assertEqual(realasdouble(Complex()), 4.25) + self.assertRaises(TypeError, realasdouble, BadComplex()) + with self.assertWarns(DeprecationWarning): + self.assertEqual(realasdouble(BadComplex2()), 4.25) + with warnings.catch_warnings(): + warnings.simplefilter("error", DeprecationWarning) + self.assertRaises(DeprecationWarning, 
realasdouble, BadComplex2()) + self.assertRaises(RuntimeError, realasdouble, BadComplex3()) # Test types with __float__ dunder method self.assertEqual(realasdouble(Float()), 4.25) @@ -104,11 +110,22 @@ def test_imagasdouble(self): self.assertEqual(imagasdouble(FloatSubclass(4.25)), 0.0) # Test types with __complex__ dunder method - # Function doesn't support classes with __complex__ dunder, see #109598 - self.assertEqual(imagasdouble(Complex()), 0.0) + self.assertEqual(imagasdouble(Complex()), 0.5) + self.assertRaises(TypeError, imagasdouble, BadComplex()) + with self.assertWarns(DeprecationWarning): + self.assertEqual(imagasdouble(BadComplex2()), 0.5) + with warnings.catch_warnings(): + warnings.simplefilter("error", DeprecationWarning) + self.assertRaises(DeprecationWarning, imagasdouble, BadComplex2()) + self.assertRaises(RuntimeError, imagasdouble, BadComplex3()) + + # Test types with __float__ dunder method + self.assertEqual(imagasdouble(Float()), 0.0) + self.assertRaises(TypeError, imagasdouble, BadFloat()) + with self.assertWarns(DeprecationWarning): + self.assertEqual(imagasdouble(BadFloat2()), 0.0) - # Function returns 0.0 anyway, see #109598 - self.assertEqual(imagasdouble(object()), 0.0) + self.assertRaises(TypeError, imagasdouble, object()) # CRASHES imagasdouble(NULL) diff --git a/Lib/test/test_codeop.py b/Lib/test/test_codeop.py index 2abb6c6d935b7e..787bd1b6a79e20 100644 --- a/Lib/test/test_codeop.py +++ b/Lib/test/test_codeop.py @@ -223,6 +223,9 @@ def test_incomplete(self): ai("(x for x in") ai("(x for x in (") + ai('a = f"""') + ai('a = \\') + def test_invalid(self): ai = self.assertInvalid ai("a b") diff --git a/Lib/test/test_compile.py b/Lib/test/test_compile.py index 7850977428985f..9c36f053314f9f 100644 --- a/Lib/test/test_compile.py +++ b/Lib/test/test_compile.py @@ -450,7 +450,8 @@ def test_condition_expression_with_dead_blocks_compiles(self): def test_condition_expression_with_redundant_comparisons_compiles(self): # See gh-113054 - compile('if 9<9<9and 9or 9:9', '', 'exec') + with self.assertWarns(SyntaxWarning): + compile('if 9<9<9and 9or 9:9', '', 'exec') def test_dead_code_with_except_handler_compiles(self): compile(textwrap.dedent(""" @@ -622,12 +623,10 @@ def test_yet_more_evil_still_undecodable(self): @support.cpython_only @unittest.skipIf(support.is_wasi, "exhausts limited stack on WASI") def test_compiler_recursion_limit(self): - # Expected limit is Py_C_RECURSION_LIMIT * 2 - # Duplicating the limit here is a little ugly. - # Perhaps it should be exposed somewhere... 
- fail_depth = Py_C_RECURSION_LIMIT * 2 + 1 + # Expected limit is Py_C_RECURSION_LIMIT + fail_depth = Py_C_RECURSION_LIMIT + 1 crash_depth = Py_C_RECURSION_LIMIT * 100 - success_depth = int(Py_C_RECURSION_LIMIT * 1.8) + success_depth = int(Py_C_RECURSION_LIMIT * 0.8) def check_limit(prefix, repeated, mode="single"): expect_ok = prefix + repeated * success_depth @@ -1097,6 +1096,21 @@ async def test(aseq): code_lines = self.get_code_lines(test.__code__) self.assertEqual(expected_lines, code_lines) + def test_line_number_synthetic_jump_multiple_predecessors(self): + def f(): + for x in it: + try: + if C1: + yield 2 + except OSError: + pass + + # Ensure that all JUMP_BACKWARDs have line number + code = f.__code__ + for inst in dis.Bytecode(code): + if inst.opname == 'JUMP_BACKWARD': + self.assertIsNotNone(inst.positions.lineno) + def test_lineno_of_backward_jump(self): # Issue gh-107901 def f(): diff --git a/Lib/test/test_compileall.py b/Lib/test/test_compileall.py index 83a9532aecfac8..0ec6013dc11e3e 100644 --- a/Lib/test/test_compileall.py +++ b/Lib/test/test_compileall.py @@ -369,7 +369,9 @@ def test_strip_only_invalid(self): script = script_helper.make_script(path, "test", "1 / 0") bc = importlib.util.cache_from_source(script) stripdir = os.path.join(self.directory, *(fullpath[:2] + ['fake'])) - compileall.compile_dir(path, quiet=True, stripdir=stripdir) + with support.captured_stdout() as out: + compileall.compile_dir(path, quiet=True, stripdir=stripdir) + self.assertIn("not a valid prefix", out.getvalue()) rc, out, err = script_helper.assert_python_failure(bc) expected_not_in = os.path.join(self.directory, *fullpath[2:]) self.assertIn( diff --git a/Lib/test/test_compiler_codegen.py b/Lib/test/test_compiler_codegen.py index 6d7731ddba02c5..b5d1e2f9e4752c 100644 --- a/Lib/test/test_compiler_codegen.py +++ b/Lib/test/test_compiler_codegen.py @@ -21,7 +21,7 @@ def test_if_expression(self): ('TO_BOOL', 0, 1), ('POP_JUMP_IF_FALSE', false_lbl := self.Label(), 1), ('LOAD_CONST', 1, 1), - ('JUMP', exit_lbl := self.Label()), + ('JUMP_NO_INTERRUPT', exit_lbl := self.Label()), false_lbl, ('LOAD_CONST', 2, 1), exit_lbl, diff --git a/Lib/test/test_csv.py b/Lib/test/test_csv.py index 97b9bba24bcbca..36da86e6a2c622 100644 --- a/Lib/test/test_csv.py +++ b/Lib/test/test_csv.py @@ -297,13 +297,18 @@ def test_read_oddinputs(self): [b'abc'], None) def test_read_eol(self): - self._read_test(['a,b'], [['a','b']]) - self._read_test(['a,b\n'], [['a','b']]) - self._read_test(['a,b\r\n'], [['a','b']]) - self._read_test(['a,b\r'], [['a','b']]) - self.assertRaises(csv.Error, self._read_test, ['a,b\rc,d'], []) - self.assertRaises(csv.Error, self._read_test, ['a,b\nc,d'], []) - self.assertRaises(csv.Error, self._read_test, ['a,b\r\nc,d'], []) + self._read_test(['a,b', 'c,d'], [['a','b'], ['c','d']]) + self._read_test(['a,b\n', 'c,d\n'], [['a','b'], ['c','d']]) + self._read_test(['a,b\r\n', 'c,d\r\n'], [['a','b'], ['c','d']]) + self._read_test(['a,b\r', 'c,d\r'], [['a','b'], ['c','d']]) + + errmsg = "with newline=''" + with self.assertRaisesRegex(csv.Error, errmsg): + next(csv.reader(['a,b\rc,d'])) + with self.assertRaisesRegex(csv.Error, errmsg): + next(csv.reader(['a,b\nc,d'])) + with self.assertRaisesRegex(csv.Error, errmsg): + next(csv.reader(['a,b\r\nc,d'])) def test_read_eof(self): self._read_test(['a,"'], [['a', '']]) @@ -1411,8 +1416,7 @@ def test_ordered_dict_reader(self): class MiscTestCase(unittest.TestCase): def test__all__(self): - extra = {'__doc__', '__version__'} - support.check__all__(self, csv, 
('csv', '_csv'), extra=extra) + support.check__all__(self, csv, ('csv', '_csv')) def test_subclassable(self): # issue 44089 diff --git a/Lib/test/test_ctypes/_support.py b/Lib/test/test_ctypes/_support.py new file mode 100644 index 00000000000000..e4c2b33825ae8f --- /dev/null +++ b/Lib/test/test_ctypes/_support.py @@ -0,0 +1,24 @@ +# Some classes and types are not export to _ctypes module directly. + +import ctypes +from _ctypes import Structure, Union, _Pointer, Array, _SimpleCData, CFuncPtr + + +_CData = Structure.__base__ +assert _CData.__name__ == "_CData" + +class _X(Structure): + _fields_ = [("x", ctypes.c_int)] +CField = type(_X.x) + +# metaclasses +PyCStructType = type(Structure) +UnionType = type(Union) +PyCPointerType = type(_Pointer) +PyCArrayType = type(Array) +PyCSimpleType = type(_SimpleCData) +PyCFuncPtrType = type(CFuncPtr) + +# type flags +Py_TPFLAGS_DISALLOW_INSTANTIATION = 1 << 7 +Py_TPFLAGS_IMMUTABLETYPE = 1 << 8 diff --git a/Lib/test/test_ctypes/test_arrays.py b/Lib/test/test_ctypes/test_arrays.py index 6b6cebd3e20285..774316e227ff73 100644 --- a/Lib/test/test_ctypes/test_arrays.py +++ b/Lib/test/test_ctypes/test_arrays.py @@ -7,6 +7,8 @@ c_char, c_wchar, c_byte, c_ubyte, c_short, c_ushort, c_int, c_uint, c_long, c_ulonglong, c_float, c_double, c_longdouble) from test.support import bigmemtest, _2G +from ._support import (_CData, PyCArrayType, Py_TPFLAGS_DISALLOW_INSTANTIATION, + Py_TPFLAGS_IMMUTABLETYPE) formats = "bBhHiIlLqQfd" @@ -23,6 +25,18 @@ def ARRAY(*args): class ArrayTestCase(unittest.TestCase): + def test_inheritance_hierarchy(self): + self.assertEqual(Array.mro(), [Array, _CData, object]) + + self.assertEqual(PyCArrayType.__name__, "PyCArrayType") + self.assertEqual(type(PyCArrayType), type) + + def test_type_flags(self): + for cls in Array, PyCArrayType: + with self.subTest(cls=cls): + self.assertTrue(cls.__flags__ & Py_TPFLAGS_IMMUTABLETYPE) + self.assertFalse(cls.__flags__ & Py_TPFLAGS_DISALLOW_INSTANTIATION) + def test_simple(self): # create classes holding simple numeric types, and check # various properties. 
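The new Lib/test/test_ctypes/_support.py helper above recovers ctypes' unexported metaclasses by calling type() on the public classes, and the test files that follow use it to check type flags. A short illustration of that same trick outside the test suite is given below; the names mirror the helper, the flag values are the constants the tests hard-code, and the printed results are only expected on CPython builds where those assertions hold.

import ctypes
from ctypes import Structure, Array, _SimpleCData

# The metaclasses are not exported from _ctypes, but type() exposes them,
# exactly as the _support.py helper added above does.
PyCStructType = type(Structure)
PyCArrayType = type(Array)
PyCSimpleType = type(_SimpleCData)

class _Point(Structure):
    _fields_ = [("x", ctypes.c_int)]

CField = type(_Point.x)          # descriptor type backing struct fields

# Type-flag constants duplicated from the tests above.
Py_TPFLAGS_DISALLOW_INSTANTIATION = 1 << 7
Py_TPFLAGS_IMMUTABLETYPE = 1 << 8

print(PyCStructType.__name__)    # "PyCStructType" on CPython
# True on interpreters where the type_flags tests above pass.
print(bool(Structure.__flags__ & Py_TPFLAGS_IMMUTABLETYPE))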
diff --git a/Lib/test/test_ctypes/test_byteswap.py b/Lib/test/test_ctypes/test_byteswap.py index b97b57646ecd71..78eff0392c4548 100644 --- a/Lib/test/test_ctypes/test_byteswap.py +++ b/Lib/test/test_ctypes/test_byteswap.py @@ -363,6 +363,24 @@ class TestUnion(parent): self.assertEqual(s.point.x, 1) self.assertEqual(s.point.y, 2) + def test_build_struct_union_opposite_system_byteorder(self): + # gh-105102 + if sys.byteorder == "little": + _Structure = BigEndianStructure + _Union = BigEndianUnion + else: + _Structure = LittleEndianStructure + _Union = LittleEndianUnion + + class S1(_Structure): + _fields_ = [("a", c_byte), ("b", c_byte)] + + class U1(_Union): + _fields_ = [("s1", S1), ("ab", c_short)] + + class S2(_Structure): + _fields_ = [("u1", U1), ("c", c_byte)] + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_ctypes/test_funcptr.py b/Lib/test/test_ctypes/test_funcptr.py index 2ad40647e0cfbb..0eed39484fb39e 100644 --- a/Lib/test/test_ctypes/test_funcptr.py +++ b/Lib/test/test_ctypes/test_funcptr.py @@ -3,6 +3,8 @@ import unittest from ctypes import (CDLL, Structure, CFUNCTYPE, sizeof, _CFuncPtr, c_void_p, c_char_p, c_char, c_int, c_uint, c_long) +from ._support import (_CData, PyCFuncPtrType, Py_TPFLAGS_DISALLOW_INSTANTIATION, + Py_TPFLAGS_IMMUTABLETYPE) try: @@ -15,6 +17,18 @@ class CFuncPtrTestCase(unittest.TestCase): + def test_inheritance_hierarchy(self): + self.assertEqual(_CFuncPtr.mro(), [_CFuncPtr, _CData, object]) + + self.assertEqual(PyCFuncPtrType.__name__, "PyCFuncPtrType") + self.assertEqual(type(PyCFuncPtrType), type) + + def test_type_flags(self): + for cls in _CFuncPtr, PyCFuncPtrType: + with self.subTest(cls=cls): + self.assertTrue(_CFuncPtr.__flags__ & Py_TPFLAGS_IMMUTABLETYPE) + self.assertFalse(_CFuncPtr.__flags__ & Py_TPFLAGS_DISALLOW_INSTANTIATION) + def test_basic(self): X = WINFUNCTYPE(c_int, c_int, c_int) diff --git a/Lib/test/test_ctypes/test_loading.py b/Lib/test/test_ctypes/test_loading.py index 22db97b818c17e..59d7f51935f3cd 100644 --- a/Lib/test/test_ctypes/test_loading.py +++ b/Lib/test/test_ctypes/test_loading.py @@ -141,7 +141,7 @@ def test_load_hasattr(self): def test_load_dll_with_flags(self): _sqlite3 = import_helper.import_module("_sqlite3") src = _sqlite3.__file__ - if src.lower().endswith("_d.pyd"): + if os.path.basename(src).partition(".")[0].lower().endswith("_d"): ext = "_d.dll" else: ext = ".dll" diff --git a/Lib/test/test_ctypes/test_pointers.py b/Lib/test/test_ctypes/test_pointers.py index 8410174358c19d..8cf2114c282cab 100644 --- a/Lib/test/test_ctypes/test_pointers.py +++ b/Lib/test/test_ctypes/test_pointers.py @@ -10,6 +10,8 @@ c_byte, c_ubyte, c_short, c_ushort, c_int, c_uint, c_long, c_ulong, c_longlong, c_ulonglong, c_float, c_double) +from ._support import (_CData, PyCPointerType, Py_TPFLAGS_DISALLOW_INSTANTIATION, + Py_TPFLAGS_IMMUTABLETYPE) ctype_types = [c_byte, c_ubyte, c_short, c_ushort, c_int, c_uint, @@ -19,6 +21,18 @@ class PointersTestCase(unittest.TestCase): + def test_inheritance_hierarchy(self): + self.assertEqual(_Pointer.mro(), [_Pointer, _CData, object]) + + self.assertEqual(PyCPointerType.__name__, "PyCPointerType") + self.assertEqual(type(PyCPointerType), type) + + def test_type_flags(self): + for cls in _Pointer, PyCPointerType: + with self.subTest(cls=cls): + self.assertTrue(_Pointer.__flags__ & Py_TPFLAGS_IMMUTABLETYPE) + self.assertFalse(_Pointer.__flags__ & Py_TPFLAGS_DISALLOW_INSTANTIATION) + def test_pointer_crash(self): class A(POINTER(c_ulong)): diff --git 
a/Lib/test/test_ctypes/test_simplesubclasses.py b/Lib/test/test_ctypes/test_simplesubclasses.py index 6072b62de5d53a..c96798e67f23f7 100644 --- a/Lib/test/test_ctypes/test_simplesubclasses.py +++ b/Lib/test/test_ctypes/test_simplesubclasses.py @@ -1,5 +1,7 @@ import unittest -from ctypes import Structure, CFUNCTYPE, c_int +from ctypes import Structure, CFUNCTYPE, c_int, _SimpleCData +from ._support import (_CData, PyCSimpleType, Py_TPFLAGS_DISALLOW_INSTANTIATION, + Py_TPFLAGS_IMMUTABLETYPE) class MyInt(c_int): @@ -10,6 +12,19 @@ def __eq__(self, other): class Test(unittest.TestCase): + def test_inheritance_hierarchy(self): + self.assertEqual(_SimpleCData.mro(), [_SimpleCData, _CData, object]) + + self.assertEqual(PyCSimpleType.__name__, "PyCSimpleType") + self.assertEqual(type(PyCSimpleType), type) + + self.assertEqual(c_int.mro(), [c_int, _SimpleCData, _CData, object]) + + def test_type_flags(self): + for cls in _SimpleCData, PyCSimpleType: + with self.subTest(cls=cls): + self.assertTrue(_SimpleCData.__flags__ & Py_TPFLAGS_IMMUTABLETYPE) + self.assertFalse(_SimpleCData.__flags__ & Py_TPFLAGS_DISALLOW_INSTANTIATION) def test_compare(self): self.assertEqual(MyInt(3), MyInt(3)) diff --git a/Lib/test/test_ctypes/test_struct_fields.py b/Lib/test/test_ctypes/test_struct_fields.py index f60dfe5b42ef65..f474a02fa8db06 100644 --- a/Lib/test/test_ctypes/test_struct_fields.py +++ b/Lib/test/test_ctypes/test_struct_fields.py @@ -1,5 +1,7 @@ import unittest from ctypes import Structure, Union, sizeof, c_char, c_int +from ._support import (CField, Py_TPFLAGS_DISALLOW_INSTANTIATION, + Py_TPFLAGS_IMMUTABLETYPE) class StructFieldsTestCase(unittest.TestCase): @@ -12,7 +14,6 @@ class StructFieldsTestCase(unittest.TestCase): # 4. The type is subclassed # # When they are finalized, assigning _fields_ is no longer allowed. 
- def test_1_A(self): class X(Structure): pass @@ -56,11 +57,15 @@ class X(Structure): self.assertEqual(bytes(x), b'a\x00###') def test_6(self): - class X(Structure): - _fields_ = [("x", c_int)] - CField = type(X.x) self.assertRaises(TypeError, CField) + def test_cfield_type_flags(self): + self.assertTrue(CField.__flags__ & Py_TPFLAGS_DISALLOW_INSTANTIATION) + self.assertTrue(CField.__flags__ & Py_TPFLAGS_IMMUTABLETYPE) + + def test_cfield_inheritance_hierarchy(self): + self.assertEqual(CField.mro(), [CField, object]) + def test_gh99275(self): class BrokenStructure(Structure): def __init_subclass__(cls, **kwargs): diff --git a/Lib/test/test_ctypes/test_structures.py b/Lib/test/test_ctypes/test_structures.py index 3eafc77ca70aea..98bc4bdcac9306 100644 --- a/Lib/test/test_ctypes/test_structures.py +++ b/Lib/test/test_ctypes/test_structures.py @@ -12,6 +12,8 @@ from struct import calcsize from collections import namedtuple from test import support +from ._support import (_CData, PyCStructType, Py_TPFLAGS_DISALLOW_INSTANTIATION, + Py_TPFLAGS_IMMUTABLETYPE) class SubclassesTest(unittest.TestCase): @@ -70,6 +72,19 @@ class StructureTestCase(unittest.TestCase): "d": c_double, } + def test_inheritance_hierarchy(self): + self.assertEqual(Structure.mro(), [Structure, _CData, object]) + + self.assertEqual(PyCStructType.__name__, "PyCStructType") + self.assertEqual(type(PyCStructType), type) + + + def test_type_flags(self): + for cls in Structure, PyCStructType: + with self.subTest(cls=cls): + self.assertTrue(Structure.__flags__ & Py_TPFLAGS_IMMUTABLETYPE) + self.assertFalse(Structure.__flags__ & Py_TPFLAGS_DISALLOW_INSTANTIATION) + def test_simple_structs(self): for code, tp in self.formats.items(): class X(Structure): diff --git a/Lib/test/test_ctypes/test_unions.py b/Lib/test/test_ctypes/test_unions.py new file mode 100644 index 00000000000000..cf5344bdf19165 --- /dev/null +++ b/Lib/test/test_ctypes/test_unions.py @@ -0,0 +1,18 @@ +import unittest +from ctypes import Union +from ._support import (_CData, UnionType, Py_TPFLAGS_DISALLOW_INSTANTIATION, + Py_TPFLAGS_IMMUTABLETYPE) + + +class ArrayTestCase(unittest.TestCase): + def test_inheritance_hierarchy(self): + self.assertEqual(Union.mro(), [Union, _CData, object]) + + self.assertEqual(UnionType.__name__, "UnionType") + self.assertEqual(type(UnionType), type) + + def test_type_flags(self): + for cls in Union, UnionType: + with self.subTest(cls=Union): + self.assertTrue(Union.__flags__ & Py_TPFLAGS_IMMUTABLETYPE) + self.assertFalse(Union.__flags__ & Py_TPFLAGS_DISALLOW_INSTANTIATION) diff --git a/Lib/test/test_ctypes/test_win32.py b/Lib/test/test_ctypes/test_win32.py index 01e624f76f0685..4aaecd8d38f98f 100644 --- a/Lib/test/test_ctypes/test_win32.py +++ b/Lib/test/test_ctypes/test_win32.py @@ -9,6 +9,7 @@ _pointer_type_cache, c_void_p, c_char, c_int, c_long) from test import support +from ._support import Py_TPFLAGS_DISALLOW_INSTANTIATION, Py_TPFLAGS_IMMUTABLETYPE @unittest.skipUnless(sys.platform == "win32", 'Windows-specific test') @@ -73,6 +74,11 @@ def test_COMError(self): self.assertEqual(ex.text, "text") self.assertEqual(ex.details, ("details",)) + self.assertEqual(COMError.mro(), + [COMError, Exception, BaseException, object]) + self.assertFalse(COMError.__flags__ & Py_TPFLAGS_DISALLOW_INSTANTIATION) + self.assertTrue(COMError.__flags__ & Py_TPFLAGS_IMMUTABLETYPE) + @unittest.skipUnless(sys.platform == "win32", 'Windows-specific test') class TestWinError(unittest.TestCase): diff --git a/Lib/test/test_dis.py b/Lib/test/test_dis.py index 
e4c7e9ba4649f7..3ae81b2f5d62b0 100644 --- a/Lib/test/test_dis.py +++ b/Lib/test/test_dis.py @@ -577,14 +577,10 @@ async def _asyncwith(c): RETURN_CONST 0 (None) %4d L12: CLEANUP_THROW - - -- L13: JUMP_BACKWARD 26 (to L5) - -%4d L14: CLEANUP_THROW - - -- L15: JUMP_BACKWARD 11 (to L11) - -%4d L16: PUSH_EXC_INFO + L13: JUMP_BACKWARD_NO_INTERRUPT 25 (to L5) + L14: CLEANUP_THROW + L15: JUMP_BACKWARD_NO_INTERRUPT 9 (to L11) + L16: PUSH_EXC_INFO WITH_EXCEPT_START GET_AWAITABLE 2 LOAD_CONST 0 (None) @@ -630,8 +626,6 @@ async def _asyncwith(c): _asyncwith.__code__.co_firstlineno + 1, _asyncwith.__code__.co_firstlineno + 3, _asyncwith.__code__.co_firstlineno + 1, - _asyncwith.__code__.co_firstlineno + 1, - _asyncwith.__code__.co_firstlineno + 1, _asyncwith.__code__.co_firstlineno + 3, ) @@ -1742,34 +1736,34 @@ def _prepare_test_cases(): Instruction(opname='POP_EXCEPT', opcode=31, arg=None, argval=None, argrepr='', offset=328, start_offset=328, starts_line=False, line_number=25, label=None, positions=None), Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=330, start_offset=330, starts_line=False, line_number=25, label=None, positions=None), Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=332, start_offset=332, starts_line=False, line_number=25, label=None, positions=None), - Instruction(opname='JUMP_BACKWARD', opcode=77, arg=27, argval=284, argrepr='to L11', offset=334, start_offset=334, starts_line=False, line_number=25, label=None, positions=None), - Instruction(opname='COPY', opcode=61, arg=3, argval=3, argrepr='', offset=338, start_offset=338, starts_line=True, line_number=None, label=None, positions=None), - Instruction(opname='POP_EXCEPT', opcode=31, arg=None, argval=None, argrepr='', offset=340, start_offset=340, starts_line=False, line_number=None, label=None, positions=None), - Instruction(opname='RERAISE', opcode=102, arg=1, argval=1, argrepr='', offset=342, start_offset=342, starts_line=False, line_number=None, label=None, positions=None), - Instruction(opname='PUSH_EXC_INFO', opcode=33, arg=None, argval=None, argrepr='', offset=344, start_offset=344, starts_line=False, line_number=None, label=None, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=91, arg=4, argval='ZeroDivisionError', argrepr='ZeroDivisionError', offset=346, start_offset=346, starts_line=True, line_number=22, label=None, positions=None), - Instruction(opname='CHECK_EXC_MATCH', opcode=7, arg=None, argval=None, argrepr='', offset=356, start_offset=356, starts_line=False, line_number=22, label=None, positions=None), - Instruction(opname='POP_JUMP_IF_FALSE', opcode=97, arg=15, argval=392, argrepr='to L13', offset=358, start_offset=358, starts_line=False, line_number=22, label=None, positions=None), - Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=362, start_offset=362, starts_line=False, line_number=22, label=None, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=91, arg=3, argval='print', argrepr='print + NULL', offset=364, start_offset=364, starts_line=True, line_number=23, label=None, positions=None), - Instruction(opname='LOAD_CONST', opcode=83, arg=9, argval='Here we go, here we go, here we go...', argrepr="'Here we go, here we go, here we go...'", offset=374, start_offset=374, starts_line=False, line_number=23, label=None, positions=None), - Instruction(opname='CALL', opcode=53, arg=1, argval=1, argrepr='', offset=376, start_offset=376, starts_line=False, line_number=23, label=None, 
positions=None), - Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=384, start_offset=384, starts_line=False, line_number=23, label=None, positions=None), - Instruction(opname='POP_EXCEPT', opcode=31, arg=None, argval=None, argrepr='', offset=386, start_offset=386, starts_line=False, line_number=23, label=None, positions=None), - Instruction(opname='JUMP_BACKWARD', opcode=77, arg=54, argval=284, argrepr='to L11', offset=388, start_offset=388, starts_line=False, line_number=23, label=None, positions=None), - Instruction(opname='RERAISE', opcode=102, arg=0, argval=0, argrepr='', offset=392, start_offset=392, starts_line=True, line_number=22, label=13, positions=None), - Instruction(opname='COPY', opcode=61, arg=3, argval=3, argrepr='', offset=394, start_offset=394, starts_line=True, line_number=None, label=None, positions=None), - Instruction(opname='POP_EXCEPT', opcode=31, arg=None, argval=None, argrepr='', offset=396, start_offset=396, starts_line=False, line_number=None, label=None, positions=None), - Instruction(opname='RERAISE', opcode=102, arg=1, argval=1, argrepr='', offset=398, start_offset=398, starts_line=False, line_number=None, label=None, positions=None), - Instruction(opname='PUSH_EXC_INFO', opcode=33, arg=None, argval=None, argrepr='', offset=400, start_offset=400, starts_line=False, line_number=None, label=None, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=91, arg=3, argval='print', argrepr='print + NULL', offset=402, start_offset=402, starts_line=True, line_number=28, label=None, positions=None), - Instruction(opname='LOAD_CONST', opcode=83, arg=10, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=412, start_offset=412, starts_line=False, line_number=28, label=None, positions=None), - Instruction(opname='CALL', opcode=53, arg=1, argval=1, argrepr='', offset=414, start_offset=414, starts_line=False, line_number=28, label=None, positions=None), - Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=422, start_offset=422, starts_line=False, line_number=28, label=None, positions=None), - Instruction(opname='RERAISE', opcode=102, arg=0, argval=0, argrepr='', offset=424, start_offset=424, starts_line=False, line_number=28, label=None, positions=None), - Instruction(opname='COPY', opcode=61, arg=3, argval=3, argrepr='', offset=426, start_offset=426, starts_line=True, line_number=None, label=None, positions=None), - Instruction(opname='POP_EXCEPT', opcode=31, arg=None, argval=None, argrepr='', offset=428, start_offset=428, starts_line=False, line_number=None, label=None, positions=None), - Instruction(opname='RERAISE', opcode=102, arg=1, argval=1, argrepr='', offset=430, start_offset=430, starts_line=False, line_number=None, label=None, positions=None), + Instruction(opname='JUMP_BACKWARD_NO_INTERRUPT', opcode=78, arg=26, argval=284, argrepr='to L11', offset=334, start_offset=334, starts_line=False, line_number=25, label=None, positions=None), + Instruction(opname='COPY', opcode=61, arg=3, argval=3, argrepr='', offset=336, start_offset=336, starts_line=True, line_number=None, label=None, positions=None), + Instruction(opname='POP_EXCEPT', opcode=31, arg=None, argval=None, argrepr='', offset=338, start_offset=338, starts_line=False, line_number=None, label=None, positions=None), + Instruction(opname='RERAISE', opcode=102, arg=1, argval=1, argrepr='', offset=340, start_offset=340, starts_line=False, line_number=None, label=None, positions=None), + Instruction(opname='PUSH_EXC_INFO', 
opcode=33, arg=None, argval=None, argrepr='', offset=342, start_offset=342, starts_line=False, line_number=None, label=None, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=91, arg=4, argval='ZeroDivisionError', argrepr='ZeroDivisionError', offset=344, start_offset=344, starts_line=True, line_number=22, label=None, positions=None), + Instruction(opname='CHECK_EXC_MATCH', opcode=7, arg=None, argval=None, argrepr='', offset=354, start_offset=354, starts_line=False, line_number=22, label=None, positions=None), + Instruction(opname='POP_JUMP_IF_FALSE', opcode=97, arg=14, argval=388, argrepr='to L13', offset=356, start_offset=356, starts_line=False, line_number=22, label=None, positions=None), + Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=360, start_offset=360, starts_line=False, line_number=22, label=None, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=91, arg=3, argval='print', argrepr='print + NULL', offset=362, start_offset=362, starts_line=True, line_number=23, label=None, positions=None), + Instruction(opname='LOAD_CONST', opcode=83, arg=9, argval='Here we go, here we go, here we go...', argrepr="'Here we go, here we go, here we go...'", offset=372, start_offset=372, starts_line=False, line_number=23, label=None, positions=None), + Instruction(opname='CALL', opcode=53, arg=1, argval=1, argrepr='', offset=374, start_offset=374, starts_line=False, line_number=23, label=None, positions=None), + Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=382, start_offset=382, starts_line=False, line_number=23, label=None, positions=None), + Instruction(opname='POP_EXCEPT', opcode=31, arg=None, argval=None, argrepr='', offset=384, start_offset=384, starts_line=False, line_number=23, label=None, positions=None), + Instruction(opname='JUMP_BACKWARD_NO_INTERRUPT', opcode=78, arg=52, argval=284, argrepr='to L11', offset=386, start_offset=386, starts_line=False, line_number=23, label=None, positions=None), + Instruction(opname='RERAISE', opcode=102, arg=0, argval=0, argrepr='', offset=388, start_offset=388, starts_line=True, line_number=22, label=13, positions=None), + Instruction(opname='COPY', opcode=61, arg=3, argval=3, argrepr='', offset=390, start_offset=390, starts_line=True, line_number=None, label=None, positions=None), + Instruction(opname='POP_EXCEPT', opcode=31, arg=None, argval=None, argrepr='', offset=392, start_offset=392, starts_line=False, line_number=None, label=None, positions=None), + Instruction(opname='RERAISE', opcode=102, arg=1, argval=1, argrepr='', offset=394, start_offset=394, starts_line=False, line_number=None, label=None, positions=None), + Instruction(opname='PUSH_EXC_INFO', opcode=33, arg=None, argval=None, argrepr='', offset=396, start_offset=396, starts_line=False, line_number=None, label=None, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=91, arg=3, argval='print', argrepr='print + NULL', offset=398, start_offset=398, starts_line=True, line_number=28, label=None, positions=None), + Instruction(opname='LOAD_CONST', opcode=83, arg=10, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=408, start_offset=408, starts_line=False, line_number=28, label=None, positions=None), + Instruction(opname='CALL', opcode=53, arg=1, argval=1, argrepr='', offset=410, start_offset=410, starts_line=False, line_number=28, label=None, positions=None), + Instruction(opname='POP_TOP', opcode=32, arg=None, argval=None, argrepr='', offset=418, start_offset=418, 
starts_line=False, line_number=28, label=None, positions=None), + Instruction(opname='RERAISE', opcode=102, arg=0, argval=0, argrepr='', offset=420, start_offset=420, starts_line=False, line_number=28, label=None, positions=None), + Instruction(opname='COPY', opcode=61, arg=3, argval=3, argrepr='', offset=422, start_offset=422, starts_line=True, line_number=None, label=None, positions=None), + Instruction(opname='POP_EXCEPT', opcode=31, arg=None, argval=None, argrepr='', offset=424, start_offset=424, starts_line=False, line_number=None, label=None, positions=None), + Instruction(opname='RERAISE', opcode=102, arg=1, argval=1, argrepr='', offset=426, start_offset=426, starts_line=False, line_number=None, label=None, positions=None), ] # One last piece of inspect fodder to check the default line number handling diff --git a/Lib/test/test_doctest/__init__.py b/Lib/test/test_doctest/__init__.py new file mode 100644 index 00000000000000..4b16ecc31156a5 --- /dev/null +++ b/Lib/test/test_doctest/__init__.py @@ -0,0 +1,5 @@ +import os +from test.support import load_package_tests + +def load_tests(*args): + return load_package_tests(os.path.dirname(__file__), *args) diff --git a/Lib/test/doctest_aliases.py b/Lib/test/test_doctest/doctest_aliases.py similarity index 100% rename from Lib/test/doctest_aliases.py rename to Lib/test/test_doctest/doctest_aliases.py diff --git a/Lib/test/doctest_lineno.py b/Lib/test/test_doctest/doctest_lineno.py similarity index 100% rename from Lib/test/doctest_lineno.py rename to Lib/test/test_doctest/doctest_lineno.py diff --git a/Lib/test/sample_doctest.py b/Lib/test/test_doctest/sample_doctest.py similarity index 91% rename from Lib/test/sample_doctest.py rename to Lib/test/test_doctest/sample_doctest.py index 89eb5cb7cf1d97..049f737a0a44ac 100644 --- a/Lib/test/sample_doctest.py +++ b/Lib/test/test_doctest/sample_doctest.py @@ -32,8 +32,8 @@ def bar(): def test_silly_setup(): """ - >>> import test.test_doctest - >>> test.test_doctest.sillySetup + >>> import test.test_doctest.test_doctest + >>> test.test_doctest.test_doctest.sillySetup True """ diff --git a/Lib/test/sample_doctest_no_docstrings.py b/Lib/test/test_doctest/sample_doctest_no_docstrings.py similarity index 100% rename from Lib/test/sample_doctest_no_docstrings.py rename to Lib/test/test_doctest/sample_doctest_no_docstrings.py diff --git a/Lib/test/sample_doctest_no_doctests.py b/Lib/test/test_doctest/sample_doctest_no_doctests.py similarity index 100% rename from Lib/test/sample_doctest_no_doctests.py rename to Lib/test/test_doctest/sample_doctest_no_doctests.py diff --git a/Lib/test/test_doctest.py b/Lib/test/test_doctest/test_doctest.py similarity index 94% rename from Lib/test/test_doctest.py rename to Lib/test/test_doctest/test_doctest.py index 46a51007f9644d..7015255db1f7f0 100644 --- a/Lib/test/test_doctest.py +++ b/Lib/test/test_doctest/test_doctest.py @@ -4,6 +4,7 @@ from test import support from test.support import import_helper +from test.support.pty_helper import FakeInput # used in doctests import doctest import functools import os @@ -77,6 +78,15 @@ def get(self): """ return self.val + def setter(self, val): + """ + >>> s = SampleClass(-5) + >>> s.setter(1) + >>> print(s.val) + 1 + """ + self.val = val + def a_staticmethod(v): """ >>> print(SampleClass.a_staticmethod(10)) @@ -95,7 +105,7 @@ def a_classmethod(cls, v): return v+2 a_classmethod = classmethod(a_classmethod) - a_property = property(get, doc=""" + a_property = property(get, setter, doc=""" >>> print(SampleClass(22).a_property) 22 
""") @@ -156,25 +166,6 @@ def get(self): """ return self.val -###################################################################### -## Fake stdin (for testing interactive debugging) -###################################################################### - -class _FakeInput: - """ - A fake input stream for pdb's interactive debugger. Whenever a - line is read, print it (to simulate the user typing it), and then - return it. The set of lines to return is specified in the - constructor; they should not have trailing newlines. - """ - def __init__(self, lines): - self.lines = lines - - def readline(self): - line = self.lines.pop(0) - print(line) - return line+'\n' - ###################################################################### ## Test Cases ###################################################################### @@ -468,14 +459,14 @@ def basics(): r""" We'll simulate a __file__ attr that ends in pyc: - >>> import test.test_doctest - >>> old = test.test_doctest.__file__ - >>> test.test_doctest.__file__ = 'test_doctest.pyc' + >>> from test.test_doctest import test_doctest + >>> old = test_doctest.__file__ + >>> test_doctest.__file__ = 'test_doctest.pyc' >>> tests = finder.find(sample_func) >>> print(tests) # doctest: +ELLIPSIS - [] + [] The exact name depends on how test_doctest was invoked, so allow for leading path components. @@ -483,7 +474,7 @@ def basics(): r""" >>> tests[0].filename # doctest: +ELLIPSIS '...test_doctest.py' - >>> test.test_doctest.__file__ = old + >>> test_doctest.__file__ = old >>> e = tests[0].examples[0] @@ -537,6 +528,7 @@ def basics(): r""" 1 SampleClass.a_staticmethod 1 SampleClass.double 1 SampleClass.get + 3 SampleClass.setter New-style classes are also supported: @@ -576,10 +568,10 @@ def basics(): r""" ... 'c': triple}}) >>> finder = doctest.DocTestFinder() - >>> # Use module=test.test_doctest, to prevent doctest from + >>> # Use module=test_doctest, to prevent doctest from >>> # ignoring the objects since they weren't defined in m. - >>> import test.test_doctest - >>> tests = finder.find(m, module=test.test_doctest) + >>> from test.test_doctest import test_doctest + >>> tests = finder.find(m, module=test_doctest) >>> for t in tests: ... print('%2s %s' % (len(t.examples), t.name)) 1 some_module @@ -593,23 +585,38 @@ def basics(): r""" 1 some_module.SampleClass.a_staticmethod 1 some_module.SampleClass.double 1 some_module.SampleClass.get + 3 some_module.SampleClass.setter 1 some_module.__test__.c 2 some_module.__test__.d 1 some_module.sample_func +However, doctest will ignore imported objects from other modules +(without proper `module=`): + + >>> import types + >>> m = types.ModuleType('poluted_namespace') + >>> m.__dict__.update({ + ... 'sample_func': sample_func, + ... 'SampleClass': SampleClass, + ... }) + + >>> finder = doctest.DocTestFinder() + >>> finder.find(m) + [] + Duplicate Removal ~~~~~~~~~~~~~~~~~ If a single object is listed twice (under different names), then tests will only be generated for it once: - >>> from test import doctest_aliases + >>> from test.test_doctest import doctest_aliases >>> assert doctest_aliases.TwoNames.f >>> assert doctest_aliases.TwoNames.g >>> tests = excl_empty_finder.find(doctest_aliases) >>> print(len(tests)) 2 >>> print(tests[0].name) - test.doctest_aliases.TwoNames + test.test_doctest.doctest_aliases.TwoNames TwoNames.f and TwoNames.g are bound to the same object. 
We can't guess which will be found in doctest's traversal of @@ -635,6 +642,7 @@ def basics(): r""" 1 SampleClass.a_staticmethod 1 SampleClass.double 1 SampleClass.get + 3 SampleClass.setter By default, that excluded objects with no doctests. exclude_empty=False tells it to include (empty) tests for objects with no doctests. This feature @@ -656,28 +664,29 @@ def basics(): r""" 1 SampleClass.a_staticmethod 1 SampleClass.double 1 SampleClass.get + 3 SampleClass.setter When used with `exclude_empty=False` we are also interested in line numbers of doctests that are empty. It used to be broken for quite some time until `bpo-28249`. - >>> from test import doctest_lineno + >>> from test.test_doctest import doctest_lineno >>> tests = doctest.DocTestFinder(exclude_empty=False).find(doctest_lineno) >>> for t in tests: ... print('%5s %s' % (t.lineno, t.name)) - None test.doctest_lineno - 22 test.doctest_lineno.ClassWithDocstring - 30 test.doctest_lineno.ClassWithDoctest - None test.doctest_lineno.ClassWithoutDocstring - None test.doctest_lineno.MethodWrapper - 53 test.doctest_lineno.MethodWrapper.classmethod_with_doctest - 39 test.doctest_lineno.MethodWrapper.method_with_docstring - 45 test.doctest_lineno.MethodWrapper.method_with_doctest - None test.doctest_lineno.MethodWrapper.method_without_docstring - 61 test.doctest_lineno.MethodWrapper.property_with_doctest - 4 test.doctest_lineno.func_with_docstring - 12 test.doctest_lineno.func_with_doctest - None test.doctest_lineno.func_without_docstring + None test.test_doctest.doctest_lineno + 22 test.test_doctest.doctest_lineno.ClassWithDocstring + 30 test.test_doctest.doctest_lineno.ClassWithDoctest + None test.test_doctest.doctest_lineno.ClassWithoutDocstring + None test.test_doctest.doctest_lineno.MethodWrapper + 53 test.test_doctest.doctest_lineno.MethodWrapper.classmethod_with_doctest + 39 test.test_doctest.doctest_lineno.MethodWrapper.method_with_docstring + 45 test.test_doctest.doctest_lineno.MethodWrapper.method_with_doctest + None test.test_doctest.doctest_lineno.MethodWrapper.method_without_docstring + 61 test.test_doctest.doctest_lineno.MethodWrapper.property_with_doctest + 4 test.test_doctest.doctest_lineno.func_with_docstring + 12 test.test_doctest.doctest_lineno.func_with_doctest + None test.test_doctest.doctest_lineno.func_without_docstring Turning off Recursion ~~~~~~~~~~~~~~~~~~~~~ @@ -1924,9 +1933,9 @@ def test_testsource(): r""" example code is converted to regular Python code. The surrounding words and expected output are converted to comments: - >>> import test.test_doctest - >>> name = 'test.test_doctest.sample_func' - >>> print(doctest.testsource(test.test_doctest, name)) + >>> from test.test_doctest import test_doctest + >>> name = 'test.test_doctest.test_doctest.sample_func' + >>> print(doctest.testsource(test_doctest, name)) # Blah blah # print(sample_func(22)) @@ -1936,8 +1945,8 @@ def test_testsource(): r""" # Yee ha! 
- >>> name = 'test.test_doctest.SampleNewStyleClass' - >>> print(doctest.testsource(test.test_doctest, name)) + >>> name = 'test.test_doctest.test_doctest.SampleNewStyleClass' + >>> print(doctest.testsource(test_doctest, name)) print('1\n2\n3') # Expected: ## 1 @@ -1945,8 +1954,8 @@ def test_testsource(): r""" ## 3 - >>> name = 'test.test_doctest.SampleClass.a_classmethod' - >>> print(doctest.testsource(test.test_doctest, name)) + >>> name = 'test.test_doctest.test_doctest.SampleClass.a_classmethod' + >>> print(doctest.testsource(test_doctest, name)) print(SampleClass.a_classmethod(10)) # Expected: ## 12 @@ -1969,7 +1978,7 @@ def test_debug(): r""" Create some fake stdin input, to feed to the debugger: >>> real_stdin = sys.stdin - >>> sys.stdin = _FakeInput(['next', 'print(x)', 'continue']) + >>> sys.stdin = FakeInput(['next', 'print(x)', 'continue']) Run the debugger on the docstring, and then restore sys.stdin. @@ -2012,7 +2021,7 @@ def test_pdb_set_trace(): captures our debugger input: >>> real_stdin = sys.stdin - >>> sys.stdin = _FakeInput([ + >>> sys.stdin = FakeInput([ ... 'print(x)', # print data defined by the example ... 'continue', # stop debugging ... '']) @@ -2039,7 +2048,7 @@ def test_pdb_set_trace(): ... ''' >>> test = parser.get_doctest(doc, globals(), "foo-bar@baz", "foo-bar@baz.py", 0) >>> real_stdin = sys.stdin - >>> sys.stdin = _FakeInput([ + >>> sys.stdin = FakeInput([ ... 'print(y)', # print data defined in the function ... 'up', # out of function ... 'print(x)', # print data defined by the example @@ -2051,7 +2060,7 @@ def test_pdb_set_trace(): ... finally: ... sys.stdin = real_stdin --Return-- - > (3)calls_set_trace()->None + > (3)calls_set_trace()->None -> import pdb; pdb.set_trace() (Pdb) print(y) 2 @@ -2076,7 +2085,7 @@ def test_pdb_set_trace(): ... ''' >>> test = parser.get_doctest(doc, globals(), "foo-bar@baz", "foo-bar@baz.py", 0) >>> real_stdin = sys.stdin - >>> sys.stdin = _FakeInput([ + >>> sys.stdin = FakeInput([ ... 'list', # list source from example 2 ... 'next', # return from g() ... 'list', # list source from example 1 @@ -2148,7 +2157,7 @@ def test_pdb_set_trace_nested(): >>> runner = doctest.DocTestRunner(verbose=False) >>> test = parser.get_doctest(doc, globals(), "foo-bar@baz", "foo-bar@baz.py", 0) >>> real_stdin = sys.stdin - >>> sys.stdin = _FakeInput([ + >>> sys.stdin = FakeInput([ ... 'print(y)', # print data defined in the function ... 'step', 'step', 'step', 'step', 'step', 'step', 'print(z)', ... 'up', 'print(x)', @@ -2162,39 +2171,39 @@ def test_pdb_set_trace_nested(): ... finally: ... sys.stdin = real_stdin ... 
# doctest: +REPORT_NDIFF - > (5)calls_set_trace() + > (5)calls_set_trace() -> self.f1() (Pdb) print(y) 1 (Pdb) step --Call-- - > (7)f1() + > (7)f1() -> def f1(self): (Pdb) step - > (8)f1() + > (8)f1() -> x = 1 (Pdb) step - > (9)f1() + > (9)f1() -> self.f2() (Pdb) step --Call-- - > (11)f2() + > (11)f2() -> def f2(self): (Pdb) step - > (12)f2() + > (12)f2() -> z = 1 (Pdb) step - > (13)f2() + > (13)f2() -> z = 2 (Pdb) print(z) 1 (Pdb) up - > (9)f1() + > (9)f1() -> self.f2() (Pdb) print(x) 1 (Pdb) up - > (5)calls_set_trace() + > (5)calls_set_trace() -> self.f1() (Pdb) print(y) 1 @@ -2214,39 +2223,39 @@ def test_DocTestSuite(): by passing a module object: >>> import unittest - >>> import test.sample_doctest - >>> suite = doctest.DocTestSuite(test.sample_doctest) + >>> import test.test_doctest.sample_doctest + >>> suite = doctest.DocTestSuite(test.test_doctest.sample_doctest) >>> suite.run(unittest.TestResult()) We can also supply the module by name: - >>> suite = doctest.DocTestSuite('test.sample_doctest') + >>> suite = doctest.DocTestSuite('test.test_doctest.sample_doctest') >>> suite.run(unittest.TestResult()) The module need not contain any doctest examples: - >>> suite = doctest.DocTestSuite('test.sample_doctest_no_doctests') + >>> suite = doctest.DocTestSuite('test.test_doctest.sample_doctest_no_doctests') >>> suite.run(unittest.TestResult()) The module need not contain any docstrings either: - >>> suite = doctest.DocTestSuite('test.sample_doctest_no_docstrings') + >>> suite = doctest.DocTestSuite('test.test_doctest.sample_doctest_no_docstrings') >>> suite.run(unittest.TestResult()) We can use the current module: - >>> suite = test.sample_doctest.test_suite() + >>> suite = test.test_doctest.sample_doctest.test_suite() >>> suite.run(unittest.TestResult()) We can also provide a DocTestFinder: >>> finder = doctest.DocTestFinder() - >>> suite = doctest.DocTestSuite('test.sample_doctest', + >>> suite = doctest.DocTestSuite('test.test_doctest.sample_doctest', ... test_finder=finder) >>> suite.run(unittest.TestResult()) @@ -2254,7 +2263,7 @@ def test_DocTestSuite(): The DocTestFinder need not return any tests: >>> finder = doctest.DocTestFinder() - >>> suite = doctest.DocTestSuite('test.sample_doctest_no_docstrings', + >>> suite = doctest.DocTestSuite('test.test_doctest.sample_doctest_no_docstrings', ... test_finder=finder) >>> suite.run(unittest.TestResult()) @@ -2263,14 +2272,14 @@ def test_DocTestSuite(): used instead of the module globals. Here we'll pass an empty globals, triggering an extra error: - >>> suite = doctest.DocTestSuite('test.sample_doctest', globs={}) + >>> suite = doctest.DocTestSuite('test.test_doctest.sample_doctest', globs={}) >>> suite.run(unittest.TestResult()) Alternatively, we can provide extra globals. Here we'll make an error go away by providing an extra global variable: - >>> suite = doctest.DocTestSuite('test.sample_doctest', + >>> suite = doctest.DocTestSuite('test.test_doctest.sample_doctest', ... extraglobs={'y': 1}) >>> suite.run(unittest.TestResult()) @@ -2278,7 +2287,7 @@ def test_DocTestSuite(): You can pass option flags. Here we'll cause an extra error by disabling the blank-line feature: - >>> suite = doctest.DocTestSuite('test.sample_doctest', + >>> suite = doctest.DocTestSuite('test.test_doctest.sample_doctest', ... optionflags=doctest.DONT_ACCEPT_BLANKLINE) >>> suite.run(unittest.TestResult()) @@ -2286,27 +2295,27 @@ def test_DocTestSuite(): You can supply setUp and tearDown functions: >>> def setUp(t): - ... import test.test_doctest - ... 
test.test_doctest.sillySetup = True + ... from test.test_doctest import test_doctest + ... test_doctest.sillySetup = True >>> def tearDown(t): - ... import test.test_doctest - ... del test.test_doctest.sillySetup + ... from test.test_doctest import test_doctest + ... del test_doctest.sillySetup Here, we installed a silly variable that the test expects: - >>> suite = doctest.DocTestSuite('test.sample_doctest', + >>> suite = doctest.DocTestSuite('test.test_doctest.sample_doctest', ... setUp=setUp, tearDown=tearDown) >>> suite.run(unittest.TestResult()) But the tearDown restores sanity: - >>> import test.test_doctest - >>> test.test_doctest.sillySetup + >>> from test.test_doctest import test_doctest + >>> test_doctest.sillySetup Traceback (most recent call last): ... - AttributeError: module 'test.test_doctest' has no attribute 'sillySetup' + AttributeError: module 'test.test_doctest.test_doctest' has no attribute 'sillySetup' The setUp and tearDown functions are passed test objects. Here we'll use the setUp function to supply the missing variable y: @@ -2314,7 +2323,7 @@ def test_DocTestSuite(): >>> def setUp(test): ... test.globs['y'] = 1 - >>> suite = doctest.DocTestSuite('test.sample_doctest', setUp=setUp) + >>> suite = doctest.DocTestSuite('test.test_doctest.sample_doctest', setUp=setUp) >>> suite.run(unittest.TestResult()) @@ -2345,7 +2354,7 @@ def test_DocFileSuite(): >>> suite = doctest.DocFileSuite('test_doctest.txt', ... 'test_doctest2.txt', ... 'test_doctest4.txt', - ... package='test') + ... package='test.test_doctest') >>> suite.run(unittest.TestResult()) @@ -2361,7 +2370,7 @@ def test_DocFileSuite(): ... suite = doctest.DocFileSuite('test_doctest.txt', ... 'test_doctest2.txt', ... 'test_doctest4.txt', - ... package='test') + ... package='test.test_doctest') ... suite.run(unittest.TestResult()) ... finally: ... if added_loader: @@ -2371,16 +2380,17 @@ def test_DocFileSuite(): '/' should be used as a path separator. It will be converted to a native separator at run time: - >>> suite = doctest.DocFileSuite('../test/test_doctest.txt') + >>> suite = doctest.DocFileSuite('../test_doctest/test_doctest.txt') >>> suite.run(unittest.TestResult()) If DocFileSuite is used from an interactive session, then files are resolved relative to the directory of sys.argv[0]: - >>> import types, os.path, test.test_doctest + >>> import types, os.path + >>> from test.test_doctest import test_doctest >>> save_argv = sys.argv - >>> sys.argv = [test.test_doctest.__file__] + >>> sys.argv = [test_doctest.__file__] >>> suite = doctest.DocFileSuite('test_doctest.txt', ... package=types.ModuleType('__main__')) >>> sys.argv = save_argv @@ -2390,7 +2400,7 @@ def test_DocFileSuite(): working directory): >>> # Get the absolute path of the test package. - >>> test_doctest_path = os.path.abspath(test.test_doctest.__file__) + >>> test_doctest_path = os.path.abspath(test_doctest.__file__) >>> test_pkg_path = os.path.split(test_doctest_path)[0] >>> # Use it to find the absolute path of test_doctest.txt. @@ -2430,12 +2440,12 @@ def test_DocFileSuite(): And, you can provide setUp and tearDown functions: >>> def setUp(t): - ... import test.test_doctest - ... test.test_doctest.sillySetup = True + ... from test.test_doctest import test_doctest + ... test_doctest.sillySetup = True >>> def tearDown(t): - ... import test.test_doctest - ... del test.test_doctest.sillySetup + ... from test.test_doctest import test_doctest + ... 
del test_doctest.sillySetup Here, we installed a silly variable that the test expects: @@ -2448,11 +2458,11 @@ def test_DocFileSuite(): But the tearDown restores sanity: - >>> import test.test_doctest - >>> test.test_doctest.sillySetup + >>> from test.test_doctest import test_doctest + >>> test_doctest.sillySetup Traceback (most recent call last): ... - AttributeError: module 'test.test_doctest' has no attribute 'sillySetup' + AttributeError: module 'test.test_doctest.test_doctest' has no attribute 'sillySetup' The setUp and tearDown functions are passed test objects. Here, we'll use a setUp function to set the favorite color in @@ -3212,8 +3222,8 @@ def test_run_doctestsuite_multiple_times(): http://bugs.python.org/issue9736 >>> import unittest - >>> import test.sample_doctest - >>> suite = doctest.DocTestSuite(test.sample_doctest) + >>> import test.test_doctest.sample_doctest + >>> suite = doctest.DocTestSuite(test.test_doctest.sample_doctest) >>> suite.run(unittest.TestResult()) >>> suite.run(unittest.TestResult()) @@ -3390,4 +3400,4 @@ def load_tests(loader, tests, pattern): if __name__ == '__main__': - unittest.main(module='test.test_doctest') + unittest.main(module='test.test_doctest.test_doctest') diff --git a/Lib/test/test_doctest.txt b/Lib/test/test_doctest/test_doctest.txt similarity index 100% rename from Lib/test/test_doctest.txt rename to Lib/test/test_doctest/test_doctest.txt diff --git a/Lib/test/test_doctest2.py b/Lib/test/test_doctest/test_doctest2.py similarity index 100% rename from Lib/test/test_doctest2.py rename to Lib/test/test_doctest/test_doctest2.py diff --git a/Lib/test/test_doctest2.txt b/Lib/test/test_doctest/test_doctest2.txt similarity index 77% rename from Lib/test/test_doctest2.txt rename to Lib/test/test_doctest/test_doctest2.txt index 2e14856c27d8b3..76dab94a9c0470 100644 --- a/Lib/test/test_doctest2.txt +++ b/Lib/test/test_doctest/test_doctest2.txt @@ -2,8 +2,8 @@ This is a sample doctest in a text file. In this example, we'll rely on some silly setup: - >>> import test.test_doctest - >>> test.test_doctest.sillySetup + >>> import test.test_doctest.test_doctest + >>> test.test_doctest.test_doctest.sillySetup True This test also has some (random) encoded (utf-8) unicode text: diff --git a/Lib/test/test_doctest3.txt b/Lib/test/test_doctest/test_doctest3.txt similarity index 100% rename from Lib/test/test_doctest3.txt rename to Lib/test/test_doctest/test_doctest3.txt diff --git a/Lib/test/test_doctest4.txt b/Lib/test/test_doctest/test_doctest4.txt similarity index 100% rename from Lib/test/test_doctest4.txt rename to Lib/test/test_doctest/test_doctest4.txt diff --git a/Lib/test/test_email/test__header_value_parser.py b/Lib/test/test_email/test__header_value_parser.py index 854f2ff009c618..bdb0e55f21069f 100644 --- a/Lib/test/test_email/test__header_value_parser.py +++ b/Lib/test/test_email/test__header_value_parser.py @@ -2915,6 +2915,45 @@ def test_ews_combined_before_wrap(self): "mich. 
And that's\n" " all I'm sayin.\n") + def test_unicode_after_unknown_not_combined(self): + self._test(parser.get_unstructured("=?unknown-8bit?q?=A4?=\xa4"), + "=?unknown-8bit?q?=A4?==?utf-8?q?=C2=A4?=\n") + prefix = "0123456789 "*5 + self._test(parser.get_unstructured(prefix + "=?unknown-8bit?q?=A4?=\xa4"), + prefix + "=?unknown-8bit?q?=A4?=\n =?utf-8?q?=C2=A4?=\n") + + def test_ascii_after_unknown_not_combined(self): + self._test(parser.get_unstructured("=?unknown-8bit?q?=A4?=abc"), + "=?unknown-8bit?q?=A4?=abc\n") + prefix = "0123456789 "*5 + self._test(parser.get_unstructured(prefix + "=?unknown-8bit?q?=A4?=abc"), + prefix + "=?unknown-8bit?q?=A4?=\n =?utf-8?q?abc?=\n") + + def test_unknown_after_unicode_not_combined(self): + self._test(parser.get_unstructured("\xa4" + "=?unknown-8bit?q?=A4?="), + "=?utf-8?q?=C2=A4?==?unknown-8bit?q?=A4?=\n") + prefix = "0123456789 "*5 + self._test(parser.get_unstructured(prefix + "\xa4=?unknown-8bit?q?=A4?="), + prefix + "=?utf-8?q?=C2=A4?=\n =?unknown-8bit?q?=A4?=\n") + + def test_unknown_after_ascii_not_combined(self): + self._test(parser.get_unstructured("abc" + "=?unknown-8bit?q?=A4?="), + "abc=?unknown-8bit?q?=A4?=\n") + prefix = "0123456789 "*5 + self._test(parser.get_unstructured(prefix + "abcd=?unknown-8bit?q?=A4?="), + prefix + "abcd\n =?unknown-8bit?q?=A4?=\n") + + def test_unknown_after_unknown(self): + self._test(parser.get_unstructured("=?unknown-8bit?q?=C2?=" + "=?unknown-8bit?q?=A4?="), + "=?unknown-8bit?q?=C2=A4?=\n") + prefix = "0123456789 "*5 + self._test(parser.get_unstructured(prefix + "=?unknown-8bit?q?=C2?=" + "=?unknown-8bit?q?=A4?="), + prefix + "=?unknown-8bit?q?=C2?=\n =?unknown-8bit?q?=A4?=\n") + # XXX Need test of an encoded word so long that it needs to be wrapped def test_simple_address(self): diff --git a/Lib/test/test_email/test_utils.py b/Lib/test/test_email/test_utils.py index c9d973df0a2192..d04b3909efa643 100644 --- a/Lib/test/test_email/test_utils.py +++ b/Lib/test/test_email/test_utils.py @@ -143,12 +143,12 @@ def test_localtime_epoch_notz_daylight_false(self): t2 = utils.localtime(t0.replace(tzinfo=None)) self.assertEqual(t1, t2) - @unittest.skipUnless("Europe/Kyiv" in zoneinfo.available_timezones(), - "Can't find a Kyiv timezone database") @test.support.run_with_tz('Europe/Kyiv') def test_variable_tzname(self): t0 = datetime.datetime(1984, 1, 1, tzinfo=datetime.timezone.utc) t1 = utils.localtime(t0) + if t1.tzname() in ('Europe', 'UTC'): + self.skipTest("Can't find a Kyiv timezone database") self.assertEqual(t1.tzname(), 'MSK') t0 = datetime.datetime(1994, 1, 1, tzinfo=datetime.timezone.utc) t1 = utils.localtime(t0) diff --git a/Lib/test/test_enum.py b/Lib/test/test_enum.py index f99d4ca204b5a7..d045739efa46b8 100644 --- a/Lib/test/test_enum.py +++ b/Lib/test/test_enum.py @@ -3202,6 +3202,37 @@ class NTEnum(Enum): [TTuple(id=0, a=0, blist=[]), TTuple(id=1, a=2, blist=[4]), TTuple(id=2, a=4, blist=[0, 1, 2])], ) + self.assertRaises(AttributeError, getattr, NTEnum.NONE, 'id') + # + class NTCEnum(TTuple, Enum): + NONE = 0, 0, [] + A = 1, 2, [4] + B = 2, 4, [0, 1, 2] + self.assertEqual(repr(NTCEnum.NONE), "") + self.assertEqual(NTCEnum.NONE.value, TTuple(id=0, a=0, blist=[])) + self.assertEqual(NTCEnum.NONE.id, 0) + self.assertEqual(NTCEnum.A.a, 2) + self.assertEqual(NTCEnum.B.blist, [0, 1 ,2]) + self.assertEqual( + [x.value for x in NTCEnum], + [TTuple(id=0, a=0, blist=[]), TTuple(id=1, a=2, blist=[4]), TTuple(id=2, a=4, blist=[0, 1, 2])], + ) + # + class NTDEnum(Enum): + def __new__(cls, id, a, blist): + member = 
object.__new__(cls) + member.id = id + member.a = a + member.blist = blist + return member + NONE = TTuple(0, 0, []) + A = TTuple(1, 2, [4]) + B = TTuple(2, 4, [0, 1, 2]) + self.assertEqual(repr(NTDEnum.NONE), "") + self.assertEqual(NTDEnum.NONE.id, 0) + self.assertEqual(NTDEnum.A.a, 2) + self.assertEqual(NTDEnum.B.blist, [0, 1 ,2]) + def test_flag_with_custom_new(self): class FlagFromChar(IntFlag): def __new__(cls, c): diff --git a/Lib/test/test_fileio.py b/Lib/test/test_fileio.py index f490485cdaf3eb..06d9b454add34c 100644 --- a/Lib/test/test_fileio.py +++ b/Lib/test/test_fileio.py @@ -174,6 +174,16 @@ def testRepr(self): self.assertEqual(repr(self.f), "<%s.FileIO [closed]>" % (self.modulename,)) + def test_subclass_repr(self): + class TestSubclass(self.FileIO): + pass + + f = TestSubclass(TESTFN) + with f: + self.assertIn(TestSubclass.__name__, repr(f)) + + self.assertIn(TestSubclass.__name__, repr(f)) + def testReprNoCloseFD(self): fd = os.open(TESTFN, os.O_RDONLY) try: diff --git a/Lib/test/test_fractions.py b/Lib/test/test_fractions.py index 84779526ce0eb0..af3cb214ab0ac1 100644 --- a/Lib/test/test_fractions.py +++ b/Lib/test/test_fractions.py @@ -261,6 +261,30 @@ def testFromString(self): self.assertRaisesMessage( ValueError, "Invalid literal for Fraction: '1.1e+1__1'", F, "1.1e+1__1") + self.assertRaisesMessage( + ValueError, "Invalid literal for Fraction: '123.dd'", + F, "123.dd") + self.assertRaisesMessage( + ValueError, "Invalid literal for Fraction: '123.5_dd'", + F, "123.5_dd") + self.assertRaisesMessage( + ValueError, "Invalid literal for Fraction: 'dd.5'", + F, "dd.5") + self.assertRaisesMessage( + ValueError, "Invalid literal for Fraction: '7_dd'", + F, "7_dd") + self.assertRaisesMessage( + ValueError, "Invalid literal for Fraction: '1/dd'", + F, "1/dd") + self.assertRaisesMessage( + ValueError, "Invalid literal for Fraction: '1/123_dd'", + F, "1/123_dd") + self.assertRaisesMessage( + ValueError, "Invalid literal for Fraction: '789edd'", + F, "789edd") + self.assertRaisesMessage( + ValueError, "Invalid literal for Fraction: '789e2_dd'", + F, "789e2_dd") # Test catastrophic backtracking. 
val = "9"*50 + "_" self.assertRaisesMessage( diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py index 0ef45d3c670e85..7c66b906d308ba 100644 --- a/Lib/test/test_functools.py +++ b/Lib/test/test_functools.py @@ -1875,8 +1875,14 @@ def fib(n): return fib(n-1) + fib(n-2) if not support.Py_DEBUG: + depth = support.Py_C_RECURSION_LIMIT*2//7 with support.infinite_recursion(): - fib(2500) + fib(depth) + if self.module == c_functools: + fib.cache_clear() + with support.infinite_recursion(): + with self.assertRaises(RecursionError): + fib(10000) @py_functools.lru_cache() diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py index db7cb9ace6e5f3..1d71dd9e262a6a 100644 --- a/Lib/test/test_gc.py +++ b/Lib/test/test_gc.py @@ -1225,7 +1225,7 @@ def test_refcount_errors(self): p.stderr.close() # Verify that stderr has a useful error message: self.assertRegex(stderr, - br'gcmodule\.c:[0-9]+: gc_decref: Assertion "gc_get_refs\(g\) > 0" failed.') + br'gc\.c:[0-9]+: gc_decref: Assertion "gc_get_refs\(g\) > 0" failed.') self.assertRegex(stderr, br'refcount is too small') # "address : 0x7fb5062efc18" diff --git a/Lib/test/test_generated_cases.py b/Lib/test/test_generated_cases.py index 3b2f579be684b7..ca1228ee7008a9 100644 --- a/Lib/test/test_generated_cases.py +++ b/Lib/test/test_generated_cases.py @@ -9,7 +9,7 @@ def skip_if_different_mount_drives(): - if sys.platform != 'win32': + if sys.platform != "win32": return ROOT = os.path.dirname(os.path.dirname(__file__)) root_drive = os.path.splitroot(ROOT)[0] @@ -22,11 +22,13 @@ def skip_if_different_mount_drives(): f"directory have different mount drives " f"({cwd_drive} and {root_drive})" ) + + skip_if_different_mount_drives() -test_tools.skip_if_missing('cases_generator') -with test_tools.imports_under_tool('cases_generator'): +test_tools.skip_if_missing("cases_generator") +with test_tools.imports_under_tool("cases_generator"): from analyzer import StackItem import parser from stack import Stack @@ -39,13 +41,14 @@ def handle_stderr(): else: return support.captured_stderr() + class TestEffects(unittest.TestCase): def test_effect_sizes(self): stack = Stack() inputs = [ - x:= StackItem("x", None, "", "1"), - y:= StackItem("y", None, "", "oparg"), - z:= StackItem("z", None, "", "oparg*2"), + x := StackItem("x", None, "", "1"), + y := StackItem("y", None, "", "oparg"), + z := StackItem("z", None, "", "oparg*2"), ] outputs = [ StackItem("x", None, "", "1"), @@ -96,9 +99,7 @@ def run_cases_test(self, input: str, expected: str): with handle_stderr(): tier1_generator.generate_tier1_from_files( - [self.temp_input_filename], - self.temp_output_filename, - False + [self.temp_input_filename], self.temp_output_filename, False ) with open(self.temp_output_filename) as temp_output: @@ -750,7 +751,7 @@ def test_override_op(self): def test_annotated_inst(self): input = """ - guard inst(OP, (--)) { + pure inst(OP, (--)) { ham(); } """ @@ -767,7 +768,7 @@ def test_annotated_inst(self): def test_annotated_op(self): input = """ - guard op(OP, (--)) { + pure op(OP, (--)) { spam(); } macro(M) = OP; @@ -784,7 +785,7 @@ def test_annotated_op(self): self.run_cases_test(input, output) input = """ - guard register specializing op(OP, (--)) { + pure register specializing op(OP, (--)) { spam(); } macro(M) = OP; diff --git a/Lib/test/test_importlib/test_main.py b/Lib/test/test_importlib/test_main.py index 1d3817151edf64..0a769b89841234 100644 --- a/Lib/test/test_importlib/test_main.py +++ b/Lib/test/test_importlib/test_main.py @@ -4,6 +4,7 @@ import warnings import 
importlib.metadata import contextlib +from test.support import os_helper try: import pyfakefs.fake_filesystem_unittest as ffs @@ -403,6 +404,7 @@ def test_packages_distributions_all_module_types(self): assert not any(name.endswith('.dist-info') for name in distributions) + @os_helper.skip_unless_symlink def test_packages_distributions_symlinked_top_level(self) -> None: """ Distribution is resolvable from a simple top-level symlink in RECORD. diff --git a/Lib/test/test_int.py b/Lib/test/test_int.py index 5545ee39d8e942..0bf55facad9fed 100644 --- a/Lib/test/test_int.py +++ b/Lib/test/test_int.py @@ -90,6 +90,7 @@ def test_basic(self): self.assertRaises(TypeError, int, 1, 12) + self.assertRaises(TypeError, int, "10", 2, 1) self.assertEqual(int('0o123', 0), 83) self.assertEqual(int('0x123', 16), 291) diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py index 1d78876f2a1c84..936edea3cad70c 100644 --- a/Lib/test/test_io.py +++ b/Lib/test/test_io.py @@ -2806,6 +2806,13 @@ def test_recursive_repr(self): with self.assertRaises(RuntimeError): repr(t) # Should not crash + def test_subclass_repr(self): + class TestSubclass(self.TextIOWrapper): + pass + + f = TestSubclass(self.StringIO()) + self.assertIn(TestSubclass.__name__, repr(f)) + def test_line_buffering(self): r = self.BytesIO() b = self.BufferedWriter(r, 1000) @@ -3880,6 +3887,14 @@ def test_issue25862(self): t.write('x') t.tell() + def test_issue35928(self): + p = self.BufferedRWPair(self.BytesIO(b'foo\nbar\n'), self.BytesIO()) + f = self.TextIOWrapper(p) + res = f.readline() + self.assertEqual(res, 'foo\n') + f.write(res) + self.assertEqual(res + f.readline(), 'foo\nbar\n') + class MemviewBytesIO(io.BytesIO): '''A BytesIO object whose read method returns memoryviews diff --git a/Lib/test/test_isinstance.py b/Lib/test/test_isinstance.py index bf9332e40aeaf2..7f759fb3317146 100644 --- a/Lib/test/test_isinstance.py +++ b/Lib/test/test_isinstance.py @@ -310,7 +310,7 @@ class X: @property def __bases__(self): return self.__bases__ - with support.infinite_recursion(): + with support.infinite_recursion(25): self.assertRaises(RecursionError, issubclass, X(), int) self.assertRaises(RecursionError, issubclass, int, X()) self.assertRaises(RecursionError, isinstance, 1, X()) @@ -344,7 +344,7 @@ class B: pass A.__getattr__ = B.__getattr__ = X.__getattr__ return (A(), B()) - with support.infinite_recursion(): + with support.infinite_recursion(25): self.assertRaises(RecursionError, issubclass, X(), int) diff --git a/Lib/test/test_launcher.py b/Lib/test/test_launcher.py index bcd4ed63bf25a0..3da6173cfd3f13 100644 --- a/Lib/test/test_launcher.py +++ b/Lib/test/test_launcher.py @@ -19,8 +19,10 @@ PY_EXE = "py.exe" +DEBUG_BUILD = False if sys.executable.casefold().endswith("_d.exe".casefold()): PY_EXE = "py_d.exe" + DEBUG_BUILD = True # Registry data to create. On removal, everything beneath top-level names will # be deleted. 
@@ -232,7 +234,7 @@ def run_py(self, args, env=None, allow_fail=False, expect_returncode=0, argv=Non p.stdin.close() p.wait(10) out = p.stdout.read().decode("utf-8", "replace") - err = p.stderr.read().decode("ascii", "replace") + err = p.stderr.read().decode("ascii", "replace").replace("\uFFFD", "?") if p.returncode != expect_returncode and support.verbose and not allow_fail: print("++ COMMAND ++") print([self.py_exe, *args]) @@ -273,7 +275,7 @@ def script(self, content, encoding="utf-8"): def fake_venv(self): venv = Path.cwd() / "Scripts" venv.mkdir(exist_ok=True, parents=True) - venv_exe = (venv / Path(sys.executable).name) + venv_exe = (venv / ("python_d.exe" if DEBUG_BUILD else "python.exe")) venv_exe.touch() try: yield venv_exe, {"VIRTUAL_ENV": str(venv.parent)} @@ -521,6 +523,9 @@ def test_virtualenv_in_list(self): self.assertEqual(str(venv_exe), m.group(1)) break else: + if support.verbose: + print(data["stdout"]) + print(data["stderr"]) self.fail("did not find active venv path") data = self.run_py(["-0"], env=env) @@ -616,25 +621,29 @@ def test_py_handle_64_in_ini(self): self.assertEqual("True", data["SearchInfo.oldStyleTag"]) def test_search_path(self): - stem = Path(sys.executable).stem + exe = Path("arbitrary-exe-name.exe").absolute() + exe.touch() + self.addCleanup(exe.unlink) with self.py_ini(TEST_PY_DEFAULTS): - with self.script(f"#! /usr/bin/env {stem} -prearg") as script: + with self.script(f"#! /usr/bin/env {exe.stem} -prearg") as script: data = self.run_py( [script, "-postarg"], - env={"PATH": f"{Path(sys.executable).parent};{os.getenv('PATH')}"}, + env={"PATH": f"{exe.parent};{os.getenv('PATH')}"}, ) - self.assertEqual(f"{sys.executable} -prearg {script} -postarg", data["stdout"].strip()) + self.assertEqual(f"{exe} -prearg {script} -postarg", data["stdout"].strip()) def test_search_path_exe(self): # Leave the .exe on the name to ensure we don't add it a second time - name = Path(sys.executable).name + exe = Path("arbitrary-exe-name.exe").absolute() + exe.touch() + self.addCleanup(exe.unlink) with self.py_ini(TEST_PY_DEFAULTS): - with self.script(f"#! /usr/bin/env {name} -prearg") as script: + with self.script(f"#! /usr/bin/env {exe.name} -prearg") as script: data = self.run_py( [script, "-postarg"], - env={"PATH": f"{Path(sys.executable).parent};{os.getenv('PATH')}"}, + env={"PATH": f"{exe.parent};{os.getenv('PATH')}"}, ) - self.assertEqual(f"{sys.executable} -prearg {script} -postarg", data["stdout"].strip()) + self.assertEqual(f"{exe} -prearg {script} -postarg", data["stdout"].strip()) def test_recursive_search_path(self): stem = self.get_py_exe().stem @@ -727,15 +736,18 @@ def test_shebang_command_in_venv(self): data = self.run_py([script], expect_returncode=103) with self.fake_venv() as (venv_exe, env): - # Put a real Python (ourselves) on PATH as a distraction. + # Put a "normal" Python on PATH as a distraction. # The active VIRTUAL_ENV should be preferred when the name isn't an # exact match. - env["PATH"] = f"{Path(sys.executable).parent};{os.environ['PATH']}" + exe = Path(Path(venv_exe).name).absolute() + exe.touch() + self.addCleanup(exe.unlink) + env["PATH"] = f"{exe.parent};{os.environ['PATH']}" with self.script(f'#! /usr/bin/env {stem} arg1') as script: data = self.run_py([script], env=env) self.assertEqual(data["stdout"].strip(), f"{venv_exe} arg1 {script}") - with self.script(f'#! /usr/bin/env {Path(sys.executable).stem} arg1') as script: + with self.script(f'#! 
/usr/bin/env {exe.stem} arg1') as script: data = self.run_py([script], env=env) - self.assertEqual(data["stdout"].strip(), f"{sys.executable} arg1 {script}") + self.assertEqual(data["stdout"].strip(), f"{exe} arg1 {script}") diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py index 33db5d6443f6fd..0be26981184213 100644 --- a/Lib/test/test_logging.py +++ b/Lib/test/test_logging.py @@ -43,6 +43,7 @@ import tempfile from test.support.script_helper import assert_python_ok, assert_python_failure from test import support +from test.support import import_helper from test.support import os_helper from test.support import socket_helper from test.support import threading_helper @@ -3924,7 +3925,8 @@ def test_90195(self): def test_111615(self): # See gh-111615 - import multiprocessing as mp + import_helper.import_module('_multiprocessing') # see gh-113692 + mp = import_helper.import_module('multiprocessing') config = { 'version': 1, diff --git a/Lib/test/test_lzma.py b/Lib/test/test_lzma.py index 13b200912f6abd..65e6488c5d7b10 100644 --- a/Lib/test/test_lzma.py +++ b/Lib/test/test_lzma.py @@ -1401,6 +1401,14 @@ def test__decode_filter_properties(self): self.assertEqual(filterspec["lc"], 3) self.assertEqual(filterspec["dict_size"], 8 << 20) + # see gh-104282 + filters = [lzma.FILTER_X86, lzma.FILTER_POWERPC, + lzma.FILTER_IA64, lzma.FILTER_ARM, + lzma.FILTER_ARMTHUMB, lzma.FILTER_SPARC] + for f in filters: + filterspec = lzma._decode_filter_properties(f, b"") + self.assertEqual(filterspec, {"id": f}) + def test_filter_properties_roundtrip(self): spec1 = lzma._decode_filter_properties( lzma.FILTER_LZMA1, b"]\x00\x00\x80\x00") diff --git a/Lib/test/test_mailbox.py b/Lib/test/test_mailbox.py index 8c350eb02ccc17..d84faad0eb3406 100644 --- a/Lib/test/test_mailbox.py +++ b/Lib/test/test_mailbox.py @@ -1360,6 +1360,15 @@ def test_no_dot_mh_sequences_file(self): box.set_sequences({}) self.assertEqual(os.listdir(path), ['.mh_sequences']) + def test_lock_unlock_no_dot_mh_sequences_file(self): + path = os.path.join(self._path, 'foo.bar') + os.mkdir(path) + box = self._factory(path) + self.assertEqual(os.listdir(path), []) + box.lock() + box.unlock() + self.assertEqual(os.listdir(path), ['.mh_sequences']) + def test_issue2625(self): msg0 = mailbox.MHMessage(self._template % 0) msg0.add_sequence('foo') diff --git a/Lib/test/test_marshal.py b/Lib/test/test_marshal.py index 3d9d6d5d0aca34..6e17e010e7f355 100644 --- a/Lib/test/test_marshal.py +++ b/Lib/test/test_marshal.py @@ -129,6 +129,32 @@ def test_different_filenames(self): self.assertEqual(co1.co_filename, "f1") self.assertEqual(co2.co_filename, "f2") + def test_no_allow_code(self): + data = {'a': [({0},)]} + dump = marshal.dumps(data, allow_code=False) + self.assertEqual(marshal.loads(dump, allow_code=False), data) + + f = io.BytesIO() + marshal.dump(data, f, allow_code=False) + f.seek(0) + self.assertEqual(marshal.load(f, allow_code=False), data) + + co = ExceptionTestCase.test_exceptions.__code__ + data = {'a': [({co, 0},)]} + dump = marshal.dumps(data, allow_code=True) + self.assertEqual(marshal.loads(dump, allow_code=True), data) + with self.assertRaises(ValueError): + marshal.dumps(data, allow_code=False) + with self.assertRaises(ValueError): + marshal.loads(dump, allow_code=False) + + marshal.dump(data, io.BytesIO(), allow_code=True) + self.assertEqual(marshal.load(io.BytesIO(dump), allow_code=True), data) + with self.assertRaises(ValueError): + marshal.dump(data, io.BytesIO(), allow_code=False) + with self.assertRaises(ValueError): + 
marshal.load(io.BytesIO(dump), allow_code=False) + @requires_debug_ranges() def test_minimal_linetable_with_no_debug_ranges(self): # Make sure when demarshalling objects with `-X no_debug_ranges` diff --git a/Lib/test/test_mmap.py b/Lib/test/test_mmap.py index 866ede5b83dff0..b89621e08577be 100644 --- a/Lib/test/test_mmap.py +++ b/Lib/test/test_mmap.py @@ -4,6 +4,7 @@ from test.support.import_helper import import_module from test.support.os_helper import TESTFN, unlink import unittest +import errno import os import re import itertools @@ -266,6 +267,62 @@ def test_access_parameter(self): self.assertRaises(TypeError, m.write_byte, 0) m.close() + @unittest.skipIf(os.name == 'nt', 'trackfd not present on Windows') + def test_trackfd_parameter(self): + size = 64 + with open(TESTFN, "wb") as f: + f.write(b"a"*size) + for close_original_fd in True, False: + with self.subTest(close_original_fd=close_original_fd): + with open(TESTFN, "r+b") as f: + with mmap.mmap(f.fileno(), size, trackfd=False) as m: + if close_original_fd: + f.close() + self.assertEqual(len(m), size) + with self.assertRaises(OSError) as err_cm: + m.size() + self.assertEqual(err_cm.exception.errno, errno.EBADF) + with self.assertRaises(ValueError): + m.resize(size * 2) + with self.assertRaises(ValueError): + m.resize(size // 2) + self.assertEqual(m.closed, False) + + # Smoke-test other API + m.write_byte(ord('X')) + m[2] = ord('Y') + m.flush() + with open(TESTFN, "rb") as f: + self.assertEqual(f.read(4), b'XaYa') + self.assertEqual(m.tell(), 1) + m.seek(0) + self.assertEqual(m.tell(), 0) + self.assertEqual(m.read_byte(), ord('X')) + + self.assertEqual(m.closed, True) + self.assertEqual(os.stat(TESTFN).st_size, size) + + @unittest.skipIf(os.name == 'nt', 'trackfd not present on Windows') + def test_trackfd_neg1(self): + size = 64 + with mmap.mmap(-1, size, trackfd=False) as m: + with self.assertRaises(OSError): + m.size() + with self.assertRaises(ValueError): + m.resize(size // 2) + self.assertEqual(len(m), size) + m[0] = ord('a') + assert m[0] == ord('a') + + @unittest.skipIf(os.name != 'nt', 'trackfd only fails on Windows') + def test_no_trackfd_parameter_on_windows(self): + # 'trackffd' is an invalid keyword argument for this function + size = 64 + with self.assertRaises(TypeError): + mmap.mmap(-1, size, trackfd=True) + with self.assertRaises(TypeError): + mmap.mmap(-1, size, trackfd=False) + def test_bad_file_desc(self): # Try opening a bad file descriptor... 
self.assertRaises(OSError, mmap.mmap, -2, 4096) @@ -672,14 +729,16 @@ def test_tagname(self): m2.close() m1.close() + with self.assertRaisesRegex(TypeError, 'tagname'): + mmap.mmap(-1, 8, tagname=1) + @cpython_only @unittest.skipUnless(os.name == 'nt', 'requires Windows') def test_sizeof(self): m1 = mmap.mmap(-1, 100) tagname = random_tagname() m2 = mmap.mmap(-1, 100, tagname=tagname) - self.assertEqual(sys.getsizeof(m2), - sys.getsizeof(m1) + len(tagname) + 1) + self.assertGreater(sys.getsizeof(m2), sys.getsizeof(m1)) @unittest.skipUnless(os.name == 'nt', 'requires Windows') def test_crasher_on_windows(self): diff --git a/Lib/test/test_monitoring.py b/Lib/test/test_monitoring.py index a2efbc95f556c6..a64d1ed79decd8 100644 --- a/Lib/test/test_monitoring.py +++ b/Lib/test/test_monitoring.py @@ -750,7 +750,7 @@ class UnwindRecorder(ExceptionRecorder): event_type = E.PY_UNWIND def __call__(self, code, offset, exc): - self.events.append(("unwind", type(exc))) + self.events.append(("unwind", type(exc), code.co_name)) class ExceptionHandledRecorder(ExceptionRecorder): @@ -766,8 +766,27 @@ class ThrowRecorder(ExceptionRecorder): def __call__(self, code, offset, exc): self.events.append(("throw", type(exc))) -class ExceptionMonitoringTest(CheckEvents): +class CallRecorder: + + event_type = E.CALL + + def __init__(self, events): + self.events = events + + def __call__(self, code, offset, func, arg): + self.events.append(("call", func.__name__, arg)) + +class ReturnRecorder: + + event_type = E.PY_RETURN + + def __init__(self, events): + self.events = events + def __call__(self, code, offset, val): + self.events.append(("return", code.co_name, val)) + +class ExceptionMonitoringTest(CheckEvents): exception_recorders = ( ExceptionRecorder, @@ -936,26 +955,48 @@ def func(): ) self.assertEqual(events[0], ("throw", IndexError)) -class LineRecorder: + def test_no_unwind_for_shim_frame(self): - event_type = E.LINE + class B: + def __init__(self): + raise ValueError() + + def f(): + try: + return B() + except ValueError: + pass + for _ in range(100): + f() + recorders = ( + ReturnRecorder, + UnwindRecorder + ) + events = self.get_events(f, TEST_TOOL, recorders) + adaptive_insts = dis.get_instructions(f, adaptive=True) + self.assertIn( + "CALL_ALLOC_AND_ENTER_INIT", + [i.opname for i in adaptive_insts] + ) + #There should be only one unwind event + expected = [ + ('unwind', ValueError, '__init__'), + ('return', 'f', None), + ] - def __init__(self, events): - self.events = events + self.assertEqual(events, expected) - def __call__(self, code, line): - self.events.append(("line", code.co_name, line - code.co_firstlineno)) +class LineRecorder: -class CallRecorder: + event_type = E.LINE - event_type = E.CALL def __init__(self, events): self.events = events - def __call__(self, code, offset, func, arg): - self.events.append(("call", func.__name__, arg)) + def __call__(self, code, line): + self.events.append(("line", code.co_name, line - code.co_firstlineno)) class CEventRecorder: @@ -1351,15 +1392,6 @@ class BranchRecorder(JumpRecorder): event_type = E.BRANCH name = "branch" -class ReturnRecorder: - - event_type = E.PY_RETURN - - def __init__(self, events): - self.events = events - - def __call__(self, code, offset, val): - self.events.append(("return", val)) JUMP_AND_BRANCH_RECORDERS = JumpRecorder, BranchRecorder @@ -1449,11 +1481,11 @@ def func(): ('branch', 'func', 4, 4), ('line', 'func', 5), ('line', 'meth', 1), - ('return', None), + ('return', 'meth', None), ('jump', 'func', 5, 5), ('jump', 'func', 5, 
'[offset=114]'), ('branch', 'func', '[offset=120]', '[offset=124]'), - ('return', None), + ('return', 'func', None), ('line', 'get_events', 11)]) class TestLoadSuperAttr(CheckEvents): diff --git a/Lib/test/test_ntpath.py b/Lib/test/test_ntpath.py index bf990ed36fbcae..aefcb98f1c30eb 100644 --- a/Lib/test/test_ntpath.py +++ b/Lib/test/test_ntpath.py @@ -227,10 +227,18 @@ def test_split(self): tester('ntpath.split("//conky/mountpoint/")', ('//conky/mountpoint/', '')) def test_isabs(self): + tester('ntpath.isabs("foo\\bar")', 0) + tester('ntpath.isabs("foo/bar")', 0) tester('ntpath.isabs("c:\\")', 1) + tester('ntpath.isabs("c:\\foo\\bar")', 1) + tester('ntpath.isabs("c:/foo/bar")', 1) tester('ntpath.isabs("\\\\conky\\mountpoint\\")', 1) - tester('ntpath.isabs("\\foo")', 1) - tester('ntpath.isabs("\\foo\\bar")', 1) + + # gh-44626: paths with only a drive or root are not absolute. + tester('ntpath.isabs("\\foo\\bar")', 0) + tester('ntpath.isabs("/foo/bar")', 0) + tester('ntpath.isabs("c:foo\\bar")', 0) + tester('ntpath.isabs("c:foo/bar")', 0) # gh-96290: normal UNC paths and device paths without trailing backslashes tester('ntpath.isabs("\\\\conky\\mountpoint")', 1) diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py index c66c5797471413..98b30d2108a1a1 100644 --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -3085,6 +3085,66 @@ def test_stat_unlink_race(self): except subprocess.TimeoutExpired: proc.terminate() + @support.requires_subprocess() + def test_stat_inaccessible_file(self): + filename = os_helper.TESTFN + ICACLS = os.path.expandvars(r"%SystemRoot%\System32\icacls.exe") + + with open(filename, "wb") as f: + f.write(b'Test data') + + stat1 = os.stat(filename) + + try: + # Remove all permissions from the file + subprocess.check_output([ICACLS, filename, "/inheritance:r"], + stderr=subprocess.STDOUT) + except subprocess.CalledProcessError as ex: + if support.verbose: + print(ICACLS, filename, "/inheritance:r", "failed.") + print(ex.stdout.decode("oem", "replace").rstrip()) + try: + os.unlink(filename) + except OSError: + pass + self.skipTest("Unable to create inaccessible file") + + def cleanup(): + # Give delete permission. We are the file owner, so we can do this + # even though we removed all permissions earlier. + subprocess.check_output([ICACLS, filename, "/grant", "Everyone:(D)"], + stderr=subprocess.STDOUT) + os.unlink(filename) + + self.addCleanup(cleanup) + + if support.verbose: + print("File:", filename) + print("stat with access:", stat1) + + # First test - we shouldn't raise here, because we still have access to + # the directory and can extract enough information from its metadata. 
+ stat2 = os.stat(filename) + + if support.verbose: + print(" without access:", stat2) + + # We cannot get st_dev/st_ino, so ensure those are 0 or else our test + # is not set up correctly + self.assertEqual(0, stat2.st_dev) + self.assertEqual(0, stat2.st_ino) + + # st_mode and st_size should match (for a normal file, at least) + self.assertEqual(stat1.st_mode, stat2.st_mode) + self.assertEqual(stat1.st_size, stat2.st_size) + + # st_ctime and st_mtime should be the same + self.assertEqual(stat1.st_ctime, stat2.st_ctime) + self.assertEqual(stat1.st_mtime, stat2.st_mtime) + + # st_atime should be the same or later + self.assertGreaterEqual(stat1.st_atime, stat2.st_atime) + @os_helper.skip_unless_symlink class NonLocalSymlinkTests(unittest.TestCase): @@ -4485,6 +4545,61 @@ def test_openpty(self): self.assertEqual(os.get_inheritable(master_fd), False) self.assertEqual(os.get_inheritable(slave_fd), False) + @unittest.skipUnless(hasattr(os, 'spawnl'), "need os.openpty()") + def test_pipe_spawnl(self): + # gh-77046: On Windows, os.pipe() file descriptors must be created with + # _O_NOINHERIT to make them non-inheritable. UCRT has no public API to + # get (_osfile(fd) & _O_NOINHERIT), so use a functional test. + # + # Make sure that fd is not inherited by a child process created by + # os.spawnl(): get_osfhandle() and dup() must fail with EBADF. + + fd, fd2 = os.pipe() + self.addCleanup(os.close, fd) + self.addCleanup(os.close, fd2) + + code = textwrap.dedent(f""" + import errno + import os + import test.support + try: + import msvcrt + except ImportError: + msvcrt = None + + fd = {fd} + + with test.support.SuppressCrashReport(): + if msvcrt is not None: + try: + handle = msvcrt.get_osfhandle(fd) + except OSError as exc: + if exc.errno != errno.EBADF: + raise + # get_osfhandle(fd) failed with EBADF as expected + else: + raise Exception("get_osfhandle() must fail") + + try: + fd3 = os.dup(fd) + except OSError as exc: + if exc.errno != errno.EBADF: + raise + # os.dup(fd) failed with EBADF as expected + else: + os.close(fd3) + raise Exception("dup must fail") + """) + + filename = os_helper.TESTFN + self.addCleanup(os_helper.unlink, os_helper.TESTFN) + with open(filename, "w") as fp: + print(code, file=fp, end="") + + cmd = [sys.executable, filename] + exitcode = os.spawnl(os.P_WAIT, cmd[0], *cmd) + self.assertEqual(exitcode, 0) + class PathTConverterTests(unittest.TestCase): # tuples of (function name, allows fd arguments, additional arguments to diff --git a/Lib/test/test_pathlib/test_pathlib.py b/Lib/test/test_pathlib/test_pathlib.py index 8f95c804f80e69..bdbe92369639ef 100644 --- a/Lib/test/test_pathlib/test_pathlib.py +++ b/Lib/test/test_pathlib/test_pathlib.py @@ -45,6 +45,22 @@ class PurePathTest(test_pathlib_abc.DummyPurePathTest): # Make sure any symbolic links in the base test path are resolved. base = os.path.realpath(TESTFN) + # Keys are canonical paths, values are list of tuples of arguments + # supposed to produce equal paths. + equivalences = { + 'a/b': [ + ('a', 'b'), ('a/', 'b'), ('a', 'b/'), ('a/', 'b/'), + ('a/b/',), ('a//b',), ('a//b//',), + # Empty components get removed. + ('', 'a', 'b'), ('a', '', 'b'), ('a', 'b', ''), + ], + '/b/c/d': [ + ('a', '/b/c', 'd'), ('/a', '/b/c', 'd'), + # Empty components get removed. 
+ ('/', 'b', '', 'c/d'), ('/', '', 'b/c/d'), ('', '/b/c/d'), + ], + } + def test_concrete_class(self): if self.cls is pathlib.PurePath: expected = pathlib.PureWindowsPath if os.name == 'nt' else pathlib.PurePosixPath @@ -95,6 +111,47 @@ def test_constructor_nested(self): self.assertEqual(P(P('a'), P('b'), P('c')), P(FakePath("a/b/c"))) self.assertEqual(P(P('./a:b')), P('./a:b')) + def _check_parse_path(self, raw_path, *expected): + sep = self.pathmod.sep + actual = self.cls._parse_path(raw_path.replace('/', sep)) + self.assertEqual(actual, expected) + if altsep := self.pathmod.altsep: + actual = self.cls._parse_path(raw_path.replace('/', altsep)) + self.assertEqual(actual, expected) + + def test_parse_path_common(self): + check = self._check_parse_path + sep = self.pathmod.sep + check('', '', '', []) + check('a', '', '', ['a']) + check('a/', '', '', ['a']) + check('a/b', '', '', ['a', 'b']) + check('a/b/', '', '', ['a', 'b']) + check('a/b/c/d', '', '', ['a', 'b', 'c', 'd']) + check('a/b//c/d', '', '', ['a', 'b', 'c', 'd']) + check('a/b/c/d', '', '', ['a', 'b', 'c', 'd']) + check('.', '', '', []) + check('././b', '', '', ['b']) + check('a/./b', '', '', ['a', 'b']) + check('a/./.', '', '', ['a']) + check('/a/b', '', sep, ['a', 'b']) + + def test_empty_path(self): + # The empty path points to '.' + p = self.cls('') + self.assertEqual(str(p), '.') + # Special case for the empty path. + self._check_str('.', ('',)) + + def test_parts_interning(self): + P = self.cls + p = P('/usr/bin/foo') + q = P('/usr/local/bin') + # 'usr' + self.assertIs(p.parts[1], q.parts[1]) + # 'bin' + self.assertIs(p.parts[2], q.parts[3]) + def test_join_nested(self): P = self.cls p = P('a/b').joinpath(P('c')) @@ -168,6 +225,37 @@ def test_as_bytes_common(self): P = self.cls self.assertEqual(bytes(P('a/b')), b'a' + sep + b'b') + def test_eq_common(self): + P = self.cls + self.assertEqual(P('a/b'), P('a/b')) + self.assertEqual(P('a/b'), P('a', 'b')) + self.assertNotEqual(P('a/b'), P('a')) + self.assertNotEqual(P('a/b'), P('/a/b')) + self.assertNotEqual(P('a/b'), P()) + self.assertNotEqual(P('/a/b'), P('/')) + self.assertNotEqual(P(), P('/')) + self.assertNotEqual(P(), "") + self.assertNotEqual(P(), {}) + self.assertNotEqual(P(), int) + + def test_equivalences(self): + for k, tuples in self.equivalences.items(): + canon = k.replace('/', self.sep) + posix = k.replace(self.sep, '/') + if canon != posix: + tuples = tuples + [ + tuple(part.replace('/', self.sep) for part in t) + for t in tuples + ] + tuples.append((posix, )) + pcanon = self.cls(canon) + for t in tuples: + p = self.cls(*t) + self.assertEqual(p, pcanon, "failed with args {}".format(t)) + self.assertEqual(hash(p), hash(pcanon)) + self.assertEqual(str(p), canon) + self.assertEqual(p.as_posix(), posix) + def test_ordering_common(self): # Ordering is tuple-alike. 
def assertLess(a, b): @@ -214,6 +302,58 @@ def test_repr_roundtrips(self): self.assertEqual(q, p) self.assertEqual(repr(q), r) + def test_name_empty(self): + P = self.cls + self.assertEqual(P('').name, '') + self.assertEqual(P('.').name, '') + self.assertEqual(P('/a/b/.').name, 'b') + + def test_stem_empty(self): + P = self.cls + self.assertEqual(P('').stem, '') + self.assertEqual(P('.').stem, '') + + def test_with_name_empty(self): + P = self.cls + self.assertRaises(ValueError, P('').with_name, 'd.xml') + self.assertRaises(ValueError, P('.').with_name, 'd.xml') + self.assertRaises(ValueError, P('/').with_name, 'd.xml') + self.assertRaises(ValueError, P('a/b').with_name, '') + self.assertRaises(ValueError, P('a/b').with_name, '.') + + def test_with_stem_empty(self): + P = self.cls + self.assertRaises(ValueError, P('').with_stem, 'd') + self.assertRaises(ValueError, P('.').with_stem, 'd') + self.assertRaises(ValueError, P('/').with_stem, 'd') + self.assertRaises(ValueError, P('a/b').with_stem, '') + self.assertRaises(ValueError, P('a/b').with_stem, '.') + + def test_with_suffix_empty(self): + # Path doesn't have a "filename" component. + P = self.cls + self.assertRaises(ValueError, P('').with_suffix, '.gz') + self.assertRaises(ValueError, P('.').with_suffix, '.gz') + self.assertRaises(ValueError, P('/').with_suffix, '.gz') + + def test_relative_to_several_args(self): + P = self.cls + p = P('a/b') + with self.assertWarns(DeprecationWarning): + p.relative_to('a', 'b') + p.relative_to('a', 'b', walk_up=True) + + def test_is_relative_to_several_args(self): + P = self.cls + p = P('a/b') + with self.assertWarns(DeprecationWarning): + p.is_relative_to('a', 'b') + + def test_match_empty(self): + P = self.cls + self.assertRaises(ValueError, P('a').match, '') + self.assertRaises(ValueError, P('a').match, '.') + class PurePosixPathTest(PurePathTest): cls = pathlib.PurePosixPath @@ -871,10 +1011,14 @@ def test_is_absolute(self): self.assertTrue(P('c:/a').is_absolute()) self.assertTrue(P('c:/a/b/').is_absolute()) # UNC paths are absolute by definition. 
+ self.assertTrue(P('//').is_absolute()) + self.assertTrue(P('//a').is_absolute()) self.assertTrue(P('//a/b').is_absolute()) self.assertTrue(P('//a/b/').is_absolute()) self.assertTrue(P('//a/b/c').is_absolute()) self.assertTrue(P('//a/b/c/d').is_absolute()) + self.assertTrue(P('//?/UNC/').is_absolute()) + self.assertTrue(P('//?/UNC/spam').is_absolute()) def test_join(self): P = self.cls @@ -1007,6 +1151,7 @@ def tempdir(self): def test_matches_pathbase_api(self): our_names = {name for name in dir(self.cls) if name[0] != '_'} + our_names.remove('is_reserved') # only present in PurePath path_names = {name for name in dir(pathlib._abc.PathBase) if name[0] != '_'} self.assertEqual(our_names, path_names) for attr_name in our_names: @@ -1673,6 +1818,13 @@ def test_walk_above_recursion_limit(self): list(base.walk()) list(base.walk(top_down=False)) + def test_glob_empty_pattern(self): + p = self.cls('') + with self.assertRaisesRegex(ValueError, 'Unacceptable pattern'): + list(p.glob('')) + with self.assertRaisesRegex(ValueError, 'Unacceptable pattern'): + list(p.glob('.')) + def test_glob_many_open_files(self): depth = 30 P = self.cls @@ -1703,6 +1855,34 @@ def test_glob_above_recursion_limit(self): with set_recursion_limit(recursion_limit): list(base.glob('**/')) + def test_glob_recursive_no_trailing_slash(self): + P = self.cls + p = P(self.base) + with self.assertWarns(FutureWarning): + p.glob('**') + with self.assertWarns(FutureWarning): + p.glob('*/**') + with self.assertWarns(FutureWarning): + p.rglob('**') + with self.assertWarns(FutureWarning): + p.rglob('*/**') + + def test_glob_pathlike(self): + P = self.cls + p = P(self.base) + pattern = "dir*/file*" + expect = {p / "dirB/fileB", p / "dirC/fileC"} + self.assertEqual(expect, set(p.glob(P(pattern)))) + self.assertEqual(expect, set(p.glob(FakePath(pattern)))) + + def test_rglob_pathlike(self): + P = self.cls + p = P(self.base, "dirC") + pattern = "**/file*" + expect = {p / "fileC", p / "dirD/fileD"} + self.assertEqual(expect, set(p.rglob(P(pattern)))) + self.assertEqual(expect, set(p.rglob(FakePath(pattern)))) + @only_posix class PosixPathTest(PathTest, PurePosixPathTest): diff --git a/Lib/test/test_pathlib/test_pathlib_abc.py b/Lib/test/test_pathlib/test_pathlib_abc.py index e4a4e81e547cd1..199718a8a69c5a 100644 --- a/Lib/test/test_pathlib/test_pathlib_abc.py +++ b/Lib/test/test_pathlib/test_pathlib_abc.py @@ -1,11 +1,11 @@ -import collections.abc +import collections import io import os import errno import stat import unittest -from pathlib._abc import UnsupportedOperation, PurePathBase, PathBase +from pathlib._abc import UnsupportedOperation, PathModuleBase, PurePathBase, PathBase import posixpath from test.support.os_helper import TESTFN @@ -17,6 +17,20 @@ def test_is_notimplemented(self): self.assertTrue(isinstance(UnsupportedOperation(), NotImplementedError)) +class PathModuleBaseTest(unittest.TestCase): + cls = PathModuleBase + + def test_unsupported_operation(self): + m = self.cls() + e = UnsupportedOperation + with self.assertRaises(e): + m.sep + self.assertRaises(e, m.join, 'foo') + self.assertRaises(e, m.split, 'foo') + self.assertRaises(e, m.splitdrive, 'foo') + self.assertRaises(e, m.normcase, 'foo') + self.assertRaises(e, m.isabs, 'foo') + # # Tests for the pure classes. 
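For orientation, the extra is_absolute() assertions follow the usual Windows anchoring rules. The hedged sketch below sticks to cases whose behaviour is stable on released Pythons; the bare '//' and '//?/UNC/' forms above depend on the newer path parsing this patch targets.

from pathlib import PureWindowsPath

# Illustrative only: a drive without a root is not absolute, a drive plus
# root is, and a UNC share is absolute by definition.
print(PureWindowsPath('c:a').is_absolute())           # False
print(PureWindowsPath('c:/a').is_absolute())          # True
print(PureWindowsPath('//host/share').is_absolute())  # True
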
# @@ -25,6 +39,42 @@ def test_is_notimplemented(self): class PurePathBaseTest(unittest.TestCase): cls = PurePathBase + def test_unsupported_operation_pure(self): + p = self.cls('foo') + e = UnsupportedOperation + with self.assertRaises(e): + p.drive + with self.assertRaises(e): + p.root + with self.assertRaises(e): + p.anchor + with self.assertRaises(e): + p.parts + with self.assertRaises(e): + p.parent + with self.assertRaises(e): + p.parents + with self.assertRaises(e): + p.name + with self.assertRaises(e): + p.stem + with self.assertRaises(e): + p.suffix + with self.assertRaises(e): + p.suffixes + with self.assertRaises(e): + p / 'bar' + with self.assertRaises(e): + 'bar' / p + self.assertRaises(e, p.joinpath, 'bar') + self.assertRaises(e, p.with_name, 'bar') + self.assertRaises(e, p.with_stem, 'bar') + self.assertRaises(e, p.with_suffix, '.txt') + self.assertRaises(e, p.relative_to, '') + self.assertRaises(e, p.is_relative_to, '') + self.assertRaises(e, p.is_absolute) + self.assertRaises(e, p.match, '*') + def test_magic_methods(self): P = self.cls self.assertFalse(hasattr(P, '__fspath__')) @@ -39,10 +89,13 @@ def test_magic_methods(self): self.assertIs(P.__ge__, object.__ge__) def test_pathmod(self): - self.assertIs(self.cls.pathmod, posixpath) + self.assertIsInstance(self.cls.pathmod, PathModuleBase) class DummyPurePath(PurePathBase): + __slots__ = () + pathmod = posixpath + def __eq__(self, other): if not isinstance(other, DummyPurePath): return NotImplemented @@ -51,6 +104,9 @@ def __eq__(self, other): def __hash__(self): return hash(str(self)) + def __repr__(self): + return "{}({!r})".format(self.__class__.__name__, self.as_posix()) + class DummyPurePathTest(unittest.TestCase): cls = DummyPurePath @@ -58,22 +114,6 @@ class DummyPurePathTest(unittest.TestCase): # Use a base path that's unrelated to any real filesystem path. base = f'/this/path/kills/fascists/{TESTFN}' - # Keys are canonical paths, values are list of tuples of arguments - # supposed to produce equal paths. - equivalences = { - 'a/b': [ - ('a', 'b'), ('a/', 'b'), ('a', 'b/'), ('a/', 'b/'), - ('a/b/',), ('a//b',), ('a//b//',), - # Empty components get removed. - ('', 'a', 'b'), ('a', '', 'b'), ('a', 'b', ''), - ], - '/b/c/d': [ - ('a', '/b/c', 'd'), ('/a', '/b/c', 'd'), - # Empty components get removed. 
- ('/', 'b', '', 'c/d'), ('/', '', 'b/c/d'), ('', '/b/c/d'), - ], - } - def setUp(self): p = self.cls('a') self.pathmod = p.pathmod @@ -129,31 +169,6 @@ def with_segments(self, *pathsegments): for parent in p.parents: self.assertEqual(42, parent.session_id) - def _check_parse_path(self, raw_path, *expected): - sep = self.pathmod.sep - actual = self.cls._parse_path(raw_path.replace('/', sep)) - self.assertEqual(actual, expected) - if altsep := self.pathmod.altsep: - actual = self.cls._parse_path(raw_path.replace('/', altsep)) - self.assertEqual(actual, expected) - - def test_parse_path_common(self): - check = self._check_parse_path - sep = self.pathmod.sep - check('', '', '', []) - check('a', '', '', ['a']) - check('a/', '', '', ['a']) - check('a/b', '', '', ['a', 'b']) - check('a/b/', '', '', ['a', 'b']) - check('a/b/c/d', '', '', ['a', 'b', 'c', 'd']) - check('a/b//c/d', '', '', ['a', 'b', 'c', 'd']) - check('a/b/c/d', '', '', ['a', 'b', 'c', 'd']) - check('.', '', '', []) - check('././b', '', '', ['b']) - check('a/./b', '', '', ['a', 'b']) - check('a/./.', '', '', ['a']) - check('/a/b', '', sep, ['a', 'b']) - def test_join_common(self): P = self.cls p = P('a/b') @@ -189,8 +204,6 @@ def test_str_common(self): # Canonicalized paths roundtrip. for pathstr in ('a', 'a/b', 'a/b/c', '/', '/a/b', '/a/b/c'): self._check_str(pathstr, (pathstr,)) - # Special case for the empty path. - self._check_str('.', ('',)) # Other tests for str() are in test_equivalences(). def test_as_posix_common(self): @@ -199,23 +212,12 @@ def test_as_posix_common(self): self.assertEqual(P(pathstr).as_posix(), pathstr) # Other tests for as_posix() are in test_equivalences(). - def test_eq_common(self): + def test_match_empty(self): P = self.cls - self.assertEqual(P('a/b'), P('a/b')) - self.assertEqual(P('a/b'), P('a', 'b')) - self.assertNotEqual(P('a/b'), P('a')) - self.assertNotEqual(P('a/b'), P('/a/b')) - self.assertNotEqual(P('a/b'), P()) - self.assertNotEqual(P('/a/b'), P('/')) - self.assertNotEqual(P(), P('/')) - self.assertNotEqual(P(), "") - self.assertNotEqual(P(), {}) - self.assertNotEqual(P(), int) + self.assertRaises(ValueError, P('a').match, '') def test_match_common(self): P = self.cls - self.assertRaises(ValueError, P('a').match, '') - self.assertRaises(ValueError, P('a').match, '.') # Simple relative pattern. self.assertTrue(P('b.py').match('b.py')) self.assertTrue(P('a/b.py').match('b.py')) @@ -252,7 +254,6 @@ def test_match_common(self): self.assertTrue(P('a/b/c.py').match('**')) self.assertTrue(P('/a/b/c.py').match('**')) self.assertTrue(P('/a/b/c.py').match('/**')) - self.assertTrue(P('/a/b/c.py').match('**/')) self.assertTrue(P('/a/b/c.py').match('/a/**')) self.assertTrue(P('/a/b/c.py').match('**/*.py')) self.assertTrue(P('/a/b/c.py').match('/**/*.py')) @@ -277,9 +278,9 @@ def test_match_common(self): self.assertFalse(P('c:/a/B.Py').match('C:/A/*.pY', case_sensitive=True)) self.assertTrue(P('/a/b/c.py').match('/A/*/*.Py', case_sensitive=False)) # Matching against empty path - self.assertFalse(P().match('*')) - self.assertTrue(P().match('**')) - self.assertFalse(P().match('**/*')) + self.assertFalse(P('').match('*')) + self.assertTrue(P('').match('**')) + self.assertFalse(P('').match('**/*')) def test_parts_common(self): # `parts` returns a tuple. 
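The match() hunks above tweak the empty-path and '**' cases; the basic matching rules they build on are sketched here with the public PurePosixPath class (a hedged illustration, not the ABC under test).

from pathlib import PurePosixPath

# Relative patterns are matched against the path from the right, one pattern
# segment per path component; anchored patterns must match the whole path.
print(PurePosixPath('a/b.py').match('*.py'))       # True
print(PurePosixPath('/a/b/c.py').match('b/*.py'))  # True
print(PurePosixPath('/a/b/c.py').match('a/*.py'))  # False
print(PurePosixPath('/a.py').match('/*.py'))       # True
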
@@ -293,32 +294,14 @@ def test_parts_common(self): parts = p.parts self.assertEqual(parts, (sep, 'a', 'b')) - def test_equivalences(self): - for k, tuples in self.equivalences.items(): - canon = k.replace('/', self.sep) - posix = k.replace(self.sep, '/') - if canon != posix: - tuples = tuples + [ - tuple(part.replace('/', self.sep) for part in t) - for t in tuples - ] - tuples.append((posix, )) - pcanon = self.cls(canon) - for t in tuples: - p = self.cls(*t) - self.assertEqual(p, pcanon, "failed with args {}".format(t)) - self.assertEqual(hash(p), hash(pcanon)) - self.assertEqual(str(p), canon) - self.assertEqual(p.as_posix(), posix) - def test_parent_common(self): # Relative P = self.cls p = P('a/b/c') self.assertEqual(p.parent, P('a/b')) self.assertEqual(p.parent.parent, P('a')) - self.assertEqual(p.parent.parent.parent, P()) - self.assertEqual(p.parent.parent.parent.parent, P()) + self.assertEqual(p.parent.parent.parent, P('')) + self.assertEqual(p.parent.parent.parent.parent, P('')) # Anchored p = P('/a/b/c') self.assertEqual(p.parent, P('/a/b')) @@ -334,17 +317,17 @@ def test_parents_common(self): self.assertEqual(len(par), 3) self.assertEqual(par[0], P('a/b')) self.assertEqual(par[1], P('a')) - self.assertEqual(par[2], P('.')) - self.assertEqual(par[-1], P('.')) + self.assertEqual(par[2], P('')) + self.assertEqual(par[-1], P('')) self.assertEqual(par[-2], P('a')) self.assertEqual(par[-3], P('a/b')) self.assertEqual(par[0:1], (P('a/b'),)) self.assertEqual(par[:2], (P('a/b'), P('a'))) self.assertEqual(par[:-1], (P('a/b'), P('a'))) - self.assertEqual(par[1:], (P('a'), P('.'))) - self.assertEqual(par[::2], (P('a/b'), P('.'))) - self.assertEqual(par[::-1], (P('.'), P('a'), P('a/b'))) - self.assertEqual(list(par), [P('a/b'), P('a'), P('.')]) + self.assertEqual(par[1:], (P('a'), P(''))) + self.assertEqual(par[::2], (P('a/b'), P(''))) + self.assertEqual(par[::-1], (P(''), P('a'), P('a/b'))) + self.assertEqual(list(par), [P('a/b'), P('a'), P('')]) with self.assertRaises(IndexError): par[-4] with self.assertRaises(IndexError): @@ -395,14 +378,17 @@ def test_anchor_common(self): self.assertEqual(P('/').anchor, sep) self.assertEqual(P('/a/b').anchor, sep) - def test_name_common(self): + def test_name_empty(self): P = self.cls self.assertEqual(P('').name, '') - self.assertEqual(P('.').name, '') + self.assertEqual(P('.').name, '.') + self.assertEqual(P('/a/b/.').name, '.') + + def test_name_common(self): + P = self.cls self.assertEqual(P('/').name, '') self.assertEqual(P('a/b').name, 'b') self.assertEqual(P('/a/b').name, 'b') - self.assertEqual(P('/a/b/.').name, 'b') self.assertEqual(P('a/b.py').name, 'b.py') self.assertEqual(P('/a/b.py').name, 'b.py') @@ -445,10 +431,13 @@ def test_suffixes_common(self): self.assertEqual(P('a/Some name. Ending with a dot.').suffixes, []) self.assertEqual(P('/a/Some name. 
Ending with a dot.').suffixes, []) - def test_stem_common(self): + def test_stem_empty(self): P = self.cls self.assertEqual(P('').stem, '') - self.assertEqual(P('.').stem, '') + self.assertEqual(P('.').stem, '.') + + def test_stem_common(self): + P = self.cls self.assertEqual(P('..').stem, '..') self.assertEqual(P('/').stem, '') self.assertEqual(P('a/b').stem, 'b') @@ -467,11 +456,17 @@ def test_with_name_common(self): self.assertEqual(P('/a/b.py').with_name('d.xml'), P('/a/d.xml')) self.assertEqual(P('a/Dot ending.').with_name('d.xml'), P('a/d.xml')) self.assertEqual(P('/a/Dot ending.').with_name('d.xml'), P('/a/d.xml')) - self.assertRaises(ValueError, P('').with_name, 'd.xml') - self.assertRaises(ValueError, P('.').with_name, 'd.xml') - self.assertRaises(ValueError, P('/').with_name, 'd.xml') - self.assertRaises(ValueError, P('a/b').with_name, '') - self.assertRaises(ValueError, P('a/b').with_name, '.') + + def test_with_name_empty(self): + P = self.cls + self.assertEqual(P('').with_name('d.xml'), P('d.xml')) + self.assertEqual(P('.').with_name('d.xml'), P('d.xml')) + self.assertEqual(P('/').with_name('d.xml'), P('/d.xml')) + self.assertEqual(P('a/b').with_name(''), P('a/')) + self.assertEqual(P('a/b').with_name('.'), P('a/.')) + + def test_with_name_seps(self): + P = self.cls self.assertRaises(ValueError, P('a/b').with_name, '/c') self.assertRaises(ValueError, P('a/b').with_name, 'c/') self.assertRaises(ValueError, P('a/b').with_name, 'c/d') @@ -485,11 +480,17 @@ def test_with_stem_common(self): self.assertEqual(P('/a/b.tar.gz').with_stem('d'), P('/a/d.gz')) self.assertEqual(P('a/Dot ending.').with_stem('d'), P('a/d')) self.assertEqual(P('/a/Dot ending.').with_stem('d'), P('/a/d')) - self.assertRaises(ValueError, P('').with_stem, 'd') - self.assertRaises(ValueError, P('.').with_stem, 'd') - self.assertRaises(ValueError, P('/').with_stem, 'd') - self.assertRaises(ValueError, P('a/b').with_stem, '') - self.assertRaises(ValueError, P('a/b').with_stem, '.') + + def test_with_stem_empty(self): + P = self.cls + self.assertEqual(P('').with_stem('d'), P('d')) + self.assertEqual(P('.').with_stem('d'), P('d')) + self.assertEqual(P('/').with_stem('d'), P('/d')) + self.assertEqual(P('a/b').with_stem(''), P('a/')) + self.assertEqual(P('a/b').with_stem('.'), P('a/.')) + + def test_with_stem_seps(self): + P = self.cls self.assertRaises(ValueError, P('a/b').with_stem, '/c') self.assertRaises(ValueError, P('a/b').with_stem, 'c/') self.assertRaises(ValueError, P('a/b').with_stem, 'c/d') @@ -503,10 +504,16 @@ def test_with_suffix_common(self): # Stripping suffix. self.assertEqual(P('a/b.py').with_suffix(''), P('a/b')) self.assertEqual(P('/a/b').with_suffix(''), P('/a/b')) + + def test_with_suffix_empty(self): + P = self.cls # Path doesn't have a "filename" component. - self.assertRaises(ValueError, P('').with_suffix, '.gz') - self.assertRaises(ValueError, P('.').with_suffix, '.gz') - self.assertRaises(ValueError, P('/').with_suffix, '.gz') + self.assertEqual(P('').with_suffix('.gz'), P('.gz')) + self.assertEqual(P('.').with_suffix('.gz'), P('..gz')) + self.assertEqual(P('/').with_suffix('.gz'), P('/.gz')) + + def test_with_suffix_seps(self): + P = self.cls # Invalid suffix. 
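Note that the relaxed with_name/with_stem/with_suffix behaviour above applies to the internal PurePathBase ABC only; the public classes in current releases still reject a path without a final component, as this hedged sketch shows.

from pathlib import PurePosixPath

# Illustrative only: only the last suffix is replaced or stripped, and a
# path with an empty name cannot take a suffix.
p = PurePosixPath('a/b.tar.gz')
print(p.with_suffix('.bz2'))  # a/b.tar.bz2
print(p.with_suffix(''))      # a/b.tar
try:
    PurePosixPath('/').with_suffix('.gz')
except ValueError as exc:
    print('rejected:', exc)
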
self.assertRaises(ValueError, P('a/b').with_suffix, 'gz') self.assertRaises(ValueError, P('a/b').with_suffix, '/') @@ -522,30 +529,26 @@ def test_relative_to_common(self): p = P('a/b') self.assertRaises(TypeError, p.relative_to) self.assertRaises(TypeError, p.relative_to, b'a') - self.assertEqual(p.relative_to(P()), P('a/b')) + self.assertEqual(p.relative_to(P('')), P('a/b')) self.assertEqual(p.relative_to(''), P('a/b')) self.assertEqual(p.relative_to(P('a')), P('b')) self.assertEqual(p.relative_to('a'), P('b')) self.assertEqual(p.relative_to('a/'), P('b')) - self.assertEqual(p.relative_to(P('a/b')), P()) - self.assertEqual(p.relative_to('a/b'), P()) - self.assertEqual(p.relative_to(P(), walk_up=True), P('a/b')) + self.assertEqual(p.relative_to(P('a/b')), P('')) + self.assertEqual(p.relative_to('a/b'), P('')) + self.assertEqual(p.relative_to(P(''), walk_up=True), P('a/b')) self.assertEqual(p.relative_to('', walk_up=True), P('a/b')) self.assertEqual(p.relative_to(P('a'), walk_up=True), P('b')) self.assertEqual(p.relative_to('a', walk_up=True), P('b')) self.assertEqual(p.relative_to('a/', walk_up=True), P('b')) - self.assertEqual(p.relative_to(P('a/b'), walk_up=True), P()) - self.assertEqual(p.relative_to('a/b', walk_up=True), P()) + self.assertEqual(p.relative_to(P('a/b'), walk_up=True), P('')) + self.assertEqual(p.relative_to('a/b', walk_up=True), P('')) self.assertEqual(p.relative_to(P('a/c'), walk_up=True), P('../b')) self.assertEqual(p.relative_to('a/c', walk_up=True), P('../b')) self.assertEqual(p.relative_to(P('a/b/c'), walk_up=True), P('..')) self.assertEqual(p.relative_to('a/b/c', walk_up=True), P('..')) self.assertEqual(p.relative_to(P('c'), walk_up=True), P('../a/b')) self.assertEqual(p.relative_to('c', walk_up=True), P('../a/b')) - # With several args. - with self.assertWarns(DeprecationWarning): - p.relative_to('a', 'b') - p.relative_to('a', 'b', walk_up=True) # Unrelated paths. 
self.assertRaises(ValueError, p.relative_to, P('c')) self.assertRaises(ValueError, p.relative_to, P('a/b/c')) @@ -565,15 +568,15 @@ def test_relative_to_common(self): self.assertEqual(p.relative_to(P('/a')), P('b')) self.assertEqual(p.relative_to('/a'), P('b')) self.assertEqual(p.relative_to('/a/'), P('b')) - self.assertEqual(p.relative_to(P('/a/b')), P()) - self.assertEqual(p.relative_to('/a/b'), P()) + self.assertEqual(p.relative_to(P('/a/b')), P('')) + self.assertEqual(p.relative_to('/a/b'), P('')) self.assertEqual(p.relative_to(P('/'), walk_up=True), P('a/b')) self.assertEqual(p.relative_to('/', walk_up=True), P('a/b')) self.assertEqual(p.relative_to(P('/a'), walk_up=True), P('b')) self.assertEqual(p.relative_to('/a', walk_up=True), P('b')) self.assertEqual(p.relative_to('/a/', walk_up=True), P('b')) - self.assertEqual(p.relative_to(P('/a/b'), walk_up=True), P()) - self.assertEqual(p.relative_to('/a/b', walk_up=True), P()) + self.assertEqual(p.relative_to(P('/a/b'), walk_up=True), P('')) + self.assertEqual(p.relative_to('/a/b', walk_up=True), P('')) self.assertEqual(p.relative_to(P('/a/c'), walk_up=True), P('../b')) self.assertEqual(p.relative_to('/a/c', walk_up=True), P('../b')) self.assertEqual(p.relative_to(P('/a/b/c'), walk_up=True), P('..')) @@ -584,7 +587,7 @@ def test_relative_to_common(self): self.assertRaises(ValueError, p.relative_to, P('/c')) self.assertRaises(ValueError, p.relative_to, P('/a/b/c')) self.assertRaises(ValueError, p.relative_to, P('/a/c')) - self.assertRaises(ValueError, p.relative_to, P()) + self.assertRaises(ValueError, p.relative_to, P('')) self.assertRaises(ValueError, p.relative_to, '') self.assertRaises(ValueError, p.relative_to, P('a')) self.assertRaises(ValueError, p.relative_to, P("../a")) @@ -601,15 +604,12 @@ def test_is_relative_to_common(self): p = P('a/b') self.assertRaises(TypeError, p.is_relative_to) self.assertRaises(TypeError, p.is_relative_to, b'a') - self.assertTrue(p.is_relative_to(P())) + self.assertTrue(p.is_relative_to(P(''))) self.assertTrue(p.is_relative_to('')) self.assertTrue(p.is_relative_to(P('a'))) self.assertTrue(p.is_relative_to('a/')) self.assertTrue(p.is_relative_to(P('a/b'))) self.assertTrue(p.is_relative_to('a/b')) - # With several args. - with self.assertWarns(DeprecationWarning): - p.is_relative_to('a', 'b') # Unrelated paths. 
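The rewritten expectations above spell the empty result as P('') instead of P(); the underlying relative_to() semantics, including walk_up (available since Python 3.12), are sketched here with the public class.

from pathlib import PurePosixPath

# Illustrative only: walk_up=True lets the result climb out of the other
# path with '..' components instead of raising ValueError.
p = PurePosixPath('/a/b')
print(p.relative_to('/a'))                    # b
print(p.relative_to('/a/c', walk_up=True))    # ../b
print(p.relative_to('/a/b/c', walk_up=True))  # ..
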
self.assertFalse(p.is_relative_to(P('c'))) self.assertFalse(p.is_relative_to(P('a/b/c'))) @@ -627,7 +627,7 @@ def test_is_relative_to_common(self): self.assertFalse(p.is_relative_to(P('/c'))) self.assertFalse(p.is_relative_to(P('/a/b/c'))) self.assertFalse(p.is_relative_to(P('/a/c'))) - self.assertFalse(p.is_relative_to(P())) + self.assertFalse(p.is_relative_to(P(''))) self.assertFalse(p.is_relative_to('')) self.assertFalse(p.is_relative_to(P('a'))) @@ -641,7 +641,7 @@ class PathBaseTest(PurePathBaseTest): def test_unsupported_operation(self): P = self.cls - p = self.cls() + p = self.cls('') e = UnsupportedOperation self.assertRaises(e, p.stat) self.assertRaises(e, p.lstat) @@ -685,13 +685,13 @@ def test_unsupported_operation(self): def test_as_uri_common(self): e = UnsupportedOperation - self.assertRaises(e, self.cls().as_uri) + self.assertRaises(e, self.cls('').as_uri) def test_fspath_common(self): - self.assertRaises(TypeError, os.fspath, self.cls()) + self.assertRaises(TypeError, os.fspath, self.cls('')) def test_as_bytes_common(self): - self.assertRaises(TypeError, bytes, self.cls()) + self.assertRaises(TypeError, bytes, self.cls('')) class DummyPathIO(io.BytesIO): @@ -709,11 +709,19 @@ def close(self): super().close() +DummyPathStatResult = collections.namedtuple( + 'DummyPathStatResult', + 'st_mode st_ino st_dev st_nlink st_uid st_gid st_size st_atime st_mtime st_ctime') + + class DummyPath(PathBase): """ Simple implementation of PathBase that keeps files and directories in memory. """ + __slots__ = () + pathmod = posixpath + _files = {} _directories = {} _symlinks = {} @@ -726,6 +734,9 @@ def __eq__(self, other): def __hash__(self): return hash(str(self)) + def __repr__(self): + return "{}({!r})".format(self.__class__.__name__, self.as_posix()) + def stat(self, *, follow_symlinks=True): if follow_symlinks: path = str(self.resolve()) @@ -739,7 +750,7 @@ def stat(self, *, follow_symlinks=True): st_mode = stat.S_IFLNK else: raise FileNotFoundError(errno.ENOENT, "Not found", str(self)) - return os.stat_result((st_mode, hash(str(self)), 0, 0, 0, 0, 0, 0, 0, 0)) + return DummyPathStatResult(st_mode, hash(str(self)), 0, 0, 0, 0, 0, 0, 0, 0) def open(self, mode='r', buffering=-1, encoding=None, errors=None, newline=None): @@ -892,11 +903,6 @@ def test_samefile(self): self.assertRaises(FileNotFoundError, r.samefile, r) self.assertRaises(FileNotFoundError, r.samefile, non_existent) - def test_empty_path(self): - # The empty path points to '.' - p = self.cls('') - self.assertEqual(str(p), '.') - def test_exists(self): P = self.cls p = P(self.base) @@ -1039,9 +1045,12 @@ def _check(glob, expected): _check(p.glob("*/"), ["dirA/", "dirB/", "dirC/", "dirE/", "linkB/"]) def test_glob_empty_pattern(self): - p = self.cls() - with self.assertRaisesRegex(ValueError, 'Unacceptable pattern'): - list(p.glob('')) + def _check(glob, expected): + self.assertEqual(set(glob), { P(self.base, q) for q in expected }) + P = self.cls + p = P(self.base) + _check(p.glob(""), [""]) + _check(p.glob("."), ["."]) def test_glob_case_sensitive(self): P = self.cls @@ -1060,7 +1069,7 @@ def test_glob_follow_symlinks_common(self): self.skipTest("symlinks required") def _check(path, glob, expected): actual = {path for path in path.glob(glob, follow_symlinks=True) - if "linkD" not in path.parent.parts} # exclude symlink loop. + if path.parts.count("linkD") <= 1} # exclude symlink loop. 
self.assertEqual(actual, { P(self.base, q) for q in expected }) P = self.cls p = P(self.base) @@ -1070,13 +1079,15 @@ def _check(path, glob, expected): _check(p, "*B/*", ["dirB/fileB", "dirB/linkD", "linkB/fileB", "linkB/linkD"]) _check(p, "*/fileB", ["dirB/fileB", "linkB/fileB"]) _check(p, "*/", ["dirA/", "dirB/", "dirC/", "dirE/", "linkB/"]) - _check(p, "dir*/*/..", ["dirC/dirD/..", "dirA/linkC/.."]) + _check(p, "dir*/*/..", ["dirC/dirD/..", "dirA/linkC/..", "dirB/linkD/.."]) _check(p, "dir*/**/", ["dirA/", "dirA/linkC/", "dirA/linkC/linkD/", "dirB/", "dirB/linkD/", "dirC/", "dirC/dirD/", "dirE/"]) _check(p, "dir*/**/..", ["dirA/..", "dirA/linkC/..", "dirB/..", + "dirB/linkD/..", "dirA/linkC/linkD/..", "dirC/..", "dirC/dirD/..", "dirE/.."]) _check(p, "dir*/*/**/", ["dirA/linkC/", "dirA/linkC/linkD/", "dirB/linkD/", "dirC/dirD/"]) - _check(p, "dir*/*/**/..", ["dirA/linkC/..", "dirC/dirD/.."]) + _check(p, "dir*/*/**/..", ["dirA/linkC/..", "dirA/linkC/linkD/..", + "dirB/linkD/..", "dirC/dirD/.."]) _check(p, "dir*/**/fileC", ["dirC/fileC"]) _check(p, "dir*/*/../dirD/**/", ["dirC/dirD/../dirD/"]) _check(p, "*/dirD/**/", ["dirC/dirD/"]) @@ -1131,7 +1142,7 @@ def _check(glob, expected): "dirA/", "dirA/linkC/", "dirB/", "dirB/linkD/", "dirC/", "dirC/dirD/", "dirE/", "linkB/", ]) - _check(p.rglob(""), ["./", "dirA/", "dirB/", "dirC/", "dirE/", "dirC/dirD/"]) + _check(p.rglob(""), ["", "dirA/", "dirB/", "dirC/", "dirE/", "dirC/dirD/"]) p = P(self.base, "dirC") _check(p.rglob("*"), ["dirC/fileC", "dirC/novel.txt", @@ -1152,18 +1163,21 @@ def test_rglob_follow_symlinks_common(self): self.skipTest("symlinks required") def _check(path, glob, expected): actual = {path for path in path.rglob(glob, follow_symlinks=True) - if 'linkD' not in path.parent.parts} # exclude symlink loop. + if path.parts.count("linkD") <= 1} # exclude symlink loop. 
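The follow_symlinks expectations above lean on the fact that rglob(pattern) behaves like glob('**/' + pattern). A hedged, self-contained sketch of that relationship, using made-up file names unrelated to the test tree:

import pathlib
import tempfile

# Illustrative only: rglob() and an explicit '**/' glob() visit the same files.
with tempfile.TemporaryDirectory() as tmp:
    root = pathlib.Path(tmp)
    (root / 'dirB').mkdir()
    (root / 'dirB' / 'fileB').write_text('')
    assert set(root.rglob('file*')) == set(root.glob('**/file*'))
    print(sorted(p.name for p in root.rglob('file*')))  # ['fileB']
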
self.assertEqual(actual, { P(self.base, q) for q in expected }) P = self.cls p = P(self.base) - _check(p, "fileB", ["dirB/fileB", "dirA/linkC/fileB", "linkB/fileB"]) + _check(p, "fileB", ["dirB/fileB", "dirA/linkC/fileB", "linkB/fileB", + "dirA/linkC/linkD/fileB", "dirB/linkD/fileB", "linkB/linkD/fileB"]) _check(p, "*/fileA", []) - _check(p, "*/fileB", ["dirB/fileB", "dirA/linkC/fileB", "linkB/fileB"]) + _check(p, "*/fileB", ["dirB/fileB", "dirA/linkC/fileB", "linkB/fileB", + "dirA/linkC/linkD/fileB", "dirB/linkD/fileB", "linkB/linkD/fileB"]) _check(p, "file*", ["fileA", "dirA/linkC/fileB", "dirB/fileB", + "dirA/linkC/linkD/fileB", "dirB/linkD/fileB", "linkB/linkD/fileB", "dirC/fileC", "dirC/dirD/fileD", "linkB/fileB"]) _check(p, "*/", ["dirA/", "dirA/linkC/", "dirA/linkC/linkD/", "dirB/", "dirB/linkD/", "dirC/", "dirC/dirD/", "dirE/", "linkB/", "linkB/linkD/"]) - _check(p, "", ["./", "dirA/", "dirA/linkC/", "dirA/linkC/linkD/", "dirB/", "dirB/linkD/", + _check(p, "", ["", "dirA/", "dirA/linkC/", "dirA/linkC/linkD/", "dirB/", "dirB/linkD/", "dirC/", "dirE/", "dirC/dirD/", "linkB/", "linkB/linkD/"]) p = P(self.base, "dirC") @@ -1190,7 +1204,7 @@ def _check(path, glob, expected): _check(p, "*/fileB", ["dirB/fileB"]) _check(p, "file*", ["fileA", "dirB/fileB", "dirC/fileC", "dirC/dirD/fileD", ]) _check(p, "*/", ["dirA/", "dirB/", "dirC/", "dirC/dirD/", "dirE/"]) - _check(p, "", ["./", "dirA/", "dirB/", "dirC/", "dirE/", "dirC/dirD/"]) + _check(p, "", ["", "dirA/", "dirB/", "dirC/", "dirE/", "dirC/dirD/"]) p = P(self.base, "dirC") _check(p, "*", ["dirC/fileC", "dirC/novel.txt", @@ -1266,19 +1280,6 @@ def test_glob_long_symlink(self): bad_link.symlink_to("bad" * 200) self.assertEqual(sorted(base.glob('**/*')), [bad_link]) - def test_glob_recursive_no_trailing_slash(self): - P = self.cls - p = P(self.base) - with self.assertWarns(FutureWarning): - p.glob('**') - with self.assertWarns(FutureWarning): - p.glob('*/**') - with self.assertWarns(FutureWarning): - p.rglob('**') - with self.assertWarns(FutureWarning): - p.rglob('*/**') - - def test_readlink(self): if not self.can_symlink: self.skipTest("symlinks required") @@ -1580,15 +1581,6 @@ def test_is_char_device_false(self): self.assertIs((P / 'fileA\udfff').is_char_device(), False) self.assertIs((P / 'fileA\x00').is_char_device(), False) - def test_parts_interning(self): - P = self.cls - p = P('/usr/bin/foo') - q = P('/usr/local/bin') - # 'usr' - self.assertIs(p.parts[1], q.parts[1]) - # 'bin' - self.assertIs(p.parts[2], q.parts[3]) - def _check_complex_symlinks(self, link0_target): if not self.can_symlink: self.skipTest("symlinks required") @@ -1617,7 +1609,7 @@ def _check_complex_symlinks(self, link0_target): # Resolve relative paths. try: - self.cls().absolute() + self.cls('').absolute() except UnsupportedOperation: return old_path = os.getcwd() @@ -1796,6 +1788,11 @@ def test_walk_symlink_location(self): class DummyPathWithSymlinks(DummyPath): + __slots__ = () + + # Reduce symlink traversal limit to make tests run faster. + _max_symlinks = 20 + def readlink(self): path = str(self.parent.resolve() / self.name) if path in self._symlinks: diff --git a/Lib/test/test_pdb.py b/Lib/test/test_pdb.py index d53fe3c611bc35..03487aa6ffd81f 100644 --- a/Lib/test/test_pdb.py +++ b/Lib/test/test_pdb.py @@ -16,11 +16,12 @@ from test import support from test.support import os_helper from test.support.import_helper import import_module -from test.support.pty_helper import run_pty -# This little helper class is essential for testing pdb under doctest. 
-from test.test_doctest import _FakeInput +from test.support.pty_helper import run_pty, FakeInput from unittest.mock import patch +# gh-114275: WASI fails to run asyncio tests, similar skip than test_asyncio. +SKIP_ASYNCIO_TESTS = (not support.has_socket_support) + class PdbTestInput(object): """Context manager that makes testing Pdb in doctests easier.""" @@ -30,7 +31,7 @@ def __init__(self, input): def __enter__(self): self.real_stdin = sys.stdin - sys.stdin = _FakeInput(self.input) + sys.stdin = FakeInput(self.input) self.orig_trace = sys.gettrace() if hasattr(sys, 'gettrace') else None def __exit__(self, *exc): @@ -847,9 +848,12 @@ def test_convenience_variables(): >>> with PdbTestInput([ # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE ... '$_frame.f_lineno', # Check frame convenience variable + ... '$ _frame', # This should be a syntax error ... '$a = 10', # Set a convenience variable ... '$a', # Print its value + ... 'p "$a"', # Print the string $a ... 'p $a + 2', # Do some calculation + ... 'p f"$a = {$a}"', # Make sure $ in string is not converted and f-string works ... 'u', # Switch frame ... '$_frame.f_lineno', # Make sure the frame changed ... '$a', # Make sure the value persists @@ -869,11 +873,17 @@ def test_convenience_variables(): -> try: (Pdb) $_frame.f_lineno 3 + (Pdb) $ _frame + *** SyntaxError: invalid syntax (Pdb) $a = 10 (Pdb) $a 10 + (Pdb) p "$a" + '$a' (Pdb) p $a + 2 12 + (Pdb) p f"$a = {$a}" + '$a = 10' (Pdb) u > (2)test_function() -> util_function() @@ -1686,122 +1696,123 @@ def test_pdb_next_command_for_generator(): finished """ -def test_pdb_next_command_for_coroutine(): - """Testing skip unwindng stack on yield for coroutines for "next" command - - >>> import asyncio - - >>> async def test_coro(): - ... await asyncio.sleep(0) - ... await asyncio.sleep(0) - ... await asyncio.sleep(0) - - >>> async def test_main(): - ... import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() - ... await test_coro() - - >>> def test_function(): - ... loop = asyncio.new_event_loop() - ... loop.run_until_complete(test_main()) - ... loop.close() - ... asyncio.set_event_loop_policy(None) - ... print("finished") - - >>> with PdbTestInput(['step', - ... 'step', - ... 'next', - ... 'next', - ... 'next', - ... 'step', - ... 'continue']): - ... test_function() - > (3)test_main() - -> await test_coro() - (Pdb) step - --Call-- - > (1)test_coro() - -> async def test_coro(): - (Pdb) step - > (2)test_coro() - -> await asyncio.sleep(0) - (Pdb) next - > (3)test_coro() - -> await asyncio.sleep(0) - (Pdb) next - > (4)test_coro() - -> await asyncio.sleep(0) - (Pdb) next - Internal StopIteration - > (3)test_main() - -> await test_coro() - (Pdb) step - --Return-- - > (3)test_main()->None - -> await test_coro() - (Pdb) continue - finished - """ - -def test_pdb_next_command_for_asyncgen(): - """Testing skip unwindng stack on yield for coroutines for "next" command - - >>> import asyncio - - >>> async def agen(): - ... yield 1 - ... await asyncio.sleep(0) - ... yield 2 - - >>> async def test_coro(): - ... async for x in agen(): - ... print(x) - - >>> async def test_main(): - ... import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() - ... await test_coro() - - >>> def test_function(): - ... loop = asyncio.new_event_loop() - ... loop.run_until_complete(test_main()) - ... loop.close() - ... asyncio.set_event_loop_policy(None) - ... 
print("finished") +if not SKIP_ASYNCIO_TESTS: + def test_pdb_next_command_for_coroutine(): + """Testing skip unwindng stack on yield for coroutines for "next" command + + >>> import asyncio + + >>> async def test_coro(): + ... await asyncio.sleep(0) + ... await asyncio.sleep(0) + ... await asyncio.sleep(0) + + >>> async def test_main(): + ... import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + ... await test_coro() + + >>> def test_function(): + ... loop = asyncio.new_event_loop() + ... loop.run_until_complete(test_main()) + ... loop.close() + ... asyncio.set_event_loop_policy(None) + ... print("finished") + + >>> with PdbTestInput(['step', + ... 'step', + ... 'next', + ... 'next', + ... 'next', + ... 'step', + ... 'continue']): + ... test_function() + > (3)test_main() + -> await test_coro() + (Pdb) step + --Call-- + > (1)test_coro() + -> async def test_coro(): + (Pdb) step + > (2)test_coro() + -> await asyncio.sleep(0) + (Pdb) next + > (3)test_coro() + -> await asyncio.sleep(0) + (Pdb) next + > (4)test_coro() + -> await asyncio.sleep(0) + (Pdb) next + Internal StopIteration + > (3)test_main() + -> await test_coro() + (Pdb) step + --Return-- + > (3)test_main()->None + -> await test_coro() + (Pdb) continue + finished + """ - >>> with PdbTestInput(['step', - ... 'step', - ... 'next', - ... 'next', - ... 'step', - ... 'next', - ... 'continue']): - ... test_function() - > (3)test_main() - -> await test_coro() - (Pdb) step - --Call-- - > (1)test_coro() - -> async def test_coro(): - (Pdb) step - > (2)test_coro() - -> async for x in agen(): - (Pdb) next - > (3)test_coro() - -> print(x) - (Pdb) next - 1 - > (2)test_coro() - -> async for x in agen(): - (Pdb) step - --Call-- - > (2)agen() - -> yield 1 - (Pdb) next - > (3)agen() - -> await asyncio.sleep(0) - (Pdb) continue - 2 - finished - """ + def test_pdb_next_command_for_asyncgen(): + """Testing skip unwindng stack on yield for coroutines for "next" command + + >>> import asyncio + + >>> async def agen(): + ... yield 1 + ... await asyncio.sleep(0) + ... yield 2 + + >>> async def test_coro(): + ... async for x in agen(): + ... print(x) + + >>> async def test_main(): + ... import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + ... await test_coro() + + >>> def test_function(): + ... loop = asyncio.new_event_loop() + ... loop.run_until_complete(test_main()) + ... loop.close() + ... asyncio.set_event_loop_policy(None) + ... print("finished") + + >>> with PdbTestInput(['step', + ... 'step', + ... 'next', + ... 'next', + ... 'step', + ... 'next', + ... 'continue']): + ... test_function() + > (3)test_main() + -> await test_coro() + (Pdb) step + --Call-- + > (1)test_coro() + -> async def test_coro(): + (Pdb) step + > (2)test_coro() + -> async for x in agen(): + (Pdb) next + > (3)test_coro() + -> print(x) + (Pdb) next + 1 + > (2)test_coro() + -> async for x in agen(): + (Pdb) step + --Call-- + > (2)agen() + -> yield 1 + (Pdb) next + > (3)agen() + -> await asyncio.sleep(0) + (Pdb) continue + 2 + finished + """ def test_pdb_return_command_for_generator(): """Testing no unwindng stack on yield for generators @@ -1858,47 +1869,48 @@ def test_pdb_return_command_for_generator(): finished """ -def test_pdb_return_command_for_coroutine(): - """Testing no unwindng stack on yield for coroutines for "return" command - - >>> import asyncio - - >>> async def test_coro(): - ... await asyncio.sleep(0) - ... await asyncio.sleep(0) - ... await asyncio.sleep(0) - - >>> async def test_main(): - ... 
import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() - ... await test_coro() - - >>> def test_function(): - ... loop = asyncio.new_event_loop() - ... loop.run_until_complete(test_main()) - ... loop.close() - ... asyncio.set_event_loop_policy(None) - ... print("finished") - - >>> with PdbTestInput(['step', - ... 'step', - ... 'next', - ... 'continue']): - ... test_function() - > (3)test_main() - -> await test_coro() - (Pdb) step - --Call-- - > (1)test_coro() - -> async def test_coro(): - (Pdb) step - > (2)test_coro() - -> await asyncio.sleep(0) - (Pdb) next - > (3)test_coro() - -> await asyncio.sleep(0) - (Pdb) continue - finished - """ +if not SKIP_ASYNCIO_TESTS: + def test_pdb_return_command_for_coroutine(): + """Testing no unwindng stack on yield for coroutines for "return" command + + >>> import asyncio + + >>> async def test_coro(): + ... await asyncio.sleep(0) + ... await asyncio.sleep(0) + ... await asyncio.sleep(0) + + >>> async def test_main(): + ... import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + ... await test_coro() + + >>> def test_function(): + ... loop = asyncio.new_event_loop() + ... loop.run_until_complete(test_main()) + ... loop.close() + ... asyncio.set_event_loop_policy(None) + ... print("finished") + + >>> with PdbTestInput(['step', + ... 'step', + ... 'next', + ... 'continue']): + ... test_function() + > (3)test_main() + -> await test_coro() + (Pdb) step + --Call-- + > (1)test_coro() + -> async def test_coro(): + (Pdb) step + > (2)test_coro() + -> await asyncio.sleep(0) + (Pdb) next + > (3)test_coro() + -> await asyncio.sleep(0) + (Pdb) continue + finished + """ def test_pdb_until_command_for_generator(): """Testing no unwindng stack on yield for generators @@ -1944,52 +1956,53 @@ def test_pdb_until_command_for_generator(): finished """ -def test_pdb_until_command_for_coroutine(): - """Testing no unwindng stack for coroutines - for "until" command if target breakpoint is not reached - - >>> import asyncio - - >>> async def test_coro(): - ... print(0) - ... await asyncio.sleep(0) - ... print(1) - ... await asyncio.sleep(0) - ... print(2) - ... await asyncio.sleep(0) - ... print(3) - - >>> async def test_main(): - ... import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() - ... await test_coro() - - >>> def test_function(): - ... loop = asyncio.new_event_loop() - ... loop.run_until_complete(test_main()) - ... loop.close() - ... asyncio.set_event_loop_policy(None) - ... print("finished") - - >>> with PdbTestInput(['step', - ... 'until 8', - ... 'continue']): - ... test_function() - > (3)test_main() - -> await test_coro() - (Pdb) step - --Call-- - > (1)test_coro() - -> async def test_coro(): - (Pdb) until 8 - 0 - 1 - 2 - > (8)test_coro() - -> print(3) - (Pdb) continue - 3 - finished - """ +if not SKIP_ASYNCIO_TESTS: + def test_pdb_until_command_for_coroutine(): + """Testing no unwindng stack for coroutines + for "until" command if target breakpoint is not reached + + >>> import asyncio + + >>> async def test_coro(): + ... print(0) + ... await asyncio.sleep(0) + ... print(1) + ... await asyncio.sleep(0) + ... print(2) + ... await asyncio.sleep(0) + ... print(3) + + >>> async def test_main(): + ... import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + ... await test_coro() + + >>> def test_function(): + ... loop = asyncio.new_event_loop() + ... loop.run_until_complete(test_main()) + ... loop.close() + ... asyncio.set_event_loop_policy(None) + ... print("finished") + + >>> with PdbTestInput(['step', + ... 'until 8', + ... 
'continue']): + ... test_function() + > (3)test_main() + -> await test_coro() + (Pdb) step + --Call-- + > (1)test_coro() + -> async def test_coro(): + (Pdb) until 8 + 0 + 1 + 2 + > (8)test_coro() + -> print(3) + (Pdb) continue + 3 + finished + """ def test_pdb_next_command_in_generator_for_loop(): """The next command on returning from a generator controlled by a for loop. diff --git a/Lib/test/test_peepholer.py b/Lib/test/test_peepholer.py index b825b27060e86b..76a6f25c34bbd3 100644 --- a/Lib/test/test_peepholer.py +++ b/Lib/test/test_peepholer.py @@ -1131,6 +1131,46 @@ def test_no_dead_store_elimination_in_different_lineno(self): ] self.cfg_optimization_test(insts, expected_insts, consts=list(range(3)), nlocals=1) + def test_unconditional_jump_threading(self): + + def get_insts(lno1, lno2, op1, op2): + return [ + lbl2 := self.Label(), + ('LOAD_NAME', 0, 10), + (op1, lbl1 := self.Label(), lno1), + ('LOAD_NAME', 1, 20), + lbl1, + (op2, lbl2, lno2), + ] + + + for op1 in ('JUMP', 'JUMP_NO_INTERRUPT'): + for op2 in ('JUMP', 'JUMP_NO_INTERRUPT'): + # different lines + lno1, lno2 = (4, 5) + with self.subTest(lno = (lno1, lno2), ops = (op1, op2)): + insts = get_insts(lno1, lno2, op1, op2) + expected_insts = [ + ('LOAD_NAME', 0, 10), + ('NOP', 0, 4), + (op2, 0, 5), + ] + self.cfg_optimization_test(insts, expected_insts, consts=list(range(5))) + + # Threading + for lno1, lno2 in [(-1, -1), (-1, 5), (6, -1), (7, 7)]: + with self.subTest(lno = (lno1, lno2), ops = (op1, op2)): + insts = get_insts(lno1, lno2, op1, op2) + lno = lno1 if lno1 != -1 else lno2 + if lno == -1: + lno = 10 # Propagated from the line before + + op = 'JUMP' if 'JUMP' in (op1, op2) else 'JUMP_NO_INTERRUPT' + expected_insts = [ + ('LOAD_NAME', 0, 10), + (op, 0, lno), + ] + self.cfg_optimization_test(insts, expected_insts, consts=list(range(5))) if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_pickle.py b/Lib/test/test_pickle.py index 1a55da39bdc58d..f6405d6dd44ef6 100644 --- a/Lib/test/test_pickle.py +++ b/Lib/test/test_pickle.py @@ -122,6 +122,7 @@ class PyIdPersPicklerTests(AbstractIdentityPersistentPicklerTests, pickler = pickle._Pickler unpickler = pickle._Unpickler + persistent_load_error = pickle.UnpicklingError @support.cpython_only def test_pickler_reference_cycle(self): @@ -176,7 +177,6 @@ class DispatchTable: support.gc_collect() self.assertIsNone(table_ref()) - @support.cpython_only def test_unpickler_reference_cycle(self): def check(Unpickler): @@ -206,6 +206,28 @@ def persistent_load(pid): return pid check(PersUnpickler) + def test_pickler_super(self): + class PersPickler(self.pickler): + def persistent_id(subself, obj): + self.assertIsNone(super().persistent_id(obj)) + return obj + + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + f = io.BytesIO() + pickler = PersPickler(f, proto) + pickler.dump('abc') + self.assertEqual(self.loads(f.getvalue()), 'abc') + + def test_unpickler_super(self): + class PersUnpickler(self.unpickler): + def persistent_load(subself, pid): + with self.assertRaises(self.persistent_load_error): + super().persistent_load(pid) + return pid + + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + unpickler = PersUnpickler(io.BytesIO(self.dumps('abc', proto))) + self.assertEqual(unpickler.load(), 'abc') class PyPicklerUnpicklerObjectTests(AbstractPicklerUnpicklerObjectTests, unittest.TestCase): @@ -256,6 +278,7 @@ class CPersPicklerTests(PyPersPicklerTests): class CIdPersPicklerTests(PyIdPersPicklerTests): pickler = _pickle.Pickler unpickler = _pickle.Unpickler + 
persistent_load_error = _pickle.UnpicklingError class CDumpPickle_LoadPickle(PyPicklerTests): pickler = _pickle.Pickler @@ -326,7 +349,7 @@ class SizeofTests(unittest.TestCase): check_sizeof = support.check_sizeof def test_pickler(self): - basesize = support.calcobjsize('7P2n3i2n3i2P') + basesize = support.calcobjsize('6P2n3i2n3i2P') p = _pickle.Pickler(io.BytesIO()) self.assertEqual(object.__sizeof__(p), basesize) MT_size = struct.calcsize('3nP0n') @@ -343,7 +366,7 @@ def test_pickler(self): 0) # Write buffer is cleared after every dump(). def test_unpickler(self): - basesize = support.calcobjsize('2P2n2P 2P2n2i5P 2P3n8P2n2i') + basesize = support.calcobjsize('2P2nP 2P2n2i5P 2P3n8P2n2i') unpickler = _pickle.Unpickler P = struct.calcsize('P') # Size of memo table entry. n = struct.calcsize('n') # Size of mark table entry. diff --git a/Lib/test/test_plistlib.py b/Lib/test/test_plistlib.py index 1d2e14a30c4e13..f47982907def21 100644 --- a/Lib/test/test_plistlib.py +++ b/Lib/test/test_plistlib.py @@ -510,6 +510,19 @@ def test_bytes(self): data2 = plistlib.dumps(pl2) self.assertEqual(data, data2) + def test_loads_str_with_xml_fmt(self): + pl = self._create() + b = plistlib.dumps(pl) + s = b.decode() + self.assertIsInstance(s, str) + pl2 = plistlib.loads(s) + self.assertEqual(pl, pl2) + + def test_loads_str_with_binary_fmt(self): + msg = "value must be bytes-like object when fmt is FMT_BINARY" + with self.assertRaisesRegex(TypeError, msg): + plistlib.loads('test', fmt=plistlib.FMT_BINARY) + def test_indentation_array(self): data = [[[[[[[[{'test': b'aaaaaa'}]]]]]]]] self.assertEqual(plistlib.loads(plistlib.dumps(data)), data) diff --git a/Lib/test/test_regrtest.py b/Lib/test/test_regrtest.py index e828941f6c779d..89562fa5eac62c 100644 --- a/Lib/test/test_regrtest.py +++ b/Lib/test/test_regrtest.py @@ -845,6 +845,8 @@ def test_tools_buildbot_test(self): test_args.append('-x64') # 64-bit build if not support.Py_DEBUG: test_args.append('+d') # Release build, use python.exe + if sysconfig.get_config_var("Py_GIL_DISABLED"): + test_args.append('--disable-gil') self.run_batch(script, *test_args, *self.tests) @unittest.skipUnless(sys.platform == 'win32', 'Windows only') @@ -862,6 +864,8 @@ def test_pcbuild_rt(self): rt_args.append('-x64') # 64-bit build if support.Py_DEBUG: rt_args.append('-d') # Debug build, use python_d.exe + if sysconfig.get_config_var("Py_GIL_DISABLED"): + rt_args.append('--disable-gil') self.run_batch(script, *rt_args, *self.regrtest_args, *self.tests) diff --git a/Lib/test/test_richcmp.py b/Lib/test/test_richcmp.py index 6fb31c80d7e670..5f449cdc05c6ba 100644 --- a/Lib/test/test_richcmp.py +++ b/Lib/test/test_richcmp.py @@ -221,7 +221,7 @@ def do(bad): self.assertRaises(Exc, func, Bad()) @support.no_tracing - @support.infinite_recursion() + @support.infinite_recursion(25) def test_recursion(self): # Check that comparison for recursive objects fails gracefully from collections import UserList diff --git a/Lib/test/test_runpy.py b/Lib/test/test_runpy.py index 628c8cae38a751..57fe859e366b5b 100644 --- a/Lib/test/test_runpy.py +++ b/Lib/test/test_runpy.py @@ -661,8 +661,10 @@ def test_basic_script_with_path_object(self): mod_name = 'script' script_name = pathlib.Path(self._make_test_script(script_dir, mod_name)) - self._check_script(script_name, "", script_name, - script_name, expect_spec=False) + self._check_script(script_name, "", + os.fsdecode(script_name), + os.fsdecode(script_name), + expect_spec=False) def test_basic_script_no_suffix(self): with temp_dir() as script_dir: diff 
--git a/Lib/test/test_site.py b/Lib/test/test_site.py index 9f199d9069d207..0502181854f52b 100644 --- a/Lib/test/test_site.py +++ b/Lib/test/test_site.py @@ -7,6 +7,7 @@ import unittest import test.support from test import support +from test.support.script_helper import assert_python_ok from test.support import os_helper from test.support import socket_helper from test.support import captured_stderr @@ -18,6 +19,7 @@ import os import re import shutil +import stat import subprocess import sys import sysconfig @@ -194,6 +196,45 @@ def test_addsitedir(self): finally: pth_file.cleanup() + def test_addsitedir_dotfile(self): + pth_file = PthFile('.dotfile') + pth_file.cleanup(prep=True) + try: + pth_file.create() + site.addsitedir(pth_file.base_dir, set()) + self.assertNotIn(site.makepath(pth_file.good_dir_path)[0], sys.path) + self.assertIn(pth_file.base_dir, sys.path) + finally: + pth_file.cleanup() + + @unittest.skipUnless(hasattr(os, 'chflags'), 'test needs os.chflags()') + def test_addsitedir_hidden_flags(self): + pth_file = PthFile() + pth_file.cleanup(prep=True) + try: + pth_file.create() + st = os.stat(pth_file.file_path) + os.chflags(pth_file.file_path, st.st_flags | stat.UF_HIDDEN) + site.addsitedir(pth_file.base_dir, set()) + self.assertNotIn(site.makepath(pth_file.good_dir_path)[0], sys.path) + self.assertIn(pth_file.base_dir, sys.path) + finally: + pth_file.cleanup() + + @unittest.skipUnless(sys.platform == 'win32', 'test needs Windows') + @support.requires_subprocess() + def test_addsitedir_hidden_file_attribute(self): + pth_file = PthFile() + pth_file.cleanup(prep=True) + try: + pth_file.create() + subprocess.check_call(['attrib', '+H', pth_file.file_path]) + site.addsitedir(pth_file.base_dir, set()) + self.assertNotIn(site.makepath(pth_file.good_dir_path)[0], sys.path) + self.assertIn(pth_file.base_dir, sys.path) + finally: + pth_file.cleanup() + # This tests _getuserbase, hence the double underline # to distinguish from a test for getuserbase def test__getuserbase(self): @@ -338,6 +379,19 @@ def test_no_home_directory(self): mock_addsitedir.assert_not_called() self.assertFalse(known_paths) + def test_gethistoryfile(self): + filename = 'file' + rc, out, err = assert_python_ok('-c', + f'import site; assert site.gethistoryfile() == "{filename}"', + PYTHON_HISTORY=filename) + self.assertEqual(rc, 0) + + # Check that PYTHON_HISTORY is ignored in isolated mode. + rc, out, err = assert_python_ok('-I', '-c', + f'import site; assert site.gethistoryfile() != "{filename}"', + PYTHON_HISTORY=filename) + self.assertEqual(rc, 0) + def test_trace(self): message = "bla-bla-bla" for verbose, out in (True, message + "\n"), (False, ""): diff --git a/Lib/test/test_sqlite3/test_dump.py b/Lib/test/test_sqlite3/test_dump.py index 14a18c1ad37102..2e1f0b80c10f46 100644 --- a/Lib/test/test_sqlite3/test_dump.py +++ b/Lib/test/test_sqlite3/test_dump.py @@ -20,7 +20,8 @@ def test_table_dump(self): , "CREATE TABLE t1(id integer primary key, s1 text, " \ "t1_i1 integer not null, i2 integer, unique (s1), " \ - "constraint t1_idx1 unique (i2));" + "constraint t1_idx1 unique (i2), " \ + "constraint t1_i1_idx1 unique (t1_i1));" , "INSERT INTO \"t1\" VALUES(1,'foo',10,20);" , @@ -30,6 +31,9 @@ def test_table_dump(self): "t2_i2 integer, primary key (id)," \ "foreign key(t2_i1) references t1(t1_i1));" , + # Foreign key violation. 
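For context, Connection.iterdump() yields the SQL needed to recreate the database; the surrounding hunks add a foreign-key-violating row and expect a leading "PRAGMA foreign_keys=OFF;" line in the dump framing. A hedged sketch with a throwaway in-memory database (the pragma line only appears on interpreters carrying this change):

import sqlite3

# Illustrative only: dump a tiny in-memory database as SQL statements.
cx = sqlite3.connect(':memory:')
cx.execute('CREATE TABLE t(id INTEGER PRIMARY KEY, name TEXT)')
cx.execute("INSERT INTO t VALUES (1, 'spam')")
for line in cx.iterdump():
    print(line)
cx.close()
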
+ "INSERT INTO \"t2\" VALUES(1,2,3);" + , "CREATE TRIGGER trigger_1 update of t1_i1 on t1 " \ "begin " \ "update t2 set t2_i1 = new.t1_i1 where t2_i1 = old.t1_i1; " \ @@ -41,8 +45,12 @@ def test_table_dump(self): [self.cu.execute(s) for s in expected_sqls] i = self.cx.iterdump() actual_sqls = [s for s in i] - expected_sqls = ['BEGIN TRANSACTION;'] + expected_sqls + \ - ['COMMIT;'] + expected_sqls = [ + "PRAGMA foreign_keys=OFF;", + "BEGIN TRANSACTION;", + *expected_sqls, + "COMMIT;", + ] [self.assertEqual(expected_sqls[i], actual_sqls[i]) for i in range(len(expected_sqls))] diff --git a/Lib/test/test_stat.py b/Lib/test/test_stat.py index a0d0f61e5a192c..d6b6dd6e741700 100644 --- a/Lib/test/test_stat.py +++ b/Lib/test/test_stat.py @@ -15,8 +15,10 @@ class TestFilemode: statmod = None file_flags = {'SF_APPEND', 'SF_ARCHIVED', 'SF_IMMUTABLE', 'SF_NOUNLINK', - 'SF_SNAPSHOT', 'UF_APPEND', 'UF_COMPRESSED', 'UF_HIDDEN', - 'UF_IMMUTABLE', 'UF_NODUMP', 'UF_NOUNLINK', 'UF_OPAQUE'} + 'SF_SNAPSHOT', 'SF_SETTABLE', 'SF_RESTRICTED', 'SF_FIRMLINK', + 'SF_DATALESS', 'UF_APPEND', 'UF_COMPRESSED', 'UF_HIDDEN', + 'UF_IMMUTABLE', 'UF_NODUMP', 'UF_NOUNLINK', 'UF_OPAQUE', + 'UF_SETTABLE', 'UF_TRACKED', 'UF_DATAVAULT'} formats = {'S_IFBLK', 'S_IFCHR', 'S_IFDIR', 'S_IFIFO', 'S_IFLNK', 'S_IFREG', 'S_IFSOCK', 'S_IFDOOR', 'S_IFPORT', 'S_IFWHT'} @@ -239,6 +241,18 @@ def test_module_attributes(self): self.assertTrue(callable(func)) self.assertEqual(func(0), 0) + def test_flags_consistent(self): + self.assertFalse(self.statmod.UF_SETTABLE & self.statmod.SF_SETTABLE) + + for flag in self.file_flags: + if flag.startswith("UF"): + self.assertTrue(getattr(self.statmod, flag) & self.statmod.UF_SETTABLE, f"{flag} not in UF_SETTABLE") + elif sys.platform == 'darwin' and self.statmod is c_stat and flag == 'SF_DATALESS': + self.assertTrue(self.statmod.SF_DATALESS & self.statmod.SF_SYNTHETIC, "SF_DATALESS not in SF_SYNTHETIC") + self.assertFalse(self.statmod.SF_DATALESS & self.statmod.SF_SETTABLE, "SF_DATALESS in SF_SETTABLE") + else: + self.assertTrue(getattr(self.statmod, flag) & self.statmod.SF_SETTABLE, f"{flag} notin SF_SETTABLE") + @unittest.skipUnless(sys.platform == "win32", "FILE_ATTRIBUTE_* constants are Win32 specific") def test_file_attribute_constants(self): @@ -247,6 +261,66 @@ def test_file_attribute_constants(self): modvalue = getattr(self.statmod, key) self.assertEqual(value, modvalue, key) + @unittest.skipUnless(sys.platform == "darwin", "macOS system check") + def test_macosx_attribute_values(self): + self.assertEqual(self.statmod.UF_SETTABLE, 0x0000ffff) + self.assertEqual(self.statmod.UF_NODUMP, 0x00000001) + self.assertEqual(self.statmod.UF_IMMUTABLE, 0x00000002) + self.assertEqual(self.statmod.UF_APPEND, 0x00000004) + self.assertEqual(self.statmod.UF_OPAQUE, 0x00000008) + self.assertEqual(self.statmod.UF_COMPRESSED, 0x00000020) + self.assertEqual(self.statmod.UF_TRACKED, 0x00000040) + self.assertEqual(self.statmod.UF_DATAVAULT, 0x00000080) + self.assertEqual(self.statmod.UF_HIDDEN, 0x00008000) + + if self.statmod is c_stat: + self.assertEqual(self.statmod.SF_SUPPORTED, 0x009f0000) + self.assertEqual(self.statmod.SF_SETTABLE, 0x3fff0000) + self.assertEqual(self.statmod.SF_SYNTHETIC, 0xc0000000) + else: + self.assertEqual(self.statmod.SF_SETTABLE, 0xffff0000) + self.assertEqual(self.statmod.SF_ARCHIVED, 0x00010000) + self.assertEqual(self.statmod.SF_IMMUTABLE, 0x00020000) + self.assertEqual(self.statmod.SF_APPEND, 0x00040000) + self.assertEqual(self.statmod.SF_RESTRICTED, 0x00080000) + 
self.assertEqual(self.statmod.SF_NOUNLINK, 0x00100000) + self.assertEqual(self.statmod.SF_FIRMLINK, 0x00800000) + self.assertEqual(self.statmod.SF_DATALESS, 0x40000000) + + self.assertFalse(isinstance(self.statmod.S_IFMT, int)) + self.assertEqual(self.statmod.S_IFIFO, 0o010000) + self.assertEqual(self.statmod.S_IFCHR, 0o020000) + self.assertEqual(self.statmod.S_IFDIR, 0o040000) + self.assertEqual(self.statmod.S_IFBLK, 0o060000) + self.assertEqual(self.statmod.S_IFREG, 0o100000) + self.assertEqual(self.statmod.S_IFLNK, 0o120000) + self.assertEqual(self.statmod.S_IFSOCK, 0o140000) + + if self.statmod is c_stat: + self.assertEqual(self.statmod.S_IFWHT, 0o160000) + + self.assertEqual(self.statmod.S_IRWXU, 0o000700) + self.assertEqual(self.statmod.S_IRUSR, 0o000400) + self.assertEqual(self.statmod.S_IWUSR, 0o000200) + self.assertEqual(self.statmod.S_IXUSR, 0o000100) + self.assertEqual(self.statmod.S_IRWXG, 0o000070) + self.assertEqual(self.statmod.S_IRGRP, 0o000040) + self.assertEqual(self.statmod.S_IWGRP, 0o000020) + self.assertEqual(self.statmod.S_IXGRP, 0o000010) + self.assertEqual(self.statmod.S_IRWXO, 0o000007) + self.assertEqual(self.statmod.S_IROTH, 0o000004) + self.assertEqual(self.statmod.S_IWOTH, 0o000002) + self.assertEqual(self.statmod.S_IXOTH, 0o000001) + self.assertEqual(self.statmod.S_ISUID, 0o004000) + self.assertEqual(self.statmod.S_ISGID, 0o002000) + self.assertEqual(self.statmod.S_ISVTX, 0o001000) + + self.assertFalse(hasattr(self.statmod, "S_ISTXT")) + self.assertEqual(self.statmod.S_IREAD, self.statmod.S_IRUSR) + self.assertEqual(self.statmod.S_IWRITE, self.statmod.S_IWUSR) + self.assertEqual(self.statmod.S_IEXEC, self.statmod.S_IXUSR) + + @unittest.skipIf(c_stat is None, 'need _stat extension') class TestFilemodeCStat(TestFilemode, unittest.TestCase): diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py index 6d3228bf92f8ca..c44a778d5bbefe 100644 --- a/Lib/test/test_subprocess.py +++ b/Lib/test/test_subprocess.py @@ -791,6 +791,19 @@ def test_env(self): stdout, stderr = p.communicate() self.assertEqual(stdout, b"orange") + @unittest.skipUnless(sys.platform == "win32", "Windows only issue") + def test_win32_duplicate_envs(self): + newenv = os.environ.copy() + newenv["fRUit"] = "cherry" + newenv["fruit"] = "lemon" + newenv["FRUIT"] = "orange" + newenv["frUit"] = "banana" + with subprocess.Popen(["CMD", "/c", "SET", "fruit"], + stdout=subprocess.PIPE, + env=newenv) as p: + stdout, _ = p.communicate() + self.assertEqual(stdout.strip(), b"frUit=banana") + # Windows requires at least the SYSTEMROOT environment variable to start # Python @unittest.skipIf(sys.platform == 'win32', @@ -822,6 +835,26 @@ def is_env_var_to_ignore(n): if not is_env_var_to_ignore(k)] self.assertEqual(child_env_names, []) + @unittest.skipIf(sysconfig.get_config_var('Py_ENABLE_SHARED') == 1, + 'The Python shared library cannot be loaded ' + 'without some system environments.') + @unittest.skipIf(check_sanitizer(address=True), + 'AddressSanitizer adds to the environment.') + def test_one_environment_variable(self): + newenv = {'fruit': 'orange'} + cmd = [sys.executable, '-c', + 'import sys,os;' + 'sys.stdout.write("fruit="+os.getenv("fruit"))'] + if sys.platform == "win32": + cmd = ["CMD", "/c", "SET", "fruit"] + with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=newenv) as p: + stdout, stderr = p.communicate() + if p.returncode and support.verbose: + print("STDOUT:", stdout.decode("ascii", "replace")) + print("STDERR:", stderr.decode("ascii", "replace")) + 
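As a rough guide to what test_one_environment_variable checks: passing env= replaces the child's entire environment. The sketch below is a hedged, POSIX-flavoured illustration; shared-library and Windows builds may need extra variables to start the interpreter, which is exactly what the test's skips account for.

import subprocess
import sys

# Illustrative only: the child sees just the variables we pass; this may
# fail on builds that need more of the parent environment to start up.
proc = subprocess.run(
    [sys.executable, '-c', "import os; print('fruit=' + os.getenv('fruit', ''))"],
    env={'fruit': 'orange'}, capture_output=True, text=True,
)
print(proc.stdout.strip())  # fruit=orange
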
self.assertEqual(p.returncode, 0) + self.assertEqual(stdout.strip(), b"fruit=orange") + def test_invalid_cmd(self): # null character in the command name cmd = sys.executable + '\0' @@ -862,6 +895,19 @@ def test_invalid_env(self): stdout, stderr = p.communicate() self.assertEqual(stdout, b"orange=lemon") + @unittest.skipUnless(sys.platform == "win32", "Windows only issue") + def test_win32_invalid_env(self): + # '=' in the environment variable name + newenv = os.environ.copy() + newenv["FRUIT=VEGETABLE"] = "cabbage" + with self.assertRaises(ValueError): + subprocess.Popen(ZERO_RETURN_CMD, env=newenv) + + newenv = os.environ.copy() + newenv["==FRUIT"] = "cabbage" + with self.assertRaises(ValueError): + subprocess.Popen(ZERO_RETURN_CMD, env=newenv) + def test_communicate_stdin(self): p = subprocess.Popen([sys.executable, "-c", 'import sys;' @@ -1945,9 +1991,9 @@ def test_process_group_0(self): @unittest.skipUnless(hasattr(os, 'setreuid'), 'no setreuid on platform') def test_user(self): - # For code coverage of the user parameter. We don't care if we get an - # EPERM error from it depending on the test execution environment, that - # still indicates that it was called. + # For code coverage of the user parameter. We don't care if we get a + # permission error from it depending on the test execution environment, + # that still indicates that it was called. uid = os.geteuid() test_users = [65534 if uid != 65534 else 65533, uid] @@ -1971,11 +2017,11 @@ def test_user(self): "import os; print(os.getuid())"], user=user, close_fds=close_fds) - except PermissionError: # (EACCES, EPERM) - pass - except OSError as e: - if e.errno not in (errno.EACCES, errno.EPERM): - raise + except PermissionError as e: # (EACCES, EPERM) + if e.errno == errno.EACCES: + self.assertEqual(e.filename, sys.executable) + else: + self.assertIsNone(e.filename) else: if isinstance(user, str): user_uid = pwd.getpwnam(user).pw_uid @@ -2019,8 +2065,8 @@ def test_group(self): "import os; print(os.getgid())"], group=group, close_fds=close_fds) - except PermissionError: # (EACCES, EPERM) - pass + except PermissionError as e: # (EACCES, EPERM) + self.assertIsNone(e.filename) else: if isinstance(group, str): group_gid = grp.getgrnam(group).gr_gid @@ -2068,7 +2114,8 @@ def _test_extra_groups_impl(self, *, gid, group_list): [sys.executable, "-c", "import os, sys, json; json.dump(os.getgroups(), sys.stdout)"], extra_groups=group_list) - except PermissionError: + except PermissionError as e: + self.assertIsNone(e.filename) self.skipTest("setgroup() EPERM; this test may require root.") else: parent_groups = os.getgroups() diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index 83cbf5ec865dbb..ee9b873d9023f0 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -259,6 +259,36 @@ Traceback (most recent call last): SyntaxError: invalid syntax +Comprehensions without 'in' keyword: + +>>> [x for x if range(1)] +Traceback (most recent call last): +SyntaxError: 'in' expected after for-loop variables + +>>> tuple(x for x if range(1)) +Traceback (most recent call last): +SyntaxError: 'in' expected after for-loop variables + +>>> [x for x() in a] +Traceback (most recent call last): +SyntaxError: cannot assign to function call + +>>> [x for a, b, (c + 1, d()) in y] +Traceback (most recent call last): +SyntaxError: cannot assign to expression + +>>> [x for a, b, (c + 1, d()) if y] +Traceback (most recent call last): +SyntaxError: 'in' expected after for-loop variables + +>>> [x for x+1 in y] +Traceback (most recent call 
last): +SyntaxError: cannot assign to expression + +>>> [x for x+1, x() in y] +Traceback (most recent call last): +SyntaxError: cannot assign to expression + Comprehensions creating tuples without parentheses should produce a specialized error message: diff --git a/Lib/test/test_sys_settrace.py b/Lib/test/test_sys_settrace.py index fc5ca72236b1fb..ae6e192a7ab6ef 100644 --- a/Lib/test/test_sys_settrace.py +++ b/Lib/test/test_sys_settrace.py @@ -3037,10 +3037,8 @@ def test_trace_unpack_long_sequence(self): self.assertEqual(counts, {'call': 1, 'line': 301, 'return': 1}) def test_trace_lots_of_globals(self): - count = 1000 - if _testinternalcapi is not None: - remaining = _testinternalcapi.get_c_recursion_remaining() - count = min(count, remaining) + + count = min(1000, int(support.Py_C_RECURSION_LIMIT * 0.8)) code = """if 1: def f(): diff --git a/Lib/test/test_tkinter/test_misc.py b/Lib/test/test_tkinter/test_misc.py index 6639eaaa59936a..dc8a810235fc9b 100644 --- a/Lib/test/test_tkinter/test_misc.py +++ b/Lib/test/test_tkinter/test_misc.py @@ -281,6 +281,18 @@ def assertApprox(col1, col2): with self.assertRaises(tkinter.TclError): rgb((111, 78, 55)) + def test_winfo_pathname(self): + t = tkinter.Toplevel(self.root) + w = tkinter.Button(t) + wid = w.winfo_id() + self.assertIsInstance(wid, int) + self.assertEqual(self.root.winfo_pathname(hex(wid)), str(w)) + self.assertEqual(self.root.winfo_pathname(hex(wid), displayof=None), str(w)) + self.assertEqual(self.root.winfo_pathname(hex(wid), displayof=t), str(w)) + self.assertEqual(self.root.winfo_pathname(wid), str(w)) + self.assertEqual(self.root.winfo_pathname(wid, displayof=None), str(w)) + self.assertEqual(self.root.winfo_pathname(wid, displayof=t), str(w)) + def test_event_repr_defaults(self): e = tkinter.Event() e.serial = 12345 diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py index a6708119b81191..372fc48bf81a6a 100644 --- a/Lib/test/test_traceback.py +++ b/Lib/test/test_traceback.py @@ -2209,6 +2209,20 @@ def __repr__(self): err_msg = "b'please do not show me as numbers'" self.assertEqual(self.get_report(e), vanilla + err_msg + '\n') + # an exception with a broken __getattr__ raising a non expected error + class BrokenException(Exception): + broken = False + def __getattr__(self, name): + if self.broken: + raise ValueError(f'no {name}') + + e = BrokenException(123) + vanilla = self.get_report(e) + e.broken = True + self.assertEqual( + self.get_report(e), + vanilla + "Ignored error getting __notes__: ValueError('no __notes__')\n") + def test_exception_with_multiple_notes(self): for e in [ValueError(42), SyntaxError('bad syntax')]: with self.subTest(e=e): diff --git a/Lib/test/test_type_cache.py b/Lib/test/test_type_cache.py index 72587ecc11b6f3..295df78a17374a 100644 --- a/Lib/test/test_type_cache.py +++ b/Lib/test/test_type_cache.py @@ -1,5 +1,6 @@ """ Tests for the internal type cache in CPython. """ import unittest +import dis from test import support from test.support import import_helper try: @@ -8,8 +9,11 @@ _clear_type_cache = None # Skip this test if the _testcapi module isn't available. 
-type_get_version = import_helper.import_module('_testcapi').type_get_version -type_assign_version = import_helper.import_module('_testcapi').type_assign_version +_testcapi = import_helper.import_module("_testcapi") +type_get_version = _testcapi.type_get_version +type_assign_specific_version_unsafe = _testcapi.type_assign_specific_version_unsafe +type_assign_version = _testcapi.type_assign_version +type_modified = _testcapi.type_modified @support.cpython_only @@ -56,6 +60,183 @@ class C: self.assertNotEqual(type_get_version(C), 0) self.assertNotEqual(type_get_version(C), c_ver) + def test_type_assign_specific_version(self): + """meta-test for type_assign_specific_version_unsafe""" + class C: + pass + + type_assign_version(C) + orig_version = type_get_version(C) + if orig_version == 0: + self.skipTest("Could not assign a valid type version") + + type_modified(C) + type_assign_specific_version_unsafe(C, orig_version + 5) + type_assign_version(C) # this should do nothing + + new_version = type_get_version(C) + self.assertEqual(new_version, orig_version + 5) + + _clear_type_cache() + + +@support.cpython_only +class TypeCacheWithSpecializationTests(unittest.TestCase): + def tearDown(self): + _clear_type_cache() + + def _assign_valid_version_or_skip(self, type_): + type_modified(type_) + type_assign_version(type_) + if type_get_version(type_) == 0: + self.skipTest("Could not assign valid type version") + + def _assign_and_check_version_0(self, user_type): + type_modified(user_type) + type_assign_specific_version_unsafe(user_type, 0) + self.assertEqual(type_get_version(user_type), 0) + + def _all_opnames(self, func): + return set(instr.opname for instr in dis.Bytecode(func, adaptive=True)) + + def _check_specialization(self, func, arg, opname, *, should_specialize): + for _ in range(100): + func(arg) + + if should_specialize: + self.assertNotIn(opname, self._all_opnames(func)) + else: + self.assertIn(opname, self._all_opnames(func)) + + def test_class_load_attr_specialization_user_type(self): + class A: + def foo(self): + pass + + self._assign_valid_version_or_skip(A) + + def load_foo_1(type_): + type_.foo + + self._check_specialization(load_foo_1, A, "LOAD_ATTR", should_specialize=True) + del load_foo_1 + + self._assign_and_check_version_0(A) + + def load_foo_2(type_): + return type_.foo + + self._check_specialization(load_foo_2, A, "LOAD_ATTR", should_specialize=False) + + def test_class_load_attr_specialization_static_type(self): + self._assign_valid_version_or_skip(str) + self._assign_valid_version_or_skip(bytes) + + def get_capitalize_1(type_): + return type_.capitalize + + self._check_specialization(get_capitalize_1, str, "LOAD_ATTR", should_specialize=True) + self.assertEqual(get_capitalize_1(str)('hello'), 'Hello') + self.assertEqual(get_capitalize_1(bytes)(b'hello'), b'Hello') + del get_capitalize_1 + + # Permanently overflow the static type version counter, and force str and bytes + # to have tp_version_tag == 0 + for _ in range(2**16): + type_modified(str) + type_assign_version(str) + type_modified(bytes) + type_assign_version(bytes) + + self.assertEqual(type_get_version(str), 0) + self.assertEqual(type_get_version(bytes), 0) + + def get_capitalize_2(type_): + return type_.capitalize + + self._check_specialization(get_capitalize_2, str, "LOAD_ATTR", should_specialize=False) + self.assertEqual(get_capitalize_2(str)('hello'), 'Hello') + self.assertEqual(get_capitalize_2(bytes)(b'hello'), b'Hello') + + def test_property_load_attr_specialization_user_type(self): + class G: + @property + 
def x(self): + return 9 + + self._assign_valid_version_or_skip(G) + + def load_x_1(instance): + instance.x + + self._check_specialization(load_x_1, G(), "LOAD_ATTR", should_specialize=True) + del load_x_1 + + self._assign_and_check_version_0(G) + + def load_x_2(instance): + instance.x + + self._check_specialization(load_x_2, G(), "LOAD_ATTR", should_specialize=False) + + def test_store_attr_specialization_user_type(self): + class B: + __slots__ = ("bar",) + + self._assign_valid_version_or_skip(B) + + def store_bar_1(type_): + type_.bar = 10 + + self._check_specialization(store_bar_1, B(), "STORE_ATTR", should_specialize=True) + del store_bar_1 + + self._assign_and_check_version_0(B) + + def store_bar_2(type_): + type_.bar = 10 + + self._check_specialization(store_bar_2, B(), "STORE_ATTR", should_specialize=False) + + def test_class_call_specialization_user_type(self): + class F: + def __init__(self): + pass + + self._assign_valid_version_or_skip(F) + + def call_class_1(type_): + type_() + + self._check_specialization(call_class_1, F, "CALL", should_specialize=True) + del call_class_1 + + self._assign_and_check_version_0(F) + + def call_class_2(type_): + type_() + + self._check_specialization(call_class_2, F, "CALL", should_specialize=False) + + def test_to_bool_specialization_user_type(self): + class H: + pass + + self._assign_valid_version_or_skip(H) + + def to_bool_1(instance): + not instance + + self._check_specialization(to_bool_1, H(), "TO_BOOL", should_specialize=True) + del to_bool_1 + + self._assign_and_check_version_0(H) + + def to_bool_2(instance): + not instance + + self._check_specialization(to_bool_2, H(), "TO_BOOL", should_specialize=False) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_types.py b/Lib/test/test_types.py index bfecd8eb71220c..1acb2a4d81adf3 100644 --- a/Lib/test/test_types.py +++ b/Lib/test/test_types.py @@ -2263,5 +2263,38 @@ def coro(): 'close', 'throw'})) +class FunctionTests(unittest.TestCase): + def test_function_type_defaults(self): + def ex(a, /, b, *, c): + return a + b + c + + func = types.FunctionType( + ex.__code__, {}, "func", (1, 2), None, {'c': 3}, + ) + + self.assertEqual(func(), 6) + self.assertEqual(func.__defaults__, (1, 2)) + self.assertEqual(func.__kwdefaults__, {'c': 3}) + + func = types.FunctionType( + ex.__code__, {}, "func", None, None, None, + ) + self.assertEqual(func.__defaults__, None) + self.assertEqual(func.__kwdefaults__, None) + + def test_function_type_wrong_defaults(self): + def ex(a, /, b, *, c): + return a + b + c + + with self.assertRaisesRegex(TypeError, 'arg 4'): + types.FunctionType( + ex.__code__, {}, "func", 1, None, {'c': 3}, + ) + with self.assertRaisesRegex(TypeError, 'arg 6'): + types.FunctionType( + ex.__code__, {}, "func", None, None, 3, + ) + + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py index 2d10c39840ddf3..b684af4f33ed71 100644 --- a/Lib/test/test_typing.py +++ b/Lib/test/test_typing.py @@ -3448,8 +3448,8 @@ def meth(self): pass self.assertNotIn("__protocol_attrs__", vars(NonP)) self.assertNotIn("__protocol_attrs__", vars(NonPR)) - self.assertNotIn("__callable_proto_members_only__", vars(NonP)) - self.assertNotIn("__callable_proto_members_only__", vars(NonPR)) + self.assertNotIn("__non_callable_proto_members__", vars(NonP)) + self.assertNotIn("__non_callable_proto_members__", vars(NonPR)) self.assertEqual(get_protocol_members(P), {"x"}) self.assertEqual(get_protocol_members(PR), {"meth"}) @@ -4105,6 +4105,7 @@ def 
method(self) -> None: ... self.assertNotIsInstance(42, ProtocolWithMixedMembers) def test_protocol_issubclass_error_message(self): + @runtime_checkable class Vec2D(Protocol): x: float y: float @@ -4120,6 +4121,39 @@ def square_norm(self) -> float: with self.assertRaisesRegex(TypeError, re.escape(expected_error_message)): issubclass(int, Vec2D) + def test_nonruntime_protocol_interaction_with_evil_classproperty(self): + class classproperty: + def __get__(self, instance, type): + raise RuntimeError("NO") + + class Commentable(Protocol): + evil = classproperty() + + # recognised as a protocol attr, + # but not actually accessed by the protocol metaclass + # (which would raise RuntimeError) for non-runtime protocols. + # See gh-113320 + self.assertEqual(get_protocol_members(Commentable), {"evil"}) + + def test_runtime_protocol_interaction_with_evil_classproperty(self): + class CustomError(Exception): pass + + class classproperty: + def __get__(self, instance, type): + raise CustomError + + with self.assertRaises(TypeError) as cm: + @runtime_checkable + class Commentable(Protocol): + evil = classproperty() + + exc = cm.exception + self.assertEqual( + exc.args[0], + "Failed to determine whether protocol member 'evil' is a method member" + ) + self.assertIs(type(exc.__cause__), CustomError) + class GenericTests(BaseTestCase): @@ -5650,7 +5684,7 @@ def fun(x: a): pass def cmp(o1, o2): return o1 == o2 - with infinite_recursion(): + with infinite_recursion(25): r1 = namespace1() r2 = namespace2() self.assertIsNot(r1, r2) diff --git a/Lib/test/test_unittest/test_program.py b/Lib/test/test_unittest/test_program.py index f6d52f93e4a25f..7241cf59f73d4f 100644 --- a/Lib/test/test_unittest/test_program.py +++ b/Lib/test/test_unittest/test_program.py @@ -167,6 +167,18 @@ def test_ExitAsDefault(self): 'expected failures=1, unexpected successes=1)\n') self.assertTrue(out.endswith(expected)) + def test_ExitSkippedSuite(self): + stream = BufferedWriter() + with self.assertRaises(SystemExit) as cm: + unittest.main( + argv=["foobar", "-k", "testSkipped"], + testRunner=unittest.TextTestRunner(stream=stream), + testLoader=self.TestLoader(self.FooBar)) + self.assertEqual(cm.exception.code, 0) + out = stream.getvalue() + expected = '\n\nOK (skipped=1)\n' + self.assertTrue(out.endswith(expected)) + def test_ExitEmptySuite(self): stream = BufferedWriter() with self.assertRaises(SystemExit) as cm: @@ -447,8 +459,8 @@ def _join(name): def testParseArgsAbsolutePathsThatCannotBeConverted(self): program = self.program - # even on Windows '/...' is considered absolute by os.path.abspath - argv = ['progname', '/foo/bar/baz.py', '/green/red.py'] + drive = os.path.splitdrive(os.getcwd())[0] + argv = ['progname', f'{drive}/foo/bar/baz.py', f'{drive}/green/red.py'] self._patch_isfile(argv) program.createTests = lambda: None diff --git a/Lib/test/test_userdict.py b/Lib/test/test_userdict.py index 9a03f2d04ce970..61e79f553e8ec9 100644 --- a/Lib/test/test_userdict.py +++ b/Lib/test/test_userdict.py @@ -215,7 +215,7 @@ class G(collections.UserDict): # Decorate existing test with recursion limit, because # the test is for C structure, but `UserDict` is a Python structure. 
- test_repr_deep = support.infinite_recursion()( + test_repr_deep = support.infinite_recursion(25)( mapping_tests.TestHashMappingProtocol.test_repr_deep, ) diff --git a/Lib/test/test_userlist.py b/Lib/test/test_userlist.py index 76d253753528b0..312702c8e398b9 100644 --- a/Lib/test/test_userlist.py +++ b/Lib/test/test_userlist.py @@ -69,7 +69,7 @@ def test_userlist_copy(self): # Decorate existing test with recursion limit, because # the test is for C structure, but `UserList` is a Python structure. - test_repr_deep = support.infinite_recursion()( + test_repr_deep = support.infinite_recursion(25)( list_tests.CommonTest.test_repr_deep, ) diff --git a/Lib/test/test_venv.py b/Lib/test/test_venv.py index 8ecb23ff384362..6852625c36c62b 100644 --- a/Lib/test/test_venv.py +++ b/Lib/test/test_venv.py @@ -223,8 +223,14 @@ def test_prompt(self): def test_upgrade_dependencies(self): builder = venv.EnvBuilder() - bin_path = 'Scripts' if sys.platform == 'win32' else 'bin' + bin_path = 'bin' python_exe = os.path.split(sys.executable)[1] + if sys.platform == 'win32': + bin_path = 'Scripts' + if os.path.normcase(os.path.splitext(python_exe)[0]).endswith('_d'): + python_exe = 'python_d.exe' + else: + python_exe = 'python.exe' with tempfile.TemporaryDirectory() as fake_env_dir: expect_exe = os.path.normcase( os.path.join(fake_env_dir, bin_path, python_exe) @@ -283,7 +289,9 @@ def test_sysconfig(self): # build environment ('is_python_build()', str(sysconfig.is_python_build())), ('get_makefile_filename()', sysconfig.get_makefile_filename()), - ('get_config_h_filename()', sysconfig.get_config_h_filename())): + ('get_config_h_filename()', sysconfig.get_config_h_filename()), + ('get_config_var("Py_GIL_DISABLED")', + str(sysconfig.get_config_var("Py_GIL_DISABLED")))): with self.subTest(call): cmd[2] = 'import sysconfig; print(sysconfig.%s)' % call out, err = check_output(cmd, encoding='utf-8') @@ -315,7 +323,9 @@ def test_sysconfig_symlinks(self): # build environment ('is_python_build()', str(sysconfig.is_python_build())), ('get_makefile_filename()', sysconfig.get_makefile_filename()), - ('get_config_h_filename()', sysconfig.get_config_h_filename())): + ('get_config_h_filename()', sysconfig.get_config_h_filename()), + ('get_config_var("Py_GIL_DISABLED")', + str(sysconfig.get_config_var("Py_GIL_DISABLED")))): with self.subTest(call): cmd[2] = 'import sysconfig; print(sysconfig.%s)' % call out, err = check_output(cmd, encoding='utf-8') @@ -324,7 +334,8 @@ def test_sysconfig_symlinks(self): ('executable', self.envpy()), # Usually compare to sys.executable, but if we're running in our own # venv then we really need to compare to our base executable - ('_base_executable', sys._base_executable), + # HACK: Test fails on POSIX with unversioned binary (PR gh-113033) + #('_base_executable', sys._base_executable), ): with self.subTest(attr): cmd[2] = f'import sys; print(sys.{attr})' diff --git a/Lib/test/test_winconsoleio.py b/Lib/test/test_winconsoleio.py index 70a85552cc03b0..72ff9606908ed5 100644 --- a/Lib/test/test_winconsoleio.py +++ b/Lib/test/test_winconsoleio.py @@ -98,6 +98,16 @@ def test_open_name(self): self.assertIsInstance(f, ConIO) f.close() + def test_subclass_repr(self): + class TestSubclass(ConIO): + pass + + f = TestSubclass("CON") + with f: + self.assertIn(TestSubclass.__name__, repr(f)) + + self.assertIn(TestSubclass.__name__, repr(f)) + @unittest.skipIf(sys.getwindowsversion()[:2] <= (6, 1), "test does not work on Windows 7 and earlier") def test_conin_conout_names(self): diff --git 
a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py index 80ee064896f59a..b9e7937b0bbc00 100644 --- a/Lib/test/test_xml_etree.py +++ b/Lib/test/test_xml_etree.py @@ -2535,7 +2535,7 @@ def __eq__(self, o): e.extend([ET.Element('bar')]) self.assertRaises(ValueError, e.remove, X('baz')) - @support.infinite_recursion() + @support.infinite_recursion(25) def test_recursive_repr(self): # Issue #25455 e = ET.Element('foo') diff --git a/Lib/test/test_zipfile/_path/test_path.py b/Lib/test/test_zipfile/_path/test_path.py index c66cb3cba69ebd..171ab6fdb5fc28 100644 --- a/Lib/test/test_zipfile/_path/test_path.py +++ b/Lib/test/test_zipfile/_path/test_path.py @@ -577,3 +577,15 @@ def test_getinfo_missing(self, alpharep): zipfile.Path(alpharep) with self.assertRaises(KeyError): alpharep.getinfo('does-not-exist') + + def test_root_folder_in_zipfile(self): + """ + gh-112795: Some tools or self constructed codes will add '/' folder to + the zip file, this is a strange behavior, but we should support it. + """ + in_memory_file = io.BytesIO() + zf = zipfile.ZipFile(in_memory_file, "w") + zf.mkdir('/') + zf.writestr('./a.txt', 'aaa') + tmpdir = pathlib.Path(self.fixtures.enter_context(temp_dir())) + zf.extractall(tmpdir) diff --git a/Lib/test/test_zipfile/test_core.py b/Lib/test/test_zipfile/test_core.py index a51764b9297363..9bdb08aeabb781 100644 --- a/Lib/test/test_zipfile/test_core.py +++ b/Lib/test/test_zipfile/test_core.py @@ -315,7 +315,7 @@ def test_writestr_compresslevel(self): # Compression level follows the constructor. a_info = zipfp.getinfo('a.txt') self.assertEqual(a_info.compress_type, self.compression) - self.assertEqual(a_info._compresslevel, 1) + self.assertEqual(a_info.compress_level, 1) # Compression level is overridden. b_info = zipfp.getinfo('b.txt') @@ -408,7 +408,7 @@ def test_per_file_compresslevel(self): one_info = zipfp.getinfo('compress_1') nine_info = zipfp.getinfo('compress_9') self.assertEqual(one_info._compresslevel, 1) - self.assertEqual(nine_info._compresslevel, 9) + self.assertEqual(nine_info.compress_level, 9) def test_writing_errors(self): class BrokenFile(io.BytesIO): @@ -2272,6 +2272,66 @@ def test_decompress_without_3rd_party_library(self): with zipfile.ZipFile(zip_file) as zf: self.assertRaises(RuntimeError, zf.extract, 'a.txt') + @requires_zlib() + def test_full_overlap(self): + data = ( + b'PK\x03\x04\x14\x00\x00\x00\x08\x00\xa0lH\x05\xe2\x1e' + b'8\xbb\x10\x00\x00\x00\t\x04\x00\x00\x01\x00\x00\x00a\xed' + b'\xc0\x81\x08\x00\x00\x00\xc00\xd6\xfbK\\d\x0b`P' + b'K\x01\x02\x14\x00\x14\x00\x00\x00\x08\x00\xa0lH\x05\xe2' + b'\x1e8\xbb\x10\x00\x00\x00\t\x04\x00\x00\x01\x00\x00\x00\x00' + b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00aPK' + b'\x01\x02\x14\x00\x14\x00\x00\x00\x08\x00\xa0lH\x05\xe2\x1e' + b'8\xbb\x10\x00\x00\x00\t\x04\x00\x00\x01\x00\x00\x00\x00\x00' + b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00bPK\x05' + b'\x06\x00\x00\x00\x00\x02\x00\x02\x00^\x00\x00\x00/\x00\x00' + b'\x00\x00\x00' + ) + with zipfile.ZipFile(io.BytesIO(data), 'r') as zipf: + self.assertEqual(zipf.namelist(), ['a', 'b']) + zi = zipf.getinfo('a') + self.assertEqual(zi.header_offset, 0) + self.assertEqual(zi.compress_size, 16) + self.assertEqual(zi.file_size, 1033) + zi = zipf.getinfo('b') + self.assertEqual(zi.header_offset, 0) + self.assertEqual(zi.compress_size, 16) + self.assertEqual(zi.file_size, 1033) + self.assertEqual(len(zipf.read('a')), 1033) + with self.assertRaisesRegex(zipfile.BadZipFile, 'File name.*differ'): + zipf.read('b') + + @requires_zlib() + def 
test_quoted_overlap(self): + data = ( + b'PK\x03\x04\x14\x00\x00\x00\x08\x00\xa0lH\x05Y\xfc' + b'8\x044\x00\x00\x00(\x04\x00\x00\x01\x00\x00\x00a\x00' + b'\x1f\x00\xe0\xffPK\x03\x04\x14\x00\x00\x00\x08\x00\xa0l' + b'H\x05\xe2\x1e8\xbb\x10\x00\x00\x00\t\x04\x00\x00\x01\x00' + b'\x00\x00b\xed\xc0\x81\x08\x00\x00\x00\xc00\xd6\xfbK\\' + b'd\x0b`PK\x01\x02\x14\x00\x14\x00\x00\x00\x08\x00\xa0' + b'lH\x05Y\xfc8\x044\x00\x00\x00(\x04\x00\x00\x01' + b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' + b'\x00aPK\x01\x02\x14\x00\x14\x00\x00\x00\x08\x00\xa0l' + b'H\x05\xe2\x1e8\xbb\x10\x00\x00\x00\t\x04\x00\x00\x01\x00' + b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00$\x00\x00\x00' + b'bPK\x05\x06\x00\x00\x00\x00\x02\x00\x02\x00^\x00\x00' + b'\x00S\x00\x00\x00\x00\x00' + ) + with zipfile.ZipFile(io.BytesIO(data), 'r') as zipf: + self.assertEqual(zipf.namelist(), ['a', 'b']) + zi = zipf.getinfo('a') + self.assertEqual(zi.header_offset, 0) + self.assertEqual(zi.compress_size, 52) + self.assertEqual(zi.file_size, 1064) + zi = zipf.getinfo('b') + self.assertEqual(zi.header_offset, 36) + self.assertEqual(zi.compress_size, 16) + self.assertEqual(zi.file_size, 1033) + with self.assertRaisesRegex(zipfile.BadZipFile, 'Overlapped entries'): + zipf.read('a') + self.assertEqual(len(zipf.read('b')), 1033) + def tearDown(self): unlink(TESTFN) unlink(TESTFN2) @@ -2951,6 +3011,17 @@ def test_from_dir(self): self.assertEqual(zi.compress_type, zipfile.ZIP_STORED) self.assertEqual(zi.file_size, 0) + def test_compresslevel_property(self): + zinfo = zipfile.ZipInfo("xxx") + self.assertFalse(zinfo._compresslevel) + self.assertFalse(zinfo.compress_level) + zinfo._compresslevel = 99 # test the legacy @property.setter + self.assertEqual(zinfo.compress_level, 99) + self.assertEqual(zinfo._compresslevel, 99) + zinfo.compress_level = 8 + self.assertEqual(zinfo.compress_level, 8) + self.assertEqual(zinfo._compresslevel, 8) + class CommandLineTest(unittest.TestCase): diff --git a/Lib/test/test_zipimport_support.py b/Lib/test/test_zipimport_support.py index 7bf50a33728e53..71039d2a8e7ab9 100644 --- a/Lib/test/test_zipimport_support.py +++ b/Lib/test/test_zipimport_support.py @@ -29,8 +29,9 @@ # test_cmd_line_script (covers the zipimport support in runpy) # Retrieve some helpers from other test cases -from test import (test_doctest, sample_doctest, sample_doctest_no_doctests, - sample_doctest_no_docstrings) +from test.test_doctest import (test_doctest, + sample_doctest, sample_doctest_no_doctests, + sample_doctest_no_docstrings) def _run_object_doctest(obj, module): @@ -100,18 +101,18 @@ def test_doctest_issue4197(self): # everything still works correctly test_src = inspect.getsource(test_doctest) test_src = test_src.replace( - "from test import test_doctest", + "from test.test_doctest import test_doctest", "import test_zipped_doctest as test_doctest") - test_src = test_src.replace("test.test_doctest", + test_src = test_src.replace("test.test_doctest.test_doctest", "test_zipped_doctest") - test_src = test_src.replace("test.sample_doctest", + test_src = test_src.replace("test.test_doctest.sample_doctest", "sample_zipped_doctest") # The sample doctest files rewritten to include in the zipped version. 
sample_sources = {} for mod in [sample_doctest, sample_doctest_no_doctests, sample_doctest_no_docstrings]: src = inspect.getsource(mod) - src = src.replace("test.test_doctest", "test_zipped_doctest") + src = src.replace("test.test_doctest.test_doctest", "test_zipped_doctest") # Rewrite the module name so that, for example, # "test.sample_doctest" becomes "sample_zipped_doctest". mod_name = mod.__name__.split(".")[-1] diff --git a/Lib/test/test_zoneinfo/test_zoneinfo.py b/Lib/test/test_zoneinfo/test_zoneinfo.py index 7b6b69d0109d88..18eab5b33540c9 100644 --- a/Lib/test/test_zoneinfo/test_zoneinfo.py +++ b/Lib/test/test_zoneinfo/test_zoneinfo.py @@ -36,6 +36,7 @@ TEMP_DIR = None DATA_DIR = pathlib.Path(__file__).parent / "data" ZONEINFO_JSON = DATA_DIR / "zoneinfo_data.json" +DRIVE = os.path.splitdrive('x:')[0] # Useful constants ZERO = timedelta(0) @@ -1679,8 +1680,8 @@ def test_env_variable(self): """Tests that the environment variable works with reset_tzpath.""" new_paths = [ ("", []), - ("/etc/zoneinfo", ["/etc/zoneinfo"]), - (f"/a/b/c{os.pathsep}/d/e/f", ["/a/b/c", "/d/e/f"]), + (f"{DRIVE}/etc/zoneinfo", [f"{DRIVE}/etc/zoneinfo"]), + (f"{DRIVE}/a/b/c{os.pathsep}{DRIVE}/d/e/f", [f"{DRIVE}/a/b/c", f"{DRIVE}/d/e/f"]), ] for new_path_var, expected_result in new_paths: @@ -1694,22 +1695,22 @@ def test_env_variable_relative_paths(self): test_cases = [ [("path/to/somewhere",), ()], [ - ("/usr/share/zoneinfo", "path/to/somewhere",), - ("/usr/share/zoneinfo",), + (f"{DRIVE}/usr/share/zoneinfo", "path/to/somewhere",), + (f"{DRIVE}/usr/share/zoneinfo",), ], [("../relative/path",), ()], [ - ("/usr/share/zoneinfo", "../relative/path",), - ("/usr/share/zoneinfo",), + (f"{DRIVE}/usr/share/zoneinfo", "../relative/path",), + (f"{DRIVE}/usr/share/zoneinfo",), ], [("path/to/somewhere", "../relative/path",), ()], [ ( - "/usr/share/zoneinfo", + f"{DRIVE}/usr/share/zoneinfo", "path/to/somewhere", "../relative/path", ), - ("/usr/share/zoneinfo",), + (f"{DRIVE}/usr/share/zoneinfo",), ], ] @@ -1727,9 +1728,9 @@ def test_env_variable_relative_paths(self): self.assertSequenceEqual(tzpath, expected_paths) def test_reset_tzpath_kwarg(self): - self.module.reset_tzpath(to=["/a/b/c"]) + self.module.reset_tzpath(to=[f"{DRIVE}/a/b/c"]) - self.assertSequenceEqual(self.module.TZPATH, ("/a/b/c",)) + self.assertSequenceEqual(self.module.TZPATH, (f"{DRIVE}/a/b/c",)) def test_reset_tzpath_relative_paths(self): bad_values = [ @@ -1758,8 +1759,8 @@ def test_tzpath_type_error(self): self.module.reset_tzpath(bad_value) def test_tzpath_attribute(self): - tzpath_0 = ["/one", "/two"] - tzpath_1 = ["/three"] + tzpath_0 = [f"{DRIVE}/one", f"{DRIVE}/two"] + tzpath_1 = [f"{DRIVE}/three"] with self.tzpath_context(tzpath_0): query_0 = self.module.TZPATH diff --git a/Lib/tkinter/__init__.py b/Lib/tkinter/__init__.py index 124882420c255c..e0db41dd915ece 100644 --- a/Lib/tkinter/__init__.py +++ b/Lib/tkinter/__init__.py @@ -1260,6 +1260,8 @@ def winfo_parent(self): def winfo_pathname(self, id, displayof=0): """Return the pathname of the widget given by ID.""" + if isinstance(id, int): + id = hex(id) args = ('winfo', 'pathname') \ + self._displayof(displayof) + (id,) return self.tk.call(args) @@ -4713,7 +4715,7 @@ def panes(self): def _test(): root = Tk() - text = "This is Tcl/Tk version %s" % TclVersion + text = "This is Tcl/Tk %s" % root.globalgetvar('tk_patchLevel') text += "\nThis should be a cedilla: \xe7" label = Label(root, text=text) label.pack() diff --git a/Lib/trace.py b/Lib/trace.py index 7cb6f897634b14..7886959fa64f68 100755 --- 
a/Lib/trace.py +++ b/Lib/trace.py @@ -265,8 +265,7 @@ def write_results(self, show_missing=True, summary=False, coverdir=None, *, modulename = _modname(filename) else: dir = coverdir - if not os.path.exists(dir): - os.makedirs(dir) + os.makedirs(dir, exist_ok=True) modulename = _fullmodname(filename) # If desired, get a list of the line numbers which represent diff --git a/Lib/traceback.py b/Lib/traceback.py index 1cf008c7e9da97..d27c7a726d2bb6 100644 --- a/Lib/traceback.py +++ b/Lib/traceback.py @@ -1051,7 +1051,11 @@ def __init__(self, exc_type, exc_value, exc_traceback, *, limit=None, # Capture now to permit freeing resources: only complication is in the # unofficial API _format_final_exc_line self._str = _safe_string(exc_value, 'exception') - self.__notes__ = getattr(exc_value, '__notes__', None) + try: + self.__notes__ = getattr(exc_value, '__notes__', None) + except Exception as e: + self.__notes__ = [ + f'Ignored error getting __notes__: {_safe_string(e, '__notes__', repr)}'] self._is_syntax_error = False self._have_exc_type = exc_type is not None @@ -1497,6 +1501,13 @@ def _compute_suggestion_error(exc_value, tb, wrong_name): if hasattr(self, wrong_name): return f"self.{wrong_name}" + try: + import _suggestions + except ImportError: + pass + else: + return _suggestions._generate_suggestions(d, wrong_name) + # Compute closest match if len(d) > _MAX_CANDIDATE_ITEMS: diff --git a/Lib/typing.py b/Lib/typing.py index d7d793539b35b1..d278b4effc7eba 100644 --- a/Lib/typing.py +++ b/Lib/typing.py @@ -1670,7 +1670,7 @@ class _TypingEllipsis: _TYPING_INTERNALS = frozenset({ '__parameters__', '__orig_bases__', '__orig_class__', '_is_protocol', '_is_runtime_protocol', '__protocol_attrs__', - '__callable_proto_members_only__', '__type_params__', + '__non_callable_proto_members__', '__type_params__', }) _SPECIAL_NAMES = frozenset({ @@ -1833,11 +1833,6 @@ def __init__(cls, *args, **kwargs): super().__init__(*args, **kwargs) if getattr(cls, "_is_protocol", False): cls.__protocol_attrs__ = _get_protocol_attrs(cls) - # PEP 544 prohibits using issubclass() - # with protocols that have non-method members. - cls.__callable_proto_members_only__ = all( - callable(getattr(cls, attr, None)) for attr in cls.__protocol_attrs__ - ) def __subclasscheck__(cls, other): if cls is Protocol: @@ -1846,25 +1841,23 @@ def __subclasscheck__(cls, other): getattr(cls, '_is_protocol', False) and not _allow_reckless_class_checks() ): + if not getattr(cls, '_is_runtime_protocol', False): + _type_check_issubclass_arg_1(other) + raise TypeError( + "Instance and class checks can only be used with " + "@runtime_checkable protocols" + ) if ( - not cls.__callable_proto_members_only__ + # this attribute is set by @runtime_checkable: + cls.__non_callable_proto_members__ and cls.__dict__.get("__subclasshook__") is _proto_hook ): _type_check_issubclass_arg_1(other) - non_method_attrs = sorted( - attr for attr in cls.__protocol_attrs__ - if not callable(getattr(cls, attr, None)) - ) + non_method_attrs = sorted(cls.__non_callable_proto_members__) raise TypeError( "Protocols with non-method members don't support issubclass()." f" Non-method members: {str(non_method_attrs)[1:-1]}." 
) - if not getattr(cls, '_is_runtime_protocol', False): - _type_check_issubclass_arg_1(other) - raise TypeError( - "Instance and class checks can only be used with " - "@runtime_checkable protocols" - ) return _abc_subclasscheck(cls, other) def __instancecheck__(cls, instance): @@ -1892,7 +1885,8 @@ def __instancecheck__(cls, instance): val = getattr_static(instance, attr) except AttributeError: break - if val is None and callable(getattr(cls, attr, None)): + # this attribute is set by @runtime_checkable: + if val is None and attr not in cls.__non_callable_proto_members__: break else: return True @@ -2114,6 +2108,22 @@ def close(self): ... raise TypeError('@runtime_checkable can be only applied to protocol classes,' ' got %r' % cls) cls._is_runtime_protocol = True + # PEP 544 prohibits using issubclass() + # with protocols that have non-method members. + # See gh-113320 for why we compute this attribute here, + # rather than in `_ProtocolMeta.__init__` + cls.__non_callable_proto_members__ = set() + for attr in cls.__protocol_attrs__: + try: + is_callable = callable(getattr(cls, attr, None)) + except Exception as e: + raise TypeError( + f"Failed to determine whether protocol member {attr!r} " + "is a method member" + ) from e + else: + if not is_callable: + cls.__non_callable_proto_members__.add(attr) return cls diff --git a/Lib/unittest/main.py b/Lib/unittest/main.py index d29a9f91fcca42..c3869de3f6f18e 100644 --- a/Lib/unittest/main.py +++ b/Lib/unittest/main.py @@ -269,7 +269,7 @@ def runTests(self): testRunner = self.testRunner self.result = testRunner.run(self.test) if self.exit: - if self.result.testsRun == 0: + if self.result.testsRun == 0 and len(self.result.skipped) == 0: sys.exit(_NO_TESTS_EXITCODE) elif self.result.wasSuccessful(): sys.exit(0) diff --git a/Lib/unittest/runner.py b/Lib/unittest/runner.py index e3c020e0ace96d..2bcadf0c998bd9 100644 --- a/Lib/unittest/runner.py +++ b/Lib/unittest/runner.py @@ -274,7 +274,7 @@ def run(self, test): infos.append("failures=%d" % failed) if errored: infos.append("errors=%d" % errored) - elif run == 0: + elif run == 0 and not skipped: self.stream.write("NO TESTS RAN") else: self.stream.write("OK") diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py index 1d03259b918c33..bca594420f6d9d 100644 --- a/Lib/urllib/request.py +++ b/Lib/urllib/request.py @@ -2490,7 +2490,7 @@ def getproxies_environment(): # select only environment variables which end in (after making lowercase) _proxy proxies = {} environment = [] - for name in os.environ.keys(): + for name in os.environ: # fast screen underscore position before more expensive case-folding if len(name) > 5 and name[-6] == "_" and name[-5:].lower() == "proxy": value = os.environ[name] diff --git a/Lib/venv/__init__.py b/Lib/venv/__init__.py index d960bf3bd82ac5..f04ca8fafcc33b 100644 --- a/Lib/venv/__init__.py +++ b/Lib/venv/__init__.py @@ -139,6 +139,11 @@ def create_if_needed(d): 'check that your PATH environment variable is ' 'correctly set.') dirname, exename = os.path.split(os.path.abspath(executable)) + if sys.platform == 'win32': + # Always create the simplest name in the venv. 
It will either be a + # link back to executable, or a copy of the appropriate launcher + _d = '_d' if os.path.splitext(exename)[0].endswith('_d') else '' + exename = f'python{_d}.exe' context.executable = executable context.python_dir = dirname context.python_exe = exename @@ -222,67 +227,26 @@ def create_configuration(self, context): args = ' '.join(args) f.write(f'command = {sys.executable} -m venv {args}\n') - if os.name != 'nt': - def symlink_or_copy(self, src, dst, relative_symlinks_ok=False): - """ - Try symlinking a file, and if that fails, fall back to copying. - """ - force_copy = not self.symlinks - if not force_copy: - try: - if not os.path.islink(dst): # can't link to itself! - if relative_symlinks_ok: - assert os.path.dirname(src) == os.path.dirname(dst) - os.symlink(os.path.basename(src), dst) - else: - os.symlink(src, dst) - except Exception: # may need to use a more specific exception - logger.warning('Unable to symlink %r to %r', src, dst) - force_copy = True - if force_copy: - shutil.copyfile(src, dst) - else: - def symlink_or_copy(self, src, dst, relative_symlinks_ok=False): - """ - Try symlinking a file, and if that fails, fall back to copying. - """ - bad_src = os.path.lexists(src) and not os.path.exists(src) - if self.symlinks and not bad_src and not os.path.islink(dst): - try: + def symlink_or_copy(self, src, dst, relative_symlinks_ok=False): + """ + Try symlinking a file, and if that fails, fall back to copying. + (Unused on Windows, because we can't just copy a failed symlink file: we + switch to a different set of files instead.) + """ + assert os.name != 'nt' + force_copy = not self.symlinks + if not force_copy: + try: + if not os.path.islink(dst): # can't link to itself! if relative_symlinks_ok: assert os.path.dirname(src) == os.path.dirname(dst) os.symlink(os.path.basename(src), dst) else: os.symlink(src, dst) - return - except Exception: # may need to use a more specific exception - logger.warning('Unable to symlink %r to %r', src, dst) - - # On Windows, we rewrite symlinks to our base python.exe into - # copies of venvlauncher.exe - basename, ext = os.path.splitext(os.path.basename(src)) - srcfn = os.path.join(os.path.dirname(__file__), - "scripts", - "nt", - basename + ext) - # Builds or venv's from builds need to remap source file - # locations, as we do not put them into Lib/venv/scripts - if sysconfig.is_python_build() or not os.path.isfile(srcfn): - if basename.endswith('_d'): - ext = '_d' + ext - basename = basename[:-2] - if basename == 'python': - basename = 'venvlauncher' - elif basename == 'pythonw': - basename = 'venvwlauncher' - src = os.path.join(os.path.dirname(src), basename + ext) - else: - src = srcfn - if not os.path.exists(src): - if not bad_src: - logger.warning('Unable to copy %r', src) - return - + except Exception: # may need to use a more specific exception + logger.warning('Unable to symlink %r to %r', src, dst) + force_copy = True + if force_copy: shutil.copyfile(src, dst) def create_git_ignore_file(self, context): @@ -298,22 +262,23 @@ def create_git_ignore_file(self, context): 'see https://docs.python.org/3/library/venv.html\n') file.write('*\n') - def setup_python(self, context): - """ - Set up a Python executable in the environment. + if os.name != 'nt': + def setup_python(self, context): + """ + Set up a Python executable in the environment. - :param context: The information for the environment creation request - being processed. 
- """ - binpath = context.bin_path - path = context.env_exe - copier = self.symlink_or_copy - dirname = context.python_dir - if os.name != 'nt': + :param context: The information for the environment creation request + being processed. + """ + binpath = context.bin_path + path = context.env_exe + copier = self.symlink_or_copy + dirname = context.python_dir copier(context.executable, path) if not os.path.islink(path): os.chmod(path, 0o755) - for suffix in ('python', 'python3', f'python3.{sys.version_info[1]}'): + for suffix in ('python', 'python3', + f'python3.{sys.version_info[1]}'): path = os.path.join(binpath, suffix) if not os.path.exists(path): # Issue 18807: make copies if @@ -321,30 +286,105 @@ def setup_python(self, context): copier(context.env_exe, path, relative_symlinks_ok=True) if not os.path.islink(path): os.chmod(path, 0o755) - else: - if self.symlinks: - # For symlinking, we need a complete copy of the root directory - # If symlinks fail, you'll get unnecessary copies of files, but - # we assume that if you've opted into symlinks on Windows then - # you know what you're doing. - suffixes = [ - f for f in os.listdir(dirname) if - os.path.normcase(os.path.splitext(f)[1]) in ('.exe', '.dll') - ] - if sysconfig.is_python_build(): - suffixes = [ - f for f in suffixes if - os.path.normcase(f).startswith(('python', 'vcruntime')) - ] + + else: + def setup_python(self, context): + """ + Set up a Python executable in the environment. + + :param context: The information for the environment creation request + being processed. + """ + binpath = context.bin_path + dirname = context.python_dir + exename = os.path.basename(context.env_exe) + exe_stem = os.path.splitext(exename)[0] + exe_d = '_d' if os.path.normcase(exe_stem).endswith('_d') else '' + if sysconfig.is_python_build(): + scripts = dirname + else: + scripts = os.path.join(os.path.dirname(__file__), + 'scripts', 'nt') + if not sysconfig.get_config_var("Py_GIL_DISABLED"): + python_exe = os.path.join(dirname, f'python{exe_d}.exe') + pythonw_exe = os.path.join(dirname, f'pythonw{exe_d}.exe') + link_sources = { + 'python.exe': python_exe, + f'python{exe_d}.exe': python_exe, + 'pythonw.exe': pythonw_exe, + f'pythonw{exe_d}.exe': pythonw_exe, + } + python_exe = os.path.join(scripts, f'venvlauncher{exe_d}.exe') + pythonw_exe = os.path.join(scripts, f'venvwlauncher{exe_d}.exe') + copy_sources = { + 'python.exe': python_exe, + f'python{exe_d}.exe': python_exe, + 'pythonw.exe': pythonw_exe, + f'pythonw{exe_d}.exe': pythonw_exe, + } else: - suffixes = {'python.exe', 'python_d.exe', 'pythonw.exe', 'pythonw_d.exe'} - base_exe = os.path.basename(context.env_exe) - suffixes.add(base_exe) + exe_t = f'3.{sys.version_info[1]}t' + python_exe = os.path.join(dirname, f'python{exe_t}{exe_d}.exe') + pythonw_exe = os.path.join(dirname, f'pythonw{exe_t}{exe_d}.exe') + link_sources = { + 'python.exe': python_exe, + f'python{exe_d}.exe': python_exe, + f'python{exe_t}.exe': python_exe, + f'python{exe_t}{exe_d}.exe': python_exe, + 'pythonw.exe': pythonw_exe, + f'pythonw{exe_d}.exe': pythonw_exe, + f'pythonw{exe_t}.exe': pythonw_exe, + f'pythonw{exe_t}{exe_d}.exe': pythonw_exe, + } + python_exe = os.path.join(scripts, f'venvlaunchert{exe_d}.exe') + pythonw_exe = os.path.join(scripts, f'venvwlaunchert{exe_d}.exe') + copy_sources = { + 'python.exe': python_exe, + f'python{exe_d}.exe': python_exe, + f'python{exe_t}.exe': python_exe, + f'python{exe_t}{exe_d}.exe': python_exe, + 'pythonw.exe': pythonw_exe, + f'pythonw{exe_d}.exe': pythonw_exe, + 
f'pythonw{exe_t}.exe': pythonw_exe, + f'pythonw{exe_t}{exe_d}.exe': pythonw_exe, + } + + do_copies = True + if self.symlinks: + do_copies = False + # For symlinking, we need all the DLLs to be available alongside + # the executables. + link_sources.update({ + f: os.path.join(dirname, f) for f in os.listdir(dirname) + if os.path.normcase(f).startswith(('python', 'vcruntime')) + and os.path.normcase(os.path.splitext(f)[1]) == '.dll' + }) + + to_unlink = [] + for dest, src in link_sources.items(): + dest = os.path.join(binpath, dest) + try: + os.symlink(src, dest) + to_unlink.append(dest) + except OSError: + logger.warning('Unable to symlink %r to %r', src, dst) + do_copies = True + for f in to_unlink: + try: + os.unlink(f) + except OSError: + logger.warning('Failed to clean up symlink %r', + f) + logger.warning('Retrying with copies') + break - for suffix in suffixes: - src = os.path.join(dirname, suffix) - if os.path.lexists(src): - copier(src, os.path.join(binpath, suffix)) + if do_copies: + for dest, src in copy_sources.items(): + dest = os.path.join(binpath, dest) + try: + shutil.copy2(src, dest) + except OSError: + logger.warning('Unable to copy %r to %r', src, dest) if sysconfig.is_python_build(): # copy init.tcl @@ -437,6 +477,14 @@ def install_scripts(self, context, path): """ binpath = context.bin_path plen = len(path) + if os.name == 'nt': + def skip_file(f): + f = os.path.normcase(f) + return (f.startswith(('python', 'venv')) + and f.endswith(('.exe', '.pdb'))) + else: + def skip_file(f): + return False for root, dirs, files in os.walk(path): if root == path: # at top-level, remove irrelevant dirs for d in dirs[:]: @@ -444,8 +492,7 @@ def install_scripts(self, context, path): dirs.remove(d) continue # ignore files in top level for f in files: - if (os.name == 'nt' and f.startswith('python') - and f.endswith(('.exe', '.pdb'))): + if skip_file(f): continue srcfile = os.path.join(root, f) suffix = root[plen:].split(os.sep)[2:] @@ -456,20 +503,25 @@ def install_scripts(self, context, path): if not os.path.exists(dstdir): os.makedirs(dstdir) dstfile = os.path.join(dstdir, f) + if os.name == 'nt' and srcfile.endswith(('.exe', '.pdb')): + shutil.copy2(srcfile, dstfile) + continue with open(srcfile, 'rb') as f: data = f.read() - if not srcfile.endswith(('.exe', '.pdb')): - try: - data = data.decode('utf-8') - data = self.replace_variables(data, context) - data = data.encode('utf-8') - except UnicodeError as e: - data = None - logger.warning('unable to copy script %r, ' - 'may be binary: %s', srcfile, e) - if data is not None: + try: + new_data = ( + self.replace_variables(data.decode('utf-8'), context) + .encode('utf-8') + ) + except UnicodeError as e: + logger.warning('unable to copy script %r, ' + 'may be binary: %s', srcfile, e) + continue + if new_data == data: + shutil.copy2(srcfile, dstfile) + else: with open(dstfile, 'wb') as f: - f.write(data) + f.write(new_data) shutil.copymode(srcfile, dstfile) def upgrade_dependencies(self, context): diff --git a/Lib/warnings.py b/Lib/warnings.py index b8ff078569d2ce..4ad6ad027192e8 100644 --- a/Lib/warnings.py +++ b/Lib/warnings.py @@ -58,15 +58,16 @@ def _formatwarnmsg_impl(msg): # catch Exception, not only ImportError and RecursionError. 
except Exception: # don't suggest to enable tracemalloc if it's not available - tracing = True + suggest_tracemalloc = False tb = None else: - tracing = tracemalloc.is_tracing() try: + suggest_tracemalloc = not tracemalloc.is_tracing() tb = tracemalloc.get_object_traceback(msg.source) except Exception: # When a warning is logged during Python shutdown, tracemalloc # and the import machinery don't work anymore + suggest_tracemalloc = False tb = None if tb is not None: @@ -85,7 +86,7 @@ def _formatwarnmsg_impl(msg): if line: line = line.strip() s += ' %s\n' % line - elif not tracing: + elif suggest_tracemalloc: s += (f'{category}: Enable tracemalloc to get the object ' f'allocation traceback\n') return s diff --git a/Lib/zipfile/__init__.py b/Lib/zipfile/__init__.py index fe629ed1cf2fc5..8005b4b34ccf76 100644 --- a/Lib/zipfile/__init__.py +++ b/Lib/zipfile/__init__.py @@ -371,7 +371,7 @@ def _sanitize_filename(filename): return filename -class ZipInfo (object): +class ZipInfo: """Class with attributes describing each file in the ZIP archive.""" __slots__ = ( @@ -379,7 +379,7 @@ class ZipInfo (object): 'filename', 'date_time', 'compress_type', - '_compresslevel', + 'compress_level', 'comment', 'extra', 'create_system', @@ -395,6 +395,7 @@ class ZipInfo (object): 'compress_size', 'file_size', '_raw_time', + '_end_offset', ) def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)): @@ -412,7 +413,7 @@ def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)): # Standard values: self.compress_type = ZIP_STORED # Type of compression for the file - self._compresslevel = None # Level for the compressor + self.compress_level = None # Level for the compressor self.comment = b"" # Comment for each file self.extra = b"" # ZIP extra data if sys.platform == 'win32': @@ -429,10 +430,20 @@ def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)): self.external_attr = 0 # External file attributes self.compress_size = 0 # Size of the compressed file self.file_size = 0 # Size of the uncompressed file + self._end_offset = None # Start of the next local header or central directory # Other attributes are set by class ZipFile: # header_offset Byte offset to the file header # CRC CRC-32 of the uncompressed file + # Maintain backward compatibility with the old protected attribute name. 
+ @property + def _compresslevel(self): + return self.compress_level + + @_compresslevel.setter + def _compresslevel(self, value): + self.compress_level = value + def __repr__(self): result = ['<%s filename=%r' % (self.__class__.__name__, self.filename)] if self.compress_type != ZIP_STORED: @@ -1189,7 +1200,7 @@ def __init__(self, zf, zinfo, zip64): self._zip64 = zip64 self._zipfile = zf self._compressor = _get_compressor(zinfo.compress_type, - zinfo._compresslevel) + zinfo.compress_level) self._file_size = 0 self._compress_size = 0 self._crc = 0 @@ -1488,6 +1499,12 @@ def _RealGetContents(self): if self.debug > 2: print("total", total) + end_offset = self.start_dir + for zinfo in sorted(self.filelist, + key=lambda zinfo: zinfo.header_offset, + reverse=True): + zinfo._end_offset = end_offset + end_offset = zinfo.header_offset def namelist(self): """Return a list of file names in the archive.""" @@ -1595,7 +1612,7 @@ def open(self, name, mode="r", pwd=None, *, force_zip64=False): elif mode == 'w': zinfo = ZipInfo(name) zinfo.compress_type = self.compression - zinfo._compresslevel = self.compresslevel + zinfo.compress_level = self.compresslevel else: # Get info object for name zinfo = self.getinfo(name) @@ -1644,6 +1661,10 @@ def open(self, name, mode="r", pwd=None, *, force_zip64=False): 'File name in directory %r and header %r differ.' % (zinfo.orig_filename, fname)) + if (zinfo._end_offset is not None and + zef_file.tell() + zinfo.compress_size > zinfo._end_offset): + raise BadZipFile(f"Overlapped entries: {zinfo.orig_filename!r} (possible zip bomb)") + # check for encrypted flag & handle password is_encrypted = zinfo.flag_bits & _MASK_ENCRYPTED if is_encrypted: @@ -1772,7 +1793,7 @@ def _extract_member(self, member, targetpath, pwd): # filter illegal characters on Windows arcname = self._sanitize_windows_name(arcname, os.path.sep) - if not arcname: + if not arcname and not member.is_dir(): raise ValueError("Empty filename.") targetpath = os.path.join(targetpath, arcname) @@ -1843,9 +1864,9 @@ def write(self, filename, arcname=None, zinfo.compress_type = self.compression if compresslevel is not None: - zinfo._compresslevel = compresslevel + zinfo.compress_level = compresslevel else: - zinfo._compresslevel = self.compresslevel + zinfo.compress_level = self.compresslevel with open(filename, "rb") as src, self.open(zinfo, 'w') as dest: shutil.copyfileobj(src, dest, 1024*8) @@ -1863,7 +1884,7 @@ def writestr(self, zinfo_or_arcname, data, zinfo = ZipInfo(filename=zinfo_or_arcname, date_time=time.localtime(time.time())[:6]) zinfo.compress_type = self.compression - zinfo._compresslevel = self.compresslevel + zinfo.compress_level = self.compresslevel if zinfo.filename.endswith('/'): zinfo.external_attr = 0o40775 << 16 # drwxrwxr-x zinfo.external_attr |= 0x10 # MS-DOS directory flag @@ -1884,7 +1905,7 @@ def writestr(self, zinfo_or_arcname, data, zinfo.compress_type = compress_type if compresslevel is not None: - zinfo._compresslevel = compresslevel + zinfo.compress_level = compresslevel zinfo.file_size = len(data) # Uncompressed size with self._lock: diff --git a/Mac/IDLE/IDLE.app/Contents/Info.plist b/Mac/IDLE/IDLE.app/Contents/Info.plist index 20b97b67f41d1a..8549e405e2a65a 100644 --- a/Mac/IDLE/IDLE.app/Contents/Info.plist +++ b/Mac/IDLE/IDLE.app/Contents/Info.plist @@ -37,7 +37,7 @@ CFBundleExecutable IDLE CFBundleGetInfoString - %version%, © 2001-2023 Python Software Foundation + %version%, © 2001-2024 Python Software Foundation CFBundleIconFile IDLE.icns CFBundleIdentifier diff --git 
a/Mac/PythonLauncher/Info.plist.in b/Mac/PythonLauncher/Info.plist.in index b7cddac0729fc2..233694788ac2b7 100644 --- a/Mac/PythonLauncher/Info.plist.in +++ b/Mac/PythonLauncher/Info.plist.in @@ -40,9 +40,9 @@ CFBundleExecutable Python Launcher NSHumanReadableCopyright - Copyright © 2001-2023 Python Software Foundation + Copyright © 2001-2024 Python Software Foundation CFBundleGetInfoString - %VERSION%, © 2001-2023 Python Software Foundation + %VERSION%, © 2001-2024 Python Software Foundation CFBundleIconFile PythonLauncher.icns CFBundleIdentifier diff --git a/Mac/Resources/app/Info.plist.in b/Mac/Resources/app/Info.plist.in index 8362b19b361b62..a1fc1511c40e96 100644 --- a/Mac/Resources/app/Info.plist.in +++ b/Mac/Resources/app/Info.plist.in @@ -20,7 +20,7 @@ CFBundleExecutable Python CFBundleGetInfoString - %version%, (c) 2001-2023 Python Software Foundation. + %version%, (c) 2001-2024 Python Software Foundation. CFBundleHelpBookFolder Documentation @@ -37,7 +37,7 @@ CFBundleInfoDictionaryVersion 6.0 CFBundleLongVersionString - %version%, (c) 2001-2023 Python Software Foundation. + %version%, (c) 2001-2024 Python Software Foundation. CFBundleName Python CFBundlePackageType @@ -55,7 +55,7 @@ NSAppleScriptEnabled NSHumanReadableCopyright - (c) 2001-2023 Python Software Foundation. + (c) 2001-2024 Python Software Foundation. NSHighResolutionCapable CFBundleAllowMixedLocalizations diff --git a/Mac/Resources/framework/Info.plist.in b/Mac/Resources/framework/Info.plist.in index 238441ce2c76c7..4c42971ed90ee4 100644 --- a/Mac/Resources/framework/Info.plist.in +++ b/Mac/Resources/framework/Info.plist.in @@ -17,9 +17,9 @@ CFBundlePackageType FMWK CFBundleShortVersionString - %VERSION%, (c) 2001-2023 Python Software Foundation. + %VERSION%, (c) 2001-2024 Python Software Foundation. CFBundleLongVersionString - %VERSION%, (c) 2001-2023 Python Software Foundation. + %VERSION%, (c) 2001-2024 Python Software Foundation. CFBundleSignature ???? CFBundleVersion diff --git a/Makefile.pre.in b/Makefile.pre.in index 6a64547e97d266..d251e7c481b52b 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -417,6 +417,9 @@ PYTHON_OBJS= \ Python/frame.o \ Python/frozenmain.o \ Python/future.o \ + Python/gc.o \ + Python/gc_free_threading.o \ + Python/gc_gil.o \ Python/getargs.o \ Python/getcompiler.o \ Python/getcopyright.o \ @@ -546,7 +549,7 @@ LINK_PYTHON_OBJS=@LINK_PYTHON_OBJS@ # On some systems, object files that reference DTrace probes need to be modified # in-place by dtrace(1). 
DTRACE_DEPS = \ - Python/ceval.o Python/import.o Python/sysmodule.o Modules/gcmodule.o + Python/ceval.o Python/gc.o Python/import.o Python/sysmodule.o ########################################################################## # decimal's libmpdec @@ -1647,8 +1650,8 @@ Include/pydtrace_probes.h: $(srcdir)/Include/pydtrace.d mv $@.tmp $@ Python/ceval.o: $(srcdir)/Include/pydtrace.h +Python/gc.o: $(srcdir)/Include/pydtrace.h Python/import.o: $(srcdir)/Include/pydtrace.h -Modules/gcmodule.o: $(srcdir)/Include/pydtrace.h Python/pydtrace.o: $(srcdir)/Include/pydtrace.d $(DTRACE_DEPS) $(DTRACE) $(DFLAGS) -o $@ -G -s $< $(DTRACE_DEPS) @@ -1827,6 +1830,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/internal/pycore_floatobject.h \ $(srcdir)/Include/internal/pycore_format.h \ $(srcdir)/Include/internal/pycore_frame.h \ + $(srcdir)/Include/internal/pycore_freelist.h \ $(srcdir)/Include/internal/pycore_function.h \ $(srcdir)/Include/internal/pycore_genobject.h \ $(srcdir)/Include/internal/pycore_getopt.h \ @@ -1891,7 +1895,6 @@ PYTHON_HEADERS= \ $(srcdir)/Include/internal/pycore_unionobject.h \ $(srcdir)/Include/internal/pycore_unicodeobject.h \ $(srcdir)/Include/internal/pycore_unicodeobject_generated.h \ - $(srcdir)/Include/internal/pycore_uops.h \ $(srcdir)/Include/internal/pycore_uop_metadata.h \ $(srcdir)/Include/internal/pycore_warnings.h \ $(srcdir)/Include/internal/pycore_weakref.h \ @@ -2218,6 +2221,7 @@ TESTSUBDIRS= idlelib/idle_test \ test/test_cppext \ test/test_ctypes \ test/test_dataclasses \ + test/test_doctest \ test/test_email \ test/test_email/data \ test/test_future_stmt \ @@ -2864,7 +2868,7 @@ MODULE__SHA1_DEPS=$(srcdir)/Modules/hashlib.h $(LIBHACL_HEADERS) Modules/_hacl/H MODULE__SHA2_DEPS=$(srcdir)/Modules/hashlib.h $(LIBHACL_SHA2_HEADERS) $(LIBHACL_SHA2_A) MODULE__SHA3_DEPS=$(srcdir)/Modules/hashlib.h $(LIBHACL_HEADERS) Modules/_hacl/Hacl_Hash_SHA3.h Modules/_hacl/Hacl_Hash_SHA3.c MODULE__SOCKET_DEPS=$(srcdir)/Modules/socketmodule.h $(srcdir)/Modules/addrinfo.h $(srcdir)/Modules/getaddrinfo.c $(srcdir)/Modules/getnameinfo.c -MODULE__SSL_DEPS=$(srcdir)/Modules/_ssl.h $(srcdir)/Modules/_ssl/cert.c $(srcdir)/Modules/_ssl/debughelpers.c $(srcdir)/Modules/_ssl/misc.c $(srcdir)/Modules/_ssl_data.h $(srcdir)/Modules/_ssl_data_111.h $(srcdir)/Modules/_ssl_data_300.h $(srcdir)/Modules/socketmodule.h +MODULE__SSL_DEPS=$(srcdir)/Modules/_ssl.h $(srcdir)/Modules/_ssl/cert.c $(srcdir)/Modules/_ssl/debughelpers.c $(srcdir)/Modules/_ssl/misc.c $(srcdir)/Modules/_ssl_data_111.h $(srcdir)/Modules/_ssl_data_300.h $(srcdir)/Modules/socketmodule.h MODULE__TESTCAPI_DEPS=$(srcdir)/Modules/_testcapi/testcapi_long.h $(srcdir)/Modules/_testcapi/parts.h $(srcdir)/Modules/_testcapi/util.h MODULE__TESTINTERNALCAPI_DEPS=$(srcdir)/Modules/_testinternalcapi/parts.h MODULE__SQLITE3_DEPS=$(srcdir)/Modules/_sqlite/connection.h $(srcdir)/Modules/_sqlite/cursor.h $(srcdir)/Modules/_sqlite/microprotocols.h $(srcdir)/Modules/_sqlite/module.h $(srcdir)/Modules/_sqlite/prepare_protocol.h $(srcdir)/Modules/_sqlite/row.h $(srcdir)/Modules/_sqlite/util.h diff --git a/Misc/ACKS b/Misc/ACKS index ab1255be2d58fa..466023f390a421 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -666,6 +666,7 @@ Eddy De Greef Duane Griffin Grant Griffin Andrea Griffini +Semyon Grigoryev Duncan Grisby Olivier Grisel Fabian Groffen @@ -963,6 +964,7 @@ Carsten Klein Bastian Kleineidam Joel Klimont Bob Kline +Alois Klink Matthias Klose Jeremy Kloth Thomas Kluyver diff --git a/Misc/NEWS.d/3.10.0a1.rst b/Misc/NEWS.d/3.10.0a1.rst index 
3186de75efd9c5..4842a026aa49f7 100644 --- a/Misc/NEWS.d/3.10.0a1.rst +++ b/Misc/NEWS.d/3.10.0a1.rst @@ -4,7 +4,7 @@ .. release date: 2020-10-05 .. section: Security -Fixes `python3x._pth` being ignored on Windows, caused by the fix for +Fixes ``python3x._pth`` being ignored on Windows, caused by the fix for :issue:`29778` (CVE-2020-15801). .. @@ -217,7 +217,7 @@ Port the :mod:`_opcode` extension module to multi-phase initialization .. nonce: 3-VJiH .. section: Core and Builtins -Fixes the wrong error description in the error raised by using 2 `,` in +Fixes the wrong error description in the error raised by using 2 ``,`` in format string in f-string and :meth:`str.format`. .. @@ -747,7 +747,7 @@ Galindo. Fix a bug where a line with only a line continuation character is not considered a blank line at tokenizer level. In such cases, more than a -single `NEWLINE` token was emitted. The old parser was working around the +single ``NEWLINE`` token was emitted. The old parser was working around the issue, but the new parser threw a :exc:`SyntaxError` for valid input due to this. For example, an empty line following a line continuation character was interpreted as a :exc:`SyntaxError`. @@ -1157,7 +1157,7 @@ The :class:`threading.Thread` constructor now uses the target name if the .. nonce: bnh-VG .. section: Library -fix `tkinter.EventType` Enum so all members are strings, and none are tuples +fix ``tkinter.EventType`` Enum so all members are strings, and none are tuples .. @@ -1220,7 +1220,7 @@ class Previously there was no way to check that without using private API. See the `relevant issue in python/typing -` +`_. .. @@ -1229,8 +1229,8 @@ Previously there was no way to check that without using private API. See the .. nonce: pI_uZQ .. section: Library -Honor `object` overrides in `Enum` class creation (specifically, `__str__`, -`__repr__`, `__format__`, and `__reduce_ex__`). +Honor ``object`` overrides in ``Enum`` class creation (specifically, ``__str__``, +``__repr__``, ``__format__``, and ``__reduce_ex__``). .. @@ -1239,7 +1239,7 @@ Honor `object` overrides in `Enum` class creation (specifically, `__str__`, .. nonce: IpYkEe .. section: Library -`enum.Flag` and `enum.IntFlag` members are now iterable +``enum.Flag`` and ``enum.IntFlag`` members are now iterable. .. @@ -1557,7 +1557,7 @@ activation. .. nonce: wqrj8C .. section: Library -Recursive evaluation of `typing.ForwardRef` in `get_type_hints`. +Recursive evaluation of ``typing.ForwardRef`` in ``get_type_hints``. .. @@ -1851,8 +1851,8 @@ Merry. .. nonce: 1dk8Bu .. section: Library -:mod:`ensurepip` now disables the use of `pip` cache when installing the -bundled versions of `pip` and `setuptools`. Patch by Krzysztof Konopko. +:mod:`ensurepip` now disables the use of ``pip`` cache when installing the +bundled versions of ``pip`` and ``setuptools``. Patch by Krzysztof Konopko. .. @@ -1933,7 +1933,7 @@ Smith and Tal Einat. .. nonce: n7fOwS .. section: Library -Added a `defaults` parameter to :class:`logging.Formatter`, to allow +Added a ``defaults`` parameter to :class:`logging.Formatter`, to allow specifying default values for custom fields. Patch by Asaf Alon and Bar Harel. @@ -2225,7 +2225,7 @@ policy. .. nonce: ps7Yf1 .. section: Library -func:`hashlib.new` passed ``usedforsecurity`` to OpenSSL EVP constructor +:func:`hashlib.new` passed ``usedforsecurity`` to OpenSSL EVP constructor ``_hashlib.new()``. test_hashlib and test_smtplib handle strict security policy better. @@ -2393,8 +2393,8 @@ closes connection during TLS negotiation .. 
nonce: kOOaHn .. section: Library -fix default `_missing_` so a duplicate `ValueError` is not set as the -`__context__` of the original `ValueError` +fix default ``_missing_`` so a duplicate ``ValueError`` is not set as the +``__context__`` of the original ``ValueError``. .. diff --git a/Misc/NEWS.d/3.10.0a2.rst b/Misc/NEWS.d/3.10.0a2.rst index 78f4377656b0cc..79f570439b52b8 100644 --- a/Misc/NEWS.d/3.10.0a2.rst +++ b/Misc/NEWS.d/3.10.0a2.rst @@ -88,10 +88,10 @@ Goldschmidt. .. nonce: d4a-8o .. section: Core and Builtins -Fix a bug in the parser, where a curly brace following a `primary` didn't -fail immediately. This led to invalid expressions like `a {b}` to throw a +Fix a bug in the parser, where a curly brace following a ``primary`` didn't +fail immediately. This led to invalid expressions like ``a {b}`` to throw a :exc:`SyntaxError` with a wrong offset, or invalid expressions ending with a -curly brace like `a {` to not fail immediately in the REPL. +curly brace like ``a {`` to not fail immediately in the REPL. .. @@ -214,7 +214,7 @@ Micro optimization for range.index if step is 1. Patch by Donghee Na. .. nonce: qPWjJA .. section: Core and Builtins -Add `sys._current_exceptions()` function to retrieve a dictionary mapping +Add ``sys._current_exceptions()`` function to retrieve a dictionary mapping each thread's identifier to the topmost exception currently active in that thread at the time the function is called. @@ -302,7 +302,7 @@ type to a heap type. Patch by Mohamed Koubaa and Victor Stinner. .. section: Library Fix memory leak in :func:`subprocess.Popen` in case an uid (gid) specified -in `user` (`group`, `extra_groups`) overflows `uid_t` (`gid_t`). +in ``user`` (``group``, ``extra_groups``) overflows ``uid_t`` (``gid_t``). .. @@ -649,7 +649,7 @@ Document __format__ functionality for IP addresses. .. nonce: CzBMit .. section: Documentation -Document the default implementation of `object.__eq__`. +Document the default implementation of ``object.__eq__``. .. @@ -878,7 +878,7 @@ targeting 3.10 or later. .. nonce: sh8IDY .. section: C API -Add `_Py_closerange` function to provide performant closing of a range of +Add ``_Py_closerange`` function to provide performant closing of a range of file descriptors. .. @@ -898,7 +898,7 @@ available again in limited API. .. nonce: ZZ5wJG .. section: C API -Add `PyIter_Send` function to allow sending value into +Add ``PyIter_Send`` function to allow sending value into generator/coroutine/iterator without raising StopIteration exception to signal return. diff --git a/Misc/NEWS.d/3.10.0a3.rst b/Misc/NEWS.d/3.10.0a3.rst index 7112819c1b4118..179cf3e9cfb08c 100644 --- a/Misc/NEWS.d/3.10.0a3.rst +++ b/Misc/NEWS.d/3.10.0a3.rst @@ -247,8 +247,8 @@ fixes union type expressions not de-duplicating ``GenericAlias`` objects. .. nonce: B-Veg7 .. section: Core and Builtins -The import system triggers a `ImportWarning` when it falls back to using -`load_module()`. +The import system triggers a ``ImportWarning`` when it falls back to using +``load_module()``. .. @@ -464,8 +464,8 @@ Support signal module on VxWorks. .. nonce: r9rNCj .. section: Library -We fixed an issue in `pickle.whichmodule` in which importing -`multiprocessing` could change the how pickle identifies which module an +We fixed an issue in ``pickle.whichmodule`` in which importing +``multiprocessing`` could change the how pickle identifies which module an object belongs to, potentially breaking the unpickling of those objects. .. @@ -602,7 +602,7 @@ function when ``os.open`` fails. .. 
nonce: F363jO .. section: Library -Fix `os.sendfile()` on illumos. +Fix ``os.sendfile()`` on illumos. .. @@ -718,8 +718,8 @@ Improve asyncio.wait function to create the futures set just one time. .. nonce: BzizYV .. section: Library -Update various modules in the stdlib to fall back on `__spec__.loader` when -`__loader__` isn't defined on a module. +Update various modules in the stdlib to fall back on ``__spec__.loader`` when +``__loader__`` isn't defined on a module. .. @@ -728,8 +728,8 @@ Update various modules in the stdlib to fall back on `__spec__.loader` when .. nonce: CAsI3O .. section: Library -The `load_module()` methods found in importlib now trigger a -DeprecationWarning. +The ``load_module()`` methods found in ``importlib`` now trigger a +``DeprecationWarning``. .. @@ -872,7 +872,7 @@ extension during TLS handshake when no custom context is supplied. .. nonce: 5mi7b0 .. section: Library -Add func:`os.eventfd` to provide a low level interface for Linux's event +Add :func:`os.eventfd` to provide a low level interface for Linux's event notification file descriptor. .. diff --git a/Misc/NEWS.d/3.10.0a4.rst b/Misc/NEWS.d/3.10.0a4.rst index 414823f162d85c..398f7e5d3422cb 100644 --- a/Misc/NEWS.d/3.10.0a4.rst +++ b/Misc/NEWS.d/3.10.0a4.rst @@ -516,7 +516,7 @@ Define THREAD_STACK_SIZE for VxWorks. .. nonce: x8TASR .. section: Library -[Enum] `_EnumDict.update()` is now supported +[Enum] ``_EnumDict.update()`` is now supported. .. diff --git a/Misc/NEWS.d/3.10.0a7.rst b/Misc/NEWS.d/3.10.0a7.rst index d9cdfbd04c88d4..74120a3b40c012 100644 --- a/Misc/NEWS.d/3.10.0a7.rst +++ b/Misc/NEWS.d/3.10.0a7.rst @@ -261,7 +261,7 @@ the annotations _Py_NO_SANITIZE_ADDRESS, _Py_NO_SANITIZE_THREAD, and _Py_NO_SANITIZE_MEMORY. Those annotations are no longer needed. To disable the radix tree map, set a preprocessor flag as follows: -`-DWITH_PYMALLOC_RADIX_TREE=0`. +``-DWITH_PYMALLOC_RADIX_TREE=0``. Co-authored-by: Tim Peters @@ -664,11 +664,11 @@ on failure. .. nonce: f1dr_5 .. section: Library -Enum's `repr()` and `str()` have changed: `repr()` is now -*EnumClass.MemberName* and `str()` is *MemberName*. Additionally, stdlib +Enum's ``repr()`` and ``str()`` have changed: ``repr()`` is now +*EnumClass.MemberName* and ``str()`` is *MemberName*. Additionally, stdlib Enum's whose contents are available as module attributes, such as -`RegexFlag.IGNORECASE`, have their `repr()` as *module.name*, e.g. -`re.IGNORECASE`. +``RegexFlag.IGNORECASE``, have their ``repr()`` as *module.name*, e.g. +``re.IGNORECASE``. .. diff --git a/Misc/NEWS.d/3.10.0b1.rst b/Misc/NEWS.d/3.10.0b1.rst index e7b6b93d0b6df3..640f3ee58adbae 100644 --- a/Misc/NEWS.d/3.10.0b1.rst +++ b/Misc/NEWS.d/3.10.0b1.rst @@ -1181,8 +1181,8 @@ Lewis Gaul. .. nonce: NzLVaR .. section: Library -Add `pathlib.Path.hardlink_to()` method that supersedes `link_to()`. The new -method has the same argument order as `symlink_to()`. +Add ``pathlib.Path.hardlink_to()`` method that supersedes ``link_to()``. The new +method has the same argument order as ``symlink_to()``. .. diff --git a/Misc/NEWS.d/3.11.0a1.rst b/Misc/NEWS.d/3.11.0a1.rst index 63abcbd5a6499e..e8d4a02a11e0f9 100644 --- a/Misc/NEWS.d/3.11.0a1.rst +++ b/Misc/NEWS.d/3.11.0a1.rst @@ -3204,7 +3204,7 @@ deprecated, for removal in 3.14. .. section: Library When :class:`http.server.SimpleHTTPRequestHandler` sends a ``301 (Moved -Permanently)`` for a directory path not ending with `/`, add a +Permanently)`` for a directory path not ending with ``/``, add a ``Content-Length: 0`` header. 
This improves the behavior for certain clients. @@ -3240,7 +3240,7 @@ Patch by Erlend E. Aasland. .. nonce: m72tlH .. section: Library -AIX: `Lib/_aix_support.get_platform()` may fail in an AIX WPAR. The fileset +AIX: ``Lib/_aix_support.get_platform()`` may fail in an AIX WPAR. The fileset bos.rte appears to have a builddate in both LPAR and WPAR so this fileset is queried rather than bos.mp64. To prevent a similar situation (no builddate in ODM) a value (9988) sufficient for completing a build is provided. Patch @@ -3685,9 +3685,9 @@ equivalents are valid. .. nonce: aJuvQF .. section: Documentation -Removed the othergui.rst file, any references to it, and the list of GUI +Removed the ``othergui.rst`` file, any references to it, and the list of GUI frameworks in the FAQ. In their place I've added links to the Python Wiki -`page on GUI frameworks `. +`page on GUI frameworks `_. .. diff --git a/Misc/NEWS.d/3.11.0a2.rst b/Misc/NEWS.d/3.11.0a2.rst index 503e489b658e4d..eb1456f1bcf353 100644 --- a/Misc/NEWS.d/3.11.0a2.rst +++ b/Misc/NEWS.d/3.11.0a2.rst @@ -507,7 +507,7 @@ Waygood. .. section: Library Make :func:`inspect.getmodule` catch ``FileNotFoundError`` raised by -:'func:`inspect.getabsfile`, and return ``None`` to indicate that the module +:func:`inspect.getabsfile`, and return ``None`` to indicate that the module could not be determined. .. @@ -897,8 +897,8 @@ was often the case on macOS. .. nonce: F18qcE .. section: Tests -Add more test cases for `@functools.singledispatchmethod` when combined with -`@classmethod` or `@staticmethod`. +Add more test cases for ``@functools.singledispatchmethod`` when combined with +``@classmethod`` or ``@staticmethod``. .. diff --git a/Misc/NEWS.d/3.11.0a3.rst b/Misc/NEWS.d/3.11.0a3.rst index a96a59115797ee..2842aad0e163d6 100644 --- a/Misc/NEWS.d/3.11.0a3.rst +++ b/Misc/NEWS.d/3.11.0a3.rst @@ -350,7 +350,7 @@ Galindo. .. section: Library Fix possible crash when getting an attribute of -class:`xml.etree.ElementTree.Element` simultaneously with replacing the +:class:`xml.etree.ElementTree.Element` simultaneously with replacing the ``attrib`` dict. .. diff --git a/Misc/NEWS.d/3.11.0a4.rst b/Misc/NEWS.d/3.11.0a4.rst index 3dd335929d655f..5abacd8473f394 100644 --- a/Misc/NEWS.d/3.11.0a4.rst +++ b/Misc/NEWS.d/3.11.0a4.rst @@ -510,7 +510,7 @@ Remove special-casing of ``__new__`` in :meth:`enum.Enum.__dir__`. Improve day constants in :mod:`calendar`. -Now all constants (`MONDAY` ... `SUNDAY`) are documented, tested, and added +Now all constants (``MONDAY`` ... ``SUNDAY``) are documented, tested, and added to ``__all__``. .. diff --git a/Misc/NEWS.d/3.11.0a5.rst b/Misc/NEWS.d/3.11.0a5.rst index 08d94e82ed8ccf..30a462e9bfdcbf 100644 --- a/Misc/NEWS.d/3.11.0a5.rst +++ b/Misc/NEWS.d/3.11.0a5.rst @@ -275,7 +275,7 @@ Do not use POSIX semaphores on NetBSD .. nonce: M9m8Qd .. section: Core and Builtins -Improve the exc:`TypeError` message for non-string second arguments passed +Improve the :exc:`TypeError` message for non-string second arguments passed to the built-in functions :func:`getattr` and :func:`hasattr`. Patch by Géry Ogam. @@ -673,7 +673,7 @@ file .. section: Tests Mocks can no longer be provided as the specs for other Mocks. As a result, -an already-mocked object cannot be passed to `mock.Mock()`. This can uncover +an already-mocked object cannot be passed to ``mock.Mock()``. This can uncover bugs in tests since these Mock-derived Mocks will always pass certain tests (e.g. isinstance) and builtin assert functions (e.g. 
assert_called_once_with) will unconditionally pass. diff --git a/Misc/NEWS.d/3.11.0a6.rst b/Misc/NEWS.d/3.11.0a6.rst index 974d025c631a45..2b50b7773492cb 100644 --- a/Misc/NEWS.d/3.11.0a6.rst +++ b/Misc/NEWS.d/3.11.0a6.rst @@ -1034,7 +1034,7 @@ if zlib uses the s390x hardware accelerator. Patch by Victor Stinner. .. nonce: jfciLG .. section: Build -Respect `--with-suffix` when building on case-insensitive file systems. +Respect ``--with-suffix`` when building on case-insensitive file systems. .. @@ -1151,7 +1151,7 @@ Make query dialogs on Windows start with a cursor in the entry box. .. nonce: FhiH5P .. section: IDLE -Apply IDLE syntax highlighting to `.pyi` files. Patch by Alex Waygood and +Apply IDLE syntax highlighting to ``.pyi`` files. Patch by Alex Waygood and Terry Jan Reedy. .. diff --git a/Misc/NEWS.d/3.11.0b1.rst b/Misc/NEWS.d/3.11.0b1.rst index a4cdda2cafdb43..2c30dc6e084bfb 100644 --- a/Misc/NEWS.d/3.11.0b1.rst +++ b/Misc/NEWS.d/3.11.0b1.rst @@ -437,7 +437,7 @@ cache when applicable. .. section: Core and Builtins Classes and functions that unconditionally declared their docstrings -ignoring the `--without-doc-strings` compilation flag no longer do so. +ignoring the ``--without-doc-strings`` compilation flag no longer do so. The classes affected are :class:`ctypes.UnionType`, :class:`pickle.PickleBuffer`, :class:`testcapi.RecursingInfinitelyError`, @@ -576,7 +576,7 @@ planned). Patch by Alex Waygood. .. section: Library Deprecate nested classes in enum definitions becoming members -- in 3.13 -they will be normal classes; add `member` and `nonmember` functions to allow +they will be normal classes; add ``member`` and ``nonmember`` functions to allow control over results now. .. @@ -785,7 +785,7 @@ avoid :exc:`BrokenPipeError` at garbage collection and at .. nonce: V0YveU .. section: Library -Add `datetime.UTC` alias for `datetime.timezone.utc`. +Add ``datetime.UTC`` alias for ``datetime.timezone.utc``. Patch by Kabir Kwatra. @@ -1657,9 +1657,9 @@ Convert :mod:`csv` to use Argument Clinic for :func:`csv.field_size_limit`, .. nonce: z2WhDQ .. section: Library -Raise an ArgumentError when the same subparser name is added twice to an -`argparse.ArgumentParser`. This is consistent with the (default) behavior -when the same option string is added twice to an ArgumentParser. +Raise an ``ArgumentError`` when the same subparser name is added twice to an +``argparse.ArgumentParser``. This is consistent with the (default) behavior +when the same option string is added twice to an ``ArgumentParser``. .. @@ -1712,7 +1712,7 @@ Aviv Palivoda and Erlend E. Aasland. .. nonce: kTjJLx .. section: Documentation -Add a new `gh` role to the documentation to link to GitHub issues. +Add a new ``gh`` role to the documentation to link to GitHub issues. .. diff --git a/Misc/NEWS.d/3.12.0a1.rst b/Misc/NEWS.d/3.12.0a1.rst index 81ef69093005e8..f192bf086ed259 100644 --- a/Misc/NEWS.d/3.12.0a1.rst +++ b/Misc/NEWS.d/3.12.0a1.rst @@ -1462,7 +1462,7 @@ expression, but there's no trailing brace. For example, f"{i=". .. nonce: Jf6gAj .. section: Core and Builtins -Cache the result of :c:func:`PyCode_GetCode` function to restore the O(1) +Cache the result of :c:func:`PyCode_GetCode` function to restore the *O*\ (1) lookup of the :attr:`~types.CodeType.co_code` attribute. .. 
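For readers skimming the NEWS hunks above, here is a minimal sketch (not part of the patch itself; it merely illustrates two of the 3.11 entries touched above, and assumes Python 3.11 or newer) of the ``datetime.UTC`` alias and the ``ArgumentError`` now raised for duplicate subparser names::

    import argparse
    from datetime import UTC, timezone

    # datetime.UTC is simply an alias for datetime.timezone.utc (added in 3.11).
    assert UTC is timezone.utc

    # Registering the same subparser name twice raises argparse.ArgumentError,
    # mirroring the existing behaviour for duplicate option strings.
    parser = argparse.ArgumentParser(prog="demo")
    subparsers = parser.add_subparsers()
    subparsers.add_parser("build")
    try:
        subparsers.add_parser("build")   # duplicate name
    except argparse.ArgumentError as exc:
        print("rejected duplicate subparser:", exc)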
diff --git a/Misc/NEWS.d/3.12.0a2.rst b/Misc/NEWS.d/3.12.0a2.rst index dbc743abe8a767..a9c5038fa489bb 100644 --- a/Misc/NEWS.d/3.12.0a2.rst +++ b/Misc/NEWS.d/3.12.0a2.rst @@ -584,8 +584,8 @@ Use the frame bound builtins when offering a name suggestion in .. nonce: qtm-9T .. section: Library -In :mod:`importlib._bootstrap`, enhance namespace package repr to ``. +In :mod:`importlib._bootstrap`, enhance namespace package repr to ````. .. diff --git a/Misc/NEWS.d/3.12.0a7.rst b/Misc/NEWS.d/3.12.0a7.rst index 1ef81747558857..f22050b0dc377b 100644 --- a/Misc/NEWS.d/3.12.0a7.rst +++ b/Misc/NEWS.d/3.12.0a7.rst @@ -429,7 +429,7 @@ an awaitable object. Patch by Kumar Aditya. Speed up setting or deleting mutable attributes on non-dataclass subclasses of frozen dataclasses. Due to the implementation of ``__setattr__`` and ``__delattr__`` for frozen dataclasses, this previously had a time -complexity of ``O(n)``. It now has a time complexity of ``O(1)``. +complexity of *O*\ (*n*). It now has a time complexity of *O*\ (1). .. diff --git a/Misc/NEWS.d/3.13.0a2.rst b/Misc/NEWS.d/3.13.0a2.rst index c1b1be523325e8..d4be4fb8a3d3ab 100644 --- a/Misc/NEWS.d/3.13.0a2.rst +++ b/Misc/NEWS.d/3.13.0a2.rst @@ -199,7 +199,7 @@ their debugging to ``PYTHON_UOPS`` and ``PYTHON_LLTRACE``. .. nonce: 11h6Mc .. section: Core and Builtins -Speed up :obj:`Traceback` object creation by lazily compute the line number. +Speed up :class:`Traceback` object creation by lazily compute the line number. Patch by Pablo Galindo .. diff --git a/Misc/NEWS.d/3.13.0a3.rst b/Misc/NEWS.d/3.13.0a3.rst new file mode 100644 index 00000000000000..95aa66603de7cb --- /dev/null +++ b/Misc/NEWS.d/3.13.0a3.rst @@ -0,0 +1,2414 @@ +.. date: 2024-01-02-19-52-23 +.. gh-issue: 113659 +.. nonce: DkmnQc +.. release date: 2024-01-17 +.. section: Security + +Skip ``.pth`` files with names starting with a dot or hidden file attribute. + +.. + +.. date: 2023-12-06-14-06-59 +.. gh-issue: 112302 +.. nonce: 3bl20f +.. section: Security + +Created a Software Bill-of-Materials document and tooling for tracking +dependencies. + +.. + +.. date: 2024-01-11-16-54-55 +.. gh-issue: 107901 +.. nonce: Td3JPI +.. section: Core and Builtins + +Compiler duplicates basic blocks that have an eval breaker check, no line +number, and multiple predecessors. + +.. + +.. date: 2024-01-11-14-03-31 +.. gh-issue: 107901 +.. nonce: U65IyC +.. section: Core and Builtins + +A jump leaving an exception handler back to normal code no longer checks the +eval breaker. + +.. + +.. date: 2024-01-11-01-28-25 +.. gh-issue: 113655 +.. nonce: Mfioxp +.. section: Core and Builtins + +Set the C recursion limit to 4000 on Windows, and 10000 on Linux/OSX. This +seems to be near the sweet spot to maintain safety, but not compromise +backwards compatibility. + +.. + +.. date: 2024-01-09-23-01-00 +.. gh-issue: 113710 +.. nonce: pe3flY +.. section: Core and Builtins + +Add typed stack effects to the interpreter DSL, along with various +instruction annotations. + +.. + +.. date: 2024-01-08-14-34-02 +.. gh-issue: 77046 +.. nonce: sDUh2d +.. section: Core and Builtins + +On Windows, file descriptors wrapping Windows handles are now created non +inheritable by default (:pep:`446`). Patch by Zackery Spytz and Victor +Stinner. + +.. + +.. date: 2024-01-08-05-36-59 +.. gh-issue: 113853 +.. nonce: lm-6_a +.. section: Core and Builtins + +Guarantee that all executors make progress. This then guarantees that tier 2 +execution always makes progress. + +.. + +.. date: 2024-01-05-21-28-48 +.. gh-issue: 113753 +.. 
nonce: 2HNiuq +.. section: Core and Builtins + +Fix an issue where the finalizer of ``PyAsyncGenASend`` objects might not be +called if they were allocated from a free list. + +.. + +.. date: 2024-01-05-00-49-14 +.. gh-issue: 107901 +.. nonce: 6JRrb6 +.. section: Core and Builtins + +Compiler changed so that synthetic jumps which are not at loop end no longer +check the eval breaker. + +.. + +.. date: 2024-01-04-17-15-30 +.. gh-issue: 113703 +.. nonce: Zsk0pY +.. section: Core and Builtins + +Fix a regression in the :mod:`codeop` module that was causing it to +incorrectly identify incomplete f-strings. Patch by Pablo Galindo + +.. + +.. date: 2024-01-03-12-19-37 +.. gh-issue: 89811 +.. nonce: cZOj6d +.. section: Core and Builtins + +Check for a valid ``tp_version_tag`` before performing bytecode +specializations that rely on this value being usable. + +.. + +.. date: 2024-01-02-17-22-57 +.. gh-issue: 111488 +.. nonce: EJH3Oh +.. section: Core and Builtins + +Changed error message in case of no 'in' keyword after 'for' in list +comprehensions + +.. + +.. date: 2024-01-02-11-14-29 +.. gh-issue: 113657 +.. nonce: CQo9vF +.. section: Core and Builtins + +Fix an issue that caused important instruction pointer updates to be +optimized out of tier two traces. + +.. + +.. date: 2024-01-01-23-57-24 +.. gh-issue: 113603 +.. nonce: ySwovr +.. section: Core and Builtins + +Fixed bug where a redundant NOP is not removed, causing an assertion to fail +in the compiler in debug mode. + +.. + +.. date: 2024-01-01-00-07-02 +.. gh-issue: 113602 +.. nonce: cWuTzk +.. section: Core and Builtins + +Fix an error that was causing the parser to try to overwrite existing errors +and crashing in the process. Patch by Pablo Galindo + +.. + +.. date: 2023-12-31-07-46-01 +.. gh-issue: 113486 +.. nonce: uki19C +.. section: Core and Builtins + +No longer issue spurious ``PY_UNWIND`` events for optimized calls to +classes. + +.. + +.. date: 2023-12-20-18-27-11 +.. gh-issue: 113297 +.. nonce: BZyAI_ +.. section: Core and Builtins + +Fix segfault in the compiler on with statement with 19 context managers. + +.. + +.. date: 2023-12-20-08-54-54 +.. gh-issue: 113212 +.. nonce: 62AUlw +.. section: Core and Builtins + +Improve :py:class:`super` error messages. + +.. + +.. date: 2023-12-19-22-03-43 +.. gh-issue: 111375 +.. nonce: M9vuA6 +.. section: Core and Builtins + +Only use ``NULL`` in the exception stack to indicate an exception was +handled. Patch by Carey Metcalfe. + +.. + +.. date: 2023-12-15-16-26-01 +.. gh-issue: 112215 +.. nonce: xJS6_6 +.. section: Core and Builtins + +Increase the C recursion limit by a factor of 3 for non-debug builds, except +for webassembly and s390 platforms which are unchanged. This mitigates some +regressions in 3.12 with deep recursion mixing builtin (C) and Python code. + +.. + +.. date: 2023-12-14-20-08-35 +.. gh-issue: 113054 +.. nonce: e20CtM +.. section: Core and Builtins + +Fixed bug where a redundant NOP is not removed, causing an assertion to fail +in the compiler in debug mode. + +.. + +.. date: 2023-12-13-11-45-53 +.. gh-issue: 106905 +.. nonce: 5dslTN +.. section: Core and Builtins + +Use per AST-parser state rather than global state to track recursion depth +within the AST parser to prevent potential race condition due to +simultaneous parsing. + +The issue primarily showed up in 3.11 by multithreaded users of +:func:`ast.parse`. In 3.12 a change to when garbage collection can be +triggered prevented the race condition from occurring. + +.. + +.. date: 2023-12-12-04-53-19 +.. 
gh-issue: 108866 +.. nonce: xbJ-9a +.. section: Core and Builtins + +Change the API and contract of ``_PyExecutorObject`` to return the +next_instr pointer, instead of the frame, and to always execute at least one +instruction. + +.. + +.. date: 2023-12-11-19-53-32 +.. gh-issue: 90350 +.. nonce: -FQy3E +.. section: Core and Builtins + +Optimize builtin functions :func:`min` and :func:`max`. + +.. + +.. date: 2023-12-11-00-50-00 +.. gh-issue: 112943 +.. nonce: RHNZie +.. section: Core and Builtins + +Correctly compute end column offsets for multiline tokens in the +:mod:`tokenize` module. Patch by Pablo Galindo + +.. + +.. date: 2023-12-07-13-19-55 +.. gh-issue: 112125 +.. nonce: 4ADN7i +.. section: Core and Builtins + +Fix ``None.__ne__(None)`` returning ``NotImplemented`` instead of ``False``. + +.. + +.. date: 2023-12-07-12-00-04 +.. gh-issue: 74616 +.. nonce: kgTGVb +.. section: Core and Builtins + +:func:`input` now raises a ValueError when output on the terminal if the +prompt contains embedded null characters instead of silently truncating it. + +.. + +.. date: 2023-12-05-20-41-58 +.. gh-issue: 112716 +.. nonce: hOcx0Y +.. section: Core and Builtins + +Fix SystemError in the ``import`` statement and in ``__reduce__()`` methods +of builtin types when ``__builtins__`` is not a dict. + +.. + +.. date: 2023-12-04-23-09-07 +.. gh-issue: 112730 +.. nonce: BXHlFa +.. section: Core and Builtins + +Use color to highlight error locations in tracebacks. Patch by Pablo Galindo + +.. + +.. date: 2023-12-03-19-34-51 +.. gh-issue: 112625 +.. nonce: QWTlwS +.. section: Core and Builtins + +Fixes a bug where a bytearray object could be cleared while iterating over +an argument in the ``bytearray.join()`` method that could result in reading +memory after it was freed. + +.. + +.. date: 2023-12-03-15-29-53 +.. gh-issue: 112660 +.. nonce: gldBvh +.. section: Core and Builtins + +Do not clear unexpected errors during formatting error messages for +ImportError and AttributeError for modules. + +.. + +.. date: 2023-12-01-19-02-21 +.. gh-issue: 105967 +.. nonce: Puq5Cn +.. section: Core and Builtins + +Work around a bug in Apple's macOS platform zlib library where +:func:`zlib.crc32` and :func:`binascii.crc32` could produce incorrect +results on multi-gigabyte inputs. Including when using :mod:`zipfile` on +zips containing large data. + +.. + +.. date: 2023-12-01-08-16-10 +.. gh-issue: 95754 +.. nonce: ae4gwy +.. section: Core and Builtins + +Provide a better error message when accessing invalid attributes on +partially initialized modules. The origin of the module being accessed is +now included in the message to help with the common issue of shadowing other +modules. + +.. + +.. date: 2023-11-27-18-55-30 +.. gh-issue: 112217 +.. nonce: SwFLMj +.. section: Core and Builtins + +Add a check for the type of ``__cause__`` returned from calling the type ``T`` +in ``raise from T``. + +.. + +.. date: 2023-11-26-21-30-11 +.. gh-issue: 111058 +.. nonce: q4DqDY +.. section: Core and Builtins + +Change ``coro.cr_frame``/``gen.gi_frame`` to return ``None`` after the +coroutine/generator has been closed. This fixes a bug where +:func:`~inspect.getcoroutinestate` and :func:`~inspect.getgeneratorstate` +return the wrong state for a closed coroutine/generator. + +.. + +.. date: 2023-11-25-22-58-49 +.. gh-issue: 112388 +.. nonce: MU3cIM +.. section: Core and Builtins + +Fix an error that was causing the parser to try to overwrite tokenizer +errors. Patch by Pablo Galindo + +.. + +.. date: 2023-11-25-22-39-44 +.. gh-issue: 112387 +.. 
nonce: AbBq5W +.. section: Core and Builtins + +Fix error positions for decoded strings with backwards tokenize errors. +Patch by Pablo Galindo + +.. + +.. date: 2023-11-25-20-36-38 +.. gh-issue: 99606 +.. nonce: fDY5hK +.. section: Core and Builtins + +Make code generated for an empty f-string identical to the code of an empty +normal string. + +.. + +.. date: 2023-11-24-14-10-57 +.. gh-issue: 112367 +.. nonce: 9z1IDp +.. section: Core and Builtins + +Avoid undefined behaviour when using the perf trampolines by not freeing the +code arenas until shutdown. Patch by Pablo Galindo + +.. + +.. date: 2023-11-22-13-17-54 +.. gh-issue: 112320 +.. nonce: EddM51 +.. section: Core and Builtins + +The Tier 2 translator now tracks the confidence level for staying "on trace" +(i.e. not exiting back to the Tier 1 interpreter) for branch instructions +based on the number of bits set in the branch "counter". Trace translation +ends when the confidence drops below 1/3rd. + +.. + +.. date: 2023-09-21-11-54-28 +.. gh-issue: 109598 +.. nonce: CRidSy +.. section: Core and Builtins + +:c:func:`PyComplex_RealAsDouble`/:c:func:`PyComplex_ImagAsDouble` now try +to convert an object to a :class:`complex` instance using its +``__complex__()`` method before falling back to the ``__float__()`` method. +Patch by Sergey B Kirpichev. + +.. + +.. date: 2022-07-07-05-37-53 +.. gh-issue: 94606 +.. nonce: hojJ54 +.. section: Core and Builtins + +Fix UnicodeEncodeError when :func:`email.message.get_payload` reads a +message with a Unicode surrogate character and the message content is not +well-formed for surrogateescape encoding. Patch by Sidney Markowitz. + +.. + +.. bpo: 21861 +.. date: 2022-01-23-18-00-10 +.. nonce: N8E1zw +.. section: Core and Builtins + +Use the object's actual class name in :meth:`_io.FileIO.__repr__`, +:meth:`_io._WindowsConsoleIO` and :meth:`_io.TextIOWrapper.__repr__`, to +make these methods subclass friendly. + +.. + +.. bpo: 45369 +.. date: 2021-10-05-05-00-16 +.. nonce: tluk_X +.. section: Core and Builtins + +Remove LibreSSL workarounds as per :pep:`644`. + +.. + +.. bpo: 34392 +.. date: 2018-08-13-13-25-15 +.. nonce: 9kIlMF +.. section: Core and Builtins + +Added :func:`sys._is_interned`. + +.. + +.. date: 2024-01-15-12-12-54 +.. gh-issue: 114077 +.. nonce: KcVnfj +.. section: Library + +Fix possible :exc:`OverflowError` in :meth:`socket.socket.sendfile` when +passing a *count* larger than 2 GiB on a 32-bit platform. + +.. + +.. date: 2024-01-13-14-20-31 +.. gh-issue: 111803 +.. nonce: llpLAw +.. section: Library + +:mod:`plistlib` now supports loading more deeply nested lists in binary +format. + +.. + +.. date: 2024-01-13-11-34-29 +.. gh-issue: 114014 +.. nonce: WRHifN +.. section: Library + +Fixed a bug in :class:`fractions.Fraction` where an invalid string using +``d`` in the decimals part creates a different error compared to other +invalid letters/characters. Patch by Jeremiah Gabriel Pascual. + +.. + +.. date: 2024-01-11-22-22-51 +.. gh-issue: 108364 +.. nonce: QH7C-1 +.. section: Library + +:meth:`sqlite3.Connection.iterdump` now ensures that foreign key support is +disabled before dumping the database schema, if there is any foreign key +violation. Patch by Erlend E. Aasland and Mariusz Felisiak. + +.. + +.. date: 2024-01-11-16-58-10 +.. gh-issue: 113971 +.. nonce: skJZ4g +.. 
section: Library + +The previously protected ``._compresslevel`` +attribute of :class:`zipfile.ZipInfo` has been made public as ``.compress_level``, with the old +``_compresslevel`` name remaining available as a property to retain +compatibility. + +.. + +.. date: 2024-01-10-12-03-38 +.. gh-issue: 113877 +.. nonce: RxKlrQ +.. section: Library + +Fix :mod:`tkinter` method ``winfo_pathname()`` on 64-bit Windows. + +.. + +.. date: 2024-01-09-18-07-08 +.. gh-issue: 113868 +.. nonce: DlZG2r +.. section: Library + +Added :data:`mmap.MAP_NORESERVE`, :data:`mmap.MAP_NOEXTEND`, +:data:`mmap.MAP_HASSEMAPHORE`, :data:`mmap.MAP_NOCACHE`, +:data:`mmap.MAP_JIT`, :data:`mmap.MAP_RESILIENT_CODESIGN`, +:data:`mmap.MAP_RESILIENT_MEDIA`, :data:`mmap.MAP_32BIT`, +:data:`mmap.MAP_TRANSLATED_ALLOW_EXECUTE`, :data:`mmap.MAP_UNIX03` and +:data:`mmap.MAP_TPRO`. All of them are ``mmap(2)`` flags on macOS. + +.. + +.. date: 2024-01-09-12-19-55 +.. gh-issue: 113848 +.. nonce: kXoCy0 +.. section: Library + +:func:`asyncio.TaskGroup()` and :func:`asyncio.timeout()` context managers +now handle :exc:`~asyncio.CancelledError` subclasses as well as exact +:exc:`!CancelledError`. + +.. + +.. date: 2024-01-09-08-59-43 +.. gh-issue: 113661 +.. nonce: asvXSx +.. section: Library + +unittest runner: Don't exit 5 if tests were skipped. The intention of +exiting 5 was to detect issues where the test suite wasn't discovered at +all. If we skipped tests, it was correctly discovered. + +.. + +.. date: 2024-01-08-19-38-42 +.. gh-issue: 96037 +.. nonce: Yr2Y1C +.. section: Library + +Insert :exc:`TimeoutError` in the context of the exception that was raised +during exiting an expired :func:`asyncio.timeout` block. + +.. + +.. date: 2024-01-08-14-57-09 +.. gh-issue: 113781 +.. nonce: IoTnwi +.. section: Library + +Silence unraisable AttributeError when warnings are emitted during Python +finalization. + +.. + +.. date: 2024-01-07-23-31-44 +.. gh-issue: 113238 +.. nonce: wFWBfW +.. section: Library + +Add ``Anchor`` to ``importlib.resources`` (in order for the code to comply +with the documentation). + +.. + +.. date: 2024-01-07-13-36-03 +.. gh-issue: 111693 +.. nonce: xN2LuL +.. section: Library + +:func:`asyncio.Condition.wait()` now re-raises the same +:exc:`CancelledError` instance that may have caused it to be interrupted. +Fixed a race condition in :func:`asyncio.Semaphore.acquire` when interrupted +with a :exc:`CancelledError`. + +.. + +.. date: 2024-01-07-11-45-56 +.. gh-issue: 113791 +.. nonce: XF5xSW +.. section: Library + +Add ``CLOCK_MONOTONIC_RAW_APPROX`` and ``CLOCK_UPTIME_RAW_APPROX`` to +:mod:`time` on macOS. These are clocks available on macOS 10.12 or later. + +.. + +.. date: 2024-01-07-00-56-41 +.. gh-issue: 112932 +.. nonce: OfhUu7 +.. section: Library + +Restore the ability for :mod:`zipfile` to ``extractall`` from zip files with +a "/" directory entry in them as is commonly added to zips by some wiki or +bug tracker data exporters. + +.. + +.. date: 2024-01-05-21-52-59 +.. gh-issue: 113568 +.. nonce: _0FkpZ +.. section: Library + +Raise deprecation warnings from :class:`pathlib.PurePath` and not its +private base class ``PurePathBase``. + +.. + +.. date: 2024-01-05-12-42-07 +.. gh-issue: 113594 +.. nonce: 4t8HiR +.. section: Library + +Fix :exc:`UnicodeEncodeError` in :mod:`email` when re-folding lines that +contain an unknown-8bit encoded part followed by a non-unknown-8bit encoded part. + +.. + +.. date: 2024-01-03-14-19-26 +.. gh-issue: 113538 +.. nonce: ahuBCo +.. 
section: Library + +In :meth:`asyncio.StreamReaderProtocol.connection_made`, there is a callback +that logs an error if the task wrapping the "connected callback" fails. This +callback would itself fail if the task was cancelled. Prevent this by +checking whether the task was cancelled first. If so, close the transport +but don't log an error. + +.. + +.. date: 2024-01-02-12-41-59 +.. gh-issue: 113626 +.. nonce: i1PPY_ +.. section: Library + +Add support for the *allow_code* argument in the :mod:`marshal` module. +Passing ``allow_code=False`` prevents serialization and de-serialization of +code objects which is incompatible between Python versions. + +.. + +.. date: 2024-01-01-13-26-02 +.. gh-issue: 85567 +.. nonce: K4U15m +.. section: Library + +Fix resource warnings for unclosed files in :mod:`pickle` and +:mod:`pickletools` command line interfaces. + +.. + +.. date: 2023-12-30-20-30-05 +.. gh-issue: 113537 +.. nonce: v1W5_X +.. section: Library + +Support loading ``str`` in :func:`plistlib.loads`. + +.. + +.. date: 2023-12-29-22-29-34 +.. gh-issue: 89850 +.. nonce: KnxiZA +.. section: Library + +Add default implementations of :meth:`pickle.Pickler.persistent_id` and +:meth:`pickle.Unpickler.persistent_load` methods in the C implementation. +Calling ``super().persistent_id()`` and ``super().persistent_load()`` in +subclasses of the C implementation of :class:`pickle.Pickler` and +:class:`pickle.Unpickler` classes no longer causes infinite recursion. + +.. + +.. date: 2023-12-29-17-57-45 +.. gh-issue: 113569 +.. nonce: qcRCEI +.. section: Library + +Indicate if there were no actual calls in unittest +:meth:`~unittest.mock.Mock.assert_has_calls` failure. + +.. + +.. date: 2023-12-29-17-46-06 +.. gh-issue: 101225 +.. nonce: QaEyxF +.. section: Library + +Increase the backlog for :class:`multiprocessing.connection.Listener` +objects created by :mod:`multiprocessing.manager` and +:mod:`multiprocessing.resource_sharer` to significantly reduce the risk of +getting a connection refused error when creating a +:class:`multiprocessing.connection.Connection` to them. + +.. + +.. date: 2023-12-29-17-30-49 +.. gh-issue: 113568 +.. nonce: UpWNAI +.. section: Library + +Raise audit events from :class:`pathlib.Path` and not its private base class +``PathBase``. + +.. + +.. date: 2023-12-28-14-36-20 +.. gh-issue: 113543 +.. nonce: 2iWkOR +.. section: Library + +Make sure that ``webbrowser.MacOSXOSAScript`` sends the ``webbrowser.open`` +audit event. + +.. + +.. date: 2023-12-23-16-51-17 +.. gh-issue: 113028 +.. nonce: 3Jmdoj +.. section: Library + +When a second reference to a string appears in the input to :mod:`pickle`, +and the Python implementation is in use, we are guaranteed that a single +copy gets pickled and a single object is shared when reloaded. Previously, +in protocol 0, when a string contained certain characters (e.g. newline) it +resulted in duplicate objects. + +.. + +.. date: 2023-12-23-16-10-07 +.. gh-issue: 113421 +.. nonce: w7vs08 +.. section: Library + +Fix multiprocessing logger for ``%(filename)s``. + +.. + +.. date: 2023-12-23-13-10-42 +.. gh-issue: 111784 +.. nonce: Nb4L1j +.. section: Library + +Fix segfaults in the ``_elementtree`` module. Fix the first segfault during +deallocation of ``_elementtree.XMLParser`` instances by keeping a strong +reference to the ``pyexpat`` module in module state for the capsule lifetime.
Fix the +second segfault, which happens in the same deallocation process, by keeping +a strong reference to the ``_elementtree`` module in the ``XMLParser`` structure for the +``_elementtree`` module lifetime. + +.. + +.. date: 2023-12-22-20-49-52 +.. gh-issue: 113407 +.. nonce: C_O13_ +.. section: Library + +Fix import of :mod:`unittest.mock` when CPython is built without docstrings. + +.. + +.. date: 2023-12-22-11-30-57 +.. gh-issue: 113320 +.. nonce: Vp5suS +.. section: Library + +Fix regression in Python 3.12 where :class:`~typing.Protocol` classes that +were not marked as :func:`runtime-checkable ` +would be unnecessarily introspected, potentially causing exceptions to be +raised if the protocol had problematic members. Patch by Alex Waygood. + +.. + +.. date: 2023-12-21-23-47-42 +.. gh-issue: 53502 +.. nonce: dercJI +.. section: Library + +Add a new option ``aware_datetime`` in :mod:`plistlib` to load or dump +aware datetime objects. + +.. + +.. date: 2023-12-21-14-55-06 +.. gh-issue: 113358 +.. nonce: nRkiSL +.. section: Library + +Fix rendering tracebacks for exceptions with a broken ``__getattr__``. + +.. + +.. date: 2023-12-20-21-18-51 +.. gh-issue: 113214 +.. nonce: JcV9Mn +.. section: Library + +Fix an ``AttributeError`` during asyncio SSL protocol aborts in SSL-over-SSL +scenarios. + +.. + +.. date: 2023-12-18-09-47-54 +.. gh-issue: 113246 +.. nonce: em930H +.. section: Library + +Update bundled pip to 23.3.2. + +.. + +.. date: 2023-12-17-13-56-30 +.. gh-issue: 87264 +.. nonce: RgfHCv +.. section: Library + +Fixed the tarfile ``list()`` method to show the file type. + +.. + +.. date: 2023-12-17-10-22-55 +.. gh-issue: 112182 +.. nonce: jLWGlr +.. section: Library + +:meth:`asyncio.futures.Future.set_exception()` now transforms +:exc:`StopIteration` into :exc:`RuntimeError` instead of hanging or other +misbehavior. Patch contributed by Jamie Phan. + +.. + +.. date: 2023-12-17-04-43-57 +.. gh-issue: 113225 +.. nonce: dhxhiZ +.. section: Library + +Speed up :meth:`pathlib.Path.glob` by using :attr:`os.DirEntry.path` where +possible. + +.. + +.. date: 2023-12-16-23-56-42 +.. gh-issue: 113149 +.. nonce: 7LWgTS +.. section: Library + +Improve error message when a JSON array or object contains a trailing comma. +Patch by Carson Radtke. + +.. + +.. date: 2023-12-16-10-58-34 +.. gh-issue: 113117 +.. nonce: 0zF7bH +.. section: Library + +The :mod:`subprocess` module can now use the :func:`os.posix_spawn` function +with ``close_fds=True`` on platforms where +``posix_spawn_file_actions_addclosefrom_np`` is available. Patch by Jakub +Kulik. + +.. + +.. date: 2023-12-16-01-10-47 +.. gh-issue: 113199 +.. nonce: oDjnjL +.. section: Library + +Make ``http.client.HTTPResponse.read1`` and +``http.client.HTTPResponse.readline`` close IO after reading all data when +content length is known. Patch by Illia Volochii. + +.. + +.. date: 2023-12-15-21-33-42 +.. gh-issue: 113191 +.. nonce: Il155b +.. section: Library + +Add support of :func:`os.fchmod` and a file descriptor in :func:`os.chmod` +on Windows. + +.. + +.. date: 2023-12-15-20-29-49 +.. gh-issue: 113188 +.. nonce: AvoraB +.. section: Library + +Fix :func:`shutil.copymode` and :func:`shutil.copystat` on Windows. +Previously they worked differently if *dst* is a symbolic link: they modified +the permission bits of *dst* itself rather than the file it points to if +*follow_symlinks* is true or *src* is not a symbolic link, and did not +modify the permission bits if *follow_symlinks* is false and *src* is a +symbolic link. + +.. + +.. date: 2023-12-15-18-13-59 +.. gh-issue: 113119 +.. 
nonce: al-569 +.. section: Library + +:func:`os.posix_spawn` now accepts ``env=None``, which makes the newly +spawned process use the current process environment. Patch by Jakub Kulik. + +.. + +.. date: 2023-12-15-18-10-26 +.. gh-issue: 113202 +.. nonce: xv_Ww8 +.. section: Library + +Add a ``strict`` option to ``batched()`` in the ``itertools`` module. + +.. + +.. date: 2023-12-15-12-35-28 +.. gh-issue: 61648 +.. nonce: G-4pz0 +.. section: Library + +Detect line numbers of properties in doctests. + +.. + +.. date: 2023-12-15-09-51-41 +.. gh-issue: 113175 +.. nonce: RHsNwE +.. section: Library + +Sync with importlib_metadata 7.0, including improved type annotations, fixed +issue with symlinked packages in ``package_distributions``, added +``EntryPoints.__repr__``, introduced the ``diagnose`` script, added +``Distribution.origin`` property, and removed deprecated ``EntryPoint`` +access by numeric index (tuple behavior). + +.. + +.. date: 2023-12-13-17-08-21 +.. gh-issue: 59616 +.. nonce: JNlWSs +.. section: Library + +Add support of :func:`os.lchmod` and the *follow_symlinks* argument in +:func:`os.chmod` on Windows. Note that the default value of +*follow_symlinks* in :func:`!os.lchmod` is ``False`` on Windows. + +.. + +.. date: 2023-12-12-20-15-57 +.. gh-issue: 112559 +.. nonce: IgXkje +.. section: Library + +:func:`signal.signal` and :func:`signal.getsignal` no longer call ``repr`` +on callable handlers. :func:`asyncio.run` and :meth:`asyncio.Runner.run` no +longer call ``repr`` on the task results. Patch by Yilei Yang. + +.. + +.. date: 2023-12-12-16-32-55 +.. gh-issue: 112962 +.. nonce: ZZWXZn +.. section: Library + +:mod:`dis` module functions add cache information to the +:class:`~dis.Instruction` instance rather than creating fake +:class:`~dis.Instruction` instances to represent the cache entries. + +.. + +.. date: 2023-12-12-05-48-17 +.. gh-issue: 112989 +.. nonce: ZAa_eq +.. section: Library + +Reduce overhead to connect sockets with :mod:`asyncio` SelectorEventLoop. + +.. + +.. date: 2023-12-11-16-13-15 +.. gh-issue: 112970 +.. nonce: 87jmKP +.. section: Library + +Use :c:func:`!closefrom` on Linux where available (e.g. glibc-2.34), rather +than only FreeBSD. + +.. + +.. date: 2023-12-11-14-12-46 +.. gh-issue: 110190 +.. nonce: e0iEUa +.. section: Library + +Fix ctypes structs with array on PPC64LE platform by setting +``MAX_STRUCT_SIZE`` to 64 in stgdict. Patch by Diego Russo. + +.. + +.. date: 2023-12-08-11-17-17 +.. gh-issue: 112540 +.. nonce: Pm5egX +.. section: Library + +The ``statistics.geometric_mean()`` function now returns zero for datasets +containing a zero. Formerly, it would raise an exception. + +.. + +.. date: 2023-12-07-16-55-41 +.. gh-issue: 87286 +.. nonce: MILC9_ +.. section: Library + +Added :const:`LOG_FTP`, :const:`LOG_NETINFO`, :const:`LOG_REMOTEAUTH`, +:const:`LOG_INSTALL`, :const:`LOG_RAS`, and :const:`LOG_LAUNCHD` to the +:mod:`syslog` module, all of them constants used on macOS. + +.. + +.. date: 2023-12-06-16-01-33 +.. gh-issue: 112800 +.. nonce: TNsGJ- +.. section: Library + +Fix :mod:`asyncio` ``SubprocessTransport.close()`` not to throw +``PermissionError`` when used with setuid executables. + +.. + +.. date: 2023-12-06-14-06-14 +.. gh-issue: 51944 +.. nonce: -5qq_L +.. section: Library + +Add the following constants to the :mod:`termios` module.
These values are +present in macOS system headers: ``ALTWERASE``, ``B14400``, ``B28800``, +``B7200``, ``B76800``, ``CCAR_OFLOW``, ``CCTS_OFLOW``, ``CDSR_OFLOW``, +``CDTR_IFLOW``, ``CIGNORE``, ``CRTS_IFLOW``, ``EXTPROC``, ``IUTF8``, +``MDMBUF``, ``NL2``, ``NL3``, ``NOKERNINFO``, ``ONOEOT``, ``OXTABS``, +``VDSUSP``, ``VSTATUS``. + +.. + +.. date: 2023-12-05-18-57-53 +.. gh-issue: 79325 +.. nonce: P2vMVK +.. section: Library + +Fix an infinite recursion error in :func:`tempfile.TemporaryDirectory` +cleanup on Windows. + +.. + +.. date: 2023-12-05-16-20-40 +.. gh-issue: 94692 +.. nonce: -e5C3c +.. section: Library + +:func:`shutil.rmtree` now only catches OSError exceptions. Previously a +symlink attack resistant version of ``shutil.rmtree()`` could ignore or pass +to the error handler arbitrary exception when invalid arguments were +provided. + +.. + +.. date: 2023-12-05-01-19-28 +.. gh-issue: 112736 +.. nonce: rdHDrU +.. section: Library + +The use of del-safe symbols in ``subprocess`` was refactored to allow for +use in cross-platform build environments. + +.. + +.. date: 2023-12-04-21-30-34 +.. gh-issue: 112727 +.. nonce: jpgNRB +.. section: Library + +Speed up :meth:`pathlib.Path.absolute`. Patch by Barney Gale. + +.. + +.. date: 2023-12-04-16-45-11 +.. gh-issue: 74690 +.. nonce: pQYP5U +.. section: Library + +Speedup :func:`issubclass` checks against simple :func:`runtime-checkable +protocols ` by around 6%. Patch by Alex Waygood. + +.. + +.. date: 2023-12-04-14-05-24 +.. gh-issue: 74690 +.. nonce: eODKRm +.. section: Library + +Speedup :func:`isinstance` checks by roughly 20% for +:func:`runtime-checkable protocols ` that only +have one callable member. Speedup :func:`issubclass` checks for these +protocols by roughly 10%. Patch by Alex Waygood. + +.. + +.. date: 2023-12-03-12-41-48 +.. gh-issue: 112645 +.. nonce: blMsKf +.. section: Library + +Remove deprecation error on passing ``onerror`` to :func:`shutil.rmtree`. + +.. + +.. date: 2023-12-03-11-15-53 +.. gh-issue: 112640 +.. nonce: -FVwP7 +.. section: Library + +Add ``kwdefaults`` parameter to :data:`types.FunctionType` to set default +keyword argument values. + +.. + +.. date: 2023-12-03-01-01-52 +.. gh-issue: 112622 +.. nonce: 1Z8cpx +.. section: Library + +Ensure ``name`` parameter is passed to event loop in +:func:`asyncio.create_task`. + +.. + +.. date: 2023-12-02-12-55-17 +.. gh-issue: 112618 +.. nonce: 7_FT8- +.. section: Library + +Fix a caching bug relating to :data:`typing.Annotated`. ``Annotated[str, +True]`` is no longer identical to ``Annotated[str, 1]``. + +.. + +.. date: 2023-12-01-21-05-46 +.. gh-issue: 112334 +.. nonce: DmNXKh +.. section: Library + +Fixed a performance regression in 3.12's :mod:`subprocess` on Linux where it +would no longer use the fast-path ``vfork()`` system call when it could have +due to a logic bug, instead falling back to the safe but slower ``fork()``. + +Also fixed a second 3.12.0 potential security bug. If a value of +``extra_groups=[]`` was passed to :mod:`subprocess.Popen` or related APIs, +the underlying ``setgroups(0, NULL)`` system call to clear the groups list +would not be made in the child process prior to ``exec()``. + +This was identified via code inspection in the process of fixing the first +bug. + +.. + +.. date: 2023-12-01-18-05-09 +.. gh-issue: 110190 +.. nonce: 5bf-c9 +.. section: Library + +Fix ctypes structs with array on Arm platform by setting ``MAX_STRUCT_SIZE`` +to 32 in stgdict. Patch by Diego Russo. + +.. + +.. date: 2023-12-01-16-09-59 +.. gh-issue: 81194 +.. 
nonce: FFad1c +.. section: Library + +Fix a crash in :func:`socket.if_indextoname` with specific value (UINT_MAX). +Fix an integer overflow in :func:`socket.if_indextoname` on 64-bit +non-Windows platforms. + +.. + +.. date: 2023-12-01-08-28-09 +.. gh-issue: 112578 +.. nonce: bfNbfi +.. section: Library + +Fix a spurious :exc:`RuntimeWarning` when executing the :mod:`zipfile` +module. + +.. + +.. date: 2023-11-29-10-51-41 +.. gh-issue: 112516 +.. nonce: rFKUKN +.. section: Library + +Update the bundled copy of pip to version 23.3.1. + +.. + +.. date: 2023-11-29-02-26-32 +.. gh-issue: 112510 +.. nonce: j-zXGc +.. section: Library + +Add :data:`readline.backend` for the backend readline uses (``editline`` or +``readline``) + +.. + +.. date: 2023-11-28-20-47-39 +.. gh-issue: 112328 +.. nonce: Z2AxEY +.. section: Library + +[Enum] Make ``EnumDict``, ``EnumDict.member_names``, +``EnumType._add_alias_`` and ``EnumType._add_value_alias_`` public. + +.. + +.. date: 2023-11-28-20-01-33 +.. gh-issue: 112509 +.. nonce: QtoKed +.. section: Library + +Fix edge cases that could cause a key to be present in both the +``__required_keys__`` and ``__optional_keys__`` attributes of a +:class:`typing.TypedDict`. Patch by Jelle Zijlstra. + +.. + +.. date: 2023-11-28-02-39-30 +.. gh-issue: 101336 +.. nonce: ya433z +.. section: Library + +Add ``keep_alive`` keyword parameter for +:meth:`AbstractEventLoop.create_server` and +:meth:`BaseEventLoop.create_server`. + +.. + +.. date: 2023-11-27-12-41-23 +.. gh-issue: 63284 +.. nonce: q2Qi9q +.. section: Library + +Added support for TLS-PSK (pre-shared key) mode to the :mod:`ssl` module. + +.. + +.. date: 2023-11-26-13-44-19 +.. gh-issue: 112414 +.. nonce: kx2E7S +.. section: Library + +Fix regression in Python 3.12 where calling :func:`repr` on a module that +had been imported using a custom :term:`loader` could fail with +:exc:`AttributeError`. Patch by Alex Waygood. + +.. + +.. date: 2023-11-26-13-26-56 +.. gh-issue: 112358 +.. nonce: smhaeZ +.. section: Library + +Revert change to :class:`struct.Struct` initialization that broke some cases +of subclassing. + +.. + +.. date: 2023-11-25-20-29-28 +.. gh-issue: 112405 +.. nonce: cOtzxC +.. section: Library + +Optimize :meth:`pathlib.PurePath.relative_to`. Patch by Alex Waygood. + +.. + +.. date: 2023-11-24-21-00-24 +.. gh-issue: 94722 +.. nonce: GMIQIn +.. section: Library + +Fix bug where comparison between instances of :class:`~doctest.DocTest` +fails if one of them has ``None`` as its lineno. + +.. + +.. date: 2023-11-24-09-27-01 +.. gh-issue: 112361 +.. nonce: kYtnHW +.. section: Library + +Speed up a small handful of :mod:`pathlib` methods by removing some +temporary objects. + +.. + +.. date: 2023-11-23-17-25-27 +.. gh-issue: 112345 +.. nonce: FFApHx +.. section: Library + +Improve error message when trying to call :func:`issubclass` against a +:class:`typing.Protocol` that has non-method members. Patch by Randolf +Scholz. + +.. + +.. date: 2023-11-23-12-37-22 +.. gh-issue: 112137 +.. nonce: kM46Q6 +.. section: Library + +Change :mod:`dis` output to display no-lineno as "--" instead of "None". + +.. + +.. date: 2023-11-23-10-41-21 +.. gh-issue: 112332 +.. nonce: rhTBaa +.. section: Library + +Deprecate the ``exc_type`` field of :class:`traceback.TracebackException`. +Add ``exc_type_str`` to replace it. + +.. + +.. date: 2023-11-22-23-08-47 +.. gh-issue: 81620 +.. nonce: mfZ2Wf +.. section: Library + +Add extra tests for :func:`random.binomialvariate` + +.. + +.. date: 2023-11-22-19-43-54 +.. gh-issue: 112292 +.. 
nonce: 5nDU87 +.. section: Library + +Fix a crash in :mod:`readline` when imported from a sub interpreter. Patch +by Anthony Shaw + +.. + +.. date: 2023-11-21-02-58-14 +.. gh-issue: 77621 +.. nonce: MYv5XS +.. section: Library + +Slightly improve the import time of the :mod:`pathlib` module by deferring +some imports. Patch by Barney Gale. + +.. + +.. date: 2023-11-16-17-18-09 +.. gh-issue: 112137 +.. nonce: QvjGjN +.. section: Library + +Change :mod:`dis` output to display logical labels for jump targets instead +of offsets. + +.. + +.. date: 2023-11-16-10-42-15 +.. gh-issue: 112139 +.. nonce: WpHosf +.. section: Library + +Add :meth:`Signature.format` to format signatures to string with extra +options. And use it in :mod:`pydoc` to render more readable signatures that +have new lines between parameters. + +.. + +.. date: 2023-11-15-04-53-37 +.. gh-issue: 112105 +.. nonce: I3RcVN +.. section: Library + +Make :func:`readline.set_completer_delims` work with libedit + +.. + +.. date: 2023-11-15-01-36-04 +.. gh-issue: 106922 +.. nonce: qslOVH +.. section: Library + +Display multiple lines with ``traceback`` when errors span multiple lines. + +.. + +.. date: 2023-11-09-11-07-34 +.. gh-issue: 111874 +.. nonce: dzYc3j +.. section: Library + +When creating a :class:`typing.NamedTuple` class, ensure +:func:`~object.__set_name__` is called on all objects that define +``__set_name__`` and exist in the values of the ``NamedTuple`` class's class +dictionary. Patch by Alex Waygood. + +.. + +.. date: 2023-11-08-18-53-07 +.. gh-issue: 68166 +.. nonce: 1iTh4Y +.. section: Library + +Add support of the "vsapi" element type in +:meth:`tkinter.ttk.Style.element_create`. + +.. + +.. date: 2023-11-08-16-11-04 +.. gh-issue: 110275 +.. nonce: Bm6GwR +.. section: Library + +Named tuple's methods ``_replace()`` and ``__replace__()`` now raise +TypeError instead of ValueError for invalid keyword arguments. + +.. + +.. date: 2023-11-05-20-09-27 +.. gh-issue: 99367 +.. nonce: HLaWKo +.. section: Library + +Do not mangle ``sys.path[0]`` in :mod:`pdb` if safe_path is set + +.. + +.. date: 2023-11-02-10-13-31 +.. gh-issue: 111615 +.. nonce: 3SMixi +.. section: Library + +Fix a regression caused by a fix to gh-93162 whereby you couldn't configure +a :class:`QueueHandler` without specifying handlers. + +.. + +.. date: 2023-10-25-16-37-13 +.. gh-issue: 75666 +.. nonce: BpsWut +.. section: Library + +Fix the behavior of :mod:`tkinter` widget's ``unbind()`` method with two +arguments. Previously, ``widget.unbind(sequence, funcid)`` destroyed the +current binding for *sequence*, leaving *sequence* unbound, and deleted the +*funcid* command. Now it removes only *funcid* from the binding for +*sequence*, keeping other commands, and deletes the *funcid* command. It +leaves *sequence* unbound only if *funcid* was the last bound command. + +.. + +.. date: 2023-10-25-13-07-53 +.. gh-issue: 67790 +.. nonce: jMn9Ad +.. section: Library + +Implement basic formatting support (minimum width, alignment, fill) for +:class:`fractions.Fraction`. + +.. + +.. date: 2023-10-23-18-42-26 +.. gh-issue: 111049 +.. nonce: Ys7-o_ +.. section: Library + +Fix crash during garbage collection of the :class:`io.BytesIO` buffer +object. + +.. + +.. date: 2023-10-23-03-49-34 +.. gh-issue: 102980 +.. nonce: aXBd54 +.. section: Library + +Redirect the output of ``interact`` command of :mod:`pdb` to the same +channel as the debugger. Add tests and improve docs. + +.. + +.. date: 2023-10-20-15-28-08 +.. gh-issue: 102988 +.. nonce: dStNO7 +.. 
section: Library + +:func:`email.utils.getaddresses` and :func:`email.utils.parseaddr` now +return ``('', '')`` 2-tuples in more situations where invalid email +addresses are encountered instead of potentially inaccurate values. Add an +optional *strict* parameter to these two functions: use ``strict=False`` to +get the old behavior and accept malformed inputs. ``getattr(email.utils, +'supports_strict_parsing', False)`` can be used to check if the *strict* +parameter is available. Patch by Thomas Dwyer and Victor Stinner to improve +the CVE-2023-27043 fix. + +.. + +.. date: 2023-10-17-16-11-03 +.. gh-issue: 52161 +.. nonce: WBYyCJ +.. section: Library + +:meth:`cmd.Cmd.do_help` now cleans docstrings with :func:`inspect.cleandoc` +before writing them. Patch by Filip Łapkiewicz. + +.. + +.. date: 2023-10-12-18-19-47 +.. gh-issue: 82300 +.. nonce: P8-O38 +.. section: Library + +Add a ``track`` parameter to +:class:`multiprocessing.shared_memory.SharedMemory` that allows using shared +memory blocks without having to register with the POSIX resource tracker +that automatically releases them upon process exit. + +.. + +.. date: 2023-10-11-02-34-01 +.. gh-issue: 110109 +.. nonce: RFCmHs +.. section: Library + +Add private ``pathlib._PurePathBase`` class: a base class for +:class:`pathlib.PurePath` that omits certain magic methods. It may be made +public (along with ``_PathBase``) in future. + +.. + +.. date: 2023-09-28-13-15-51 +.. gh-issue: 109858 +.. nonce: 43e2dg +.. section: Library + +Protect :mod:`zipfile` from "quoted-overlap" zipbomb. It now raises +``BadZipFile`` when trying to read an entry that overlaps with another entry or the +central directory. + +.. + +.. date: 2023-09-23-14-40-51 +.. gh-issue: 109786 +.. nonce: UX3pKv +.. section: Library + +Fix possible reference leaks and a crash when re-entering the ``__next__()`` +method of :class:`itertools.pairwise`. + +.. + +.. date: 2023-09-01-15-33-18 +.. gh-issue: 91539 +.. nonce: xoNLEI +.. section: Library + +Small (10 - 20%) and trivial performance improvement of +:func:`urllib.request.getproxies_environment`, typically useful when there +are many environment variables to go over. + +.. + +.. date: 2023-08-14-21-10-52 +.. gh-issue: 103363 +.. nonce: u64_QI +.. section: Library + +Add *follow_symlinks* keyword-only argument to :meth:`pathlib.Path.owner` +and :meth:`~pathlib.Path.group`, defaulting to ``True``. + +.. + +.. date: 2023-08-07-21-11-24 +.. gh-issue: 102130 +.. nonce: _UyI5i +.. section: Library + +Support tab completion in :mod:`cmd` for ``editline``. + +.. + +.. date: 2023-08-04-18-43-21 +.. gh-issue: 99437 +.. nonce: Et8hu8 +.. section: Library + +:func:`runpy.run_path` now decodes path-like objects, making sure ``__file__`` +and ``sys.argv[0]`` of the module being run are always strings. + +.. + +.. date: 2023-04-29-20-49-13 +.. gh-issue: 104003 +.. nonce: -8Ruk2 +.. section: Library + +Add :func:`warnings.deprecated`, a decorator to mark deprecated functions to +static type checkers and to warn on usage of deprecated classes and +functions. See :pep:`702`. Patch by Jelle Zijlstra. + +.. + +.. date: 2023-04-23-11-08-02 +.. gh-issue: 103708 +.. nonce: Y17C7p +.. section: Library + +Make the hardcoded Python name a configurable parameter so that different +implementations of Python can override it instead of making huge diffs in +``sysconfig.py``. + +.. + +.. date: 2023-04-09-21-05-43 +.. gh-issue: 66515 +.. nonce: 0DS8Ya +.. section: Library + +:class:`mailbox.MH` now supports folders that do not contain a +``.mh_sequences`` file (e.g.
Claws Mail IMAP-cache folders). Patch by Serhiy +Storchaka. + +.. + +.. date: 2023-02-08-00-43-29 +.. gh-issue: 83162 +.. nonce: ufdI9F +.. section: Library + +Renamed :exc:`!re.error` to :exc:`PatternError` for clarity, and kept +:exc:`!re.error` for backward compatibility. Patch by Matthias Bussonnier +and Adam Chhina. + +.. + +.. date: 2022-12-01-16-57-44 +.. gh-issue: 91133 +.. nonce: LKMVCV +.. section: Library + +Fix a bug in :class:`tempfile.TemporaryDirectory` cleanup, which now no +longer dereferences symlinks when working around file system permission +errors. + +.. + +.. bpo: 43153 +.. date: 2021-12-06-22-10-53 +.. nonce: J7mjSy +.. section: Library + +On Windows, ``tempfile.TemporaryDirectory`` previously masked a +``PermissionError`` with ``NotADirectoryError`` during directory cleanup. It +now correctly raises ``PermissionError`` if errors are not ignored. Patch by +Andrei Kulakov and Ken Jin. + +.. + +.. bpo: 32731 +.. date: 2021-11-23-22-22-49 +.. nonce: kNOASr +.. section: Library + +:func:`getpass.getuser` now raises :exc:`OSError` for all failures rather +than :exc:`ImportError` on systems lacking the :mod:`pwd` module or +:exc:`KeyError` if the password database is empty. + +.. + +.. bpo: 34321 +.. date: 2021-04-15-10-41-51 +.. nonce: 36m6_l +.. section: Library + +:class:`mmap.mmap` now has a *trackfd* parameter on Unix; if it is +``False``, the file descriptor specified by *fileno* will not be duplicated. + +.. + +.. bpo: 35332 +.. date: 2020-12-14-09-31-13 +.. nonce: s22wAx +.. section: Library + +The :func:`shutil.rmtree` function now ignores errors when calling +:func:`os.close` when *ignore_errors* is ``True``, and :func:`os.close` no +longer retried after error. + +.. + +.. bpo: 35928 +.. date: 2020-10-03-23-47-28 +.. nonce: E0iPAa +.. section: Library + +:class:`io.TextIOWrapper` now correctly handles the decoding buffer after +``read()`` and ``write()``. + +.. + +.. bpo: 26791 +.. date: 2020-08-06-14-43-55 +.. nonce: KxoEfO +.. section: Library + +:func:`shutil.move` now moves a symlink into a directory when that directory +is the target of the symlink. This provides the same behavior as the mv +shell command. The previous behavior raised an exception. Patch by Jeffrey +Kintscher. + +.. + +.. bpo: 41422 +.. date: 2020-07-28-20-48-05 +.. nonce: iMwnMu +.. section: Library + +Fixed memory leaks of :class:`pickle.Pickler` and :class:`pickle.Unpickler` +involving cyclic references via the internal memo mapping. + +.. + +.. bpo: 19821 +.. date: 2020-06-15-23-44-53 +.. nonce: ihBk39 +.. section: Library + +The :func:`!pydoc.ispackage` function has been deprecated. + +.. + +.. bpo: 40262 +.. date: 2020-05-21-23-32-46 +.. nonce: z4fQv1 +.. section: Library + +The :meth:`ssl.SSLSocket.recv_into` method no longer requires the *buffer* +argument to implement ``__len__`` and supports buffers with arbitrary item +size. + +.. + +.. bpo: 39912 +.. date: 2020-03-09-15-08-29 +.. nonce: xPOBBY +.. section: Library + +:func:`warnings.filterwarnings()` and :func:`warnings.simplefilter()` now +raise appropriate exceptions instead of ``AssertionError``. Patch +contributed by Rémi Lapeyre. + +.. + +.. bpo: 37260 +.. date: 2019-06-14-22-37-32 +.. nonce: oecdIf +.. section: Library + +Fixed a race condition in :func:`shutil.rmtree` in which directory entries +removed by another process or thread while ``shutil.rmtree()`` is running +can cause it to raise FileNotFoundError. Patch by Jeffrey Kintscher. + +.. + +.. bpo: 36959 +.. date: 2019-05-18-15-50-14 +.. nonce: ew6WZ4 +.. 
section: Library + +Fix some error messages for invalid ISO format string combinations in +``strptime()`` that referred to directives not contained in the format +string. Patch by Gordon P. Hemsley. + +.. + +.. bpo: 18060 +.. date: 2019-05-17-07-22-33 +.. nonce: 5mqTQM +.. section: Library + +Fixed a class inheritance issue that can cause segfaults when deriving two +or more levels of subclasses from a base class of Structure or Union. + +.. + +.. bpo: 29779 +.. date: 2019-05-08-13-14-11 +.. nonce: jg33dp +.. section: Library + +Add a new :envvar:`PYTHON_HISTORY` environment variable to set the location +of a ``.python_history`` file. + +.. + +.. bpo: 21360 +.. date: 2019-02-12-16-12-54 +.. nonce: gkSSfx +.. section: Library + +:class:`mailbox.Maildir` now ignores files with a leading dot. + +.. + +.. date: 2023-11-30-02-33-59 +.. gh-issue: 111699 +.. nonce: _O5G_y +.. section: Documentation + +Relocate the ``smtpd`` deprecation notice to its own section rather than under +``locale`` in the What's New in Python 3.12 document. + +.. + +.. date: 2023-10-23-23-43-43 +.. gh-issue: 110746 +.. nonce: yg77IE +.. section: Documentation + +Improved markup for valid options/values for methods ttk.treeview.column and +ttk.treeview.heading, and for Layouts. + +.. + +.. date: 2023-08-01-13-11-39 +.. gh-issue: 95649 +.. nonce: F4KhPS +.. section: Documentation + +Document that the :mod:`asyncio` module contains code taken from `v0.16.0 of +the uvloop project `_, as +well as the required MIT licensing information. + +.. + +.. date: 2024-01-12-14-34-24 +.. gh-issue: 111798 +.. nonce: hd9B_- +.. section: Tests + +Disable ``test_super_deep()`` from ``test_call`` under pydebug builds on +WASI; the stack depth is too small to make the test useful. + +.. + +.. date: 2024-01-12-13-19-12 +.. gh-issue: 111801 +.. nonce: 9hh9DY +.. section: Tests + +Lower the recursion limit in ``test_isinstance`` for +``test_infinitely_many_bases()``. This prevents a stack overflow on a +pydebug build of WASI. + +.. + +.. date: 2024-01-12-12-45-24 +.. gh-issue: 111802 +.. nonce: gN41vt +.. section: Tests + +Specify a low recursion depth for ``test_bad_getattr()`` in +``test.pickletester`` to avoid exhausting the stack under a pydebug build +for WASI. + +.. + +.. date: 2024-01-08-21-15-48 +.. gh-issue: 44626 +.. nonce: DRq-PR +.. section: Tests + +Fix :func:`os.path.isabs` incorrectly returning ``True`` when given a path +that starts with exactly one (back)slash on Windows. + +Fix :meth:`pathlib.PureWindowsPath.is_absolute` incorrectly returning +``False`` for some paths beginning with two (back)slashes. + +.. + +.. date: 2024-01-01-14-40-02 +.. gh-issue: 113633 +.. nonce: VOY5ai +.. section: Tests + +Use module state for the ``_testcapi`` extension module. + +.. + +.. date: 2023-12-09-21-27-46 +.. gh-issue: 109980 +.. nonce: y--500 +.. section: Tests + +Fix ``test_tarfile_vs_tar`` in ``test_shutil`` for macOS, where the system tar +can include more information in the archive than :func:`shutil.make_archive`. + +.. + +.. date: 2023-12-05-19-50-03 +.. gh-issue: 112769 +.. nonce: kdLJmS +.. section: Tests + +The tests now correctly compare the zlib version when +:const:`zlib.ZLIB_RUNTIME_VERSION` contains non-integer suffixes. For +example, zlib-ng defines the version as ``1.3.0.zlib-ng``. + +.. + +.. date: 2023-12-04-15-56-11 +.. gh-issue: 112334 +.. nonce: FFc9Ti +.. section: Tests + +Add a regression test to verify that ``vfork()`` is used when expected by +:mod:`subprocess` on vfork-enabled POSIX systems (Linux). + +.. + +.. 
date: 2023-09-05-20-46-35 +.. gh-issue: 108927 +.. nonce: TpwWav +.. section: Tests + +Fixed order dependence in running tests in the same process when a test that +has submodules (e.g. test_importlib) follows a test that imports its +submodule (e.g. test_importlib.util) and precedes a test (e.g. test_unittest +or test_compileall) that uses that submodule. + +.. + +.. bpo: 40648 +.. date: 2020-05-16-18-00-21 +.. nonce: p2uPqy +.. section: Tests + +Test the modes that a file can get with ``chmod()`` on Windows. + +.. + +.. date: 2024-01-15-16-58-43 +.. gh-issue: 114013 +.. nonce: FoSeQf +.. section: Build + +Fix ``Tools/wasm/wasi.py`` to not include the path to ``python.wasm`` as +part of ``HOSTRUNNER``. The environment variable is meant to specify only how to +run the WASI host, with ``python.wasm`` and the relevant flags appended +to ``HOSTRUNNER``. This makes ``make test`` work. + +.. + +.. date: 2023-12-23-09-35-48 +.. gh-issue: 113258 +.. nonce: GlsAyH +.. section: Build + +Changed the Windows build to write out generated frozen modules into the +build tree instead of the source tree. + +.. + +.. date: 2023-12-21-05-35-06 +.. gh-issue: 112305 +.. nonce: VfqQPx +.. section: Build + +Fixed the ``check-clean-src`` step performed on out-of-tree builds to detect +errant ``$(srcdir)/Python/frozen_modules/*.h`` files and recommend +appropriate source tree cleanup steps to get a working build again. + +.. + +.. date: 2023-12-17-18-23-02 +.. gh-issue: 112536 +.. nonce: 8lr3Ep +.. section: Build + +Add support for the thread sanitizer (TSAN). + +.. + +.. date: 2023-12-08-11-33-37 +.. gh-issue: 112867 +.. nonce: ZzDfXQ +.. section: Build + +Fix the build for the case that ``WITH_PYMALLOC_RADIX_TREE=0`` is set. + +.. + +.. date: 2023-11-27-13-55-47 +.. gh-issue: 103065 +.. nonce: o72OiA +.. section: Build + +Introduce ``Tools/wasm/wasi.py`` to simplify doing a WASI build. + +.. + +.. bpo: 11102 +.. date: 2020-05-01-23-44-31 +.. nonce: Fw9zeS +.. section: Build + +The :func:`os.major`, :func:`os.makedev`, and :func:`os.minor` functions are +now available on HP-UX v3. + +.. + +.. bpo: 36351 +.. date: 2020-01-11-23-49-17 +.. nonce: ce8BBh +.. section: Build + +Do not set ipv6type when cross-compiling. + +.. + +.. date: 2024-01-15-23-53-25 +.. gh-issue: 114096 +.. nonce: G-Myja +.. section: Windows + +Process privileges that are activated for creating directory junctions are +now restored afterwards, avoiding behaviour changes in other parts of the +program. + +.. + +.. date: 2024-01-04-21-16-31 +.. gh-issue: 111877 +.. nonce: fR-B4c +.. section: Windows + +:func:`os.stat` calls were returning incorrect time values for files that +could not be accessed directly. + +.. + +.. date: 2023-12-19-10-56-46 +.. gh-issue: 111973 +.. nonce: A9Wtsb +.. section: Windows + +Update Windows installer to use SQLite 3.44.2. + +.. + +.. date: 2023-12-14-19-00-29 +.. gh-issue: 113009 +.. nonce: 6LNdjz +.. section: Windows + +:mod:`multiprocessing`: On Windows, fix a race condition in +``Process.terminate()``: no longer set the ``returncode`` attribute, so that +``Process.wait()`` always calls ``WaitForSingleObject()``. Previously, +sometimes the process was still running after ``TerminateProcess()`` even if +``GetExitCodeProcess()`` was not ``STILL_ACTIVE``. Patch by Victor Stinner. + +.. + +.. date: 2023-12-12-20-58-09 +.. gh-issue: 86179 +.. nonce: YYSk_6 +.. section: Windows + +Fixes path calculations when launching Python on Windows through a symlink. + +.. + +.. date: 2023-12-11-20-23-04 +.. gh-issue: 71383 +.. nonce: 9pZh6t +.. 
section: Windows + +Update Tcl/Tk in Windows installer to 8.6.13 with a patch to suppress +incorrect ThemeChanged warnings. + +.. + +.. date: 2023-12-05-22-56-30 +.. gh-issue: 111650 +.. nonce: xlWmvM +.. section: Windows + +Ensures the ``Py_GIL_DISABLED`` preprocessor variable is defined in +:file:`pyconfig.h` so that extension modules written in C are able to use +it. + +.. + +.. date: 2023-12-03-19-22-37 +.. gh-issue: 112278 +.. nonce: FiloCE +.. section: Windows + +Reduce the time cost for some functions in :mod:`platform` on Windows if the +current user has no permission to query WMI. + +.. + +.. date: 2023-08-08-01-42-14 +.. gh-issue: 73427 +.. nonce: WOpiNt +.. section: Windows + +Deprecate :func:`sys._enablelegacywindowsfsencoding`. Use +:envvar:`PYTHONLEGACYWINDOWSFSENCODING` instead. Patch by Inada Naoki. + +.. + +.. date: 2023-03-15-23-53-45 +.. gh-issue: 87868 +.. nonce: 4C36oQ +.. section: Windows + +Correctly sort and remove duplicate environment variables in +:py:func:`!_winapi.CreateProcess`. + +.. + +.. bpo: 37308 +.. date: 2019-06-16-11-27-05 +.. nonce: Iz_NU_ +.. section: Windows + +Fix mojibake in :class:`mmap.mmap` when using a non-ASCII *tagname* argument +on Windows. + +.. + +.. date: 2024-01-02-22-25-21 +.. gh-issue: 113666 +.. nonce: xKZoBm +.. section: macOS + +Add the following constants to module :mod:`stat`: ``UF_SETTABLE``, +``UF_TRACKED``, ``UF_DATAVAULT``, ``SF_SUPPORTED``, ``SF_SETTABLE``, +``SF_SYNTHETIC``, ``SF_RESTRICTED``, ``SF_FIRMLINK`` and ``SF_DATALESS``. +The values ``UF_SETTABLE``, ``SF_SUPPORTED``, ``SF_SETTABLE`` and +``SF_SYNTHETIC`` are only available on macOS. + +.. + +.. date: 2023-12-28-12-18-39 +.. gh-issue: 113536 +.. nonce: 0ythg7 +.. section: macOS + +:func:`os.waitid` is now available on macOS. + +.. + +.. date: 2023-12-23-22-41-07 +.. gh-issue: 110459 +.. nonce: NaMBJy +.. section: macOS + +Running ``configure ... --with-openssl-rpath=X/Y/Z`` no longer fails to +detect OpenSSL on macOS. + +.. + +.. date: 2023-12-21-11-53-47 +.. gh-issue: 74573 +.. nonce: MA6Vys +.. section: macOS + +Document that :mod:`dbm.ndbm` can silently corrupt DBM files on updates when +exceeding undocumented platform limits, and can crash (segmentation fault) +when reading such a corrupted file. (FB8919203) + +.. + +.. date: 2023-12-21-10-20-41 +.. gh-issue: 65701 +.. nonce: Q2hNbN +.. section: macOS + +The :program:`freeze` tool doesn't work with framework builds of Python. +Document this and bail out early when running the tool with such a build. + +.. + +.. date: 2023-12-21-09-41-42 +.. gh-issue: 87277 +.. nonce: IF6EZZ +.. section: macOS + +webbrowser: Don't look for X11 browsers on macOS. Those are generally not +used and probing for them can result in starting XQuartz even if it isn't +used otherwise. + +.. + +.. date: 2023-12-19-10-50-08 +.. gh-issue: 111973 +.. nonce: HMHJfy +.. section: macOS + +Update macOS installer to use SQLite 3.44.2. + +.. + +.. date: 2023-12-16-11-45-32 +.. gh-issue: 108269 +.. nonce: wVgCHF +.. section: macOS + +Set ``CFBundleAllowMixedLocalizations`` to true in the Info.plist for the +framework, embedded Python.app and IDLE.app with framework installs on +macOS. This allows applications to pick up the user's preferred locale when +that's different from English. + +.. + +.. date: 2023-12-10-20-30-06 +.. gh-issue: 102362 +.. nonce: y8svbF +.. 
section: macOS + +Make sure the result of :func:`sysconfig.get_platform` includes at least the +major and minor versions, even if ``MACOSX_DEPLOYMENT_TARGET`` is set to +only a major version during build, to match the format expected by pip. + +.. + +.. date: 2023-12-07-15-53-16 +.. gh-issue: 110017 +.. nonce: UMYzMR +.. section: macOS + +Disable a signal handling stress test on macOS due to a bug in macOS +(FB13453490). + +.. + +.. date: 2023-12-07-14-19-46 +.. gh-issue: 110820 +.. nonce: DIxb_F +.. section: macOS + +Make sure the preprocessor definitions for ``ALIGNOF_MAX_ALIGN_T``, +``SIZEOF_LONG_DOUBLE`` and ``HAVE_GCC_ASM_FOR_X64`` are correct for +Universal 2 builds on macOS. + +.. + +.. date: 2023-12-06-12-11-13 +.. gh-issue: 109981 +.. nonce: mOHg10 +.. section: macOS + +Use ``/dev/fd`` on macOS to determine the number of open files in +``test.support.os_helper.fd_count`` to avoid a crash with "guarded" file +descriptors when probing for open files. + +.. + +.. date: 2024-01-17-02-15-33 +.. gh-issue: 72284 +.. nonce: cAQiYO +.. section: IDLE + +Improve the lists of features, editor key bindings, and shell key bindings +in the IDLE doc. + +.. + +.. date: 2024-01-11-21-26-58 +.. gh-issue: 113903 +.. nonce: __GLlQ +.. section: IDLE + +Fix a rare failure of test.test_idle in test_configdialog. + +.. + +.. date: 2024-01-05-12-24-01 +.. gh-issue: 113729 +.. nonce: qpluea +.. section: IDLE + +Fix the "Help -> IDLE Doc" menu bug in 3.11.7 and 3.12.1. + +.. + +.. date: 2023-12-19-00-03-12 +.. gh-issue: 113269 +.. nonce: lrU-IC +.. section: IDLE + +Fix test_editor hang on macOS Catalina. + +.. + +.. date: 2023-12-10-20-01-11 +.. gh-issue: 112898 +.. nonce: 98aWv2 +.. section: IDLE + +Fix processing unsaved files when quitting IDLE on macOS. + +.. + +.. bpo: 13586 +.. date: 2019-12-13-12-26-56 +.. nonce: 1grqsR +.. section: IDLE + +Enter the selected text when opening the "Replace" dialog. + +.. + +.. date: 2023-12-02-02-08-11 +.. gh-issue: 106560 +.. nonce: THvuji +.. section: C API + +Fix redundant declarations in the public C API. Declare PyBool_Type, +PyLong_Type and PySys_Audit() only once. Patch by Victor Stinner. + +.. + +.. date: 2023-11-27-09-44-16 +.. gh-issue: 112438 +.. nonce: GdNZiI +.. section: C API + +Fix support of format units "es", "et", "es#", and "et#" in nested tuples in +:c:func:`PyArg_ParseTuple`-like functions. + +.. + +.. date: 2023-11-15-01-26-59 +.. gh-issue: 111545 +.. nonce: iAoFtA +.. section: C API + +Add the :c:func:`Py_HashPointer` function to hash a pointer. Patch by Victor +Stinner. + +.. + +.. date: 2023-06-21-11-53-09 +.. gh-issue: 65210 +.. nonce: PhFRBJ +.. section: C API + +Change the declaration of the *keywords* parameter of +:c:func:`PyArg_ParseTupleAndKeywords` and +:c:func:`PyArg_VaParseTupleAndKeywords` for better compatibility with C++. diff --git a/Misc/NEWS.d/3.5.0a1.rst b/Misc/NEWS.d/3.5.0a1.rst index 96e59206cb1291..26b3d8253dbdee 100644 --- a/Misc/NEWS.d/3.5.0a1.rst +++ b/Misc/NEWS.d/3.5.0a1.rst @@ -251,8 +251,8 @@ and "surrogatepass" error handlers. .. nonce: FM72m- .. section: Core and Builtins -speed up `PyObject_IsInstance` and `PyObject_IsSubclass` in the common case -that the second argument has metaclass `type`. +speed up ``PyObject_IsInstance`` and ``PyObject_IsSubclass`` in the common case +that the second argument has metaclass ``type``. .. @@ -261,8 +261,8 @@ that the second argument has metaclass `type`. .. nonce: ds5wQa .. 
section: Core and Builtins -Add a new `PyErr_FormatV` function, similar to `PyErr_Format` but accepting -a `va_list` argument. +Add a new ``PyErr_FormatV`` function, similar to ``PyErr_Format`` but accepting +a ``va_list`` argument. .. @@ -1522,7 +1522,7 @@ Fixed fcntl() with integer argument on 64-bit big-endian platforms. .. nonce: 62MLqr .. section: Library -Add an `--sort-keys` option to json.tool CLI. +Add an ``--sort-keys`` option to ``json.tool`` CLI. .. @@ -1745,7 +1745,7 @@ already failed. .. section: Library Make it possible to examine the errors from unittest discovery without -executing the test suite. The new `errors` attribute on TestLoader exposes +executing the test suite. The new ``errors`` attribute on ``TestLoader`` exposes these non-fatal errors encountered during discovery. .. @@ -2342,9 +2342,9 @@ as normal call attributes. .. nonce: Nghn-Y .. section: Library -load_tests() is now unconditionally run when it is present in a package's -__init__.py. TestLoader.loadTestsFromModule() still accepts use_load_tests, -but it is deprecated and ignored. A new keyword-only attribute `pattern` is +``load_tests()`` is now unconditionally run when it is present in a package's +``__init__.py``. ``TestLoader.loadTestsFromModule()`` still accepts use_load_tests, +but it is deprecated and ignored. A new keyword-only attribute ``pattern`` is added and documented. Patch given by Robert Collins, tweaked by Barry Warsaw. @@ -2648,7 +2648,7 @@ module. .. nonce: THJSYB .. section: Library -Changed FeedParser feed() to avoid O(N\ :sup:`2`) behavior when parsing long line. +Changed FeedParser feed() to avoid *O*\ (*n*\ :sup:`2`) behavior when parsing long line. Original patch by Raymond Hettinger. .. @@ -2736,8 +2736,8 @@ Convert posixmodule to use Argument Clinic. .. nonce: YccmZF .. section: Library -Add an *exists_ok* argument to `Pathlib.mkdir()` to mimic `mkdir -p` and -`os.makedirs()` functionality. When true, ignore FileExistsErrors. Patch +Add an *exists_ok* argument to ``Pathlib.mkdir()`` to mimic ``mkdir -p`` and +``os.makedirs()`` functionality. When true, ignore ``FileExistsErrors``. Patch by Berker Peksag. .. @@ -3930,7 +3930,7 @@ has been called. .. nonce: 5CDoox .. section: Library -New keyword argument `unsafe` to Mock. It raises `AttributeError` incase of +New keyword argument ``unsafe`` to Mock. It raises ``AttributeError`` incase of an attribute startswith assert or assret. .. @@ -4173,7 +4173,7 @@ provide better security by default. .. nonce: FP5FY0 .. section: Library -`assertRaisesRegex` and `assertWarnsRegex` now raise a TypeError if the +``assertRaisesRegex`` and ``assertWarnsRegex`` now raise a ``TypeError`` if the second argument is not a string or compiled regex. .. diff --git a/Misc/NEWS.d/3.5.0b4.rst b/Misc/NEWS.d/3.5.0b4.rst index 2b1b98a4316fc1..e42d93689d01a0 100644 --- a/Misc/NEWS.d/3.5.0b4.rst +++ b/Misc/NEWS.d/3.5.0b4.rst @@ -129,7 +129,7 @@ Random.setstate() now validates the value of state last element. .. nonce: HvJf6T .. section: Library -Fixed an issue that caused `inspect.getsource` to return incorrect results +Fixed an issue that caused ``inspect.getsource`` to return incorrect results on nested functions. .. diff --git a/Misc/NEWS.d/3.5.2rc1.rst b/Misc/NEWS.d/3.5.2rc1.rst index 01fcd866a896ae..a7e5c1b130f9e9 100644 --- a/Misc/NEWS.d/3.5.2rc1.rst +++ b/Misc/NEWS.d/3.5.2rc1.rst @@ -710,9 +710,9 @@ Fixed the comparison of plistlib.Data with other types. .. nonce: RMRMtM .. section: Library -Fix an uninitialized variable in `ctypes.util`. 
+Fix an uninitialized variable in ``ctypes.util``. The bug only occurs on SunOS when the ctypes implementation searches for the -`crle` program. Patch by Xiang Zhang. Tested on SunOS by Kees Bos. +``crle`` program. Patch by Xiang Zhang. Tested on SunOS by Kees Bos. .. diff --git a/Misc/NEWS.d/3.6.0a1.rst b/Misc/NEWS.d/3.6.0a1.rst index 98f1215fb91873..144d217f6098a1 100644 --- a/Misc/NEWS.d/3.6.0a1.rst +++ b/Misc/NEWS.d/3.6.0a1.rst @@ -1113,9 +1113,9 @@ Fixed the comparison of plistlib.Data with other types. .. nonce: RMRMtM .. section: Library -Fix an uninitialized variable in `ctypes.util`. +Fix an uninitialized variable in ``ctypes.util``. The bug only occurs on SunOS when the ctypes implementation searches for the -`crle` program. Patch by Xiang Zhang. Tested on SunOS by Kees Bos. +``crle`` program. Patch by Xiang Zhang. Tested on SunOS by Kees Bos. .. @@ -3915,7 +3915,7 @@ Fix output of python-config --extension-suffix. .. nonce: yLO-r4 .. section: Tools/Demos -The pyvenv script has been deprecated in favour of `python3 -m venv`. +The pyvenv script has been deprecated in favour of ``python3 -m venv``. .. diff --git a/Misc/NEWS.d/3.6.0b1.rst b/Misc/NEWS.d/3.6.0b1.rst index 3fbae5c6a4b3a8..4fb6bdd6f89c9b 100644 --- a/Misc/NEWS.d/3.6.0b1.rst +++ b/Misc/NEWS.d/3.6.0b1.rst @@ -166,7 +166,7 @@ a DeprecationWarning. Patch by Emanuel Barry. .. nonce: aABzcL .. section: Core and Builtins -`dict` implementation is changed like PyPy. It is more compact and preserves +``dict`` implementation is changed like PyPy. It is more compact and preserves insertion order. (Concept developed by Raymond Hettinger and patch by Inada Naoki.) diff --git a/Misc/NEWS.d/3.6.3.rst b/Misc/NEWS.d/3.6.3.rst index 4d591d77ffe545..58fd009aea1fed 100644 --- a/Misc/NEWS.d/3.6.3.rst +++ b/Misc/NEWS.d/3.6.3.rst @@ -4,7 +4,7 @@ .. release date: 2017-10-03 .. section: Library -Re-allow arbitrary iterables in `concurrent.futures.as_completed()`. Fixes +Re-allow arbitrary iterables in ``concurrent.futures.as_completed()``. Fixes regression in 3.6.3rc1. .. diff --git a/Misc/NEWS.d/3.6.3rc1.rst b/Misc/NEWS.d/3.6.3rc1.rst index 4b2aae9dc88441..ebda7665e2b6ea 100644 --- a/Misc/NEWS.d/3.6.3rc1.rst +++ b/Misc/NEWS.d/3.6.3rc1.rst @@ -24,8 +24,8 @@ fixes. .. nonce: 0yiA5Q .. section: Core and Builtins -Fix an assertion failure in `subprocess.Popen()` on Windows, in case the env -argument has a bad keys() method. Patch by Oren Milman. +Fix an assertion failure in ``subprocess.Popen()`` on Windows, in case the env +argument has a bad ``keys()`` method. Patch by Oren Milman. .. @@ -34,7 +34,7 @@ argument has a bad keys() method. Patch by Oren Milman. .. nonce: rS-FlC .. section: Core and Builtins -Fix an assertion failure in `PyErr_WriteUnraisable()` in case of an +Fix an assertion failure in ``PyErr_WriteUnraisable()`` in case of an exception with a bad ``__module__`` attribute. Patch by Oren Milman. .. @@ -95,7 +95,7 @@ plans to remove the functions from sys/types.h. .. nonce: t8QggK .. section: Core and Builtins -Fix an assertion failure in `zipimport.zipimporter.get_data` on Windows, +Fix an assertion failure in ``zipimport.zipimporter.get_data`` on Windows, when the return value of ``pathname.replace('/','\\')`` isn't a string. Patch by Oren Milman. @@ -106,7 +106,7 @@ Patch by Oren Milman. .. nonce: YMduKF .. section: Core and Builtins -Fix an assertion failure in the write() method of `io.TextIOWrapper`, when +Fix an assertion failure in the ``write()`` method of ``io.TextIOWrapper``, when the encoder doesn't return a bytes object. 
Patch by Oren Milman. .. @@ -116,7 +116,7 @@ the encoder doesn't return a bytes object. Patch by Oren Milman. .. nonce: dRJzqR .. section: Core and Builtins -Fix a crash in some methods of `io.TextIOWrapper`, when the decoder's state +Fix a crash in some methods of ``io.TextIOWrapper``, when the decoder's state is invalid. Patch by Oren Milman. .. @@ -477,7 +477,7 @@ attributes to help the garbage collector to destroy all widgets. .. nonce: 1t2hn5 .. section: Library -Fix `copyreg._slotnames()` mangled attribute calculation for classes whose +Fix ``copyreg._slotnames()`` mangled attribute calculation for classes whose name begins with an underscore. Patch by Shane Harvey. .. @@ -585,7 +585,7 @@ socket.close() now ignores ECONNRESET error. .. nonce: CLvEvV .. section: Library -Fix out of bounds write in `asyncio.CFuture.remove_done_callback()`. +Fix out of bounds write in ``asyncio.CFuture.remove_done_callback()``. .. @@ -933,7 +933,7 @@ Add tests for configdialog keys tab. Patch by Cheryl Sabella. .. nonce: sqE1FS .. section: IDLE -IDLE: Calltips use `inspect.signature` instead of `inspect.getfullargspec`. +IDLE: Calltips use ``inspect.signature`` instead of ``inspect.getfullargspec``. This improves calltips for builtins converted to use Argument Clinic. Patch by Louie Lu. diff --git a/Misc/NEWS.d/3.6.4rc1.rst b/Misc/NEWS.d/3.6.4rc1.rst index ae4534be62c5c4..afa5b8b0efb148 100644 --- a/Misc/NEWS.d/3.6.4rc1.rst +++ b/Misc/NEWS.d/3.6.4rc1.rst @@ -138,7 +138,7 @@ integer with binary base. .. section: Core and Builtins Fixed an assertion failure in Python parser in case of a bad -`unicodedata.normalize()`. Patch by Oren Milman. +``unicodedata.normalize()``. Patch by Oren Milman. .. @@ -147,7 +147,7 @@ Fixed an assertion failure in Python parser in case of a bad .. nonce: wT9Iy7 .. section: Core and Builtins -Raise a `TypeError` with a helpful error message when class creation fails +Raise a ``TypeError`` with a helpful error message when class creation fails due to a metaclass with a bad ``__prepare__()`` method. Patch by Oren Milman. @@ -158,7 +158,7 @@ Milman. .. nonce: OxwINs .. section: Core and Builtins -Fix an assertion failure in `_warnings.warn()` in case of a bad ``__name__`` +Fix an assertion failure in ``_warnings.warn()`` in case of a bad ``__name__`` global. Patch by Oren Milman. .. @@ -168,8 +168,8 @@ global. Patch by Oren Milman. .. nonce: VomaFa .. section: Core and Builtins -Fix an assertion failure in `json`, in case `_json.make_encoder()` received -a bad `encoder()` argument. Patch by Oren Milman. +Fix an assertion failure in ``json``, in case ``_json.make_encoder()`` received +a bad ``encoder()`` argument. Patch by Oren Milman. .. @@ -189,7 +189,7 @@ such a module. Patch by Oren Milman. .. nonce: r7m2sj .. section: Core and Builtins -Fix an assertion failure in `ctypes` class definition, in case the class has +Fix an assertion failure in ``ctypes`` class definition, in case the class has an attribute whose name is specified in ``_anonymous_`` but not in ``_fields_``. Patch by Oren Milman. @@ -200,7 +200,7 @@ an attribute whose name is specified in ``_anonymous_`` but not in .. nonce: o06iKD .. section: Core and Builtins -Fix an assertion failure in `_random.Random.seed()` in case the argument has +Fix an assertion failure in ``_random.Random.seed()`` in case the argument has a bad ``__abs__()`` method. Patch by Oren Milman. .. @@ -220,7 +220,7 @@ string. Patch by Oren Milman. .. nonce: bNE2l- .. 
section: Core and Builtins -Fix a crash in the ``__setstate__()`` method of `ctypes._CData`, in case of +Fix a crash in the ``__setstate__()`` method of ``ctypes._CData``, in case of a bad ``__dict__``. Patch by Oren Milman. .. @@ -240,8 +240,8 @@ float with a bad as_integer_ratio() method. Patch by Oren Milman. .. nonce: 7lzaKV .. section: Core and Builtins -Fix an assertion failure in `warnings.warn_explicit`, when the return value -of the received loader's get_source() has a bad splitlines() method. Patch +Fix an assertion failure in ``warnings.warn_explicit``, when the return value +of the received loader's ``get_source()`` has a bad ``splitlines()`` method. Patch by Oren Milman. .. @@ -251,8 +251,8 @@ by Oren Milman. .. nonce: j7ZvN_ .. section: Core and Builtins -`PyErr_PrintEx()` clears now the ignored exception that may be raised by -`_PySys_SetObjectId()`, for example when no memory. +``PyErr_PrintEx()`` clears now the ignored exception that may be raised by +``_PySys_SetObjectId()``, for example when no memory. .. @@ -599,8 +599,8 @@ On Windows, faulthandler.enable() now ignores MSC and COM exceptions. .. nonce: XrVMME .. section: Library -Prevent crashes in `_elementtree` due to unsafe cleanup of `Element.text` -and `Element.tail`. Patch by Oren Milman. +Prevent crashes in ``_elementtree`` due to unsafe cleanup of ``Element.text`` +and ``Element.tail``. Patch by Oren Milman. .. @@ -813,8 +813,8 @@ reference leaks. .. nonce: lo7FQX .. section: Tests -Add the `set_nomemory(start, stop)` and `remove_mem_hooks()` functions to -the _testcapi module. +Add the ``set_nomemory(start, stop)`` and ``remove_mem_hooks()`` functions to +the ``_testcapi`` module. .. @@ -897,7 +897,7 @@ Prevent double substitution of prefix in python-config.sh. .. nonce: KUDjno .. section: Build -Avoid wholesale rebuild after `make regen-all` if nothing changed. +Avoid wholesale rebuild after ``make regen-all`` if nothing changed. .. @@ -1123,7 +1123,7 @@ and Py_SetPath() .. nonce: Q3T_8n .. section: C API -The `PyExc_RecursionErrorInst` singleton is removed and -`PyErr_NormalizeException()` does not use it anymore. This singleton is +The ``PyExc_RecursionErrorInst`` singleton is removed and +``PyErr_NormalizeException()`` does not use it anymore. This singleton is persistent and its members being never cleared may cause a segfault during finalization of the interpreter. See also issue #22898. diff --git a/Misc/NEWS.d/3.6.5rc1.rst b/Misc/NEWS.d/3.6.5rc1.rst index 448baed5413ecb..056bacb5267c41 100644 --- a/Misc/NEWS.d/3.6.5rc1.rst +++ b/Misc/NEWS.d/3.6.5rc1.rst @@ -283,7 +283,7 @@ Make sure sys.argv remains as a list when running trace. .. nonce: bvHDOc .. section: Library -Fixed `asyncio.Condition` issue which silently ignored cancellation after +Fixed ``asyncio.Condition`` issue which silently ignored cancellation after notifying and cancelling a conditional lock. Patch by Bar Harel. .. @@ -599,7 +599,7 @@ deprecation. .. nonce: w1m_8r .. section: Documentation -Improve docstrings for `pathlib.PurePath` subclasses. +Improve docstrings for ``pathlib.PurePath`` subclasses. .. diff --git a/Misc/NEWS.d/3.6.6rc1.rst b/Misc/NEWS.d/3.6.6rc1.rst index 9624195c79043b..f70649af055b4e 100644 --- a/Misc/NEWS.d/3.6.6rc1.rst +++ b/Misc/NEWS.d/3.6.6rc1.rst @@ -110,7 +110,7 @@ on Windows. .. nonce: UoC319 .. section: Core and Builtins -Fix a crash in `ctypes.cast()` in case the type argument is a ctypes +Fix a crash in ``ctypes.cast()`` in case the type argument is a ctypes structured data type. 
Patch by Eryk Sun and Oren Milman. .. @@ -248,7 +248,7 @@ Patch by Zvi Effron .. nonce: taxbVT .. section: Library -Fix race condition with `ReadTransport.resume_reading` in Windows proactor +Fix race condition with ``ReadTransport.resume_reading`` in Windows proactor event loop. .. @@ -346,7 +346,7 @@ tree of tuples or lists with ``line_info=False`` and ``col_info=True``. .. nonce: B56Hc1 .. section: Library -Fix FD leak in `_SelectorSocketTransport` Patch by Vlad Starostin. +Fix FD leak in ``_SelectorSocketTransport`` Patch by Vlad Starostin. .. diff --git a/Misc/NEWS.d/3.7.0a1.rst b/Misc/NEWS.d/3.7.0a1.rst index bee424241fd712..aca79c4cc8c1b8 100644 --- a/Misc/NEWS.d/3.7.0a1.rst +++ b/Misc/NEWS.d/3.7.0a1.rst @@ -86,7 +86,7 @@ information. .. nonce: r7m2sj .. section: Core and Builtins -Fix an assertion failure in `ctypes` class definition, in case the class has +Fix an assertion failure in ``ctypes`` class definition, in case the class has an attribute whose name is specified in ``_anonymous_`` but not in ``_fields_``. Patch by Oren Milman. @@ -97,8 +97,8 @@ an attribute whose name is specified in ``_anonymous_`` but not in .. nonce: 0yiA5Q .. section: Core and Builtins -Fix an assertion failure in `subprocess.Popen()` on Windows, in case the env -argument has a bad keys() method. Patch by Oren Milman. +Fix an assertion failure in ``subprocess.Popen()`` on Windows, in case the env +argument has a bad ``keys()`` method. Patch by Oren Milman. .. @@ -107,7 +107,7 @@ argument has a bad keys() method. Patch by Oren Milman. .. nonce: rS-FlC .. section: Core and Builtins -Fix an assertion failure in `PyErr_WriteUnraisable()` in case of an +Fix an assertion failure in ``PyErr_WriteUnraisable()`` in case of an exception with a bad ``__module__`` attribute. Patch by Oren Milman. .. @@ -224,7 +224,7 @@ plans to remove the functions from sys/types.h. .. nonce: t8QggK .. section: Core and Builtins -Fix an assertion failure in `zipimport.zipimporter.get_data` on Windows, +Fix an assertion failure in ``zipimport.zipimporter.get_data`` on Windows, when the return value of ``pathname.replace('/','\\')`` isn't a string. Patch by Oren Milman. @@ -235,7 +235,7 @@ Patch by Oren Milman. .. nonce: YMduKF .. section: Core and Builtins -Fix an assertion failure in the write() method of `io.TextIOWrapper`, when +Fix an assertion failure in the ``write()`` method of ``io.TextIOWrapper``, when the encoder doesn't return a bytes object. Patch by Oren Milman. .. @@ -245,7 +245,7 @@ the encoder doesn't return a bytes object. Patch by Oren Milman. .. nonce: dRJzqR .. section: Core and Builtins -Fix a crash in some methods of `io.TextIOWrapper`, when the decoder's state +Fix a crash in some methods of ``io.TextIOWrapper``, when the decoder's state is invalid. Patch by Oren Milman. .. @@ -1855,7 +1855,7 @@ docserver attribute to None to break a reference cycle. .. nonce: gwnthq .. section: Library -Many asserts in `multiprocessing` are now more informative, and some error +Many asserts in ``multiprocessing`` are now more informative, and some error types have been changed to more specific ones. .. @@ -1896,7 +1896,7 @@ child exit. .. nonce: -2_YGj .. section: Library -`dis` now works with asynchronous generator and coroutine objects. Patch by +``dis`` now works with asynchronous generator and coroutine objects. Patch by George Collins based on diagnosis by Luciano Ramalho. .. @@ -1906,10 +1906,10 @@ George Collins based on diagnosis by Luciano Ramalho. .. nonce: huQi2Y .. 
section: Library -There are a number of uninformative asserts in the `multiprocessing` module, +There are a number of uninformative asserts in the ``multiprocessing`` module, as noted in issue 5001. This change fixes two of the most potentially problematic ones, since they are in error-reporting code, in the -`multiprocessing.managers.convert_to_error` function. (It also makes more +``multiprocessing.managers.convert_to_error`` function. (It also makes more informative a ValueError message.) The only potentially problematic change is that the AssertionError is now a TypeError; however, this should also help distinguish it from an AssertionError being *reported* by the @@ -1973,7 +1973,7 @@ attributes to help the garbage collector to destroy all widgets. .. nonce: 1t2hn5 .. section: Library -Fix `copyreg._slotnames()` mangled attribute calculation for classes whose +Fix ``copyreg._slotnames()`` mangled attribute calculation for classes whose name begins with an underscore. Patch by Shane Harvey. .. @@ -1983,7 +1983,7 @@ name begins with an underscore. Patch by Shane Harvey. .. nonce: 2CFVCO .. section: Library -Allow `logging.config.fileConfig` to accept kwargs and/or args. +Allow ``logging.config.fileConfig`` to accept kwargs and/or args. .. @@ -2159,7 +2159,7 @@ socket.close() now ignores ECONNRESET error. .. nonce: CLvEvV .. section: Library -Fix out of bounds write in `asyncio.CFuture.remove_done_callback()`. +Fix out of bounds write in ``asyncio.CFuture.remove_done_callback()``. .. @@ -2406,7 +2406,7 @@ Don't log exceptions if Task/Future "cancel()" method was called. .. nonce: xihJ4Y .. section: Library -Fix path calculation in `imp.load_package()`, fixing it for cases when a +Fix path calculation in ``imp.load_package()``, fixing it for cases when a package is only shipped with bytecodes. Patch by Alexandru Ardelean. .. @@ -4947,8 +4947,8 @@ run test_zipfile64. .. nonce: lo7FQX .. section: Tests -Add the `set_nomemory(start, stop)` and `remove_mem_hooks()` functions to -the _testcapi module. +Add the ``set_nomemory(start, stop)`` and ``remove_mem_hooks()`` functions to +the ``_testcapi`` module. .. @@ -5076,7 +5076,7 @@ on the Android armv7 qemu emulator. .. nonce: 4f-VJK .. section: Build -Allow --with-lto to be used on all builds, not just `make profile-opt`. +Allow ``--with-lto`` to be used on all builds, not just ``make profile-opt``. .. @@ -5819,7 +5819,7 @@ Add tests for configdialog keys tab. Patch by Cheryl Sabella. .. nonce: sqE1FS .. section: IDLE -IDLE: Calltips use `inspect.signature` instead of `inspect.getfullargspec`. +IDLE: Calltips use ``inspect.signature`` instead of ``inspect.getfullargspec``. This improves calltips for builtins converted to use Argument Clinic. Patch by Louie Lu. @@ -6224,9 +6224,9 @@ Added the slice index converter in Argument Clinic. .. nonce: KPFC7o .. section: Tools/Demos -Argument Clinic now uses the converter `bool(accept={int})` rather than -`int` for semantical booleans. This avoids repeating the default value for -Python and C and will help in converting to `bool` in future. +Argument Clinic now uses the converter ``bool(accept={int})`` rather than +``int`` for semantical booleans. This avoids repeating the default value for +Python and C and will help in converting to ``bool`` in future. .. diff --git a/Misc/NEWS.d/3.7.0a2.rst b/Misc/NEWS.d/3.7.0a2.rst index 0f107d8c5f5a93..06ca32d37fa4fa 100644 --- a/Misc/NEWS.d/3.7.0a2.rst +++ b/Misc/NEWS.d/3.7.0a2.rst @@ -65,8 +65,8 @@ integer with binary base. .. nonce: MtgLCn .. 
section: Core and Builtins -Fix an assertion failure in `zipimporter.get_source()` in case of a bad -`zlib.decompress()`. Patch by Oren Milman. +Fix an assertion failure in ``zipimporter.get_source()`` in case of a bad +``zlib.decompress()``. Patch by Oren Milman. .. @@ -76,7 +76,7 @@ Fix an assertion failure in `zipimporter.get_source()` in case of a bad .. section: Core and Builtins Fixed an assertion failure in Python parser in case of a bad -`unicodedata.normalize()`. Patch by Oren Milman. +``unicodedata.normalize()``. Patch by Oren Milman. .. @@ -85,7 +85,7 @@ Fixed an assertion failure in Python parser in case of a bad .. nonce: wT9Iy7 .. section: Core and Builtins -Raise a `TypeError` with a helpful error message when class creation fails +Raise a ``TypeError`` with a helpful error message when class creation fails due to a metaclass with a bad ``__prepare__()`` method. Patch by Oren Milman. @@ -105,7 +105,7 @@ Importlib was instrumented with two dtrace probes to profile import timing. .. nonce: OxwINs .. section: Core and Builtins -Fix an assertion failure in `_warnings.warn()` in case of a bad ``__name__`` +Fix an assertion failure in ``_warnings.warn()`` in case of a bad ``__name__`` global. Patch by Oren Milman. .. @@ -115,7 +115,7 @@ global. Patch by Oren Milman. .. nonce: pRVTRB .. section: Core and Builtins -Improved the error message logic for object.__new__ and object.__init__. +Improved the error message logic for ``object.__new__`` and ``object.__init__``. .. @@ -124,8 +124,8 @@ Improved the error message logic for object.__new__ and object.__init__. .. nonce: VomaFa .. section: Core and Builtins -Fix an assertion failure in `json`, in case `_json.make_encoder()` received -a bad `encoder()` argument. Patch by Oren Milman. +Fix an assertion failure in ``json``, in case ``_json.make_encoder()`` received +a bad ``encoder()`` argument. Patch by Oren Milman. .. @@ -145,7 +145,7 @@ such a module. Patch by Oren Milman. .. nonce: o06iKD .. section: Core and Builtins -Fix an assertion failure in `_random.Random.seed()` in case the argument has +Fix an assertion failure in ``_random.Random.seed()`` in case the argument has a bad ``__abs__()`` method. Patch by Oren Milman. .. @@ -218,7 +218,7 @@ string. Patch by Oren Milman. .. nonce: bNE2l- .. section: Core and Builtins -Fix a crash in the ``__setstate__()`` method of `ctypes._CData`, in case of +Fix a crash in the ``__setstate__()`` method of ``ctypes._CData``, in case of a bad ``__dict__``. Patch by Oren Milman. .. @@ -238,8 +238,8 @@ float with a bad as_integer_ratio() method. Patch by Oren Milman. .. nonce: 7lzaKV .. section: Core and Builtins -Fix an assertion failure in `warnings.warn_explicit`, when the return value -of the received loader's get_source() has a bad splitlines() method. Patch +Fix an assertion failure in ``warnings.warn_explicit``, when the return value +of the received loader's ``get_source()`` has a bad ``splitlines()`` method. Patch by Oren Milman. .. @@ -286,8 +286,8 @@ On Windows, faulthandler.enable() now ignores MSC and COM exceptions. .. nonce: XrVMME .. section: Library -Prevent crashes in `_elementtree` due to unsafe cleanup of `Element.text` -and `Element.tail`. Patch by Oren Milman. +Prevent crashes in ``_elementtree`` due to unsafe cleanup of ``Element.text`` +and ``Element.tail``. Patch by Oren Milman. .. @@ -307,8 +307,8 @@ compiling. bm_regex_compile benchmark shows 14% performance improvements. .. 
section: Library The types of compiled regular objects and match objects are now exposed as -`re.Pattern` and `re.Match`. This adds information in pydoc output for the -re module. +``re.Pattern`` and ``re.Match``. This adds information in pydoc output for the +``re`` module. .. @@ -485,10 +485,10 @@ since Python 3.3.) .. nonce: cIMFJW .. section: Library -Reprs of subclasses of some collection and iterator classes (`bytearray`, -`array.array`, `collections.deque`, `collections.defaultdict`, -`itertools.count`, `itertools.repeat`) now contain actual type name insteads -of hardcoded name of the base class. +Reprs of subclasses of some collection and iterator classes (``bytearray``, +``array.array``, ``collections.deque``, ``collections.defaultdict``, +``itertools.count``, ``itertools.repeat``) now contain actual type name instead +of hardcoded names of the base class. .. @@ -584,7 +584,7 @@ Correct PCBuild/ case to PCbuild/ in build scripts and documentation. .. nonce: KUDjno .. section: Build -Avoid wholesale rebuild after `make regen-all` if nothing changed. +Avoid wholesale rebuild after ``make regen-all`` if nothing changed. .. @@ -657,8 +657,8 @@ for code and tests by Guilherme Polo and Cheryl Sabella, respectively. .. nonce: K_EjpO .. section: C API -Make `PyMapping_Keys()`, `PyMapping_Values()` and `PyMapping_Items()` always -return a `list` (rather than a `list` or a `tuple`). Patch by Oren Milman. +Make ``PyMapping_Keys()``, ``PyMapping_Values()`` and ``PyMapping_Items()`` always +return a ``list`` (rather than a ``list`` or a ``tuple``). Patch by Oren Milman. .. diff --git a/Misc/NEWS.d/3.7.0a3.rst b/Misc/NEWS.d/3.7.0a3.rst index a968616f55be68..08cabeda7e9a46 100644 --- a/Misc/NEWS.d/3.7.0a3.rst +++ b/Misc/NEWS.d/3.7.0a3.rst @@ -240,8 +240,8 @@ after shrinking a memory block. .. nonce: j7ZvN_ .. section: Core and Builtins -`PyErr_PrintEx()` clears now the ignored exception that may be raised by -`_PySys_SetObjectId()`, for example when no memory. +``PyErr_PrintEx()`` clears now the ignored exception that may be raised by +``_PySys_SetObjectId()``, for example when no memory. .. @@ -643,7 +643,7 @@ undocumented. .. nonce: RwietE .. section: Library -cProfile command line now accepts `-m module_name` as an alternative to +cProfile command line now accepts ``-m module_name`` as an alternative to script path. Patch by Sanyam Khurana. .. @@ -926,7 +926,7 @@ Fix multiprocessing.Process when stdout and/or stderr is closed or None. .. nonce: -1MBEy .. section: Library -Add `subnet_of` and `superset_of` containment tests to +Add ``subnet_of`` and ``superset_of`` containment tests to :class:`ipaddress.IPv6Network` and :class:`ipaddress.IPv4Network`. Patch by Michel Albert and Cheryl Sabella. @@ -948,7 +948,7 @@ for backward compatibility with Python 2.2, and was deprecated since Python .. nonce: IxCvGB .. section: Library -Add a ``subprocess.Popen(text=False)`` keyword argument to `subprocess` +Add a ``subprocess.Popen(text=False)`` keyword argument to ``subprocess`` functions to be more explicit about when the library should attempt to decode outputs into text. Patch by Andrew Clegg. @@ -1191,7 +1191,7 @@ interruptions. If it crashes, restart it when necessary. .. nonce: AniZuz .. section: Library -Added support for AF_UNIX socket in asyncio `create_datagram_endpoint`. +Added support for AF_UNIX socket in asyncio ``create_datagram_endpoint``. .. @@ -1609,7 +1609,7 @@ comparison functions. .. nonce: Q3T_8n .. 
section: C API -The `PyExc_RecursionErrorInst` singleton is removed and -`PyErr_NormalizeException()` does not use it anymore. This singleton is +The ``PyExc_RecursionErrorInst`` singleton is removed and +``PyErr_NormalizeException()`` does not use it anymore. This singleton is persistent and its members being never cleared may cause a segfault during finalization of the interpreter. See also issue #22898. diff --git a/Misc/NEWS.d/3.7.0a4.rst b/Misc/NEWS.d/3.7.0a4.rst index ebae046a7a6ba0..f2c6559037d84f 100644 --- a/Misc/NEWS.d/3.7.0a4.rst +++ b/Misc/NEWS.d/3.7.0a4.rst @@ -152,7 +152,7 @@ option or the ``PYTHONWARNINGS`` environment variable. .. nonce: PgGQaB .. section: Core and Builtins -`-X dev` now injects a ``'default'`` entry into sys.warnoptions, ensuring +``-X dev`` now injects a ``'default'`` entry into sys.warnoptions, ensuring that it behaves identically to actually passing ``-Wdefault`` at the command line. @@ -192,7 +192,7 @@ by Ivan Levkivskyi. .. nonce: mDeCLK .. section: Core and Builtins -The `atexit` module now has its callback stored per interpreter. +The ``atexit`` module now has its callback stored per interpreter. .. @@ -792,7 +792,7 @@ to more readily adjust to platform dependent behaviour. .. nonce: ODpc9y .. section: Windows -Implement support for `subprocess.Popen(close_fds=True)` on Windows. Patch +Implement support for ``subprocess.Popen(close_fds=True)`` on Windows. Patch by Segev Finer. .. diff --git a/Misc/NEWS.d/3.7.0b2.rst b/Misc/NEWS.d/3.7.0b2.rst index 9590914599bb86..702dbc960c018d 100644 --- a/Misc/NEWS.d/3.7.0b2.rst +++ b/Misc/NEWS.d/3.7.0b2.rst @@ -214,7 +214,7 @@ helper methods that can be used instead ``_dump_registry``, .. nonce: bvHDOc .. section: Library -Fixed `asyncio.Condition` issue which silently ignored cancellation after +Fixed ``asyncio.Condition`` issue which silently ignored cancellation after notifying and cancelling a conditional lock. Patch by Bar Harel. .. @@ -497,7 +497,7 @@ deprecation. .. nonce: w1m_8r .. section: Documentation -Improve docstrings for `pathlib.PurePath` subclasses. +Improve docstrings for ``pathlib.PurePath`` subclasses. .. diff --git a/Misc/NEWS.d/3.7.0b4.rst b/Misc/NEWS.d/3.7.0b4.rst index 1d4fc921406fd0..b17c7e08d1d408 100644 --- a/Misc/NEWS.d/3.7.0b4.rst +++ b/Misc/NEWS.d/3.7.0b4.rst @@ -371,8 +371,8 @@ suffixes). .. nonce: o7G_UO .. section: Build -By default, modules configured in `Modules/Setup` are no longer built with -`-DPy_BUILD_CORE`. Instead, modules that specifically need that preprocessor +By default, modules configured in ``Modules/Setup`` are no longer built with +``-DPy_BUILD_CORE``. Instead, modules that specifically need that preprocessor definition include it in their individual entries. .. diff --git a/Misc/NEWS.d/3.7.0b5.rst b/Misc/NEWS.d/3.7.0b5.rst index fb29109869188b..459eaddbc91f59 100644 --- a/Misc/NEWS.d/3.7.0b5.rst +++ b/Misc/NEWS.d/3.7.0b5.rst @@ -26,7 +26,7 @@ module_globals is not a dict. .. nonce: kqBNzv .. section: Core and Builtins -The new `os.posix_spawn` added in 3.7.0b1 was removed as we are still +The new ``os.posix_spawn`` added in 3.7.0b1 was removed as we are still working on what the API should look like. Expect this in 3.8 instead. .. @@ -77,7 +77,7 @@ their body. Based on patch by Inada Naoki. .. nonce: UoC319 .. section: Core and Builtins -Fix a crash in `ctypes.cast()` in case the type argument is a ctypes +Fix a crash in ``ctypes.cast()`` in case the type argument is a ctypes structured data type. Patch by Eryk Sun and Oren Milman. .. 
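A quick way to observe the ``-X dev`` behaviour mentioned above is to spawn a child interpreter in dev mode and inspect its warning options; a minimal sketch in Python, assuming a CPython 3.7+ interpreter::

    import subprocess
    import sys

    # Run a child interpreter with -X dev and print its sys.warnoptions.
    result = subprocess.run(
        [sys.executable, "-X", "dev", "-c", "import sys; print(sys.warnoptions)"],
        capture_output=True, text=True, check=True,
    )
    # Dev mode is expected to behave like passing -Wdefault explicitly,
    # so the output should include 'default'.
    print(result.stdout)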
@@ -254,8 +254,8 @@ default. .. nonce: C6Hnd1 .. section: Library -Do not simplify arguments to `typing.Union`. Now `Union[Manager, Employee]` -is not simplified to `Employee` at runtime. Such simplification previously +Do not simplify arguments to ``typing.Union``. Now ``Union[Manager, Employee]`` +is not simplified to ``Employee`` at runtime. Such simplification previously caused several bugs and limited possibilities for introspection. .. @@ -314,7 +314,7 @@ Patch by Zvi Effron .. nonce: taxbVT .. section: Library -Fix race condition with `ReadTransport.resume_reading` in Windows proactor +Fix race condition with ``ReadTransport.resume_reading`` in Windows proactor event loop. .. @@ -324,7 +324,7 @@ event loop. .. nonce: pj2Mbb .. section: Library -Fix failure in `typing.get_type_hints()` when ClassVar was provided as a +Fix failure in ``typing.get_type_hints()`` when ClassVar was provided as a string forward reference. .. @@ -427,7 +427,7 @@ Donghee Na. .. nonce: B56Hc1 .. section: Library -Fix FD leak in `_SelectorSocketTransport` Patch by Vlad Starostin. +Fix FD leak in ``_SelectorSocketTransport`` Patch by Vlad Starostin. .. @@ -466,7 +466,7 @@ Support arrays >=2GiB in :mod:`ctypes`. Patch by Segev Finer. .. nonce: E5gba1 .. section: Documentation -Document that `asyncio.wait()` does not cancel its futures on timeout. +Document that ``asyncio.wait()`` does not cancel its futures on timeout. .. diff --git a/Misc/NEWS.d/3.8.0a1.rst b/Misc/NEWS.d/3.8.0a1.rst index b56cda86f11faa..11b303e89ad04f 100644 --- a/Misc/NEWS.d/3.8.0a1.rst +++ b/Misc/NEWS.d/3.8.0a1.rst @@ -882,7 +882,7 @@ Update valgrind suppression list to use .. nonce: GIOm_8 .. section: Core and Builtins -Added the "socket" option in the `stat.filemode()` Python implementation to +Added the "socket" option in the ``stat.filemode()`` Python implementation to match the C implementation. .. @@ -1271,8 +1271,8 @@ parentheses in the string representation. .. nonce: tDBciE .. section: Core and Builtins -Added support for the `setpgroup`, `resetids`, `setsigmask`, `setsigdef` and -`scheduler` parameters of `posix_spawn`. Patch by Pablo Galindo. +Added support for the ``setpgroup``, ``resetids``, ``setsigmask``, ``setsigdef`` and +``scheduler`` parameters of ``posix_spawn``. Patch by Pablo Galindo. .. @@ -1735,7 +1735,7 @@ Patch by Zackery Spytz. .. nonce: UoC319 .. section: Core and Builtins -Fix a crash in `ctypes.cast()` in case the type argument is a ctypes +Fix a crash in ``ctypes.cast()`` in case the type argument is a ctypes structured data type. Patch by Eryk Sun and Oren Milman. .. @@ -1745,7 +1745,7 @@ structured data type. Patch by Eryk Sun and Oren Milman. .. nonce: jgYsSA .. section: Core and Builtins -Fix a crash in `os.utime()` in case of a bad ns argument. Patch by Oren +Fix a crash in ``os.utime()`` in case of a bad ns argument. Patch by Oren Milman. .. @@ -2025,7 +2025,7 @@ Fixed help() on metaclasses. Patch by Sanyam Khurana. .. nonce: PutiOC .. section: Library -Expose ``raise(signum)`` as `raise_signal` +Expose ``raise(signum)`` as ``raise_signal`` .. @@ -2248,7 +2248,7 @@ the pool is still running. .. nonce: abB4BN .. section: Library -When a :class:`Mock` instance was used to wrap an object, if `side_effect` +When a :class:`Mock` instance was used to wrap an object, if ``side_effect`` is used in one of the mocks of it methods, don't call the original implementation and return the result of using the side effect the same way that it is done with return_value. 
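The ``typing.Union`` entry above (arguments are no longer collapsed when one is a subclass of the other) can be illustrated with a short sketch; ``Employee`` and ``Manager`` are the illustrative names from the entry, and ``typing.get_args`` needs Python 3.8+::

    from typing import Union, get_args

    class Employee:
        pass

    class Manager(Employee):
        pass

    # Both arguments are kept at runtime; the union is not simplified
    # to just Employee even though Manager subclasses it.
    assert get_args(Union[Manager, Employee]) == (Manager, Employee)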
@@ -2377,7 +2377,7 @@ compliant, and will not throw an exception on a trailing '%'. .. nonce: vepCSJ .. section: Library -The function `platform.popen` has been removed, it was deprecated since +The function ``platform.popen`` has been removed, it was deprecated since Python 3.3: use :func:`os.popen` instead. .. @@ -2529,9 +2529,9 @@ Fix incorrect parsing of :class:`_io.IncrementalNewlineDecoder`'s .. nonce: CulMN8 .. section: Library -Remove `StreamReaderProtocol._untrack_reader`. The call to `_untrack_reader` +Remove ``StreamReaderProtocol._untrack_reader``. The call to ``_untrack_reader`` is currently performed too soon, causing the protocol to forget about the -reader before `connection_lost` can run and feed the EOF to the reader. +reader before ``connection_lost`` can run and feed the EOF to the reader. .. @@ -2583,8 +2583,8 @@ polling for new events. .. nonce: ltSrtr .. section: Library -`importlib` no longer logs `wrote ` redundantly after -`(created|could not create) ` is already logged. Patch by +``importlib`` no longer logs ``wrote `` redundantly after +``(created|could not create) `` is already logged. Patch by Quentin Agren. .. @@ -2717,7 +2717,7 @@ Use :func:`socket.CMSG_SPACE` to calculate ancillary data size instead of .. nonce: rWBb43 .. section: Library -The `mailbox.mbox.get_string` function *from_* parameter can now +The ``mailbox.mbox.get_string`` function *from_* parameter can now successfully be set to a non-default value. .. @@ -2799,7 +2799,7 @@ children which are instances of ``Element`` subclasses. .. nonce: z2FbOp .. section: Library -:class:`smtplib.SMTP` objects now always have a `sock` attribute present +:class:`smtplib.SMTP` objects now always have a ``sock`` attribute present .. @@ -2914,7 +2914,7 @@ Fix inspect module polluted ``sys.modules`` when parsing .. nonce: Wo2PoJ .. section: Library -Add `mtime` argument to `gzip.compress` for reproducible output. Patch by +Add ``mtime`` argument to ``gzip.compress`` for reproducible output. Patch by Guo Ci Teo. .. @@ -3019,7 +3019,7 @@ argument. Patch by Andrés Delfino. .. nonce: rSPBW9 .. section: Library -In :class:`QueueHandler`, clear `exc_text` from :class:`LogRecord` to +In :class:`QueueHandler`, clear ``exc_text`` from :class:`LogRecord` to prevent traceback from being written twice. .. @@ -3060,8 +3060,8 @@ and will be removed in future Python versions. .. nonce: CUE8LU .. section: Library -Add deprecation warning when `loop` is used in methods: `asyncio.sleep`, -`asyncio.wait` and `asyncio.wait_for`. +Add deprecation warning when ``loop`` is used in methods: ``asyncio.sleep``, +``asyncio.wait`` and ``asyncio.wait_for``. .. @@ -3191,7 +3191,7 @@ the stream is deleted (garbage collected) without ``close()`` call. .. nonce: 3IPIH5 .. section: Library -`Enum._missing_`: raise `ValueError` if None returned and `TypeError` if +``Enum._missing_``: raise ``ValueError`` if None returned and ``TypeError`` if non-member is returned. .. @@ -3302,8 +3302,8 @@ issues on Windows. .. nonce: xL7-kG .. section: Library -Fix possible mojibake in the error message of `pwd.getpwnam` and -`grp.getgrnam` using string representation because of invisible characters +Fix possible mojibake in the error message of ``pwd.getpwnam`` and +``grp.getgrnam`` using string representation because of invisible characters or trailing whitespaces. Patch by William Grzybowski. .. @@ -4575,8 +4575,8 @@ OpenSSL 1.1.1 .. nonce: nzQgD8 .. 
section: Library -Release GIL on `grp.getgrnam`, `grp.getgrgid`, `pwd.getpwnam` and -`pwd.getpwuid` if reentrant variants of these functions are available. Patch +Release GIL on ``grp.getgrnam``, ``grp.getgrgid``, ``pwd.getpwnam`` and +``pwd.getpwuid`` if reentrant variants of these functions are available. Patch by William Grzybowski. .. @@ -4656,8 +4656,8 @@ default. .. nonce: C6Hnd1 .. section: Library -Do not simplify arguments to `typing.Union`. Now `Union[Manager, Employee]` -is not simplified to `Employee` at runtime. Such simplification previously +Do not simplify arguments to ``typing.Union``. Now ``Union[Manager, Employee]`` +is not simplified to ``Employee`` at runtime. Such simplification previously caused several bugs and limited possibilities for introspection. .. @@ -4736,7 +4736,7 @@ Patch by Zvi Effron .. nonce: taxbVT .. section: Library -Fix race condition with `ReadTransport.resume_reading` in Windows proactor +Fix race condition with ``ReadTransport.resume_reading`` in Windows proactor event loop. .. @@ -4746,7 +4746,7 @@ event loop. .. nonce: pj2Mbb .. section: Library -Fix failure in `typing.get_type_hints()` when ClassVar was provided as a +Fix failure in ``typing.get_type_hints()`` when ClassVar was provided as a string forward reference. .. @@ -5150,7 +5150,7 @@ instead of a wrapper function for exit callbacks. .. nonce: B56Hc1 .. section: Library -Fix FD leak in `_SelectorSocketTransport` Patch by Vlad Starostin. +Fix FD leak in ``_SelectorSocketTransport`` Patch by Vlad Starostin. .. @@ -5777,7 +5777,7 @@ helper methods that can be used instead ``_dump_registry``, .. nonce: bvHDOc .. section: Library -Fixed `asyncio.Condition` issue which silently ignored cancellation after +Fixed ``asyncio.Condition`` issue which silently ignored cancellation after notifying and cancelling a conditional lock. Patch by Bar Harel. .. @@ -5992,7 +5992,7 @@ Add Ttk spinbox widget to :mod:`tkinter.ttk`. Patch by Alan D Moore. .. nonce: flC-dE .. section: Library -:mod:`profile` CLI accepts `-m module_name` as an alternative to script +:mod:`profile` CLI accepts ``-m module_name`` as an alternative to script path. .. @@ -6147,8 +6147,8 @@ Support arrays >=2GiB in :mod:`ctypes`. Patch by Segev Finer. .. nonce: pDsFJl .. section: Library -Removed support of arguments in `tkinter.ttk.Treeview.selection`. It was -deprecated in 3.6. Use specialized methods like `selection_set` for +Removed support of arguments in ``tkinter.ttk.Treeview.selection``. It was +deprecated in 3.6. Use specialized methods like ``selection_set`` for changing the selection. .. @@ -6224,7 +6224,7 @@ imported from ``typing`` directly. .. nonce: 2eVOYS .. section: Documentation -Fix the documentation about an unexisting `f_restricted` attribute in the +Fix the documentation about an unexisting ``f_restricted`` attribute in the frame object. Patch by Stéphane Wirtel .. @@ -6311,7 +6311,7 @@ Document how passing coroutines to asyncio.wait() can be confusing. .. nonce: p9PoYv .. section: Documentation -Make clear that ``==`` operator sometimes is equivalent to `is`. The ``<``, +Make clear that ``==`` operator sometimes is equivalent to ``is``. The ``<``, ``<=``, ``>`` and ``>=`` operators are only defined where they make sense. .. @@ -6409,7 +6409,7 @@ Improve the documentation of :func:`asyncio.open_connection`, .. nonce: E5gba1 .. section: Documentation -Document that `asyncio.wait()` does not cancel its futures on timeout. +Document that ``asyncio.wait()`` does not cancel its futures on timeout. .. 
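Because ``asyncio.wait()`` does not cancel its futures on timeout (as the documentation entry above notes), callers generally cancel the pending set themselves; a minimal sketch, assuming Python 3.8+::

    import asyncio

    async def main():
        tasks = [asyncio.create_task(asyncio.sleep(d)) for d in (0.01, 10)]
        # After the timeout the slow task is still pending, not cancelled.
        done, pending = await asyncio.wait(tasks, timeout=0.1)
        for task in pending:
            task.cancel()          # explicit cancellation is up to the caller
        print(len(done), len(pending))  # 1 1

    asyncio.run(main())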
@@ -6660,7 +6660,7 @@ Stéphane Wirtel .. nonce: w1m_8r .. section: Documentation -Improve docstrings for `pathlib.PurePath` subclasses. +Improve docstrings for ``pathlib.PurePath`` subclasses. .. @@ -6997,7 +6997,7 @@ Use 3072 RSA keys and SHA-256 signature for test certs and keys. .. nonce: H8fCGa .. section: Tests -Remove special condition for AIX in `test_subprocess.test_undecodable_env` +Remove special condition for AIX in ``test_subprocess.test_undecodable_env`` .. @@ -7006,7 +7006,7 @@ Remove special condition for AIX in `test_subprocess.test_undecodable_env` .. nonce: IsRDPB .. section: Tests -Fix `test_utf8_mode.test_cmd_line` for AIX +Fix ``test_utf8_mode.test_cmd_line`` for AIX .. @@ -7034,9 +7034,9 @@ Fix ftplib test for TLS 1.3 by reading from data socket. .. nonce: g7TwYm .. section: Tests -Fix `test_socket` on AIX 6.1 and later IPv6 zone id supports only -supported by inet_pton6_zone() Switch to runtime-based platform.system() to -establish current platform rather than build-time based sys.platform() +Fix ``test_socket`` on AIX 6.1 and later IPv6 zone id supports only +supported by ``inet_pton6_zone()``. Switch to runtime-based ``platform.system()`` to +establish current platform rather than build-time based ``sys.platform()`` .. @@ -7172,8 +7172,8 @@ Fix failing ``test_asyncio`` on macOS 10.12.2+ due to transport of .. nonce: IKDsqu .. section: Tests -Making sure the `SMTPUTF8SimTests` class of tests gets run in -test_smtplib.py. +Making sure the ``SMTPUTF8SimTests`` class of tests gets run in +``test_smtplib.py``. .. @@ -7282,7 +7282,7 @@ CFLAGS to build third-party C extensions through distutils. .. nonce: XZTttb .. section: Build -Fix a compiler error when statically linking `pyexpat` in `Modules/Setup`. +Fix a compiler error when statically linking ``pyexpat`` in ``Modules/Setup``. .. @@ -7520,8 +7520,8 @@ suffixes). .. nonce: o7G_UO .. section: Build -By default, modules configured in `Modules/Setup` are no longer built with -`-DPy_BUILD_CORE`. Instead, modules that specifically need that preprocessor +By default, modules configured in ``Modules/Setup`` are no longer built with +``-DPy_BUILD_CORE``. Instead, modules that specifically need that preprocessor definition include it in their individual entries. .. @@ -8090,7 +8090,7 @@ Fix imports in idlelib.window. .. nonce: QEaANl .. section: IDLE -Proper format `calltip` when the function has no docstring. +Proper format ``calltip`` when the function has no docstring. .. diff --git a/Misc/NEWS.d/3.8.0a2.rst b/Misc/NEWS.d/3.8.0a2.rst index 223126145c77f1..c8620aeea7f133 100644 --- a/Misc/NEWS.d/3.8.0a2.rst +++ b/Misc/NEWS.d/3.8.0a2.rst @@ -4,7 +4,7 @@ .. release date: 2019-02-25 .. section: Core and Builtins -Raise a :exc:`SyntaxError` when assigning a value to `__debug__` with the +Raise a :exc:`SyntaxError` when assigning a value to ``__debug__`` with the Assignment Operator. Contributed by Stéphane Wirtel and Pablo Galindo. .. diff --git a/Misc/NEWS.d/3.8.0a3.rst b/Misc/NEWS.d/3.8.0a3.rst index 66308c6bca2e26..a06efb9abe795d 100644 --- a/Misc/NEWS.d/3.8.0a3.rst +++ b/Misc/NEWS.d/3.8.0a3.rst @@ -324,7 +324,7 @@ Raise ModuleNotFoundError in pyclbr when a module can't be found. Thanks to .. nonce: MDXLw6 .. section: Library -Switch the default format used for writing tars with mod:`tarfile` to the +Switch the default format used for writing tars with :mod:`tarfile` to the modern POSIX.1-2001 pax standard, from the vendor-specific GNU. Contributed by C.A.M. Gerlach. @@ -473,7 +473,7 @@ path string. .. nonce: NA_rXa .. 
section: Library -Ensure custom :func:`warnings.formatwarning` function can receive `line` as +Ensure custom :func:`warnings.formatwarning` function can receive ``line`` as positional argument. Based on patch by Tashrif Billah. .. @@ -533,10 +533,10 @@ Kumar Akshay. .. nonce: yffB3F .. section: Library -`pprint.pp` has been added to pretty-print objects with dictionary keys +``pprint.pp`` has been added to pretty-print objects with dictionary keys being sorted with their insertion order by default. Parameter *sort_dicts* -has been added to `pprint.pprint`, `pprint.pformat` and -`pprint.PrettyPrinter`. Contributed by Rémi Lapeyre. +has been added to ``pprint.pprint``, ``pprint.pformat`` and +``pprint.PrettyPrinter``. Contributed by Rémi Lapeyre. .. diff --git a/Misc/NEWS.d/3.8.0a4.rst b/Misc/NEWS.d/3.8.0a4.rst index 3097245b74a511..fa5eb697d9202d 100644 --- a/Misc/NEWS.d/3.8.0a4.rst +++ b/Misc/NEWS.d/3.8.0a4.rst @@ -168,7 +168,7 @@ decoder. Changing ``dict`` keys during iteration of the dict itself, ``keys()``, ``values()``, or ``items()`` will now be detected in certain corner cases where keys are deleted/added so that the number of keys isn't changed. A -`RuntimeError` will be raised after ``len(dict)`` iterations. Contributed by +``RuntimeError`` will be raised after ``len(dict)`` iterations. Contributed by Thomas Perl. .. @@ -580,7 +580,7 @@ is a dictionary. .. section: Library Calling ``stop()`` on an unstarted or stopped :func:`unittest.mock.patch` -object will now return `None` instead of raising :exc:`RuntimeError`, making +object will now return ``None`` instead of raising :exc:`RuntimeError`, making the method idempotent. Patch by Karthikeyan Singaravelan. .. @@ -609,9 +609,9 @@ Add time module support and fix test_time faiures for VxWorks. .. nonce: i2Z1XR .. section: Library -Added support for keyword arguments `default_namespace` and -`xml_declaration` in functions ElementTree.tostring() and -ElementTree.tostringlist(). +Added support for keyword arguments ``default_namespace`` and +``xml_declaration`` in functions ``ElementTree.tostring()`` and +``ElementTree.tostringlist()``. .. @@ -744,7 +744,7 @@ Remove stale unix datagram socket before binding .. nonce: _4Q_bi .. section: Library -Implemented Happy Eyeballs in `asyncio.create_connection()`. Added two new +Implemented Happy Eyeballs in ``asyncio.create_connection()``. Added two new arguments, *happy_eyeballs_delay* and *interleave*, to specify Happy Eyeballs behavior. @@ -1405,7 +1405,7 @@ only present in alpha releases of Python 3.8. Patch by Paul Ganssle. .. nonce: wpbWeb .. section: C API -Modify ``PyObject_Init`` to correctly increase the refcount of heap- -allocated Type objects. Also fix the refcounts of the heap-allocated types +Modify ``PyObject_Init`` to correctly increase the refcount of heap-allocated +Type objects. Also fix the refcounts of the heap-allocated types that were either doing this manually or not decreasing the type's refcount in tp_dealloc diff --git a/Misc/NEWS.d/3.8.0b1.rst b/Misc/NEWS.d/3.8.0b1.rst index c5e27b04ef8e8b..4eb0c0451e97b5 100644 --- a/Misc/NEWS.d/3.8.0b1.rst +++ b/Misc/NEWS.d/3.8.0b1.rst @@ -146,8 +146,8 @@ constant expressions inside the f-string). .. nonce: VeVvhJ .. 
section: Core and Builtins -The `bytes.hex`, `bytearray.hex`, and `memoryview.hex` methods as well as -the `binascii.hexlify` and `b2a_hex` functions now have the ability to +The ``bytes.hex``, ``bytearray.hex``, and ``memoryview.hex`` methods as well as +the ``binascii.hexlify`` and ``b2a_hex`` functions now have the ability to include an optional separator between hex bytes. This functionality was inspired by MicroPython's hexlify implementation. @@ -198,8 +198,8 @@ any error. .. nonce: QwLa3P .. section: Core and Builtins -Only accept text after `# type: ignore` if the first character is ASCII. -This is to disallow things like `# type: ignoreé`. +Only accept text after ``# type: ignore`` if the first character is ASCII. +This is to disallow things like ``# type: ignoreé``. .. @@ -208,9 +208,9 @@ This is to disallow things like `# type: ignoreé`. .. nonce: EFRHZ3 .. section: Core and Builtins -Store text appearing after a `# type: ignore` comment in the AST. For -example a type ignore like `# type: ignore[E1000]` will have the string -`"[E1000]"` stored in its AST node. +Store text appearing after a ``# type: ignore`` comment in the AST. For +example a type ignore like ``# type: ignore[E1000]`` will have the string +``"[E1000]"`` stored in its AST node. .. @@ -414,7 +414,7 @@ Fix incorrect use of ``%p`` in format strings. Patch by Zackery Spytz. .. nonce: RO20OV .. section: Core and Builtins -builtins.help() now prefixes `async` for async functions +``builtins.help()`` now prefixes ``async`` for async functions. .. @@ -443,7 +443,7 @@ Added fix for broken symlinks in combination with pathlib .. section: Core and Builtins Added new trashcan macros to deal with a double deallocation that could -occur when the `tp_dealloc` of a subclass calls the `tp_dealloc` of a base +occur when the ``tp_dealloc`` of a subclass calls the ``tp_dealloc`` of a base class and that base class uses the trashcan mechanism. Patch by Jeroen Demeyer. @@ -768,7 +768,7 @@ Taskaya .. nonce: u7cxu7 .. section: Library -PDB command `args` now display positional only arguments. Patch contributed +PDB command ``args`` now display positional only arguments. Patch contributed by Rémi Lapeyre. .. @@ -778,7 +778,7 @@ by Rémi Lapeyre. .. nonce: JkZORP .. section: Library -PDB command `args` now display keyword only arguments. Patch contributed by +PDB command ``args`` now display keyword only arguments. Patch contributed by Rémi Lapeyre. .. @@ -799,7 +799,7 @@ Add missing names to ``typing.__all__``: ``ChainMap``, ``ForwardRef``, .. section: Library Add SupportsIndex protocol to the typing module to allow type checking to -detect classes that can be passed to `hex()`, `oct()` and `bin()`. +detect classes that can be passed to ``hex()``, ``oct()`` and ``bin()``. .. @@ -1023,10 +1023,10 @@ reading metadata from third-party packages. .. nonce: iigeqk .. section: Library -When using `type_comments=True` in `ast.parse`, treat `# type: ignore` +When using ``type_comments=True`` in ``ast.parse``, treat ``# type: ignore`` followed by a non-alphanumeric character and then arbitrary text as a type ignore, instead of requiring nothing but whitespace or another comment. This -is to permit formations such as `# type: ignore[E1000]`. +is to permit formations such as ``# type: ignore[E1000]``. .. @@ -1066,7 +1066,7 @@ exposed to the user. Patch by Aviv Palivoda. .. nonce: WK8Y-k .. 
section: Library -In `shutil.copystat()`, first copy extended file attributes and then file +In ``shutil.copystat()``, first copy extended file attributes and then file permissions, since extended attributes can only be set on the destination while it is still writeable. @@ -1120,7 +1120,7 @@ add_done_callback correctly when the Future has already completed. .. nonce: 4payXb .. section: Library -Limit `max_workers` in `ProcessPoolExecutor` to 61 to work around a +Limit ``max_workers`` in ``ProcessPoolExecutor`` to 61 to work around a WaitForMultipleObjects limitation. .. @@ -1676,7 +1676,7 @@ documentation covers it independently. .. nonce: 6hg6J8 .. section: Documentation -Add detail to the documentation on the `pty.spawn` function. +Add detail to the documentation on the ``pty.spawn`` function. .. @@ -1704,7 +1704,7 @@ Added the context variable in glossary. .. nonce: Q7s2FB .. section: Documentation -Clarify that `copy()` is not part of the `MutableSequence` ABC. +Clarify that ``copy()`` is not part of the ``MutableSequence`` ABC. .. @@ -1713,7 +1713,7 @@ Clarify that `copy()` is not part of the `MutableSequence` ABC. .. nonce: jalAaQ .. section: Documentation -Make `codecs.StreamRecoder.writelines` take a list of bytes. +Make ``codecs.StreamRecoder.writelines`` take a list of bytes. .. diff --git a/Misc/NEWS.d/3.9.0a1.rst b/Misc/NEWS.d/3.9.0a1.rst index b365e5fbfb0dbe..0772a0fed20652 100644 --- a/Misc/NEWS.d/3.9.0a1.rst +++ b/Misc/NEWS.d/3.9.0a1.rst @@ -1304,7 +1304,7 @@ parameter. Patch by Pablo Galindo. .. nonce: J12cWT .. section: Library -Fixed `hmac.new` and `hmac.HMAC` to raise TypeError instead of ValueError +Fixed ``hmac.new`` and ``hmac.HMAC`` to raise TypeError instead of ValueError when the digestmod parameter, now required in 3.8, is omitted. Also clarified the hmac module documentation and docstrings. @@ -1478,7 +1478,7 @@ Removes _AwaitEvent from AsyncMock. .. section: Library Allow the rare code that wants to send invalid http requests from the -`http.client` library a way to do so. The fixes for bpo-30458 led to +``http.client`` library a way to do so. The fixes for bpo-30458 led to breakage for some projects that were relying on this ability to test their own behavior in the face of bad requests. @@ -1813,9 +1813,9 @@ Update importlib.metadata with changes from `importlib_metadata 0.21 .. nonce: 8zn2o3 .. section: Library -Remove `__code__` check in AsyncMock that incorrectly evaluated function -specs as async objects but failed to evaluate classes with `__await__` but -no `__code__` attribute defined as async objects. +Remove ``__code__`` check in AsyncMock that incorrectly evaluated function +specs as async objects but failed to evaluate classes with ``__await__`` but +no ``__code__`` attribute defined as async objects. .. @@ -1976,11 +1976,11 @@ thread at a time. .. nonce: kP-n4L .. section: Library -Subscripts to the `unittest.mock.call` objects now receive the same chaining +Subscripts to the ``unittest.mock.call`` objects now receive the same chaining mechanism as any other custom attributes, so that the following usage no -longer raises a `TypeError`: +longer raises a ``TypeError``: -call().foo().__getitem__('bar') +``call().foo().__getitem__('bar')`` Patch by blhsing @@ -2210,7 +2210,7 @@ Add C fastpath for statistics.NormalDist.inv_cdf() Patch by Donghee Na .. nonce: Ene6L- .. section: Library -Remove the deprecated method `threading.Thread.isAlive()`. Patch by Donghee +Remove the deprecated method ``threading.Thread.isAlive()``. Patch by Donghee Na. .. 
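An illustrative sketch, not part of the patch: the ``unittest.mock.call`` entry above lets subscript access take part in call chaining, so the chained form can be built and matched against recorded calls. Assuming a Python version that includes this change (3.9+)::

    from unittest.mock import MagicMock, call

    m = MagicMock()
    m().foo()["bar"]          # subscript on the result of chained calls

    # The same chain can now be spelled on ``call`` without raising TypeError
    # and compared against the recorded calls.
    assert call().foo().__getitem__("bar") in m.mock_calls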
@@ -2260,8 +2260,8 @@ directories are added and that duplicates are excluded. .. nonce: xfvdb_ .. section: Library -Renamed and documented `test.bytecode_helper` as -`test.support.bytecode_helper`. Patch by Joannah Nanjekye. +Renamed and documented ``test.bytecode_helper`` as +``test.support.bytecode_helper``. Patch by Joannah Nanjekye. .. @@ -2289,8 +2289,8 @@ many small lines are passed. Patch by Sergey Fedoseev. .. nonce: ycbL2z .. section: Library -`ensurepip` now uses `importlib.resources.read_binary()` to read data -instead of `pkgutil.get_data()`. Patch by Joannah Nanjekye. +``ensurepip`` now uses ``importlib.resources.read_binary()`` to read data +instead of ``pkgutil.get_data()``. Patch by Joannah Nanjekye. .. @@ -2524,8 +2524,8 @@ Make internal attributes for statistics.NormalDist() private. .. nonce: S5am28 .. section: Library -Fix `NonCallableMock._call_matcher` returning tuple instead of `_Call` -object when `self._spec_signature` exists. Patch by Elizabeth Uselton +Fix ``NonCallableMock._call_matcher`` returning tuple instead of ``_Call`` +object when ``self._spec_signature`` exists. Patch by Elizabeth Uselton .. @@ -2673,7 +2673,7 @@ which had a too large value in some situations. .. nonce: 0i1MR- .. section: Library -Fixes a possible hang when using a timeout on `subprocess.run()` while +Fixes a possible hang when using a timeout on ``subprocess.run()`` while capturing output. If the child process spawned its own children or otherwise connected its stdout or stderr handles with another process, we could hang after the timeout was reached and our child was killed when @@ -2708,8 +2708,8 @@ The distutils ``bdist_wininst`` command is deprecated in Python 3.8, use .. nonce: O53a5S .. section: Library -When `Enum.__str__` is overridden in a derived class, the override will be -used by `Enum.__format__` regardless of whether mixin classes are present. +When ``Enum.__str__`` is overridden in a derived class, the override will be +used by ``Enum.__format__`` regardless of whether mixin classes are present. .. @@ -2857,8 +2857,8 @@ Patch by Justin Blanchard. Add formal support for UDPLITE sockets. Support was present before, but it is now easier to detect support with ``hasattr(socket, 'IPPROTO_UDPLITE')`` and there are constants defined for each of the values needed: -:py:obj:`socket.IPPROTO_UDPLITE`, :py:obj:`UDPLITE_SEND_CSCOV`, and -:py:obj:`UDPLITE_RECV_CSCOV`. Patch by Gabe Appleton. +``socket.IPPROTO_UDPLITE``, ``UDPLITE_SEND_CSCOV``, and +``UDPLITE_RECV_CSCOV``. Patch by Gabe Appleton. .. @@ -3195,9 +3195,9 @@ Remove ``Enum._convert`` method, deprecated in 3.8. .. nonce: TTzHxj .. section: Library -`argparse._ActionsContainer.add_argument` now throws error, if someone +``argparse._ActionsContainer.add_argument`` now throws error, if someone accidentally pass FileType class object instead of instance of FileType as -`type` argument +``type`` argument. .. @@ -3458,7 +3458,7 @@ Patch contributed by Rémi Lapeyre. .. section: Library Fixes a bug in :mod:`!cgi` module when a multipart/form-data request has no -`Content-Length` header. +``Content-Length`` header. .. @@ -3566,8 +3566,8 @@ source argument. Patch by Emily Morehouse and Maxwell "5.13b" McKinnon. .. nonce: 0stF0u .. section: Library -Added __format__ to IPv4 and IPv6 classes. Always outputs a fully zero- -padded string. Supports b/x/n modifiers (bin/hex/native format). Native +Added ``__format__`` to IPv4 and IPv6 classes. Always outputs a fully +zero-padded string. Supports b/x/n modifiers (bin/hex/native format). 
Native format for IPv4 is bin, native format for IPv6 is hex. Also supports '#' and '_' modifiers. @@ -3789,7 +3789,7 @@ verify the maildir folder layout correctness. Patch by Sviatoslav Sydorenko. .. nonce: 7tiFR- .. section: Documentation -Fix `importlib` examples to insert any newly created modules via +Fix ``importlib`` examples to insert any newly created modules via importlib.util.module_from_spec() immediately into sys.modules instead of after calling loader.exec_module(). @@ -3888,7 +3888,7 @@ Make C-API docs clear about what the "main" interpreter is. .. nonce: Iqiqtm .. section: Documentation -The documentation for decimal string formatting using the `:g` specifier has +The documentation for decimal string formatting using the ``:g`` specifier has been updated to reflect the correct exponential notation cutoff point. Original patch contributed by Tuomas Suutari. @@ -4450,8 +4450,8 @@ Fix _hashlib build when Blake2 is disabled, but OpenSSL supports it. .. nonce: buCO84 .. section: Build -Misc/python-config.in now uses `getvar()` for all still existing -`sysconfig.get_config_var()` calls. Patch by Joannah Nanjekye. +Misc/python-config.in now uses ``getvar()`` for all still existing +``sysconfig.get_config_var()`` calls. Patch by Joannah Nanjekye. .. diff --git a/Misc/NEWS.d/3.9.0a2.rst b/Misc/NEWS.d/3.9.0a2.rst index a03eb10f1d523a..7d878cfe227552 100644 --- a/Misc/NEWS.d/3.9.0a2.rst +++ b/Misc/NEWS.d/3.9.0a2.rst @@ -403,7 +403,7 @@ locale encoding is not UTF-8. Prevent UnboundLocalError to pop up in parse_message_id. parse_message_id() was improperly using a token defined inside an exception -handler, which was raising `UnboundLocalError` on parsing an invalid value. +handler, which was raising ``UnboundLocalError`` on parsing an invalid value. Patch by Claudiu Popa. .. @@ -444,12 +444,12 @@ random.choices() now raises a ValueError when all the weights are zero. Raise pickle.UnpicklingError when loading an item from memo for invalid input. -The previous code was raising a `KeyError` for both the Python and C +The previous code was raising a ``KeyError`` for both the Python and C implementation. This was caused by the specified index of an invalid input which did not exist in the memo structure, where the pickle stores what -objects it has seen. The malformed input would have caused either a `BINGET` -or `LONG_BINGET` load from the memo, leading to a `KeyError` as the -determined index was bogus. Patch by Claudiu Popa +objects it has seen. The malformed input would have caused either a ``BINGET`` +or ``LONG_BINGET`` load from the memo, leading to a ``KeyError`` as the +determined index was bogus. Patch by Claudiu Popa. .. @@ -458,7 +458,7 @@ determined index was bogus. Patch by Claudiu Popa .. nonce: iKx23z .. section: Library -Calling func:`shutil.copytree` to copy a directory tree from one directory +Calling func:``shutil.copytree`` to copy a directory tree from one directory to another subdirectory resulted in an endless loop and a RecursionError. A fix was added to consume an iterator and create the list of the entries to be copied, avoiding the recursion for newly created directories. Patch by @@ -685,7 +685,7 @@ added. .. section: Documentation Update documentation to state that to activate virtual environments under -fish one should use `source`, not `.` as documented at +fish one should use ``source``, not ``.`` as documented at https://fishshell.com/docs/current/cmds/source.html. .. @@ -884,7 +884,7 @@ Add support for building and releasing Windows ARM64 packages. 
Fixed a crash on OSX dynamic builds that occurred when re-initializing the posix module after a Py_Finalize if the environment had changed since the -previous `import posix`. Patch by Benoît Hudson. +previous ``import posix``. Patch by Benoît Hudson. .. diff --git a/Misc/NEWS.d/3.9.0a3.rst b/Misc/NEWS.d/3.9.0a3.rst index bc7f4f9c5d39c1..78194791ab5277 100644 --- a/Misc/NEWS.d/3.9.0a3.rst +++ b/Misc/NEWS.d/3.9.0a3.rst @@ -266,8 +266,8 @@ The :func:`os.unsetenv` function is now also available on Windows. .. nonce: D2tSXk .. section: Library -Fixed a regression with the `ignore` callback of :func:`shutil.copytree`. -The argument types are now str and List[str] again. +Fixed a regression with the ``ignore`` callback of :func:`shutil.copytree`. +The argument types are now ``str`` and ``List[str]`` again. .. @@ -507,8 +507,8 @@ for examples of :class:`!NNTP` news reader server and nntplib tests. .. nonce: ihRT1z .. section: Library -Proxy the `SimpleHTTPRequestHandler.guess_type` to `mimetypes.guess_type` so -the `mimetypes.init` is called lazily to avoid unnecessary costs when +Proxy the ``SimpleHTTPRequestHandler.guess_type`` to ``mimetypes.guess_type`` so +the ``mimetypes.init`` is called lazily to avoid unnecessary costs when :mod:`http.server` module is imported. .. @@ -547,10 +547,10 @@ all options. Giovanni Lombardo contributed part of the patch. .. nonce: nzwGyG .. section: Library -If an exception were to be thrown in `Logger.isEnabledFor` (say, by asyncio -timeouts or stopit) , the `logging` global lock may not be released +If an exception were to be thrown in ``Logger.isEnabledFor`` (say, by asyncio +timeouts or stopit) , the ``logging`` global lock may not be released appropriately, resulting in deadlock. This change wraps that block of code -with `try...finally` to ensure the lock is released. +with ``try...finally`` to ensure the lock is released. .. @@ -571,7 +571,7 @@ new task spawning before exception raising. .. section: Library Correctly parenthesize filter-based statements that contain lambda -expressions in mod:`!lib2to3`. Patch by Donghee Na. +expressions in :mod:`!lib2to3`. Patch by Donghee Na. .. @@ -732,9 +732,9 @@ different process. .. nonce: beZ0Sk .. section: Library -Removes trailing space in formatted currency with `international=True` and a -locale with symbol following value. E.g. `locale.currency(12.34, -international=True)` returned `'12,34 EUR '` instead of `'12,34 EUR'`. +Removes trailing space in formatted currency with ``international=True`` and a +locale with symbol following value. E.g. ``locale.currency(12.34, +international=True)`` returned ``'12,34 EUR '`` instead of ``'12,34 EUR'``. .. @@ -835,7 +835,7 @@ functions are now required to build Python. .. nonce: aBmj13 .. section: Build -Updated the documentation in `./configure --help` to show default values, +Updated the documentation in ``./configure --help`` to show default values, reference documentation where required and add additional explanation where needed. diff --git a/Misc/NEWS.d/3.9.0a4.rst b/Misc/NEWS.d/3.9.0a4.rst index e59435b5509acf..ca0eb2abf1d654 100644 --- a/Misc/NEWS.d/3.9.0a4.rst +++ b/Misc/NEWS.d/3.9.0a4.rst @@ -4,8 +4,8 @@ .. release date: 2020-02-25 .. section: Security -Add audit events to functions in `fcntl`, `msvcrt`, `os`, `resource`, -`shutil`, `signal` and `syslog`. +Add audit events to functions in ``fcntl``, ``msvcrt``, ``os``, ``resource``, +``shutil``, ``signal`` and ``syslog``. .. @@ -81,9 +81,9 @@ Fix regression caused by fix for bpo-39386, that prevented calling .. 
nonce: itNmC0 .. section: Core and Builtins -Change the ending column offset of `Attribute` nodes constructed in -`ast_for_dotted_name` to point at the end of the current node and not at the -end of the last `NAME` node. +Change the ending column offset of ``Attribute`` nodes constructed in +``ast_for_dotted_name`` to point at the end of the current node and not at the +end of the last ``NAME`` node. .. @@ -269,7 +269,7 @@ codec. .. nonce: qiubSp .. section: Library -Remove obsolete check for `__args__` in bdb.Bdb.format_stack_entry. +Remove obsolete check for ``__args__`` in ``bdb.Bdb.format_stack_entry``. .. @@ -599,7 +599,7 @@ default return values. Patch by Karthikeyan Singaravelan. .. nonce: udRSWE .. section: Library -`inspect.Signature.parameters` and `inspect.BoundArguments.arguments` are +``inspect.Signature.parameters`` and ``inspect.BoundArguments.arguments`` are now dicts instead of OrderedDicts. Patch contributed by Rémi Lapeyre. .. @@ -619,9 +619,9 @@ multiprocessing.Process. .. nonce: e0C5dF .. section: Library -* Add `lazycache` function to `__all__`. -* Use `dict.clear` to clear the cache. -* Refactoring `getline` function and `checkcache` function. +* Add ``lazycache`` function to ``__all__``. +* Use ``dict.clear`` to clear the cache. +* Refactoring ``getline`` function and ``checkcache`` function. .. diff --git a/Misc/NEWS.d/3.9.0a5.rst b/Misc/NEWS.d/3.9.0a5.rst index 6ff05788214723..b4594aade3b3ed 100644 --- a/Misc/NEWS.d/3.9.0a5.rst +++ b/Misc/NEWS.d/3.9.0a5.rst @@ -691,7 +691,7 @@ Fix AttributeError when calling get_stack on a PyAsyncGenObject Task .. section: Library The :func:`compileall.compile_dir` function's *ddir* parameter and the -compileall command line flag `-d` no longer write the wrong pathname to the +compileall command line flag ``-d`` no longer write the wrong pathname to the generated pyc file for submodules beneath the root of the directory tree being compiled. This fixes a regression introduced with Python 3.5. diff --git a/Misc/NEWS.d/3.9.0a6.rst b/Misc/NEWS.d/3.9.0a6.rst index 519c7f833ebcb8..366a260172efb8 100644 --- a/Misc/NEWS.d/3.9.0a6.rst +++ b/Misc/NEWS.d/3.9.0a6.rst @@ -59,8 +59,8 @@ anything that depends on it. .. section: Core and Builtins Fix the tokenizer to display the correct error message, when there is a -SyntaxError on the last input character and no newline follows. It used to -be `unexpected EOF while parsing`, while it should be `invalid syntax`. +``SyntaxError`` on the last input character and no newline follows. It used to +be ``unexpected EOF while parsing``, while it should be ``invalid syntax``. .. @@ -79,7 +79,7 @@ evaluation for annotations activated. Patch by Batuhan Taskaya. .. nonce: vXPze5 .. section: Core and Builtins -Report a specialized error message, `invalid string prefix`, when the +Report a specialized error message, ``invalid string prefix``, when the tokenizer encounters a string with an invalid prefix. .. @@ -434,7 +434,7 @@ for the correspondent concrete type (``list`` in this case). .. nonce: ux8FUr .. section: Library -func:`inspect.getdoc` no longer returns docstring inherited from the type of +:func:`inspect.getdoc` no longer returns docstring inherited from the type of the object or from parent class if it is a class if it is not defined in the object itself. In :mod:`pydoc` the documentation string is now shown not only for class, function, method etc, but for any object that has its own @@ -504,7 +504,7 @@ extension. This allows the use of functions such as ``json_object``. .. nonce: 4EcyIN .. 
section: Library -Wait in `KqueueSelector.select` when no fds are registered +Wait in ``KqueueSelector.select`` when no fds are registered .. @@ -701,7 +701,7 @@ per header: use the realm of the first Basic challenge. .. section: Library Removed daemon threads from :mod:`concurrent.futures` by adding an internal -`threading._register_atexit()`, which calls registered functions prior to +``threading._register_atexit()``, which calls registered functions prior to joining all non-daemon threads. This allows for compatibility with subinterpreters, which don't support daemon threads. @@ -781,9 +781,9 @@ Added :pep:`584` operators to :class:`weakref.WeakKeyDictionary`. .. nonce: 56Yokh .. section: Library -Fix linear runtime behaviour of the `__getitem__` and `__setitem__` methods +Fix linear runtime behaviour of the ``__getitem__`` and ``__setitem__`` methods in :class:`multiprocessing.shared_memory.ShareableList`. This avoids -quadratic performance when iterating a `ShareableList`. Patch by Thomas +quadratic performance when iterating a ``ShareableList``. Patch by Thomas Krennwallner. .. @@ -793,9 +793,9 @@ Krennwallner. .. nonce: AxXZNz .. section: Library -Remove undocumented support for *closing* a `pathlib.Path` object via its +Remove undocumented support for *closing* a ``pathlib.Path`` object via its context manager. The context manager magic methods remain, but they are now -a no-op, making `Path` objects immutable. +a no-op, making ``Path`` objects immutable. .. diff --git a/Misc/NEWS.d/3.9.0b1.rst b/Misc/NEWS.d/3.9.0b1.rst index ee87315ad334e9..40fb8474bf9364 100644 --- a/Misc/NEWS.d/3.9.0b1.rst +++ b/Misc/NEWS.d/3.9.0b1.rst @@ -112,8 +112,8 @@ Port :mod:`syslog` to multiphase initialization (:pep:`489`). Reporting a specialised error message for invalid string prefixes, which was introduced in :issue:`40246`, is being reverted due to backwards compatibility concerns for strings that immediately follow a reserved -keyword without whitespace between them. Constructs like `bg="#d00" if clear -else"#fca"` were failing to parse, which is not an acceptable breakage on +keyword without whitespace between them. Constructs like ``bg="#d00" if clear +else"#fca"`` were failing to parse, which is not an acceptable breakage on such short notice. .. @@ -684,7 +684,7 @@ The ``isocalendar()`` methods of :class:`datetime.date` and .. nonce: t6kW_1 .. section: Documentation -Add version of removal for explicit passing of coros to `asyncio.wait()`'s +Add version of removal for explicit passing of coros to ``asyncio.wait()``'s documentation .. diff --git a/Misc/NEWS.d/next/Build/2020-05-01-23-44-31.bpo-11102.Fw9zeS.rst b/Misc/NEWS.d/next/Build/2020-05-01-23-44-31.bpo-11102.Fw9zeS.rst deleted file mode 100644 index 6477538edf5550..00000000000000 --- a/Misc/NEWS.d/next/Build/2020-05-01-23-44-31.bpo-11102.Fw9zeS.rst +++ /dev/null @@ -1,2 +0,0 @@ -The :func:`os.major`, :func:`os.makedev`, and :func:`os.minor` functions are -now available on HP-UX v3. diff --git a/Misc/NEWS.d/next/Build/2023-11-27-13-55-47.gh-issue-103065.o72OiA.rst b/Misc/NEWS.d/next/Build/2023-11-27-13-55-47.gh-issue-103065.o72OiA.rst deleted file mode 100644 index e2240b7c656a2f..00000000000000 --- a/Misc/NEWS.d/next/Build/2023-11-27-13-55-47.gh-issue-103065.o72OiA.rst +++ /dev/null @@ -1 +0,0 @@ -Introduce ``Tools/wasm/wasi.py`` to simplify doing a WASI build. 
diff --git a/Misc/NEWS.d/next/Build/2023-12-08-11-33-37.gh-issue-112867.ZzDfXQ.rst b/Misc/NEWS.d/next/Build/2023-12-08-11-33-37.gh-issue-112867.ZzDfXQ.rst deleted file mode 100644 index a36814854882bb..00000000000000 --- a/Misc/NEWS.d/next/Build/2023-12-08-11-33-37.gh-issue-112867.ZzDfXQ.rst +++ /dev/null @@ -1 +0,0 @@ -Fix the build for the case that WITH_PYMALLOC_RADIX_TREE=0 set. diff --git a/Misc/NEWS.d/next/Build/2023-12-17-18-23-02.gh-issue-112536.8lr3Ep.rst b/Misc/NEWS.d/next/Build/2023-12-17-18-23-02.gh-issue-112536.8lr3Ep.rst deleted file mode 100644 index a136eb47584993..00000000000000 --- a/Misc/NEWS.d/next/Build/2023-12-17-18-23-02.gh-issue-112536.8lr3Ep.rst +++ /dev/null @@ -1 +0,0 @@ -Add support for thread sanitizer (TSAN) diff --git a/Misc/NEWS.d/next/Build/2023-12-21-05-35-06.gh-issue-112305.VfqQPx.rst b/Misc/NEWS.d/next/Build/2023-12-21-05-35-06.gh-issue-112305.VfqQPx.rst deleted file mode 100644 index 2df3207f4e6f6c..00000000000000 --- a/Misc/NEWS.d/next/Build/2023-12-21-05-35-06.gh-issue-112305.VfqQPx.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fixed the ``check-clean-src`` step performed on out of tree builds to detect -errant ``$(srcdir)/Python/frozen_modules/*.h`` files and recommend -appropriate source tree cleanup steps to get a working build again. diff --git a/Misc/NEWS.d/next/Build/2023-12-23-09-35-48.gh-issue-113258.GlsAyH.rst b/Misc/NEWS.d/next/Build/2023-12-23-09-35-48.gh-issue-113258.GlsAyH.rst deleted file mode 100644 index e7256ea423b3e0..00000000000000 --- a/Misc/NEWS.d/next/Build/2023-12-23-09-35-48.gh-issue-113258.GlsAyH.rst +++ /dev/null @@ -1,2 +0,0 @@ -Changed the Windows build to write out generated frozen modules into the -build tree instead of the source tree. diff --git a/Misc/NEWS.d/next/C API/2023-06-21-11-53-09.gh-issue-65210.PhFRBJ.rst b/Misc/NEWS.d/next/C API/2023-06-21-11-53-09.gh-issue-65210.PhFRBJ.rst deleted file mode 100644 index a15646f4dad127..00000000000000 --- a/Misc/NEWS.d/next/C API/2023-06-21-11-53-09.gh-issue-65210.PhFRBJ.rst +++ /dev/null @@ -1,3 +0,0 @@ -Change the declaration of the *keywords* parameter of -:c:func:`PyArg_ParseTupleAndKeywords` and -:c:func:`PyArg_VaParseTupleAndKeywords` for better compatibility with C++. diff --git a/Misc/NEWS.d/next/C API/2023-11-15-01-26-59.gh-issue-111545.iAoFtA.rst b/Misc/NEWS.d/next/C API/2023-11-15-01-26-59.gh-issue-111545.iAoFtA.rst deleted file mode 100644 index 7bde2498acf999..00000000000000 --- a/Misc/NEWS.d/next/C API/2023-11-15-01-26-59.gh-issue-111545.iAoFtA.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add :c:func:`Py_HashPointer` function to hash a pointer. Patch by Victor -Stinner. diff --git a/Misc/NEWS.d/next/C API/2023-11-27-09-44-16.gh-issue-112438.GdNZiI.rst b/Misc/NEWS.d/next/C API/2023-11-27-09-44-16.gh-issue-112438.GdNZiI.rst deleted file mode 100644 index 113119efd6aebb..00000000000000 --- a/Misc/NEWS.d/next/C API/2023-11-27-09-44-16.gh-issue-112438.GdNZiI.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix support of format units "es", "et", "es#", and "et#" in nested tuples in -:c:func:`PyArg_ParseTuple`-like functions. diff --git a/Misc/NEWS.d/next/C API/2023-12-02-02-08-11.gh-issue-106560.THvuji.rst b/Misc/NEWS.d/next/C API/2023-12-02-02-08-11.gh-issue-106560.THvuji.rst deleted file mode 100644 index 59b461ec47ad64..00000000000000 --- a/Misc/NEWS.d/next/C API/2023-12-02-02-08-11.gh-issue-106560.THvuji.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix redundant declarations in the public C API. Declare PyBool_Type, -PyLong_Type and PySys_Audit() only once. Patch by Victor Stinner. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2018-08-13-13-25-15.bpo-34392.9kIlMF.rst b/Misc/NEWS.d/next/Core and Builtins/2018-08-13-13-25-15.bpo-34392.9kIlMF.rst deleted file mode 100644 index bc4fd1ad1f5c7c..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2018-08-13-13-25-15.bpo-34392.9kIlMF.rst +++ /dev/null @@ -1 +0,0 @@ -Added :func:`sys._is_interned`. diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-07-07-05-37-53.gh-issue-94606.hojJ54.rst b/Misc/NEWS.d/next/Core and Builtins/2022-07-07-05-37-53.gh-issue-94606.hojJ54.rst deleted file mode 100644 index 5201ab7d842088..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2022-07-07-05-37-53.gh-issue-94606.hojJ54.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix UnicodeEncodeError when :func:`email.message.get_payload` reads a message -with a Unicode surrogate character and the message content is not well-formed for -surrogateescape encoding. Patch by Sidney Markowitz. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-11-22-13-17-54.gh-issue-112320.EddM51.rst b/Misc/NEWS.d/next/Core and Builtins/2023-11-22-13-17-54.gh-issue-112320.EddM51.rst deleted file mode 100644 index 0da2fd33b0ea52..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-11-22-13-17-54.gh-issue-112320.EddM51.rst +++ /dev/null @@ -1,4 +0,0 @@ -The Tier 2 translator now tracks the confidence level for staying "on trace" -(i.e. not exiting back to the Tier 1 interpreter) for branch instructions -based on the number of bits set in the branch "counter". Trace translation -ends when the confidence drops below 1/3rd. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-11-24-14-10-57.gh-issue-112367.9z1IDp.rst b/Misc/NEWS.d/next/Core and Builtins/2023-11-24-14-10-57.gh-issue-112367.9z1IDp.rst deleted file mode 100644 index 991e45ad47fabe..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-11-24-14-10-57.gh-issue-112367.9z1IDp.rst +++ /dev/null @@ -1,2 +0,0 @@ -Avoid undefined behaviour when using the perf trampolines by not freeing the -code arenas until shutdown. Patch by Pablo Galindo diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-11-25-20-36-38.gh-issue-99606.fDY5hK.rst b/Misc/NEWS.d/next/Core and Builtins/2023-11-25-20-36-38.gh-issue-99606.fDY5hK.rst deleted file mode 100644 index adc0e3a6bbc89a..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-11-25-20-36-38.gh-issue-99606.fDY5hK.rst +++ /dev/null @@ -1,2 +0,0 @@ -Make code generated for an empty f-string identical to the code of an empty -normal string. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-11-25-22-39-44.gh-issue-112387.AbBq5W.rst b/Misc/NEWS.d/next/Core and Builtins/2023-11-25-22-39-44.gh-issue-112387.AbBq5W.rst deleted file mode 100644 index adac11bf4c90a1..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-11-25-22-39-44.gh-issue-112387.AbBq5W.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix error positions for decoded strings with backwards tokenize errors. -Patch by Pablo Galindo diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-11-25-22-58-49.gh-issue-112388.MU3cIM.rst b/Misc/NEWS.d/next/Core and Builtins/2023-11-25-22-58-49.gh-issue-112388.MU3cIM.rst deleted file mode 100644 index 1c82be2febda4f..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-11-25-22-58-49.gh-issue-112388.MU3cIM.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix an error that was causing the parser to try to overwrite tokenizer -errors. 
Patch by pablo Galindo diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-11-26-21-30-11.gh-issue-111058.q4DqDY.rst b/Misc/NEWS.d/next/Core and Builtins/2023-11-26-21-30-11.gh-issue-111058.q4DqDY.rst deleted file mode 100644 index de5661f911aa82..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-11-26-21-30-11.gh-issue-111058.q4DqDY.rst +++ /dev/null @@ -1,3 +0,0 @@ -Change coro.cr_frame/gen.gi_frame to return ``None`` after the coroutine/generator has been closed. -This fixes a bug where :func:`~inspect.getcoroutinestate` and :func:`~inspect.getgeneratorstate` -return the wrong state for a closed coroutine/generator. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-11-27-18-55-30.gh-issue-112217.SwFLMj.rst b/Misc/NEWS.d/next/Core and Builtins/2023-11-27-18-55-30.gh-issue-112217.SwFLMj.rst deleted file mode 100644 index d4efbab6b2d128..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-11-27-18-55-30.gh-issue-112217.SwFLMj.rst +++ /dev/null @@ -1 +0,0 @@ -Add check for the type of ``__cause__`` returned from calling the type ``T`` in ``raise from T``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-12-01-08-16-10.gh-issue-95754.ae4gwy.rst b/Misc/NEWS.d/next/Core and Builtins/2023-12-01-08-16-10.gh-issue-95754.ae4gwy.rst deleted file mode 100644 index 0884bc4a4be726..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-12-01-08-16-10.gh-issue-95754.ae4gwy.rst +++ /dev/null @@ -1 +0,0 @@ -Provide a better error message when accessing invalid attributes on partially initialized modules. The origin of the module being accessed is now included in the message to help with the common issue of shadowing other modules. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-12-01-19-02-21.gh-issue-105967.Puq5Cn.rst b/Misc/NEWS.d/next/Core and Builtins/2023-12-01-19-02-21.gh-issue-105967.Puq5Cn.rst deleted file mode 100644 index c69511218e3e16..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-12-01-19-02-21.gh-issue-105967.Puq5Cn.rst +++ /dev/null @@ -1,4 +0,0 @@ -Workaround a bug in Apple's macOS platform zlib library where -:func:`zlib.crc32` and :func:`binascii.crc32` could produce incorrect results -on multi-gigabyte inputs. Including when using :mod:`zipfile` on zips -containing large data. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-12-03-15-29-53.gh-issue-112660.gldBvh.rst b/Misc/NEWS.d/next/Core and Builtins/2023-12-03-15-29-53.gh-issue-112660.gldBvh.rst deleted file mode 100644 index ea9052b3e35c48..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-12-03-15-29-53.gh-issue-112660.gldBvh.rst +++ /dev/null @@ -1,2 +0,0 @@ -Do not clear unexpected errors during formatting error messages for -ImportError and AttributeError for modules. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-12-03-19-34-51.gh-issue-112625.QWTlwS.rst b/Misc/NEWS.d/next/Core and Builtins/2023-12-03-19-34-51.gh-issue-112625.QWTlwS.rst deleted file mode 100644 index 4970e10f3f4dcb..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-12-03-19-34-51.gh-issue-112625.QWTlwS.rst +++ /dev/null @@ -1 +0,0 @@ -Fixes a bug where a bytearray object could be cleared while iterating over an argument in the ``bytearray.join()`` method that could result in reading memory after it was freed. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-12-04-23-09-07.gh-issue-112730.BXHlFa.rst b/Misc/NEWS.d/next/Core and Builtins/2023-12-04-23-09-07.gh-issue-112730.BXHlFa.rst deleted file mode 100644 index 51758dd5f4c318..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-12-04-23-09-07.gh-issue-112730.BXHlFa.rst +++ /dev/null @@ -1 +0,0 @@ -Use color to highlight error locations in tracebacks. Patch by Pablo Galindo diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-12-05-20-41-58.gh-issue-112716.hOcx0Y.rst b/Misc/NEWS.d/next/Core and Builtins/2023-12-05-20-41-58.gh-issue-112716.hOcx0Y.rst deleted file mode 100644 index 44d63269c5424a..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-12-05-20-41-58.gh-issue-112716.hOcx0Y.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix SystemError in the ``import`` statement and in ``__reduce__()`` methods -of builtin types when ``__builtins__`` is not a dict. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-12-07-12-00-04.gh-issue-74616.kgTGVb.rst b/Misc/NEWS.d/next/Core and Builtins/2023-12-07-12-00-04.gh-issue-74616.kgTGVb.rst deleted file mode 100644 index 5c345be9de6d0b..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-12-07-12-00-04.gh-issue-74616.kgTGVb.rst +++ /dev/null @@ -1,2 +0,0 @@ -:func:`input` now raises a ValueError when output on the terminal if the -prompt contains embedded null characters instead of silently truncating it. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-12-07-13-19-55.gh-issue-112125.4ADN7i.rst b/Misc/NEWS.d/next/Core and Builtins/2023-12-07-13-19-55.gh-issue-112125.4ADN7i.rst deleted file mode 100644 index 52cd45029fb8c7..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-12-07-13-19-55.gh-issue-112125.4ADN7i.rst +++ /dev/null @@ -1 +0,0 @@ -Fix None.__ne__(None) returning NotImplemented instead of False diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-12-11-00-50-00.gh-issue-112943.RHNZie.rst b/Misc/NEWS.d/next/Core and Builtins/2023-12-11-00-50-00.gh-issue-112943.RHNZie.rst deleted file mode 100644 index 4bc2fe7c26d904..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-12-11-00-50-00.gh-issue-112943.RHNZie.rst +++ /dev/null @@ -1,2 +0,0 @@ -Correctly compute end column offsets for multiline tokens in the -:mod:`tokenize` module. Patch by Pablo Galindo diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-12-11-19-53-32.gh-issue-90350.-FQy3E.rst b/Misc/NEWS.d/next/Core and Builtins/2023-12-11-19-53-32.gh-issue-90350.-FQy3E.rst deleted file mode 100644 index 6b7881bbd19f59..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-12-11-19-53-32.gh-issue-90350.-FQy3E.rst +++ /dev/null @@ -1 +0,0 @@ -Optimize builtin functions :func:`min` and :func:`max`. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-12-12-04-53-19.gh-issue-108866.xbJ-9a.rst b/Misc/NEWS.d/next/Core and Builtins/2023-12-12-04-53-19.gh-issue-108866.xbJ-9a.rst deleted file mode 100644 index 96606924d4a3ec..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-12-12-04-53-19.gh-issue-108866.xbJ-9a.rst +++ /dev/null @@ -1,3 +0,0 @@ -Change the API and contract of ``_PyExecutorObject`` to return the -next_instr pointer, instead of the frame, and to always execute at least one -instruction. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-12-13-11-45-53.gh-issue-106905.5dslTN.rst b/Misc/NEWS.d/next/Core and Builtins/2023-12-13-11-45-53.gh-issue-106905.5dslTN.rst deleted file mode 100644 index e3a772f3354ecf..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-12-13-11-45-53.gh-issue-106905.5dslTN.rst +++ /dev/null @@ -1,7 +0,0 @@ -Use per AST-parser state rather than global state to track recursion depth -within the AST parser to prevent potential race condition due to -simultaneous parsing. - -The issue primarily showed up in 3.11 by multithreaded users of -:func:`ast.parse`. In 3.12 a change to when garbage collection can be -triggered prevented the race condition from occurring. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-12-14-20-08-35.gh-issue-113054.e20CtM.rst b/Misc/NEWS.d/next/Core and Builtins/2023-12-14-20-08-35.gh-issue-113054.e20CtM.rst deleted file mode 100644 index d0729f9c44754c..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-12-14-20-08-35.gh-issue-113054.e20CtM.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed bug where a redundant NOP is not removed, causing an assertion to fail -in the compiler in debug mode. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-12-15-16-26-01.gh-issue-112215.xJS6_6.rst b/Misc/NEWS.d/next/Core and Builtins/2023-12-15-16-26-01.gh-issue-112215.xJS6_6.rst deleted file mode 100644 index 01ca1cc7f79b8f..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-12-15-16-26-01.gh-issue-112215.xJS6_6.rst +++ /dev/null @@ -1,3 +0,0 @@ -Increase the C recursion limit by a factor of 3 for non-debug builds, except -for webassembly and s390 platforms which are unchanged. This mitigates some -regressions in 3.12 with deep recursion mixing builtin (C) and Python code. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-12-19-22-03-43.gh-issue-111375.M9vuA6.rst b/Misc/NEWS.d/next/Core and Builtins/2023-12-19-22-03-43.gh-issue-111375.M9vuA6.rst deleted file mode 100644 index fbb517173451f8..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-12-19-22-03-43.gh-issue-111375.M9vuA6.rst +++ /dev/null @@ -1,2 +0,0 @@ -Only use ``NULL`` in the exception stack to indicate an exception was -handled. Patch by Carey Metcalfe. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-12-20-08-54-54.gh-issue-113212.62AUlw.rst b/Misc/NEWS.d/next/Core and Builtins/2023-12-20-08-54-54.gh-issue-113212.62AUlw.rst deleted file mode 100644 index 6edbc9c60d968c..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-12-20-08-54-54.gh-issue-113212.62AUlw.rst +++ /dev/null @@ -1 +0,0 @@ -Improve :py:class:`super` error messages. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-12-20-18-27-11.gh-issue-113297.BZyAI_.rst b/Misc/NEWS.d/next/Core and Builtins/2023-12-20-18-27-11.gh-issue-113297.BZyAI_.rst deleted file mode 100644 index b6aee1f241fd23..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2023-12-20-18-27-11.gh-issue-113297.BZyAI_.rst +++ /dev/null @@ -1 +0,0 @@ -Fix segfault in the compiler on with statement with 19 context managers. diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-01-01-00-07-02.gh-issue-113602.cWuTzk.rst b/Misc/NEWS.d/next/Core and Builtins/2024-01-01-00-07-02.gh-issue-113602.cWuTzk.rst deleted file mode 100644 index 5e064657348720..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2024-01-01-00-07-02.gh-issue-113602.cWuTzk.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix an error that was causing the parser to try to overwrite existing errors -and crashing in the process. 
Patch by Pablo Galindo diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-01-01-23-57-24.gh-issue-113603.ySwovr.rst b/Misc/NEWS.d/next/Core and Builtins/2024-01-01-23-57-24.gh-issue-113603.ySwovr.rst deleted file mode 100644 index 5fe6d80dedd19d..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2024-01-01-23-57-24.gh-issue-113603.ySwovr.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed bug where a redundant NOP is not removed, causing an assertion to fail in the compiler in debug mode. diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-01-02-11-14-29.gh-issue-113657.CQo9vF.rst b/Misc/NEWS.d/next/Core and Builtins/2024-01-02-11-14-29.gh-issue-113657.CQo9vF.rst deleted file mode 100644 index b520b5c2529425..00000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2024-01-02-11-14-29.gh-issue-113657.CQo9vF.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix an issue that caused important instruction pointer updates to be -optimized out of tier two traces. diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-01-17-23-39-20.gh-issue-114050.Lnv1oq.rst b/Misc/NEWS.d/next/Core and Builtins/2024-01-17-23-39-20.gh-issue-114050.Lnv1oq.rst new file mode 100644 index 00000000000000..c35d2508e6bdda --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2024-01-17-23-39-20.gh-issue-114050.Lnv1oq.rst @@ -0,0 +1,2 @@ +Fix segmentation fault caused by an incorrect format string +in ``TypeError`` exception when more than two arguments are passed to ``int``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-01-19-13-18-13.gh-issue-114265.7HAi--.rst b/Misc/NEWS.d/next/Core and Builtins/2024-01-19-13-18-13.gh-issue-114265.7HAi--.rst new file mode 100644 index 00000000000000..74affbbd09ffb4 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2024-01-19-13-18-13.gh-issue-114265.7HAi--.rst @@ -0,0 +1 @@ +Compiler propagates line numbers before optimization, leading to more optimization opportunities and removing the need for the ``guarantee_lineno_for_exits`` hack. diff --git a/Misc/NEWS.d/next/Documentation/2023-10-23-23-43-43.gh-issue-110746.yg77IE.rst b/Misc/NEWS.d/next/Documentation/2023-10-23-23-43-43.gh-issue-110746.yg77IE.rst deleted file mode 100644 index 215db7beb75dcf..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2023-10-23-23-43-43.gh-issue-110746.yg77IE.rst +++ /dev/null @@ -1 +0,0 @@ -Improved markup for valid options/values for methods ttk.treeview.column and ttk.treeview.heading, and for Layouts. diff --git a/Misc/NEWS.d/next/Documentation/2023-11-30-02-33-59.gh-issue-111699._O5G_y.rst b/Misc/NEWS.d/next/Documentation/2023-11-30-02-33-59.gh-issue-111699._O5G_y.rst deleted file mode 100644 index 2d31345e6c2044..00000000000000 --- a/Misc/NEWS.d/next/Documentation/2023-11-30-02-33-59.gh-issue-111699._O5G_y.rst +++ /dev/null @@ -1 +0,0 @@ -Relocate ``smtpd`` deprecation notice to its own section rather than under ``locale`` in What's New in Python 3.12 document diff --git a/Misc/NEWS.d/next/Documentation/2024-01-17-11-40-03.gh-issue-114123.LuueXf.rst b/Misc/NEWS.d/next/Documentation/2024-01-17-11-40-03.gh-issue-114123.LuueXf.rst new file mode 100644 index 00000000000000..1d93a422840077 --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2024-01-17-11-40-03.gh-issue-114123.LuueXf.rst @@ -0,0 +1,7 @@ +Move the :mod:`csv` module docstring to the :mod:`!csv` module +instead of reexporting it from the internal :mod:`!_csv` module, +and remove ``__doc__`` from ``csv.__all__``. 
+ +Move :attr:`!csv.__version__` to the :mod:`!csv` module +instead of reexporting it from the internal :mod:`!_csv` module, +and remove ``__version__`` from ``csv.__all__``. diff --git a/Misc/NEWS.d/next/IDLE/2019-12-13-12-26-56.bpo-13586.1grqsR.rst b/Misc/NEWS.d/next/IDLE/2019-12-13-12-26-56.bpo-13586.1grqsR.rst deleted file mode 100644 index 1a73cad175c888..00000000000000 --- a/Misc/NEWS.d/next/IDLE/2019-12-13-12-26-56.bpo-13586.1grqsR.rst +++ /dev/null @@ -1 +0,0 @@ -Enter the selected text when opening the "Replace" dialog. diff --git a/Misc/NEWS.d/next/IDLE/2023-12-10-20-01-11.gh-issue-112898.98aWv2.rst b/Misc/NEWS.d/next/IDLE/2023-12-10-20-01-11.gh-issue-112898.98aWv2.rst deleted file mode 100644 index 1c20e46b1e5f7b..00000000000000 --- a/Misc/NEWS.d/next/IDLE/2023-12-10-20-01-11.gh-issue-112898.98aWv2.rst +++ /dev/null @@ -1 +0,0 @@ -Fix processing unsaved files when quitting IDLE on macOS. diff --git a/Misc/NEWS.d/next/IDLE/2023-12-19-00-03-12.gh-issue-113269.lrU-IC.rst b/Misc/NEWS.d/next/IDLE/2023-12-19-00-03-12.gh-issue-113269.lrU-IC.rst deleted file mode 100644 index 72e75b7910e359..00000000000000 --- a/Misc/NEWS.d/next/IDLE/2023-12-19-00-03-12.gh-issue-113269.lrU-IC.rst +++ /dev/null @@ -1 +0,0 @@ -Fix test_editor hang on macOS Catalina. diff --git a/Misc/NEWS.d/next/IDLE/2024-01-17-23-18-15.gh-issue-96905.UYaxoU.rst b/Misc/NEWS.d/next/IDLE/2024-01-17-23-18-15.gh-issue-96905.UYaxoU.rst new file mode 100644 index 00000000000000..fe7dde64c7c7d5 --- /dev/null +++ b/Misc/NEWS.d/next/IDLE/2024-01-17-23-18-15.gh-issue-96905.UYaxoU.rst @@ -0,0 +1 @@ +In idlelib code, stop redefining built-ins 'dict' and 'object'. diff --git a/Misc/NEWS.d/next/Library/2019-02-12-16-12-54.bpo-21360.gkSSfx.rst b/Misc/NEWS.d/next/Library/2019-02-12-16-12-54.bpo-21360.gkSSfx.rst deleted file mode 100644 index bc32b9fe4199f9..00000000000000 --- a/Misc/NEWS.d/next/Library/2019-02-12-16-12-54.bpo-21360.gkSSfx.rst +++ /dev/null @@ -1 +0,0 @@ -:class:`mailbox.Maildir` now ignores files with a leading dot. diff --git a/Misc/NEWS.d/next/Library/2019-05-17-07-22-33.bpo-18060.5mqTQM.rst b/Misc/NEWS.d/next/Library/2019-05-17-07-22-33.bpo-18060.5mqTQM.rst deleted file mode 100644 index 3fefbc3efb63c0..00000000000000 --- a/Misc/NEWS.d/next/Library/2019-05-17-07-22-33.bpo-18060.5mqTQM.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed a class inheritance issue that can cause segfaults when deriving two or more levels of subclasses from a base class of Structure or Union. - diff --git a/Misc/NEWS.d/next/Library/2019-05-18-15-50-14.bpo-36959.ew6WZ4.rst b/Misc/NEWS.d/next/Library/2019-05-18-15-50-14.bpo-36959.ew6WZ4.rst deleted file mode 100644 index 1ac05a730a2086..00000000000000 --- a/Misc/NEWS.d/next/Library/2019-05-18-15-50-14.bpo-36959.ew6WZ4.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix some error messages for invalid ISO format string combinations in ``strptime()`` that referred to directives not contained in the format string. -Patch by Gordon P. Hemsley. diff --git a/Misc/NEWS.d/next/Library/2019-06-14-22-37-32.bpo-37260.oecdIf.rst b/Misc/NEWS.d/next/Library/2019-06-14-22-37-32.bpo-37260.oecdIf.rst deleted file mode 100644 index a5f2c5e8e18919..00000000000000 --- a/Misc/NEWS.d/next/Library/2019-06-14-22-37-32.bpo-37260.oecdIf.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed a race condition in :func:`shutil.rmtree` in which directory entries removed by another process or thread while ``shutil.rmtree()`` is running can cause it to raise FileNotFoundError. Patch by Jeffrey Kintscher. 
- diff --git a/Misc/NEWS.d/next/Library/2020-03-09-15-08-29.bpo-39912.xPOBBY.rst b/Misc/NEWS.d/next/Library/2020-03-09-15-08-29.bpo-39912.xPOBBY.rst deleted file mode 100644 index fb8579725a2d7d..00000000000000 --- a/Misc/NEWS.d/next/Library/2020-03-09-15-08-29.bpo-39912.xPOBBY.rst +++ /dev/null @@ -1,3 +0,0 @@ -:func:`warnings.filterwarnings()` and :func:`warnings.simplefilter()` now raise -appropriate exceptions instead of ``AssertionError``. Patch contributed by -Rémi Lapeyre. diff --git a/Misc/NEWS.d/next/Library/2020-05-21-23-32-46.bpo-40262.z4fQv1.rst b/Misc/NEWS.d/next/Library/2020-05-21-23-32-46.bpo-40262.z4fQv1.rst deleted file mode 100644 index c017a1c8df09d8..00000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-21-23-32-46.bpo-40262.z4fQv1.rst +++ /dev/null @@ -1,2 +0,0 @@ -The :meth:`ssl.SSLSocket.recv_into` method no longer requires the *buffer* -argument to implement ``__len__`` and supports buffers with arbitrary item size. diff --git a/Misc/NEWS.d/next/Library/2020-06-15-23-44-53.bpo-19821.ihBk39.rst b/Misc/NEWS.d/next/Library/2020-06-15-23-44-53.bpo-19821.ihBk39.rst deleted file mode 100644 index ede68106b56ff8..00000000000000 --- a/Misc/NEWS.d/next/Library/2020-06-15-23-44-53.bpo-19821.ihBk39.rst +++ /dev/null @@ -1 +0,0 @@ -The :func:`!pydoc.ispackage` function has been deprecated. diff --git a/Misc/NEWS.d/next/Library/2020-07-28-20-48-05.bpo-41422.iMwnMu.rst b/Misc/NEWS.d/next/Library/2020-07-28-20-48-05.bpo-41422.iMwnMu.rst deleted file mode 100644 index 8bde68f8f2afc8..00000000000000 --- a/Misc/NEWS.d/next/Library/2020-07-28-20-48-05.bpo-41422.iMwnMu.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed memory leaks of :class:`pickle.Pickler` and :class:`pickle.Unpickler` involving cyclic references via the -internal memo mapping. diff --git a/Misc/NEWS.d/next/Library/2020-08-06-14-43-55.bpo-26791.KxoEfO.rst b/Misc/NEWS.d/next/Library/2020-08-06-14-43-55.bpo-26791.KxoEfO.rst deleted file mode 100644 index c6f8dcb6f9269c..00000000000000 --- a/Misc/NEWS.d/next/Library/2020-08-06-14-43-55.bpo-26791.KxoEfO.rst +++ /dev/null @@ -1,4 +0,0 @@ -:func:`shutil.move` now moves a symlink into a directory when that -directory is the target of the symlink. This provides the same behavior as -the mv shell command. The previous behavior raised an exception. Patch by -Jeffrey Kintscher. diff --git a/Misc/NEWS.d/next/Library/2020-12-14-09-31-13.bpo-35332.s22wAx.rst b/Misc/NEWS.d/next/Library/2020-12-14-09-31-13.bpo-35332.s22wAx.rst deleted file mode 100644 index 80564b99a079c6..00000000000000 --- a/Misc/NEWS.d/next/Library/2020-12-14-09-31-13.bpo-35332.s22wAx.rst +++ /dev/null @@ -1,3 +0,0 @@ -The :func:`shutil.rmtree` function now ignores errors when calling -:func:`os.close` when *ignore_errors* is ``True``, and -:func:`os.close` no longer retried after error. diff --git a/Misc/NEWS.d/next/Library/2021-11-23-22-22-49.bpo-32731.kNOASr.rst b/Misc/NEWS.d/next/Library/2021-11-23-22-22-49.bpo-32731.kNOASr.rst deleted file mode 100644 index 92f3b870c11131..00000000000000 --- a/Misc/NEWS.d/next/Library/2021-11-23-22-22-49.bpo-32731.kNOASr.rst +++ /dev/null @@ -1,3 +0,0 @@ -:func:`getpass.getuser` now raises :exc:`OSError` for all failures rather -than :exc:`ImportError` on systems lacking the :mod:`pwd` module or -:exc:`KeyError` if the password database is empty. 
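An illustrative sketch, not part of the patch: the final entry above narrows ``getpass.getuser()`` failures to :exc:`OSError`, so callers can use a single handler instead of catching ``ImportError`` or ``KeyError``. Assuming Python 3.13+, where the change applies::

    import getpass

    try:
        user = getpass.getuser()
    except OSError:
        # Covers both a missing pwd module and an empty password database,
        # which previously raised ImportError and KeyError respectively.
        user = "unknown"
    print(user)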
diff --git a/Misc/NEWS.d/next/Library/2021-12-06-22-10-53.bpo-43153.J7mjSy.rst b/Misc/NEWS.d/next/Library/2021-12-06-22-10-53.bpo-43153.J7mjSy.rst deleted file mode 100644 index 7800e0a4869adf..00000000000000 --- a/Misc/NEWS.d/next/Library/2021-12-06-22-10-53.bpo-43153.J7mjSy.rst +++ /dev/null @@ -1,4 +0,0 @@ -On Windows, ``tempfile.TemporaryDirectory`` previously masked a -``PermissionError`` with ``NotADirectoryError`` during directory cleanup. It -now correctly raises ``PermissionError`` if errors are not ignored. Patch by -Andrei Kulakov and Ken Jin. diff --git a/Misc/NEWS.d/next/Library/2022-12-01-16-57-44.gh-issue-91133.LKMVCV.rst b/Misc/NEWS.d/next/Library/2022-12-01-16-57-44.gh-issue-91133.LKMVCV.rst deleted file mode 100644 index 7991048fc48e03..00000000000000 --- a/Misc/NEWS.d/next/Library/2022-12-01-16-57-44.gh-issue-91133.LKMVCV.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix a bug in :class:`tempfile.TemporaryDirectory` cleanup, which now no longer -dereferences symlinks when working around file system permission errors. diff --git a/Misc/NEWS.d/next/Library/2023-02-08-00-43-29.gh-issue-83162.ufdI9F.rst b/Misc/NEWS.d/next/Library/2023-02-08-00-43-29.gh-issue-83162.ufdI9F.rst deleted file mode 100644 index 6074dd7f101a6d..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-02-08-00-43-29.gh-issue-83162.ufdI9F.rst +++ /dev/null @@ -1,3 +0,0 @@ -Renamed :exc:`!re.error` to :exc:`PatternError` for clarity, and kept -:exc:`!re.error` for backward compatibility. Patch by Matthias Bussonnier and -Adam Chhina. diff --git a/Misc/NEWS.d/next/Library/2023-04-09-21-05-43.gh-issue-66515.0DS8Ya.rst b/Misc/NEWS.d/next/Library/2023-04-09-21-05-43.gh-issue-66515.0DS8Ya.rst deleted file mode 100644 index b9c52f3b8db52c..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-04-09-21-05-43.gh-issue-66515.0DS8Ya.rst +++ /dev/null @@ -1,3 +0,0 @@ -:class:`mailbox.MH` now supports folders that do not contain a -``.mh_sequences`` file (e.g. Claws Mail IMAP-cache folders). Patch by Serhiy -Storchaka. diff --git a/Misc/NEWS.d/next/Library/2023-04-23-11-08-02.gh-issue-103708.Y17C7p.rst b/Misc/NEWS.d/next/Library/2023-04-23-11-08-02.gh-issue-103708.Y17C7p.rst deleted file mode 100644 index 4b7d747175df03..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-04-23-11-08-02.gh-issue-103708.Y17C7p.rst +++ /dev/null @@ -1 +0,0 @@ -Make hardcoded python name, a configurable parameter so that different implementations of python can override it instead of making huge diffs in sysconfig.py diff --git a/Misc/NEWS.d/next/Library/2023-04-29-20-49-13.gh-issue-104003.-8Ruk2.rst b/Misc/NEWS.d/next/Library/2023-04-29-20-49-13.gh-issue-104003.-8Ruk2.rst deleted file mode 100644 index 82d61ca8b8bc97..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-04-29-20-49-13.gh-issue-104003.-8Ruk2.rst +++ /dev/null @@ -1,3 +0,0 @@ -Add :func:`warnings.deprecated`, a decorator to mark deprecated functions to -static type checkers and to warn on usage of deprecated classes and functions. -See :pep:`702`. Patch by Jelle Zijlstra. diff --git a/Misc/NEWS.d/next/Library/2023-05-08-09-30-00.gh-issue-104282.h4c6Eb.rst b/Misc/NEWS.d/next/Library/2023-05-08-09-30-00.gh-issue-104282.h4c6Eb.rst new file mode 100644 index 00000000000000..569ce66a5b9d5f --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-05-08-09-30-00.gh-issue-104282.h4c6Eb.rst @@ -0,0 +1,3 @@ +Fix null pointer dereference in :func:`lzma._decode_filter_properties` +due to improper handling of BCJ filters with properties of zero length. +Patch by Radislav Chugunov. 
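An illustrative sketch, not part of the patch: the :func:`warnings.deprecated` entry above (gh-issue-104003, :pep:`702`) adds a decorator that both informs static type checkers and emits a runtime :exc:`DeprecationWarning`. The function names below are hypothetical; assumes Python 3.13+::

    import warnings
    from warnings import deprecated

    @deprecated("Use shiny_new() instead")
    def old_and_busted():
        return 42

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        old_and_busted()

    # Calling the decorated function emitted a DeprecationWarning.
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)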
diff --git a/Misc/NEWS.d/next/Library/2023-05-30-18-30-11.gh-issue-105102.SnpK04.rst b/Misc/NEWS.d/next/Library/2023-05-30-18-30-11.gh-issue-105102.SnpK04.rst new file mode 100644 index 00000000000000..7ca21afefa3132 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-05-30-18-30-11.gh-issue-105102.SnpK04.rst @@ -0,0 +1,2 @@ +Allow :class:`ctypes.Union` to be nested in :class:`ctypes.Structure` when +the system endianness is the opposite of the classes. diff --git a/Misc/NEWS.d/next/Library/2023-08-07-21-11-24.gh-issue-102130._UyI5i.rst b/Misc/NEWS.d/next/Library/2023-08-07-21-11-24.gh-issue-102130._UyI5i.rst deleted file mode 100644 index f582ad5df39e84..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-08-07-21-11-24.gh-issue-102130._UyI5i.rst +++ /dev/null @@ -1 +0,0 @@ -Support tab completion in :mod:`cmd` for ``editline``. diff --git a/Misc/NEWS.d/next/Library/2023-08-14-21-10-52.gh-issue-103363.u64_QI.rst b/Misc/NEWS.d/next/Library/2023-08-14-21-10-52.gh-issue-103363.u64_QI.rst deleted file mode 100644 index d4a27d624eb5e6..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-08-14-21-10-52.gh-issue-103363.u64_QI.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add *follow_symlinks* keyword-only argument to :meth:`pathlib.Path.owner` -and :meth:`~pathlib.Path.group`, defaulting to ``True``. diff --git a/Misc/NEWS.d/next/Library/2023-09-22-22-17-45.gh-issue-38807.m9McRN.rst b/Misc/NEWS.d/next/Library/2023-09-22-22-17-45.gh-issue-38807.m9McRN.rst new file mode 100644 index 00000000000000..4219723d15b9e6 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-09-22-22-17-45.gh-issue-38807.m9McRN.rst @@ -0,0 +1,3 @@ +Fix race condition in :mod:`trace`. Instead of checking if a directory +exists and creating it, directly call :func:`os.makedirs` with the kwarg +``exist_ok=True``. diff --git a/Misc/NEWS.d/next/Library/2023-09-23-14-40-51.gh-issue-109786.UX3pKv.rst b/Misc/NEWS.d/next/Library/2023-09-23-14-40-51.gh-issue-109786.UX3pKv.rst deleted file mode 100644 index 07222fa339d703..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-09-23-14-40-51.gh-issue-109786.UX3pKv.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix possible reference leaks and crash when re-enter the ``__next__()`` method of -:class:`itertools.pairwise`. diff --git a/Misc/NEWS.d/next/Library/2023-10-04-11-09-30.gh-issue-110345.fZU1ud.rst b/Misc/NEWS.d/next/Library/2023-10-04-11-09-30.gh-issue-110345.fZU1ud.rst new file mode 100644 index 00000000000000..d9ccc0f12de47c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-10-04-11-09-30.gh-issue-110345.fZU1ud.rst @@ -0,0 +1 @@ +Show the Tcl/Tk patchlevel (rather than version) in :meth:`tkinter._test`. diff --git a/Misc/NEWS.d/next/Library/2023-10-11-02-34-01.gh-issue-110109.RFCmHs.rst b/Misc/NEWS.d/next/Library/2023-10-11-02-34-01.gh-issue-110109.RFCmHs.rst deleted file mode 100644 index 4f12d128f49fb3..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-10-11-02-34-01.gh-issue-110109.RFCmHs.rst +++ /dev/null @@ -1,3 +0,0 @@ -Add private ``pathlib._PurePathBase`` class: a base class for -:class:`pathlib.PurePath` that omits certain magic methods. It may be made -public (along with ``_PathBase``) in future. 
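The trace fix above swaps a check-then-create sequence for a single call; a minimal sketch (the coverdir path is hypothetical):

    import os

    coverdir = "build/coverage"
    # Old, racy pattern: if not os.path.exists(coverdir): os.mkdir(coverdir)
    os.makedirs(coverdir, exist_ok=True)  # no error if another process created it first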
diff --git a/Misc/NEWS.d/next/Library/2023-10-12-18-19-47.gh-issue-82300.P8-O38.rst b/Misc/NEWS.d/next/Library/2023-10-12-18-19-47.gh-issue-82300.P8-O38.rst deleted file mode 100644 index d7e6b225489b99..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-10-12-18-19-47.gh-issue-82300.P8-O38.rst +++ /dev/null @@ -1 +0,0 @@ -Add ``track`` parameter to :class:`multiprocessing.shared_memory.SharedMemory` that allows using shared memory blocks without having to register with the POSIX resource tracker that automatically releases them upon process exit. diff --git a/Misc/NEWS.d/next/Library/2023-10-17-16-11-03.gh-issue-52161.WBYyCJ.rst b/Misc/NEWS.d/next/Library/2023-10-17-16-11-03.gh-issue-52161.WBYyCJ.rst deleted file mode 100644 index 3f598d40e4ae93..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-10-17-16-11-03.gh-issue-52161.WBYyCJ.rst +++ /dev/null @@ -1,2 +0,0 @@ -:meth:`cmd.Cmd.do_help` now cleans docstrings with :func:`inspect.cleandoc` -before writing them. Patch by Filip Łapkiewicz. diff --git a/Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst b/Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst deleted file mode 100644 index 3d0e9e4078c934..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-10-20-15-28-08.gh-issue-102988.dStNO7.rst +++ /dev/null @@ -1,8 +0,0 @@ -:func:`email.utils.getaddresses` and :func:`email.utils.parseaddr` now -return ``('', '')`` 2-tuples in more situations where invalid email -addresses are encountered instead of potentially inaccurate values. Add -optional *strict* parameter to these two functions: use ``strict=False`` to -get the old behavior, accepting malformed inputs. -``getattr(email.utils, 'supports_strict_parsing', False)`` can be used to check -if the *strict* parameter is available. Patch by Thomas Dwyer and Victor -Stinner to improve the CVE-2023-27043 fix. diff --git a/Misc/NEWS.d/next/Library/2023-10-23-03-49-34.gh-issue-102980.aXBd54.rst b/Misc/NEWS.d/next/Library/2023-10-23-03-49-34.gh-issue-102980.aXBd54.rst deleted file mode 100644 index d4bae4790d6fa4..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-10-23-03-49-34.gh-issue-102980.aXBd54.rst +++ /dev/null @@ -1 +0,0 @@ -Redirect the output of ``interact`` command of :mod:`pdb` to the same channel as the debugger. Add tests and improve docs. diff --git a/Misc/NEWS.d/next/Library/2023-10-23-18-42-26.gh-issue-111049.Ys7-o_.rst b/Misc/NEWS.d/next/Library/2023-10-23-18-42-26.gh-issue-111049.Ys7-o_.rst deleted file mode 100644 index b1de348bea0a58..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-10-23-18-42-26.gh-issue-111049.Ys7-o_.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix crash during garbage collection of the :class:`io.BytesIO` buffer -object. diff --git a/Misc/NEWS.d/next/Library/2023-10-25-13-07-53.gh-issue-67790.jMn9Ad.rst b/Misc/NEWS.d/next/Library/2023-10-25-13-07-53.gh-issue-67790.jMn9Ad.rst deleted file mode 100644 index 44c5702a6551b0..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-10-25-13-07-53.gh-issue-67790.jMn9Ad.rst +++ /dev/null @@ -1,2 +0,0 @@ -Implement basic formatting support (minimum width, alignment, fill) for -:class:`fractions.Fraction`.
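A short sketch of the feature detection suggested in the email.utils entry above (the header value is hypothetical):

    import email.utils

    field_values = ["Alice <alice@example.com>, bob@example.com"]
    if getattr(email.utils, "supports_strict_parsing", False):
        pairs = email.utils.getaddresses(field_values, strict=True)
    else:
        pairs = email.utils.getaddresses(field_values)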
diff --git a/Misc/NEWS.d/next/Library/2023-10-25-16-37-13.gh-issue-75666.BpsWut.rst b/Misc/NEWS.d/next/Library/2023-10-25-16-37-13.gh-issue-75666.BpsWut.rst deleted file mode 100644 index d774cc4f7c687f..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-10-25-16-37-13.gh-issue-75666.BpsWut.rst +++ /dev/null @@ -1,6 +0,0 @@ -Fix the behavior of :mod:`tkinter` widget's ``unbind()`` method with two -arguments. Previously, ``widget.unbind(sequence, funcid)`` destroyed the -current binding for *sequence*, leaving *sequence* unbound, and deleted the -*funcid* command. Now it removes only *funcid* from the binding for -*sequence*, keeping other commands, and deletes the *funcid* command. It -leaves *sequence* unbound only if *funcid* was the last bound command. diff --git a/Misc/NEWS.d/next/Library/2023-11-02-10-13-31.gh-issue-111615.3SMixi.rst b/Misc/NEWS.d/next/Library/2023-11-02-10-13-31.gh-issue-111615.3SMixi.rst deleted file mode 100644 index f80ab00a3adbff..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-02-10-13-31.gh-issue-111615.3SMixi.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix a regression caused by a fix to gh-93162 whereby you couldn't configure -a :class:`QueueHandler` without specifying handlers. diff --git a/Misc/NEWS.d/next/Library/2023-11-05-20-09-27.gh-issue-99367.HLaWKo.rst b/Misc/NEWS.d/next/Library/2023-11-05-20-09-27.gh-issue-99367.HLaWKo.rst deleted file mode 100644 index 0920da221e423f..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-05-20-09-27.gh-issue-99367.HLaWKo.rst +++ /dev/null @@ -1 +0,0 @@ -Do not mangle ``sys.path[0]`` in :mod:`pdb` if safe_path is set diff --git a/Misc/NEWS.d/next/Library/2023-11-08-16-11-04.gh-issue-110275.Bm6GwR.rst b/Misc/NEWS.d/next/Library/2023-11-08-16-11-04.gh-issue-110275.Bm6GwR.rst deleted file mode 100644 index 194dd5cb623f0f..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-08-16-11-04.gh-issue-110275.Bm6GwR.rst +++ /dev/null @@ -1,2 +0,0 @@ -Named tuple's methods ``_replace()`` and ``__replace__()`` now raise -TypeError instead of ValueError for invalid keyword arguments. diff --git a/Misc/NEWS.d/next/Library/2023-11-08-18-53-07.gh-issue-68166.1iTh4Y.rst b/Misc/NEWS.d/next/Library/2023-11-08-18-53-07.gh-issue-68166.1iTh4Y.rst deleted file mode 100644 index 30379b8fa1afaf..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-08-18-53-07.gh-issue-68166.1iTh4Y.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add support of the "vsapi" element type in -:meth:`tkinter.ttk.Style.element_create`. diff --git a/Misc/NEWS.d/next/Library/2023-11-09-11-07-34.gh-issue-111874.dzYc3j.rst b/Misc/NEWS.d/next/Library/2023-11-09-11-07-34.gh-issue-111874.dzYc3j.rst deleted file mode 100644 index 50408202a7a5a1..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-09-11-07-34.gh-issue-111874.dzYc3j.rst +++ /dev/null @@ -1,4 +0,0 @@ -When creating a :class:`typing.NamedTuple` class, ensure -:func:`~object.__set_name__` is called on all objects that define -``__set_name__`` and exist in the values of the ``NamedTuple`` class's class -dictionary. Patch by Alex Waygood. diff --git a/Misc/NEWS.d/next/Library/2023-11-15-01-36-04.gh-issue-106922.qslOVH.rst b/Misc/NEWS.d/next/Library/2023-11-15-01-36-04.gh-issue-106922.qslOVH.rst deleted file mode 100644 index b68e75ab87cd0b..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-15-01-36-04.gh-issue-106922.qslOVH.rst +++ /dev/null @@ -1 +0,0 @@ -Display multiple lines with ``traceback`` when errors span multiple lines. 
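A small sketch of the named tuple change above, assuming the patched behavior:

    from collections import namedtuple

    Point = namedtuple("Point", ["x", "y"])
    p = Point(1, 2)
    try:
        p._replace(z=3)    # 'z' is not a field
    except TypeError:      # previously this raised ValueError
        pass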
diff --git a/Misc/NEWS.d/next/Library/2023-11-15-04-53-37.gh-issue-112105.I3RcVN.rst b/Misc/NEWS.d/next/Library/2023-11-15-04-53-37.gh-issue-112105.I3RcVN.rst deleted file mode 100644 index 4243dcb190434f..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-15-04-53-37.gh-issue-112105.I3RcVN.rst +++ /dev/null @@ -1 +0,0 @@ -Make :func:`readline.set_completer_delims` work with libedit diff --git a/Misc/NEWS.d/next/Library/2023-11-16-10-42-15.gh-issue-112139.WpHosf.rst b/Misc/NEWS.d/next/Library/2023-11-16-10-42-15.gh-issue-112139.WpHosf.rst deleted file mode 100644 index 090dc8847d9556..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-16-10-42-15.gh-issue-112139.WpHosf.rst +++ /dev/null @@ -1,3 +0,0 @@ -Add :meth:`Signature.format` to format signatures to string with extra options. -And use it in :mod:`pydoc` to render more readable signatures that have new -lines between parameters. diff --git a/Misc/NEWS.d/next/Library/2023-11-16-17-18-09.gh-issue-112137.QvjGjN.rst b/Misc/NEWS.d/next/Library/2023-11-16-17-18-09.gh-issue-112137.QvjGjN.rst deleted file mode 100644 index 6b61d051966846..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-16-17-18-09.gh-issue-112137.QvjGjN.rst +++ /dev/null @@ -1 +0,0 @@ -Change :mod:`dis` output to display logical labels for jump targets instead of offsets. diff --git a/Misc/NEWS.d/next/Library/2023-11-21-02-58-14.gh-issue-77621.MYv5XS.rst b/Misc/NEWS.d/next/Library/2023-11-21-02-58-14.gh-issue-77621.MYv5XS.rst deleted file mode 100644 index f3e6efc389afca..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-21-02-58-14.gh-issue-77621.MYv5XS.rst +++ /dev/null @@ -1,2 +0,0 @@ -Slightly improve the import time of the :mod:`pathlib` module by deferring -some imports. Patch by Barney Gale. diff --git a/Misc/NEWS.d/next/Library/2023-11-22-19-43-54.gh-issue-112292.5nDU87.rst b/Misc/NEWS.d/next/Library/2023-11-22-19-43-54.gh-issue-112292.5nDU87.rst deleted file mode 100644 index 8345e33791cde0..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-22-19-43-54.gh-issue-112292.5nDU87.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix a crash in :mod:`readline` when imported from a sub interpreter. Patch -by Anthony Shaw diff --git a/Misc/NEWS.d/next/Library/2023-11-22-23-08-47.gh-issue-81620.mfZ2Wf.rst b/Misc/NEWS.d/next/Library/2023-11-22-23-08-47.gh-issue-81620.mfZ2Wf.rst deleted file mode 100644 index ff35806e4d5ed6..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-22-23-08-47.gh-issue-81620.mfZ2Wf.rst +++ /dev/null @@ -1 +0,0 @@ -Add extra tests for :func:`random.binomialvariate` diff --git a/Misc/NEWS.d/next/Library/2023-11-23-10-41-21.gh-issue-112332.rhTBaa.rst b/Misc/NEWS.d/next/Library/2023-11-23-10-41-21.gh-issue-112332.rhTBaa.rst deleted file mode 100644 index bd686ad052e5b2..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-23-10-41-21.gh-issue-112332.rhTBaa.rst +++ /dev/null @@ -1,2 +0,0 @@ -Deprecate the ``exc_type`` field of :class:`traceback.TracebackException`. -Add ``exc_type_str`` to replace it. diff --git a/Misc/NEWS.d/next/Library/2023-11-23-12-37-22.gh-issue-112137.kM46Q6.rst b/Misc/NEWS.d/next/Library/2023-11-23-12-37-22.gh-issue-112137.kM46Q6.rst deleted file mode 100644 index 1b2e41ae96ff09..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-23-12-37-22.gh-issue-112137.kM46Q6.rst +++ /dev/null @@ -1 +0,0 @@ -Change :mod:`dis` output to display no-lineno as "--" instead of "None". 
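A brief sketch of the traceback.TracebackException change above, assuming the new exc_type_str attribute is available:

    import traceback

    try:
        1 / 0
    except ZeroDivisionError as exc:
        te = traceback.TracebackException.from_exception(exc)
        print(te.exc_type_str)  # string form of the type; prefer this over the deprecated exc_type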
diff --git a/Misc/NEWS.d/next/Library/2023-11-23-17-25-27.gh-issue-112345.FFApHx.rst b/Misc/NEWS.d/next/Library/2023-11-23-17-25-27.gh-issue-112345.FFApHx.rst deleted file mode 100644 index b2b9894e6bef3a..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-23-17-25-27.gh-issue-112345.FFApHx.rst +++ /dev/null @@ -1,3 +0,0 @@ -Improve error message when trying to call :func:`issubclass` against a -:class:`typing.Protocol` that has non-method members. -Patch by Randolf Scholz. diff --git a/Misc/NEWS.d/next/Library/2023-11-24-09-27-01.gh-issue-112361.kYtnHW.rst b/Misc/NEWS.d/next/Library/2023-11-24-09-27-01.gh-issue-112361.kYtnHW.rst deleted file mode 100644 index 5a83f93f9fbec8..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-24-09-27-01.gh-issue-112361.kYtnHW.rst +++ /dev/null @@ -1,2 +0,0 @@ -Speed up a small handful of :mod:`pathlib` methods by removing some -temporary objects. diff --git a/Misc/NEWS.d/next/Library/2023-11-24-19-08-50.gh-issue-112343.RarGFC.rst b/Misc/NEWS.d/next/Library/2023-11-24-19-08-50.gh-issue-112343.RarGFC.rst new file mode 100644 index 00000000000000..aaa50fce3ac962 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-11-24-19-08-50.gh-issue-112343.RarGFC.rst @@ -0,0 +1 @@ +Improve handling of pdb convenience variables to avoid replacing string contents. diff --git a/Misc/NEWS.d/next/Library/2023-11-24-21-00-24.gh-issue-94722.GMIQIn.rst b/Misc/NEWS.d/next/Library/2023-11-24-21-00-24.gh-issue-94722.GMIQIn.rst deleted file mode 100644 index 41bd57f46ed82a..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-24-21-00-24.gh-issue-94722.GMIQIn.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix bug where comparison between instances of :class:`~doctest.DocTest` fails if -one of them has ``None`` as its lineno. diff --git a/Misc/NEWS.d/next/Library/2023-11-25-20-29-28.gh-issue-112405.cOtzxC.rst b/Misc/NEWS.d/next/Library/2023-11-25-20-29-28.gh-issue-112405.cOtzxC.rst deleted file mode 100644 index f6f1bee2a0c38f..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-25-20-29-28.gh-issue-112405.cOtzxC.rst +++ /dev/null @@ -1 +0,0 @@ -Optimize :meth:`pathlib.PurePath.relative_to`. Patch by Alex Waygood. diff --git a/Misc/NEWS.d/next/Library/2023-11-26-13-26-56.gh-issue-112358.smhaeZ.rst b/Misc/NEWS.d/next/Library/2023-11-26-13-26-56.gh-issue-112358.smhaeZ.rst deleted file mode 100644 index e473ded46a1309..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-26-13-26-56.gh-issue-112358.smhaeZ.rst +++ /dev/null @@ -1,2 +0,0 @@ -Revert change to :class:`struct.Struct` initialization that broke some cases -of subclassing. diff --git a/Misc/NEWS.d/next/Library/2023-11-26-13-44-19.gh-issue-112414.kx2E7S.rst b/Misc/NEWS.d/next/Library/2023-11-26-13-44-19.gh-issue-112414.kx2E7S.rst deleted file mode 100644 index 058e5a33227e5a..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-26-13-44-19.gh-issue-112414.kx2E7S.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix regression in Python 3.12 where calling :func:`repr` on a module that -had been imported using a custom :term:`loader` could fail with -:exc:`AttributeError`. Patch by Alex Waygood. diff --git a/Misc/NEWS.d/next/Library/2023-11-27-12-41-23.gh-issue-63284.q2Qi9q.rst b/Misc/NEWS.d/next/Library/2023-11-27-12-41-23.gh-issue-63284.q2Qi9q.rst deleted file mode 100644 index abb57dccd5a91a..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-27-12-41-23.gh-issue-63284.q2Qi9q.rst +++ /dev/null @@ -1 +0,0 @@ -Added support for TLS-PSK (pre-shared key) mode to the :mod:`ssl` module. 
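To illustrate the typing.Protocol entry above, a minimal sketch of the case whose error message was improved (class names are hypothetical):

    from typing import Protocol, runtime_checkable

    @runtime_checkable
    class Named(Protocol):
        name: str          # a non-method member

    class Plain:
        pass

    try:
        issubclass(Plain, Named)
    except TypeError as e:
        print(e)           # the message now explains why issubclass() is unsupported here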
diff --git a/Misc/NEWS.d/next/Library/2023-11-28-02-39-30.gh-issue-101336.ya433z.rst b/Misc/NEWS.d/next/Library/2023-11-28-02-39-30.gh-issue-101336.ya433z.rst deleted file mode 100644 index c222febae6b554..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-28-02-39-30.gh-issue-101336.ya433z.rst +++ /dev/null @@ -1 +0,0 @@ -Add ``keep_alive`` keyword parameter for :meth:`AbstractEventLoop.create_server` and :meth:`BaseEventLoop.create_server`. diff --git a/Misc/NEWS.d/next/Library/2023-11-28-20-01-33.gh-issue-112509.QtoKed.rst b/Misc/NEWS.d/next/Library/2023-11-28-20-01-33.gh-issue-112509.QtoKed.rst deleted file mode 100644 index a16d67e7776bcb..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-28-20-01-33.gh-issue-112509.QtoKed.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix edge cases that could cause a key to be present in both the -``__required_keys__`` and ``__optional_keys__`` attributes of a -:class:`typing.TypedDict`. Patch by Jelle Zijlstra. diff --git a/Misc/NEWS.d/next/Library/2023-11-28-20-47-39.gh-issue-112328.Z2AxEY.rst b/Misc/NEWS.d/next/Library/2023-11-28-20-47-39.gh-issue-112328.Z2AxEY.rst deleted file mode 100644 index 6e6902486b7bc9..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-28-20-47-39.gh-issue-112328.Z2AxEY.rst +++ /dev/null @@ -1,2 +0,0 @@ -[Enum] Make ``EnumDict``, ``EnumDict.member_names``, -``EnumType._add_alias_`` and ``EnumType._add_value_alias_`` public. diff --git a/Misc/NEWS.d/next/Library/2023-11-29-02-26-32.gh-issue-112510.j-zXGc.rst b/Misc/NEWS.d/next/Library/2023-11-29-02-26-32.gh-issue-112510.j-zXGc.rst deleted file mode 100644 index 02de6fa80c1b3e..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-29-02-26-32.gh-issue-112510.j-zXGc.rst +++ /dev/null @@ -1 +0,0 @@ -Add :data:`readline.backend` for the backend readline uses (``editline`` or ``readline``) diff --git a/Misc/NEWS.d/next/Library/2023-11-29-10-51-41.gh-issue-112516.rFKUKN.rst b/Misc/NEWS.d/next/Library/2023-11-29-10-51-41.gh-issue-112516.rFKUKN.rst deleted file mode 100644 index 530cf992dcd77a..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-11-29-10-51-41.gh-issue-112516.rFKUKN.rst +++ /dev/null @@ -1 +0,0 @@ -Update the bundled copy of pip to version 23.3.1. diff --git a/Misc/NEWS.d/next/Library/2023-12-01-08-28-09.gh-issue-112578.bfNbfi.rst b/Misc/NEWS.d/next/Library/2023-12-01-08-28-09.gh-issue-112578.bfNbfi.rst deleted file mode 100644 index 1de5b1fe26ce6d..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-01-08-28-09.gh-issue-112578.bfNbfi.rst +++ /dev/null @@ -1 +0,0 @@ -Fix a spurious :exc:`RuntimeWarning` when executing the :mod:`zipfile` module. diff --git a/Misc/NEWS.d/next/Library/2023-12-01-16-09-59.gh-issue-81194.FFad1c.rst b/Misc/NEWS.d/next/Library/2023-12-01-16-09-59.gh-issue-81194.FFad1c.rst deleted file mode 100644 index feb7a8643b97f6..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-01-16-09-59.gh-issue-81194.FFad1c.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix a crash in :func:`socket.if_indextoname` with specific value (UINT_MAX). -Fix an integer overflow in :func:`socket.if_indextoname` on 64-bit -non-Windows platforms. diff --git a/Misc/NEWS.d/next/Library/2023-12-01-18-05-09.gh-issue-110190.5bf-c9.rst b/Misc/NEWS.d/next/Library/2023-12-01-18-05-09.gh-issue-110190.5bf-c9.rst deleted file mode 100644 index 730b9d49119805..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-01-18-05-09.gh-issue-110190.5bf-c9.rst +++ /dev/null @@ -1 +0,0 @@ -Fix ctypes structs with array on Arm platform by setting ``MAX_STRUCT_SIZE`` to 32 in stgdict. 
Patch by Diego Russo. diff --git a/Misc/NEWS.d/next/Library/2023-12-01-21-05-46.gh-issue-112334.DmNXKh.rst b/Misc/NEWS.d/next/Library/2023-12-01-21-05-46.gh-issue-112334.DmNXKh.rst deleted file mode 100644 index 3a53a8bf84230f..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-01-21-05-46.gh-issue-112334.DmNXKh.rst +++ /dev/null @@ -1,11 +0,0 @@ -Fixed a performance regression in 3.12's :mod:`subprocess` on Linux where it -would no longer use the fast-path ``vfork()`` system call when it could have -due to a logic bug, instead falling back to the safe but slower ``fork()``. - -Also fixed a second 3.12.0 potential security bug. If a value of -``extra_groups=[]`` was passed to :mod:`subprocess.Popen` or related APIs, -the underlying ``setgroups(0, NULL)`` system call to clear the groups list -would not be made in the child process prior to ``exec()``. - -This was identified via code inspection in the process of fixing the first -bug. diff --git a/Misc/NEWS.d/next/Library/2023-12-02-12-55-17.gh-issue-112618.7_FT8-.rst b/Misc/NEWS.d/next/Library/2023-12-02-12-55-17.gh-issue-112618.7_FT8-.rst deleted file mode 100644 index c732de15609c96..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-02-12-55-17.gh-issue-112618.7_FT8-.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix a caching bug relating to :data:`typing.Annotated`. -``Annotated[str, True]`` is no longer identical to ``Annotated[str, 1]``. diff --git a/Misc/NEWS.d/next/Library/2023-12-03-01-01-52.gh-issue-112622.1Z8cpx.rst b/Misc/NEWS.d/next/Library/2023-12-03-01-01-52.gh-issue-112622.1Z8cpx.rst deleted file mode 100644 index 91c88bac334dcb..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-03-01-01-52.gh-issue-112622.1Z8cpx.rst +++ /dev/null @@ -1,2 +0,0 @@ -Ensure ``name`` parameter is passed to event loop in -:func:`asyncio.create_task`. diff --git a/Misc/NEWS.d/next/Library/2023-12-03-12-41-48.gh-issue-112645.blMsKf.rst b/Misc/NEWS.d/next/Library/2023-12-03-12-41-48.gh-issue-112645.blMsKf.rst deleted file mode 100644 index 4e8f6ebdb882e0..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-03-12-41-48.gh-issue-112645.blMsKf.rst +++ /dev/null @@ -1 +0,0 @@ -Remove deprecation error on passing ``onerror`` to :func:`shutil.rmtree`. diff --git a/Misc/NEWS.d/next/Library/2023-12-04-14-05-24.gh-issue-74690.eODKRm.rst b/Misc/NEWS.d/next/Library/2023-12-04-14-05-24.gh-issue-74690.eODKRm.rst deleted file mode 100644 index 36d793f787302e..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-04-14-05-24.gh-issue-74690.eODKRm.rst +++ /dev/null @@ -1,5 +0,0 @@ -Speedup :func:`isinstance` checks by roughly 20% for -:func:`runtime-checkable protocols ` -that only have one callable member. -Speedup :func:`issubclass` checks for these protocols by roughly 10%. -Patch by Alex Waygood. diff --git a/Misc/NEWS.d/next/Library/2023-12-04-16-45-11.gh-issue-74690.pQYP5U.rst b/Misc/NEWS.d/next/Library/2023-12-04-16-45-11.gh-issue-74690.pQYP5U.rst deleted file mode 100644 index 8102f02e941c29..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-04-16-45-11.gh-issue-74690.pQYP5U.rst +++ /dev/null @@ -1,2 +0,0 @@ -Speedup :func:`issubclass` checks against simple :func:`runtime-checkable -protocols ` by around 6%. Patch by Alex Waygood. 
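A tiny check illustrating the typing.Annotated caching fix above, assuming the patched behavior:

    from typing import Annotated

    # True == 1, but the two annotations are no longer conflated by the cache.
    assert Annotated[str, True] is not Annotated[str, 1]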
diff --git a/Misc/NEWS.d/next/Library/2023-12-04-21-30-34.gh-issue-112727.jpgNRB.rst b/Misc/NEWS.d/next/Library/2023-12-04-21-30-34.gh-issue-112727.jpgNRB.rst deleted file mode 100644 index bbe7aae5732d9a..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-04-21-30-34.gh-issue-112727.jpgNRB.rst +++ /dev/null @@ -1 +0,0 @@ -Speed up :meth:`pathlib.Path.absolute`. Patch by Barney Gale. diff --git a/Misc/NEWS.d/next/Library/2023-12-05-01-19-28.gh-issue-112736.rdHDrU.rst b/Misc/NEWS.d/next/Library/2023-12-05-01-19-28.gh-issue-112736.rdHDrU.rst deleted file mode 100644 index 6c09e622923af8..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-05-01-19-28.gh-issue-112736.rdHDrU.rst +++ /dev/null @@ -1 +0,0 @@ -The use of del-safe symbols in ``subprocess`` was refactored to allow for use in cross-platform build environments. diff --git a/Misc/NEWS.d/next/Library/2023-12-05-16-20-40.gh-issue-94692.-e5C3c.rst b/Misc/NEWS.d/next/Library/2023-12-05-16-20-40.gh-issue-94692.-e5C3c.rst deleted file mode 100644 index c67ba6c9ececdb..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-05-16-20-40.gh-issue-94692.-e5C3c.rst +++ /dev/null @@ -1,4 +0,0 @@ -:func:`shutil.rmtree` now only catches OSError exceptions. Previously a -symlink attack resistant version of ``shutil.rmtree()`` could ignore or pass -to the error handler an arbitrary exception when invalid arguments were -provided. diff --git a/Misc/NEWS.d/next/Library/2023-12-05-18-57-53.gh-issue-79325.P2vMVK.rst b/Misc/NEWS.d/next/Library/2023-12-05-18-57-53.gh-issue-79325.P2vMVK.rst deleted file mode 100644 index f3c32d27b5fe66..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-05-18-57-53.gh-issue-79325.P2vMVK.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix an infinite recursion error in :func:`tempfile.TemporaryDirectory` -cleanup on Windows. diff --git a/Misc/NEWS.d/next/Library/2023-12-06-14-06-14.gh-issue-51944.-5qq_L.rst b/Misc/NEWS.d/next/Library/2023-12-06-14-06-14.gh-issue-51944.-5qq_L.rst deleted file mode 100644 index 821eefa7cffcd5..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-06-14-06-14.gh-issue-51944.-5qq_L.rst +++ /dev/null @@ -1,6 +0,0 @@ -Add the following constants to the :mod:`termios` module. These values are -present in macOS system headers: ``ALTWERASE``, ``B14400``, ``B28800``, -``B7200``, ``B76800``, ``CCAR_OFLOW``, ``CCTS_OFLOW``, ``CDSR_OFLOW``, -``CDTR_IFLOW``, ``CIGNORE``, ``CRTS_IFLOW``, ``EXTPROC``, ``IUTF8``, -``MDMBUF``, ``NL2``, ``NL3``, ``NOKERNINFO``, ``ONOEOT``, ``OXTABS``, -``VDSUSP``, ``VSTATUS``. diff --git a/Misc/NEWS.d/next/Library/2023-12-06-16-01-33.gh-issue-112800.TNsGJ-.rst b/Misc/NEWS.d/next/Library/2023-12-06-16-01-33.gh-issue-112800.TNsGJ-.rst deleted file mode 100644 index e88eac169177a9..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-06-16-01-33.gh-issue-112800.TNsGJ-.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix :mod:`asyncio` ``SubprocessTransport.close()`` not to throw -``PermissionError`` when used with setuid executables. diff --git a/Misc/NEWS.d/next/Library/2023-12-07-16-55-41.gh-issue-87286.MILC9_.rst b/Misc/NEWS.d/next/Library/2023-12-07-16-55-41.gh-issue-87286.MILC9_.rst deleted file mode 100644 index bfeec3c95207cb..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-07-16-55-41.gh-issue-87286.MILC9_.rst +++ /dev/null @@ -1,3 +0,0 @@ -Added :const:`LOG_FTP`, :const:`LOG_NETINFO`, :const:`LOG_REMOTEAUTH`, -:const:`LOG_INSTALL`, :const:`LOG_RAS`, and :const:`LOG_LAUNCHD` to the -:mod:`syslog` module, all of them constants used on macOS.
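As a quick illustration of the termios entry above, a hedged sketch that probes for a few of the newly exposed constants (macOS assumed):

    import termios

    for name in ("ALTWERASE", "EXTPROC", "IUTF8", "NOKERNINFO", "OXTABS"):
        print(name, hasattr(termios, name))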
diff --git a/Misc/NEWS.d/next/Library/2023-12-08-11-17-17.gh-issue-112540.Pm5egX.rst b/Misc/NEWS.d/next/Library/2023-12-08-11-17-17.gh-issue-112540.Pm5egX.rst deleted file mode 100644 index 263b13d1762bf1..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-08-11-17-17.gh-issue-112540.Pm5egX.rst +++ /dev/null @@ -1,2 +0,0 @@ -The statistics.geometric_mean() function now returns zero for datasets -containing a zero. Formerly, it would raise an exception. diff --git a/Misc/NEWS.d/next/Library/2023-12-11-14-12-46.gh-issue-110190.e0iEUa.rst b/Misc/NEWS.d/next/Library/2023-12-11-14-12-46.gh-issue-110190.e0iEUa.rst deleted file mode 100644 index 3bfed1e0f1dc91..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-11-14-12-46.gh-issue-110190.e0iEUa.rst +++ /dev/null @@ -1 +0,0 @@ -Fix ctypes structs with array on PPC64LE platform by setting ``MAX_STRUCT_SIZE`` to 64 in stgdict. Patch by Diego Russo. diff --git a/Misc/NEWS.d/next/Library/2023-12-11-16-13-15.gh-issue-112970.87jmKP.rst b/Misc/NEWS.d/next/Library/2023-12-11-16-13-15.gh-issue-112970.87jmKP.rst deleted file mode 100644 index 58ca26af511383..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-11-16-13-15.gh-issue-112970.87jmKP.rst +++ /dev/null @@ -1 +0,0 @@ -Use :c:func:`!closefrom` on Linux where available (e.g. glibc-2.34), rather than only FreeBSD. diff --git a/Misc/NEWS.d/next/Library/2023-12-12-05-48-17.gh-issue-112989.ZAa_eq.rst b/Misc/NEWS.d/next/Library/2023-12-12-05-48-17.gh-issue-112989.ZAa_eq.rst deleted file mode 100644 index ceeab8cc7d6bec..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-12-05-48-17.gh-issue-112989.ZAa_eq.rst +++ /dev/null @@ -1 +0,0 @@ -Reduce overhead to connect sockets with :mod:`asyncio` SelectorEventLoop. diff --git a/Misc/NEWS.d/next/Library/2023-12-12-16-32-55.gh-issue-112962.ZZWXZn.rst b/Misc/NEWS.d/next/Library/2023-12-12-16-32-55.gh-issue-112962.ZZWXZn.rst deleted file mode 100644 index b99e6bc90ae791..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-12-16-32-55.gh-issue-112962.ZZWXZn.rst +++ /dev/null @@ -1,3 +0,0 @@ -:mod:`dis` module functions add cache information to the -:class:`~dis.Instruction` instance rather than creating fake -:class:`~dis.Instruction` instances to represent the cache entries. diff --git a/Misc/NEWS.d/next/Library/2023-12-12-20-15-57.gh-issue-112559.IgXkje.rst b/Misc/NEWS.d/next/Library/2023-12-12-20-15-57.gh-issue-112559.IgXkje.rst deleted file mode 100644 index c08cb7c3ba5ea5..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-12-20-15-57.gh-issue-112559.IgXkje.rst +++ /dev/null @@ -1,3 +0,0 @@ -:func:`signal.signal` and :func:`signal.getsignal` no longer call ``repr`` on -callable handlers. :func:`asyncio.run` and :meth:`asyncio.Runner.run` no longer -call ``repr`` on the task results. Patch by Yilei Yang. diff --git a/Misc/NEWS.d/next/Library/2023-12-13-17-08-21.gh-issue-59616.JNlWSs.rst b/Misc/NEWS.d/next/Library/2023-12-13-17-08-21.gh-issue-59616.JNlWSs.rst deleted file mode 100644 index 793ae63b4c1ff5..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-13-17-08-21.gh-issue-59616.JNlWSs.rst +++ /dev/null @@ -1,3 +0,0 @@ -Add support of :func:`os.lchmod` and the *follow_symlinks* argument in -:func:`os.chmod` on Windows. Note that the default value of *follow_symlinks* -in :func:`!os.lchmod` is ``False`` on Windows. 
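A small sketch of the statistics.geometric_mean change above, assuming the patched behavior:

    from statistics import geometric_mean

    print(geometric_mean([4.0, 9.0]))       # about 6.0
    print(geometric_mean([4.0, 9.0, 0.0]))  # now 0.0; previously this raised StatisticsError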
diff --git a/Misc/NEWS.d/next/Library/2023-12-15-09-51-41.gh-issue-113175.RHsNwE.rst b/Misc/NEWS.d/next/Library/2023-12-15-09-51-41.gh-issue-113175.RHsNwE.rst deleted file mode 100644 index 1b43803d1a7aa4..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-15-09-51-41.gh-issue-113175.RHsNwE.rst +++ /dev/null @@ -1,5 +0,0 @@ -Sync with importlib_metadata 7.0, including improved type annotations, fixed -issue with symlinked packages in ``package_distributions``, added -``EntryPoints.__repr__``, introduced the ``diagnose`` script, added -``Distribution.origin`` property, and removed deprecated ``EntryPoint`` -access by numeric index (tuple behavior). diff --git a/Misc/NEWS.d/next/Library/2023-12-15-12-35-28.gh-issue-61648.G-4pz0.rst b/Misc/NEWS.d/next/Library/2023-12-15-12-35-28.gh-issue-61648.G-4pz0.rst deleted file mode 100644 index c841e5c7f7683a..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-15-12-35-28.gh-issue-61648.G-4pz0.rst +++ /dev/null @@ -1 +0,0 @@ -Detect line numbers of properties in doctests. diff --git a/Misc/NEWS.d/next/Library/2023-12-15-18-10-26.gh-issue-113202.xv_Ww8.rst b/Misc/NEWS.d/next/Library/2023-12-15-18-10-26.gh-issue-113202.xv_Ww8.rst deleted file mode 100644 index 44f26aef60a33a..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-15-18-10-26.gh-issue-113202.xv_Ww8.rst +++ /dev/null @@ -1 +0,0 @@ -Add a ``strict`` option to ``batched()`` in the ``itertools`` module. diff --git a/Misc/NEWS.d/next/Library/2023-12-15-18-13-59.gh-issue-113119.al-569.rst b/Misc/NEWS.d/next/Library/2023-12-15-18-13-59.gh-issue-113119.al-569.rst deleted file mode 100644 index 94087b00515e97..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-15-18-13-59.gh-issue-113119.al-569.rst +++ /dev/null @@ -1,2 +0,0 @@ -:func:`os.posix_spawn` now accepts ``env=None``, which makes the newly spawned -process use the current process environment. Patch by Jakub Kulik. diff --git a/Misc/NEWS.d/next/Library/2023-12-15-20-29-49.gh-issue-113188.AvoraB.rst b/Misc/NEWS.d/next/Library/2023-12-15-20-29-49.gh-issue-113188.AvoraB.rst deleted file mode 100644 index 17c69572d9f2b1..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-15-20-29-49.gh-issue-113188.AvoraB.rst +++ /dev/null @@ -1,6 +0,0 @@ -Fix :func:`shutil.copymode` and :func:`shutil.copystat` on Windows. -Previously they worked differenly if *dst* is a symbolic link: -they modified the permission bits of *dst* itself -rather than the file it points to if *follow_symlinks* is true or *src* is -not a symbolic link, and did not modify the permission bits if -*follow_symlinks* is false and *src* is a symbolic link. diff --git a/Misc/NEWS.d/next/Library/2023-12-15-21-33-42.gh-issue-113191.Il155b.rst b/Misc/NEWS.d/next/Library/2023-12-15-21-33-42.gh-issue-113191.Il155b.rst deleted file mode 100644 index 13fe4ff5f6a8bd..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-15-21-33-42.gh-issue-113191.Il155b.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add support of :func:`os.fchmod` and a file descriptor in :func:`os.chmod` -on Windows. diff --git a/Misc/NEWS.d/next/Library/2023-12-16-01-10-47.gh-issue-113199.oDjnjL.rst b/Misc/NEWS.d/next/Library/2023-12-16-01-10-47.gh-issue-113199.oDjnjL.rst deleted file mode 100644 index d8e0b1731d1e3b..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-16-01-10-47.gh-issue-113199.oDjnjL.rst +++ /dev/null @@ -1,3 +0,0 @@ -Make ``http.client.HTTPResponse.read1`` and -``http.client.HTTPResponse.readline`` close IO after reading all data when -content length is known. Patch by Illia Volochii. 
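A minimal sketch of the new strict option for itertools.batched mentioned above, assuming Python 3.13:

    from itertools import batched

    list(batched("ABCDEFG", 3))               # [('A', 'B', 'C'), ('D', 'E', 'F'), ('G',)]
    list(batched("ABCDEFG", 3, strict=True))  # raises ValueError because the final batch is short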
diff --git a/Misc/NEWS.d/next/Library/2023-12-16-10-58-34.gh-issue-113117.0zF7bH.rst b/Misc/NEWS.d/next/Library/2023-12-16-10-58-34.gh-issue-113117.0zF7bH.rst deleted file mode 100644 index 718226a0021efe..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-16-10-58-34.gh-issue-113117.0zF7bH.rst +++ /dev/null @@ -1,4 +0,0 @@ -The :mod:`subprocess` module can now use the :func:`os.posix_spawn` function -with ``close_fds=True`` on platforms where -``posix_spawn_file_actions_addclosefrom_np`` is available. -Patch by Jakub Kulik. diff --git a/Misc/NEWS.d/next/Library/2023-12-16-23-56-42.gh-issue-113149.7LWgTS.rst b/Misc/NEWS.d/next/Library/2023-12-16-23-56-42.gh-issue-113149.7LWgTS.rst deleted file mode 100644 index 0faa67fefabeca..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-16-23-56-42.gh-issue-113149.7LWgTS.rst +++ /dev/null @@ -1,2 +0,0 @@ -Improve error message when a JSON array or object contains a trailing comma. -Patch by Carson Radtke. diff --git a/Misc/NEWS.d/next/Library/2023-12-17-04-43-57.gh-issue-113225.dhxhiZ.rst b/Misc/NEWS.d/next/Library/2023-12-17-04-43-57.gh-issue-113225.dhxhiZ.rst deleted file mode 100644 index 7160cca2e11366..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-17-04-43-57.gh-issue-113225.dhxhiZ.rst +++ /dev/null @@ -1 +0,0 @@ -Speed up :meth:`pathlib.Path.glob` by using :attr:`os.DirEntry.path` where possible. diff --git a/Misc/NEWS.d/next/Library/2023-12-17-13-56-30.gh-issue-87264.RgfHCv.rst b/Misc/NEWS.d/next/Library/2023-12-17-13-56-30.gh-issue-87264.RgfHCv.rst deleted file mode 100644 index fa987d4f0af9ba..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-17-13-56-30.gh-issue-87264.RgfHCv.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed tarfile list() method to show file type. diff --git a/Misc/NEWS.d/next/Library/2023-12-18-09-47-54.gh-issue-113246.em930H.rst b/Misc/NEWS.d/next/Library/2023-12-18-09-47-54.gh-issue-113246.em930H.rst deleted file mode 100644 index 167bb37c0e0643..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-18-09-47-54.gh-issue-113246.em930H.rst +++ /dev/null @@ -1 +0,0 @@ -Update bundled pip to 23.3.2. diff --git a/Misc/NEWS.d/next/Library/2023-12-20-21-18-51.gh-issue-113214.JcV9Mn.rst b/Misc/NEWS.d/next/Library/2023-12-20-21-18-51.gh-issue-113214.JcV9Mn.rst deleted file mode 100644 index 6db74cda166e92..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-20-21-18-51.gh-issue-113214.JcV9Mn.rst +++ /dev/null @@ -1 +0,0 @@ -Fix an ``AttributeError`` during asyncio SSL protocol aborts in SSL-over-SSL scenarios. diff --git a/Misc/NEWS.d/next/Library/2023-12-21-23-47-42.gh-issue-53502.dercJI.rst b/Misc/NEWS.d/next/Library/2023-12-21-23-47-42.gh-issue-53502.dercJI.rst deleted file mode 100644 index aa7274161d4166..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-21-23-47-42.gh-issue-53502.dercJI.rst +++ /dev/null @@ -1,2 +0,0 @@ -Add a new option ``aware_datetime`` in :mod:`plistlib` to loads or dumps -aware datetime. diff --git a/Misc/NEWS.d/next/Library/2023-12-22-20-49-52.gh-issue-113407.C_O13_.rst b/Misc/NEWS.d/next/Library/2023-12-22-20-49-52.gh-issue-113407.C_O13_.rst deleted file mode 100644 index da00977f03cefd..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-22-20-49-52.gh-issue-113407.C_O13_.rst +++ /dev/null @@ -1 +0,0 @@ -Fix import of :mod:`unittest.mock` when CPython is built without docstrings. 
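To illustrate the JSON entry above, a short sketch of the improved diagnostic (the exact message text is not guaranteed here):

    import json

    try:
        json.loads('[1, 2,]')
    except json.JSONDecodeError as e:
        print(e)  # the message now calls out the trailing comma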
diff --git a/Misc/NEWS.d/next/Library/2023-12-23-13-10-42.gh-issue-111784.Nb4L1j.rst b/Misc/NEWS.d/next/Library/2023-12-23-13-10-42.gh-issue-111784.Nb4L1j.rst deleted file mode 100644 index 51ac0752cfae84..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-23-13-10-42.gh-issue-111784.Nb4L1j.rst +++ /dev/null @@ -1,5 +0,0 @@ -Fix segfaults in the ``_elementtree`` module. -Fix first segfault during deallocation of ``_elementtree.XMLParser`` instances by keeping strong reference -to ``pyexpat`` module in module state for capsule lifetime. -Fix second segfault which happens in the same deallocation process by keeping strong reference -to ``_elementtree`` module in ``XMLParser`` structure for ``_elementtree`` module lifetime. diff --git a/Misc/NEWS.d/next/Library/2023-12-23-16-10-07.gh-issue-113421.w7vs08.rst b/Misc/NEWS.d/next/Library/2023-12-23-16-10-07.gh-issue-113421.w7vs08.rst deleted file mode 100644 index 2082fe6391d261..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-23-16-10-07.gh-issue-113421.w7vs08.rst +++ /dev/null @@ -1 +0,0 @@ -Fix multiprocessing logger for ``%(filename)s``. diff --git a/Misc/NEWS.d/next/Library/2023-12-23-16-51-17.gh-issue-113028.3Jmdoj.rst b/Misc/NEWS.d/next/Library/2023-12-23-16-51-17.gh-issue-113028.3Jmdoj.rst deleted file mode 100644 index 5f66d6a00b4d3d..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-23-16-51-17.gh-issue-113028.3Jmdoj.rst +++ /dev/null @@ -1,6 +0,0 @@ -When a second reference to a string appears in the input to :mod:`pickle`, -and the Python implementation is in use, -we are guaranteed that a single copy gets pickled -and a single object is shared when reloaded. -Previously, in protocol 0, when a string contained certain characters -(e.g. newline) it resulted in duplicate objects. diff --git a/Misc/NEWS.d/next/Library/2023-12-28-14-36-20.gh-issue-113543.2iWkOR.rst b/Misc/NEWS.d/next/Library/2023-12-28-14-36-20.gh-issue-113543.2iWkOR.rst deleted file mode 100644 index 5bf557bedd0204..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-28-14-36-20.gh-issue-113543.2iWkOR.rst +++ /dev/null @@ -1,2 +0,0 @@ -Make sure that ``webbrowser.MacOSXOSAScript`` sends ``webbrowser.open`` -audit event. diff --git a/Misc/NEWS.d/next/Library/2023-12-29-17-57-45.gh-issue-113569.qcRCEI.rst b/Misc/NEWS.d/next/Library/2023-12-29-17-57-45.gh-issue-113569.qcRCEI.rst deleted file mode 100644 index 9b63fc940991fe..00000000000000 --- a/Misc/NEWS.d/next/Library/2023-12-29-17-57-45.gh-issue-113569.qcRCEI.rst +++ /dev/null @@ -1,2 +0,0 @@ -Indicate if there were no actual calls in unittest -:meth:`~unittest.mock.Mock.assert_has_calls` failure. diff --git a/Misc/NEWS.d/next/Library/2024-01-03-14-19-26.gh-issue-113538.ahuBCo.rst b/Misc/NEWS.d/next/Library/2024-01-03-14-19-26.gh-issue-113538.ahuBCo.rst deleted file mode 100644 index a52076501b7bf4..00000000000000 --- a/Misc/NEWS.d/next/Library/2024-01-03-14-19-26.gh-issue-113538.ahuBCo.rst +++ /dev/null @@ -1,5 +0,0 @@ -In :meth:`asyncio.StreamReaderProtocol.connection_made`, there is callback -that logs an error if the task wrapping the "connected callback" fails. This -callback would itself fail if the task was cancelled. Prevent this by -checking whether the task was cancelled first. If so, close the transport -but don't log an error. 
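A brief sketch of the unittest.mock entry above, assuming the patched failure message:

    from unittest import mock

    m = mock.Mock()
    try:
        m.assert_has_calls([mock.call(42)])
    except AssertionError as e:
        print(e)  # the failure text now notes that the mock was never called at all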
diff --git a/Misc/NEWS.d/next/Library/2024-01-12-17-32-36.gh-issue-79634.uTSTRI.rst b/Misc/NEWS.d/next/Library/2024-01-12-17-32-36.gh-issue-79634.uTSTRI.rst new file mode 100644 index 00000000000000..ba19b5209e648e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-01-12-17-32-36.gh-issue-79634.uTSTRI.rst @@ -0,0 +1,2 @@ +Accept :term:`path-like objects <path-like object>` as patterns in +:meth:`pathlib.Path.glob` and :meth:`~pathlib.Path.rglob`. diff --git a/Misc/NEWS.d/next/Library/2024-01-15-19-54-41.gh-issue-114087.Xic5vY.rst b/Misc/NEWS.d/next/Library/2024-01-15-19-54-41.gh-issue-114087.Xic5vY.rst new file mode 100644 index 00000000000000..68b27a7b0c9f6c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-01-15-19-54-41.gh-issue-114087.Xic5vY.rst @@ -0,0 +1 @@ +Speed up ``dataclasses.asdict`` up to 1.35x. diff --git a/Misc/NEWS.d/next/Library/2024-01-16-15-59-06.gh-issue-114149.LJ8IPm.rst b/Misc/NEWS.d/next/Library/2024-01-16-15-59-06.gh-issue-114149.LJ8IPm.rst new file mode 100644 index 00000000000000..1403d78d0d4905 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-01-16-15-59-06.gh-issue-114149.LJ8IPm.rst @@ -0,0 +1 @@ +Enum: correctly handle tuple subclasses in custom ``__new__``. diff --git a/Misc/NEWS.d/next/Library/2024-01-17-18-53-51.gh-issue-104522.3NyDf4.rst b/Misc/NEWS.d/next/Library/2024-01-17-18-53-51.gh-issue-104522.3NyDf4.rst new file mode 100644 index 00000000000000..ca980945ea12d3 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-01-17-18-53-51.gh-issue-104522.3NyDf4.rst @@ -0,0 +1,3 @@ +:exc:`OSError` raised when running a subprocess now only has the *filename* attribute set to *cwd* if the error was caused by a failed attempt to change the current directory. diff --git a/Misc/NEWS.d/next/Library/2024-01-18-10-07-52.gh-issue-114198.lK4Iif.rst b/Misc/NEWS.d/next/Library/2024-01-18-10-07-52.gh-issue-114198.lK4Iif.rst new file mode 100644 index 00000000000000..fa047e288f807e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-01-18-10-07-52.gh-issue-114198.lK4Iif.rst @@ -0,0 +1,2 @@ +The signature for the ``__replace__`` method on :mod:`dataclasses` now has +the first argument named ``self``, rather than ``obj``. diff --git a/Misc/NEWS.d/next/Security/2023-12-06-14-06-59.gh-issue-112302.3bl20f.rst b/Misc/NEWS.d/next/Security/2023-12-06-14-06-59.gh-issue-112302.3bl20f.rst deleted file mode 100644 index 65e4dc3762d3c0..00000000000000 --- a/Misc/NEWS.d/next/Security/2023-12-06-14-06-59.gh-issue-112302.3bl20f.rst +++ /dev/null @@ -1,2 +0,0 @@ -Created a Software Bill-of-Materials document and tooling for tracking -dependencies. diff --git a/Misc/NEWS.d/next/Tests/2020-05-16-18-00-21.bpo-40648.p2uPqy.rst b/Misc/NEWS.d/next/Tests/2020-05-16-18-00-21.bpo-40648.p2uPqy.rst deleted file mode 100644 index 8fbe42d263feb9..00000000000000 --- a/Misc/NEWS.d/next/Tests/2020-05-16-18-00-21.bpo-40648.p2uPqy.rst +++ /dev/null @@ -1 +0,0 @@ -Test modes that file can get with chmod() on Windows. diff --git a/Misc/NEWS.d/next/Tests/2023-09-05-20-46-35.gh-issue-108927.TpwWav.rst b/Misc/NEWS.d/next/Tests/2023-09-05-20-46-35.gh-issue-108927.TpwWav.rst deleted file mode 100644 index b1a78370afedb2..00000000000000 --- a/Misc/NEWS.d/next/Tests/2023-09-05-20-46-35.gh-issue-108927.TpwWav.rst +++ /dev/null @@ -1,4 +0,0 @@ -Fixed order dependence in running tests in the same process -when a test that has submodules (e.g. test_importlib) follows a test that -imports its submodule (e.g. test_importlib.util) and precedes a test -(e.g. test_unittest or test_compileall) that uses that submodule.
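A minimal sketch of the pathlib.Path.glob entry above (path-like patterns), assuming the patched behavior:

    from pathlib import Path, PurePath

    # The pattern may now be any path-like object, not only a str.
    matches = list(Path("Lib").glob(PurePath("*.py")))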
diff --git a/Misc/NEWS.d/next/Tests/2023-12-04-15-56-11.gh-issue-112334.FFc9Ti.rst b/Misc/NEWS.d/next/Tests/2023-12-04-15-56-11.gh-issue-112334.FFc9Ti.rst deleted file mode 100644 index aeaad6e5055522..00000000000000 --- a/Misc/NEWS.d/next/Tests/2023-12-04-15-56-11.gh-issue-112334.FFc9Ti.rst +++ /dev/null @@ -1,2 +0,0 @@ -Adds a regression test to verify that ``vfork()`` is used when expected by -:mod:`subprocess` on vfork enabled POSIX systems (Linux). diff --git a/Misc/NEWS.d/next/Tests/2023-12-05-19-50-03.gh-issue-112769.kdLJmS.rst b/Misc/NEWS.d/next/Tests/2023-12-05-19-50-03.gh-issue-112769.kdLJmS.rst deleted file mode 100644 index 1bbbb26fc322fa..00000000000000 --- a/Misc/NEWS.d/next/Tests/2023-12-05-19-50-03.gh-issue-112769.kdLJmS.rst +++ /dev/null @@ -1,3 +0,0 @@ -The tests now correctly compare zlib version when -:const:`zlib.ZLIB_RUNTIME_VERSION` contains non-integer suffixes. For -example zlib-ng defines the version as ``1.3.0.zlib-ng``. diff --git a/Misc/NEWS.d/next/Tests/2023-12-09-21-27-46.gh-issue-109980.y--500.rst b/Misc/NEWS.d/next/Tests/2023-12-09-21-27-46.gh-issue-109980.y--500.rst deleted file mode 100644 index c475a33919db98..00000000000000 --- a/Misc/NEWS.d/next/Tests/2023-12-09-21-27-46.gh-issue-109980.y--500.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix ``test_tarfile_vs_tar`` in ``test_shutil`` for macOS, where system tar -can include more information in the archive than :mod:`shutil.make_archive`. diff --git a/Misc/NEWS.d/next/Tests/2024-01-01-14-40-02.gh-issue-113633.VOY5ai.rst b/Misc/NEWS.d/next/Tests/2024-01-01-14-40-02.gh-issue-113633.VOY5ai.rst deleted file mode 100644 index 150c0d91852cdf..00000000000000 --- a/Misc/NEWS.d/next/Tests/2024-01-01-14-40-02.gh-issue-113633.VOY5ai.rst +++ /dev/null @@ -1 +0,0 @@ -Use module state for the _testcapi extension module. diff --git a/Misc/NEWS.d/next/Windows/2023-08-08-01-42-14.gh-issue-73427.WOpiNt.rst b/Misc/NEWS.d/next/Windows/2023-08-08-01-42-14.gh-issue-73427.WOpiNt.rst deleted file mode 100644 index 830c4c54838e80..00000000000000 --- a/Misc/NEWS.d/next/Windows/2023-08-08-01-42-14.gh-issue-73427.WOpiNt.rst +++ /dev/null @@ -1,2 +0,0 @@ -Deprecate :func:`sys._enablelegacywindowsfsencoding`. Use -:envvar:`PYTHONLEGACYWINDOWSFSENCODING` instead. Patch by Inada Naoki. diff --git a/Misc/NEWS.d/next/Windows/2023-12-03-19-22-37.gh-issue-112278.FiloCE.rst b/Misc/NEWS.d/next/Windows/2023-12-03-19-22-37.gh-issue-112278.FiloCE.rst deleted file mode 100644 index 0350d105d97375..00000000000000 --- a/Misc/NEWS.d/next/Windows/2023-12-03-19-22-37.gh-issue-112278.FiloCE.rst +++ /dev/null @@ -1,2 +0,0 @@ -Reduce the time cost for some functions in :mod:`platform` on Windows if -current user has no permission to the WMI. diff --git a/Misc/NEWS.d/next/Windows/2023-12-05-22-56-30.gh-issue-111650.xlWmvM.rst b/Misc/NEWS.d/next/Windows/2023-12-05-22-56-30.gh-issue-111650.xlWmvM.rst deleted file mode 100644 index 5a3493356e30be..00000000000000 --- a/Misc/NEWS.d/next/Windows/2023-12-05-22-56-30.gh-issue-111650.xlWmvM.rst +++ /dev/null @@ -1,3 +0,0 @@ -Ensures the ``Py_GIL_DISABLED`` preprocessor variable is defined in -:file:`pyconfig.h` so that extension modules written in C are able to use -it. 
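A hedged sketch of the comparison described in the zlib test entry above, keeping only the numeric prefix so versions such as "1.3.0.zlib-ng" still parse:

    import zlib

    parts = []
    for piece in zlib.ZLIB_RUNTIME_VERSION.split("."):
        if not piece.isdigit():
            break
        parts.append(int(piece))
    print(tuple(parts) >= (1, 2, 11))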
diff --git a/Misc/NEWS.d/next/Windows/2023-12-11-20-23-04.gh-issue-71383.9pZh6t.rst b/Misc/NEWS.d/next/Windows/2023-12-11-20-23-04.gh-issue-71383.9pZh6t.rst deleted file mode 100644 index cf2883357a962a..00000000000000 --- a/Misc/NEWS.d/next/Windows/2023-12-11-20-23-04.gh-issue-71383.9pZh6t.rst +++ /dev/null @@ -1,2 +0,0 @@ -Update Tcl/Tk in Windows installer to 8.6.13 with a patch to suppress -incorrect ThemeChanged warnings. diff --git a/Misc/NEWS.d/next/Windows/2023-12-12-20-58-09.gh-issue-86179.YYSk_6.rst b/Misc/NEWS.d/next/Windows/2023-12-12-20-58-09.gh-issue-86179.YYSk_6.rst deleted file mode 100644 index c1d96792bdae0b..00000000000000 --- a/Misc/NEWS.d/next/Windows/2023-12-12-20-58-09.gh-issue-86179.YYSk_6.rst +++ /dev/null @@ -1 +0,0 @@ -Fixes path calculations when launching Python on Windows through a symlink. diff --git a/Misc/NEWS.d/next/Windows/2023-12-14-19-00-29.gh-issue-113009.6LNdjz.rst b/Misc/NEWS.d/next/Windows/2023-12-14-19-00-29.gh-issue-113009.6LNdjz.rst deleted file mode 100644 index 6fd7f7f9afdfa2..00000000000000 --- a/Misc/NEWS.d/next/Windows/2023-12-14-19-00-29.gh-issue-113009.6LNdjz.rst +++ /dev/null @@ -1,5 +0,0 @@ -:mod:`multiprocessing`: On Windows, fix a race condition in -``Process.terminate()``: no longer set the ``returncode`` attribute to -always call ``WaitForSingleObject()`` in ``Process.wait()``. Previously, -sometimes the process was still running after ``TerminateProcess()`` even if -``GetExitCodeProcess()`` is not ``STILL_ACTIVE``. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Windows/2023-12-19-10-56-46.gh-issue-111973.A9Wtsb.rst b/Misc/NEWS.d/next/Windows/2023-12-19-10-56-46.gh-issue-111973.A9Wtsb.rst deleted file mode 100644 index 0cefa4e44093f0..00000000000000 --- a/Misc/NEWS.d/next/Windows/2023-12-19-10-56-46.gh-issue-111973.A9Wtsb.rst +++ /dev/null @@ -1 +0,0 @@ -Update Windows installer to use SQLite 3.44.2. diff --git a/Misc/NEWS.d/next/Windows/2023-12-19-22-32-28.gh-issue-112984.F7kFMl.rst b/Misc/NEWS.d/next/Windows/2023-12-19-22-32-28.gh-issue-112984.F7kFMl.rst new file mode 100644 index 00000000000000..429cd5bc923e09 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2023-12-19-22-32-28.gh-issue-112984.F7kFMl.rst @@ -0,0 +1 @@ +Adds free-threaded binaries to Windows installer as an optional component. diff --git a/Misc/NEWS.d/next/macOS/2023-12-06-12-11-13.gh-issue-109981.mOHg10.rst b/Misc/NEWS.d/next/macOS/2023-12-06-12-11-13.gh-issue-109981.mOHg10.rst deleted file mode 100644 index f86ab2c37ee6ec..00000000000000 --- a/Misc/NEWS.d/next/macOS/2023-12-06-12-11-13.gh-issue-109981.mOHg10.rst +++ /dev/null @@ -1,3 +0,0 @@ -Use ``/dev/fd`` on macOS to determine the number of open files in -``test.support.os_helper.fd_count`` to avoid a crash with "guarded" file -descriptors when probing for open files. diff --git a/Misc/NEWS.d/next/macOS/2023-12-07-14-19-46.gh-issue-110820.DIxb_F.rst b/Misc/NEWS.d/next/macOS/2023-12-07-14-19-46.gh-issue-110820.DIxb_F.rst deleted file mode 100644 index 0badace7928745..00000000000000 --- a/Misc/NEWS.d/next/macOS/2023-12-07-14-19-46.gh-issue-110820.DIxb_F.rst +++ /dev/null @@ -1,3 +0,0 @@ -Make sure the preprocessor definitions for ``ALIGNOF_MAX_ALIGN_T``, -``SIZEOF_LONG_DOUBLE`` and ``HAVE_GCC_ASM_FOR_X64`` are correct for -Universal 2 builds on macOS. 
diff --git a/Misc/NEWS.d/next/macOS/2023-12-07-15-53-16.gh-issue-110017.UMYzMR.rst b/Misc/NEWS.d/next/macOS/2023-12-07-15-53-16.gh-issue-110017.UMYzMR.rst deleted file mode 100644 index eab1746f1ae3f7..00000000000000 --- a/Misc/NEWS.d/next/macOS/2023-12-07-15-53-16.gh-issue-110017.UMYzMR.rst +++ /dev/null @@ -1,2 +0,0 @@ -Disable a signal handling stress test on macOS due to a bug in macOS -(FB13453490). diff --git a/Misc/NEWS.d/next/macOS/2023-12-10-20-30-06.gh-issue-102362.y8svbF.rst b/Misc/NEWS.d/next/macOS/2023-12-10-20-30-06.gh-issue-102362.y8svbF.rst deleted file mode 100644 index 55c5ac01434660..00000000000000 --- a/Misc/NEWS.d/next/macOS/2023-12-10-20-30-06.gh-issue-102362.y8svbF.rst +++ /dev/null @@ -1,3 +0,0 @@ -Make sure the result of :func:`sysconfig.get_platform` includes at least -major and minor versions, even if ``MACOSX_DEPLOYMENT_TARGET`` is set to -only a major version during build to match the format expected by pip. diff --git a/Misc/NEWS.d/next/macOS/2023-12-16-11-45-32.gh-issue-108269.wVgCHF.rst b/Misc/NEWS.d/next/macOS/2023-12-16-11-45-32.gh-issue-108269.wVgCHF.rst deleted file mode 100644 index 85598454abcaad..00000000000000 --- a/Misc/NEWS.d/next/macOS/2023-12-16-11-45-32.gh-issue-108269.wVgCHF.rst +++ /dev/null @@ -1,4 +0,0 @@ -Set ``CFBundleAllowMixedLocalizations`` to true in the Info.plist for the -framework, embedded Python.app and IDLE.app with framework installs on -macOS. This allows applications to pick up the user's preferred locale when -that's different from English. diff --git a/Misc/NEWS.d/next/macOS/2023-12-19-10-50-08.gh-issue-111973.HMHJfy.rst b/Misc/NEWS.d/next/macOS/2023-12-19-10-50-08.gh-issue-111973.HMHJfy.rst deleted file mode 100644 index 0cf3abf3b71890..00000000000000 --- a/Misc/NEWS.d/next/macOS/2023-12-19-10-50-08.gh-issue-111973.HMHJfy.rst +++ /dev/null @@ -1 +0,0 @@ -Update macOS installer to use SQLite 3.44.2. diff --git a/Misc/NEWS.d/next/macOS/2023-12-21-09-41-42.gh-issue-87277.IF6EZZ.rst b/Misc/NEWS.d/next/macOS/2023-12-21-09-41-42.gh-issue-87277.IF6EZZ.rst deleted file mode 100644 index 4ae55c0293198a..00000000000000 --- a/Misc/NEWS.d/next/macOS/2023-12-21-09-41-42.gh-issue-87277.IF6EZZ.rst +++ /dev/null @@ -1,3 +0,0 @@ -webbrowser: Don't look for X11 browsers on macOS. Those are generally not -used and probing for them can result in starting XQuartz even if it isn't -used otherwise. diff --git a/Misc/NEWS.d/next/macOS/2023-12-21-10-20-41.gh-issue-65701.Q2hNbN.rst b/Misc/NEWS.d/next/macOS/2023-12-21-10-20-41.gh-issue-65701.Q2hNbN.rst deleted file mode 100644 index 870b84a4d1af80..00000000000000 --- a/Misc/NEWS.d/next/macOS/2023-12-21-10-20-41.gh-issue-65701.Q2hNbN.rst +++ /dev/null @@ -1,2 +0,0 @@ -The :program:`freeze` tool doesn't work with framework builds of Python. -Document this and bail out early when running the tool with such a build. diff --git a/Misc/NEWS.d/next/macOS/2023-12-21-11-53-47.gh-issue-74573.MA6Vys.rst b/Misc/NEWS.d/next/macOS/2023-12-21-11-53-47.gh-issue-74573.MA6Vys.rst deleted file mode 100644 index 96dcd4765d95da..00000000000000 --- a/Misc/NEWS.d/next/macOS/2023-12-21-11-53-47.gh-issue-74573.MA6Vys.rst +++ /dev/null @@ -1,3 +0,0 @@ -Document that :mod:`dbm.ndbm` can silently corrupt DBM files on updates when -exceeding undocumented platform limits, and can crash (segmentation fault) -when reading such a corrupted file.
(FB8919203) diff --git a/Misc/NEWS.d/next/macOS/2023-12-23-22-41-07.gh-issue-110459.NaMBJy.rst b/Misc/NEWS.d/next/macOS/2023-12-23-22-41-07.gh-issue-110459.NaMBJy.rst deleted file mode 100644 index 44ffd857785f0d..00000000000000 --- a/Misc/NEWS.d/next/macOS/2023-12-23-22-41-07.gh-issue-110459.NaMBJy.rst +++ /dev/null @@ -1,2 +0,0 @@ -Running ``configure ... --with-openssl-rpath=X/Y/Z`` no longer fails to detect -OpenSSL on macOS. diff --git a/Misc/NEWS.d/next/macOS/2023-12-28-12-18-39.gh-issue-113536.0ythg7.rst b/Misc/NEWS.d/next/macOS/2023-12-28-12-18-39.gh-issue-113536.0ythg7.rst deleted file mode 100644 index 828b872d283627..00000000000000 --- a/Misc/NEWS.d/next/macOS/2023-12-28-12-18-39.gh-issue-113536.0ythg7.rst +++ /dev/null @@ -1 +0,0 @@ -:func:`os.waitid` is now available on macOS diff --git a/Misc/python.man b/Misc/python.man index 14cbd85c60bfa9..0f5dfa2e2289f7 100644 --- a/Misc/python.man +++ b/Misc/python.man @@ -604,6 +604,9 @@ can be set to the callable of your debugger of choice. .IP PYTHON_COLORS If this variable is set to 1, the interpreter will colorize various kinds of output. Setting it to 0 deactivates this behavior. +.IP PYTHON_HISTORY +This environment variable can be used to set the location of a history file +(on Unix, it is \fI~/.python_history\fP by default). .SS Debug-mode variables Setting these variables only has an effect in a debug build of Python, that is, if Python was configured with the diff --git a/Modules/Setup.bootstrap.in b/Modules/Setup.bootstrap.in index cd12c1bd0df8f9..aa4e60e272653b 100644 --- a/Modules/Setup.bootstrap.in +++ b/Modules/Setup.bootstrap.in @@ -11,6 +11,7 @@ faulthandler faulthandler.c posix posixmodule.c _signal signalmodule.c _tracemalloc _tracemalloc.c +_suggestions _suggestions.c # modules used by importlib, deepfreeze, freeze, runpy, and sysconfig _codecs _codecsmodule.c diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c index 3a11cdc926f138..c1aa849ecf1aad 100644 --- a/Modules/_asynciomodule.c +++ b/Modules/_asynciomodule.c @@ -597,12 +597,27 @@ future_set_exception(asyncio_state *state, FutureObj *fut, PyObject *exc) PyErr_SetString(PyExc_TypeError, "invalid exception object"); return NULL; } - if (Py_IS_TYPE(exc_val, (PyTypeObject *)PyExc_StopIteration)) { + if (PyErr_GivenExceptionMatches(exc_val, PyExc_StopIteration)) { + const char *msg = "StopIteration interacts badly with " + "generators and cannot be raised into a " + "Future"; + PyObject *message = PyUnicode_FromString(msg); + if (message == NULL) { + Py_DECREF(exc_val); + return NULL; + } + PyObject *err = PyObject_CallOneArg(PyExc_RuntimeError, message); + Py_DECREF(message); + if (err == NULL) { + Py_DECREF(exc_val); + return NULL; + } + assert(PyExceptionInstance_Check(err)); + + PyException_SetCause(err, Py_NewRef(exc_val)); + PyException_SetContext(err, Py_NewRef(exc_val)); Py_DECREF(exc_val); - PyErr_SetString(PyExc_TypeError, - "StopIteration interacts badly with generators " - "and cannot be raised into a Future"); - return NULL; + exc_val = err; } assert(!fut->fut_exception); @@ -2754,7 +2769,6 @@ gen_status_from_result(PyObject **result) static PyObject * task_step_impl(asyncio_state *state, TaskObj *task, PyObject *exc) { - int res; int clear_exc = 0; PyObject *result = NULL; PyObject *coro; @@ -2771,20 +2785,7 @@ task_step_impl(asyncio_state *state, TaskObj *task, PyObject *exc) if (task->task_must_cancel) { assert(exc != Py_None); - if (exc) { - /* Check if exc is a CancelledError */ - res = PyObject_IsInstance(exc, 
state->asyncio_CancelledError); - if (res == -1) { - /* An error occurred, abort */ - goto fail; - } - if (res == 0) { - /* exc is not CancelledError; reset it to NULL */ - exc = NULL; - } - } - - if (!exc) { + if (!exc || !PyErr_GivenExceptionMatches(exc, state->asyncio_CancelledError)) { /* exc was not a CancelledError */ exc = create_cancelled_error(state, (FutureObj*)task); diff --git a/Modules/_csv.c b/Modules/_csv.c index ae6b6457ffad9a..8d941563025580 100644 --- a/Modules/_csv.c +++ b/Modules/_csv.c @@ -8,8 +8,6 @@ module instead. */ -#define MODULE_VERSION "1.0" - // clinic/_csv.c.h uses internal pycore_modsupport.h API #ifndef Py_BUILD_CORE_BUILTIN # define Py_BUILD_CORE_MODULE 1 @@ -837,7 +835,8 @@ parse_process_char(ReaderObj *self, _csvstate *module_state, Py_UCS4 c) self->state = START_RECORD; else { PyErr_Format(module_state->error_obj, - "new-line character seen in unquoted field - do you need to open the file in universal-newline mode?"); + "new-line character seen in unquoted field - " + "do you need to open the file with newline=''?"); return -1; } break; @@ -1606,68 +1605,7 @@ PyType_Spec error_spec = { * MODULE */ -PyDoc_STRVAR(csv_module_doc, -"CSV parsing and writing.\n" -"\n" -"This module provides classes that assist in the reading and writing\n" -"of Comma Separated Value (CSV) files, and implements the interface\n" -"described by PEP 305. Although many CSV files are simple to parse,\n" -"the format is not formally defined by a stable specification and\n" -"is subtle enough that parsing lines of a CSV file with something\n" -"like line.split(\",\") is bound to fail. The module supports three\n" -"basic APIs: reading, writing, and registration of dialects.\n" -"\n" -"\n" -"DIALECT REGISTRATION:\n" -"\n" -"Readers and writers support a dialect argument, which is a convenient\n" -"handle on a group of settings. When the dialect argument is a string,\n" -"it identifies one of the dialects previously registered with the module.\n" -"If it is a class or instance, the attributes of the argument are used as\n" -"the settings for the reader or writer:\n" -"\n" -" class excel:\n" -" delimiter = ','\n" -" quotechar = '\"'\n" -" escapechar = None\n" -" doublequote = True\n" -" skipinitialspace = False\n" -" lineterminator = '\\r\\n'\n" -" quoting = QUOTE_MINIMAL\n" -"\n" -"SETTINGS:\n" -"\n" -" * quotechar - specifies a one-character string to use as the\n" -" quoting character. It defaults to '\"'.\n" -" * delimiter - specifies a one-character string to use as the\n" -" field separator. It defaults to ','.\n" -" * skipinitialspace - specifies how to interpret spaces which\n" -" immediately follow a delimiter. It defaults to False, which\n" -" means that spaces immediately following a delimiter is part\n" -" of the following field.\n" -" * lineterminator - specifies the character sequence which should\n" -" terminate rows.\n" -" * quoting - controls when quotes should be generated by the writer.\n" -" It can take on any of the following module constants:\n" -"\n" -" csv.QUOTE_MINIMAL means only when required, for example, when a\n" -" field contains either the quotechar or the delimiter\n" -" csv.QUOTE_ALL means that quotes are always placed around fields.\n" -" csv.QUOTE_NONNUMERIC means that quotes are always placed around\n" -" fields which do not parse as integers or floating point\n" -" numbers.\n" -" csv.QUOTE_STRINGS means that quotes are always placed around\n" -" fields which are strings. 
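The future_set_exception() hunk above changes how asyncio Futures treat StopIteration: instead of rejecting it with a TypeError at call time, the exception is wrapped in a RuntimeError whose __cause__ and __context__ keep the original instance, and that RuntimeError is what the awaiter sees. A small Python-level sketch of the resulting behaviour (an illustration, not part of the diff):

    import asyncio

    async def demo():
        fut = asyncio.get_running_loop().create_future()
        # Previously this call itself raised TypeError; with the change the
        # StopIteration is accepted and wrapped in a RuntimeError.
        fut.set_exception(StopIteration(42))
        try:
            await fut
        except RuntimeError as err:
            # The original exception survives as the cause/context.
            print(type(err.__cause__).__name__)   # StopIteration
            print(err.__cause__.value)            # 42

    asyncio.run(demo())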
Note that the Python value None\n" -" is not a string.\n" -" csv.QUOTE_NOTNULL means that quotes are only placed around fields\n" -" that are not the Python value None.\n" -" csv.QUOTE_NONE means that quotes are never placed around fields.\n" -" * escapechar - specifies a one-character string used to escape\n" -" the delimiter when quoting is set to QUOTE_NONE.\n" -" * doublequote - controls the handling of quotes inside fields. When\n" -" True, two consecutive quotes are interpreted as one during read,\n" -" and when writing, each quote character embedded in the data is\n" -" written as two quotes\n"); +PyDoc_STRVAR(csv_module_doc, "CSV parsing and writing.\n"); PyDoc_STRVAR(csv_reader_doc, " csv_reader = reader(iterable [, dialect='excel']\n" @@ -1740,12 +1678,6 @@ csv_exec(PyObject *module) { return -1; } - /* Add version to the module. */ - if (PyModule_AddStringConstant(module, "__version__", - MODULE_VERSION) == -1) { - return -1; - } - /* Set the field limit */ module_state->field_limit = 128 * 1024; diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c index fc16b9176fd1c0..b51a03b5497fed 100644 --- a/Modules/_ctypes/_ctypes.c +++ b/Modules/_ctypes/_ctypes.c @@ -876,20 +876,6 @@ CDataType_repeat(PyObject *self, Py_ssize_t length) return PyCArrayType_from_ctype(self, length); } -static PySequenceMethods CDataType_as_sequence = { - 0, /* inquiry sq_length; */ - 0, /* binaryfunc sq_concat; */ - CDataType_repeat, /* intargfunc sq_repeat; */ - 0, /* intargfunc sq_item; */ - 0, /* intintargfunc sq_slice; */ - 0, /* intobjargproc sq_ass_item; */ - 0, /* intintobjargproc sq_ass_slice; */ - 0, /* objobjproc sq_contains; */ - - 0, /* binaryfunc sq_inplace_concat; */ - 0, /* intargfunc sq_inplace_repeat; */ -}; - static int CDataType_clear(PyTypeObject *self) { @@ -903,8 +889,10 @@ static int CDataType_traverse(PyTypeObject *self, visitproc visit, void *arg) { StgDictObject *dict = PyType_stgdict((PyObject *)self); - if (dict) + if (dict) { Py_VISIT(dict->proto); + } + Py_VISIT(Py_TYPE(self)); return PyType_Type.tp_traverse((PyObject *)self, visit, arg); } @@ -935,91 +923,45 @@ UnionType_setattro(PyObject *self, PyObject *key, PyObject *value) return 0; } +static PyType_Slot pycstruct_type_slots[] = { + {Py_tp_setattro, PyCStructType_setattro}, + {Py_tp_doc, PyDoc_STR("metatype for the CData Objects")}, + {Py_tp_traverse, CDataType_traverse}, + {Py_tp_clear, CDataType_clear}, + {Py_tp_methods, CDataType_methods}, + {Py_tp_new, PyCStructType_new}, -PyTypeObject PyCStructType_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_ctypes.PyCStructType", /* tp_name */ - 0, /* tp_basicsize */ - 0, /* tp_itemsize */ - 0, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - &CDataType_as_sequence, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - PyCStructType_setattro, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, /* tp_flags */ - PyDoc_STR("metatype for the CData Objects"), /* tp_doc */ - (traverseproc)CDataType_traverse, /* tp_traverse */ - (inquiry)CDataType_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - CDataType_methods, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* 
tp_dictoffset */ - 0, /* tp_init */ - 0, /* tp_alloc */ - PyCStructType_new, /* tp_new */ - 0, /* tp_free */ + // Sequence protocol. + {Py_sq_repeat, CDataType_repeat}, + {0, NULL}, }; -static PyTypeObject UnionType_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_ctypes.UnionType", /* tp_name */ - 0, /* tp_basicsize */ - 0, /* tp_itemsize */ - 0, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - &CDataType_as_sequence, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - UnionType_setattro, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, /* tp_flags */ - PyDoc_STR("metatype for the CData Objects"), /* tp_doc */ - (traverseproc)CDataType_traverse, /* tp_traverse */ - (inquiry)CDataType_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - CDataType_methods, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - 0, /* tp_init */ - 0, /* tp_alloc */ - UnionType_new, /* tp_new */ - 0, /* tp_free */ +PyType_Spec pycstruct_type_spec = { + .name = "_ctypes.PyCStructType", + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = pycstruct_type_slots, +}; + +static PyType_Slot union_type_slots[] = { + {Py_tp_setattro, UnionType_setattro}, + {Py_tp_doc, PyDoc_STR("metatype for the Union Objects")}, + {Py_tp_traverse, CDataType_traverse}, + {Py_tp_clear, CDataType_clear}, + {Py_tp_methods, CDataType_methods}, + {Py_tp_new, UnionType_new}, + + // Sequence protocol. 
+ {Py_sq_repeat, CDataType_repeat}, + {0, NULL}, }; +static PyType_Spec union_type_spec = { + .name = "_ctypes.UnionType", + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = union_type_slots, +}; /******************************************************************/ @@ -1234,46 +1176,23 @@ static PyMethodDef PyCPointerType_methods[] = { { NULL, NULL }, }; -PyTypeObject PyCPointerType_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_ctypes.PyCPointerType", /* tp_name */ - 0, /* tp_basicsize */ - 0, /* tp_itemsize */ - 0, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - &CDataType_as_sequence, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, /* tp_flags */ - PyDoc_STR("metatype for the Pointer Objects"), /* tp_doc */ - (traverseproc)CDataType_traverse, /* tp_traverse */ - (inquiry)CDataType_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - PyCPointerType_methods, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - 0, /* tp_init */ - 0, /* tp_alloc */ - PyCPointerType_new, /* tp_new */ - 0, /* tp_free */ +static PyType_Slot pycpointer_type_slots[] = { + {Py_tp_doc, PyDoc_STR("metatype for the Pointer Objects")}, + {Py_tp_traverse, CDataType_traverse}, + {Py_tp_clear, CDataType_clear}, + {Py_tp_methods, PyCPointerType_methods}, + {Py_tp_new, PyCPointerType_new}, + + // Sequence protocol. 
+ {Py_sq_repeat, CDataType_repeat}, + {0, NULL}, +}; + +static PyType_Spec pycpointer_type_spec = { + .name = "_ctypes.PyCPointerType", + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = pycpointer_type_slots, }; @@ -1606,48 +1525,24 @@ PyCArrayType_new(PyTypeObject *type, PyObject *args, PyObject *kwds) return NULL; } -PyTypeObject PyCArrayType_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_ctypes.PyCArrayType", /* tp_name */ - 0, /* tp_basicsize */ - 0, /* tp_itemsize */ - 0, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - &CDataType_as_sequence, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - PyDoc_STR("metatype for the Array Objects"), /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - CDataType_methods, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - 0, /* tp_init */ - 0, /* tp_alloc */ - PyCArrayType_new, /* tp_new */ - 0, /* tp_free */ +static PyType_Slot pycarray_type_slots[] = { + {Py_tp_doc, PyDoc_STR("metatype for the Array Objects")}, + {Py_tp_traverse, CDataType_traverse}, + {Py_tp_methods, CDataType_methods}, + {Py_tp_new, PyCArrayType_new}, + {Py_tp_clear, CDataType_clear}, + + // Sequence protocol. + {Py_sq_repeat, CDataType_repeat}, + {0, NULL}, }; +static PyType_Spec pycarray_type_spec = { + .name = "_ctypes.PyCArrayType", + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = pycarray_type_slots, +}; /******************************************************************/ /* @@ -2175,7 +2070,11 @@ PyCSimpleType_new(PyTypeObject *type, PyObject *args, PyObject *kwds) } } - if (type == &PyCSimpleType_Type && fmt->setfunc_swapped && fmt->getfunc_swapped) { + ctypes_state *st = GLOBAL_STATE(); + if (type == st->PyCSimpleType_Type + && fmt->setfunc_swapped + && fmt->getfunc_swapped) + { PyObject *swapped = CreateSwappedType(type, args, kwds, proto, fmt); StgDictObject *sw_dict; @@ -2292,46 +2191,23 @@ static PyMethodDef PyCSimpleType_methods[] = { { NULL, NULL }, }; -PyTypeObject PyCSimpleType_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_ctypes.PyCSimpleType", /* tp_name */ - 0, /* tp_basicsize */ - 0, /* tp_itemsize */ - 0, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - &CDataType_as_sequence, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - PyDoc_STR("metatype for the PyCSimpleType Objects"), /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - PyCSimpleType_methods, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - 0, /* 
tp_init */ - 0, /* tp_alloc */ - PyCSimpleType_new, /* tp_new */ - 0, /* tp_free */ +static PyType_Slot pycsimple_type_slots[] = { + {Py_tp_doc, PyDoc_STR("metatype for the PyCSimpleType Objects")}, + {Py_tp_methods, PyCSimpleType_methods}, + {Py_tp_new, PyCSimpleType_new}, + {Py_tp_traverse, CDataType_traverse}, + {Py_tp_clear, CDataType_clear}, + + // Sequence protocol. + {Py_sq_repeat, CDataType_repeat}, + {0, NULL}, +}; + +PyType_Spec pycsimple_type_spec = { + .name = "_ctypes.PyCSimpleType", + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = pycsimple_type_slots, }; /******************************************************************/ @@ -2575,46 +2451,23 @@ PyCFuncPtrType_new(PyTypeObject *type, PyObject *args, PyObject *kwds) return (PyObject *)result; } -PyTypeObject PyCFuncPtrType_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_ctypes.PyCFuncPtrType", /* tp_name */ - 0, /* tp_basicsize */ - 0, /* tp_itemsize */ - 0, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - &CDataType_as_sequence, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, /* tp_flags */ - PyDoc_STR("metatype for C function pointers"), /* tp_doc */ - (traverseproc)CDataType_traverse, /* tp_traverse */ - (inquiry)CDataType_clear, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - CDataType_methods, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - 0, /* tp_init */ - 0, /* tp_alloc */ - PyCFuncPtrType_new, /* tp_new */ - 0, /* tp_free */ +static PyType_Slot pycfuncptr_type_slots[] = { + {Py_tp_doc, PyDoc_STR("metatype for C function pointers")}, + {Py_tp_traverse, CDataType_traverse}, + {Py_tp_clear, CDataType_clear}, + {Py_tp_methods, CDataType_methods}, + {Py_tp_new, PyCFuncPtrType_new}, + + // Sequence protocol. 
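Across these hunks the ctypes metaclasses are converted from static PyTypeObject tables to PyType_Spec/PyType_Slot heap types held in the module state; the slot set is kept, and the Py_sq_repeat entry (CDataType_repeat) is the hook that makes multiplying a ctypes type by a length produce an array type. A Python-level sanity sketch of the behaviour those slots provide (not part of the diff):

    import ctypes

    # Multiplying a ctypes type by a length goes through the metaclass'
    # sq_repeat slot and builds an array type.
    IntArray4 = ctypes.c_int * 4
    print(IntArray4.__name__)              # c_int_Array_4
    print(len(IntArray4(1, 2, 3, 4)))      # 4

    # The metaclasses are still the types of the public ctypes classes,
    # now created from the PyType_Spec definitions above.
    print(type(ctypes.c_int).__name__)       # PyCSimpleType
    print(type(ctypes.Structure).__name__)   # PyCStructType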
+ {Py_sq_repeat, CDataType_repeat}, + {0, NULL}, +}; + +static PyType_Spec pycfuncptr_type_spec = { + .name = "_ctypes.PyCFuncPtrType", + .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC | + Py_TPFLAGS_IMMUTABLETYPE), + .slots = pycfuncptr_type_slots, }; @@ -4881,7 +4734,8 @@ PyCArrayType_from_ctype(PyObject *itemtype, Py_ssize_t length) ((PyTypeObject *)itemtype)->tp_name, (long)length); #endif - result = PyObject_CallFunction((PyObject *)&PyCArrayType_Type, + ctypes_state *st = GLOBAL_STATE(); + result = PyObject_CallFunction((PyObject *)st->PyCArrayType_Type, "s(O){s:n,s:O}", name, &PyCArray_Type, @@ -5690,24 +5544,29 @@ _ctypes_add_types(PyObject *mod) * * Metaclasses */ - TYPE_READY_BASE(&PyCStructType_Type, &PyType_Type); - TYPE_READY_BASE(&UnionType_Type, &PyType_Type); - TYPE_READY_BASE(&PyCPointerType_Type, &PyType_Type); - TYPE_READY_BASE(&PyCArrayType_Type, &PyType_Type); - TYPE_READY_BASE(&PyCSimpleType_Type, &PyType_Type); - TYPE_READY_BASE(&PyCFuncPtrType_Type, &PyType_Type); + CREATE_TYPE(mod, st->PyCStructType_Type, &pycstruct_type_spec, + &PyType_Type); + CREATE_TYPE(mod, st->UnionType_Type, &union_type_spec, &PyType_Type); + CREATE_TYPE(mod, st->PyCPointerType_Type, &pycpointer_type_spec, + &PyType_Type); + CREATE_TYPE(mod, st->PyCArrayType_Type, &pycarray_type_spec, + &PyType_Type); + CREATE_TYPE(mod, st->PyCSimpleType_Type, &pycsimple_type_spec, + &PyType_Type); + CREATE_TYPE(mod, st->PyCFuncPtrType_Type, &pycfuncptr_type_spec, + &PyType_Type); /************************************************* * * Classes using a custom metaclass */ - MOD_ADD_TYPE(&Struct_Type, &PyCStructType_Type, &PyCData_Type); - MOD_ADD_TYPE(&Union_Type, &UnionType_Type, &PyCData_Type); - MOD_ADD_TYPE(&PyCPointer_Type, &PyCPointerType_Type, &PyCData_Type); - MOD_ADD_TYPE(&PyCArray_Type, &PyCArrayType_Type, &PyCData_Type); - MOD_ADD_TYPE(&Simple_Type, &PyCSimpleType_Type, &PyCData_Type); - MOD_ADD_TYPE(&PyCFuncPtr_Type, &PyCFuncPtrType_Type, &PyCData_Type); + MOD_ADD_TYPE(&Struct_Type, st->PyCStructType_Type, &PyCData_Type); + MOD_ADD_TYPE(&Union_Type, st->UnionType_Type, &PyCData_Type); + MOD_ADD_TYPE(&PyCPointer_Type, st->PyCPointerType_Type, &PyCData_Type); + MOD_ADD_TYPE(&PyCArray_Type, st->PyCArrayType_Type, &PyCData_Type); + MOD_ADD_TYPE(&Simple_Type, st->PyCSimpleType_Type, &PyCData_Type); + MOD_ADD_TYPE(&PyCFuncPtr_Type, st->PyCFuncPtrType_Type, &PyCData_Type); /************************************************* * diff --git a/Modules/_ctypes/ctypes.h b/Modules/_ctypes/ctypes.h index 8891a0a741de7b..55e9f777788079 100644 --- a/Modules/_ctypes/ctypes.h +++ b/Modules/_ctypes/ctypes.h @@ -41,6 +41,12 @@ typedef struct { PyTypeObject *PyComError_Type; #endif PyTypeObject *StructParam_Type; + PyTypeObject *PyCStructType_Type; + PyTypeObject *UnionType_Type; + PyTypeObject *PyCPointerType_Type; + PyTypeObject *PyCArrayType_Type; + PyTypeObject *PyCSimpleType_Type; + PyTypeObject *PyCFuncPtrType_Type; } ctypes_state; extern ctypes_state global_state; @@ -118,7 +124,7 @@ typedef struct { Py_ssize_t b_size; /* size of memory block in bytes */ Py_ssize_t b_length; /* number of references we need */ Py_ssize_t b_index; /* index of this object into base's - b_object list */ + b_object list */ PyObject *b_objects; /* list of references we need to keep */ union value b_value; /* end of tagCDataObject, additional fields follow */ @@ -155,9 +161,8 @@ extern PyTypeObject PyCData_Type; #define CDataObject_Check(v) PyObject_TypeCheck(v, &PyCData_Type) #define 
_CDataObject_HasExternalBuffer(v) ((v)->b_ptr != (char *)&(v)->b_value) -extern PyTypeObject PyCSimpleType_Type; -#define PyCSimpleTypeObject_CheckExact(v) Py_IS_TYPE(v, &PyCSimpleType_Type) -#define PyCSimpleTypeObject_Check(v) PyObject_TypeCheck(v, &PyCSimpleType_Type) +#define PyCSimpleTypeObject_CheckExact(v) Py_IS_TYPE(v, GLOBAL_STATE()->PyCSimpleType_Type) +#define PyCSimpleTypeObject_Check(v) PyObject_TypeCheck(v, GLOBAL_STATE()->PyCSimpleType_Type) extern struct fielddesc *_ctypes_get_fielddesc(const char *fmt); @@ -171,21 +176,17 @@ PyCField_FromDesc(PyObject *desc, Py_ssize_t index, extern PyObject *PyCData_AtAddress(PyObject *type, void *buf); extern PyObject *PyCData_FromBytes(PyObject *type, char *data, Py_ssize_t length); -extern PyTypeObject PyCArrayType_Type; extern PyTypeObject PyCArray_Type; -extern PyTypeObject PyCPointerType_Type; extern PyTypeObject PyCPointer_Type; extern PyTypeObject PyCFuncPtr_Type; -extern PyTypeObject PyCFuncPtrType_Type; -extern PyTypeObject PyCStructType_Type; -#define PyCArrayTypeObject_Check(v) PyObject_TypeCheck(v, &PyCArrayType_Type) +#define PyCArrayTypeObject_Check(v) PyObject_TypeCheck(v, GLOBAL_STATE()->PyCArrayType_Type) #define ArrayObject_Check(v) PyObject_TypeCheck(v, &PyCArray_Type) #define PointerObject_Check(v) PyObject_TypeCheck(v, &PyCPointer_Type) -#define PyCPointerTypeObject_Check(v) PyObject_TypeCheck(v, &PyCPointerType_Type) +#define PyCPointerTypeObject_Check(v) PyObject_TypeCheck(v, GLOBAL_STATE()->PyCPointerType_Type) #define PyCFuncPtrObject_Check(v) PyObject_TypeCheck(v, &PyCFuncPtr_Type) -#define PyCFuncPtrTypeObject_Check(v) PyObject_TypeCheck(v, &PyCFuncPtrType_Type) -#define PyCStructTypeObject_Check(v) PyObject_TypeCheck(v, &PyCStructType_Type) +#define PyCFuncPtrTypeObject_Check(v) PyObject_TypeCheck(v, GLOBAL_STATE()->PyCFuncPtrType_Type) +#define PyCStructTypeObject_Check(v) PyObject_TypeCheck(v, GLOBAL_STATE()->PyCStructType_Type) extern PyObject * PyCArrayType_from_ctype(PyObject *itemtype, Py_ssize_t length); diff --git a/Modules/_io/fileio.c b/Modules/_io/fileio.c index 8a73ea0365b7a3..9cf268ca0b26c8 100644 --- a/Modules/_io/fileio.c +++ b/Modules/_io/fileio.c @@ -157,7 +157,7 @@ _io_FileIO_close_impl(fileio *self, PyTypeObject *cls) return res; } - PyObject *exc; + PyObject *exc = NULL; if (res == NULL) { exc = PyErr_GetRaisedException(); } @@ -1100,31 +1100,32 @@ static PyObject * fileio_repr(fileio *self) { PyObject *nameobj, *res; + const char *type_name = Py_TYPE((PyObject *) self)->tp_name; - if (self->fd < 0) - return PyUnicode_FromFormat("<_io.FileIO [closed]>"); + if (self->fd < 0) { + return PyUnicode_FromFormat("<%.100s [closed]>", type_name); + } if (PyObject_GetOptionalAttr((PyObject *) self, &_Py_ID(name), &nameobj) < 0) { return NULL; } if (nameobj == NULL) { res = PyUnicode_FromFormat( - "<_io.FileIO fd=%d mode='%s' closefd=%s>", - self->fd, mode_string(self), self->closefd ? "True" : "False"); + "<%.100s fd=%d mode='%s' closefd=%s>", + type_name, self->fd, mode_string(self), self->closefd ? "True" : "False"); } else { int status = Py_ReprEnter((PyObject *)self); res = NULL; if (status == 0) { res = PyUnicode_FromFormat( - "<_io.FileIO name=%R mode='%s' closefd=%s>", - nameobj, mode_string(self), self->closefd ? "True" : "False"); + "<%.100s name=%R mode='%s' closefd=%s>", + type_name, nameobj, mode_string(self), self->closefd ? 
"True" : "False"); Py_ReprLeave((PyObject *)self); } else if (status > 0) { PyErr_Format(PyExc_RuntimeError, - "reentrant call inside %s.__repr__", - Py_TYPE(self)->tp_name); + "reentrant call inside %.100s.__repr__", type_name); } Py_DECREF(nameobj); } diff --git a/Modules/_io/iobase.c b/Modules/_io/iobase.c index 4da8e5bd572d74..184e0b7d1aa7f1 100644 --- a/Modules/_io/iobase.c +++ b/Modules/_io/iobase.c @@ -66,12 +66,19 @@ PyDoc_STRVAR(iobase_doc, "with open('spam.txt', 'r') as fp:\n" " fp.write('Spam and eggs!')\n"); -/* Use this macro whenever you want to check the internal `closed` status + +/* Internal methods */ + +/* Use this function whenever you want to check the internal `closed` status of the IOBase object rather than the virtual `closed` attribute as returned by whatever subclass. */ +static int +iobase_is_closed(PyObject *self) +{ + return PyObject_HasAttrWithError(self, &_Py_ID(__IOBase_closed)); +} -/* Internal methods */ static PyObject * iobase_unsupported(_PyIO_State *state, const char *message) { @@ -145,14 +152,6 @@ _io__IOBase_truncate_impl(PyObject *self, PyTypeObject *cls, return iobase_unsupported(state, "truncate"); } -static int -iobase_is_closed(PyObject *self) -{ - /* This gets the derived attribute, which is *not* __IOBase_closed - in most cases! */ - return PyObject_HasAttrWithError(self, &_Py_ID(__IOBase_closed)); -} - /* Flush and close methods */ /*[clinic input] diff --git a/Modules/_io/textio.c b/Modules/_io/textio.c index 4507930c14bb50..d794af8de2b8f0 100644 --- a/Modules/_io/textio.c +++ b/Modules/_io/textio.c @@ -1762,8 +1762,10 @@ _io_TextIOWrapper_write_impl(textio *self, PyObject *text) } } - textiowrapper_set_decoded_chars(self, NULL); - Py_CLEAR(self->snapshot); + if (self->snapshot != NULL) { + textiowrapper_set_decoded_chars(self, NULL); + Py_CLEAR(self->snapshot); + } if (self->decoder) { ret = PyObject_CallMethodNoArgs(self->decoder, &_Py_ID(reset)); @@ -1999,8 +2001,10 @@ _io_TextIOWrapper_read_impl(textio *self, Py_ssize_t n) if (result == NULL) goto fail; - textiowrapper_set_decoded_chars(self, NULL); - Py_CLEAR(self->snapshot); + if (self->snapshot != NULL) { + textiowrapper_set_decoded_chars(self, NULL); + Py_CLEAR(self->snapshot); + } return result; } else { @@ -2944,10 +2948,11 @@ textiowrapper_repr(textio *self) { PyObject *nameobj, *modeobj, *res, *s; int status; + const char *type_name = Py_TYPE(self)->tp_name; CHECK_INITIALIZED(self); - res = PyUnicode_FromString("<_io.TextIOWrapper"); + res = PyUnicode_FromFormat("<%.100s", type_name); if (res == NULL) return NULL; @@ -2955,8 +2960,8 @@ textiowrapper_repr(textio *self) if (status != 0) { if (status > 0) { PyErr_Format(PyExc_RuntimeError, - "reentrant call inside %s.__repr__", - Py_TYPE(self)->tp_name); + "reentrant call inside %.100s.__repr__", + type_name); } goto error; } diff --git a/Modules/_io/winconsoleio.c b/Modules/_io/winconsoleio.c index 6680488b740cfc..54e15555417287 100644 --- a/Modules/_io/winconsoleio.c +++ b/Modules/_io/winconsoleio.c @@ -391,9 +391,9 @@ _io__WindowsConsoleIO___init___impl(winconsoleio *self, PyObject *nameobj, } if (self->writable) - self->fd = _Py_open_osfhandle_noraise(handle, _O_WRONLY | _O_BINARY); + self->fd = _Py_open_osfhandle_noraise(handle, _O_WRONLY | _O_BINARY | _O_NOINHERIT); else - self->fd = _Py_open_osfhandle_noraise(handle, _O_RDONLY | _O_BINARY); + self->fd = _Py_open_osfhandle_noraise(handle, _O_RDONLY | _O_BINARY | _O_NOINHERIT); if (self->fd < 0) { PyErr_SetFromErrnoWithFilenameObject(PyExc_OSError, nameobj); 
CloseHandle(handle); @@ -1070,15 +1070,22 @@ _io__WindowsConsoleIO_write_impl(winconsoleio *self, PyTypeObject *cls, static PyObject * winconsoleio_repr(winconsoleio *self) { - if (self->fd == -1) - return PyUnicode_FromFormat("<_io._WindowsConsoleIO [closed]>"); - - if (self->readable) - return PyUnicode_FromFormat("<_io._WindowsConsoleIO mode='rb' closefd=%s>", - self->closefd ? "True" : "False"); - if (self->writable) - return PyUnicode_FromFormat("<_io._WindowsConsoleIO mode='wb' closefd=%s>", - self->closefd ? "True" : "False"); + const char *type_name = (Py_TYPE((PyObject *)self)->tp_name); + + if (self->fd == -1) { + return PyUnicode_FromFormat("<%.100s [closed]>", type_name); + } + + if (self->readable) { + return PyUnicode_FromFormat("<%.100s mode='rb' closefd=%s>", + type_name, + self->closefd ? "True" : "False"); + } + if (self->writable) { + return PyUnicode_FromFormat("<%.100s mode='wb' closefd=%s>", + type_name, + self->closefd ? "True" : "False"); + } PyErr_SetString(PyExc_SystemError, "_WindowsConsoleIO has invalid mode"); return NULL; diff --git a/Modules/_lzmamodule.c b/Modules/_lzmamodule.c index eb90c308d16d19..f6bfbfa62687b8 100644 --- a/Modules/_lzmamodule.c +++ b/Modules/_lzmamodule.c @@ -492,7 +492,9 @@ build_filter_spec(const lzma_filter *f) case LZMA_FILTER_ARMTHUMB: case LZMA_FILTER_SPARC: { lzma_options_bcj *options = f->options; - ADD_FIELD(options, start_offset); + if (options) { + ADD_FIELD(options, start_offset); + } break; } default: diff --git a/Modules/_pickle.c b/Modules/_pickle.c index 227e5378e42285..f210c0ca205991 100644 --- a/Modules/_pickle.c +++ b/Modules/_pickle.c @@ -399,64 +399,6 @@ _Pickle_FastCall(PyObject *func, PyObject *obj) /*************************************************************************/ -/* Retrieve and deconstruct a method for avoiding a reference cycle - (pickler -> bound method of pickler -> pickler) */ -static int -init_method_ref(PyObject *self, PyObject *name, - PyObject **method_func, PyObject **method_self) -{ - PyObject *func, *func2; - int ret; - - /* *method_func and *method_self should be consistent. All refcount decrements - should be occurred after setting *method_self and *method_func. */ - ret = PyObject_GetOptionalAttr(self, name, &func); - if (func == NULL) { - *method_self = NULL; - Py_CLEAR(*method_func); - return ret; - } - - if (PyMethod_Check(func) && PyMethod_GET_SELF(func) == self) { - /* Deconstruct a bound Python method */ - *method_self = self; /* borrowed */ - func2 = PyMethod_GET_FUNCTION(func); - Py_XSETREF(*method_func, Py_NewRef(func2)); - Py_DECREF(func); - return 0; - } - else { - *method_self = NULL; - Py_XSETREF(*method_func, func); - return 0; - } -} - -/* Bind a method if it was deconstructed */ -static PyObject * -reconstruct_method(PyObject *func, PyObject *self) -{ - if (self) { - return PyMethod_New(func, self); - } - else { - return Py_NewRef(func); - } -} - -static PyObject * -call_method(PyObject *func, PyObject *self, PyObject *obj) -{ - if (self) { - return PyObject_CallFunctionObjArgs(func, self, obj, NULL); - } - else { - return PyObject_CallOneArg(func, obj); - } -} - -/*************************************************************************/ - /* Internal data type used as the unpickling stack. */ typedef struct { PyObject_VAR_HEAD @@ -668,9 +610,7 @@ typedef struct PicklerObject { PyMemoTable *memo; /* Memo table, keep track of the seen objects to support self-referential objects pickling. 
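The repr hunks for FileIO, TextIOWrapper and _WindowsConsoleIO above stop hard-coding names like "_io.FileIO" and format the instance's actual (length-limited) type name instead, so subclasses identify themselves in their repr. A rough illustration of the user-visible effect (the subclass, file name and exact output are illustrative):

    import io, os, tempfile

    class LoggedFile(io.FileIO):          # hypothetical subclass for the demo
        pass

    path = os.path.join(tempfile.gettempdir(), "repr_demo.bin")
    f = LoggedFile(path, "w")
    print(repr(f))   # roughly "<LoggedFile name='...' mode='wb' closefd=True>"
    f.close()
    print(repr(f))   # "<LoggedFile [closed]>" rather than "<_io.FileIO [closed]>"
    os.remove(path)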
*/ - PyObject *pers_func; /* persistent_id() method, can be NULL */ - PyObject *pers_func_self; /* borrowed reference to self if pers_func - is an unbound method, NULL otherwise */ + PyObject *persistent_id; /* persistent_id() method, can be NULL */ PyObject *dispatch_table; /* private dispatch_table, can be NULL */ PyObject *reducer_override; /* hook for invoking user-defined callbacks instead of save_global when pickling @@ -712,9 +652,7 @@ typedef struct UnpicklerObject { size_t memo_size; /* Capacity of the memo array */ size_t memo_len; /* Number of objects in the memo */ - PyObject *pers_func; /* persistent_load() method, can be NULL. */ - PyObject *pers_func_self; /* borrowed reference to self if pers_func - is an unbound method, NULL otherwise */ + PyObject *persistent_load; /* persistent_load() method, can be NULL. */ Py_buffer buffer; char *input_buffer; @@ -1167,8 +1105,7 @@ _Pickler_New(PickleState *st) } self->memo = memo; - self->pers_func = NULL; - self->pers_func_self = NULL; + self->persistent_id = NULL; self->dispatch_table = NULL; self->reducer_override = NULL; self->write = NULL; @@ -1662,8 +1599,7 @@ _Unpickler_New(PyObject *module) self->memo = memo; self->memo_size = MEMO_SIZE; self->memo_len = 0; - self->pers_func = NULL; - self->pers_func_self = NULL; + self->persistent_load = NULL; memset(&self->buffer, 0, sizeof(Py_buffer)); self->input_buffer = NULL; self->input_line = NULL; @@ -3929,7 +3865,7 @@ save_pers(PickleState *state, PicklerObject *self, PyObject *obj) const char persid_op = PERSID; const char binpersid_op = BINPERSID; - pid = call_method(self->pers_func, self->pers_func_self, obj); + pid = PyObject_CallOneArg(self->persistent_id, obj); if (pid == NULL) return -1; @@ -4317,7 +4253,7 @@ save(PickleState *st, PicklerObject *self, PyObject *obj, int pers_save) /* The extra pers_save argument is necessary to avoid calling save_pers() on its returned object. */ - if (!pers_save && self->pers_func) { + if (!pers_save && self->persistent_id) { /* save_pers() returns: -1 to signal an error; 0 if it did nothing successfully; @@ -4522,6 +4458,12 @@ save(PickleState *st, PicklerObject *self, PyObject *obj, int pers_save) return status; } +static PyObject * +persistent_id(PyObject *self, PyObject *obj) +{ + Py_RETURN_NONE; +} + static int dump(PickleState *state, PicklerObject *self, PyObject *obj) { @@ -4529,17 +4471,25 @@ dump(PickleState *state, PicklerObject *self, PyObject *obj) int status = -1; PyObject *tmp; - if (PyObject_GetOptionalAttr((PyObject *)self, &_Py_ID(reducer_override), - &tmp) < 0) { - goto error; + /* Cache the persistent_id method. */ + tmp = PyObject_GetAttr((PyObject *)self, &_Py_ID(persistent_id)); + if (tmp == NULL) { + goto error; } - /* Cache the reducer_override method, if it exists. */ - if (tmp != NULL) { - Py_XSETREF(self->reducer_override, tmp); + if (PyCFunction_Check(tmp) && + PyCFunction_GET_SELF(tmp) == (PyObject *)self && + PyCFunction_GET_FUNCTION(tmp) == persistent_id) + { + Py_CLEAR(tmp); } - else { - Py_CLEAR(self->reducer_override); + Py_XSETREF(self->persistent_id, tmp); + + /* Cache the reducer_override method, if it exists. 
*/ + if (PyObject_GetOptionalAttr((PyObject *)self, &_Py_ID(reducer_override), + &tmp) < 0) { + goto error; } + Py_XSETREF(self->reducer_override, tmp); if (self->proto >= 2) { char header[2]; @@ -4565,11 +4515,12 @@ dump(PickleState *state, PicklerObject *self, PyObject *obj) self->framing = 0; /* Break the reference cycle we generated at the beginning this function - * call when setting the reducer_override attribute of the Pickler instance - * to a bound method of the same instance. This is important as the Pickler - * instance holds a reference to each object it has pickled (through its - * memo): thus, these objects won't be garbage-collected as long as the - * Pickler itself is not collected. */ + * call when setting the persistent_id and the reducer_override attributes + * of the Pickler instance to a bound method of the same instance. + * This is important as the Pickler instance holds a reference to each + * object it has pickled (through its memo): thus, these objects won't + * be garbage-collected as long as the Pickler itself is not collected. */ + Py_CLEAR(self->persistent_id); Py_CLEAR(self->reducer_override); return status; } @@ -4662,6 +4613,8 @@ _pickle_Pickler___sizeof___impl(PicklerObject *self) } static struct PyMethodDef Pickler_methods[] = { + {"persistent_id", persistent_id, METH_O, + PyDoc_STR("persistent_id($self, obj, /)\n--\n\n")}, _PICKLE_PICKLER_DUMP_METHODDEF _PICKLE_PICKLER_CLEAR_MEMO_METHODDEF _PICKLE_PICKLER___SIZEOF___METHODDEF @@ -4673,7 +4626,7 @@ Pickler_clear(PicklerObject *self) { Py_CLEAR(self->output_buffer); Py_CLEAR(self->write); - Py_CLEAR(self->pers_func); + Py_CLEAR(self->persistent_id); Py_CLEAR(self->dispatch_table); Py_CLEAR(self->fast_memo); Py_CLEAR(self->reducer_override); @@ -4702,7 +4655,7 @@ Pickler_traverse(PicklerObject *self, visitproc visit, void *arg) { Py_VISIT(Py_TYPE(self)); Py_VISIT(self->write); - Py_VISIT(self->pers_func); + Py_VISIT(self->persistent_id); Py_VISIT(self->dispatch_table); Py_VISIT(self->fast_memo); Py_VISIT(self->reducer_override); @@ -4799,11 +4752,6 @@ _pickle_Pickler___init___impl(PicklerObject *self, PyObject *file, self->fast_nesting = 0; self->fast_memo = NULL; - if (init_method_ref((PyObject *)self, &_Py_ID(persistent_id), - &self->pers_func, &self->pers_func_self) < 0) - { - return -1; - } if (self->dispatch_table != NULL) { return 0; } @@ -5052,36 +5000,6 @@ Pickler_set_memo(PicklerObject *self, PyObject *obj, void *Py_UNUSED(ignored)) return -1; } -static PyObject * -Pickler_get_persid(PicklerObject *self, void *Py_UNUSED(ignored)) -{ - if (self->pers_func == NULL) { - PyErr_SetString(PyExc_AttributeError, "persistent_id"); - return NULL; - } - return reconstruct_method(self->pers_func, self->pers_func_self); -} - -static int -Pickler_set_persid(PicklerObject *self, PyObject *value, void *Py_UNUSED(ignored)) -{ - if (value == NULL) { - PyErr_SetString(PyExc_TypeError, - "attribute deletion is not supported"); - return -1; - } - if (!PyCallable_Check(value)) { - PyErr_SetString(PyExc_TypeError, - "persistent_id must be a callable taking one argument"); - return -1; - } - - self->pers_func_self = NULL; - Py_XSETREF(self->pers_func, Py_NewRef(value)); - - return 0; -} - static PyMemberDef Pickler_members[] = { {"bin", Py_T_INT, offsetof(PicklerObject, bin)}, {"fast", Py_T_INT, offsetof(PicklerObject, fast)}, @@ -5092,8 +5010,6 @@ static PyMemberDef Pickler_members[] = { static PyGetSetDef Pickler_getsets[] = { {"memo", (getter)Pickler_get_memo, (setter)Pickler_set_memo}, - {"persistent_id", 
(getter)Pickler_get_persid, - (setter)Pickler_set_persid}, {NULL} }; @@ -6056,36 +5972,28 @@ load_persid(PickleState *st, UnpicklerObject *self) Py_ssize_t len; char *s; - if (self->pers_func) { - if ((len = _Unpickler_Readline(st, self, &s)) < 0) - return -1; - if (len < 1) - return bad_readline(st); + if ((len = _Unpickler_Readline(st, self, &s)) < 0) + return -1; + if (len < 1) + return bad_readline(st); - pid = PyUnicode_DecodeASCII(s, len - 1, "strict"); - if (pid == NULL) { - if (PyErr_ExceptionMatches(PyExc_UnicodeDecodeError)) { - PyErr_SetString(st->UnpicklingError, - "persistent IDs in protocol 0 must be " - "ASCII strings"); - } - return -1; + pid = PyUnicode_DecodeASCII(s, len - 1, "strict"); + if (pid == NULL) { + if (PyErr_ExceptionMatches(PyExc_UnicodeDecodeError)) { + PyErr_SetString(st->UnpicklingError, + "persistent IDs in protocol 0 must be " + "ASCII strings"); } - - obj = call_method(self->pers_func, self->pers_func_self, pid); - Py_DECREF(pid); - if (obj == NULL) - return -1; - - PDATA_PUSH(self->stack, obj, -1); - return 0; - } - else { - PyErr_SetString(st->UnpicklingError, - "A load persistent id instruction was encountered, " - "but no persistent_load function was specified."); return -1; } + + obj = PyObject_CallOneArg(self->persistent_load, pid); + Py_DECREF(pid); + if (obj == NULL) + return -1; + + PDATA_PUSH(self->stack, obj, -1); + return 0; } static int @@ -6093,25 +6001,17 @@ load_binpersid(PickleState *st, UnpicklerObject *self) { PyObject *pid, *obj; - if (self->pers_func) { - PDATA_POP(st, self->stack, pid); - if (pid == NULL) - return -1; - - obj = call_method(self->pers_func, self->pers_func_self, pid); - Py_DECREF(pid); - if (obj == NULL) - return -1; + PDATA_POP(st, self->stack, pid); + if (pid == NULL) + return -1; - PDATA_PUSH(self->stack, obj, -1); - return 0; - } - else { - PyErr_SetString(st->UnpicklingError, - "A load persistent id instruction was encountered, " - "but no persistent_load function was specified."); + obj = PyObject_CallOneArg(self->persistent_load, pid); + Py_DECREF(pid); + if (obj == NULL) return -1; - } + + PDATA_PUSH(self->stack, obj, -1); + return 0; } static int @@ -6837,6 +6737,7 @@ static PyObject * load(PickleState *st, UnpicklerObject *self) { PyObject *value = NULL; + PyObject *tmp; char *s = NULL; self->num_marks = 0; @@ -6846,6 +6747,13 @@ load(PickleState *st, UnpicklerObject *self) if (Py_SIZE(self->stack)) Pdata_clear(self->stack, 0); + /* Cache the persistent_load method. */ + tmp = PyObject_GetAttr((PyObject *)self, &_Py_ID(persistent_load)); + if (tmp == NULL) { + goto error; + } + Py_XSETREF(self->persistent_load, tmp); + /* Convenient macros for the dispatch while-switch loop just below. 
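The _pickle hunks in this stretch drop the cached pers_func/pers_func_self pair in favour of real persistent_id()/persistent_load() methods: dump() and load() look the method up once on the instance, and default C implementations (a no-op persistent_id, and a persistent_load that raises UnpicklingError, defined just below) stand in when a subclass does not override them. Subclassing remains the supported way to hook persistent references; a self-contained sketch of that pattern (the Record class and STORE dict are illustrative, not from the diff):

    import io
    import pickle

    class Record:                  # stand-in for an externally stored object
        def __init__(self, key):
            self.key = key

    STORE = {1: Record(1), 2: Record(2)}

    class DBPickler(pickle.Pickler):
        def persistent_id(self, obj):
            # Returning None means "pickle normally"; any other value is
            # written out as a persistent ID instead of the object's state.
            if isinstance(obj, Record):
                return ("Record", obj.key)
            return None

    class DBUnpickler(pickle.Unpickler):
        def persistent_load(self, pid):
            tag, key = pid
            if tag != "Record":
                raise pickle.UnpicklingError("unsupported persistent id")
            return STORE[key]

    buf = io.BytesIO()
    DBPickler(buf).dump([STORE[1], STORE[2]])
    buf.seek(0)
    restored = DBUnpickler(buf).load()
    print(restored[0] is STORE[1])   # True: objects come back from the store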
*/ #define OP(opcode, load_func) \ case opcode: if (load_func(st, self) < 0) break; continue; @@ -6858,7 +6766,7 @@ load(PickleState *st, UnpicklerObject *self) if (PyErr_ExceptionMatches(st->UnpicklingError)) { PyErr_Format(PyExc_EOFError, "Ran out of input"); } - return NULL; + goto error; } switch ((enum opcode)s[0]) { @@ -6944,7 +6852,7 @@ load(PickleState *st, UnpicklerObject *self) PyErr_Format(st->UnpicklingError, "invalid load key, '\\x%02x'.", c); } - return NULL; + goto error; } } @@ -6952,14 +6860,41 @@ load(PickleState *st, UnpicklerObject *self) } if (PyErr_Occurred()) { - return NULL; + goto error; } if (_Unpickler_SkipConsumed(self) < 0) - return NULL; + goto error; + Py_CLEAR(self->persistent_load); PDATA_POP(st, self->stack, value); return value; + +error: + Py_CLEAR(self->persistent_load); + return NULL; +} + +/*[clinic input] + +_pickle.Unpickler.persistent_load + + cls: defining_class + pid: object + / + +[clinic start generated code]*/ + +static PyObject * +_pickle_Unpickler_persistent_load_impl(UnpicklerObject *self, + PyTypeObject *cls, PyObject *pid) +/*[clinic end generated code: output=9f4706f1330cb14d input=2f9554fae051276e]*/ +{ + PickleState *st = _Pickle_GetStateByClass(cls); + PyErr_SetString(st->UnpicklingError, + "A load persistent id instruction was encountered, " + "but no persistent_load function was specified."); + return NULL; } /*[clinic input] @@ -7128,6 +7063,7 @@ _pickle_Unpickler___sizeof___impl(UnpicklerObject *self) } static struct PyMethodDef Unpickler_methods[] = { + _PICKLE_UNPICKLER_PERSISTENT_LOAD_METHODDEF _PICKLE_UNPICKLER_LOAD_METHODDEF _PICKLE_UNPICKLER_FIND_CLASS_METHODDEF _PICKLE_UNPICKLER___SIZEOF___METHODDEF @@ -7142,7 +7078,7 @@ Unpickler_clear(UnpicklerObject *self) Py_CLEAR(self->read); Py_CLEAR(self->peek); Py_CLEAR(self->stack); - Py_CLEAR(self->pers_func); + Py_CLEAR(self->persistent_load); Py_CLEAR(self->buffers); if (self->buffer.buf != NULL) { PyBuffer_Release(&self->buffer); @@ -7181,7 +7117,7 @@ Unpickler_traverse(UnpicklerObject *self, visitproc visit, void *arg) Py_VISIT(self->read); Py_VISIT(self->peek); Py_VISIT(self->stack); - Py_VISIT(self->pers_func); + Py_VISIT(self->persistent_load); Py_VISIT(self->buffers); PyObject **memo = self->memo; if (memo) { @@ -7247,12 +7183,6 @@ _pickle_Unpickler___init___impl(UnpicklerObject *self, PyObject *file, self->fix_imports = fix_imports; - if (init_method_ref((PyObject *)self, &_Py_ID(persistent_load), - &self->pers_func, &self->pers_func_self) < 0) - { - return -1; - } - PyTypeObject *tp = Py_TYPE(self); PickleState *state = _Pickle_FindStateByType(tp); self->stack = (Pdata *)Pdata_New(state); @@ -7521,41 +7451,8 @@ Unpickler_set_memo(UnpicklerObject *self, PyObject *obj, void *Py_UNUSED(ignored return -1; } -static PyObject * -Unpickler_get_persload(UnpicklerObject *self, void *Py_UNUSED(ignored)) -{ - if (self->pers_func == NULL) { - PyErr_SetString(PyExc_AttributeError, "persistent_load"); - return NULL; - } - return reconstruct_method(self->pers_func, self->pers_func_self); -} - -static int -Unpickler_set_persload(UnpicklerObject *self, PyObject *value, void *Py_UNUSED(ignored)) -{ - if (value == NULL) { - PyErr_SetString(PyExc_TypeError, - "attribute deletion is not supported"); - return -1; - } - if (!PyCallable_Check(value)) { - PyErr_SetString(PyExc_TypeError, - "persistent_load must be a callable taking " - "one argument"); - return -1; - } - - self->pers_func_self = NULL; - Py_XSETREF(self->pers_func, Py_NewRef(value)); - - return 0; -} - static PyGetSetDef 
Unpickler_getsets[] = { {"memo", (getter)Unpickler_get_memo, (setter)Unpickler_set_memo}, - {"persistent_load", (getter)Unpickler_get_persload, - (setter)Unpickler_set_persload}, {NULL} }; diff --git a/Modules/_posixsubprocess.c b/Modules/_posixsubprocess.c index d0dd8f064e0395..aa1a300e4378dd 100644 --- a/Modules/_posixsubprocess.c +++ b/Modules/_posixsubprocess.c @@ -673,9 +673,10 @@ child_exec(char *const exec_array[], PyObject *preexec_fn, PyObject *preexec_fn_args_tuple) { - int i, saved_errno, reached_preexec = 0; + int i, saved_errno; PyObject *result; - const char* err_msg = ""; + /* Indicate to the parent that the error happened before exec(). */ + const char *err_msg = "noexec"; /* Buffer large enough to hold a hex integer. We can't malloc. */ char hex_errno[sizeof(saved_errno)*2+1]; @@ -735,8 +736,12 @@ child_exec(char *const exec_array[], /* We no longer manually close p2cread, c2pwrite, and errwrite here as * _close_open_fds takes care when it is not already non-inheritable. */ - if (cwd) - POSIX_CALL(chdir(cwd)); + if (cwd) { + if (chdir(cwd) == -1) { + err_msg = "noexec:chdir"; + goto error; + } + } if (child_umask >= 0) umask(child_umask); /* umask() always succeeds. */ @@ -784,7 +789,7 @@ child_exec(char *const exec_array[], #endif /* HAVE_SETREUID */ - reached_preexec = 1; + err_msg = ""; if (preexec_fn != Py_None && preexec_fn_args_tuple) { /* This is where the user has asked us to deadlock their program. */ result = PyObject_Call(preexec_fn, preexec_fn_args_tuple, NULL); @@ -842,16 +847,12 @@ child_exec(char *const exec_array[], } _Py_write_noraise(errpipe_write, cur, hex_errno + sizeof(hex_errno) - cur); _Py_write_noraise(errpipe_write, ":", 1); - if (!reached_preexec) { - /* Indicate to the parent that the error happened before exec(). */ - _Py_write_noraise(errpipe_write, "noexec", 6); - } /* We can't call strerror(saved_errno). It is not async signal safe. * The parent process will look the error message up. 
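The _posixsubprocess hunk above reworks how the child reports failures that happen before exec(): err_msg starts out as "noexec", a failed chdir(cwd) upgrades it to "noexec:chdir", and it is cleared only once the preexec_fn/exec stage is reached, letting the parent distinguish a bad working directory from a bad executable. One way to exercise that path from Python (the directory is an illustrative non-existent path; the exact exception raised is decided by the parent-side subprocess code, which is outside this hunk):

    import subprocess

    try:
        # The child fails in chdir(cwd) before reaching exec(), so it writes
        # "noexec:chdir" to the error pipe rather than a bare "noexec".
        subprocess.run(["/bin/true"], cwd="/no/such/directory/for/this/demo")
    except OSError as exc:
        print(type(exc).__name__, exc)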
*/ } else { _Py_write_noraise(errpipe_write, "SubprocessError:0:", 18); - _Py_write_noraise(errpipe_write, err_msg, strlen(err_msg)); } + _Py_write_noraise(errpipe_write, err_msg, strlen(err_msg)); } diff --git a/Modules/_queuemodule.c b/Modules/_queuemodule.c index 81a06cdb79a4f2..8fca3cdd0deb18 100644 --- a/Modules/_queuemodule.c +++ b/Modules/_queuemodule.c @@ -7,6 +7,7 @@ #include "pycore_moduleobject.h" // _PyModule_GetState() #include "pycore_time.h" // _PyTime_t +#include #include // offsetof() typedef struct { @@ -25,12 +26,167 @@ static struct PyModuleDef queuemodule; #define simplequeue_get_state_by_type(type) \ (simplequeue_get_state(PyType_GetModuleByDef(type, &queuemodule))) +static const Py_ssize_t INITIAL_RING_BUF_CAPACITY = 8; + +typedef struct { + // Where to place the next item + Py_ssize_t put_idx; + + // Where to get the next item + Py_ssize_t get_idx; + + PyObject **items; + + // Total number of items that may be stored + Py_ssize_t items_cap; + + // Number of items stored + Py_ssize_t num_items; +} RingBuf; + +static int +RingBuf_Init(RingBuf *buf) +{ + buf->put_idx = 0; + buf->get_idx = 0; + buf->items_cap = INITIAL_RING_BUF_CAPACITY; + buf->num_items = 0; + buf->items = PyMem_Calloc(buf->items_cap, sizeof(PyObject *)); + if (buf->items == NULL) { + PyErr_NoMemory(); + return -1; + } + return 0; +} + +static PyObject * +RingBuf_At(RingBuf *buf, Py_ssize_t idx) +{ + assert(idx >= 0 && idx < buf->num_items); + return buf->items[(buf->get_idx + idx) % buf->items_cap]; +} + +static void +RingBuf_Fini(RingBuf *buf) +{ + PyObject **items = buf->items; + Py_ssize_t num_items = buf->num_items; + Py_ssize_t cap = buf->items_cap; + Py_ssize_t idx = buf->get_idx; + buf->items = NULL; + buf->put_idx = 0; + buf->get_idx = 0; + buf->num_items = 0; + buf->items_cap = 0; + for (Py_ssize_t n = num_items; n > 0; idx = (idx + 1) % cap, n--) { + Py_DECREF(items[idx]); + } + PyMem_Free(items); +} + +// Resize the underlying items array of buf to the new capacity and arrange +// the items contiguously in the new items array. +// +// Returns -1 on allocation failure or 0 on success. +static int +resize_ringbuf(RingBuf *buf, Py_ssize_t capacity) +{ + Py_ssize_t new_capacity = Py_MAX(INITIAL_RING_BUF_CAPACITY, capacity); + if (new_capacity == buf->items_cap) { + return 0; + } + assert(buf->num_items <= new_capacity); + + PyObject **new_items = PyMem_Calloc(new_capacity, sizeof(PyObject *)); + if (new_items == NULL) { + return -1; + } + + // Copy the "tail" of the old items array. This corresponds to "head" of + // the abstract ring buffer. + Py_ssize_t tail_size = + Py_MIN(buf->num_items, buf->items_cap - buf->get_idx); + if (tail_size > 0) { + memcpy(new_items, buf->items + buf->get_idx, + tail_size * sizeof(PyObject *)); + } + + // Copy the "head" of the old items array, if any. This corresponds to the + // "tail" of the abstract ring buffer. + Py_ssize_t head_size = buf->num_items - tail_size; + if (head_size > 0) { + memcpy(new_items + tail_size, buf->items, + head_size * sizeof(PyObject *)); + } + + PyMem_Free(buf->items); + buf->items = new_items; + buf->items_cap = new_capacity; + buf->get_idx = 0; + buf->put_idx = buf->num_items; + + return 0; +} + +// Returns a strong reference from the head of the buffer. +static PyObject * +RingBuf_Get(RingBuf *buf) +{ + assert(buf->num_items > 0); + + if (buf->num_items < (buf->items_cap / 4)) { + // Items is less than 25% occupied, shrink it by 50%. 
This allows for + // growth without immediately needing to resize the underlying items + // array. + // + // It's safe it ignore allocation failures here; shrinking is an + // optimization that isn't required for correctness. + (void)resize_ringbuf(buf, buf->items_cap / 2); + } + + PyObject *item = buf->items[buf->get_idx]; + buf->items[buf->get_idx] = NULL; + buf->get_idx = (buf->get_idx + 1) % buf->items_cap; + buf->num_items--; + return item; +} + +// Returns 0 on success or -1 if the buffer failed to grow +static int +RingBuf_Put(RingBuf *buf, PyObject *item) +{ + assert(buf->num_items <= buf->items_cap); + + if (buf->num_items == buf->items_cap) { + // Buffer is full, grow it. + if (resize_ringbuf(buf, buf->items_cap * 2) < 0) { + PyErr_NoMemory(); + return -1; + } + } + buf->items[buf->put_idx] = Py_NewRef(item); + buf->put_idx = (buf->put_idx + 1) % buf->items_cap; + buf->num_items++; + return 0; +} + +static Py_ssize_t +RingBuf_Len(RingBuf *buf) +{ + return buf->num_items; +} + +static bool +RingBuf_IsEmpty(RingBuf *buf) +{ + return buf->num_items == 0; +} + typedef struct { PyObject_HEAD PyThread_type_lock lock; int locked; - PyObject *lst; - Py_ssize_t lst_pos; + RingBuf buf; PyObject *weakreflist; } simplequeueobject; @@ -43,7 +199,7 @@ class _queue.SimpleQueue "simplequeueobject *" "simplequeue_get_state_by_type(ty static int simplequeue_clear(simplequeueobject *self) { - Py_CLEAR(self->lst); + RingBuf_Fini(&self->buf); return 0; } @@ -69,7 +225,10 @@ simplequeue_dealloc(simplequeueobject *self) static int simplequeue_traverse(simplequeueobject *self, visitproc visit, void *arg) { - Py_VISIT(self->lst); + RingBuf *buf = &self->buf; + for (Py_ssize_t i = 0, num_items = buf->num_items; i < num_items; i++) { + Py_VISIT(RingBuf_At(buf, i)); + } Py_VISIT(Py_TYPE(self)); return 0; } @@ -90,15 +249,13 @@ simplequeue_new_impl(PyTypeObject *type) self = (simplequeueobject *) type->tp_alloc(type, 0); if (self != NULL) { self->weakreflist = NULL; - self->lst = PyList_New(0); self->lock = PyThread_allocate_lock(); - self->lst_pos = 0; if (self->lock == NULL) { Py_DECREF(self); PyErr_SetString(PyExc_MemoryError, "can't allocate lock"); return NULL; } - if (self->lst == NULL) { + if (RingBuf_Init(&self->buf) < 0) { Py_DECREF(self); return NULL; } @@ -126,7 +283,7 @@ _queue_SimpleQueue_put_impl(simplequeueobject *self, PyObject *item, /*[clinic end generated code: output=4333136e88f90d8b input=6e601fa707a782d5]*/ { /* BEGIN GIL-protected critical section */ - if (PyList_Append(self->lst, item) < 0) + if (RingBuf_Put(&self->buf, item) < 0) return NULL; if (self->locked) { /* A get() may be waiting, wake it up */ @@ -155,33 +312,6 @@ _queue_SimpleQueue_put_nowait_impl(simplequeueobject *self, PyObject *item) return _queue_SimpleQueue_put_impl(self, item, 0, Py_None); } -static PyObject * -simplequeue_pop_item(simplequeueobject *self) -{ - Py_ssize_t count, n; - PyObject *item; - - n = PyList_GET_SIZE(self->lst); - assert(self->lst_pos < n); - - item = PyList_GET_ITEM(self->lst, self->lst_pos); - Py_INCREF(Py_None); - PyList_SET_ITEM(self->lst, self->lst_pos, Py_None); - self->lst_pos += 1; - count = n - self->lst_pos; - if (self->lst_pos > count) { - /* The list is more than 50% empty, reclaim space at the beginning */ - if (PyList_SetSlice(self->lst, 0, self->lst_pos, NULL)) { - /* Undo pop */ - self->lst_pos -= 1; - PyList_SET_ITEM(self->lst, self->lst_pos, item); - return NULL; - } - self->lst_pos = 0; - } - return item; -} - /*[clinic input] _queue.SimpleQueue.get @@ -249,7 +379,7 @@ 
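The new RingBuf that backs SimpleQueue replaces the old list-plus-position scheme: put() and get() advance indices modulo the capacity, the backing array doubles when it fills up, it shrinks by half once occupancy drops below 25% (shrink failures are deliberately ignored, as the comment above notes), and every resize lays the live items out contiguously from index 0. A compact Python sketch of that algorithm (an illustration, not the C implementation itself):

    class RingBuf:
        INITIAL_CAPACITY = 8

        def __init__(self):
            self.items = [None] * self.INITIAL_CAPACITY
            self.get_idx = self.put_idx = self.num_items = 0

        def _resize(self, capacity):
            capacity = max(self.INITIAL_CAPACITY, capacity)
            new_items = [None] * capacity
            for i in range(self.num_items):    # lay items out contiguously
                new_items[i] = self.items[(self.get_idx + i) % len(self.items)]
            self.items = new_items
            self.get_idx, self.put_idx = 0, self.num_items

        def put(self, item):
            if self.num_items == len(self.items):      # full: double capacity
                self._resize(len(self.items) * 2)
            self.items[self.put_idx] = item
            self.put_idx = (self.put_idx + 1) % len(self.items)
            self.num_items += 1

        def get(self):
            assert self.num_items > 0
            if self.num_items < len(self.items) // 4:  # under 25%: halve it
                self._resize(len(self.items) // 2)
            item, self.items[self.get_idx] = self.items[self.get_idx], None
            self.get_idx = (self.get_idx + 1) % len(self.items)
            self.num_items -= 1
            return item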
_queue_SimpleQueue_get_impl(simplequeueobject *self, PyTypeObject *cls, * So we simply try to acquire the lock in a loop, until the condition * (queue non-empty) becomes true. */ - while (self->lst_pos == PyList_GET_SIZE(self->lst)) { + while (RingBuf_IsEmpty(&self->buf)) { /* First a simple non-blocking try without releasing the GIL */ r = PyThread_acquire_lock_timed(self->lock, 0, 0); if (r == PY_LOCK_FAILURE && microseconds != 0) { @@ -279,8 +409,7 @@ _queue_SimpleQueue_get_impl(simplequeueobject *self, PyTypeObject *cls, } /* BEGIN GIL-protected critical section */ - assert(self->lst_pos < PyList_GET_SIZE(self->lst)); - item = simplequeue_pop_item(self); + item = RingBuf_Get(&self->buf); if (self->locked) { PyThread_release_lock(self->lock); self->locked = 0; @@ -320,7 +449,7 @@ static int _queue_SimpleQueue_empty_impl(simplequeueobject *self) /*[clinic end generated code: output=1a02a1b87c0ef838 input=1a98431c45fd66f9]*/ { - return self->lst_pos == PyList_GET_SIZE(self->lst); + return RingBuf_IsEmpty(&self->buf); } /*[clinic input] @@ -333,7 +462,7 @@ static Py_ssize_t _queue_SimpleQueue_qsize_impl(simplequeueobject *self) /*[clinic end generated code: output=f9dcd9d0a90e121e input=7a74852b407868a1]*/ { - return PyList_GET_SIZE(self->lst) - self->lst_pos; + return RingBuf_Len(&self->buf); } static int diff --git a/Modules/_ssl.c b/Modules/_ssl.c index 04c9f7daadf573..bc302909424227 100644 --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -125,10 +125,10 @@ static void _PySSLFixErrno(void) { #include "_ssl_data_31.h" #elif (OPENSSL_VERSION_NUMBER >= 0x30000000L) #include "_ssl_data_300.h" -#elif (OPENSSL_VERSION_NUMBER >= 0x10101000L) && !defined(LIBRESSL_VERSION_NUMBER) +#elif (OPENSSL_VERSION_NUMBER >= 0x10101000L) #include "_ssl_data_111.h" #else -#include "_ssl_data.h" +#error Unsupported OpenSSL version #endif /* OpenSSL API 1.1.0+ does not include version methods */ @@ -867,7 +867,7 @@ newPySSLSocket(PySSLContext *sslctx, PySocketSockObject *sock, } /* bpo43522 and OpenSSL < 1.1.1l: copy hostflags manually */ -#if !defined(LIBRESSL_VERSION_NUMBER) && OPENSSL_VERSION < 0x101010cf +#if OPENSSL_VERSION < 0x101010cf X509_VERIFY_PARAM *ssl_params = SSL_get0_param(self->ssl); X509_VERIFY_PARAM_set_hostflags(ssl_params, sslctx->hostflags); #endif diff --git a/Modules/_ssl_data.h b/Modules/_ssl_data.h deleted file mode 100644 index 8f2994f52dfef8..00000000000000 --- a/Modules/_ssl_data.h +++ /dev/null @@ -1,6323 +0,0 @@ -/* File generated by Tools/ssl/make_ssl_data.py */ -/* Generated on 2020-04-13T21:45:54.559159 */ - -static struct py_ssl_library_code library_codes[] = { -#ifdef ERR_LIB_ASN1 - {"ASN1", ERR_LIB_ASN1}, -#endif -#ifdef ERR_LIB_ASYNC - {"ASYNC", ERR_LIB_ASYNC}, -#endif -#ifdef ERR_LIB_BIO - {"BIO", ERR_LIB_BIO}, -#endif -#ifdef ERR_LIB_BN - {"BN", ERR_LIB_BN}, -#endif -#ifdef ERR_LIB_CMS - {"CMS", ERR_LIB_CMS}, -#endif -#ifdef ERR_LIB_COMP - {"COMP", ERR_LIB_COMP}, -#endif -#ifdef ERR_LIB_CONF - {"CONF", ERR_LIB_CONF}, -#endif -#ifdef ERR_LIB_CRYPTO - {"CRYPTO", ERR_LIB_CRYPTO}, -#endif -#ifdef ERR_LIB_CT - {"CT", ERR_LIB_CT}, -#endif -#ifdef ERR_LIB_DH - {"DH", ERR_LIB_DH}, -#endif -#ifdef ERR_LIB_DSA - {"DSA", ERR_LIB_DSA}, -#endif -#ifdef ERR_LIB_EC - {"EC", ERR_LIB_EC}, -#endif -#ifdef ERR_LIB_ENGINE - {"ENGINE", ERR_LIB_ENGINE}, -#endif -#ifdef ERR_LIB_EVP - {"EVP", ERR_LIB_EVP}, -#endif -#ifdef ERR_LIB_KDF - {"KDF", ERR_LIB_KDF}, -#endif -#ifdef ERR_LIB_OCSP - {"OCSP", ERR_LIB_OCSP}, -#endif -#ifdef ERR_LIB_PEM - {"PEM", ERR_LIB_PEM}, -#endif -#ifdef ERR_LIB_PKCS12 - 
{"PKCS12", ERR_LIB_PKCS12}, -#endif -#ifdef ERR_LIB_PKCS7 - {"PKCS7", ERR_LIB_PKCS7}, -#endif -#ifdef ERR_LIB_RAND - {"RAND", ERR_LIB_RAND}, -#endif -#ifdef ERR_LIB_RSA - {"RSA", ERR_LIB_RSA}, -#endif -#ifdef ERR_LIB_SSL - {"SSL", ERR_LIB_SSL}, -#endif -#ifdef ERR_LIB_TS - {"TS", ERR_LIB_TS}, -#endif -#ifdef ERR_LIB_UI - {"UI", ERR_LIB_UI}, -#endif -#ifdef ERR_LIB_X509 - {"X509", ERR_LIB_X509}, -#endif -#ifdef ERR_LIB_X509V3 - {"X509V3", ERR_LIB_X509V3}, -#endif - { NULL } -}; - -static struct py_ssl_error_code error_codes[] = { - #ifdef ASN1_R_ADDING_OBJECT - {"ADDING_OBJECT", ERR_LIB_ASN1, ASN1_R_ADDING_OBJECT}, - #else - {"ADDING_OBJECT", 13, 171}, - #endif - #ifdef ASN1_R_ASN1_PARSE_ERROR - {"ASN1_PARSE_ERROR", ERR_LIB_ASN1, ASN1_R_ASN1_PARSE_ERROR}, - #else - {"ASN1_PARSE_ERROR", 13, 203}, - #endif - #ifdef ASN1_R_ASN1_SIG_PARSE_ERROR - {"ASN1_SIG_PARSE_ERROR", ERR_LIB_ASN1, ASN1_R_ASN1_SIG_PARSE_ERROR}, - #else - {"ASN1_SIG_PARSE_ERROR", 13, 204}, - #endif - #ifdef ASN1_R_AUX_ERROR - {"AUX_ERROR", ERR_LIB_ASN1, ASN1_R_AUX_ERROR}, - #else - {"AUX_ERROR", 13, 100}, - #endif - #ifdef ASN1_R_BAD_OBJECT_HEADER - {"BAD_OBJECT_HEADER", ERR_LIB_ASN1, ASN1_R_BAD_OBJECT_HEADER}, - #else - {"BAD_OBJECT_HEADER", 13, 102}, - #endif - #ifdef ASN1_R_BMPSTRING_IS_WRONG_LENGTH - {"BMPSTRING_IS_WRONG_LENGTH", ERR_LIB_ASN1, ASN1_R_BMPSTRING_IS_WRONG_LENGTH}, - #else - {"BMPSTRING_IS_WRONG_LENGTH", 13, 214}, - #endif - #ifdef ASN1_R_BN_LIB - {"BN_LIB", ERR_LIB_ASN1, ASN1_R_BN_LIB}, - #else - {"BN_LIB", 13, 105}, - #endif - #ifdef ASN1_R_BOOLEAN_IS_WRONG_LENGTH - {"BOOLEAN_IS_WRONG_LENGTH", ERR_LIB_ASN1, ASN1_R_BOOLEAN_IS_WRONG_LENGTH}, - #else - {"BOOLEAN_IS_WRONG_LENGTH", 13, 106}, - #endif - #ifdef ASN1_R_BUFFER_TOO_SMALL - {"BUFFER_TOO_SMALL", ERR_LIB_ASN1, ASN1_R_BUFFER_TOO_SMALL}, - #else - {"BUFFER_TOO_SMALL", 13, 107}, - #endif - #ifdef ASN1_R_CIPHER_HAS_NO_OBJECT_IDENTIFIER - {"CIPHER_HAS_NO_OBJECT_IDENTIFIER", ERR_LIB_ASN1, ASN1_R_CIPHER_HAS_NO_OBJECT_IDENTIFIER}, - #else - {"CIPHER_HAS_NO_OBJECT_IDENTIFIER", 13, 108}, - #endif - #ifdef ASN1_R_CONTEXT_NOT_INITIALISED - {"CONTEXT_NOT_INITIALISED", ERR_LIB_ASN1, ASN1_R_CONTEXT_NOT_INITIALISED}, - #else - {"CONTEXT_NOT_INITIALISED", 13, 217}, - #endif - #ifdef ASN1_R_DATA_IS_WRONG - {"DATA_IS_WRONG", ERR_LIB_ASN1, ASN1_R_DATA_IS_WRONG}, - #else - {"DATA_IS_WRONG", 13, 109}, - #endif - #ifdef ASN1_R_DECODE_ERROR - {"DECODE_ERROR", ERR_LIB_ASN1, ASN1_R_DECODE_ERROR}, - #else - {"DECODE_ERROR", 13, 110}, - #endif - #ifdef ASN1_R_DEPTH_EXCEEDED - {"DEPTH_EXCEEDED", ERR_LIB_ASN1, ASN1_R_DEPTH_EXCEEDED}, - #else - {"DEPTH_EXCEEDED", 13, 174}, - #endif - #ifdef ASN1_R_DIGEST_AND_KEY_TYPE_NOT_SUPPORTED - {"DIGEST_AND_KEY_TYPE_NOT_SUPPORTED", ERR_LIB_ASN1, ASN1_R_DIGEST_AND_KEY_TYPE_NOT_SUPPORTED}, - #else - {"DIGEST_AND_KEY_TYPE_NOT_SUPPORTED", 13, 198}, - #endif - #ifdef ASN1_R_ENCODE_ERROR - {"ENCODE_ERROR", ERR_LIB_ASN1, ASN1_R_ENCODE_ERROR}, - #else - {"ENCODE_ERROR", 13, 112}, - #endif - #ifdef ASN1_R_ERROR_GETTING_TIME - {"ERROR_GETTING_TIME", ERR_LIB_ASN1, ASN1_R_ERROR_GETTING_TIME}, - #else - {"ERROR_GETTING_TIME", 13, 173}, - #endif - #ifdef ASN1_R_ERROR_LOADING_SECTION - {"ERROR_LOADING_SECTION", ERR_LIB_ASN1, ASN1_R_ERROR_LOADING_SECTION}, - #else - {"ERROR_LOADING_SECTION", 13, 172}, - #endif - #ifdef ASN1_R_ERROR_SETTING_CIPHER_PARAMS - {"ERROR_SETTING_CIPHER_PARAMS", ERR_LIB_ASN1, ASN1_R_ERROR_SETTING_CIPHER_PARAMS}, - #else - {"ERROR_SETTING_CIPHER_PARAMS", 13, 114}, - #endif - #ifdef ASN1_R_EXPECTING_AN_INTEGER - {"EXPECTING_AN_INTEGER", 
ERR_LIB_ASN1, ASN1_R_EXPECTING_AN_INTEGER}, - #else - {"EXPECTING_AN_INTEGER", 13, 115}, - #endif - #ifdef ASN1_R_EXPECTING_AN_OBJECT - {"EXPECTING_AN_OBJECT", ERR_LIB_ASN1, ASN1_R_EXPECTING_AN_OBJECT}, - #else - {"EXPECTING_AN_OBJECT", 13, 116}, - #endif - #ifdef ASN1_R_EXPLICIT_LENGTH_MISMATCH - {"EXPLICIT_LENGTH_MISMATCH", ERR_LIB_ASN1, ASN1_R_EXPLICIT_LENGTH_MISMATCH}, - #else - {"EXPLICIT_LENGTH_MISMATCH", 13, 119}, - #endif - #ifdef ASN1_R_EXPLICIT_TAG_NOT_CONSTRUCTED - {"EXPLICIT_TAG_NOT_CONSTRUCTED", ERR_LIB_ASN1, ASN1_R_EXPLICIT_TAG_NOT_CONSTRUCTED}, - #else - {"EXPLICIT_TAG_NOT_CONSTRUCTED", 13, 120}, - #endif - #ifdef ASN1_R_FIELD_MISSING - {"FIELD_MISSING", ERR_LIB_ASN1, ASN1_R_FIELD_MISSING}, - #else - {"FIELD_MISSING", 13, 121}, - #endif - #ifdef ASN1_R_FIRST_NUM_TOO_LARGE - {"FIRST_NUM_TOO_LARGE", ERR_LIB_ASN1, ASN1_R_FIRST_NUM_TOO_LARGE}, - #else - {"FIRST_NUM_TOO_LARGE", 13, 122}, - #endif - #ifdef ASN1_R_HEADER_TOO_LONG - {"HEADER_TOO_LONG", ERR_LIB_ASN1, ASN1_R_HEADER_TOO_LONG}, - #else - {"HEADER_TOO_LONG", 13, 123}, - #endif - #ifdef ASN1_R_ILLEGAL_BITSTRING_FORMAT - {"ILLEGAL_BITSTRING_FORMAT", ERR_LIB_ASN1, ASN1_R_ILLEGAL_BITSTRING_FORMAT}, - #else - {"ILLEGAL_BITSTRING_FORMAT", 13, 175}, - #endif - #ifdef ASN1_R_ILLEGAL_BOOLEAN - {"ILLEGAL_BOOLEAN", ERR_LIB_ASN1, ASN1_R_ILLEGAL_BOOLEAN}, - #else - {"ILLEGAL_BOOLEAN", 13, 176}, - #endif - #ifdef ASN1_R_ILLEGAL_CHARACTERS - {"ILLEGAL_CHARACTERS", ERR_LIB_ASN1, ASN1_R_ILLEGAL_CHARACTERS}, - #else - {"ILLEGAL_CHARACTERS", 13, 124}, - #endif - #ifdef ASN1_R_ILLEGAL_FORMAT - {"ILLEGAL_FORMAT", ERR_LIB_ASN1, ASN1_R_ILLEGAL_FORMAT}, - #else - {"ILLEGAL_FORMAT", 13, 177}, - #endif - #ifdef ASN1_R_ILLEGAL_HEX - {"ILLEGAL_HEX", ERR_LIB_ASN1, ASN1_R_ILLEGAL_HEX}, - #else - {"ILLEGAL_HEX", 13, 178}, - #endif - #ifdef ASN1_R_ILLEGAL_IMPLICIT_TAG - {"ILLEGAL_IMPLICIT_TAG", ERR_LIB_ASN1, ASN1_R_ILLEGAL_IMPLICIT_TAG}, - #else - {"ILLEGAL_IMPLICIT_TAG", 13, 179}, - #endif - #ifdef ASN1_R_ILLEGAL_INTEGER - {"ILLEGAL_INTEGER", ERR_LIB_ASN1, ASN1_R_ILLEGAL_INTEGER}, - #else - {"ILLEGAL_INTEGER", 13, 180}, - #endif - #ifdef ASN1_R_ILLEGAL_NEGATIVE_VALUE - {"ILLEGAL_NEGATIVE_VALUE", ERR_LIB_ASN1, ASN1_R_ILLEGAL_NEGATIVE_VALUE}, - #else - {"ILLEGAL_NEGATIVE_VALUE", 13, 226}, - #endif - #ifdef ASN1_R_ILLEGAL_NESTED_TAGGING - {"ILLEGAL_NESTED_TAGGING", ERR_LIB_ASN1, ASN1_R_ILLEGAL_NESTED_TAGGING}, - #else - {"ILLEGAL_NESTED_TAGGING", 13, 181}, - #endif - #ifdef ASN1_R_ILLEGAL_NULL - {"ILLEGAL_NULL", ERR_LIB_ASN1, ASN1_R_ILLEGAL_NULL}, - #else - {"ILLEGAL_NULL", 13, 125}, - #endif - #ifdef ASN1_R_ILLEGAL_NULL_VALUE - {"ILLEGAL_NULL_VALUE", ERR_LIB_ASN1, ASN1_R_ILLEGAL_NULL_VALUE}, - #else - {"ILLEGAL_NULL_VALUE", 13, 182}, - #endif - #ifdef ASN1_R_ILLEGAL_OBJECT - {"ILLEGAL_OBJECT", ERR_LIB_ASN1, ASN1_R_ILLEGAL_OBJECT}, - #else - {"ILLEGAL_OBJECT", 13, 183}, - #endif - #ifdef ASN1_R_ILLEGAL_OPTIONAL_ANY - {"ILLEGAL_OPTIONAL_ANY", ERR_LIB_ASN1, ASN1_R_ILLEGAL_OPTIONAL_ANY}, - #else - {"ILLEGAL_OPTIONAL_ANY", 13, 126}, - #endif - #ifdef ASN1_R_ILLEGAL_OPTIONS_ON_ITEM_TEMPLATE - {"ILLEGAL_OPTIONS_ON_ITEM_TEMPLATE", ERR_LIB_ASN1, ASN1_R_ILLEGAL_OPTIONS_ON_ITEM_TEMPLATE}, - #else - {"ILLEGAL_OPTIONS_ON_ITEM_TEMPLATE", 13, 170}, - #endif - #ifdef ASN1_R_ILLEGAL_PADDING - {"ILLEGAL_PADDING", ERR_LIB_ASN1, ASN1_R_ILLEGAL_PADDING}, - #else - {"ILLEGAL_PADDING", 13, 221}, - #endif - #ifdef ASN1_R_ILLEGAL_TAGGED_ANY - {"ILLEGAL_TAGGED_ANY", ERR_LIB_ASN1, ASN1_R_ILLEGAL_TAGGED_ANY}, - #else - {"ILLEGAL_TAGGED_ANY", 13, 127}, - #endif - #ifdef 
ASN1_R_ILLEGAL_TIME_VALUE - {"ILLEGAL_TIME_VALUE", ERR_LIB_ASN1, ASN1_R_ILLEGAL_TIME_VALUE}, - #else - {"ILLEGAL_TIME_VALUE", 13, 184}, - #endif - #ifdef ASN1_R_ILLEGAL_ZERO_CONTENT - {"ILLEGAL_ZERO_CONTENT", ERR_LIB_ASN1, ASN1_R_ILLEGAL_ZERO_CONTENT}, - #else - {"ILLEGAL_ZERO_CONTENT", 13, 222}, - #endif - #ifdef ASN1_R_INTEGER_NOT_ASCII_FORMAT - {"INTEGER_NOT_ASCII_FORMAT", ERR_LIB_ASN1, ASN1_R_INTEGER_NOT_ASCII_FORMAT}, - #else - {"INTEGER_NOT_ASCII_FORMAT", 13, 185}, - #endif - #ifdef ASN1_R_INTEGER_TOO_LARGE_FOR_LONG - {"INTEGER_TOO_LARGE_FOR_LONG", ERR_LIB_ASN1, ASN1_R_INTEGER_TOO_LARGE_FOR_LONG}, - #else - {"INTEGER_TOO_LARGE_FOR_LONG", 13, 128}, - #endif - #ifdef ASN1_R_INVALID_BIT_STRING_BITS_LEFT - {"INVALID_BIT_STRING_BITS_LEFT", ERR_LIB_ASN1, ASN1_R_INVALID_BIT_STRING_BITS_LEFT}, - #else - {"INVALID_BIT_STRING_BITS_LEFT", 13, 220}, - #endif - #ifdef ASN1_R_INVALID_BMPSTRING_LENGTH - {"INVALID_BMPSTRING_LENGTH", ERR_LIB_ASN1, ASN1_R_INVALID_BMPSTRING_LENGTH}, - #else - {"INVALID_BMPSTRING_LENGTH", 13, 129}, - #endif - #ifdef ASN1_R_INVALID_DIGIT - {"INVALID_DIGIT", ERR_LIB_ASN1, ASN1_R_INVALID_DIGIT}, - #else - {"INVALID_DIGIT", 13, 130}, - #endif - #ifdef ASN1_R_INVALID_MIME_TYPE - {"INVALID_MIME_TYPE", ERR_LIB_ASN1, ASN1_R_INVALID_MIME_TYPE}, - #else - {"INVALID_MIME_TYPE", 13, 205}, - #endif - #ifdef ASN1_R_INVALID_MODIFIER - {"INVALID_MODIFIER", ERR_LIB_ASN1, ASN1_R_INVALID_MODIFIER}, - #else - {"INVALID_MODIFIER", 13, 186}, - #endif - #ifdef ASN1_R_INVALID_NUMBER - {"INVALID_NUMBER", ERR_LIB_ASN1, ASN1_R_INVALID_NUMBER}, - #else - {"INVALID_NUMBER", 13, 187}, - #endif - #ifdef ASN1_R_INVALID_OBJECT_ENCODING - {"INVALID_OBJECT_ENCODING", ERR_LIB_ASN1, ASN1_R_INVALID_OBJECT_ENCODING}, - #else - {"INVALID_OBJECT_ENCODING", 13, 216}, - #endif - #ifdef ASN1_R_INVALID_SCRYPT_PARAMETERS - {"INVALID_SCRYPT_PARAMETERS", ERR_LIB_ASN1, ASN1_R_INVALID_SCRYPT_PARAMETERS}, - #else - {"INVALID_SCRYPT_PARAMETERS", 13, 227}, - #endif - #ifdef ASN1_R_INVALID_SEPARATOR - {"INVALID_SEPARATOR", ERR_LIB_ASN1, ASN1_R_INVALID_SEPARATOR}, - #else - {"INVALID_SEPARATOR", 13, 131}, - #endif - #ifdef ASN1_R_INVALID_STRING_TABLE_VALUE - {"INVALID_STRING_TABLE_VALUE", ERR_LIB_ASN1, ASN1_R_INVALID_STRING_TABLE_VALUE}, - #else - {"INVALID_STRING_TABLE_VALUE", 13, 218}, - #endif - #ifdef ASN1_R_INVALID_UNIVERSALSTRING_LENGTH - {"INVALID_UNIVERSALSTRING_LENGTH", ERR_LIB_ASN1, ASN1_R_INVALID_UNIVERSALSTRING_LENGTH}, - #else - {"INVALID_UNIVERSALSTRING_LENGTH", 13, 133}, - #endif - #ifdef ASN1_R_INVALID_UTF8STRING - {"INVALID_UTF8STRING", ERR_LIB_ASN1, ASN1_R_INVALID_UTF8STRING}, - #else - {"INVALID_UTF8STRING", 13, 134}, - #endif - #ifdef ASN1_R_INVALID_VALUE - {"INVALID_VALUE", ERR_LIB_ASN1, ASN1_R_INVALID_VALUE}, - #else - {"INVALID_VALUE", 13, 219}, - #endif - #ifdef ASN1_R_LIST_ERROR - {"LIST_ERROR", ERR_LIB_ASN1, ASN1_R_LIST_ERROR}, - #else - {"LIST_ERROR", 13, 188}, - #endif - #ifdef ASN1_R_MIME_NO_CONTENT_TYPE - {"MIME_NO_CONTENT_TYPE", ERR_LIB_ASN1, ASN1_R_MIME_NO_CONTENT_TYPE}, - #else - {"MIME_NO_CONTENT_TYPE", 13, 206}, - #endif - #ifdef ASN1_R_MIME_PARSE_ERROR - {"MIME_PARSE_ERROR", ERR_LIB_ASN1, ASN1_R_MIME_PARSE_ERROR}, - #else - {"MIME_PARSE_ERROR", 13, 207}, - #endif - #ifdef ASN1_R_MIME_SIG_PARSE_ERROR - {"MIME_SIG_PARSE_ERROR", ERR_LIB_ASN1, ASN1_R_MIME_SIG_PARSE_ERROR}, - #else - {"MIME_SIG_PARSE_ERROR", 13, 208}, - #endif - #ifdef ASN1_R_MISSING_EOC - {"MISSING_EOC", ERR_LIB_ASN1, ASN1_R_MISSING_EOC}, - #else - {"MISSING_EOC", 13, 137}, - #endif - #ifdef ASN1_R_MISSING_SECOND_NUMBER - 
{"MISSING_SECOND_NUMBER", ERR_LIB_ASN1, ASN1_R_MISSING_SECOND_NUMBER}, - #else - {"MISSING_SECOND_NUMBER", 13, 138}, - #endif - #ifdef ASN1_R_MISSING_VALUE - {"MISSING_VALUE", ERR_LIB_ASN1, ASN1_R_MISSING_VALUE}, - #else - {"MISSING_VALUE", 13, 189}, - #endif - #ifdef ASN1_R_MSTRING_NOT_UNIVERSAL - {"MSTRING_NOT_UNIVERSAL", ERR_LIB_ASN1, ASN1_R_MSTRING_NOT_UNIVERSAL}, - #else - {"MSTRING_NOT_UNIVERSAL", 13, 139}, - #endif - #ifdef ASN1_R_MSTRING_WRONG_TAG - {"MSTRING_WRONG_TAG", ERR_LIB_ASN1, ASN1_R_MSTRING_WRONG_TAG}, - #else - {"MSTRING_WRONG_TAG", 13, 140}, - #endif - #ifdef ASN1_R_NESTED_ASN1_STRING - {"NESTED_ASN1_STRING", ERR_LIB_ASN1, ASN1_R_NESTED_ASN1_STRING}, - #else - {"NESTED_ASN1_STRING", 13, 197}, - #endif - #ifdef ASN1_R_NESTED_TOO_DEEP - {"NESTED_TOO_DEEP", ERR_LIB_ASN1, ASN1_R_NESTED_TOO_DEEP}, - #else - {"NESTED_TOO_DEEP", 13, 201}, - #endif - #ifdef ASN1_R_NON_HEX_CHARACTERS - {"NON_HEX_CHARACTERS", ERR_LIB_ASN1, ASN1_R_NON_HEX_CHARACTERS}, - #else - {"NON_HEX_CHARACTERS", 13, 141}, - #endif - #ifdef ASN1_R_NOT_ASCII_FORMAT - {"NOT_ASCII_FORMAT", ERR_LIB_ASN1, ASN1_R_NOT_ASCII_FORMAT}, - #else - {"NOT_ASCII_FORMAT", 13, 190}, - #endif - #ifdef ASN1_R_NOT_ENOUGH_DATA - {"NOT_ENOUGH_DATA", ERR_LIB_ASN1, ASN1_R_NOT_ENOUGH_DATA}, - #else - {"NOT_ENOUGH_DATA", 13, 142}, - #endif - #ifdef ASN1_R_NO_CONTENT_TYPE - {"NO_CONTENT_TYPE", ERR_LIB_ASN1, ASN1_R_NO_CONTENT_TYPE}, - #else - {"NO_CONTENT_TYPE", 13, 209}, - #endif - #ifdef ASN1_R_NO_MATCHING_CHOICE_TYPE - {"NO_MATCHING_CHOICE_TYPE", ERR_LIB_ASN1, ASN1_R_NO_MATCHING_CHOICE_TYPE}, - #else - {"NO_MATCHING_CHOICE_TYPE", 13, 143}, - #endif - #ifdef ASN1_R_NO_MULTIPART_BODY_FAILURE - {"NO_MULTIPART_BODY_FAILURE", ERR_LIB_ASN1, ASN1_R_NO_MULTIPART_BODY_FAILURE}, - #else - {"NO_MULTIPART_BODY_FAILURE", 13, 210}, - #endif - #ifdef ASN1_R_NO_MULTIPART_BOUNDARY - {"NO_MULTIPART_BOUNDARY", ERR_LIB_ASN1, ASN1_R_NO_MULTIPART_BOUNDARY}, - #else - {"NO_MULTIPART_BOUNDARY", 13, 211}, - #endif - #ifdef ASN1_R_NO_SIG_CONTENT_TYPE - {"NO_SIG_CONTENT_TYPE", ERR_LIB_ASN1, ASN1_R_NO_SIG_CONTENT_TYPE}, - #else - {"NO_SIG_CONTENT_TYPE", 13, 212}, - #endif - #ifdef ASN1_R_NULL_IS_WRONG_LENGTH - {"NULL_IS_WRONG_LENGTH", ERR_LIB_ASN1, ASN1_R_NULL_IS_WRONG_LENGTH}, - #else - {"NULL_IS_WRONG_LENGTH", 13, 144}, - #endif - #ifdef ASN1_R_OBJECT_NOT_ASCII_FORMAT - {"OBJECT_NOT_ASCII_FORMAT", ERR_LIB_ASN1, ASN1_R_OBJECT_NOT_ASCII_FORMAT}, - #else - {"OBJECT_NOT_ASCII_FORMAT", 13, 191}, - #endif - #ifdef ASN1_R_ODD_NUMBER_OF_CHARS - {"ODD_NUMBER_OF_CHARS", ERR_LIB_ASN1, ASN1_R_ODD_NUMBER_OF_CHARS}, - #else - {"ODD_NUMBER_OF_CHARS", 13, 145}, - #endif - #ifdef ASN1_R_SECOND_NUMBER_TOO_LARGE - {"SECOND_NUMBER_TOO_LARGE", ERR_LIB_ASN1, ASN1_R_SECOND_NUMBER_TOO_LARGE}, - #else - {"SECOND_NUMBER_TOO_LARGE", 13, 147}, - #endif - #ifdef ASN1_R_SEQUENCE_LENGTH_MISMATCH - {"SEQUENCE_LENGTH_MISMATCH", ERR_LIB_ASN1, ASN1_R_SEQUENCE_LENGTH_MISMATCH}, - #else - {"SEQUENCE_LENGTH_MISMATCH", 13, 148}, - #endif - #ifdef ASN1_R_SEQUENCE_NOT_CONSTRUCTED - {"SEQUENCE_NOT_CONSTRUCTED", ERR_LIB_ASN1, ASN1_R_SEQUENCE_NOT_CONSTRUCTED}, - #else - {"SEQUENCE_NOT_CONSTRUCTED", 13, 149}, - #endif - #ifdef ASN1_R_SEQUENCE_OR_SET_NEEDS_CONFIG - {"SEQUENCE_OR_SET_NEEDS_CONFIG", ERR_LIB_ASN1, ASN1_R_SEQUENCE_OR_SET_NEEDS_CONFIG}, - #else - {"SEQUENCE_OR_SET_NEEDS_CONFIG", 13, 192}, - #endif - #ifdef ASN1_R_SHORT_LINE - {"SHORT_LINE", ERR_LIB_ASN1, ASN1_R_SHORT_LINE}, - #else - {"SHORT_LINE", 13, 150}, - #endif - #ifdef ASN1_R_SIG_INVALID_MIME_TYPE - {"SIG_INVALID_MIME_TYPE", 
ERR_LIB_ASN1, ASN1_R_SIG_INVALID_MIME_TYPE}, - #else - {"SIG_INVALID_MIME_TYPE", 13, 213}, - #endif - #ifdef ASN1_R_STREAMING_NOT_SUPPORTED - {"STREAMING_NOT_SUPPORTED", ERR_LIB_ASN1, ASN1_R_STREAMING_NOT_SUPPORTED}, - #else - {"STREAMING_NOT_SUPPORTED", 13, 202}, - #endif - #ifdef ASN1_R_STRING_TOO_LONG - {"STRING_TOO_LONG", ERR_LIB_ASN1, ASN1_R_STRING_TOO_LONG}, - #else - {"STRING_TOO_LONG", 13, 151}, - #endif - #ifdef ASN1_R_STRING_TOO_SHORT - {"STRING_TOO_SHORT", ERR_LIB_ASN1, ASN1_R_STRING_TOO_SHORT}, - #else - {"STRING_TOO_SHORT", 13, 152}, - #endif - #ifdef ASN1_R_THE_ASN1_OBJECT_IDENTIFIER_IS_NOT_KNOWN_FOR_THIS_MD - {"THE_ASN1_OBJECT_IDENTIFIER_IS_NOT_KNOWN_FOR_THIS_MD", ERR_LIB_ASN1, ASN1_R_THE_ASN1_OBJECT_IDENTIFIER_IS_NOT_KNOWN_FOR_THIS_MD}, - #else - {"THE_ASN1_OBJECT_IDENTIFIER_IS_NOT_KNOWN_FOR_THIS_MD", 13, 154}, - #endif - #ifdef ASN1_R_TIME_NOT_ASCII_FORMAT - {"TIME_NOT_ASCII_FORMAT", ERR_LIB_ASN1, ASN1_R_TIME_NOT_ASCII_FORMAT}, - #else - {"TIME_NOT_ASCII_FORMAT", 13, 193}, - #endif - #ifdef ASN1_R_TOO_LARGE - {"TOO_LARGE", ERR_LIB_ASN1, ASN1_R_TOO_LARGE}, - #else - {"TOO_LARGE", 13, 223}, - #endif - #ifdef ASN1_R_TOO_LONG - {"TOO_LONG", ERR_LIB_ASN1, ASN1_R_TOO_LONG}, - #else - {"TOO_LONG", 13, 155}, - #endif - #ifdef ASN1_R_TOO_SMALL - {"TOO_SMALL", ERR_LIB_ASN1, ASN1_R_TOO_SMALL}, - #else - {"TOO_SMALL", 13, 224}, - #endif - #ifdef ASN1_R_TYPE_NOT_CONSTRUCTED - {"TYPE_NOT_CONSTRUCTED", ERR_LIB_ASN1, ASN1_R_TYPE_NOT_CONSTRUCTED}, - #else - {"TYPE_NOT_CONSTRUCTED", 13, 156}, - #endif - #ifdef ASN1_R_TYPE_NOT_PRIMITIVE - {"TYPE_NOT_PRIMITIVE", ERR_LIB_ASN1, ASN1_R_TYPE_NOT_PRIMITIVE}, - #else - {"TYPE_NOT_PRIMITIVE", 13, 195}, - #endif - #ifdef ASN1_R_UNEXPECTED_EOC - {"UNEXPECTED_EOC", ERR_LIB_ASN1, ASN1_R_UNEXPECTED_EOC}, - #else - {"UNEXPECTED_EOC", 13, 159}, - #endif - #ifdef ASN1_R_UNIVERSALSTRING_IS_WRONG_LENGTH - {"UNIVERSALSTRING_IS_WRONG_LENGTH", ERR_LIB_ASN1, ASN1_R_UNIVERSALSTRING_IS_WRONG_LENGTH}, - #else - {"UNIVERSALSTRING_IS_WRONG_LENGTH", 13, 215}, - #endif - #ifdef ASN1_R_UNKNOWN_FORMAT - {"UNKNOWN_FORMAT", ERR_LIB_ASN1, ASN1_R_UNKNOWN_FORMAT}, - #else - {"UNKNOWN_FORMAT", 13, 160}, - #endif - #ifdef ASN1_R_UNKNOWN_MESSAGE_DIGEST_ALGORITHM - {"UNKNOWN_MESSAGE_DIGEST_ALGORITHM", ERR_LIB_ASN1, ASN1_R_UNKNOWN_MESSAGE_DIGEST_ALGORITHM}, - #else - {"UNKNOWN_MESSAGE_DIGEST_ALGORITHM", 13, 161}, - #endif - #ifdef ASN1_R_UNKNOWN_OBJECT_TYPE - {"UNKNOWN_OBJECT_TYPE", ERR_LIB_ASN1, ASN1_R_UNKNOWN_OBJECT_TYPE}, - #else - {"UNKNOWN_OBJECT_TYPE", 13, 162}, - #endif - #ifdef ASN1_R_UNKNOWN_PUBLIC_KEY_TYPE - {"UNKNOWN_PUBLIC_KEY_TYPE", ERR_LIB_ASN1, ASN1_R_UNKNOWN_PUBLIC_KEY_TYPE}, - #else - {"UNKNOWN_PUBLIC_KEY_TYPE", 13, 163}, - #endif - #ifdef ASN1_R_UNKNOWN_SIGNATURE_ALGORITHM - {"UNKNOWN_SIGNATURE_ALGORITHM", ERR_LIB_ASN1, ASN1_R_UNKNOWN_SIGNATURE_ALGORITHM}, - #else - {"UNKNOWN_SIGNATURE_ALGORITHM", 13, 199}, - #endif - #ifdef ASN1_R_UNKNOWN_TAG - {"UNKNOWN_TAG", ERR_LIB_ASN1, ASN1_R_UNKNOWN_TAG}, - #else - {"UNKNOWN_TAG", 13, 194}, - #endif - #ifdef ASN1_R_UNSUPPORTED_ANY_DEFINED_BY_TYPE - {"UNSUPPORTED_ANY_DEFINED_BY_TYPE", ERR_LIB_ASN1, ASN1_R_UNSUPPORTED_ANY_DEFINED_BY_TYPE}, - #else - {"UNSUPPORTED_ANY_DEFINED_BY_TYPE", 13, 164}, - #endif - #ifdef ASN1_R_UNSUPPORTED_CIPHER - {"UNSUPPORTED_CIPHER", ERR_LIB_ASN1, ASN1_R_UNSUPPORTED_CIPHER}, - #else - {"UNSUPPORTED_CIPHER", 13, 228}, - #endif - #ifdef ASN1_R_UNSUPPORTED_PUBLIC_KEY_TYPE - {"UNSUPPORTED_PUBLIC_KEY_TYPE", ERR_LIB_ASN1, ASN1_R_UNSUPPORTED_PUBLIC_KEY_TYPE}, - #else - {"UNSUPPORTED_PUBLIC_KEY_TYPE", 13, 
167}, - #endif - #ifdef ASN1_R_UNSUPPORTED_TYPE - {"UNSUPPORTED_TYPE", ERR_LIB_ASN1, ASN1_R_UNSUPPORTED_TYPE}, - #else - {"UNSUPPORTED_TYPE", 13, 196}, - #endif - #ifdef ASN1_R_WRONG_INTEGER_TYPE - {"WRONG_INTEGER_TYPE", ERR_LIB_ASN1, ASN1_R_WRONG_INTEGER_TYPE}, - #else - {"WRONG_INTEGER_TYPE", 13, 225}, - #endif - #ifdef ASN1_R_WRONG_PUBLIC_KEY_TYPE - {"WRONG_PUBLIC_KEY_TYPE", ERR_LIB_ASN1, ASN1_R_WRONG_PUBLIC_KEY_TYPE}, - #else - {"WRONG_PUBLIC_KEY_TYPE", 13, 200}, - #endif - #ifdef ASN1_R_WRONG_TAG - {"WRONG_TAG", ERR_LIB_ASN1, ASN1_R_WRONG_TAG}, - #else - {"WRONG_TAG", 13, 168}, - #endif - #ifdef ASYNC_R_FAILED_TO_SET_POOL - {"FAILED_TO_SET_POOL", ERR_LIB_ASYNC, ASYNC_R_FAILED_TO_SET_POOL}, - #else - {"FAILED_TO_SET_POOL", 51, 101}, - #endif - #ifdef ASYNC_R_FAILED_TO_SWAP_CONTEXT - {"FAILED_TO_SWAP_CONTEXT", ERR_LIB_ASYNC, ASYNC_R_FAILED_TO_SWAP_CONTEXT}, - #else - {"FAILED_TO_SWAP_CONTEXT", 51, 102}, - #endif - #ifdef ASYNC_R_INIT_FAILED - {"INIT_FAILED", ERR_LIB_ASYNC, ASYNC_R_INIT_FAILED}, - #else - {"INIT_FAILED", 51, 105}, - #endif - #ifdef ASYNC_R_INVALID_POOL_SIZE - {"INVALID_POOL_SIZE", ERR_LIB_ASYNC, ASYNC_R_INVALID_POOL_SIZE}, - #else - {"INVALID_POOL_SIZE", 51, 103}, - #endif - #ifdef BIO_R_ACCEPT_ERROR - {"ACCEPT_ERROR", ERR_LIB_BIO, BIO_R_ACCEPT_ERROR}, - #else - {"ACCEPT_ERROR", 32, 100}, - #endif - #ifdef BIO_R_ADDRINFO_ADDR_IS_NOT_AF_INET - {"ADDRINFO_ADDR_IS_NOT_AF_INET", ERR_LIB_BIO, BIO_R_ADDRINFO_ADDR_IS_NOT_AF_INET}, - #else - {"ADDRINFO_ADDR_IS_NOT_AF_INET", 32, 141}, - #endif - #ifdef BIO_R_AMBIGUOUS_HOST_OR_SERVICE - {"AMBIGUOUS_HOST_OR_SERVICE", ERR_LIB_BIO, BIO_R_AMBIGUOUS_HOST_OR_SERVICE}, - #else - {"AMBIGUOUS_HOST_OR_SERVICE", 32, 129}, - #endif - #ifdef BIO_R_BAD_FOPEN_MODE - {"BAD_FOPEN_MODE", ERR_LIB_BIO, BIO_R_BAD_FOPEN_MODE}, - #else - {"BAD_FOPEN_MODE", 32, 101}, - #endif - #ifdef BIO_R_BROKEN_PIPE - {"BROKEN_PIPE", ERR_LIB_BIO, BIO_R_BROKEN_PIPE}, - #else - {"BROKEN_PIPE", 32, 124}, - #endif - #ifdef BIO_R_CONNECT_ERROR - {"CONNECT_ERROR", ERR_LIB_BIO, BIO_R_CONNECT_ERROR}, - #else - {"CONNECT_ERROR", 32, 103}, - #endif - #ifdef BIO_R_GETHOSTBYNAME_ADDR_IS_NOT_AF_INET - {"GETHOSTBYNAME_ADDR_IS_NOT_AF_INET", ERR_LIB_BIO, BIO_R_GETHOSTBYNAME_ADDR_IS_NOT_AF_INET}, - #else - {"GETHOSTBYNAME_ADDR_IS_NOT_AF_INET", 32, 107}, - #endif - #ifdef BIO_R_GETSOCKNAME_ERROR - {"GETSOCKNAME_ERROR", ERR_LIB_BIO, BIO_R_GETSOCKNAME_ERROR}, - #else - {"GETSOCKNAME_ERROR", 32, 132}, - #endif - #ifdef BIO_R_GETSOCKNAME_TRUNCATED_ADDRESS - {"GETSOCKNAME_TRUNCATED_ADDRESS", ERR_LIB_BIO, BIO_R_GETSOCKNAME_TRUNCATED_ADDRESS}, - #else - {"GETSOCKNAME_TRUNCATED_ADDRESS", 32, 133}, - #endif - #ifdef BIO_R_GETTING_SOCKTYPE - {"GETTING_SOCKTYPE", ERR_LIB_BIO, BIO_R_GETTING_SOCKTYPE}, - #else - {"GETTING_SOCKTYPE", 32, 134}, - #endif - #ifdef BIO_R_INVALID_ARGUMENT - {"INVALID_ARGUMENT", ERR_LIB_BIO, BIO_R_INVALID_ARGUMENT}, - #else - {"INVALID_ARGUMENT", 32, 125}, - #endif - #ifdef BIO_R_INVALID_SOCKET - {"INVALID_SOCKET", ERR_LIB_BIO, BIO_R_INVALID_SOCKET}, - #else - {"INVALID_SOCKET", 32, 135}, - #endif - #ifdef BIO_R_IN_USE - {"IN_USE", ERR_LIB_BIO, BIO_R_IN_USE}, - #else - {"IN_USE", 32, 123}, - #endif - #ifdef BIO_R_LENGTH_TOO_LONG - {"LENGTH_TOO_LONG", ERR_LIB_BIO, BIO_R_LENGTH_TOO_LONG}, - #else - {"LENGTH_TOO_LONG", 32, 102}, - #endif - #ifdef BIO_R_LISTEN_V6_ONLY - {"LISTEN_V6_ONLY", ERR_LIB_BIO, BIO_R_LISTEN_V6_ONLY}, - #else - {"LISTEN_V6_ONLY", 32, 136}, - #endif - #ifdef BIO_R_LOOKUP_RETURNED_NOTHING - {"LOOKUP_RETURNED_NOTHING", ERR_LIB_BIO, 
BIO_R_LOOKUP_RETURNED_NOTHING}, - #else - {"LOOKUP_RETURNED_NOTHING", 32, 142}, - #endif - #ifdef BIO_R_MALFORMED_HOST_OR_SERVICE - {"MALFORMED_HOST_OR_SERVICE", ERR_LIB_BIO, BIO_R_MALFORMED_HOST_OR_SERVICE}, - #else - {"MALFORMED_HOST_OR_SERVICE", 32, 130}, - #endif - #ifdef BIO_R_NBIO_CONNECT_ERROR - {"NBIO_CONNECT_ERROR", ERR_LIB_BIO, BIO_R_NBIO_CONNECT_ERROR}, - #else - {"NBIO_CONNECT_ERROR", 32, 110}, - #endif - #ifdef BIO_R_NO_ACCEPT_ADDR_OR_SERVICE_SPECIFIED - {"NO_ACCEPT_ADDR_OR_SERVICE_SPECIFIED", ERR_LIB_BIO, BIO_R_NO_ACCEPT_ADDR_OR_SERVICE_SPECIFIED}, - #else - {"NO_ACCEPT_ADDR_OR_SERVICE_SPECIFIED", 32, 143}, - #endif - #ifdef BIO_R_NO_HOSTNAME_OR_SERVICE_SPECIFIED - {"NO_HOSTNAME_OR_SERVICE_SPECIFIED", ERR_LIB_BIO, BIO_R_NO_HOSTNAME_OR_SERVICE_SPECIFIED}, - #else - {"NO_HOSTNAME_OR_SERVICE_SPECIFIED", 32, 144}, - #endif - #ifdef BIO_R_NO_PORT_DEFINED - {"NO_PORT_DEFINED", ERR_LIB_BIO, BIO_R_NO_PORT_DEFINED}, - #else - {"NO_PORT_DEFINED", 32, 113}, - #endif - #ifdef BIO_R_NO_SUCH_FILE - {"NO_SUCH_FILE", ERR_LIB_BIO, BIO_R_NO_SUCH_FILE}, - #else - {"NO_SUCH_FILE", 32, 128}, - #endif - #ifdef BIO_R_NULL_PARAMETER - {"NULL_PARAMETER", ERR_LIB_BIO, BIO_R_NULL_PARAMETER}, - #else - {"NULL_PARAMETER", 32, 115}, - #endif - #ifdef BIO_R_UNABLE_TO_BIND_SOCKET - {"UNABLE_TO_BIND_SOCKET", ERR_LIB_BIO, BIO_R_UNABLE_TO_BIND_SOCKET}, - #else - {"UNABLE_TO_BIND_SOCKET", 32, 117}, - #endif - #ifdef BIO_R_UNABLE_TO_CREATE_SOCKET - {"UNABLE_TO_CREATE_SOCKET", ERR_LIB_BIO, BIO_R_UNABLE_TO_CREATE_SOCKET}, - #else - {"UNABLE_TO_CREATE_SOCKET", 32, 118}, - #endif - #ifdef BIO_R_UNABLE_TO_KEEPALIVE - {"UNABLE_TO_KEEPALIVE", ERR_LIB_BIO, BIO_R_UNABLE_TO_KEEPALIVE}, - #else - {"UNABLE_TO_KEEPALIVE", 32, 137}, - #endif - #ifdef BIO_R_UNABLE_TO_LISTEN_SOCKET - {"UNABLE_TO_LISTEN_SOCKET", ERR_LIB_BIO, BIO_R_UNABLE_TO_LISTEN_SOCKET}, - #else - {"UNABLE_TO_LISTEN_SOCKET", 32, 119}, - #endif - #ifdef BIO_R_UNABLE_TO_NODELAY - {"UNABLE_TO_NODELAY", ERR_LIB_BIO, BIO_R_UNABLE_TO_NODELAY}, - #else - {"UNABLE_TO_NODELAY", 32, 138}, - #endif - #ifdef BIO_R_UNABLE_TO_REUSEADDR - {"UNABLE_TO_REUSEADDR", ERR_LIB_BIO, BIO_R_UNABLE_TO_REUSEADDR}, - #else - {"UNABLE_TO_REUSEADDR", 32, 139}, - #endif - #ifdef BIO_R_UNAVAILABLE_IP_FAMILY - {"UNAVAILABLE_IP_FAMILY", ERR_LIB_BIO, BIO_R_UNAVAILABLE_IP_FAMILY}, - #else - {"UNAVAILABLE_IP_FAMILY", 32, 145}, - #endif - #ifdef BIO_R_UNINITIALIZED - {"UNINITIALIZED", ERR_LIB_BIO, BIO_R_UNINITIALIZED}, - #else - {"UNINITIALIZED", 32, 120}, - #endif - #ifdef BIO_R_UNKNOWN_INFO_TYPE - {"UNKNOWN_INFO_TYPE", ERR_LIB_BIO, BIO_R_UNKNOWN_INFO_TYPE}, - #else - {"UNKNOWN_INFO_TYPE", 32, 140}, - #endif - #ifdef BIO_R_UNSUPPORTED_IP_FAMILY - {"UNSUPPORTED_IP_FAMILY", ERR_LIB_BIO, BIO_R_UNSUPPORTED_IP_FAMILY}, - #else - {"UNSUPPORTED_IP_FAMILY", 32, 146}, - #endif - #ifdef BIO_R_UNSUPPORTED_METHOD - {"UNSUPPORTED_METHOD", ERR_LIB_BIO, BIO_R_UNSUPPORTED_METHOD}, - #else - {"UNSUPPORTED_METHOD", 32, 121}, - #endif - #ifdef BIO_R_UNSUPPORTED_PROTOCOL_FAMILY - {"UNSUPPORTED_PROTOCOL_FAMILY", ERR_LIB_BIO, BIO_R_UNSUPPORTED_PROTOCOL_FAMILY}, - #else - {"UNSUPPORTED_PROTOCOL_FAMILY", 32, 131}, - #endif - #ifdef BIO_R_WRITE_TO_READ_ONLY_BIO - {"WRITE_TO_READ_ONLY_BIO", ERR_LIB_BIO, BIO_R_WRITE_TO_READ_ONLY_BIO}, - #else - {"WRITE_TO_READ_ONLY_BIO", 32, 126}, - #endif - #ifdef BIO_R_WSASTARTUP - {"WSASTARTUP", ERR_LIB_BIO, BIO_R_WSASTARTUP}, - #else - {"WSASTARTUP", 32, 122}, - #endif - #ifdef BN_R_ARG2_LT_ARG3 - {"ARG2_LT_ARG3", ERR_LIB_BN, BN_R_ARG2_LT_ARG3}, - #else - {"ARG2_LT_ARG3", 3, 100}, - 
#endif - #ifdef BN_R_BAD_RECIPROCAL - {"BAD_RECIPROCAL", ERR_LIB_BN, BN_R_BAD_RECIPROCAL}, - #else - {"BAD_RECIPROCAL", 3, 101}, - #endif - #ifdef BN_R_BIGNUM_TOO_LONG - {"BIGNUM_TOO_LONG", ERR_LIB_BN, BN_R_BIGNUM_TOO_LONG}, - #else - {"BIGNUM_TOO_LONG", 3, 114}, - #endif - #ifdef BN_R_BITS_TOO_SMALL - {"BITS_TOO_SMALL", ERR_LIB_BN, BN_R_BITS_TOO_SMALL}, - #else - {"BITS_TOO_SMALL", 3, 118}, - #endif - #ifdef BN_R_CALLED_WITH_EVEN_MODULUS - {"CALLED_WITH_EVEN_MODULUS", ERR_LIB_BN, BN_R_CALLED_WITH_EVEN_MODULUS}, - #else - {"CALLED_WITH_EVEN_MODULUS", 3, 102}, - #endif - #ifdef BN_R_DIV_BY_ZERO - {"DIV_BY_ZERO", ERR_LIB_BN, BN_R_DIV_BY_ZERO}, - #else - {"DIV_BY_ZERO", 3, 103}, - #endif - #ifdef BN_R_ENCODING_ERROR - {"ENCODING_ERROR", ERR_LIB_BN, BN_R_ENCODING_ERROR}, - #else - {"ENCODING_ERROR", 3, 104}, - #endif - #ifdef BN_R_EXPAND_ON_STATIC_BIGNUM_DATA - {"EXPAND_ON_STATIC_BIGNUM_DATA", ERR_LIB_BN, BN_R_EXPAND_ON_STATIC_BIGNUM_DATA}, - #else - {"EXPAND_ON_STATIC_BIGNUM_DATA", 3, 105}, - #endif - #ifdef BN_R_INPUT_NOT_REDUCED - {"INPUT_NOT_REDUCED", ERR_LIB_BN, BN_R_INPUT_NOT_REDUCED}, - #else - {"INPUT_NOT_REDUCED", 3, 110}, - #endif - #ifdef BN_R_INVALID_LENGTH - {"INVALID_LENGTH", ERR_LIB_BN, BN_R_INVALID_LENGTH}, - #else - {"INVALID_LENGTH", 3, 106}, - #endif - #ifdef BN_R_INVALID_RANGE - {"INVALID_RANGE", ERR_LIB_BN, BN_R_INVALID_RANGE}, - #else - {"INVALID_RANGE", 3, 115}, - #endif - #ifdef BN_R_INVALID_SHIFT - {"INVALID_SHIFT", ERR_LIB_BN, BN_R_INVALID_SHIFT}, - #else - {"INVALID_SHIFT", 3, 119}, - #endif - #ifdef BN_R_NOT_A_SQUARE - {"NOT_A_SQUARE", ERR_LIB_BN, BN_R_NOT_A_SQUARE}, - #else - {"NOT_A_SQUARE", 3, 111}, - #endif - #ifdef BN_R_NOT_INITIALIZED - {"NOT_INITIALIZED", ERR_LIB_BN, BN_R_NOT_INITIALIZED}, - #else - {"NOT_INITIALIZED", 3, 107}, - #endif - #ifdef BN_R_NO_INVERSE - {"NO_INVERSE", ERR_LIB_BN, BN_R_NO_INVERSE}, - #else - {"NO_INVERSE", 3, 108}, - #endif - #ifdef BN_R_NO_SOLUTION - {"NO_SOLUTION", ERR_LIB_BN, BN_R_NO_SOLUTION}, - #else - {"NO_SOLUTION", 3, 116}, - #endif - #ifdef BN_R_PRIVATE_KEY_TOO_LARGE - {"PRIVATE_KEY_TOO_LARGE", ERR_LIB_BN, BN_R_PRIVATE_KEY_TOO_LARGE}, - #else - {"PRIVATE_KEY_TOO_LARGE", 3, 117}, - #endif - #ifdef BN_R_P_IS_NOT_PRIME - {"P_IS_NOT_PRIME", ERR_LIB_BN, BN_R_P_IS_NOT_PRIME}, - #else - {"P_IS_NOT_PRIME", 3, 112}, - #endif - #ifdef BN_R_TOO_MANY_ITERATIONS - {"TOO_MANY_ITERATIONS", ERR_LIB_BN, BN_R_TOO_MANY_ITERATIONS}, - #else - {"TOO_MANY_ITERATIONS", 3, 113}, - #endif - #ifdef BN_R_TOO_MANY_TEMPORARY_VARIABLES - {"TOO_MANY_TEMPORARY_VARIABLES", ERR_LIB_BN, BN_R_TOO_MANY_TEMPORARY_VARIABLES}, - #else - {"TOO_MANY_TEMPORARY_VARIABLES", 3, 109}, - #endif - #ifdef CMS_R_ADD_SIGNER_ERROR - {"ADD_SIGNER_ERROR", ERR_LIB_CMS, CMS_R_ADD_SIGNER_ERROR}, - #else - {"ADD_SIGNER_ERROR", 46, 99}, - #endif - #ifdef CMS_R_ATTRIBUTE_ERROR - {"ATTRIBUTE_ERROR", ERR_LIB_CMS, CMS_R_ATTRIBUTE_ERROR}, - #else - {"ATTRIBUTE_ERROR", 46, 161}, - #endif - #ifdef CMS_R_CERTIFICATE_ALREADY_PRESENT - {"CERTIFICATE_ALREADY_PRESENT", ERR_LIB_CMS, CMS_R_CERTIFICATE_ALREADY_PRESENT}, - #else - {"CERTIFICATE_ALREADY_PRESENT", 46, 175}, - #endif - #ifdef CMS_R_CERTIFICATE_HAS_NO_KEYID - {"CERTIFICATE_HAS_NO_KEYID", ERR_LIB_CMS, CMS_R_CERTIFICATE_HAS_NO_KEYID}, - #else - {"CERTIFICATE_HAS_NO_KEYID", 46, 160}, - #endif - #ifdef CMS_R_CERTIFICATE_VERIFY_ERROR - {"CERTIFICATE_VERIFY_ERROR", ERR_LIB_CMS, CMS_R_CERTIFICATE_VERIFY_ERROR}, - #else - {"CERTIFICATE_VERIFY_ERROR", 46, 100}, - #endif - #ifdef CMS_R_CIPHER_INITIALISATION_ERROR - {"CIPHER_INITIALISATION_ERROR", 
ERR_LIB_CMS, CMS_R_CIPHER_INITIALISATION_ERROR}, - #else - {"CIPHER_INITIALISATION_ERROR", 46, 101}, - #endif - #ifdef CMS_R_CIPHER_PARAMETER_INITIALISATION_ERROR - {"CIPHER_PARAMETER_INITIALISATION_ERROR", ERR_LIB_CMS, CMS_R_CIPHER_PARAMETER_INITIALISATION_ERROR}, - #else - {"CIPHER_PARAMETER_INITIALISATION_ERROR", 46, 102}, - #endif - #ifdef CMS_R_CMS_DATAFINAL_ERROR - {"CMS_DATAFINAL_ERROR", ERR_LIB_CMS, CMS_R_CMS_DATAFINAL_ERROR}, - #else - {"CMS_DATAFINAL_ERROR", 46, 103}, - #endif - #ifdef CMS_R_CMS_LIB - {"CMS_LIB", ERR_LIB_CMS, CMS_R_CMS_LIB}, - #else - {"CMS_LIB", 46, 104}, - #endif - #ifdef CMS_R_CONTENTIDENTIFIER_MISMATCH - {"CONTENTIDENTIFIER_MISMATCH", ERR_LIB_CMS, CMS_R_CONTENTIDENTIFIER_MISMATCH}, - #else - {"CONTENTIDENTIFIER_MISMATCH", 46, 170}, - #endif - #ifdef CMS_R_CONTENT_NOT_FOUND - {"CONTENT_NOT_FOUND", ERR_LIB_CMS, CMS_R_CONTENT_NOT_FOUND}, - #else - {"CONTENT_NOT_FOUND", 46, 105}, - #endif - #ifdef CMS_R_CONTENT_TYPE_MISMATCH - {"CONTENT_TYPE_MISMATCH", ERR_LIB_CMS, CMS_R_CONTENT_TYPE_MISMATCH}, - #else - {"CONTENT_TYPE_MISMATCH", 46, 171}, - #endif - #ifdef CMS_R_CONTENT_TYPE_NOT_COMPRESSED_DATA - {"CONTENT_TYPE_NOT_COMPRESSED_DATA", ERR_LIB_CMS, CMS_R_CONTENT_TYPE_NOT_COMPRESSED_DATA}, - #else - {"CONTENT_TYPE_NOT_COMPRESSED_DATA", 46, 106}, - #endif - #ifdef CMS_R_CONTENT_TYPE_NOT_ENVELOPED_DATA - {"CONTENT_TYPE_NOT_ENVELOPED_DATA", ERR_LIB_CMS, CMS_R_CONTENT_TYPE_NOT_ENVELOPED_DATA}, - #else - {"CONTENT_TYPE_NOT_ENVELOPED_DATA", 46, 107}, - #endif - #ifdef CMS_R_CONTENT_TYPE_NOT_SIGNED_DATA - {"CONTENT_TYPE_NOT_SIGNED_DATA", ERR_LIB_CMS, CMS_R_CONTENT_TYPE_NOT_SIGNED_DATA}, - #else - {"CONTENT_TYPE_NOT_SIGNED_DATA", 46, 108}, - #endif - #ifdef CMS_R_CONTENT_VERIFY_ERROR - {"CONTENT_VERIFY_ERROR", ERR_LIB_CMS, CMS_R_CONTENT_VERIFY_ERROR}, - #else - {"CONTENT_VERIFY_ERROR", 46, 109}, - #endif - #ifdef CMS_R_CTRL_ERROR - {"CTRL_ERROR", ERR_LIB_CMS, CMS_R_CTRL_ERROR}, - #else - {"CTRL_ERROR", 46, 110}, - #endif - #ifdef CMS_R_CTRL_FAILURE - {"CTRL_FAILURE", ERR_LIB_CMS, CMS_R_CTRL_FAILURE}, - #else - {"CTRL_FAILURE", 46, 111}, - #endif - #ifdef CMS_R_DECRYPT_ERROR - {"DECRYPT_ERROR", ERR_LIB_CMS, CMS_R_DECRYPT_ERROR}, - #else - {"DECRYPT_ERROR", 46, 112}, - #endif - #ifdef CMS_R_ERROR_GETTING_PUBLIC_KEY - {"ERROR_GETTING_PUBLIC_KEY", ERR_LIB_CMS, CMS_R_ERROR_GETTING_PUBLIC_KEY}, - #else - {"ERROR_GETTING_PUBLIC_KEY", 46, 113}, - #endif - #ifdef CMS_R_ERROR_READING_MESSAGEDIGEST_ATTRIBUTE - {"ERROR_READING_MESSAGEDIGEST_ATTRIBUTE", ERR_LIB_CMS, CMS_R_ERROR_READING_MESSAGEDIGEST_ATTRIBUTE}, - #else - {"ERROR_READING_MESSAGEDIGEST_ATTRIBUTE", 46, 114}, - #endif - #ifdef CMS_R_ERROR_SETTING_KEY - {"ERROR_SETTING_KEY", ERR_LIB_CMS, CMS_R_ERROR_SETTING_KEY}, - #else - {"ERROR_SETTING_KEY", 46, 115}, - #endif - #ifdef CMS_R_ERROR_SETTING_RECIPIENTINFO - {"ERROR_SETTING_RECIPIENTINFO", ERR_LIB_CMS, CMS_R_ERROR_SETTING_RECIPIENTINFO}, - #else - {"ERROR_SETTING_RECIPIENTINFO", 46, 116}, - #endif - #ifdef CMS_R_INVALID_ENCRYPTED_KEY_LENGTH - {"INVALID_ENCRYPTED_KEY_LENGTH", ERR_LIB_CMS, CMS_R_INVALID_ENCRYPTED_KEY_LENGTH}, - #else - {"INVALID_ENCRYPTED_KEY_LENGTH", 46, 117}, - #endif - #ifdef CMS_R_INVALID_KEY_ENCRYPTION_PARAMETER - {"INVALID_KEY_ENCRYPTION_PARAMETER", ERR_LIB_CMS, CMS_R_INVALID_KEY_ENCRYPTION_PARAMETER}, - #else - {"INVALID_KEY_ENCRYPTION_PARAMETER", 46, 176}, - #endif - #ifdef CMS_R_INVALID_KEY_LENGTH - {"INVALID_KEY_LENGTH", ERR_LIB_CMS, CMS_R_INVALID_KEY_LENGTH}, - #else - {"INVALID_KEY_LENGTH", 46, 118}, - #endif - #ifdef CMS_R_MD_BIO_INIT_ERROR - 
{"MD_BIO_INIT_ERROR", ERR_LIB_CMS, CMS_R_MD_BIO_INIT_ERROR}, - #else - {"MD_BIO_INIT_ERROR", 46, 119}, - #endif - #ifdef CMS_R_MESSAGEDIGEST_ATTRIBUTE_WRONG_LENGTH - {"MESSAGEDIGEST_ATTRIBUTE_WRONG_LENGTH", ERR_LIB_CMS, CMS_R_MESSAGEDIGEST_ATTRIBUTE_WRONG_LENGTH}, - #else - {"MESSAGEDIGEST_ATTRIBUTE_WRONG_LENGTH", 46, 120}, - #endif - #ifdef CMS_R_MESSAGEDIGEST_WRONG_LENGTH - {"MESSAGEDIGEST_WRONG_LENGTH", ERR_LIB_CMS, CMS_R_MESSAGEDIGEST_WRONG_LENGTH}, - #else - {"MESSAGEDIGEST_WRONG_LENGTH", 46, 121}, - #endif - #ifdef CMS_R_MSGSIGDIGEST_ERROR - {"MSGSIGDIGEST_ERROR", ERR_LIB_CMS, CMS_R_MSGSIGDIGEST_ERROR}, - #else - {"MSGSIGDIGEST_ERROR", 46, 172}, - #endif - #ifdef CMS_R_MSGSIGDIGEST_VERIFICATION_FAILURE - {"MSGSIGDIGEST_VERIFICATION_FAILURE", ERR_LIB_CMS, CMS_R_MSGSIGDIGEST_VERIFICATION_FAILURE}, - #else - {"MSGSIGDIGEST_VERIFICATION_FAILURE", 46, 162}, - #endif - #ifdef CMS_R_MSGSIGDIGEST_WRONG_LENGTH - {"MSGSIGDIGEST_WRONG_LENGTH", ERR_LIB_CMS, CMS_R_MSGSIGDIGEST_WRONG_LENGTH}, - #else - {"MSGSIGDIGEST_WRONG_LENGTH", 46, 163}, - #endif - #ifdef CMS_R_NEED_ONE_SIGNER - {"NEED_ONE_SIGNER", ERR_LIB_CMS, CMS_R_NEED_ONE_SIGNER}, - #else - {"NEED_ONE_SIGNER", 46, 164}, - #endif - #ifdef CMS_R_NOT_A_SIGNED_RECEIPT - {"NOT_A_SIGNED_RECEIPT", ERR_LIB_CMS, CMS_R_NOT_A_SIGNED_RECEIPT}, - #else - {"NOT_A_SIGNED_RECEIPT", 46, 165}, - #endif - #ifdef CMS_R_NOT_ENCRYPTED_DATA - {"NOT_ENCRYPTED_DATA", ERR_LIB_CMS, CMS_R_NOT_ENCRYPTED_DATA}, - #else - {"NOT_ENCRYPTED_DATA", 46, 122}, - #endif - #ifdef CMS_R_NOT_KEK - {"NOT_KEK", ERR_LIB_CMS, CMS_R_NOT_KEK}, - #else - {"NOT_KEK", 46, 123}, - #endif - #ifdef CMS_R_NOT_KEY_AGREEMENT - {"NOT_KEY_AGREEMENT", ERR_LIB_CMS, CMS_R_NOT_KEY_AGREEMENT}, - #else - {"NOT_KEY_AGREEMENT", 46, 181}, - #endif - #ifdef CMS_R_NOT_KEY_TRANSPORT - {"NOT_KEY_TRANSPORT", ERR_LIB_CMS, CMS_R_NOT_KEY_TRANSPORT}, - #else - {"NOT_KEY_TRANSPORT", 46, 124}, - #endif - #ifdef CMS_R_NOT_PWRI - {"NOT_PWRI", ERR_LIB_CMS, CMS_R_NOT_PWRI}, - #else - {"NOT_PWRI", 46, 177}, - #endif - #ifdef CMS_R_NOT_SUPPORTED_FOR_THIS_KEY_TYPE - {"NOT_SUPPORTED_FOR_THIS_KEY_TYPE", ERR_LIB_CMS, CMS_R_NOT_SUPPORTED_FOR_THIS_KEY_TYPE}, - #else - {"NOT_SUPPORTED_FOR_THIS_KEY_TYPE", 46, 125}, - #endif - #ifdef CMS_R_NO_CIPHER - {"NO_CIPHER", ERR_LIB_CMS, CMS_R_NO_CIPHER}, - #else - {"NO_CIPHER", 46, 126}, - #endif - #ifdef CMS_R_NO_CONTENT - {"NO_CONTENT", ERR_LIB_CMS, CMS_R_NO_CONTENT}, - #else - {"NO_CONTENT", 46, 127}, - #endif - #ifdef CMS_R_NO_CONTENT_TYPE - {"NO_CONTENT_TYPE", ERR_LIB_CMS, CMS_R_NO_CONTENT_TYPE}, - #else - {"NO_CONTENT_TYPE", 46, 173}, - #endif - #ifdef CMS_R_NO_DEFAULT_DIGEST - {"NO_DEFAULT_DIGEST", ERR_LIB_CMS, CMS_R_NO_DEFAULT_DIGEST}, - #else - {"NO_DEFAULT_DIGEST", 46, 128}, - #endif - #ifdef CMS_R_NO_DIGEST_SET - {"NO_DIGEST_SET", ERR_LIB_CMS, CMS_R_NO_DIGEST_SET}, - #else - {"NO_DIGEST_SET", 46, 129}, - #endif - #ifdef CMS_R_NO_KEY - {"NO_KEY", ERR_LIB_CMS, CMS_R_NO_KEY}, - #else - {"NO_KEY", 46, 130}, - #endif - #ifdef CMS_R_NO_KEY_OR_CERT - {"NO_KEY_OR_CERT", ERR_LIB_CMS, CMS_R_NO_KEY_OR_CERT}, - #else - {"NO_KEY_OR_CERT", 46, 174}, - #endif - #ifdef CMS_R_NO_MATCHING_DIGEST - {"NO_MATCHING_DIGEST", ERR_LIB_CMS, CMS_R_NO_MATCHING_DIGEST}, - #else - {"NO_MATCHING_DIGEST", 46, 131}, - #endif - #ifdef CMS_R_NO_MATCHING_RECIPIENT - {"NO_MATCHING_RECIPIENT", ERR_LIB_CMS, CMS_R_NO_MATCHING_RECIPIENT}, - #else - {"NO_MATCHING_RECIPIENT", 46, 132}, - #endif - #ifdef CMS_R_NO_MATCHING_SIGNATURE - {"NO_MATCHING_SIGNATURE", ERR_LIB_CMS, CMS_R_NO_MATCHING_SIGNATURE}, - #else - 
{"NO_MATCHING_SIGNATURE", 46, 166}, - #endif - #ifdef CMS_R_NO_MSGSIGDIGEST - {"NO_MSGSIGDIGEST", ERR_LIB_CMS, CMS_R_NO_MSGSIGDIGEST}, - #else - {"NO_MSGSIGDIGEST", 46, 167}, - #endif - #ifdef CMS_R_NO_PASSWORD - {"NO_PASSWORD", ERR_LIB_CMS, CMS_R_NO_PASSWORD}, - #else - {"NO_PASSWORD", 46, 178}, - #endif - #ifdef CMS_R_NO_PRIVATE_KEY - {"NO_PRIVATE_KEY", ERR_LIB_CMS, CMS_R_NO_PRIVATE_KEY}, - #else - {"NO_PRIVATE_KEY", 46, 133}, - #endif - #ifdef CMS_R_NO_PUBLIC_KEY - {"NO_PUBLIC_KEY", ERR_LIB_CMS, CMS_R_NO_PUBLIC_KEY}, - #else - {"NO_PUBLIC_KEY", 46, 134}, - #endif - #ifdef CMS_R_NO_RECEIPT_REQUEST - {"NO_RECEIPT_REQUEST", ERR_LIB_CMS, CMS_R_NO_RECEIPT_REQUEST}, - #else - {"NO_RECEIPT_REQUEST", 46, 168}, - #endif - #ifdef CMS_R_NO_SIGNERS - {"NO_SIGNERS", ERR_LIB_CMS, CMS_R_NO_SIGNERS}, - #else - {"NO_SIGNERS", 46, 135}, - #endif - #ifdef CMS_R_PRIVATE_KEY_DOES_NOT_MATCH_CERTIFICATE - {"PRIVATE_KEY_DOES_NOT_MATCH_CERTIFICATE", ERR_LIB_CMS, CMS_R_PRIVATE_KEY_DOES_NOT_MATCH_CERTIFICATE}, - #else - {"PRIVATE_KEY_DOES_NOT_MATCH_CERTIFICATE", 46, 136}, - #endif - #ifdef CMS_R_RECEIPT_DECODE_ERROR - {"RECEIPT_DECODE_ERROR", ERR_LIB_CMS, CMS_R_RECEIPT_DECODE_ERROR}, - #else - {"RECEIPT_DECODE_ERROR", 46, 169}, - #endif - #ifdef CMS_R_RECIPIENT_ERROR - {"RECIPIENT_ERROR", ERR_LIB_CMS, CMS_R_RECIPIENT_ERROR}, - #else - {"RECIPIENT_ERROR", 46, 137}, - #endif - #ifdef CMS_R_SIGNER_CERTIFICATE_NOT_FOUND - {"SIGNER_CERTIFICATE_NOT_FOUND", ERR_LIB_CMS, CMS_R_SIGNER_CERTIFICATE_NOT_FOUND}, - #else - {"SIGNER_CERTIFICATE_NOT_FOUND", 46, 138}, - #endif - #ifdef CMS_R_SIGNFINAL_ERROR - {"SIGNFINAL_ERROR", ERR_LIB_CMS, CMS_R_SIGNFINAL_ERROR}, - #else - {"SIGNFINAL_ERROR", 46, 139}, - #endif - #ifdef CMS_R_SMIME_TEXT_ERROR - {"SMIME_TEXT_ERROR", ERR_LIB_CMS, CMS_R_SMIME_TEXT_ERROR}, - #else - {"SMIME_TEXT_ERROR", 46, 140}, - #endif - #ifdef CMS_R_STORE_INIT_ERROR - {"STORE_INIT_ERROR", ERR_LIB_CMS, CMS_R_STORE_INIT_ERROR}, - #else - {"STORE_INIT_ERROR", 46, 141}, - #endif - #ifdef CMS_R_TYPE_NOT_COMPRESSED_DATA - {"TYPE_NOT_COMPRESSED_DATA", ERR_LIB_CMS, CMS_R_TYPE_NOT_COMPRESSED_DATA}, - #else - {"TYPE_NOT_COMPRESSED_DATA", 46, 142}, - #endif - #ifdef CMS_R_TYPE_NOT_DATA - {"TYPE_NOT_DATA", ERR_LIB_CMS, CMS_R_TYPE_NOT_DATA}, - #else - {"TYPE_NOT_DATA", 46, 143}, - #endif - #ifdef CMS_R_TYPE_NOT_DIGESTED_DATA - {"TYPE_NOT_DIGESTED_DATA", ERR_LIB_CMS, CMS_R_TYPE_NOT_DIGESTED_DATA}, - #else - {"TYPE_NOT_DIGESTED_DATA", 46, 144}, - #endif - #ifdef CMS_R_TYPE_NOT_ENCRYPTED_DATA - {"TYPE_NOT_ENCRYPTED_DATA", ERR_LIB_CMS, CMS_R_TYPE_NOT_ENCRYPTED_DATA}, - #else - {"TYPE_NOT_ENCRYPTED_DATA", 46, 145}, - #endif - #ifdef CMS_R_TYPE_NOT_ENVELOPED_DATA - {"TYPE_NOT_ENVELOPED_DATA", ERR_LIB_CMS, CMS_R_TYPE_NOT_ENVELOPED_DATA}, - #else - {"TYPE_NOT_ENVELOPED_DATA", 46, 146}, - #endif - #ifdef CMS_R_UNABLE_TO_FINALIZE_CONTEXT - {"UNABLE_TO_FINALIZE_CONTEXT", ERR_LIB_CMS, CMS_R_UNABLE_TO_FINALIZE_CONTEXT}, - #else - {"UNABLE_TO_FINALIZE_CONTEXT", 46, 147}, - #endif - #ifdef CMS_R_UNKNOWN_CIPHER - {"UNKNOWN_CIPHER", ERR_LIB_CMS, CMS_R_UNKNOWN_CIPHER}, - #else - {"UNKNOWN_CIPHER", 46, 148}, - #endif - #ifdef CMS_R_UNKNOWN_DIGEST_ALGORITHM - {"UNKNOWN_DIGEST_ALGORITHM", ERR_LIB_CMS, CMS_R_UNKNOWN_DIGEST_ALGORITHM}, - #else - {"UNKNOWN_DIGEST_ALGORITHM", 46, 149}, - #endif - #ifdef CMS_R_UNKNOWN_ID - {"UNKNOWN_ID", ERR_LIB_CMS, CMS_R_UNKNOWN_ID}, - #else - {"UNKNOWN_ID", 46, 150}, - #endif - #ifdef CMS_R_UNSUPPORTED_COMPRESSION_ALGORITHM - {"UNSUPPORTED_COMPRESSION_ALGORITHM", ERR_LIB_CMS, 
CMS_R_UNSUPPORTED_COMPRESSION_ALGORITHM}, - #else - {"UNSUPPORTED_COMPRESSION_ALGORITHM", 46, 151}, - #endif - #ifdef CMS_R_UNSUPPORTED_CONTENT_TYPE - {"UNSUPPORTED_CONTENT_TYPE", ERR_LIB_CMS, CMS_R_UNSUPPORTED_CONTENT_TYPE}, - #else - {"UNSUPPORTED_CONTENT_TYPE", 46, 152}, - #endif - #ifdef CMS_R_UNSUPPORTED_KEK_ALGORITHM - {"UNSUPPORTED_KEK_ALGORITHM", ERR_LIB_CMS, CMS_R_UNSUPPORTED_KEK_ALGORITHM}, - #else - {"UNSUPPORTED_KEK_ALGORITHM", 46, 153}, - #endif - #ifdef CMS_R_UNSUPPORTED_KEY_ENCRYPTION_ALGORITHM - {"UNSUPPORTED_KEY_ENCRYPTION_ALGORITHM", ERR_LIB_CMS, CMS_R_UNSUPPORTED_KEY_ENCRYPTION_ALGORITHM}, - #else - {"UNSUPPORTED_KEY_ENCRYPTION_ALGORITHM", 46, 179}, - #endif - #ifdef CMS_R_UNSUPPORTED_RECIPIENTINFO_TYPE - {"UNSUPPORTED_RECIPIENTINFO_TYPE", ERR_LIB_CMS, CMS_R_UNSUPPORTED_RECIPIENTINFO_TYPE}, - #else - {"UNSUPPORTED_RECIPIENTINFO_TYPE", 46, 155}, - #endif - #ifdef CMS_R_UNSUPPORTED_RECIPIENT_TYPE - {"UNSUPPORTED_RECIPIENT_TYPE", ERR_LIB_CMS, CMS_R_UNSUPPORTED_RECIPIENT_TYPE}, - #else - {"UNSUPPORTED_RECIPIENT_TYPE", 46, 154}, - #endif - #ifdef CMS_R_UNSUPPORTED_TYPE - {"UNSUPPORTED_TYPE", ERR_LIB_CMS, CMS_R_UNSUPPORTED_TYPE}, - #else - {"UNSUPPORTED_TYPE", 46, 156}, - #endif - #ifdef CMS_R_UNWRAP_ERROR - {"UNWRAP_ERROR", ERR_LIB_CMS, CMS_R_UNWRAP_ERROR}, - #else - {"UNWRAP_ERROR", 46, 157}, - #endif - #ifdef CMS_R_UNWRAP_FAILURE - {"UNWRAP_FAILURE", ERR_LIB_CMS, CMS_R_UNWRAP_FAILURE}, - #else - {"UNWRAP_FAILURE", 46, 180}, - #endif - #ifdef CMS_R_VERIFICATION_FAILURE - {"VERIFICATION_FAILURE", ERR_LIB_CMS, CMS_R_VERIFICATION_FAILURE}, - #else - {"VERIFICATION_FAILURE", 46, 158}, - #endif - #ifdef CMS_R_WRAP_ERROR - {"WRAP_ERROR", ERR_LIB_CMS, CMS_R_WRAP_ERROR}, - #else - {"WRAP_ERROR", 46, 159}, - #endif - #ifdef COMP_R_ZLIB_DEFLATE_ERROR - {"ZLIB_DEFLATE_ERROR", ERR_LIB_COMP, COMP_R_ZLIB_DEFLATE_ERROR}, - #else - {"ZLIB_DEFLATE_ERROR", 41, 99}, - #endif - #ifdef COMP_R_ZLIB_INFLATE_ERROR - {"ZLIB_INFLATE_ERROR", ERR_LIB_COMP, COMP_R_ZLIB_INFLATE_ERROR}, - #else - {"ZLIB_INFLATE_ERROR", 41, 100}, - #endif - #ifdef COMP_R_ZLIB_NOT_SUPPORTED - {"ZLIB_NOT_SUPPORTED", ERR_LIB_COMP, COMP_R_ZLIB_NOT_SUPPORTED}, - #else - {"ZLIB_NOT_SUPPORTED", 41, 101}, - #endif - #ifdef CONF_R_ERROR_LOADING_DSO - {"ERROR_LOADING_DSO", ERR_LIB_CONF, CONF_R_ERROR_LOADING_DSO}, - #else - {"ERROR_LOADING_DSO", 14, 110}, - #endif - #ifdef CONF_R_LIST_CANNOT_BE_NULL - {"LIST_CANNOT_BE_NULL", ERR_LIB_CONF, CONF_R_LIST_CANNOT_BE_NULL}, - #else - {"LIST_CANNOT_BE_NULL", 14, 115}, - #endif - #ifdef CONF_R_MISSING_CLOSE_SQUARE_BRACKET - {"MISSING_CLOSE_SQUARE_BRACKET", ERR_LIB_CONF, CONF_R_MISSING_CLOSE_SQUARE_BRACKET}, - #else - {"MISSING_CLOSE_SQUARE_BRACKET", 14, 100}, - #endif - #ifdef CONF_R_MISSING_EQUAL_SIGN - {"MISSING_EQUAL_SIGN", ERR_LIB_CONF, CONF_R_MISSING_EQUAL_SIGN}, - #else - {"MISSING_EQUAL_SIGN", 14, 101}, - #endif - #ifdef CONF_R_MISSING_INIT_FUNCTION - {"MISSING_INIT_FUNCTION", ERR_LIB_CONF, CONF_R_MISSING_INIT_FUNCTION}, - #else - {"MISSING_INIT_FUNCTION", 14, 112}, - #endif - #ifdef CONF_R_MODULE_INITIALIZATION_ERROR - {"MODULE_INITIALIZATION_ERROR", ERR_LIB_CONF, CONF_R_MODULE_INITIALIZATION_ERROR}, - #else - {"MODULE_INITIALIZATION_ERROR", 14, 109}, - #endif - #ifdef CONF_R_NO_CLOSE_BRACE - {"NO_CLOSE_BRACE", ERR_LIB_CONF, CONF_R_NO_CLOSE_BRACE}, - #else - {"NO_CLOSE_BRACE", 14, 102}, - #endif - #ifdef CONF_R_NO_CONF - {"NO_CONF", ERR_LIB_CONF, CONF_R_NO_CONF}, - #else - {"NO_CONF", 14, 105}, - #endif - #ifdef CONF_R_NO_CONF_OR_ENVIRONMENT_VARIABLE - 
{"NO_CONF_OR_ENVIRONMENT_VARIABLE", ERR_LIB_CONF, CONF_R_NO_CONF_OR_ENVIRONMENT_VARIABLE}, - #else - {"NO_CONF_OR_ENVIRONMENT_VARIABLE", 14, 106}, - #endif - #ifdef CONF_R_NO_SECTION - {"NO_SECTION", ERR_LIB_CONF, CONF_R_NO_SECTION}, - #else - {"NO_SECTION", 14, 107}, - #endif - #ifdef CONF_R_NO_SUCH_FILE - {"NO_SUCH_FILE", ERR_LIB_CONF, CONF_R_NO_SUCH_FILE}, - #else - {"NO_SUCH_FILE", 14, 114}, - #endif - #ifdef CONF_R_NO_VALUE - {"NO_VALUE", ERR_LIB_CONF, CONF_R_NO_VALUE}, - #else - {"NO_VALUE", 14, 108}, - #endif - #ifdef CONF_R_NUMBER_TOO_LARGE - {"NUMBER_TOO_LARGE", ERR_LIB_CONF, CONF_R_NUMBER_TOO_LARGE}, - #else - {"NUMBER_TOO_LARGE", 14, 121}, - #endif - #ifdef CONF_R_RECURSIVE_DIRECTORY_INCLUDE - {"RECURSIVE_DIRECTORY_INCLUDE", ERR_LIB_CONF, CONF_R_RECURSIVE_DIRECTORY_INCLUDE}, - #else - {"RECURSIVE_DIRECTORY_INCLUDE", 14, 111}, - #endif - #ifdef CONF_R_SSL_COMMAND_SECTION_EMPTY - {"SSL_COMMAND_SECTION_EMPTY", ERR_LIB_CONF, CONF_R_SSL_COMMAND_SECTION_EMPTY}, - #else - {"SSL_COMMAND_SECTION_EMPTY", 14, 117}, - #endif - #ifdef CONF_R_SSL_COMMAND_SECTION_NOT_FOUND - {"SSL_COMMAND_SECTION_NOT_FOUND", ERR_LIB_CONF, CONF_R_SSL_COMMAND_SECTION_NOT_FOUND}, - #else - {"SSL_COMMAND_SECTION_NOT_FOUND", 14, 118}, - #endif - #ifdef CONF_R_SSL_SECTION_EMPTY - {"SSL_SECTION_EMPTY", ERR_LIB_CONF, CONF_R_SSL_SECTION_EMPTY}, - #else - {"SSL_SECTION_EMPTY", 14, 119}, - #endif - #ifdef CONF_R_SSL_SECTION_NOT_FOUND - {"SSL_SECTION_NOT_FOUND", ERR_LIB_CONF, CONF_R_SSL_SECTION_NOT_FOUND}, - #else - {"SSL_SECTION_NOT_FOUND", 14, 120}, - #endif - #ifdef CONF_R_UNABLE_TO_CREATE_NEW_SECTION - {"UNABLE_TO_CREATE_NEW_SECTION", ERR_LIB_CONF, CONF_R_UNABLE_TO_CREATE_NEW_SECTION}, - #else - {"UNABLE_TO_CREATE_NEW_SECTION", 14, 103}, - #endif - #ifdef CONF_R_UNKNOWN_MODULE_NAME - {"UNKNOWN_MODULE_NAME", ERR_LIB_CONF, CONF_R_UNKNOWN_MODULE_NAME}, - #else - {"UNKNOWN_MODULE_NAME", 14, 113}, - #endif - #ifdef CONF_R_VARIABLE_EXPANSION_TOO_LONG - {"VARIABLE_EXPANSION_TOO_LONG", ERR_LIB_CONF, CONF_R_VARIABLE_EXPANSION_TOO_LONG}, - #else - {"VARIABLE_EXPANSION_TOO_LONG", 14, 116}, - #endif - #ifdef CONF_R_VARIABLE_HAS_NO_VALUE - {"VARIABLE_HAS_NO_VALUE", ERR_LIB_CONF, CONF_R_VARIABLE_HAS_NO_VALUE}, - #else - {"VARIABLE_HAS_NO_VALUE", 14, 104}, - #endif - #ifdef CRYPTO_R_FIPS_MODE_NOT_SUPPORTED - {"FIPS_MODE_NOT_SUPPORTED", ERR_LIB_CRYPTO, CRYPTO_R_FIPS_MODE_NOT_SUPPORTED}, - #else - {"FIPS_MODE_NOT_SUPPORTED", 15, 101}, - #endif - #ifdef CRYPTO_R_ILLEGAL_HEX_DIGIT - {"ILLEGAL_HEX_DIGIT", ERR_LIB_CRYPTO, CRYPTO_R_ILLEGAL_HEX_DIGIT}, - #else - {"ILLEGAL_HEX_DIGIT", 15, 102}, - #endif - #ifdef CRYPTO_R_ODD_NUMBER_OF_DIGITS - {"ODD_NUMBER_OF_DIGITS", ERR_LIB_CRYPTO, CRYPTO_R_ODD_NUMBER_OF_DIGITS}, - #else - {"ODD_NUMBER_OF_DIGITS", 15, 103}, - #endif - #ifdef CT_R_BASE64_DECODE_ERROR - {"BASE64_DECODE_ERROR", ERR_LIB_CT, CT_R_BASE64_DECODE_ERROR}, - #else - {"BASE64_DECODE_ERROR", 50, 108}, - #endif - #ifdef CT_R_INVALID_LOG_ID_LENGTH - {"INVALID_LOG_ID_LENGTH", ERR_LIB_CT, CT_R_INVALID_LOG_ID_LENGTH}, - #else - {"INVALID_LOG_ID_LENGTH", 50, 100}, - #endif - #ifdef CT_R_LOG_CONF_INVALID - {"LOG_CONF_INVALID", ERR_LIB_CT, CT_R_LOG_CONF_INVALID}, - #else - {"LOG_CONF_INVALID", 50, 109}, - #endif - #ifdef CT_R_LOG_CONF_INVALID_KEY - {"LOG_CONF_INVALID_KEY", ERR_LIB_CT, CT_R_LOG_CONF_INVALID_KEY}, - #else - {"LOG_CONF_INVALID_KEY", 50, 110}, - #endif - #ifdef CT_R_LOG_CONF_MISSING_DESCRIPTION - {"LOG_CONF_MISSING_DESCRIPTION", ERR_LIB_CT, CT_R_LOG_CONF_MISSING_DESCRIPTION}, - #else - {"LOG_CONF_MISSING_DESCRIPTION", 50, 111}, - 
#endif - #ifdef CT_R_LOG_CONF_MISSING_KEY - {"LOG_CONF_MISSING_KEY", ERR_LIB_CT, CT_R_LOG_CONF_MISSING_KEY}, - #else - {"LOG_CONF_MISSING_KEY", 50, 112}, - #endif - #ifdef CT_R_LOG_KEY_INVALID - {"LOG_KEY_INVALID", ERR_LIB_CT, CT_R_LOG_KEY_INVALID}, - #else - {"LOG_KEY_INVALID", 50, 113}, - #endif - #ifdef CT_R_SCT_FUTURE_TIMESTAMP - {"SCT_FUTURE_TIMESTAMP", ERR_LIB_CT, CT_R_SCT_FUTURE_TIMESTAMP}, - #else - {"SCT_FUTURE_TIMESTAMP", 50, 116}, - #endif - #ifdef CT_R_SCT_INVALID - {"SCT_INVALID", ERR_LIB_CT, CT_R_SCT_INVALID}, - #else - {"SCT_INVALID", 50, 104}, - #endif - #ifdef CT_R_SCT_INVALID_SIGNATURE - {"SCT_INVALID_SIGNATURE", ERR_LIB_CT, CT_R_SCT_INVALID_SIGNATURE}, - #else - {"SCT_INVALID_SIGNATURE", 50, 107}, - #endif - #ifdef CT_R_SCT_LIST_INVALID - {"SCT_LIST_INVALID", ERR_LIB_CT, CT_R_SCT_LIST_INVALID}, - #else - {"SCT_LIST_INVALID", 50, 105}, - #endif - #ifdef CT_R_SCT_LOG_ID_MISMATCH - {"SCT_LOG_ID_MISMATCH", ERR_LIB_CT, CT_R_SCT_LOG_ID_MISMATCH}, - #else - {"SCT_LOG_ID_MISMATCH", 50, 114}, - #endif - #ifdef CT_R_SCT_NOT_SET - {"SCT_NOT_SET", ERR_LIB_CT, CT_R_SCT_NOT_SET}, - #else - {"SCT_NOT_SET", 50, 106}, - #endif - #ifdef CT_R_SCT_UNSUPPORTED_VERSION - {"SCT_UNSUPPORTED_VERSION", ERR_LIB_CT, CT_R_SCT_UNSUPPORTED_VERSION}, - #else - {"SCT_UNSUPPORTED_VERSION", 50, 115}, - #endif - #ifdef CT_R_UNRECOGNIZED_SIGNATURE_NID - {"UNRECOGNIZED_SIGNATURE_NID", ERR_LIB_CT, CT_R_UNRECOGNIZED_SIGNATURE_NID}, - #else - {"UNRECOGNIZED_SIGNATURE_NID", 50, 101}, - #endif - #ifdef CT_R_UNSUPPORTED_ENTRY_TYPE - {"UNSUPPORTED_ENTRY_TYPE", ERR_LIB_CT, CT_R_UNSUPPORTED_ENTRY_TYPE}, - #else - {"UNSUPPORTED_ENTRY_TYPE", 50, 102}, - #endif - #ifdef CT_R_UNSUPPORTED_VERSION - {"UNSUPPORTED_VERSION", ERR_LIB_CT, CT_R_UNSUPPORTED_VERSION}, - #else - {"UNSUPPORTED_VERSION", 50, 103}, - #endif - #ifdef DH_R_BAD_GENERATOR - {"BAD_GENERATOR", ERR_LIB_DH, DH_R_BAD_GENERATOR}, - #else - {"BAD_GENERATOR", 5, 101}, - #endif - #ifdef DH_R_BN_DECODE_ERROR - {"BN_DECODE_ERROR", ERR_LIB_DH, DH_R_BN_DECODE_ERROR}, - #else - {"BN_DECODE_ERROR", 5, 109}, - #endif - #ifdef DH_R_BN_ERROR - {"BN_ERROR", ERR_LIB_DH, DH_R_BN_ERROR}, - #else - {"BN_ERROR", 5, 106}, - #endif - #ifdef DH_R_CHECK_INVALID_J_VALUE - {"CHECK_INVALID_J_VALUE", ERR_LIB_DH, DH_R_CHECK_INVALID_J_VALUE}, - #else - {"CHECK_INVALID_J_VALUE", 5, 115}, - #endif - #ifdef DH_R_CHECK_INVALID_Q_VALUE - {"CHECK_INVALID_Q_VALUE", ERR_LIB_DH, DH_R_CHECK_INVALID_Q_VALUE}, - #else - {"CHECK_INVALID_Q_VALUE", 5, 116}, - #endif - #ifdef DH_R_CHECK_PUBKEY_INVALID - {"CHECK_PUBKEY_INVALID", ERR_LIB_DH, DH_R_CHECK_PUBKEY_INVALID}, - #else - {"CHECK_PUBKEY_INVALID", 5, 122}, - #endif - #ifdef DH_R_CHECK_PUBKEY_TOO_LARGE - {"CHECK_PUBKEY_TOO_LARGE", ERR_LIB_DH, DH_R_CHECK_PUBKEY_TOO_LARGE}, - #else - {"CHECK_PUBKEY_TOO_LARGE", 5, 123}, - #endif - #ifdef DH_R_CHECK_PUBKEY_TOO_SMALL - {"CHECK_PUBKEY_TOO_SMALL", ERR_LIB_DH, DH_R_CHECK_PUBKEY_TOO_SMALL}, - #else - {"CHECK_PUBKEY_TOO_SMALL", 5, 124}, - #endif - #ifdef DH_R_CHECK_P_NOT_PRIME - {"CHECK_P_NOT_PRIME", ERR_LIB_DH, DH_R_CHECK_P_NOT_PRIME}, - #else - {"CHECK_P_NOT_PRIME", 5, 117}, - #endif - #ifdef DH_R_CHECK_P_NOT_SAFE_PRIME - {"CHECK_P_NOT_SAFE_PRIME", ERR_LIB_DH, DH_R_CHECK_P_NOT_SAFE_PRIME}, - #else - {"CHECK_P_NOT_SAFE_PRIME", 5, 118}, - #endif - #ifdef DH_R_CHECK_Q_NOT_PRIME - {"CHECK_Q_NOT_PRIME", ERR_LIB_DH, DH_R_CHECK_Q_NOT_PRIME}, - #else - {"CHECK_Q_NOT_PRIME", 5, 119}, - #endif - #ifdef DH_R_DECODE_ERROR - {"DECODE_ERROR", ERR_LIB_DH, DH_R_DECODE_ERROR}, - #else - {"DECODE_ERROR", 5, 104}, - #endif - 
#ifdef DH_R_INVALID_PARAMETER_NAME - {"INVALID_PARAMETER_NAME", ERR_LIB_DH, DH_R_INVALID_PARAMETER_NAME}, - #else - {"INVALID_PARAMETER_NAME", 5, 110}, - #endif - #ifdef DH_R_INVALID_PARAMETER_NID - {"INVALID_PARAMETER_NID", ERR_LIB_DH, DH_R_INVALID_PARAMETER_NID}, - #else - {"INVALID_PARAMETER_NID", 5, 114}, - #endif - #ifdef DH_R_INVALID_PUBKEY - {"INVALID_PUBKEY", ERR_LIB_DH, DH_R_INVALID_PUBKEY}, - #else - {"INVALID_PUBKEY", 5, 102}, - #endif - #ifdef DH_R_KDF_PARAMETER_ERROR - {"KDF_PARAMETER_ERROR", ERR_LIB_DH, DH_R_KDF_PARAMETER_ERROR}, - #else - {"KDF_PARAMETER_ERROR", 5, 112}, - #endif - #ifdef DH_R_KEYS_NOT_SET - {"KEYS_NOT_SET", ERR_LIB_DH, DH_R_KEYS_NOT_SET}, - #else - {"KEYS_NOT_SET", 5, 108}, - #endif - #ifdef DH_R_MISSING_PUBKEY - {"MISSING_PUBKEY", ERR_LIB_DH, DH_R_MISSING_PUBKEY}, - #else - {"MISSING_PUBKEY", 5, 125}, - #endif - #ifdef DH_R_MODULUS_TOO_LARGE - {"MODULUS_TOO_LARGE", ERR_LIB_DH, DH_R_MODULUS_TOO_LARGE}, - #else - {"MODULUS_TOO_LARGE", 5, 103}, - #endif - #ifdef DH_R_NOT_SUITABLE_GENERATOR - {"NOT_SUITABLE_GENERATOR", ERR_LIB_DH, DH_R_NOT_SUITABLE_GENERATOR}, - #else - {"NOT_SUITABLE_GENERATOR", 5, 120}, - #endif - #ifdef DH_R_NO_PARAMETERS_SET - {"NO_PARAMETERS_SET", ERR_LIB_DH, DH_R_NO_PARAMETERS_SET}, - #else - {"NO_PARAMETERS_SET", 5, 107}, - #endif - #ifdef DH_R_NO_PRIVATE_VALUE - {"NO_PRIVATE_VALUE", ERR_LIB_DH, DH_R_NO_PRIVATE_VALUE}, - #else - {"NO_PRIVATE_VALUE", 5, 100}, - #endif - #ifdef DH_R_PARAMETER_ENCODING_ERROR - {"PARAMETER_ENCODING_ERROR", ERR_LIB_DH, DH_R_PARAMETER_ENCODING_ERROR}, - #else - {"PARAMETER_ENCODING_ERROR", 5, 105}, - #endif - #ifdef DH_R_PEER_KEY_ERROR - {"PEER_KEY_ERROR", ERR_LIB_DH, DH_R_PEER_KEY_ERROR}, - #else - {"PEER_KEY_ERROR", 5, 111}, - #endif - #ifdef DH_R_SHARED_INFO_ERROR - {"SHARED_INFO_ERROR", ERR_LIB_DH, DH_R_SHARED_INFO_ERROR}, - #else - {"SHARED_INFO_ERROR", 5, 113}, - #endif - #ifdef DH_R_UNABLE_TO_CHECK_GENERATOR - {"UNABLE_TO_CHECK_GENERATOR", ERR_LIB_DH, DH_R_UNABLE_TO_CHECK_GENERATOR}, - #else - {"UNABLE_TO_CHECK_GENERATOR", 5, 121}, - #endif - #ifdef DSA_R_BAD_Q_VALUE - {"BAD_Q_VALUE", ERR_LIB_DSA, DSA_R_BAD_Q_VALUE}, - #else - {"BAD_Q_VALUE", 10, 102}, - #endif - #ifdef DSA_R_BN_DECODE_ERROR - {"BN_DECODE_ERROR", ERR_LIB_DSA, DSA_R_BN_DECODE_ERROR}, - #else - {"BN_DECODE_ERROR", 10, 108}, - #endif - #ifdef DSA_R_BN_ERROR - {"BN_ERROR", ERR_LIB_DSA, DSA_R_BN_ERROR}, - #else - {"BN_ERROR", 10, 109}, - #endif - #ifdef DSA_R_DECODE_ERROR - {"DECODE_ERROR", ERR_LIB_DSA, DSA_R_DECODE_ERROR}, - #else - {"DECODE_ERROR", 10, 104}, - #endif - #ifdef DSA_R_INVALID_DIGEST_TYPE - {"INVALID_DIGEST_TYPE", ERR_LIB_DSA, DSA_R_INVALID_DIGEST_TYPE}, - #else - {"INVALID_DIGEST_TYPE", 10, 106}, - #endif - #ifdef DSA_R_INVALID_PARAMETERS - {"INVALID_PARAMETERS", ERR_LIB_DSA, DSA_R_INVALID_PARAMETERS}, - #else - {"INVALID_PARAMETERS", 10, 112}, - #endif - #ifdef DSA_R_MISSING_PARAMETERS - {"MISSING_PARAMETERS", ERR_LIB_DSA, DSA_R_MISSING_PARAMETERS}, - #else - {"MISSING_PARAMETERS", 10, 101}, - #endif - #ifdef DSA_R_MISSING_PRIVATE_KEY - {"MISSING_PRIVATE_KEY", ERR_LIB_DSA, DSA_R_MISSING_PRIVATE_KEY}, - #else - {"MISSING_PRIVATE_KEY", 10, 111}, - #endif - #ifdef DSA_R_MODULUS_TOO_LARGE - {"MODULUS_TOO_LARGE", ERR_LIB_DSA, DSA_R_MODULUS_TOO_LARGE}, - #else - {"MODULUS_TOO_LARGE", 10, 103}, - #endif - #ifdef DSA_R_NO_PARAMETERS_SET - {"NO_PARAMETERS_SET", ERR_LIB_DSA, DSA_R_NO_PARAMETERS_SET}, - #else - {"NO_PARAMETERS_SET", 10, 107}, - #endif - #ifdef DSA_R_PARAMETER_ENCODING_ERROR - {"PARAMETER_ENCODING_ERROR", ERR_LIB_DSA, 
DSA_R_PARAMETER_ENCODING_ERROR}, - #else - {"PARAMETER_ENCODING_ERROR", 10, 105}, - #endif - #ifdef DSA_R_Q_NOT_PRIME - {"Q_NOT_PRIME", ERR_LIB_DSA, DSA_R_Q_NOT_PRIME}, - #else - {"Q_NOT_PRIME", 10, 113}, - #endif - #ifdef DSA_R_SEED_LEN_SMALL - {"SEED_LEN_SMALL", ERR_LIB_DSA, DSA_R_SEED_LEN_SMALL}, - #else - {"SEED_LEN_SMALL", 10, 110}, - #endif - #ifdef EC_R_ASN1_ERROR - {"ASN1_ERROR", ERR_LIB_EC, EC_R_ASN1_ERROR}, - #else - {"ASN1_ERROR", 16, 115}, - #endif - #ifdef EC_R_BAD_SIGNATURE - {"BAD_SIGNATURE", ERR_LIB_EC, EC_R_BAD_SIGNATURE}, - #else - {"BAD_SIGNATURE", 16, 156}, - #endif - #ifdef EC_R_BIGNUM_OUT_OF_RANGE - {"BIGNUM_OUT_OF_RANGE", ERR_LIB_EC, EC_R_BIGNUM_OUT_OF_RANGE}, - #else - {"BIGNUM_OUT_OF_RANGE", 16, 144}, - #endif - #ifdef EC_R_BUFFER_TOO_SMALL - {"BUFFER_TOO_SMALL", ERR_LIB_EC, EC_R_BUFFER_TOO_SMALL}, - #else - {"BUFFER_TOO_SMALL", 16, 100}, - #endif - #ifdef EC_R_CANNOT_INVERT - {"CANNOT_INVERT", ERR_LIB_EC, EC_R_CANNOT_INVERT}, - #else - {"CANNOT_INVERT", 16, 165}, - #endif - #ifdef EC_R_COORDINATES_OUT_OF_RANGE - {"COORDINATES_OUT_OF_RANGE", ERR_LIB_EC, EC_R_COORDINATES_OUT_OF_RANGE}, - #else - {"COORDINATES_OUT_OF_RANGE", 16, 146}, - #endif - #ifdef EC_R_CURVE_DOES_NOT_SUPPORT_ECDH - {"CURVE_DOES_NOT_SUPPORT_ECDH", ERR_LIB_EC, EC_R_CURVE_DOES_NOT_SUPPORT_ECDH}, - #else - {"CURVE_DOES_NOT_SUPPORT_ECDH", 16, 160}, - #endif - #ifdef EC_R_CURVE_DOES_NOT_SUPPORT_SIGNING - {"CURVE_DOES_NOT_SUPPORT_SIGNING", ERR_LIB_EC, EC_R_CURVE_DOES_NOT_SUPPORT_SIGNING}, - #else - {"CURVE_DOES_NOT_SUPPORT_SIGNING", 16, 159}, - #endif - #ifdef EC_R_D2I_ECPKPARAMETERS_FAILURE - {"D2I_ECPKPARAMETERS_FAILURE", ERR_LIB_EC, EC_R_D2I_ECPKPARAMETERS_FAILURE}, - #else - {"D2I_ECPKPARAMETERS_FAILURE", 16, 117}, - #endif - #ifdef EC_R_DECODE_ERROR - {"DECODE_ERROR", ERR_LIB_EC, EC_R_DECODE_ERROR}, - #else - {"DECODE_ERROR", 16, 142}, - #endif - #ifdef EC_R_DISCRIMINANT_IS_ZERO - {"DISCRIMINANT_IS_ZERO", ERR_LIB_EC, EC_R_DISCRIMINANT_IS_ZERO}, - #else - {"DISCRIMINANT_IS_ZERO", 16, 118}, - #endif - #ifdef EC_R_EC_GROUP_NEW_BY_NAME_FAILURE - {"EC_GROUP_NEW_BY_NAME_FAILURE", ERR_LIB_EC, EC_R_EC_GROUP_NEW_BY_NAME_FAILURE}, - #else - {"EC_GROUP_NEW_BY_NAME_FAILURE", 16, 119}, - #endif - #ifdef EC_R_FIELD_TOO_LARGE - {"FIELD_TOO_LARGE", ERR_LIB_EC, EC_R_FIELD_TOO_LARGE}, - #else - {"FIELD_TOO_LARGE", 16, 143}, - #endif - #ifdef EC_R_GF2M_NOT_SUPPORTED - {"GF2M_NOT_SUPPORTED", ERR_LIB_EC, EC_R_GF2M_NOT_SUPPORTED}, - #else - {"GF2M_NOT_SUPPORTED", 16, 147}, - #endif - #ifdef EC_R_GROUP2PKPARAMETERS_FAILURE - {"GROUP2PKPARAMETERS_FAILURE", ERR_LIB_EC, EC_R_GROUP2PKPARAMETERS_FAILURE}, - #else - {"GROUP2PKPARAMETERS_FAILURE", 16, 120}, - #endif - #ifdef EC_R_I2D_ECPKPARAMETERS_FAILURE - {"I2D_ECPKPARAMETERS_FAILURE", ERR_LIB_EC, EC_R_I2D_ECPKPARAMETERS_FAILURE}, - #else - {"I2D_ECPKPARAMETERS_FAILURE", 16, 121}, - #endif - #ifdef EC_R_INCOMPATIBLE_OBJECTS - {"INCOMPATIBLE_OBJECTS", ERR_LIB_EC, EC_R_INCOMPATIBLE_OBJECTS}, - #else - {"INCOMPATIBLE_OBJECTS", 16, 101}, - #endif - #ifdef EC_R_INVALID_ARGUMENT - {"INVALID_ARGUMENT", ERR_LIB_EC, EC_R_INVALID_ARGUMENT}, - #else - {"INVALID_ARGUMENT", 16, 112}, - #endif - #ifdef EC_R_INVALID_COMPRESSED_POINT - {"INVALID_COMPRESSED_POINT", ERR_LIB_EC, EC_R_INVALID_COMPRESSED_POINT}, - #else - {"INVALID_COMPRESSED_POINT", 16, 110}, - #endif - #ifdef EC_R_INVALID_COMPRESSION_BIT - {"INVALID_COMPRESSION_BIT", ERR_LIB_EC, EC_R_INVALID_COMPRESSION_BIT}, - #else - {"INVALID_COMPRESSION_BIT", 16, 109}, - #endif - #ifdef EC_R_INVALID_CURVE - {"INVALID_CURVE", ERR_LIB_EC, 
EC_R_INVALID_CURVE}, - #else - {"INVALID_CURVE", 16, 141}, - #endif - #ifdef EC_R_INVALID_DIGEST - {"INVALID_DIGEST", ERR_LIB_EC, EC_R_INVALID_DIGEST}, - #else - {"INVALID_DIGEST", 16, 151}, - #endif - #ifdef EC_R_INVALID_DIGEST_TYPE - {"INVALID_DIGEST_TYPE", ERR_LIB_EC, EC_R_INVALID_DIGEST_TYPE}, - #else - {"INVALID_DIGEST_TYPE", 16, 138}, - #endif - #ifdef EC_R_INVALID_ENCODING - {"INVALID_ENCODING", ERR_LIB_EC, EC_R_INVALID_ENCODING}, - #else - {"INVALID_ENCODING", 16, 102}, - #endif - #ifdef EC_R_INVALID_FIELD - {"INVALID_FIELD", ERR_LIB_EC, EC_R_INVALID_FIELD}, - #else - {"INVALID_FIELD", 16, 103}, - #endif - #ifdef EC_R_INVALID_FORM - {"INVALID_FORM", ERR_LIB_EC, EC_R_INVALID_FORM}, - #else - {"INVALID_FORM", 16, 104}, - #endif - #ifdef EC_R_INVALID_GROUP_ORDER - {"INVALID_GROUP_ORDER", ERR_LIB_EC, EC_R_INVALID_GROUP_ORDER}, - #else - {"INVALID_GROUP_ORDER", 16, 122}, - #endif - #ifdef EC_R_INVALID_KEY - {"INVALID_KEY", ERR_LIB_EC, EC_R_INVALID_KEY}, - #else - {"INVALID_KEY", 16, 116}, - #endif - #ifdef EC_R_INVALID_OUTPUT_LENGTH - {"INVALID_OUTPUT_LENGTH", ERR_LIB_EC, EC_R_INVALID_OUTPUT_LENGTH}, - #else - {"INVALID_OUTPUT_LENGTH", 16, 161}, - #endif - #ifdef EC_R_INVALID_PEER_KEY - {"INVALID_PEER_KEY", ERR_LIB_EC, EC_R_INVALID_PEER_KEY}, - #else - {"INVALID_PEER_KEY", 16, 133}, - #endif - #ifdef EC_R_INVALID_PENTANOMIAL_BASIS - {"INVALID_PENTANOMIAL_BASIS", ERR_LIB_EC, EC_R_INVALID_PENTANOMIAL_BASIS}, - #else - {"INVALID_PENTANOMIAL_BASIS", 16, 132}, - #endif - #ifdef EC_R_INVALID_PRIVATE_KEY - {"INVALID_PRIVATE_KEY", ERR_LIB_EC, EC_R_INVALID_PRIVATE_KEY}, - #else - {"INVALID_PRIVATE_KEY", 16, 123}, - #endif - #ifdef EC_R_INVALID_TRINOMIAL_BASIS - {"INVALID_TRINOMIAL_BASIS", ERR_LIB_EC, EC_R_INVALID_TRINOMIAL_BASIS}, - #else - {"INVALID_TRINOMIAL_BASIS", 16, 137}, - #endif - #ifdef EC_R_KDF_PARAMETER_ERROR - {"KDF_PARAMETER_ERROR", ERR_LIB_EC, EC_R_KDF_PARAMETER_ERROR}, - #else - {"KDF_PARAMETER_ERROR", 16, 148}, - #endif - #ifdef EC_R_KEYS_NOT_SET - {"KEYS_NOT_SET", ERR_LIB_EC, EC_R_KEYS_NOT_SET}, - #else - {"KEYS_NOT_SET", 16, 140}, - #endif - #ifdef EC_R_LADDER_POST_FAILURE - {"LADDER_POST_FAILURE", ERR_LIB_EC, EC_R_LADDER_POST_FAILURE}, - #else - {"LADDER_POST_FAILURE", 16, 136}, - #endif - #ifdef EC_R_LADDER_PRE_FAILURE - {"LADDER_PRE_FAILURE", ERR_LIB_EC, EC_R_LADDER_PRE_FAILURE}, - #else - {"LADDER_PRE_FAILURE", 16, 153}, - #endif - #ifdef EC_R_LADDER_STEP_FAILURE - {"LADDER_STEP_FAILURE", ERR_LIB_EC, EC_R_LADDER_STEP_FAILURE}, - #else - {"LADDER_STEP_FAILURE", 16, 162}, - #endif - #ifdef EC_R_MISSING_PARAMETERS - {"MISSING_PARAMETERS", ERR_LIB_EC, EC_R_MISSING_PARAMETERS}, - #else - {"MISSING_PARAMETERS", 16, 124}, - #endif - #ifdef EC_R_MISSING_PRIVATE_KEY - {"MISSING_PRIVATE_KEY", ERR_LIB_EC, EC_R_MISSING_PRIVATE_KEY}, - #else - {"MISSING_PRIVATE_KEY", 16, 125}, - #endif - #ifdef EC_R_NEED_NEW_SETUP_VALUES - {"NEED_NEW_SETUP_VALUES", ERR_LIB_EC, EC_R_NEED_NEW_SETUP_VALUES}, - #else - {"NEED_NEW_SETUP_VALUES", 16, 157}, - #endif - #ifdef EC_R_NOT_A_NIST_PRIME - {"NOT_A_NIST_PRIME", ERR_LIB_EC, EC_R_NOT_A_NIST_PRIME}, - #else - {"NOT_A_NIST_PRIME", 16, 135}, - #endif - #ifdef EC_R_NOT_IMPLEMENTED - {"NOT_IMPLEMENTED", ERR_LIB_EC, EC_R_NOT_IMPLEMENTED}, - #else - {"NOT_IMPLEMENTED", 16, 126}, - #endif - #ifdef EC_R_NOT_INITIALIZED - {"NOT_INITIALIZED", ERR_LIB_EC, EC_R_NOT_INITIALIZED}, - #else - {"NOT_INITIALIZED", 16, 111}, - #endif - #ifdef EC_R_NO_PARAMETERS_SET - {"NO_PARAMETERS_SET", ERR_LIB_EC, EC_R_NO_PARAMETERS_SET}, - #else - {"NO_PARAMETERS_SET", 16, 139}, - 
#endif - #ifdef EC_R_NO_PRIVATE_VALUE - {"NO_PRIVATE_VALUE", ERR_LIB_EC, EC_R_NO_PRIVATE_VALUE}, - #else - {"NO_PRIVATE_VALUE", 16, 154}, - #endif - #ifdef EC_R_OPERATION_NOT_SUPPORTED - {"OPERATION_NOT_SUPPORTED", ERR_LIB_EC, EC_R_OPERATION_NOT_SUPPORTED}, - #else - {"OPERATION_NOT_SUPPORTED", 16, 152}, - #endif - #ifdef EC_R_PASSED_NULL_PARAMETER - {"PASSED_NULL_PARAMETER", ERR_LIB_EC, EC_R_PASSED_NULL_PARAMETER}, - #else - {"PASSED_NULL_PARAMETER", 16, 134}, - #endif - #ifdef EC_R_PEER_KEY_ERROR - {"PEER_KEY_ERROR", ERR_LIB_EC, EC_R_PEER_KEY_ERROR}, - #else - {"PEER_KEY_ERROR", 16, 149}, - #endif - #ifdef EC_R_PKPARAMETERS2GROUP_FAILURE - {"PKPARAMETERS2GROUP_FAILURE", ERR_LIB_EC, EC_R_PKPARAMETERS2GROUP_FAILURE}, - #else - {"PKPARAMETERS2GROUP_FAILURE", 16, 127}, - #endif - #ifdef EC_R_POINT_ARITHMETIC_FAILURE - {"POINT_ARITHMETIC_FAILURE", ERR_LIB_EC, EC_R_POINT_ARITHMETIC_FAILURE}, - #else - {"POINT_ARITHMETIC_FAILURE", 16, 155}, - #endif - #ifdef EC_R_POINT_AT_INFINITY - {"POINT_AT_INFINITY", ERR_LIB_EC, EC_R_POINT_AT_INFINITY}, - #else - {"POINT_AT_INFINITY", 16, 106}, - #endif - #ifdef EC_R_POINT_COORDINATES_BLIND_FAILURE - {"POINT_COORDINATES_BLIND_FAILURE", ERR_LIB_EC, EC_R_POINT_COORDINATES_BLIND_FAILURE}, - #else - {"POINT_COORDINATES_BLIND_FAILURE", 16, 163}, - #endif - #ifdef EC_R_POINT_IS_NOT_ON_CURVE - {"POINT_IS_NOT_ON_CURVE", ERR_LIB_EC, EC_R_POINT_IS_NOT_ON_CURVE}, - #else - {"POINT_IS_NOT_ON_CURVE", 16, 107}, - #endif - #ifdef EC_R_RANDOM_NUMBER_GENERATION_FAILED - {"RANDOM_NUMBER_GENERATION_FAILED", ERR_LIB_EC, EC_R_RANDOM_NUMBER_GENERATION_FAILED}, - #else - {"RANDOM_NUMBER_GENERATION_FAILED", 16, 158}, - #endif - #ifdef EC_R_SHARED_INFO_ERROR - {"SHARED_INFO_ERROR", ERR_LIB_EC, EC_R_SHARED_INFO_ERROR}, - #else - {"SHARED_INFO_ERROR", 16, 150}, - #endif - #ifdef EC_R_SLOT_FULL - {"SLOT_FULL", ERR_LIB_EC, EC_R_SLOT_FULL}, - #else - {"SLOT_FULL", 16, 108}, - #endif - #ifdef EC_R_UNDEFINED_GENERATOR - {"UNDEFINED_GENERATOR", ERR_LIB_EC, EC_R_UNDEFINED_GENERATOR}, - #else - {"UNDEFINED_GENERATOR", 16, 113}, - #endif - #ifdef EC_R_UNDEFINED_ORDER - {"UNDEFINED_ORDER", ERR_LIB_EC, EC_R_UNDEFINED_ORDER}, - #else - {"UNDEFINED_ORDER", 16, 128}, - #endif - #ifdef EC_R_UNKNOWN_COFACTOR - {"UNKNOWN_COFACTOR", ERR_LIB_EC, EC_R_UNKNOWN_COFACTOR}, - #else - {"UNKNOWN_COFACTOR", 16, 164}, - #endif - #ifdef EC_R_UNKNOWN_GROUP - {"UNKNOWN_GROUP", ERR_LIB_EC, EC_R_UNKNOWN_GROUP}, - #else - {"UNKNOWN_GROUP", 16, 129}, - #endif - #ifdef EC_R_UNKNOWN_ORDER - {"UNKNOWN_ORDER", ERR_LIB_EC, EC_R_UNKNOWN_ORDER}, - #else - {"UNKNOWN_ORDER", 16, 114}, - #endif - #ifdef EC_R_UNSUPPORTED_FIELD - {"UNSUPPORTED_FIELD", ERR_LIB_EC, EC_R_UNSUPPORTED_FIELD}, - #else - {"UNSUPPORTED_FIELD", 16, 131}, - #endif - #ifdef EC_R_WRONG_CURVE_PARAMETERS - {"WRONG_CURVE_PARAMETERS", ERR_LIB_EC, EC_R_WRONG_CURVE_PARAMETERS}, - #else - {"WRONG_CURVE_PARAMETERS", 16, 145}, - #endif - #ifdef EC_R_WRONG_ORDER - {"WRONG_ORDER", ERR_LIB_EC, EC_R_WRONG_ORDER}, - #else - {"WRONG_ORDER", 16, 130}, - #endif - #ifdef ENGINE_R_ALREADY_LOADED - {"ALREADY_LOADED", ERR_LIB_ENGINE, ENGINE_R_ALREADY_LOADED}, - #else - {"ALREADY_LOADED", 38, 100}, - #endif - #ifdef ENGINE_R_ARGUMENT_IS_NOT_A_NUMBER - {"ARGUMENT_IS_NOT_A_NUMBER", ERR_LIB_ENGINE, ENGINE_R_ARGUMENT_IS_NOT_A_NUMBER}, - #else - {"ARGUMENT_IS_NOT_A_NUMBER", 38, 133}, - #endif - #ifdef ENGINE_R_CMD_NOT_EXECUTABLE - {"CMD_NOT_EXECUTABLE", ERR_LIB_ENGINE, ENGINE_R_CMD_NOT_EXECUTABLE}, - #else - {"CMD_NOT_EXECUTABLE", 38, 134}, - #endif - #ifdef 
ENGINE_R_COMMAND_TAKES_INPUT - {"COMMAND_TAKES_INPUT", ERR_LIB_ENGINE, ENGINE_R_COMMAND_TAKES_INPUT}, - #else - {"COMMAND_TAKES_INPUT", 38, 135}, - #endif - #ifdef ENGINE_R_COMMAND_TAKES_NO_INPUT - {"COMMAND_TAKES_NO_INPUT", ERR_LIB_ENGINE, ENGINE_R_COMMAND_TAKES_NO_INPUT}, - #else - {"COMMAND_TAKES_NO_INPUT", 38, 136}, - #endif - #ifdef ENGINE_R_CONFLICTING_ENGINE_ID - {"CONFLICTING_ENGINE_ID", ERR_LIB_ENGINE, ENGINE_R_CONFLICTING_ENGINE_ID}, - #else - {"CONFLICTING_ENGINE_ID", 38, 103}, - #endif - #ifdef ENGINE_R_CTRL_COMMAND_NOT_IMPLEMENTED - {"CTRL_COMMAND_NOT_IMPLEMENTED", ERR_LIB_ENGINE, ENGINE_R_CTRL_COMMAND_NOT_IMPLEMENTED}, - #else - {"CTRL_COMMAND_NOT_IMPLEMENTED", 38, 119}, - #endif - #ifdef ENGINE_R_DSO_FAILURE - {"DSO_FAILURE", ERR_LIB_ENGINE, ENGINE_R_DSO_FAILURE}, - #else - {"DSO_FAILURE", 38, 104}, - #endif - #ifdef ENGINE_R_DSO_NOT_FOUND - {"DSO_NOT_FOUND", ERR_LIB_ENGINE, ENGINE_R_DSO_NOT_FOUND}, - #else - {"DSO_NOT_FOUND", 38, 132}, - #endif - #ifdef ENGINE_R_ENGINES_SECTION_ERROR - {"ENGINES_SECTION_ERROR", ERR_LIB_ENGINE, ENGINE_R_ENGINES_SECTION_ERROR}, - #else - {"ENGINES_SECTION_ERROR", 38, 148}, - #endif - #ifdef ENGINE_R_ENGINE_CONFIGURATION_ERROR - {"ENGINE_CONFIGURATION_ERROR", ERR_LIB_ENGINE, ENGINE_R_ENGINE_CONFIGURATION_ERROR}, - #else - {"ENGINE_CONFIGURATION_ERROR", 38, 102}, - #endif - #ifdef ENGINE_R_ENGINE_IS_NOT_IN_LIST - {"ENGINE_IS_NOT_IN_LIST", ERR_LIB_ENGINE, ENGINE_R_ENGINE_IS_NOT_IN_LIST}, - #else - {"ENGINE_IS_NOT_IN_LIST", 38, 105}, - #endif - #ifdef ENGINE_R_ENGINE_SECTION_ERROR - {"ENGINE_SECTION_ERROR", ERR_LIB_ENGINE, ENGINE_R_ENGINE_SECTION_ERROR}, - #else - {"ENGINE_SECTION_ERROR", 38, 149}, - #endif - #ifdef ENGINE_R_FAILED_LOADING_PRIVATE_KEY - {"FAILED_LOADING_PRIVATE_KEY", ERR_LIB_ENGINE, ENGINE_R_FAILED_LOADING_PRIVATE_KEY}, - #else - {"FAILED_LOADING_PRIVATE_KEY", 38, 128}, - #endif - #ifdef ENGINE_R_FAILED_LOADING_PUBLIC_KEY - {"FAILED_LOADING_PUBLIC_KEY", ERR_LIB_ENGINE, ENGINE_R_FAILED_LOADING_PUBLIC_KEY}, - #else - {"FAILED_LOADING_PUBLIC_KEY", 38, 129}, - #endif - #ifdef ENGINE_R_FINISH_FAILED - {"FINISH_FAILED", ERR_LIB_ENGINE, ENGINE_R_FINISH_FAILED}, - #else - {"FINISH_FAILED", 38, 106}, - #endif - #ifdef ENGINE_R_ID_OR_NAME_MISSING - {"ID_OR_NAME_MISSING", ERR_LIB_ENGINE, ENGINE_R_ID_OR_NAME_MISSING}, - #else - {"ID_OR_NAME_MISSING", 38, 108}, - #endif - #ifdef ENGINE_R_INIT_FAILED - {"INIT_FAILED", ERR_LIB_ENGINE, ENGINE_R_INIT_FAILED}, - #else - {"INIT_FAILED", 38, 109}, - #endif - #ifdef ENGINE_R_INTERNAL_LIST_ERROR - {"INTERNAL_LIST_ERROR", ERR_LIB_ENGINE, ENGINE_R_INTERNAL_LIST_ERROR}, - #else - {"INTERNAL_LIST_ERROR", 38, 110}, - #endif - #ifdef ENGINE_R_INVALID_ARGUMENT - {"INVALID_ARGUMENT", ERR_LIB_ENGINE, ENGINE_R_INVALID_ARGUMENT}, - #else - {"INVALID_ARGUMENT", 38, 143}, - #endif - #ifdef ENGINE_R_INVALID_CMD_NAME - {"INVALID_CMD_NAME", ERR_LIB_ENGINE, ENGINE_R_INVALID_CMD_NAME}, - #else - {"INVALID_CMD_NAME", 38, 137}, - #endif - #ifdef ENGINE_R_INVALID_CMD_NUMBER - {"INVALID_CMD_NUMBER", ERR_LIB_ENGINE, ENGINE_R_INVALID_CMD_NUMBER}, - #else - {"INVALID_CMD_NUMBER", 38, 138}, - #endif - #ifdef ENGINE_R_INVALID_INIT_VALUE - {"INVALID_INIT_VALUE", ERR_LIB_ENGINE, ENGINE_R_INVALID_INIT_VALUE}, - #else - {"INVALID_INIT_VALUE", 38, 151}, - #endif - #ifdef ENGINE_R_INVALID_STRING - {"INVALID_STRING", ERR_LIB_ENGINE, ENGINE_R_INVALID_STRING}, - #else - {"INVALID_STRING", 38, 150}, - #endif - #ifdef ENGINE_R_NOT_INITIALISED - {"NOT_INITIALISED", ERR_LIB_ENGINE, ENGINE_R_NOT_INITIALISED}, - #else - {"NOT_INITIALISED", 
38, 117}, - #endif - #ifdef ENGINE_R_NOT_LOADED - {"NOT_LOADED", ERR_LIB_ENGINE, ENGINE_R_NOT_LOADED}, - #else - {"NOT_LOADED", 38, 112}, - #endif - #ifdef ENGINE_R_NO_CONTROL_FUNCTION - {"NO_CONTROL_FUNCTION", ERR_LIB_ENGINE, ENGINE_R_NO_CONTROL_FUNCTION}, - #else - {"NO_CONTROL_FUNCTION", 38, 120}, - #endif - #ifdef ENGINE_R_NO_INDEX - {"NO_INDEX", ERR_LIB_ENGINE, ENGINE_R_NO_INDEX}, - #else - {"NO_INDEX", 38, 144}, - #endif - #ifdef ENGINE_R_NO_LOAD_FUNCTION - {"NO_LOAD_FUNCTION", ERR_LIB_ENGINE, ENGINE_R_NO_LOAD_FUNCTION}, - #else - {"NO_LOAD_FUNCTION", 38, 125}, - #endif - #ifdef ENGINE_R_NO_REFERENCE - {"NO_REFERENCE", ERR_LIB_ENGINE, ENGINE_R_NO_REFERENCE}, - #else - {"NO_REFERENCE", 38, 130}, - #endif - #ifdef ENGINE_R_NO_SUCH_ENGINE - {"NO_SUCH_ENGINE", ERR_LIB_ENGINE, ENGINE_R_NO_SUCH_ENGINE}, - #else - {"NO_SUCH_ENGINE", 38, 116}, - #endif - #ifdef ENGINE_R_UNIMPLEMENTED_CIPHER - {"UNIMPLEMENTED_CIPHER", ERR_LIB_ENGINE, ENGINE_R_UNIMPLEMENTED_CIPHER}, - #else - {"UNIMPLEMENTED_CIPHER", 38, 146}, - #endif - #ifdef ENGINE_R_UNIMPLEMENTED_DIGEST - {"UNIMPLEMENTED_DIGEST", ERR_LIB_ENGINE, ENGINE_R_UNIMPLEMENTED_DIGEST}, - #else - {"UNIMPLEMENTED_DIGEST", 38, 147}, - #endif - #ifdef ENGINE_R_UNIMPLEMENTED_PUBLIC_KEY_METHOD - {"UNIMPLEMENTED_PUBLIC_KEY_METHOD", ERR_LIB_ENGINE, ENGINE_R_UNIMPLEMENTED_PUBLIC_KEY_METHOD}, - #else - {"UNIMPLEMENTED_PUBLIC_KEY_METHOD", 38, 101}, - #endif - #ifdef ENGINE_R_VERSION_INCOMPATIBILITY - {"VERSION_INCOMPATIBILITY", ERR_LIB_ENGINE, ENGINE_R_VERSION_INCOMPATIBILITY}, - #else - {"VERSION_INCOMPATIBILITY", 38, 145}, - #endif - #ifdef EVP_R_AES_KEY_SETUP_FAILED - {"AES_KEY_SETUP_FAILED", ERR_LIB_EVP, EVP_R_AES_KEY_SETUP_FAILED}, - #else - {"AES_KEY_SETUP_FAILED", 6, 143}, - #endif - #ifdef EVP_R_ARIA_KEY_SETUP_FAILED - {"ARIA_KEY_SETUP_FAILED", ERR_LIB_EVP, EVP_R_ARIA_KEY_SETUP_FAILED}, - #else - {"ARIA_KEY_SETUP_FAILED", 6, 176}, - #endif - #ifdef EVP_R_BAD_DECRYPT - {"BAD_DECRYPT", ERR_LIB_EVP, EVP_R_BAD_DECRYPT}, - #else - {"BAD_DECRYPT", 6, 100}, - #endif - #ifdef EVP_R_BAD_KEY_LENGTH - {"BAD_KEY_LENGTH", ERR_LIB_EVP, EVP_R_BAD_KEY_LENGTH}, - #else - {"BAD_KEY_LENGTH", 6, 195}, - #endif - #ifdef EVP_R_BUFFER_TOO_SMALL - {"BUFFER_TOO_SMALL", ERR_LIB_EVP, EVP_R_BUFFER_TOO_SMALL}, - #else - {"BUFFER_TOO_SMALL", 6, 155}, - #endif - #ifdef EVP_R_CAMELLIA_KEY_SETUP_FAILED - {"CAMELLIA_KEY_SETUP_FAILED", ERR_LIB_EVP, EVP_R_CAMELLIA_KEY_SETUP_FAILED}, - #else - {"CAMELLIA_KEY_SETUP_FAILED", 6, 157}, - #endif - #ifdef EVP_R_CIPHER_PARAMETER_ERROR - {"CIPHER_PARAMETER_ERROR", ERR_LIB_EVP, EVP_R_CIPHER_PARAMETER_ERROR}, - #else - {"CIPHER_PARAMETER_ERROR", 6, 122}, - #endif - #ifdef EVP_R_COMMAND_NOT_SUPPORTED - {"COMMAND_NOT_SUPPORTED", ERR_LIB_EVP, EVP_R_COMMAND_NOT_SUPPORTED}, - #else - {"COMMAND_NOT_SUPPORTED", 6, 147}, - #endif - #ifdef EVP_R_COPY_ERROR - {"COPY_ERROR", ERR_LIB_EVP, EVP_R_COPY_ERROR}, - #else - {"COPY_ERROR", 6, 173}, - #endif - #ifdef EVP_R_CTRL_NOT_IMPLEMENTED - {"CTRL_NOT_IMPLEMENTED", ERR_LIB_EVP, EVP_R_CTRL_NOT_IMPLEMENTED}, - #else - {"CTRL_NOT_IMPLEMENTED", 6, 132}, - #endif - #ifdef EVP_R_CTRL_OPERATION_NOT_IMPLEMENTED - {"CTRL_OPERATION_NOT_IMPLEMENTED", ERR_LIB_EVP, EVP_R_CTRL_OPERATION_NOT_IMPLEMENTED}, - #else - {"CTRL_OPERATION_NOT_IMPLEMENTED", 6, 133}, - #endif - #ifdef EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH - {"DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH", ERR_LIB_EVP, EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH}, - #else - {"DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH", 6, 138}, - #endif - #ifdef EVP_R_DECODE_ERROR - {"DECODE_ERROR", 
ERR_LIB_EVP, EVP_R_DECODE_ERROR}, - #else - {"DECODE_ERROR", 6, 114}, - #endif - #ifdef EVP_R_DIFFERENT_KEY_TYPES - {"DIFFERENT_KEY_TYPES", ERR_LIB_EVP, EVP_R_DIFFERENT_KEY_TYPES}, - #else - {"DIFFERENT_KEY_TYPES", 6, 101}, - #endif - #ifdef EVP_R_DIFFERENT_PARAMETERS - {"DIFFERENT_PARAMETERS", ERR_LIB_EVP, EVP_R_DIFFERENT_PARAMETERS}, - #else - {"DIFFERENT_PARAMETERS", 6, 153}, - #endif - #ifdef EVP_R_ERROR_LOADING_SECTION - {"ERROR_LOADING_SECTION", ERR_LIB_EVP, EVP_R_ERROR_LOADING_SECTION}, - #else - {"ERROR_LOADING_SECTION", 6, 165}, - #endif - #ifdef EVP_R_ERROR_SETTING_FIPS_MODE - {"ERROR_SETTING_FIPS_MODE", ERR_LIB_EVP, EVP_R_ERROR_SETTING_FIPS_MODE}, - #else - {"ERROR_SETTING_FIPS_MODE", 6, 166}, - #endif - #ifdef EVP_R_EXPECTING_AN_HMAC_KEY - {"EXPECTING_AN_HMAC_KEY", ERR_LIB_EVP, EVP_R_EXPECTING_AN_HMAC_KEY}, - #else - {"EXPECTING_AN_HMAC_KEY", 6, 174}, - #endif - #ifdef EVP_R_EXPECTING_AN_RSA_KEY - {"EXPECTING_AN_RSA_KEY", ERR_LIB_EVP, EVP_R_EXPECTING_AN_RSA_KEY}, - #else - {"EXPECTING_AN_RSA_KEY", 6, 127}, - #endif - #ifdef EVP_R_EXPECTING_A_DH_KEY - {"EXPECTING_A_DH_KEY", ERR_LIB_EVP, EVP_R_EXPECTING_A_DH_KEY}, - #else - {"EXPECTING_A_DH_KEY", 6, 128}, - #endif - #ifdef EVP_R_EXPECTING_A_DSA_KEY - {"EXPECTING_A_DSA_KEY", ERR_LIB_EVP, EVP_R_EXPECTING_A_DSA_KEY}, - #else - {"EXPECTING_A_DSA_KEY", 6, 129}, - #endif - #ifdef EVP_R_EXPECTING_A_EC_KEY - {"EXPECTING_A_EC_KEY", ERR_LIB_EVP, EVP_R_EXPECTING_A_EC_KEY}, - #else - {"EXPECTING_A_EC_KEY", 6, 142}, - #endif - #ifdef EVP_R_EXPECTING_A_POLY1305_KEY - {"EXPECTING_A_POLY1305_KEY", ERR_LIB_EVP, EVP_R_EXPECTING_A_POLY1305_KEY}, - #else - {"EXPECTING_A_POLY1305_KEY", 6, 164}, - #endif - #ifdef EVP_R_EXPECTING_A_SIPHASH_KEY - {"EXPECTING_A_SIPHASH_KEY", ERR_LIB_EVP, EVP_R_EXPECTING_A_SIPHASH_KEY}, - #else - {"EXPECTING_A_SIPHASH_KEY", 6, 175}, - #endif - #ifdef EVP_R_FIPS_MODE_NOT_SUPPORTED - {"FIPS_MODE_NOT_SUPPORTED", ERR_LIB_EVP, EVP_R_FIPS_MODE_NOT_SUPPORTED}, - #else - {"FIPS_MODE_NOT_SUPPORTED", 6, 167}, - #endif - #ifdef EVP_R_GET_RAW_KEY_FAILED - {"GET_RAW_KEY_FAILED", ERR_LIB_EVP, EVP_R_GET_RAW_KEY_FAILED}, - #else - {"GET_RAW_KEY_FAILED", 6, 182}, - #endif - #ifdef EVP_R_ILLEGAL_SCRYPT_PARAMETERS - {"ILLEGAL_SCRYPT_PARAMETERS", ERR_LIB_EVP, EVP_R_ILLEGAL_SCRYPT_PARAMETERS}, - #else - {"ILLEGAL_SCRYPT_PARAMETERS", 6, 171}, - #endif - #ifdef EVP_R_INITIALIZATION_ERROR - {"INITIALIZATION_ERROR", ERR_LIB_EVP, EVP_R_INITIALIZATION_ERROR}, - #else - {"INITIALIZATION_ERROR", 6, 134}, - #endif - #ifdef EVP_R_INPUT_NOT_INITIALIZED - {"INPUT_NOT_INITIALIZED", ERR_LIB_EVP, EVP_R_INPUT_NOT_INITIALIZED}, - #else - {"INPUT_NOT_INITIALIZED", 6, 111}, - #endif - #ifdef EVP_R_INVALID_DIGEST - {"INVALID_DIGEST", ERR_LIB_EVP, EVP_R_INVALID_DIGEST}, - #else - {"INVALID_DIGEST", 6, 152}, - #endif - #ifdef EVP_R_INVALID_FIPS_MODE - {"INVALID_FIPS_MODE", ERR_LIB_EVP, EVP_R_INVALID_FIPS_MODE}, - #else - {"INVALID_FIPS_MODE", 6, 168}, - #endif - #ifdef EVP_R_INVALID_IV_LENGTH - {"INVALID_IV_LENGTH", ERR_LIB_EVP, EVP_R_INVALID_IV_LENGTH}, - #else - {"INVALID_IV_LENGTH", 6, 194}, - #endif - #ifdef EVP_R_INVALID_KEY - {"INVALID_KEY", ERR_LIB_EVP, EVP_R_INVALID_KEY}, - #else - {"INVALID_KEY", 6, 163}, - #endif - #ifdef EVP_R_INVALID_KEY_LENGTH - {"INVALID_KEY_LENGTH", ERR_LIB_EVP, EVP_R_INVALID_KEY_LENGTH}, - #else - {"INVALID_KEY_LENGTH", 6, 130}, - #endif - #ifdef EVP_R_INVALID_OPERATION - {"INVALID_OPERATION", ERR_LIB_EVP, EVP_R_INVALID_OPERATION}, - #else - {"INVALID_OPERATION", 6, 148}, - #endif - #ifdef EVP_R_KEYGEN_FAILURE - {"KEYGEN_FAILURE", 
ERR_LIB_EVP, EVP_R_KEYGEN_FAILURE}, - #else - {"KEYGEN_FAILURE", 6, 120}, - #endif - #ifdef EVP_R_KEY_SETUP_FAILED - {"KEY_SETUP_FAILED", ERR_LIB_EVP, EVP_R_KEY_SETUP_FAILED}, - #else - {"KEY_SETUP_FAILED", 6, 180}, - #endif - #ifdef EVP_R_MEMORY_LIMIT_EXCEEDED - {"MEMORY_LIMIT_EXCEEDED", ERR_LIB_EVP, EVP_R_MEMORY_LIMIT_EXCEEDED}, - #else - {"MEMORY_LIMIT_EXCEEDED", 6, 172}, - #endif - #ifdef EVP_R_MESSAGE_DIGEST_IS_NULL - {"MESSAGE_DIGEST_IS_NULL", ERR_LIB_EVP, EVP_R_MESSAGE_DIGEST_IS_NULL}, - #else - {"MESSAGE_DIGEST_IS_NULL", 6, 159}, - #endif - #ifdef EVP_R_METHOD_NOT_SUPPORTED - {"METHOD_NOT_SUPPORTED", ERR_LIB_EVP, EVP_R_METHOD_NOT_SUPPORTED}, - #else - {"METHOD_NOT_SUPPORTED", 6, 144}, - #endif - #ifdef EVP_R_MISSING_PARAMETERS - {"MISSING_PARAMETERS", ERR_LIB_EVP, EVP_R_MISSING_PARAMETERS}, - #else - {"MISSING_PARAMETERS", 6, 103}, - #endif - #ifdef EVP_R_NOT_XOF_OR_INVALID_LENGTH - {"NOT_XOF_OR_INVALID_LENGTH", ERR_LIB_EVP, EVP_R_NOT_XOF_OR_INVALID_LENGTH}, - #else - {"NOT_XOF_OR_INVALID_LENGTH", 6, 178}, - #endif - #ifdef EVP_R_NO_CIPHER_SET - {"NO_CIPHER_SET", ERR_LIB_EVP, EVP_R_NO_CIPHER_SET}, - #else - {"NO_CIPHER_SET", 6, 131}, - #endif - #ifdef EVP_R_NO_DEFAULT_DIGEST - {"NO_DEFAULT_DIGEST", ERR_LIB_EVP, EVP_R_NO_DEFAULT_DIGEST}, - #else - {"NO_DEFAULT_DIGEST", 6, 158}, - #endif - #ifdef EVP_R_NO_DIGEST_SET - {"NO_DIGEST_SET", ERR_LIB_EVP, EVP_R_NO_DIGEST_SET}, - #else - {"NO_DIGEST_SET", 6, 139}, - #endif - #ifdef EVP_R_NO_KEY_SET - {"NO_KEY_SET", ERR_LIB_EVP, EVP_R_NO_KEY_SET}, - #else - {"NO_KEY_SET", 6, 154}, - #endif - #ifdef EVP_R_NO_OPERATION_SET - {"NO_OPERATION_SET", ERR_LIB_EVP, EVP_R_NO_OPERATION_SET}, - #else - {"NO_OPERATION_SET", 6, 149}, - #endif - #ifdef EVP_R_ONLY_ONESHOT_SUPPORTED - {"ONLY_ONESHOT_SUPPORTED", ERR_LIB_EVP, EVP_R_ONLY_ONESHOT_SUPPORTED}, - #else - {"ONLY_ONESHOT_SUPPORTED", 6, 177}, - #endif - #ifdef EVP_R_OPERATION_NOT_SUPPORTED_FOR_THIS_KEYTYPE - {"OPERATION_NOT_SUPPORTED_FOR_THIS_KEYTYPE", ERR_LIB_EVP, EVP_R_OPERATION_NOT_SUPPORTED_FOR_THIS_KEYTYPE}, - #else - {"OPERATION_NOT_SUPPORTED_FOR_THIS_KEYTYPE", 6, 150}, - #endif - #ifdef EVP_R_OPERATON_NOT_INITIALIZED - {"OPERATON_NOT_INITIALIZED", ERR_LIB_EVP, EVP_R_OPERATON_NOT_INITIALIZED}, - #else - {"OPERATON_NOT_INITIALIZED", 6, 151}, - #endif - #ifdef EVP_R_PARTIALLY_OVERLAPPING - {"PARTIALLY_OVERLAPPING", ERR_LIB_EVP, EVP_R_PARTIALLY_OVERLAPPING}, - #else - {"PARTIALLY_OVERLAPPING", 6, 162}, - #endif - #ifdef EVP_R_PBKDF2_ERROR - {"PBKDF2_ERROR", ERR_LIB_EVP, EVP_R_PBKDF2_ERROR}, - #else - {"PBKDF2_ERROR", 6, 181}, - #endif - #ifdef EVP_R_PKEY_APPLICATION_ASN1_METHOD_ALREADY_REGISTERED - {"PKEY_APPLICATION_ASN1_METHOD_ALREADY_REGISTERED", ERR_LIB_EVP, EVP_R_PKEY_APPLICATION_ASN1_METHOD_ALREADY_REGISTERED}, - #else - {"PKEY_APPLICATION_ASN1_METHOD_ALREADY_REGISTERED", 6, 179}, - #endif - #ifdef EVP_R_PRIVATE_KEY_DECODE_ERROR - {"PRIVATE_KEY_DECODE_ERROR", ERR_LIB_EVP, EVP_R_PRIVATE_KEY_DECODE_ERROR}, - #else - {"PRIVATE_KEY_DECODE_ERROR", 6, 145}, - #endif - #ifdef EVP_R_PRIVATE_KEY_ENCODE_ERROR - {"PRIVATE_KEY_ENCODE_ERROR", ERR_LIB_EVP, EVP_R_PRIVATE_KEY_ENCODE_ERROR}, - #else - {"PRIVATE_KEY_ENCODE_ERROR", 6, 146}, - #endif - #ifdef EVP_R_PUBLIC_KEY_NOT_RSA - {"PUBLIC_KEY_NOT_RSA", ERR_LIB_EVP, EVP_R_PUBLIC_KEY_NOT_RSA}, - #else - {"PUBLIC_KEY_NOT_RSA", 6, 106}, - #endif - #ifdef EVP_R_UNKNOWN_CIPHER - {"UNKNOWN_CIPHER", ERR_LIB_EVP, EVP_R_UNKNOWN_CIPHER}, - #else - {"UNKNOWN_CIPHER", 6, 160}, - #endif - #ifdef EVP_R_UNKNOWN_DIGEST - {"UNKNOWN_DIGEST", ERR_LIB_EVP, 
EVP_R_UNKNOWN_DIGEST}, - #else - {"UNKNOWN_DIGEST", 6, 161}, - #endif - #ifdef EVP_R_UNKNOWN_OPTION - {"UNKNOWN_OPTION", ERR_LIB_EVP, EVP_R_UNKNOWN_OPTION}, - #else - {"UNKNOWN_OPTION", 6, 169}, - #endif - #ifdef EVP_R_UNKNOWN_PBE_ALGORITHM - {"UNKNOWN_PBE_ALGORITHM", ERR_LIB_EVP, EVP_R_UNKNOWN_PBE_ALGORITHM}, - #else - {"UNKNOWN_PBE_ALGORITHM", 6, 121}, - #endif - #ifdef EVP_R_UNSUPPORTED_ALGORITHM - {"UNSUPPORTED_ALGORITHM", ERR_LIB_EVP, EVP_R_UNSUPPORTED_ALGORITHM}, - #else - {"UNSUPPORTED_ALGORITHM", 6, 156}, - #endif - #ifdef EVP_R_UNSUPPORTED_CIPHER - {"UNSUPPORTED_CIPHER", ERR_LIB_EVP, EVP_R_UNSUPPORTED_CIPHER}, - #else - {"UNSUPPORTED_CIPHER", 6, 107}, - #endif - #ifdef EVP_R_UNSUPPORTED_KEYLENGTH - {"UNSUPPORTED_KEYLENGTH", ERR_LIB_EVP, EVP_R_UNSUPPORTED_KEYLENGTH}, - #else - {"UNSUPPORTED_KEYLENGTH", 6, 123}, - #endif - #ifdef EVP_R_UNSUPPORTED_KEY_DERIVATION_FUNCTION - {"UNSUPPORTED_KEY_DERIVATION_FUNCTION", ERR_LIB_EVP, EVP_R_UNSUPPORTED_KEY_DERIVATION_FUNCTION}, - #else - {"UNSUPPORTED_KEY_DERIVATION_FUNCTION", 6, 124}, - #endif - #ifdef EVP_R_UNSUPPORTED_KEY_SIZE - {"UNSUPPORTED_KEY_SIZE", ERR_LIB_EVP, EVP_R_UNSUPPORTED_KEY_SIZE}, - #else - {"UNSUPPORTED_KEY_SIZE", 6, 108}, - #endif - #ifdef EVP_R_UNSUPPORTED_NUMBER_OF_ROUNDS - {"UNSUPPORTED_NUMBER_OF_ROUNDS", ERR_LIB_EVP, EVP_R_UNSUPPORTED_NUMBER_OF_ROUNDS}, - #else - {"UNSUPPORTED_NUMBER_OF_ROUNDS", 6, 135}, - #endif - #ifdef EVP_R_UNSUPPORTED_PRF - {"UNSUPPORTED_PRF", ERR_LIB_EVP, EVP_R_UNSUPPORTED_PRF}, - #else - {"UNSUPPORTED_PRF", 6, 125}, - #endif - #ifdef EVP_R_UNSUPPORTED_PRIVATE_KEY_ALGORITHM - {"UNSUPPORTED_PRIVATE_KEY_ALGORITHM", ERR_LIB_EVP, EVP_R_UNSUPPORTED_PRIVATE_KEY_ALGORITHM}, - #else - {"UNSUPPORTED_PRIVATE_KEY_ALGORITHM", 6, 118}, - #endif - #ifdef EVP_R_UNSUPPORTED_SALT_TYPE - {"UNSUPPORTED_SALT_TYPE", ERR_LIB_EVP, EVP_R_UNSUPPORTED_SALT_TYPE}, - #else - {"UNSUPPORTED_SALT_TYPE", 6, 126}, - #endif - #ifdef EVP_R_WRAP_MODE_NOT_ALLOWED - {"WRAP_MODE_NOT_ALLOWED", ERR_LIB_EVP, EVP_R_WRAP_MODE_NOT_ALLOWED}, - #else - {"WRAP_MODE_NOT_ALLOWED", 6, 170}, - #endif - #ifdef EVP_R_WRONG_FINAL_BLOCK_LENGTH - {"WRONG_FINAL_BLOCK_LENGTH", ERR_LIB_EVP, EVP_R_WRONG_FINAL_BLOCK_LENGTH}, - #else - {"WRONG_FINAL_BLOCK_LENGTH", 6, 109}, - #endif - #ifdef EVP_R_XTS_DUPLICATED_KEYS - {"XTS_DUPLICATED_KEYS", ERR_LIB_EVP, EVP_R_XTS_DUPLICATED_KEYS}, - #else - {"XTS_DUPLICATED_KEYS", 6, 183}, - #endif - #ifdef KDF_R_INVALID_DIGEST - {"INVALID_DIGEST", ERR_LIB_KDF, KDF_R_INVALID_DIGEST}, - #else - {"INVALID_DIGEST", 52, 100}, - #endif - #ifdef KDF_R_MISSING_ITERATION_COUNT - {"MISSING_ITERATION_COUNT", ERR_LIB_KDF, KDF_R_MISSING_ITERATION_COUNT}, - #else - {"MISSING_ITERATION_COUNT", 52, 109}, - #endif - #ifdef KDF_R_MISSING_KEY - {"MISSING_KEY", ERR_LIB_KDF, KDF_R_MISSING_KEY}, - #else - {"MISSING_KEY", 52, 104}, - #endif - #ifdef KDF_R_MISSING_MESSAGE_DIGEST - {"MISSING_MESSAGE_DIGEST", ERR_LIB_KDF, KDF_R_MISSING_MESSAGE_DIGEST}, - #else - {"MISSING_MESSAGE_DIGEST", 52, 105}, - #endif - #ifdef KDF_R_MISSING_PARAMETER - {"MISSING_PARAMETER", ERR_LIB_KDF, KDF_R_MISSING_PARAMETER}, - #else - {"MISSING_PARAMETER", 52, 101}, - #endif - #ifdef KDF_R_MISSING_PASS - {"MISSING_PASS", ERR_LIB_KDF, KDF_R_MISSING_PASS}, - #else - {"MISSING_PASS", 52, 110}, - #endif - #ifdef KDF_R_MISSING_SALT - {"MISSING_SALT", ERR_LIB_KDF, KDF_R_MISSING_SALT}, - #else - {"MISSING_SALT", 52, 111}, - #endif - #ifdef KDF_R_MISSING_SECRET - {"MISSING_SECRET", ERR_LIB_KDF, KDF_R_MISSING_SECRET}, - #else - {"MISSING_SECRET", 52, 107}, - #endif - #ifdef 
KDF_R_MISSING_SEED - {"MISSING_SEED", ERR_LIB_KDF, KDF_R_MISSING_SEED}, - #else - {"MISSING_SEED", 52, 106}, - #endif - #ifdef KDF_R_UNKNOWN_PARAMETER_TYPE - {"UNKNOWN_PARAMETER_TYPE", ERR_LIB_KDF, KDF_R_UNKNOWN_PARAMETER_TYPE}, - #else - {"UNKNOWN_PARAMETER_TYPE", 52, 103}, - #endif - #ifdef KDF_R_VALUE_ERROR - {"VALUE_ERROR", ERR_LIB_KDF, KDF_R_VALUE_ERROR}, - #else - {"VALUE_ERROR", 52, 108}, - #endif - #ifdef KDF_R_VALUE_MISSING - {"VALUE_MISSING", ERR_LIB_KDF, KDF_R_VALUE_MISSING}, - #else - {"VALUE_MISSING", 52, 102}, - #endif - #ifdef OCSP_R_CERTIFICATE_VERIFY_ERROR - {"CERTIFICATE_VERIFY_ERROR", ERR_LIB_OCSP, OCSP_R_CERTIFICATE_VERIFY_ERROR}, - #else - {"CERTIFICATE_VERIFY_ERROR", 39, 101}, - #endif - #ifdef OCSP_R_DIGEST_ERR - {"DIGEST_ERR", ERR_LIB_OCSP, OCSP_R_DIGEST_ERR}, - #else - {"DIGEST_ERR", 39, 102}, - #endif - #ifdef OCSP_R_ERROR_IN_NEXTUPDATE_FIELD - {"ERROR_IN_NEXTUPDATE_FIELD", ERR_LIB_OCSP, OCSP_R_ERROR_IN_NEXTUPDATE_FIELD}, - #else - {"ERROR_IN_NEXTUPDATE_FIELD", 39, 122}, - #endif - #ifdef OCSP_R_ERROR_IN_THISUPDATE_FIELD - {"ERROR_IN_THISUPDATE_FIELD", ERR_LIB_OCSP, OCSP_R_ERROR_IN_THISUPDATE_FIELD}, - #else - {"ERROR_IN_THISUPDATE_FIELD", 39, 123}, - #endif - #ifdef OCSP_R_ERROR_PARSING_URL - {"ERROR_PARSING_URL", ERR_LIB_OCSP, OCSP_R_ERROR_PARSING_URL}, - #else - {"ERROR_PARSING_URL", 39, 121}, - #endif - #ifdef OCSP_R_MISSING_OCSPSIGNING_USAGE - {"MISSING_OCSPSIGNING_USAGE", ERR_LIB_OCSP, OCSP_R_MISSING_OCSPSIGNING_USAGE}, - #else - {"MISSING_OCSPSIGNING_USAGE", 39, 103}, - #endif - #ifdef OCSP_R_NEXTUPDATE_BEFORE_THISUPDATE - {"NEXTUPDATE_BEFORE_THISUPDATE", ERR_LIB_OCSP, OCSP_R_NEXTUPDATE_BEFORE_THISUPDATE}, - #else - {"NEXTUPDATE_BEFORE_THISUPDATE", 39, 124}, - #endif - #ifdef OCSP_R_NOT_BASIC_RESPONSE - {"NOT_BASIC_RESPONSE", ERR_LIB_OCSP, OCSP_R_NOT_BASIC_RESPONSE}, - #else - {"NOT_BASIC_RESPONSE", 39, 104}, - #endif - #ifdef OCSP_R_NO_CERTIFICATES_IN_CHAIN - {"NO_CERTIFICATES_IN_CHAIN", ERR_LIB_OCSP, OCSP_R_NO_CERTIFICATES_IN_CHAIN}, - #else - {"NO_CERTIFICATES_IN_CHAIN", 39, 105}, - #endif - #ifdef OCSP_R_NO_RESPONSE_DATA - {"NO_RESPONSE_DATA", ERR_LIB_OCSP, OCSP_R_NO_RESPONSE_DATA}, - #else - {"NO_RESPONSE_DATA", 39, 108}, - #endif - #ifdef OCSP_R_NO_REVOKED_TIME - {"NO_REVOKED_TIME", ERR_LIB_OCSP, OCSP_R_NO_REVOKED_TIME}, - #else - {"NO_REVOKED_TIME", 39, 109}, - #endif - #ifdef OCSP_R_NO_SIGNER_KEY - {"NO_SIGNER_KEY", ERR_LIB_OCSP, OCSP_R_NO_SIGNER_KEY}, - #else - {"NO_SIGNER_KEY", 39, 130}, - #endif - #ifdef OCSP_R_PRIVATE_KEY_DOES_NOT_MATCH_CERTIFICATE - {"PRIVATE_KEY_DOES_NOT_MATCH_CERTIFICATE", ERR_LIB_OCSP, OCSP_R_PRIVATE_KEY_DOES_NOT_MATCH_CERTIFICATE}, - #else - {"PRIVATE_KEY_DOES_NOT_MATCH_CERTIFICATE", 39, 110}, - #endif - #ifdef OCSP_R_REQUEST_NOT_SIGNED - {"REQUEST_NOT_SIGNED", ERR_LIB_OCSP, OCSP_R_REQUEST_NOT_SIGNED}, - #else - {"REQUEST_NOT_SIGNED", 39, 128}, - #endif - #ifdef OCSP_R_RESPONSE_CONTAINS_NO_REVOCATION_DATA - {"RESPONSE_CONTAINS_NO_REVOCATION_DATA", ERR_LIB_OCSP, OCSP_R_RESPONSE_CONTAINS_NO_REVOCATION_DATA}, - #else - {"RESPONSE_CONTAINS_NO_REVOCATION_DATA", 39, 111}, - #endif - #ifdef OCSP_R_ROOT_CA_NOT_TRUSTED - {"ROOT_CA_NOT_TRUSTED", ERR_LIB_OCSP, OCSP_R_ROOT_CA_NOT_TRUSTED}, - #else - {"ROOT_CA_NOT_TRUSTED", 39, 112}, - #endif - #ifdef OCSP_R_SERVER_RESPONSE_ERROR - {"SERVER_RESPONSE_ERROR", ERR_LIB_OCSP, OCSP_R_SERVER_RESPONSE_ERROR}, - #else - {"SERVER_RESPONSE_ERROR", 39, 114}, - #endif - #ifdef OCSP_R_SERVER_RESPONSE_PARSE_ERROR - {"SERVER_RESPONSE_PARSE_ERROR", ERR_LIB_OCSP, OCSP_R_SERVER_RESPONSE_PARSE_ERROR}, - 
#else - {"SERVER_RESPONSE_PARSE_ERROR", 39, 115}, - #endif - #ifdef OCSP_R_SIGNATURE_FAILURE - {"SIGNATURE_FAILURE", ERR_LIB_OCSP, OCSP_R_SIGNATURE_FAILURE}, - #else - {"SIGNATURE_FAILURE", 39, 117}, - #endif - #ifdef OCSP_R_SIGNER_CERTIFICATE_NOT_FOUND - {"SIGNER_CERTIFICATE_NOT_FOUND", ERR_LIB_OCSP, OCSP_R_SIGNER_CERTIFICATE_NOT_FOUND}, - #else - {"SIGNER_CERTIFICATE_NOT_FOUND", 39, 118}, - #endif - #ifdef OCSP_R_STATUS_EXPIRED - {"STATUS_EXPIRED", ERR_LIB_OCSP, OCSP_R_STATUS_EXPIRED}, - #else - {"STATUS_EXPIRED", 39, 125}, - #endif - #ifdef OCSP_R_STATUS_NOT_YET_VALID - {"STATUS_NOT_YET_VALID", ERR_LIB_OCSP, OCSP_R_STATUS_NOT_YET_VALID}, - #else - {"STATUS_NOT_YET_VALID", 39, 126}, - #endif - #ifdef OCSP_R_STATUS_TOO_OLD - {"STATUS_TOO_OLD", ERR_LIB_OCSP, OCSP_R_STATUS_TOO_OLD}, - #else - {"STATUS_TOO_OLD", 39, 127}, - #endif - #ifdef OCSP_R_UNKNOWN_MESSAGE_DIGEST - {"UNKNOWN_MESSAGE_DIGEST", ERR_LIB_OCSP, OCSP_R_UNKNOWN_MESSAGE_DIGEST}, - #else - {"UNKNOWN_MESSAGE_DIGEST", 39, 119}, - #endif - #ifdef OCSP_R_UNKNOWN_NID - {"UNKNOWN_NID", ERR_LIB_OCSP, OCSP_R_UNKNOWN_NID}, - #else - {"UNKNOWN_NID", 39, 120}, - #endif - #ifdef OCSP_R_UNSUPPORTED_REQUESTORNAME_TYPE - {"UNSUPPORTED_REQUESTORNAME_TYPE", ERR_LIB_OCSP, OCSP_R_UNSUPPORTED_REQUESTORNAME_TYPE}, - #else - {"UNSUPPORTED_REQUESTORNAME_TYPE", 39, 129}, - #endif - #ifdef PEM_R_BAD_BASE64_DECODE - {"BAD_BASE64_DECODE", ERR_LIB_PEM, PEM_R_BAD_BASE64_DECODE}, - #else - {"BAD_BASE64_DECODE", 9, 100}, - #endif - #ifdef PEM_R_BAD_DECRYPT - {"BAD_DECRYPT", ERR_LIB_PEM, PEM_R_BAD_DECRYPT}, - #else - {"BAD_DECRYPT", 9, 101}, - #endif - #ifdef PEM_R_BAD_END_LINE - {"BAD_END_LINE", ERR_LIB_PEM, PEM_R_BAD_END_LINE}, - #else - {"BAD_END_LINE", 9, 102}, - #endif - #ifdef PEM_R_BAD_IV_CHARS - {"BAD_IV_CHARS", ERR_LIB_PEM, PEM_R_BAD_IV_CHARS}, - #else - {"BAD_IV_CHARS", 9, 103}, - #endif - #ifdef PEM_R_BAD_MAGIC_NUMBER - {"BAD_MAGIC_NUMBER", ERR_LIB_PEM, PEM_R_BAD_MAGIC_NUMBER}, - #else - {"BAD_MAGIC_NUMBER", 9, 116}, - #endif - #ifdef PEM_R_BAD_PASSWORD_READ - {"BAD_PASSWORD_READ", ERR_LIB_PEM, PEM_R_BAD_PASSWORD_READ}, - #else - {"BAD_PASSWORD_READ", 9, 104}, - #endif - #ifdef PEM_R_BAD_VERSION_NUMBER - {"BAD_VERSION_NUMBER", ERR_LIB_PEM, PEM_R_BAD_VERSION_NUMBER}, - #else - {"BAD_VERSION_NUMBER", 9, 117}, - #endif - #ifdef PEM_R_BIO_WRITE_FAILURE - {"BIO_WRITE_FAILURE", ERR_LIB_PEM, PEM_R_BIO_WRITE_FAILURE}, - #else - {"BIO_WRITE_FAILURE", 9, 118}, - #endif - #ifdef PEM_R_CIPHER_IS_NULL - {"CIPHER_IS_NULL", ERR_LIB_PEM, PEM_R_CIPHER_IS_NULL}, - #else - {"CIPHER_IS_NULL", 9, 127}, - #endif - #ifdef PEM_R_ERROR_CONVERTING_PRIVATE_KEY - {"ERROR_CONVERTING_PRIVATE_KEY", ERR_LIB_PEM, PEM_R_ERROR_CONVERTING_PRIVATE_KEY}, - #else - {"ERROR_CONVERTING_PRIVATE_KEY", 9, 115}, - #endif - #ifdef PEM_R_EXPECTING_PRIVATE_KEY_BLOB - {"EXPECTING_PRIVATE_KEY_BLOB", ERR_LIB_PEM, PEM_R_EXPECTING_PRIVATE_KEY_BLOB}, - #else - {"EXPECTING_PRIVATE_KEY_BLOB", 9, 119}, - #endif - #ifdef PEM_R_EXPECTING_PUBLIC_KEY_BLOB - {"EXPECTING_PUBLIC_KEY_BLOB", ERR_LIB_PEM, PEM_R_EXPECTING_PUBLIC_KEY_BLOB}, - #else - {"EXPECTING_PUBLIC_KEY_BLOB", 9, 120}, - #endif - #ifdef PEM_R_HEADER_TOO_LONG - {"HEADER_TOO_LONG", ERR_LIB_PEM, PEM_R_HEADER_TOO_LONG}, - #else - {"HEADER_TOO_LONG", 9, 128}, - #endif - #ifdef PEM_R_INCONSISTENT_HEADER - {"INCONSISTENT_HEADER", ERR_LIB_PEM, PEM_R_INCONSISTENT_HEADER}, - #else - {"INCONSISTENT_HEADER", 9, 121}, - #endif - #ifdef PEM_R_KEYBLOB_HEADER_PARSE_ERROR - {"KEYBLOB_HEADER_PARSE_ERROR", ERR_LIB_PEM, PEM_R_KEYBLOB_HEADER_PARSE_ERROR}, - #else - 
{"KEYBLOB_HEADER_PARSE_ERROR", 9, 122}, - #endif - #ifdef PEM_R_KEYBLOB_TOO_SHORT - {"KEYBLOB_TOO_SHORT", ERR_LIB_PEM, PEM_R_KEYBLOB_TOO_SHORT}, - #else - {"KEYBLOB_TOO_SHORT", 9, 123}, - #endif - #ifdef PEM_R_MISSING_DEK_IV - {"MISSING_DEK_IV", ERR_LIB_PEM, PEM_R_MISSING_DEK_IV}, - #else - {"MISSING_DEK_IV", 9, 129}, - #endif - #ifdef PEM_R_NOT_DEK_INFO - {"NOT_DEK_INFO", ERR_LIB_PEM, PEM_R_NOT_DEK_INFO}, - #else - {"NOT_DEK_INFO", 9, 105}, - #endif - #ifdef PEM_R_NOT_ENCRYPTED - {"NOT_ENCRYPTED", ERR_LIB_PEM, PEM_R_NOT_ENCRYPTED}, - #else - {"NOT_ENCRYPTED", 9, 106}, - #endif - #ifdef PEM_R_NOT_PROC_TYPE - {"NOT_PROC_TYPE", ERR_LIB_PEM, PEM_R_NOT_PROC_TYPE}, - #else - {"NOT_PROC_TYPE", 9, 107}, - #endif - #ifdef PEM_R_NO_START_LINE - {"NO_START_LINE", ERR_LIB_PEM, PEM_R_NO_START_LINE}, - #else - {"NO_START_LINE", 9, 108}, - #endif - #ifdef PEM_R_PROBLEMS_GETTING_PASSWORD - {"PROBLEMS_GETTING_PASSWORD", ERR_LIB_PEM, PEM_R_PROBLEMS_GETTING_PASSWORD}, - #else - {"PROBLEMS_GETTING_PASSWORD", 9, 109}, - #endif - #ifdef PEM_R_PUBLIC_KEY_NO_RSA - {"PUBLIC_KEY_NO_RSA", ERR_LIB_PEM, PEM_R_PUBLIC_KEY_NO_RSA}, - #else - {"PUBLIC_KEY_NO_RSA", 9, 110}, - #endif - #ifdef PEM_R_PVK_DATA_TOO_SHORT - {"PVK_DATA_TOO_SHORT", ERR_LIB_PEM, PEM_R_PVK_DATA_TOO_SHORT}, - #else - {"PVK_DATA_TOO_SHORT", 9, 124}, - #endif - #ifdef PEM_R_PVK_TOO_SHORT - {"PVK_TOO_SHORT", ERR_LIB_PEM, PEM_R_PVK_TOO_SHORT}, - #else - {"PVK_TOO_SHORT", 9, 125}, - #endif - #ifdef PEM_R_READ_KEY - {"READ_KEY", ERR_LIB_PEM, PEM_R_READ_KEY}, - #else - {"READ_KEY", 9, 111}, - #endif - #ifdef PEM_R_SHORT_HEADER - {"SHORT_HEADER", ERR_LIB_PEM, PEM_R_SHORT_HEADER}, - #else - {"SHORT_HEADER", 9, 112}, - #endif - #ifdef PEM_R_UNEXPECTED_DEK_IV - {"UNEXPECTED_DEK_IV", ERR_LIB_PEM, PEM_R_UNEXPECTED_DEK_IV}, - #else - {"UNEXPECTED_DEK_IV", 9, 130}, - #endif - #ifdef PEM_R_UNSUPPORTED_CIPHER - {"UNSUPPORTED_CIPHER", ERR_LIB_PEM, PEM_R_UNSUPPORTED_CIPHER}, - #else - {"UNSUPPORTED_CIPHER", 9, 113}, - #endif - #ifdef PEM_R_UNSUPPORTED_ENCRYPTION - {"UNSUPPORTED_ENCRYPTION", ERR_LIB_PEM, PEM_R_UNSUPPORTED_ENCRYPTION}, - #else - {"UNSUPPORTED_ENCRYPTION", 9, 114}, - #endif - #ifdef PEM_R_UNSUPPORTED_KEY_COMPONENTS - {"UNSUPPORTED_KEY_COMPONENTS", ERR_LIB_PEM, PEM_R_UNSUPPORTED_KEY_COMPONENTS}, - #else - {"UNSUPPORTED_KEY_COMPONENTS", 9, 126}, - #endif - #ifdef PKCS12_R_CANT_PACK_STRUCTURE - {"CANT_PACK_STRUCTURE", ERR_LIB_PKCS12, PKCS12_R_CANT_PACK_STRUCTURE}, - #else - {"CANT_PACK_STRUCTURE", 35, 100}, - #endif - #ifdef PKCS12_R_CONTENT_TYPE_NOT_DATA - {"CONTENT_TYPE_NOT_DATA", ERR_LIB_PKCS12, PKCS12_R_CONTENT_TYPE_NOT_DATA}, - #else - {"CONTENT_TYPE_NOT_DATA", 35, 121}, - #endif - #ifdef PKCS12_R_DECODE_ERROR - {"DECODE_ERROR", ERR_LIB_PKCS12, PKCS12_R_DECODE_ERROR}, - #else - {"DECODE_ERROR", 35, 101}, - #endif - #ifdef PKCS12_R_ENCODE_ERROR - {"ENCODE_ERROR", ERR_LIB_PKCS12, PKCS12_R_ENCODE_ERROR}, - #else - {"ENCODE_ERROR", 35, 102}, - #endif - #ifdef PKCS12_R_ENCRYPT_ERROR - {"ENCRYPT_ERROR", ERR_LIB_PKCS12, PKCS12_R_ENCRYPT_ERROR}, - #else - {"ENCRYPT_ERROR", 35, 103}, - #endif - #ifdef PKCS12_R_ERROR_SETTING_ENCRYPTED_DATA_TYPE - {"ERROR_SETTING_ENCRYPTED_DATA_TYPE", ERR_LIB_PKCS12, PKCS12_R_ERROR_SETTING_ENCRYPTED_DATA_TYPE}, - #else - {"ERROR_SETTING_ENCRYPTED_DATA_TYPE", 35, 120}, - #endif - #ifdef PKCS12_R_INVALID_NULL_ARGUMENT - {"INVALID_NULL_ARGUMENT", ERR_LIB_PKCS12, PKCS12_R_INVALID_NULL_ARGUMENT}, - #else - {"INVALID_NULL_ARGUMENT", 35, 104}, - #endif - #ifdef PKCS12_R_INVALID_NULL_PKCS12_POINTER - {"INVALID_NULL_PKCS12_POINTER", 
ERR_LIB_PKCS12, PKCS12_R_INVALID_NULL_PKCS12_POINTER}, - #else - {"INVALID_NULL_PKCS12_POINTER", 35, 105}, - #endif - #ifdef PKCS12_R_IV_GEN_ERROR - {"IV_GEN_ERROR", ERR_LIB_PKCS12, PKCS12_R_IV_GEN_ERROR}, - #else - {"IV_GEN_ERROR", 35, 106}, - #endif - #ifdef PKCS12_R_KEY_GEN_ERROR - {"KEY_GEN_ERROR", ERR_LIB_PKCS12, PKCS12_R_KEY_GEN_ERROR}, - #else - {"KEY_GEN_ERROR", 35, 107}, - #endif - #ifdef PKCS12_R_MAC_ABSENT - {"MAC_ABSENT", ERR_LIB_PKCS12, PKCS12_R_MAC_ABSENT}, - #else - {"MAC_ABSENT", 35, 108}, - #endif - #ifdef PKCS12_R_MAC_GENERATION_ERROR - {"MAC_GENERATION_ERROR", ERR_LIB_PKCS12, PKCS12_R_MAC_GENERATION_ERROR}, - #else - {"MAC_GENERATION_ERROR", 35, 109}, - #endif - #ifdef PKCS12_R_MAC_SETUP_ERROR - {"MAC_SETUP_ERROR", ERR_LIB_PKCS12, PKCS12_R_MAC_SETUP_ERROR}, - #else - {"MAC_SETUP_ERROR", 35, 110}, - #endif - #ifdef PKCS12_R_MAC_STRING_SET_ERROR - {"MAC_STRING_SET_ERROR", ERR_LIB_PKCS12, PKCS12_R_MAC_STRING_SET_ERROR}, - #else - {"MAC_STRING_SET_ERROR", 35, 111}, - #endif - #ifdef PKCS12_R_MAC_VERIFY_FAILURE - {"MAC_VERIFY_FAILURE", ERR_LIB_PKCS12, PKCS12_R_MAC_VERIFY_FAILURE}, - #else - {"MAC_VERIFY_FAILURE", 35, 113}, - #endif - #ifdef PKCS12_R_PARSE_ERROR - {"PARSE_ERROR", ERR_LIB_PKCS12, PKCS12_R_PARSE_ERROR}, - #else - {"PARSE_ERROR", 35, 114}, - #endif - #ifdef PKCS12_R_PKCS12_ALGOR_CIPHERINIT_ERROR - {"PKCS12_ALGOR_CIPHERINIT_ERROR", ERR_LIB_PKCS12, PKCS12_R_PKCS12_ALGOR_CIPHERINIT_ERROR}, - #else - {"PKCS12_ALGOR_CIPHERINIT_ERROR", 35, 115}, - #endif - #ifdef PKCS12_R_PKCS12_CIPHERFINAL_ERROR - {"PKCS12_CIPHERFINAL_ERROR", ERR_LIB_PKCS12, PKCS12_R_PKCS12_CIPHERFINAL_ERROR}, - #else - {"PKCS12_CIPHERFINAL_ERROR", 35, 116}, - #endif - #ifdef PKCS12_R_PKCS12_PBE_CRYPT_ERROR - {"PKCS12_PBE_CRYPT_ERROR", ERR_LIB_PKCS12, PKCS12_R_PKCS12_PBE_CRYPT_ERROR}, - #else - {"PKCS12_PBE_CRYPT_ERROR", 35, 117}, - #endif - #ifdef PKCS12_R_UNKNOWN_DIGEST_ALGORITHM - {"UNKNOWN_DIGEST_ALGORITHM", ERR_LIB_PKCS12, PKCS12_R_UNKNOWN_DIGEST_ALGORITHM}, - #else - {"UNKNOWN_DIGEST_ALGORITHM", 35, 118}, - #endif - #ifdef PKCS12_R_UNSUPPORTED_PKCS12_MODE - {"UNSUPPORTED_PKCS12_MODE", ERR_LIB_PKCS12, PKCS12_R_UNSUPPORTED_PKCS12_MODE}, - #else - {"UNSUPPORTED_PKCS12_MODE", 35, 119}, - #endif - #ifdef PKCS7_R_CERTIFICATE_VERIFY_ERROR - {"CERTIFICATE_VERIFY_ERROR", ERR_LIB_PKCS7, PKCS7_R_CERTIFICATE_VERIFY_ERROR}, - #else - {"CERTIFICATE_VERIFY_ERROR", 33, 117}, - #endif - #ifdef PKCS7_R_CIPHER_HAS_NO_OBJECT_IDENTIFIER - {"CIPHER_HAS_NO_OBJECT_IDENTIFIER", ERR_LIB_PKCS7, PKCS7_R_CIPHER_HAS_NO_OBJECT_IDENTIFIER}, - #else - {"CIPHER_HAS_NO_OBJECT_IDENTIFIER", 33, 144}, - #endif - #ifdef PKCS7_R_CIPHER_NOT_INITIALIZED - {"CIPHER_NOT_INITIALIZED", ERR_LIB_PKCS7, PKCS7_R_CIPHER_NOT_INITIALIZED}, - #else - {"CIPHER_NOT_INITIALIZED", 33, 116}, - #endif - #ifdef PKCS7_R_CONTENT_AND_DATA_PRESENT - {"CONTENT_AND_DATA_PRESENT", ERR_LIB_PKCS7, PKCS7_R_CONTENT_AND_DATA_PRESENT}, - #else - {"CONTENT_AND_DATA_PRESENT", 33, 118}, - #endif - #ifdef PKCS7_R_CTRL_ERROR - {"CTRL_ERROR", ERR_LIB_PKCS7, PKCS7_R_CTRL_ERROR}, - #else - {"CTRL_ERROR", 33, 152}, - #endif - #ifdef PKCS7_R_DECRYPT_ERROR - {"DECRYPT_ERROR", ERR_LIB_PKCS7, PKCS7_R_DECRYPT_ERROR}, - #else - {"DECRYPT_ERROR", 33, 119}, - #endif - #ifdef PKCS7_R_DIGEST_FAILURE - {"DIGEST_FAILURE", ERR_LIB_PKCS7, PKCS7_R_DIGEST_FAILURE}, - #else - {"DIGEST_FAILURE", 33, 101}, - #endif - #ifdef PKCS7_R_ENCRYPTION_CTRL_FAILURE - {"ENCRYPTION_CTRL_FAILURE", ERR_LIB_PKCS7, PKCS7_R_ENCRYPTION_CTRL_FAILURE}, - #else - {"ENCRYPTION_CTRL_FAILURE", 33, 149}, - #endif - 
#ifdef PKCS7_R_ENCRYPTION_NOT_SUPPORTED_FOR_THIS_KEY_TYPE - {"ENCRYPTION_NOT_SUPPORTED_FOR_THIS_KEY_TYPE", ERR_LIB_PKCS7, PKCS7_R_ENCRYPTION_NOT_SUPPORTED_FOR_THIS_KEY_TYPE}, - #else - {"ENCRYPTION_NOT_SUPPORTED_FOR_THIS_KEY_TYPE", 33, 150}, - #endif - #ifdef PKCS7_R_ERROR_ADDING_RECIPIENT - {"ERROR_ADDING_RECIPIENT", ERR_LIB_PKCS7, PKCS7_R_ERROR_ADDING_RECIPIENT}, - #else - {"ERROR_ADDING_RECIPIENT", 33, 120}, - #endif - #ifdef PKCS7_R_ERROR_SETTING_CIPHER - {"ERROR_SETTING_CIPHER", ERR_LIB_PKCS7, PKCS7_R_ERROR_SETTING_CIPHER}, - #else - {"ERROR_SETTING_CIPHER", 33, 121}, - #endif - #ifdef PKCS7_R_INVALID_NULL_POINTER - {"INVALID_NULL_POINTER", ERR_LIB_PKCS7, PKCS7_R_INVALID_NULL_POINTER}, - #else - {"INVALID_NULL_POINTER", 33, 143}, - #endif - #ifdef PKCS7_R_INVALID_SIGNED_DATA_TYPE - {"INVALID_SIGNED_DATA_TYPE", ERR_LIB_PKCS7, PKCS7_R_INVALID_SIGNED_DATA_TYPE}, - #else - {"INVALID_SIGNED_DATA_TYPE", 33, 155}, - #endif - #ifdef PKCS7_R_NO_CONTENT - {"NO_CONTENT", ERR_LIB_PKCS7, PKCS7_R_NO_CONTENT}, - #else - {"NO_CONTENT", 33, 122}, - #endif - #ifdef PKCS7_R_NO_DEFAULT_DIGEST - {"NO_DEFAULT_DIGEST", ERR_LIB_PKCS7, PKCS7_R_NO_DEFAULT_DIGEST}, - #else - {"NO_DEFAULT_DIGEST", 33, 151}, - #endif - #ifdef PKCS7_R_NO_MATCHING_DIGEST_TYPE_FOUND - {"NO_MATCHING_DIGEST_TYPE_FOUND", ERR_LIB_PKCS7, PKCS7_R_NO_MATCHING_DIGEST_TYPE_FOUND}, - #else - {"NO_MATCHING_DIGEST_TYPE_FOUND", 33, 154}, - #endif - #ifdef PKCS7_R_NO_RECIPIENT_MATCHES_CERTIFICATE - {"NO_RECIPIENT_MATCHES_CERTIFICATE", ERR_LIB_PKCS7, PKCS7_R_NO_RECIPIENT_MATCHES_CERTIFICATE}, - #else - {"NO_RECIPIENT_MATCHES_CERTIFICATE", 33, 115}, - #endif - #ifdef PKCS7_R_NO_SIGNATURES_ON_DATA - {"NO_SIGNATURES_ON_DATA", ERR_LIB_PKCS7, PKCS7_R_NO_SIGNATURES_ON_DATA}, - #else - {"NO_SIGNATURES_ON_DATA", 33, 123}, - #endif - #ifdef PKCS7_R_NO_SIGNERS - {"NO_SIGNERS", ERR_LIB_PKCS7, PKCS7_R_NO_SIGNERS}, - #else - {"NO_SIGNERS", 33, 142}, - #endif - #ifdef PKCS7_R_OPERATION_NOT_SUPPORTED_ON_THIS_TYPE - {"OPERATION_NOT_SUPPORTED_ON_THIS_TYPE", ERR_LIB_PKCS7, PKCS7_R_OPERATION_NOT_SUPPORTED_ON_THIS_TYPE}, - #else - {"OPERATION_NOT_SUPPORTED_ON_THIS_TYPE", 33, 104}, - #endif - #ifdef PKCS7_R_PKCS7_ADD_SIGNATURE_ERROR - {"PKCS7_ADD_SIGNATURE_ERROR", ERR_LIB_PKCS7, PKCS7_R_PKCS7_ADD_SIGNATURE_ERROR}, - #else - {"PKCS7_ADD_SIGNATURE_ERROR", 33, 124}, - #endif - #ifdef PKCS7_R_PKCS7_ADD_SIGNER_ERROR - {"PKCS7_ADD_SIGNER_ERROR", ERR_LIB_PKCS7, PKCS7_R_PKCS7_ADD_SIGNER_ERROR}, - #else - {"PKCS7_ADD_SIGNER_ERROR", 33, 153}, - #endif - #ifdef PKCS7_R_PKCS7_DATASIGN - {"PKCS7_DATASIGN", ERR_LIB_PKCS7, PKCS7_R_PKCS7_DATASIGN}, - #else - {"PKCS7_DATASIGN", 33, 145}, - #endif - #ifdef PKCS7_R_PRIVATE_KEY_DOES_NOT_MATCH_CERTIFICATE - {"PRIVATE_KEY_DOES_NOT_MATCH_CERTIFICATE", ERR_LIB_PKCS7, PKCS7_R_PRIVATE_KEY_DOES_NOT_MATCH_CERTIFICATE}, - #else - {"PRIVATE_KEY_DOES_NOT_MATCH_CERTIFICATE", 33, 127}, - #endif - #ifdef PKCS7_R_SIGNATURE_FAILURE - {"SIGNATURE_FAILURE", ERR_LIB_PKCS7, PKCS7_R_SIGNATURE_FAILURE}, - #else - {"SIGNATURE_FAILURE", 33, 105}, - #endif - #ifdef PKCS7_R_SIGNER_CERTIFICATE_NOT_FOUND - {"SIGNER_CERTIFICATE_NOT_FOUND", ERR_LIB_PKCS7, PKCS7_R_SIGNER_CERTIFICATE_NOT_FOUND}, - #else - {"SIGNER_CERTIFICATE_NOT_FOUND", 33, 128}, - #endif - #ifdef PKCS7_R_SIGNING_CTRL_FAILURE - {"SIGNING_CTRL_FAILURE", ERR_LIB_PKCS7, PKCS7_R_SIGNING_CTRL_FAILURE}, - #else - {"SIGNING_CTRL_FAILURE", 33, 147}, - #endif - #ifdef PKCS7_R_SIGNING_NOT_SUPPORTED_FOR_THIS_KEY_TYPE - {"SIGNING_NOT_SUPPORTED_FOR_THIS_KEY_TYPE", ERR_LIB_PKCS7, 
PKCS7_R_SIGNING_NOT_SUPPORTED_FOR_THIS_KEY_TYPE}, - #else - {"SIGNING_NOT_SUPPORTED_FOR_THIS_KEY_TYPE", 33, 148}, - #endif - #ifdef PKCS7_R_SMIME_TEXT_ERROR - {"SMIME_TEXT_ERROR", ERR_LIB_PKCS7, PKCS7_R_SMIME_TEXT_ERROR}, - #else - {"SMIME_TEXT_ERROR", 33, 129}, - #endif - #ifdef PKCS7_R_UNABLE_TO_FIND_CERTIFICATE - {"UNABLE_TO_FIND_CERTIFICATE", ERR_LIB_PKCS7, PKCS7_R_UNABLE_TO_FIND_CERTIFICATE}, - #else - {"UNABLE_TO_FIND_CERTIFICATE", 33, 106}, - #endif - #ifdef PKCS7_R_UNABLE_TO_FIND_MEM_BIO - {"UNABLE_TO_FIND_MEM_BIO", ERR_LIB_PKCS7, PKCS7_R_UNABLE_TO_FIND_MEM_BIO}, - #else - {"UNABLE_TO_FIND_MEM_BIO", 33, 107}, - #endif - #ifdef PKCS7_R_UNABLE_TO_FIND_MESSAGE_DIGEST - {"UNABLE_TO_FIND_MESSAGE_DIGEST", ERR_LIB_PKCS7, PKCS7_R_UNABLE_TO_FIND_MESSAGE_DIGEST}, - #else - {"UNABLE_TO_FIND_MESSAGE_DIGEST", 33, 108}, - #endif - #ifdef PKCS7_R_UNKNOWN_DIGEST_TYPE - {"UNKNOWN_DIGEST_TYPE", ERR_LIB_PKCS7, PKCS7_R_UNKNOWN_DIGEST_TYPE}, - #else - {"UNKNOWN_DIGEST_TYPE", 33, 109}, - #endif - #ifdef PKCS7_R_UNKNOWN_OPERATION - {"UNKNOWN_OPERATION", ERR_LIB_PKCS7, PKCS7_R_UNKNOWN_OPERATION}, - #else - {"UNKNOWN_OPERATION", 33, 110}, - #endif - #ifdef PKCS7_R_UNSUPPORTED_CIPHER_TYPE - {"UNSUPPORTED_CIPHER_TYPE", ERR_LIB_PKCS7, PKCS7_R_UNSUPPORTED_CIPHER_TYPE}, - #else - {"UNSUPPORTED_CIPHER_TYPE", 33, 111}, - #endif - #ifdef PKCS7_R_UNSUPPORTED_CONTENT_TYPE - {"UNSUPPORTED_CONTENT_TYPE", ERR_LIB_PKCS7, PKCS7_R_UNSUPPORTED_CONTENT_TYPE}, - #else - {"UNSUPPORTED_CONTENT_TYPE", 33, 112}, - #endif - #ifdef PKCS7_R_WRONG_CONTENT_TYPE - {"WRONG_CONTENT_TYPE", ERR_LIB_PKCS7, PKCS7_R_WRONG_CONTENT_TYPE}, - #else - {"WRONG_CONTENT_TYPE", 33, 113}, - #endif - #ifdef PKCS7_R_WRONG_PKCS7_TYPE - {"WRONG_PKCS7_TYPE", ERR_LIB_PKCS7, PKCS7_R_WRONG_PKCS7_TYPE}, - #else - {"WRONG_PKCS7_TYPE", 33, 114}, - #endif - #ifdef RAND_R_ADDITIONAL_INPUT_TOO_LONG - {"ADDITIONAL_INPUT_TOO_LONG", ERR_LIB_RAND, RAND_R_ADDITIONAL_INPUT_TOO_LONG}, - #else - {"ADDITIONAL_INPUT_TOO_LONG", 36, 102}, - #endif - #ifdef RAND_R_ALREADY_INSTANTIATED - {"ALREADY_INSTANTIATED", ERR_LIB_RAND, RAND_R_ALREADY_INSTANTIATED}, - #else - {"ALREADY_INSTANTIATED", 36, 103}, - #endif - #ifdef RAND_R_ARGUMENT_OUT_OF_RANGE - {"ARGUMENT_OUT_OF_RANGE", ERR_LIB_RAND, RAND_R_ARGUMENT_OUT_OF_RANGE}, - #else - {"ARGUMENT_OUT_OF_RANGE", 36, 105}, - #endif - #ifdef RAND_R_CANNOT_OPEN_FILE - {"CANNOT_OPEN_FILE", ERR_LIB_RAND, RAND_R_CANNOT_OPEN_FILE}, - #else - {"CANNOT_OPEN_FILE", 36, 121}, - #endif - #ifdef RAND_R_DRBG_ALREADY_INITIALIZED - {"DRBG_ALREADY_INITIALIZED", ERR_LIB_RAND, RAND_R_DRBG_ALREADY_INITIALIZED}, - #else - {"DRBG_ALREADY_INITIALIZED", 36, 129}, - #endif - #ifdef RAND_R_DRBG_NOT_INITIALISED - {"DRBG_NOT_INITIALISED", ERR_LIB_RAND, RAND_R_DRBG_NOT_INITIALISED}, - #else - {"DRBG_NOT_INITIALISED", 36, 104}, - #endif - #ifdef RAND_R_ENTROPY_INPUT_TOO_LONG - {"ENTROPY_INPUT_TOO_LONG", ERR_LIB_RAND, RAND_R_ENTROPY_INPUT_TOO_LONG}, - #else - {"ENTROPY_INPUT_TOO_LONG", 36, 106}, - #endif - #ifdef RAND_R_ENTROPY_OUT_OF_RANGE - {"ENTROPY_OUT_OF_RANGE", ERR_LIB_RAND, RAND_R_ENTROPY_OUT_OF_RANGE}, - #else - {"ENTROPY_OUT_OF_RANGE", 36, 124}, - #endif - #ifdef RAND_R_ERROR_ENTROPY_POOL_WAS_IGNORED - {"ERROR_ENTROPY_POOL_WAS_IGNORED", ERR_LIB_RAND, RAND_R_ERROR_ENTROPY_POOL_WAS_IGNORED}, - #else - {"ERROR_ENTROPY_POOL_WAS_IGNORED", 36, 127}, - #endif - #ifdef RAND_R_ERROR_INITIALISING_DRBG - {"ERROR_INITIALISING_DRBG", ERR_LIB_RAND, RAND_R_ERROR_INITIALISING_DRBG}, - #else - {"ERROR_INITIALISING_DRBG", 36, 107}, - #endif - #ifdef 
RAND_R_ERROR_INSTANTIATING_DRBG - {"ERROR_INSTANTIATING_DRBG", ERR_LIB_RAND, RAND_R_ERROR_INSTANTIATING_DRBG}, - #else - {"ERROR_INSTANTIATING_DRBG", 36, 108}, - #endif - #ifdef RAND_R_ERROR_RETRIEVING_ADDITIONAL_INPUT - {"ERROR_RETRIEVING_ADDITIONAL_INPUT", ERR_LIB_RAND, RAND_R_ERROR_RETRIEVING_ADDITIONAL_INPUT}, - #else - {"ERROR_RETRIEVING_ADDITIONAL_INPUT", 36, 109}, - #endif - #ifdef RAND_R_ERROR_RETRIEVING_ENTROPY - {"ERROR_RETRIEVING_ENTROPY", ERR_LIB_RAND, RAND_R_ERROR_RETRIEVING_ENTROPY}, - #else - {"ERROR_RETRIEVING_ENTROPY", 36, 110}, - #endif - #ifdef RAND_R_ERROR_RETRIEVING_NONCE - {"ERROR_RETRIEVING_NONCE", ERR_LIB_RAND, RAND_R_ERROR_RETRIEVING_NONCE}, - #else - {"ERROR_RETRIEVING_NONCE", 36, 111}, - #endif - #ifdef RAND_R_FAILED_TO_CREATE_LOCK - {"FAILED_TO_CREATE_LOCK", ERR_LIB_RAND, RAND_R_FAILED_TO_CREATE_LOCK}, - #else - {"FAILED_TO_CREATE_LOCK", 36, 126}, - #endif - #ifdef RAND_R_FUNC_NOT_IMPLEMENTED - {"FUNC_NOT_IMPLEMENTED", ERR_LIB_RAND, RAND_R_FUNC_NOT_IMPLEMENTED}, - #else - {"FUNC_NOT_IMPLEMENTED", 36, 101}, - #endif - #ifdef RAND_R_FWRITE_ERROR - {"FWRITE_ERROR", ERR_LIB_RAND, RAND_R_FWRITE_ERROR}, - #else - {"FWRITE_ERROR", 36, 123}, - #endif - #ifdef RAND_R_GENERATE_ERROR - {"GENERATE_ERROR", ERR_LIB_RAND, RAND_R_GENERATE_ERROR}, - #else - {"GENERATE_ERROR", 36, 112}, - #endif - #ifdef RAND_R_INTERNAL_ERROR - {"INTERNAL_ERROR", ERR_LIB_RAND, RAND_R_INTERNAL_ERROR}, - #else - {"INTERNAL_ERROR", 36, 113}, - #endif - #ifdef RAND_R_IN_ERROR_STATE - {"IN_ERROR_STATE", ERR_LIB_RAND, RAND_R_IN_ERROR_STATE}, - #else - {"IN_ERROR_STATE", 36, 114}, - #endif - #ifdef RAND_R_NOT_A_REGULAR_FILE - {"NOT_A_REGULAR_FILE", ERR_LIB_RAND, RAND_R_NOT_A_REGULAR_FILE}, - #else - {"NOT_A_REGULAR_FILE", 36, 122}, - #endif - #ifdef RAND_R_NOT_INSTANTIATED - {"NOT_INSTANTIATED", ERR_LIB_RAND, RAND_R_NOT_INSTANTIATED}, - #else - {"NOT_INSTANTIATED", 36, 115}, - #endif - #ifdef RAND_R_NO_DRBG_IMPLEMENTATION_SELECTED - {"NO_DRBG_IMPLEMENTATION_SELECTED", ERR_LIB_RAND, RAND_R_NO_DRBG_IMPLEMENTATION_SELECTED}, - #else - {"NO_DRBG_IMPLEMENTATION_SELECTED", 36, 128}, - #endif - #ifdef RAND_R_PARENT_LOCKING_NOT_ENABLED - {"PARENT_LOCKING_NOT_ENABLED", ERR_LIB_RAND, RAND_R_PARENT_LOCKING_NOT_ENABLED}, - #else - {"PARENT_LOCKING_NOT_ENABLED", 36, 130}, - #endif - #ifdef RAND_R_PARENT_STRENGTH_TOO_WEAK - {"PARENT_STRENGTH_TOO_WEAK", ERR_LIB_RAND, RAND_R_PARENT_STRENGTH_TOO_WEAK}, - #else - {"PARENT_STRENGTH_TOO_WEAK", 36, 131}, - #endif - #ifdef RAND_R_PERSONALISATION_STRING_TOO_LONG - {"PERSONALISATION_STRING_TOO_LONG", ERR_LIB_RAND, RAND_R_PERSONALISATION_STRING_TOO_LONG}, - #else - {"PERSONALISATION_STRING_TOO_LONG", 36, 116}, - #endif - #ifdef RAND_R_PREDICTION_RESISTANCE_NOT_SUPPORTED - {"PREDICTION_RESISTANCE_NOT_SUPPORTED", ERR_LIB_RAND, RAND_R_PREDICTION_RESISTANCE_NOT_SUPPORTED}, - #else - {"PREDICTION_RESISTANCE_NOT_SUPPORTED", 36, 133}, - #endif - #ifdef RAND_R_PRNG_NOT_SEEDED - {"PRNG_NOT_SEEDED", ERR_LIB_RAND, RAND_R_PRNG_NOT_SEEDED}, - #else - {"PRNG_NOT_SEEDED", 36, 100}, - #endif - #ifdef RAND_R_RANDOM_POOL_OVERFLOW - {"RANDOM_POOL_OVERFLOW", ERR_LIB_RAND, RAND_R_RANDOM_POOL_OVERFLOW}, - #else - {"RANDOM_POOL_OVERFLOW", 36, 125}, - #endif - #ifdef RAND_R_RANDOM_POOL_UNDERFLOW - {"RANDOM_POOL_UNDERFLOW", ERR_LIB_RAND, RAND_R_RANDOM_POOL_UNDERFLOW}, - #else - {"RANDOM_POOL_UNDERFLOW", 36, 134}, - #endif - #ifdef RAND_R_REQUEST_TOO_LARGE_FOR_DRBG - {"REQUEST_TOO_LARGE_FOR_DRBG", ERR_LIB_RAND, RAND_R_REQUEST_TOO_LARGE_FOR_DRBG}, - #else - {"REQUEST_TOO_LARGE_FOR_DRBG", 36, 117}, - 
#endif - #ifdef RAND_R_RESEED_ERROR - {"RESEED_ERROR", ERR_LIB_RAND, RAND_R_RESEED_ERROR}, - #else - {"RESEED_ERROR", 36, 118}, - #endif - #ifdef RAND_R_SELFTEST_FAILURE - {"SELFTEST_FAILURE", ERR_LIB_RAND, RAND_R_SELFTEST_FAILURE}, - #else - {"SELFTEST_FAILURE", 36, 119}, - #endif - #ifdef RAND_R_TOO_LITTLE_NONCE_REQUESTED - {"TOO_LITTLE_NONCE_REQUESTED", ERR_LIB_RAND, RAND_R_TOO_LITTLE_NONCE_REQUESTED}, - #else - {"TOO_LITTLE_NONCE_REQUESTED", 36, 135}, - #endif - #ifdef RAND_R_TOO_MUCH_NONCE_REQUESTED - {"TOO_MUCH_NONCE_REQUESTED", ERR_LIB_RAND, RAND_R_TOO_MUCH_NONCE_REQUESTED}, - #else - {"TOO_MUCH_NONCE_REQUESTED", 36, 136}, - #endif - #ifdef RAND_R_UNSUPPORTED_DRBG_FLAGS - {"UNSUPPORTED_DRBG_FLAGS", ERR_LIB_RAND, RAND_R_UNSUPPORTED_DRBG_FLAGS}, - #else - {"UNSUPPORTED_DRBG_FLAGS", 36, 132}, - #endif - #ifdef RAND_R_UNSUPPORTED_DRBG_TYPE - {"UNSUPPORTED_DRBG_TYPE", ERR_LIB_RAND, RAND_R_UNSUPPORTED_DRBG_TYPE}, - #else - {"UNSUPPORTED_DRBG_TYPE", 36, 120}, - #endif - #ifdef RSA_R_ALGORITHM_MISMATCH - {"ALGORITHM_MISMATCH", ERR_LIB_RSA, RSA_R_ALGORITHM_MISMATCH}, - #else - {"ALGORITHM_MISMATCH", 4, 100}, - #endif - #ifdef RSA_R_BAD_E_VALUE - {"BAD_E_VALUE", ERR_LIB_RSA, RSA_R_BAD_E_VALUE}, - #else - {"BAD_E_VALUE", 4, 101}, - #endif - #ifdef RSA_R_BAD_FIXED_HEADER_DECRYPT - {"BAD_FIXED_HEADER_DECRYPT", ERR_LIB_RSA, RSA_R_BAD_FIXED_HEADER_DECRYPT}, - #else - {"BAD_FIXED_HEADER_DECRYPT", 4, 102}, - #endif - #ifdef RSA_R_BAD_PAD_BYTE_COUNT - {"BAD_PAD_BYTE_COUNT", ERR_LIB_RSA, RSA_R_BAD_PAD_BYTE_COUNT}, - #else - {"BAD_PAD_BYTE_COUNT", 4, 103}, - #endif - #ifdef RSA_R_BAD_SIGNATURE - {"BAD_SIGNATURE", ERR_LIB_RSA, RSA_R_BAD_SIGNATURE}, - #else - {"BAD_SIGNATURE", 4, 104}, - #endif - #ifdef RSA_R_BLOCK_TYPE_IS_NOT_01 - {"BLOCK_TYPE_IS_NOT_01", ERR_LIB_RSA, RSA_R_BLOCK_TYPE_IS_NOT_01}, - #else - {"BLOCK_TYPE_IS_NOT_01", 4, 106}, - #endif - #ifdef RSA_R_BLOCK_TYPE_IS_NOT_02 - {"BLOCK_TYPE_IS_NOT_02", ERR_LIB_RSA, RSA_R_BLOCK_TYPE_IS_NOT_02}, - #else - {"BLOCK_TYPE_IS_NOT_02", 4, 107}, - #endif - #ifdef RSA_R_DATA_GREATER_THAN_MOD_LEN - {"DATA_GREATER_THAN_MOD_LEN", ERR_LIB_RSA, RSA_R_DATA_GREATER_THAN_MOD_LEN}, - #else - {"DATA_GREATER_THAN_MOD_LEN", 4, 108}, - #endif - #ifdef RSA_R_DATA_TOO_LARGE - {"DATA_TOO_LARGE", ERR_LIB_RSA, RSA_R_DATA_TOO_LARGE}, - #else - {"DATA_TOO_LARGE", 4, 109}, - #endif - #ifdef RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE - {"DATA_TOO_LARGE_FOR_KEY_SIZE", ERR_LIB_RSA, RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE}, - #else - {"DATA_TOO_LARGE_FOR_KEY_SIZE", 4, 110}, - #endif - #ifdef RSA_R_DATA_TOO_LARGE_FOR_MODULUS - {"DATA_TOO_LARGE_FOR_MODULUS", ERR_LIB_RSA, RSA_R_DATA_TOO_LARGE_FOR_MODULUS}, - #else - {"DATA_TOO_LARGE_FOR_MODULUS", 4, 132}, - #endif - #ifdef RSA_R_DATA_TOO_SMALL - {"DATA_TOO_SMALL", ERR_LIB_RSA, RSA_R_DATA_TOO_SMALL}, - #else - {"DATA_TOO_SMALL", 4, 111}, - #endif - #ifdef RSA_R_DATA_TOO_SMALL_FOR_KEY_SIZE - {"DATA_TOO_SMALL_FOR_KEY_SIZE", ERR_LIB_RSA, RSA_R_DATA_TOO_SMALL_FOR_KEY_SIZE}, - #else - {"DATA_TOO_SMALL_FOR_KEY_SIZE", 4, 122}, - #endif - #ifdef RSA_R_DIGEST_DOES_NOT_MATCH - {"DIGEST_DOES_NOT_MATCH", ERR_LIB_RSA, RSA_R_DIGEST_DOES_NOT_MATCH}, - #else - {"DIGEST_DOES_NOT_MATCH", 4, 158}, - #endif - #ifdef RSA_R_DIGEST_NOT_ALLOWED - {"DIGEST_NOT_ALLOWED", ERR_LIB_RSA, RSA_R_DIGEST_NOT_ALLOWED}, - #else - {"DIGEST_NOT_ALLOWED", 4, 145}, - #endif - #ifdef RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY - {"DIGEST_TOO_BIG_FOR_RSA_KEY", ERR_LIB_RSA, RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY}, - #else - {"DIGEST_TOO_BIG_FOR_RSA_KEY", 4, 112}, - #endif - #ifdef 
RSA_R_DMP1_NOT_CONGRUENT_TO_D - {"DMP1_NOT_CONGRUENT_TO_D", ERR_LIB_RSA, RSA_R_DMP1_NOT_CONGRUENT_TO_D}, - #else - {"DMP1_NOT_CONGRUENT_TO_D", 4, 124}, - #endif - #ifdef RSA_R_DMQ1_NOT_CONGRUENT_TO_D - {"DMQ1_NOT_CONGRUENT_TO_D", ERR_LIB_RSA, RSA_R_DMQ1_NOT_CONGRUENT_TO_D}, - #else - {"DMQ1_NOT_CONGRUENT_TO_D", 4, 125}, - #endif - #ifdef RSA_R_D_E_NOT_CONGRUENT_TO_1 - {"D_E_NOT_CONGRUENT_TO_1", ERR_LIB_RSA, RSA_R_D_E_NOT_CONGRUENT_TO_1}, - #else - {"D_E_NOT_CONGRUENT_TO_1", 4, 123}, - #endif - #ifdef RSA_R_FIRST_OCTET_INVALID - {"FIRST_OCTET_INVALID", ERR_LIB_RSA, RSA_R_FIRST_OCTET_INVALID}, - #else - {"FIRST_OCTET_INVALID", 4, 133}, - #endif - #ifdef RSA_R_ILLEGAL_OR_UNSUPPORTED_PADDING_MODE - {"ILLEGAL_OR_UNSUPPORTED_PADDING_MODE", ERR_LIB_RSA, RSA_R_ILLEGAL_OR_UNSUPPORTED_PADDING_MODE}, - #else - {"ILLEGAL_OR_UNSUPPORTED_PADDING_MODE", 4, 144}, - #endif - #ifdef RSA_R_INVALID_DIGEST - {"INVALID_DIGEST", ERR_LIB_RSA, RSA_R_INVALID_DIGEST}, - #else - {"INVALID_DIGEST", 4, 157}, - #endif - #ifdef RSA_R_INVALID_DIGEST_LENGTH - {"INVALID_DIGEST_LENGTH", ERR_LIB_RSA, RSA_R_INVALID_DIGEST_LENGTH}, - #else - {"INVALID_DIGEST_LENGTH", 4, 143}, - #endif - #ifdef RSA_R_INVALID_HEADER - {"INVALID_HEADER", ERR_LIB_RSA, RSA_R_INVALID_HEADER}, - #else - {"INVALID_HEADER", 4, 137}, - #endif - #ifdef RSA_R_INVALID_LABEL - {"INVALID_LABEL", ERR_LIB_RSA, RSA_R_INVALID_LABEL}, - #else - {"INVALID_LABEL", 4, 160}, - #endif - #ifdef RSA_R_INVALID_MESSAGE_LENGTH - {"INVALID_MESSAGE_LENGTH", ERR_LIB_RSA, RSA_R_INVALID_MESSAGE_LENGTH}, - #else - {"INVALID_MESSAGE_LENGTH", 4, 131}, - #endif - #ifdef RSA_R_INVALID_MGF1_MD - {"INVALID_MGF1_MD", ERR_LIB_RSA, RSA_R_INVALID_MGF1_MD}, - #else - {"INVALID_MGF1_MD", 4, 156}, - #endif - #ifdef RSA_R_INVALID_MULTI_PRIME_KEY - {"INVALID_MULTI_PRIME_KEY", ERR_LIB_RSA, RSA_R_INVALID_MULTI_PRIME_KEY}, - #else - {"INVALID_MULTI_PRIME_KEY", 4, 167}, - #endif - #ifdef RSA_R_INVALID_OAEP_PARAMETERS - {"INVALID_OAEP_PARAMETERS", ERR_LIB_RSA, RSA_R_INVALID_OAEP_PARAMETERS}, - #else - {"INVALID_OAEP_PARAMETERS", 4, 161}, - #endif - #ifdef RSA_R_INVALID_PADDING - {"INVALID_PADDING", ERR_LIB_RSA, RSA_R_INVALID_PADDING}, - #else - {"INVALID_PADDING", 4, 138}, - #endif - #ifdef RSA_R_INVALID_PADDING_MODE - {"INVALID_PADDING_MODE", ERR_LIB_RSA, RSA_R_INVALID_PADDING_MODE}, - #else - {"INVALID_PADDING_MODE", 4, 141}, - #endif - #ifdef RSA_R_INVALID_PSS_PARAMETERS - {"INVALID_PSS_PARAMETERS", ERR_LIB_RSA, RSA_R_INVALID_PSS_PARAMETERS}, - #else - {"INVALID_PSS_PARAMETERS", 4, 149}, - #endif - #ifdef RSA_R_INVALID_PSS_SALTLEN - {"INVALID_PSS_SALTLEN", ERR_LIB_RSA, RSA_R_INVALID_PSS_SALTLEN}, - #else - {"INVALID_PSS_SALTLEN", 4, 146}, - #endif - #ifdef RSA_R_INVALID_SALT_LENGTH - {"INVALID_SALT_LENGTH", ERR_LIB_RSA, RSA_R_INVALID_SALT_LENGTH}, - #else - {"INVALID_SALT_LENGTH", 4, 150}, - #endif - #ifdef RSA_R_INVALID_TRAILER - {"INVALID_TRAILER", ERR_LIB_RSA, RSA_R_INVALID_TRAILER}, - #else - {"INVALID_TRAILER", 4, 139}, - #endif - #ifdef RSA_R_INVALID_X931_DIGEST - {"INVALID_X931_DIGEST", ERR_LIB_RSA, RSA_R_INVALID_X931_DIGEST}, - #else - {"INVALID_X931_DIGEST", 4, 142}, - #endif - #ifdef RSA_R_IQMP_NOT_INVERSE_OF_Q - {"IQMP_NOT_INVERSE_OF_Q", ERR_LIB_RSA, RSA_R_IQMP_NOT_INVERSE_OF_Q}, - #else - {"IQMP_NOT_INVERSE_OF_Q", 4, 126}, - #endif - #ifdef RSA_R_KEY_PRIME_NUM_INVALID - {"KEY_PRIME_NUM_INVALID", ERR_LIB_RSA, RSA_R_KEY_PRIME_NUM_INVALID}, - #else - {"KEY_PRIME_NUM_INVALID", 4, 165}, - #endif - #ifdef RSA_R_KEY_SIZE_TOO_SMALL - {"KEY_SIZE_TOO_SMALL", ERR_LIB_RSA, 
RSA_R_KEY_SIZE_TOO_SMALL}, - #else - {"KEY_SIZE_TOO_SMALL", 4, 120}, - #endif - #ifdef RSA_R_LAST_OCTET_INVALID - {"LAST_OCTET_INVALID", ERR_LIB_RSA, RSA_R_LAST_OCTET_INVALID}, - #else - {"LAST_OCTET_INVALID", 4, 134}, - #endif - #ifdef RSA_R_MGF1_DIGEST_NOT_ALLOWED - {"MGF1_DIGEST_NOT_ALLOWED", ERR_LIB_RSA, RSA_R_MGF1_DIGEST_NOT_ALLOWED}, - #else - {"MGF1_DIGEST_NOT_ALLOWED", 4, 152}, - #endif - #ifdef RSA_R_MISSING_PRIVATE_KEY - {"MISSING_PRIVATE_KEY", ERR_LIB_RSA, RSA_R_MISSING_PRIVATE_KEY}, - #else - {"MISSING_PRIVATE_KEY", 4, 179}, - #endif - #ifdef RSA_R_MODULUS_TOO_LARGE - {"MODULUS_TOO_LARGE", ERR_LIB_RSA, RSA_R_MODULUS_TOO_LARGE}, - #else - {"MODULUS_TOO_LARGE", 4, 105}, - #endif - #ifdef RSA_R_MP_COEFFICIENT_NOT_INVERSE_OF_R - {"MP_COEFFICIENT_NOT_INVERSE_OF_R", ERR_LIB_RSA, RSA_R_MP_COEFFICIENT_NOT_INVERSE_OF_R}, - #else - {"MP_COEFFICIENT_NOT_INVERSE_OF_R", 4, 168}, - #endif - #ifdef RSA_R_MP_EXPONENT_NOT_CONGRUENT_TO_D - {"MP_EXPONENT_NOT_CONGRUENT_TO_D", ERR_LIB_RSA, RSA_R_MP_EXPONENT_NOT_CONGRUENT_TO_D}, - #else - {"MP_EXPONENT_NOT_CONGRUENT_TO_D", 4, 169}, - #endif - #ifdef RSA_R_MP_R_NOT_PRIME - {"MP_R_NOT_PRIME", ERR_LIB_RSA, RSA_R_MP_R_NOT_PRIME}, - #else - {"MP_R_NOT_PRIME", 4, 170}, - #endif - #ifdef RSA_R_NO_PUBLIC_EXPONENT - {"NO_PUBLIC_EXPONENT", ERR_LIB_RSA, RSA_R_NO_PUBLIC_EXPONENT}, - #else - {"NO_PUBLIC_EXPONENT", 4, 140}, - #endif - #ifdef RSA_R_NULL_BEFORE_BLOCK_MISSING - {"NULL_BEFORE_BLOCK_MISSING", ERR_LIB_RSA, RSA_R_NULL_BEFORE_BLOCK_MISSING}, - #else - {"NULL_BEFORE_BLOCK_MISSING", 4, 113}, - #endif - #ifdef RSA_R_N_DOES_NOT_EQUAL_PRODUCT_OF_PRIMES - {"N_DOES_NOT_EQUAL_PRODUCT_OF_PRIMES", ERR_LIB_RSA, RSA_R_N_DOES_NOT_EQUAL_PRODUCT_OF_PRIMES}, - #else - {"N_DOES_NOT_EQUAL_PRODUCT_OF_PRIMES", 4, 172}, - #endif - #ifdef RSA_R_N_DOES_NOT_EQUAL_P_Q - {"N_DOES_NOT_EQUAL_P_Q", ERR_LIB_RSA, RSA_R_N_DOES_NOT_EQUAL_P_Q}, - #else - {"N_DOES_NOT_EQUAL_P_Q", 4, 127}, - #endif - #ifdef RSA_R_OAEP_DECODING_ERROR - {"OAEP_DECODING_ERROR", ERR_LIB_RSA, RSA_R_OAEP_DECODING_ERROR}, - #else - {"OAEP_DECODING_ERROR", 4, 121}, - #endif - #ifdef RSA_R_OPERATION_NOT_SUPPORTED_FOR_THIS_KEYTYPE - {"OPERATION_NOT_SUPPORTED_FOR_THIS_KEYTYPE", ERR_LIB_RSA, RSA_R_OPERATION_NOT_SUPPORTED_FOR_THIS_KEYTYPE}, - #else - {"OPERATION_NOT_SUPPORTED_FOR_THIS_KEYTYPE", 4, 148}, - #endif - #ifdef RSA_R_PADDING_CHECK_FAILED - {"PADDING_CHECK_FAILED", ERR_LIB_RSA, RSA_R_PADDING_CHECK_FAILED}, - #else - {"PADDING_CHECK_FAILED", 4, 114}, - #endif - #ifdef RSA_R_PKCS_DECODING_ERROR - {"PKCS_DECODING_ERROR", ERR_LIB_RSA, RSA_R_PKCS_DECODING_ERROR}, - #else - {"PKCS_DECODING_ERROR", 4, 159}, - #endif - #ifdef RSA_R_PSS_SALTLEN_TOO_SMALL - {"PSS_SALTLEN_TOO_SMALL", ERR_LIB_RSA, RSA_R_PSS_SALTLEN_TOO_SMALL}, - #else - {"PSS_SALTLEN_TOO_SMALL", 4, 164}, - #endif - #ifdef RSA_R_P_NOT_PRIME - {"P_NOT_PRIME", ERR_LIB_RSA, RSA_R_P_NOT_PRIME}, - #else - {"P_NOT_PRIME", 4, 128}, - #endif - #ifdef RSA_R_Q_NOT_PRIME - {"Q_NOT_PRIME", ERR_LIB_RSA, RSA_R_Q_NOT_PRIME}, - #else - {"Q_NOT_PRIME", 4, 129}, - #endif - #ifdef RSA_R_RSA_OPERATIONS_NOT_SUPPORTED - {"RSA_OPERATIONS_NOT_SUPPORTED", ERR_LIB_RSA, RSA_R_RSA_OPERATIONS_NOT_SUPPORTED}, - #else - {"RSA_OPERATIONS_NOT_SUPPORTED", 4, 130}, - #endif - #ifdef RSA_R_SLEN_CHECK_FAILED - {"SLEN_CHECK_FAILED", ERR_LIB_RSA, RSA_R_SLEN_CHECK_FAILED}, - #else - {"SLEN_CHECK_FAILED", 4, 136}, - #endif - #ifdef RSA_R_SLEN_RECOVERY_FAILED - {"SLEN_RECOVERY_FAILED", ERR_LIB_RSA, RSA_R_SLEN_RECOVERY_FAILED}, - #else - {"SLEN_RECOVERY_FAILED", 4, 135}, - #endif - #ifdef 
RSA_R_SSLV3_ROLLBACK_ATTACK - {"SSLV3_ROLLBACK_ATTACK", ERR_LIB_RSA, RSA_R_SSLV3_ROLLBACK_ATTACK}, - #else - {"SSLV3_ROLLBACK_ATTACK", 4, 115}, - #endif - #ifdef RSA_R_THE_ASN1_OBJECT_IDENTIFIER_IS_NOT_KNOWN_FOR_THIS_MD - {"THE_ASN1_OBJECT_IDENTIFIER_IS_NOT_KNOWN_FOR_THIS_MD", ERR_LIB_RSA, RSA_R_THE_ASN1_OBJECT_IDENTIFIER_IS_NOT_KNOWN_FOR_THIS_MD}, - #else - {"THE_ASN1_OBJECT_IDENTIFIER_IS_NOT_KNOWN_FOR_THIS_MD", 4, 116}, - #endif - #ifdef RSA_R_UNKNOWN_ALGORITHM_TYPE - {"UNKNOWN_ALGORITHM_TYPE", ERR_LIB_RSA, RSA_R_UNKNOWN_ALGORITHM_TYPE}, - #else - {"UNKNOWN_ALGORITHM_TYPE", 4, 117}, - #endif - #ifdef RSA_R_UNKNOWN_DIGEST - {"UNKNOWN_DIGEST", ERR_LIB_RSA, RSA_R_UNKNOWN_DIGEST}, - #else - {"UNKNOWN_DIGEST", 4, 166}, - #endif - #ifdef RSA_R_UNKNOWN_MASK_DIGEST - {"UNKNOWN_MASK_DIGEST", ERR_LIB_RSA, RSA_R_UNKNOWN_MASK_DIGEST}, - #else - {"UNKNOWN_MASK_DIGEST", 4, 151}, - #endif - #ifdef RSA_R_UNKNOWN_PADDING_TYPE - {"UNKNOWN_PADDING_TYPE", ERR_LIB_RSA, RSA_R_UNKNOWN_PADDING_TYPE}, - #else - {"UNKNOWN_PADDING_TYPE", 4, 118}, - #endif - #ifdef RSA_R_UNSUPPORTED_ENCRYPTION_TYPE - {"UNSUPPORTED_ENCRYPTION_TYPE", ERR_LIB_RSA, RSA_R_UNSUPPORTED_ENCRYPTION_TYPE}, - #else - {"UNSUPPORTED_ENCRYPTION_TYPE", 4, 162}, - #endif - #ifdef RSA_R_UNSUPPORTED_LABEL_SOURCE - {"UNSUPPORTED_LABEL_SOURCE", ERR_LIB_RSA, RSA_R_UNSUPPORTED_LABEL_SOURCE}, - #else - {"UNSUPPORTED_LABEL_SOURCE", 4, 163}, - #endif - #ifdef RSA_R_UNSUPPORTED_MASK_ALGORITHM - {"UNSUPPORTED_MASK_ALGORITHM", ERR_LIB_RSA, RSA_R_UNSUPPORTED_MASK_ALGORITHM}, - #else - {"UNSUPPORTED_MASK_ALGORITHM", 4, 153}, - #endif - #ifdef RSA_R_UNSUPPORTED_MASK_PARAMETER - {"UNSUPPORTED_MASK_PARAMETER", ERR_LIB_RSA, RSA_R_UNSUPPORTED_MASK_PARAMETER}, - #else - {"UNSUPPORTED_MASK_PARAMETER", 4, 154}, - #endif - #ifdef RSA_R_UNSUPPORTED_SIGNATURE_TYPE - {"UNSUPPORTED_SIGNATURE_TYPE", ERR_LIB_RSA, RSA_R_UNSUPPORTED_SIGNATURE_TYPE}, - #else - {"UNSUPPORTED_SIGNATURE_TYPE", 4, 155}, - #endif - #ifdef RSA_R_VALUE_MISSING - {"VALUE_MISSING", ERR_LIB_RSA, RSA_R_VALUE_MISSING}, - #else - {"VALUE_MISSING", 4, 147}, - #endif - #ifdef RSA_R_WRONG_SIGNATURE_LENGTH - {"WRONG_SIGNATURE_LENGTH", ERR_LIB_RSA, RSA_R_WRONG_SIGNATURE_LENGTH}, - #else - {"WRONG_SIGNATURE_LENGTH", 4, 119}, - #endif - #ifdef SSL_R_APPLICATION_DATA_AFTER_CLOSE_NOTIFY - {"APPLICATION_DATA_AFTER_CLOSE_NOTIFY", ERR_LIB_SSL, SSL_R_APPLICATION_DATA_AFTER_CLOSE_NOTIFY}, - #else - {"APPLICATION_DATA_AFTER_CLOSE_NOTIFY", 20, 291}, - #endif - #ifdef SSL_R_APP_DATA_IN_HANDSHAKE - {"APP_DATA_IN_HANDSHAKE", ERR_LIB_SSL, SSL_R_APP_DATA_IN_HANDSHAKE}, - #else - {"APP_DATA_IN_HANDSHAKE", 20, 100}, - #endif - #ifdef SSL_R_ATTEMPT_TO_REUSE_SESSION_IN_DIFFERENT_CONTEXT - {"ATTEMPT_TO_REUSE_SESSION_IN_DIFFERENT_CONTEXT", ERR_LIB_SSL, SSL_R_ATTEMPT_TO_REUSE_SESSION_IN_DIFFERENT_CONTEXT}, - #else - {"ATTEMPT_TO_REUSE_SESSION_IN_DIFFERENT_CONTEXT", 20, 272}, - #endif - #ifdef SSL_R_AT_LEAST_TLS_1_0_NEEDED_IN_FIPS_MODE - {"AT_LEAST_TLS_1_0_NEEDED_IN_FIPS_MODE", ERR_LIB_SSL, SSL_R_AT_LEAST_TLS_1_0_NEEDED_IN_FIPS_MODE}, - #else - {"AT_LEAST_TLS_1_0_NEEDED_IN_FIPS_MODE", 20, 143}, - #endif - #ifdef SSL_R_AT_LEAST_TLS_1_2_NEEDED_IN_SUITEB_MODE - {"AT_LEAST_TLS_1_2_NEEDED_IN_SUITEB_MODE", ERR_LIB_SSL, SSL_R_AT_LEAST_TLS_1_2_NEEDED_IN_SUITEB_MODE}, - #else - {"AT_LEAST_TLS_1_2_NEEDED_IN_SUITEB_MODE", 20, 158}, - #endif - #ifdef SSL_R_BAD_CHANGE_CIPHER_SPEC - {"BAD_CHANGE_CIPHER_SPEC", ERR_LIB_SSL, SSL_R_BAD_CHANGE_CIPHER_SPEC}, - #else - {"BAD_CHANGE_CIPHER_SPEC", 20, 103}, - #endif - #ifdef SSL_R_BAD_CIPHER - {"BAD_CIPHER", 
ERR_LIB_SSL, SSL_R_BAD_CIPHER}, - #else - {"BAD_CIPHER", 20, 186}, - #endif - #ifdef SSL_R_BAD_DATA - {"BAD_DATA", ERR_LIB_SSL, SSL_R_BAD_DATA}, - #else - {"BAD_DATA", 20, 390}, - #endif - #ifdef SSL_R_BAD_DATA_RETURNED_BY_CALLBACK - {"BAD_DATA_RETURNED_BY_CALLBACK", ERR_LIB_SSL, SSL_R_BAD_DATA_RETURNED_BY_CALLBACK}, - #else - {"BAD_DATA_RETURNED_BY_CALLBACK", 20, 106}, - #endif - #ifdef SSL_R_BAD_DECOMPRESSION - {"BAD_DECOMPRESSION", ERR_LIB_SSL, SSL_R_BAD_DECOMPRESSION}, - #else - {"BAD_DECOMPRESSION", 20, 107}, - #endif - #ifdef SSL_R_BAD_DH_VALUE - {"BAD_DH_VALUE", ERR_LIB_SSL, SSL_R_BAD_DH_VALUE}, - #else - {"BAD_DH_VALUE", 20, 102}, - #endif - #ifdef SSL_R_BAD_DIGEST_LENGTH - {"BAD_DIGEST_LENGTH", ERR_LIB_SSL, SSL_R_BAD_DIGEST_LENGTH}, - #else - {"BAD_DIGEST_LENGTH", 20, 111}, - #endif - #ifdef SSL_R_BAD_EARLY_DATA - {"BAD_EARLY_DATA", ERR_LIB_SSL, SSL_R_BAD_EARLY_DATA}, - #else - {"BAD_EARLY_DATA", 20, 233}, - #endif - #ifdef SSL_R_BAD_ECC_CERT - {"BAD_ECC_CERT", ERR_LIB_SSL, SSL_R_BAD_ECC_CERT}, - #else - {"BAD_ECC_CERT", 20, 304}, - #endif - #ifdef SSL_R_BAD_ECDSA_SIGNATURE - {"BAD_ECDSA_SIGNATURE", ERR_LIB_SSL, SSL_R_BAD_ECDSA_SIGNATURE}, - #else - {"BAD_ECDSA_SIGNATURE", 20, 305}, - #endif - #ifdef SSL_R_BAD_ECPOINT - {"BAD_ECPOINT", ERR_LIB_SSL, SSL_R_BAD_ECPOINT}, - #else - {"BAD_ECPOINT", 20, 306}, - #endif - #ifdef SSL_R_BAD_EXTENSION - {"BAD_EXTENSION", ERR_LIB_SSL, SSL_R_BAD_EXTENSION}, - #else - {"BAD_EXTENSION", 20, 110}, - #endif - #ifdef SSL_R_BAD_HANDSHAKE_LENGTH - {"BAD_HANDSHAKE_LENGTH", ERR_LIB_SSL, SSL_R_BAD_HANDSHAKE_LENGTH}, - #else - {"BAD_HANDSHAKE_LENGTH", 20, 332}, - #endif - #ifdef SSL_R_BAD_HANDSHAKE_STATE - {"BAD_HANDSHAKE_STATE", ERR_LIB_SSL, SSL_R_BAD_HANDSHAKE_STATE}, - #else - {"BAD_HANDSHAKE_STATE", 20, 236}, - #endif - #ifdef SSL_R_BAD_HELLO_REQUEST - {"BAD_HELLO_REQUEST", ERR_LIB_SSL, SSL_R_BAD_HELLO_REQUEST}, - #else - {"BAD_HELLO_REQUEST", 20, 105}, - #endif - #ifdef SSL_R_BAD_HRR_VERSION - {"BAD_HRR_VERSION", ERR_LIB_SSL, SSL_R_BAD_HRR_VERSION}, - #else - {"BAD_HRR_VERSION", 20, 263}, - #endif - #ifdef SSL_R_BAD_KEY_SHARE - {"BAD_KEY_SHARE", ERR_LIB_SSL, SSL_R_BAD_KEY_SHARE}, - #else - {"BAD_KEY_SHARE", 20, 108}, - #endif - #ifdef SSL_R_BAD_KEY_UPDATE - {"BAD_KEY_UPDATE", ERR_LIB_SSL, SSL_R_BAD_KEY_UPDATE}, - #else - {"BAD_KEY_UPDATE", 20, 122}, - #endif - #ifdef SSL_R_BAD_LEGACY_VERSION - {"BAD_LEGACY_VERSION", ERR_LIB_SSL, SSL_R_BAD_LEGACY_VERSION}, - #else - {"BAD_LEGACY_VERSION", 20, 292}, - #endif - #ifdef SSL_R_BAD_LENGTH - {"BAD_LENGTH", ERR_LIB_SSL, SSL_R_BAD_LENGTH}, - #else - {"BAD_LENGTH", 20, 271}, - #endif - #ifdef SSL_R_BAD_MAC_LENGTH - {"BAD_MAC_LENGTH", ERR_LIB_SSL, SSL_R_BAD_MAC_LENGTH}, - #else - {"BAD_MAC_LENGTH", 20, 333}, - #endif - #ifdef SSL_R_BAD_PACKET - {"BAD_PACKET", ERR_LIB_SSL, SSL_R_BAD_PACKET}, - #else - {"BAD_PACKET", 20, 240}, - #endif - #ifdef SSL_R_BAD_PACKET_LENGTH - {"BAD_PACKET_LENGTH", ERR_LIB_SSL, SSL_R_BAD_PACKET_LENGTH}, - #else - {"BAD_PACKET_LENGTH", 20, 115}, - #endif - #ifdef SSL_R_BAD_PROTOCOL_VERSION_NUMBER - {"BAD_PROTOCOL_VERSION_NUMBER", ERR_LIB_SSL, SSL_R_BAD_PROTOCOL_VERSION_NUMBER}, - #else - {"BAD_PROTOCOL_VERSION_NUMBER", 20, 116}, - #endif - #ifdef SSL_R_BAD_PSK - {"BAD_PSK", ERR_LIB_SSL, SSL_R_BAD_PSK}, - #else - {"BAD_PSK", 20, 219}, - #endif - #ifdef SSL_R_BAD_PSK_IDENTITY - {"BAD_PSK_IDENTITY", ERR_LIB_SSL, SSL_R_BAD_PSK_IDENTITY}, - #else - {"BAD_PSK_IDENTITY", 20, 114}, - #endif - #ifdef SSL_R_BAD_PSK_IDENTITY_HINT_LENGTH - {"BAD_PSK_IDENTITY_HINT_LENGTH", ERR_LIB_SSL, 
SSL_R_BAD_PSK_IDENTITY_HINT_LENGTH}, - #else - {"BAD_PSK_IDENTITY_HINT_LENGTH", 20, 316}, - #endif - #ifdef SSL_R_BAD_RECORD_TYPE - {"BAD_RECORD_TYPE", ERR_LIB_SSL, SSL_R_BAD_RECORD_TYPE}, - #else - {"BAD_RECORD_TYPE", 20, 443}, - #endif - #ifdef SSL_R_BAD_RSA_ENCRYPT - {"BAD_RSA_ENCRYPT", ERR_LIB_SSL, SSL_R_BAD_RSA_ENCRYPT}, - #else - {"BAD_RSA_ENCRYPT", 20, 119}, - #endif - #ifdef SSL_R_BAD_SIGNATURE - {"BAD_SIGNATURE", ERR_LIB_SSL, SSL_R_BAD_SIGNATURE}, - #else - {"BAD_SIGNATURE", 20, 123}, - #endif - #ifdef SSL_R_BAD_SRP_A_LENGTH - {"BAD_SRP_A_LENGTH", ERR_LIB_SSL, SSL_R_BAD_SRP_A_LENGTH}, - #else - {"BAD_SRP_A_LENGTH", 20, 347}, - #endif - #ifdef SSL_R_BAD_SRP_B_LENGTH - {"BAD_SRP_B_LENGTH", ERR_LIB_SSL, SSL_R_BAD_SRP_B_LENGTH}, - #else - {"BAD_SRP_B_LENGTH", 20, 348}, - #endif - #ifdef SSL_R_BAD_SRP_G_LENGTH - {"BAD_SRP_G_LENGTH", ERR_LIB_SSL, SSL_R_BAD_SRP_G_LENGTH}, - #else - {"BAD_SRP_G_LENGTH", 20, 349}, - #endif - #ifdef SSL_R_BAD_SRP_N_LENGTH - {"BAD_SRP_N_LENGTH", ERR_LIB_SSL, SSL_R_BAD_SRP_N_LENGTH}, - #else - {"BAD_SRP_N_LENGTH", 20, 350}, - #endif - #ifdef SSL_R_BAD_SRP_PARAMETERS - {"BAD_SRP_PARAMETERS", ERR_LIB_SSL, SSL_R_BAD_SRP_PARAMETERS}, - #else - {"BAD_SRP_PARAMETERS", 20, 371}, - #endif - #ifdef SSL_R_BAD_SRP_S_LENGTH - {"BAD_SRP_S_LENGTH", ERR_LIB_SSL, SSL_R_BAD_SRP_S_LENGTH}, - #else - {"BAD_SRP_S_LENGTH", 20, 351}, - #endif - #ifdef SSL_R_BAD_SRTP_MKI_VALUE - {"BAD_SRTP_MKI_VALUE", ERR_LIB_SSL, SSL_R_BAD_SRTP_MKI_VALUE}, - #else - {"BAD_SRTP_MKI_VALUE", 20, 352}, - #endif - #ifdef SSL_R_BAD_SRTP_PROTECTION_PROFILE_LIST - {"BAD_SRTP_PROTECTION_PROFILE_LIST", ERR_LIB_SSL, SSL_R_BAD_SRTP_PROTECTION_PROFILE_LIST}, - #else - {"BAD_SRTP_PROTECTION_PROFILE_LIST", 20, 353}, - #endif - #ifdef SSL_R_BAD_SSL_FILETYPE - {"BAD_SSL_FILETYPE", ERR_LIB_SSL, SSL_R_BAD_SSL_FILETYPE}, - #else - {"BAD_SSL_FILETYPE", 20, 124}, - #endif - #ifdef SSL_R_BAD_VALUE - {"BAD_VALUE", ERR_LIB_SSL, SSL_R_BAD_VALUE}, - #else - {"BAD_VALUE", 20, 384}, - #endif - #ifdef SSL_R_BAD_WRITE_RETRY - {"BAD_WRITE_RETRY", ERR_LIB_SSL, SSL_R_BAD_WRITE_RETRY}, - #else - {"BAD_WRITE_RETRY", 20, 127}, - #endif - #ifdef SSL_R_BINDER_DOES_NOT_VERIFY - {"BINDER_DOES_NOT_VERIFY", ERR_LIB_SSL, SSL_R_BINDER_DOES_NOT_VERIFY}, - #else - {"BINDER_DOES_NOT_VERIFY", 20, 253}, - #endif - #ifdef SSL_R_BIO_NOT_SET - {"BIO_NOT_SET", ERR_LIB_SSL, SSL_R_BIO_NOT_SET}, - #else - {"BIO_NOT_SET", 20, 128}, - #endif - #ifdef SSL_R_BLOCK_CIPHER_PAD_IS_WRONG - {"BLOCK_CIPHER_PAD_IS_WRONG", ERR_LIB_SSL, SSL_R_BLOCK_CIPHER_PAD_IS_WRONG}, - #else - {"BLOCK_CIPHER_PAD_IS_WRONG", 20, 129}, - #endif - #ifdef SSL_R_BN_LIB - {"BN_LIB", ERR_LIB_SSL, SSL_R_BN_LIB}, - #else - {"BN_LIB", 20, 130}, - #endif - #ifdef SSL_R_CALLBACK_FAILED - {"CALLBACK_FAILED", ERR_LIB_SSL, SSL_R_CALLBACK_FAILED}, - #else - {"CALLBACK_FAILED", 20, 234}, - #endif - #ifdef SSL_R_CANNOT_CHANGE_CIPHER - {"CANNOT_CHANGE_CIPHER", ERR_LIB_SSL, SSL_R_CANNOT_CHANGE_CIPHER}, - #else - {"CANNOT_CHANGE_CIPHER", 20, 109}, - #endif - #ifdef SSL_R_CA_DN_LENGTH_MISMATCH - {"CA_DN_LENGTH_MISMATCH", ERR_LIB_SSL, SSL_R_CA_DN_LENGTH_MISMATCH}, - #else - {"CA_DN_LENGTH_MISMATCH", 20, 131}, - #endif - #ifdef SSL_R_CA_KEY_TOO_SMALL - {"CA_KEY_TOO_SMALL", ERR_LIB_SSL, SSL_R_CA_KEY_TOO_SMALL}, - #else - {"CA_KEY_TOO_SMALL", 20, 397}, - #endif - #ifdef SSL_R_CA_MD_TOO_WEAK - {"CA_MD_TOO_WEAK", ERR_LIB_SSL, SSL_R_CA_MD_TOO_WEAK}, - #else - {"CA_MD_TOO_WEAK", 20, 398}, - #endif - #ifdef SSL_R_CCS_RECEIVED_EARLY - {"CCS_RECEIVED_EARLY", ERR_LIB_SSL, SSL_R_CCS_RECEIVED_EARLY}, - #else - 
{"CCS_RECEIVED_EARLY", 20, 133}, - #endif - #ifdef SSL_R_CERTIFICATE_VERIFY_FAILED - {"CERTIFICATE_VERIFY_FAILED", ERR_LIB_SSL, SSL_R_CERTIFICATE_VERIFY_FAILED}, - #else - {"CERTIFICATE_VERIFY_FAILED", 20, 134}, - #endif - #ifdef SSL_R_CERT_CB_ERROR - {"CERT_CB_ERROR", ERR_LIB_SSL, SSL_R_CERT_CB_ERROR}, - #else - {"CERT_CB_ERROR", 20, 377}, - #endif - #ifdef SSL_R_CERT_LENGTH_MISMATCH - {"CERT_LENGTH_MISMATCH", ERR_LIB_SSL, SSL_R_CERT_LENGTH_MISMATCH}, - #else - {"CERT_LENGTH_MISMATCH", 20, 135}, - #endif - #ifdef SSL_R_CIPHERSUITE_DIGEST_HAS_CHANGED - {"CIPHERSUITE_DIGEST_HAS_CHANGED", ERR_LIB_SSL, SSL_R_CIPHERSUITE_DIGEST_HAS_CHANGED}, - #else - {"CIPHERSUITE_DIGEST_HAS_CHANGED", 20, 218}, - #endif - #ifdef SSL_R_CIPHER_CODE_WRONG_LENGTH - {"CIPHER_CODE_WRONG_LENGTH", ERR_LIB_SSL, SSL_R_CIPHER_CODE_WRONG_LENGTH}, - #else - {"CIPHER_CODE_WRONG_LENGTH", 20, 137}, - #endif - #ifdef SSL_R_CIPHER_OR_HASH_UNAVAILABLE - {"CIPHER_OR_HASH_UNAVAILABLE", ERR_LIB_SSL, SSL_R_CIPHER_OR_HASH_UNAVAILABLE}, - #else - {"CIPHER_OR_HASH_UNAVAILABLE", 20, 138}, - #endif - #ifdef SSL_R_CLIENTHELLO_TLSEXT - {"CLIENTHELLO_TLSEXT", ERR_LIB_SSL, SSL_R_CLIENTHELLO_TLSEXT}, - #else - {"CLIENTHELLO_TLSEXT", 20, 226}, - #endif - #ifdef SSL_R_COMPRESSED_LENGTH_TOO_LONG - {"COMPRESSED_LENGTH_TOO_LONG", ERR_LIB_SSL, SSL_R_COMPRESSED_LENGTH_TOO_LONG}, - #else - {"COMPRESSED_LENGTH_TOO_LONG", 20, 140}, - #endif - #ifdef SSL_R_COMPRESSION_DISABLED - {"COMPRESSION_DISABLED", ERR_LIB_SSL, SSL_R_COMPRESSION_DISABLED}, - #else - {"COMPRESSION_DISABLED", 20, 343}, - #endif - #ifdef SSL_R_COMPRESSION_FAILURE - {"COMPRESSION_FAILURE", ERR_LIB_SSL, SSL_R_COMPRESSION_FAILURE}, - #else - {"COMPRESSION_FAILURE", 20, 141}, - #endif - #ifdef SSL_R_COMPRESSION_ID_NOT_WITHIN_PRIVATE_RANGE - {"COMPRESSION_ID_NOT_WITHIN_PRIVATE_RANGE", ERR_LIB_SSL, SSL_R_COMPRESSION_ID_NOT_WITHIN_PRIVATE_RANGE}, - #else - {"COMPRESSION_ID_NOT_WITHIN_PRIVATE_RANGE", 20, 307}, - #endif - #ifdef SSL_R_COMPRESSION_LIBRARY_ERROR - {"COMPRESSION_LIBRARY_ERROR", ERR_LIB_SSL, SSL_R_COMPRESSION_LIBRARY_ERROR}, - #else - {"COMPRESSION_LIBRARY_ERROR", 20, 142}, - #endif - #ifdef SSL_R_CONNECTION_TYPE_NOT_SET - {"CONNECTION_TYPE_NOT_SET", ERR_LIB_SSL, SSL_R_CONNECTION_TYPE_NOT_SET}, - #else - {"CONNECTION_TYPE_NOT_SET", 20, 144}, - #endif - #ifdef SSL_R_CONTEXT_NOT_DANE_ENABLED - {"CONTEXT_NOT_DANE_ENABLED", ERR_LIB_SSL, SSL_R_CONTEXT_NOT_DANE_ENABLED}, - #else - {"CONTEXT_NOT_DANE_ENABLED", 20, 167}, - #endif - #ifdef SSL_R_COOKIE_GEN_CALLBACK_FAILURE - {"COOKIE_GEN_CALLBACK_FAILURE", ERR_LIB_SSL, SSL_R_COOKIE_GEN_CALLBACK_FAILURE}, - #else - {"COOKIE_GEN_CALLBACK_FAILURE", 20, 400}, - #endif - #ifdef SSL_R_COOKIE_MISMATCH - {"COOKIE_MISMATCH", ERR_LIB_SSL, SSL_R_COOKIE_MISMATCH}, - #else - {"COOKIE_MISMATCH", 20, 308}, - #endif - #ifdef SSL_R_CUSTOM_EXT_HANDLER_ALREADY_INSTALLED - {"CUSTOM_EXT_HANDLER_ALREADY_INSTALLED", ERR_LIB_SSL, SSL_R_CUSTOM_EXT_HANDLER_ALREADY_INSTALLED}, - #else - {"CUSTOM_EXT_HANDLER_ALREADY_INSTALLED", 20, 206}, - #endif - #ifdef SSL_R_DANE_ALREADY_ENABLED - {"DANE_ALREADY_ENABLED", ERR_LIB_SSL, SSL_R_DANE_ALREADY_ENABLED}, - #else - {"DANE_ALREADY_ENABLED", 20, 172}, - #endif - #ifdef SSL_R_DANE_CANNOT_OVERRIDE_MTYPE_FULL - {"DANE_CANNOT_OVERRIDE_MTYPE_FULL", ERR_LIB_SSL, SSL_R_DANE_CANNOT_OVERRIDE_MTYPE_FULL}, - #else - {"DANE_CANNOT_OVERRIDE_MTYPE_FULL", 20, 173}, - #endif - #ifdef SSL_R_DANE_NOT_ENABLED - {"DANE_NOT_ENABLED", ERR_LIB_SSL, SSL_R_DANE_NOT_ENABLED}, - #else - {"DANE_NOT_ENABLED", 20, 175}, - #endif - #ifdef 
SSL_R_DANE_TLSA_BAD_CERTIFICATE - {"DANE_TLSA_BAD_CERTIFICATE", ERR_LIB_SSL, SSL_R_DANE_TLSA_BAD_CERTIFICATE}, - #else - {"DANE_TLSA_BAD_CERTIFICATE", 20, 180}, - #endif - #ifdef SSL_R_DANE_TLSA_BAD_CERTIFICATE_USAGE - {"DANE_TLSA_BAD_CERTIFICATE_USAGE", ERR_LIB_SSL, SSL_R_DANE_TLSA_BAD_CERTIFICATE_USAGE}, - #else - {"DANE_TLSA_BAD_CERTIFICATE_USAGE", 20, 184}, - #endif - #ifdef SSL_R_DANE_TLSA_BAD_DATA_LENGTH - {"DANE_TLSA_BAD_DATA_LENGTH", ERR_LIB_SSL, SSL_R_DANE_TLSA_BAD_DATA_LENGTH}, - #else - {"DANE_TLSA_BAD_DATA_LENGTH", 20, 189}, - #endif - #ifdef SSL_R_DANE_TLSA_BAD_DIGEST_LENGTH - {"DANE_TLSA_BAD_DIGEST_LENGTH", ERR_LIB_SSL, SSL_R_DANE_TLSA_BAD_DIGEST_LENGTH}, - #else - {"DANE_TLSA_BAD_DIGEST_LENGTH", 20, 192}, - #endif - #ifdef SSL_R_DANE_TLSA_BAD_MATCHING_TYPE - {"DANE_TLSA_BAD_MATCHING_TYPE", ERR_LIB_SSL, SSL_R_DANE_TLSA_BAD_MATCHING_TYPE}, - #else - {"DANE_TLSA_BAD_MATCHING_TYPE", 20, 200}, - #endif - #ifdef SSL_R_DANE_TLSA_BAD_PUBLIC_KEY - {"DANE_TLSA_BAD_PUBLIC_KEY", ERR_LIB_SSL, SSL_R_DANE_TLSA_BAD_PUBLIC_KEY}, - #else - {"DANE_TLSA_BAD_PUBLIC_KEY", 20, 201}, - #endif - #ifdef SSL_R_DANE_TLSA_BAD_SELECTOR - {"DANE_TLSA_BAD_SELECTOR", ERR_LIB_SSL, SSL_R_DANE_TLSA_BAD_SELECTOR}, - #else - {"DANE_TLSA_BAD_SELECTOR", 20, 202}, - #endif - #ifdef SSL_R_DANE_TLSA_NULL_DATA - {"DANE_TLSA_NULL_DATA", ERR_LIB_SSL, SSL_R_DANE_TLSA_NULL_DATA}, - #else - {"DANE_TLSA_NULL_DATA", 20, 203}, - #endif - #ifdef SSL_R_DATA_BETWEEN_CCS_AND_FINISHED - {"DATA_BETWEEN_CCS_AND_FINISHED", ERR_LIB_SSL, SSL_R_DATA_BETWEEN_CCS_AND_FINISHED}, - #else - {"DATA_BETWEEN_CCS_AND_FINISHED", 20, 145}, - #endif - #ifdef SSL_R_DATA_LENGTH_TOO_LONG - {"DATA_LENGTH_TOO_LONG", ERR_LIB_SSL, SSL_R_DATA_LENGTH_TOO_LONG}, - #else - {"DATA_LENGTH_TOO_LONG", 20, 146}, - #endif - #ifdef SSL_R_DECRYPTION_FAILED - {"DECRYPTION_FAILED", ERR_LIB_SSL, SSL_R_DECRYPTION_FAILED}, - #else - {"DECRYPTION_FAILED", 20, 147}, - #endif - #ifdef SSL_R_DECRYPTION_FAILED_OR_BAD_RECORD_MAC - {"DECRYPTION_FAILED_OR_BAD_RECORD_MAC", ERR_LIB_SSL, SSL_R_DECRYPTION_FAILED_OR_BAD_RECORD_MAC}, - #else - {"DECRYPTION_FAILED_OR_BAD_RECORD_MAC", 20, 281}, - #endif - #ifdef SSL_R_DH_KEY_TOO_SMALL - {"DH_KEY_TOO_SMALL", ERR_LIB_SSL, SSL_R_DH_KEY_TOO_SMALL}, - #else - {"DH_KEY_TOO_SMALL", 20, 394}, - #endif - #ifdef SSL_R_DH_PUBLIC_VALUE_LENGTH_IS_WRONG - {"DH_PUBLIC_VALUE_LENGTH_IS_WRONG", ERR_LIB_SSL, SSL_R_DH_PUBLIC_VALUE_LENGTH_IS_WRONG}, - #else - {"DH_PUBLIC_VALUE_LENGTH_IS_WRONG", 20, 148}, - #endif - #ifdef SSL_R_DIGEST_CHECK_FAILED - {"DIGEST_CHECK_FAILED", ERR_LIB_SSL, SSL_R_DIGEST_CHECK_FAILED}, - #else - {"DIGEST_CHECK_FAILED", 20, 149}, - #endif - #ifdef SSL_R_DTLS_MESSAGE_TOO_BIG - {"DTLS_MESSAGE_TOO_BIG", ERR_LIB_SSL, SSL_R_DTLS_MESSAGE_TOO_BIG}, - #else - {"DTLS_MESSAGE_TOO_BIG", 20, 334}, - #endif - #ifdef SSL_R_DUPLICATE_COMPRESSION_ID - {"DUPLICATE_COMPRESSION_ID", ERR_LIB_SSL, SSL_R_DUPLICATE_COMPRESSION_ID}, - #else - {"DUPLICATE_COMPRESSION_ID", 20, 309}, - #endif - #ifdef SSL_R_ECC_CERT_NOT_FOR_KEY_AGREEMENT - {"ECC_CERT_NOT_FOR_KEY_AGREEMENT", ERR_LIB_SSL, SSL_R_ECC_CERT_NOT_FOR_KEY_AGREEMENT}, - #else - {"ECC_CERT_NOT_FOR_KEY_AGREEMENT", 20, 317}, - #endif - #ifdef SSL_R_ECC_CERT_NOT_FOR_SIGNING - {"ECC_CERT_NOT_FOR_SIGNING", ERR_LIB_SSL, SSL_R_ECC_CERT_NOT_FOR_SIGNING}, - #else - {"ECC_CERT_NOT_FOR_SIGNING", 20, 318}, - #endif - #ifdef SSL_R_ECC_CERT_SHOULD_HAVE_RSA_SIGNATURE - {"ECC_CERT_SHOULD_HAVE_RSA_SIGNATURE", ERR_LIB_SSL, SSL_R_ECC_CERT_SHOULD_HAVE_RSA_SIGNATURE}, - #else - {"ECC_CERT_SHOULD_HAVE_RSA_SIGNATURE", 20, 
322}, - #endif - #ifdef SSL_R_ECC_CERT_SHOULD_HAVE_SHA1_SIGNATURE - {"ECC_CERT_SHOULD_HAVE_SHA1_SIGNATURE", ERR_LIB_SSL, SSL_R_ECC_CERT_SHOULD_HAVE_SHA1_SIGNATURE}, - #else - {"ECC_CERT_SHOULD_HAVE_SHA1_SIGNATURE", 20, 323}, - #endif - #ifdef SSL_R_ECDH_REQUIRED_FOR_SUITEB_MODE - {"ECDH_REQUIRED_FOR_SUITEB_MODE", ERR_LIB_SSL, SSL_R_ECDH_REQUIRED_FOR_SUITEB_MODE}, - #else - {"ECDH_REQUIRED_FOR_SUITEB_MODE", 20, 374}, - #endif - #ifdef SSL_R_ECGROUP_TOO_LARGE_FOR_CIPHER - {"ECGROUP_TOO_LARGE_FOR_CIPHER", ERR_LIB_SSL, SSL_R_ECGROUP_TOO_LARGE_FOR_CIPHER}, - #else - {"ECGROUP_TOO_LARGE_FOR_CIPHER", 20, 310}, - #endif - #ifdef SSL_R_EE_KEY_TOO_SMALL - {"EE_KEY_TOO_SMALL", ERR_LIB_SSL, SSL_R_EE_KEY_TOO_SMALL}, - #else - {"EE_KEY_TOO_SMALL", 20, 399}, - #endif - #ifdef SSL_R_EMPTY_SRTP_PROTECTION_PROFILE_LIST - {"EMPTY_SRTP_PROTECTION_PROFILE_LIST", ERR_LIB_SSL, SSL_R_EMPTY_SRTP_PROTECTION_PROFILE_LIST}, - #else - {"EMPTY_SRTP_PROTECTION_PROFILE_LIST", 20, 354}, - #endif - #ifdef SSL_R_ENCRYPTED_LENGTH_TOO_LONG - {"ENCRYPTED_LENGTH_TOO_LONG", ERR_LIB_SSL, SSL_R_ENCRYPTED_LENGTH_TOO_LONG}, - #else - {"ENCRYPTED_LENGTH_TOO_LONG", 20, 150}, - #endif - #ifdef SSL_R_ERROR_IN_RECEIVED_CIPHER_LIST - {"ERROR_IN_RECEIVED_CIPHER_LIST", ERR_LIB_SSL, SSL_R_ERROR_IN_RECEIVED_CIPHER_LIST}, - #else - {"ERROR_IN_RECEIVED_CIPHER_LIST", 20, 151}, - #endif - #ifdef SSL_R_ERROR_SETTING_TLSA_BASE_DOMAIN - {"ERROR_SETTING_TLSA_BASE_DOMAIN", ERR_LIB_SSL, SSL_R_ERROR_SETTING_TLSA_BASE_DOMAIN}, - #else - {"ERROR_SETTING_TLSA_BASE_DOMAIN", 20, 204}, - #endif - #ifdef SSL_R_EXCEEDS_MAX_FRAGMENT_SIZE - {"EXCEEDS_MAX_FRAGMENT_SIZE", ERR_LIB_SSL, SSL_R_EXCEEDS_MAX_FRAGMENT_SIZE}, - #else - {"EXCEEDS_MAX_FRAGMENT_SIZE", 20, 194}, - #endif - #ifdef SSL_R_EXCESSIVE_MESSAGE_SIZE - {"EXCESSIVE_MESSAGE_SIZE", ERR_LIB_SSL, SSL_R_EXCESSIVE_MESSAGE_SIZE}, - #else - {"EXCESSIVE_MESSAGE_SIZE", 20, 152}, - #endif - #ifdef SSL_R_EXTENSION_NOT_RECEIVED - {"EXTENSION_NOT_RECEIVED", ERR_LIB_SSL, SSL_R_EXTENSION_NOT_RECEIVED}, - #else - {"EXTENSION_NOT_RECEIVED", 20, 279}, - #endif - #ifdef SSL_R_EXTRA_DATA_IN_MESSAGE - {"EXTRA_DATA_IN_MESSAGE", ERR_LIB_SSL, SSL_R_EXTRA_DATA_IN_MESSAGE}, - #else - {"EXTRA_DATA_IN_MESSAGE", 20, 153}, - #endif - #ifdef SSL_R_EXT_LENGTH_MISMATCH - {"EXT_LENGTH_MISMATCH", ERR_LIB_SSL, SSL_R_EXT_LENGTH_MISMATCH}, - #else - {"EXT_LENGTH_MISMATCH", 20, 163}, - #endif - #ifdef SSL_R_FAILED_TO_INIT_ASYNC - {"FAILED_TO_INIT_ASYNC", ERR_LIB_SSL, SSL_R_FAILED_TO_INIT_ASYNC}, - #else - {"FAILED_TO_INIT_ASYNC", 20, 405}, - #endif - #ifdef SSL_R_FRAGMENTED_CLIENT_HELLO - {"FRAGMENTED_CLIENT_HELLO", ERR_LIB_SSL, SSL_R_FRAGMENTED_CLIENT_HELLO}, - #else - {"FRAGMENTED_CLIENT_HELLO", 20, 401}, - #endif - #ifdef SSL_R_GOT_A_FIN_BEFORE_A_CCS - {"GOT_A_FIN_BEFORE_A_CCS", ERR_LIB_SSL, SSL_R_GOT_A_FIN_BEFORE_A_CCS}, - #else - {"GOT_A_FIN_BEFORE_A_CCS", 20, 154}, - #endif - #ifdef SSL_R_GOT_NEXT_PROTO_BEFORE_A_CCS - {"GOT_NEXT_PROTO_BEFORE_A_CCS", ERR_LIB_SSL, SSL_R_GOT_NEXT_PROTO_BEFORE_A_CCS}, - #else - {"GOT_NEXT_PROTO_BEFORE_A_CCS", 20, 355}, - #endif - #ifdef SSL_R_GOT_NEXT_PROTO_WITHOUT_EXTENSION - {"GOT_NEXT_PROTO_WITHOUT_EXTENSION", ERR_LIB_SSL, SSL_R_GOT_NEXT_PROTO_WITHOUT_EXTENSION}, - #else - {"GOT_NEXT_PROTO_WITHOUT_EXTENSION", 20, 356}, - #endif - #ifdef SSL_R_HTTPS_PROXY_REQUEST - {"HTTPS_PROXY_REQUEST", ERR_LIB_SSL, SSL_R_HTTPS_PROXY_REQUEST}, - #else - {"HTTPS_PROXY_REQUEST", 20, 155}, - #endif - #ifdef SSL_R_HTTP_REQUEST - {"HTTP_REQUEST", ERR_LIB_SSL, SSL_R_HTTP_REQUEST}, - #else - {"HTTP_REQUEST", 20, 156}, - #endif 
- #ifdef SSL_R_ILLEGAL_POINT_COMPRESSION - {"ILLEGAL_POINT_COMPRESSION", ERR_LIB_SSL, SSL_R_ILLEGAL_POINT_COMPRESSION}, - #else - {"ILLEGAL_POINT_COMPRESSION", 20, 162}, - #endif - #ifdef SSL_R_ILLEGAL_SUITEB_DIGEST - {"ILLEGAL_SUITEB_DIGEST", ERR_LIB_SSL, SSL_R_ILLEGAL_SUITEB_DIGEST}, - #else - {"ILLEGAL_SUITEB_DIGEST", 20, 380}, - #endif - #ifdef SSL_R_INAPPROPRIATE_FALLBACK - {"INAPPROPRIATE_FALLBACK", ERR_LIB_SSL, SSL_R_INAPPROPRIATE_FALLBACK}, - #else - {"INAPPROPRIATE_FALLBACK", 20, 373}, - #endif - #ifdef SSL_R_INCONSISTENT_COMPRESSION - {"INCONSISTENT_COMPRESSION", ERR_LIB_SSL, SSL_R_INCONSISTENT_COMPRESSION}, - #else - {"INCONSISTENT_COMPRESSION", 20, 340}, - #endif - #ifdef SSL_R_INCONSISTENT_EARLY_DATA_ALPN - {"INCONSISTENT_EARLY_DATA_ALPN", ERR_LIB_SSL, SSL_R_INCONSISTENT_EARLY_DATA_ALPN}, - #else - {"INCONSISTENT_EARLY_DATA_ALPN", 20, 222}, - #endif - #ifdef SSL_R_INCONSISTENT_EARLY_DATA_SNI - {"INCONSISTENT_EARLY_DATA_SNI", ERR_LIB_SSL, SSL_R_INCONSISTENT_EARLY_DATA_SNI}, - #else - {"INCONSISTENT_EARLY_DATA_SNI", 20, 231}, - #endif - #ifdef SSL_R_INCONSISTENT_EXTMS - {"INCONSISTENT_EXTMS", ERR_LIB_SSL, SSL_R_INCONSISTENT_EXTMS}, - #else - {"INCONSISTENT_EXTMS", 20, 104}, - #endif - #ifdef SSL_R_INSUFFICIENT_SECURITY - {"INSUFFICIENT_SECURITY", ERR_LIB_SSL, SSL_R_INSUFFICIENT_SECURITY}, - #else - {"INSUFFICIENT_SECURITY", 20, 241}, - #endif - #ifdef SSL_R_INVALID_ALERT - {"INVALID_ALERT", ERR_LIB_SSL, SSL_R_INVALID_ALERT}, - #else - {"INVALID_ALERT", 20, 205}, - #endif - #ifdef SSL_R_INVALID_CCS_MESSAGE - {"INVALID_CCS_MESSAGE", ERR_LIB_SSL, SSL_R_INVALID_CCS_MESSAGE}, - #else - {"INVALID_CCS_MESSAGE", 20, 260}, - #endif - #ifdef SSL_R_INVALID_CERTIFICATE_OR_ALG - {"INVALID_CERTIFICATE_OR_ALG", ERR_LIB_SSL, SSL_R_INVALID_CERTIFICATE_OR_ALG}, - #else - {"INVALID_CERTIFICATE_OR_ALG", 20, 238}, - #endif - #ifdef SSL_R_INVALID_COMMAND - {"INVALID_COMMAND", ERR_LIB_SSL, SSL_R_INVALID_COMMAND}, - #else - {"INVALID_COMMAND", 20, 280}, - #endif - #ifdef SSL_R_INVALID_COMPRESSION_ALGORITHM - {"INVALID_COMPRESSION_ALGORITHM", ERR_LIB_SSL, SSL_R_INVALID_COMPRESSION_ALGORITHM}, - #else - {"INVALID_COMPRESSION_ALGORITHM", 20, 341}, - #endif - #ifdef SSL_R_INVALID_CONFIG - {"INVALID_CONFIG", ERR_LIB_SSL, SSL_R_INVALID_CONFIG}, - #else - {"INVALID_CONFIG", 20, 283}, - #endif - #ifdef SSL_R_INVALID_CONFIGURATION_NAME - {"INVALID_CONFIGURATION_NAME", ERR_LIB_SSL, SSL_R_INVALID_CONFIGURATION_NAME}, - #else - {"INVALID_CONFIGURATION_NAME", 20, 113}, - #endif - #ifdef SSL_R_INVALID_CONTEXT - {"INVALID_CONTEXT", ERR_LIB_SSL, SSL_R_INVALID_CONTEXT}, - #else - {"INVALID_CONTEXT", 20, 282}, - #endif - #ifdef SSL_R_INVALID_CT_VALIDATION_TYPE - {"INVALID_CT_VALIDATION_TYPE", ERR_LIB_SSL, SSL_R_INVALID_CT_VALIDATION_TYPE}, - #else - {"INVALID_CT_VALIDATION_TYPE", 20, 212}, - #endif - #ifdef SSL_R_INVALID_KEY_UPDATE_TYPE - {"INVALID_KEY_UPDATE_TYPE", ERR_LIB_SSL, SSL_R_INVALID_KEY_UPDATE_TYPE}, - #else - {"INVALID_KEY_UPDATE_TYPE", 20, 120}, - #endif - #ifdef SSL_R_INVALID_MAX_EARLY_DATA - {"INVALID_MAX_EARLY_DATA", ERR_LIB_SSL, SSL_R_INVALID_MAX_EARLY_DATA}, - #else - {"INVALID_MAX_EARLY_DATA", 20, 174}, - #endif - #ifdef SSL_R_INVALID_NULL_CMD_NAME - {"INVALID_NULL_CMD_NAME", ERR_LIB_SSL, SSL_R_INVALID_NULL_CMD_NAME}, - #else - {"INVALID_NULL_CMD_NAME", 20, 385}, - #endif - #ifdef SSL_R_INVALID_SEQUENCE_NUMBER - {"INVALID_SEQUENCE_NUMBER", ERR_LIB_SSL, SSL_R_INVALID_SEQUENCE_NUMBER}, - #else - {"INVALID_SEQUENCE_NUMBER", 20, 402}, - #endif - #ifdef SSL_R_INVALID_SERVERINFO_DATA - 
{"INVALID_SERVERINFO_DATA", ERR_LIB_SSL, SSL_R_INVALID_SERVERINFO_DATA}, - #else - {"INVALID_SERVERINFO_DATA", 20, 388}, - #endif - #ifdef SSL_R_INVALID_SESSION_ID - {"INVALID_SESSION_ID", ERR_LIB_SSL, SSL_R_INVALID_SESSION_ID}, - #else - {"INVALID_SESSION_ID", 20, 999}, - #endif - #ifdef SSL_R_INVALID_SRP_USERNAME - {"INVALID_SRP_USERNAME", ERR_LIB_SSL, SSL_R_INVALID_SRP_USERNAME}, - #else - {"INVALID_SRP_USERNAME", 20, 357}, - #endif - #ifdef SSL_R_INVALID_STATUS_RESPONSE - {"INVALID_STATUS_RESPONSE", ERR_LIB_SSL, SSL_R_INVALID_STATUS_RESPONSE}, - #else - {"INVALID_STATUS_RESPONSE", 20, 328}, - #endif - #ifdef SSL_R_INVALID_TICKET_KEYS_LENGTH - {"INVALID_TICKET_KEYS_LENGTH", ERR_LIB_SSL, SSL_R_INVALID_TICKET_KEYS_LENGTH}, - #else - {"INVALID_TICKET_KEYS_LENGTH", 20, 325}, - #endif - #ifdef SSL_R_KRB5_S_TKT_NYV - {"KRB5_S_TKT_NYV", ERR_LIB_SSL, SSL_R_KRB5_S_TKT_NYV}, - #else - {"KRB5_S_TKT_NYV", 20, 294}, - #endif - #ifdef SSL_R_KRB5_S_TKT_SKEW - {"KRB5_S_TKT_SKEW", ERR_LIB_SSL, SSL_R_KRB5_S_TKT_SKEW}, - #else - {"KRB5_S_TKT_SKEW", 20, 295}, - #endif - #ifdef SSL_R_LENGTH_MISMATCH - {"LENGTH_MISMATCH", ERR_LIB_SSL, SSL_R_LENGTH_MISMATCH}, - #else - {"LENGTH_MISMATCH", 20, 159}, - #endif - #ifdef SSL_R_LENGTH_TOO_LONG - {"LENGTH_TOO_LONG", ERR_LIB_SSL, SSL_R_LENGTH_TOO_LONG}, - #else - {"LENGTH_TOO_LONG", 20, 404}, - #endif - #ifdef SSL_R_LENGTH_TOO_SHORT - {"LENGTH_TOO_SHORT", ERR_LIB_SSL, SSL_R_LENGTH_TOO_SHORT}, - #else - {"LENGTH_TOO_SHORT", 20, 160}, - #endif - #ifdef SSL_R_LIBRARY_BUG - {"LIBRARY_BUG", ERR_LIB_SSL, SSL_R_LIBRARY_BUG}, - #else - {"LIBRARY_BUG", 20, 274}, - #endif - #ifdef SSL_R_LIBRARY_HAS_NO_CIPHERS - {"LIBRARY_HAS_NO_CIPHERS", ERR_LIB_SSL, SSL_R_LIBRARY_HAS_NO_CIPHERS}, - #else - {"LIBRARY_HAS_NO_CIPHERS", 20, 161}, - #endif - #ifdef SSL_R_MESSAGE_TOO_LONG - {"MESSAGE_TOO_LONG", ERR_LIB_SSL, SSL_R_MESSAGE_TOO_LONG}, - #else - {"MESSAGE_TOO_LONG", 20, 296}, - #endif - #ifdef SSL_R_MISSING_DSA_SIGNING_CERT - {"MISSING_DSA_SIGNING_CERT", ERR_LIB_SSL, SSL_R_MISSING_DSA_SIGNING_CERT}, - #else - {"MISSING_DSA_SIGNING_CERT", 20, 165}, - #endif - #ifdef SSL_R_MISSING_ECDH_CERT - {"MISSING_ECDH_CERT", ERR_LIB_SSL, SSL_R_MISSING_ECDH_CERT}, - #else - {"MISSING_ECDH_CERT", 20, 382}, - #endif - #ifdef SSL_R_MISSING_ECDSA_SIGNING_CERT - {"MISSING_ECDSA_SIGNING_CERT", ERR_LIB_SSL, SSL_R_MISSING_ECDSA_SIGNING_CERT}, - #else - {"MISSING_ECDSA_SIGNING_CERT", 20, 381}, - #endif - #ifdef SSL_R_MISSING_FATAL - {"MISSING_FATAL", ERR_LIB_SSL, SSL_R_MISSING_FATAL}, - #else - {"MISSING_FATAL", 20, 256}, - #endif - #ifdef SSL_R_MISSING_PARAMETERS - {"MISSING_PARAMETERS", ERR_LIB_SSL, SSL_R_MISSING_PARAMETERS}, - #else - {"MISSING_PARAMETERS", 20, 290}, - #endif - #ifdef SSL_R_MISSING_RSA_CERTIFICATE - {"MISSING_RSA_CERTIFICATE", ERR_LIB_SSL, SSL_R_MISSING_RSA_CERTIFICATE}, - #else - {"MISSING_RSA_CERTIFICATE", 20, 168}, - #endif - #ifdef SSL_R_MISSING_RSA_ENCRYPTING_CERT - {"MISSING_RSA_ENCRYPTING_CERT", ERR_LIB_SSL, SSL_R_MISSING_RSA_ENCRYPTING_CERT}, - #else - {"MISSING_RSA_ENCRYPTING_CERT", 20, 169}, - #endif - #ifdef SSL_R_MISSING_RSA_SIGNING_CERT - {"MISSING_RSA_SIGNING_CERT", ERR_LIB_SSL, SSL_R_MISSING_RSA_SIGNING_CERT}, - #else - {"MISSING_RSA_SIGNING_CERT", 20, 170}, - #endif - #ifdef SSL_R_MISSING_SIGALGS_EXTENSION - {"MISSING_SIGALGS_EXTENSION", ERR_LIB_SSL, SSL_R_MISSING_SIGALGS_EXTENSION}, - #else - {"MISSING_SIGALGS_EXTENSION", 20, 112}, - #endif - #ifdef SSL_R_MISSING_SIGNING_CERT - {"MISSING_SIGNING_CERT", ERR_LIB_SSL, SSL_R_MISSING_SIGNING_CERT}, - #else - 
{"MISSING_SIGNING_CERT", 20, 221}, - #endif - #ifdef SSL_R_MISSING_SRP_PARAM - {"MISSING_SRP_PARAM", ERR_LIB_SSL, SSL_R_MISSING_SRP_PARAM}, - #else - {"MISSING_SRP_PARAM", 20, 358}, - #endif - #ifdef SSL_R_MISSING_SUPPORTED_GROUPS_EXTENSION - {"MISSING_SUPPORTED_GROUPS_EXTENSION", ERR_LIB_SSL, SSL_R_MISSING_SUPPORTED_GROUPS_EXTENSION}, - #else - {"MISSING_SUPPORTED_GROUPS_EXTENSION", 20, 209}, - #endif - #ifdef SSL_R_MISSING_TMP_DH_KEY - {"MISSING_TMP_DH_KEY", ERR_LIB_SSL, SSL_R_MISSING_TMP_DH_KEY}, - #else - {"MISSING_TMP_DH_KEY", 20, 171}, - #endif - #ifdef SSL_R_MISSING_TMP_ECDH_KEY - {"MISSING_TMP_ECDH_KEY", ERR_LIB_SSL, SSL_R_MISSING_TMP_ECDH_KEY}, - #else - {"MISSING_TMP_ECDH_KEY", 20, 311}, - #endif - #ifdef SSL_R_MIXED_HANDSHAKE_AND_NON_HANDSHAKE_DATA - {"MIXED_HANDSHAKE_AND_NON_HANDSHAKE_DATA", ERR_LIB_SSL, SSL_R_MIXED_HANDSHAKE_AND_NON_HANDSHAKE_DATA}, - #else - {"MIXED_HANDSHAKE_AND_NON_HANDSHAKE_DATA", 20, 293}, - #endif - #ifdef SSL_R_MULTIPLE_SGC_RESTARTS - {"MULTIPLE_SGC_RESTARTS", ERR_LIB_SSL, SSL_R_MULTIPLE_SGC_RESTARTS}, - #else - {"MULTIPLE_SGC_RESTARTS", 20, 346}, - #endif - #ifdef SSL_R_NOT_ON_RECORD_BOUNDARY - {"NOT_ON_RECORD_BOUNDARY", ERR_LIB_SSL, SSL_R_NOT_ON_RECORD_BOUNDARY}, - #else - {"NOT_ON_RECORD_BOUNDARY", 20, 182}, - #endif - #ifdef SSL_R_NOT_REPLACING_CERTIFICATE - {"NOT_REPLACING_CERTIFICATE", ERR_LIB_SSL, SSL_R_NOT_REPLACING_CERTIFICATE}, - #else - {"NOT_REPLACING_CERTIFICATE", 20, 289}, - #endif - #ifdef SSL_R_NOT_SERVER - {"NOT_SERVER", ERR_LIB_SSL, SSL_R_NOT_SERVER}, - #else - {"NOT_SERVER", 20, 284}, - #endif - #ifdef SSL_R_NO_APPLICATION_PROTOCOL - {"NO_APPLICATION_PROTOCOL", ERR_LIB_SSL, SSL_R_NO_APPLICATION_PROTOCOL}, - #else - {"NO_APPLICATION_PROTOCOL", 20, 235}, - #endif - #ifdef SSL_R_NO_CERTIFICATES_RETURNED - {"NO_CERTIFICATES_RETURNED", ERR_LIB_SSL, SSL_R_NO_CERTIFICATES_RETURNED}, - #else - {"NO_CERTIFICATES_RETURNED", 20, 176}, - #endif - #ifdef SSL_R_NO_CERTIFICATE_ASSIGNED - {"NO_CERTIFICATE_ASSIGNED", ERR_LIB_SSL, SSL_R_NO_CERTIFICATE_ASSIGNED}, - #else - {"NO_CERTIFICATE_ASSIGNED", 20, 177}, - #endif - #ifdef SSL_R_NO_CERTIFICATE_SET - {"NO_CERTIFICATE_SET", ERR_LIB_SSL, SSL_R_NO_CERTIFICATE_SET}, - #else - {"NO_CERTIFICATE_SET", 20, 179}, - #endif - #ifdef SSL_R_NO_CHANGE_FOLLOWING_HRR - {"NO_CHANGE_FOLLOWING_HRR", ERR_LIB_SSL, SSL_R_NO_CHANGE_FOLLOWING_HRR}, - #else - {"NO_CHANGE_FOLLOWING_HRR", 20, 214}, - #endif - #ifdef SSL_R_NO_CIPHERS_AVAILABLE - {"NO_CIPHERS_AVAILABLE", ERR_LIB_SSL, SSL_R_NO_CIPHERS_AVAILABLE}, - #else - {"NO_CIPHERS_AVAILABLE", 20, 181}, - #endif - #ifdef SSL_R_NO_CIPHERS_SPECIFIED - {"NO_CIPHERS_SPECIFIED", ERR_LIB_SSL, SSL_R_NO_CIPHERS_SPECIFIED}, - #else - {"NO_CIPHERS_SPECIFIED", 20, 183}, - #endif - #ifdef SSL_R_NO_CIPHER_MATCH - {"NO_CIPHER_MATCH", ERR_LIB_SSL, SSL_R_NO_CIPHER_MATCH}, - #else - {"NO_CIPHER_MATCH", 20, 185}, - #endif - #ifdef SSL_R_NO_CLIENT_CERT_METHOD - {"NO_CLIENT_CERT_METHOD", ERR_LIB_SSL, SSL_R_NO_CLIENT_CERT_METHOD}, - #else - {"NO_CLIENT_CERT_METHOD", 20, 331}, - #endif - #ifdef SSL_R_NO_COMPRESSION_SPECIFIED - {"NO_COMPRESSION_SPECIFIED", ERR_LIB_SSL, SSL_R_NO_COMPRESSION_SPECIFIED}, - #else - {"NO_COMPRESSION_SPECIFIED", 20, 187}, - #endif - #ifdef SSL_R_NO_COOKIE_CALLBACK_SET - {"NO_COOKIE_CALLBACK_SET", ERR_LIB_SSL, SSL_R_NO_COOKIE_CALLBACK_SET}, - #else - {"NO_COOKIE_CALLBACK_SET", 20, 287}, - #endif - #ifdef SSL_R_NO_GOST_CERTIFICATE_SENT_BY_PEER - {"NO_GOST_CERTIFICATE_SENT_BY_PEER", ERR_LIB_SSL, SSL_R_NO_GOST_CERTIFICATE_SENT_BY_PEER}, - #else - 
{"NO_GOST_CERTIFICATE_SENT_BY_PEER", 20, 330}, - #endif - #ifdef SSL_R_NO_METHOD_SPECIFIED - {"NO_METHOD_SPECIFIED", ERR_LIB_SSL, SSL_R_NO_METHOD_SPECIFIED}, - #else - {"NO_METHOD_SPECIFIED", 20, 188}, - #endif - #ifdef SSL_R_NO_PEM_EXTENSIONS - {"NO_PEM_EXTENSIONS", ERR_LIB_SSL, SSL_R_NO_PEM_EXTENSIONS}, - #else - {"NO_PEM_EXTENSIONS", 20, 389}, - #endif - #ifdef SSL_R_NO_PRIVATE_KEY_ASSIGNED - {"NO_PRIVATE_KEY_ASSIGNED", ERR_LIB_SSL, SSL_R_NO_PRIVATE_KEY_ASSIGNED}, - #else - {"NO_PRIVATE_KEY_ASSIGNED", 20, 190}, - #endif - #ifdef SSL_R_NO_PROTOCOLS_AVAILABLE - {"NO_PROTOCOLS_AVAILABLE", ERR_LIB_SSL, SSL_R_NO_PROTOCOLS_AVAILABLE}, - #else - {"NO_PROTOCOLS_AVAILABLE", 20, 191}, - #endif - #ifdef SSL_R_NO_RENEGOTIATION - {"NO_RENEGOTIATION", ERR_LIB_SSL, SSL_R_NO_RENEGOTIATION}, - #else - {"NO_RENEGOTIATION", 20, 339}, - #endif - #ifdef SSL_R_NO_REQUIRED_DIGEST - {"NO_REQUIRED_DIGEST", ERR_LIB_SSL, SSL_R_NO_REQUIRED_DIGEST}, - #else - {"NO_REQUIRED_DIGEST", 20, 324}, - #endif - #ifdef SSL_R_NO_SHARED_CIPHER - {"NO_SHARED_CIPHER", ERR_LIB_SSL, SSL_R_NO_SHARED_CIPHER}, - #else - {"NO_SHARED_CIPHER", 20, 193}, - #endif - #ifdef SSL_R_NO_SHARED_GROUPS - {"NO_SHARED_GROUPS", ERR_LIB_SSL, SSL_R_NO_SHARED_GROUPS}, - #else - {"NO_SHARED_GROUPS", 20, 410}, - #endif - #ifdef SSL_R_NO_SHARED_SIGNATURE_ALGORITHMS - {"NO_SHARED_SIGNATURE_ALGORITHMS", ERR_LIB_SSL, SSL_R_NO_SHARED_SIGNATURE_ALGORITHMS}, - #else - {"NO_SHARED_SIGNATURE_ALGORITHMS", 20, 376}, - #endif - #ifdef SSL_R_NO_SRTP_PROFILES - {"NO_SRTP_PROFILES", ERR_LIB_SSL, SSL_R_NO_SRTP_PROFILES}, - #else - {"NO_SRTP_PROFILES", 20, 359}, - #endif - #ifdef SSL_R_NO_SUITABLE_KEY_SHARE - {"NO_SUITABLE_KEY_SHARE", ERR_LIB_SSL, SSL_R_NO_SUITABLE_KEY_SHARE}, - #else - {"NO_SUITABLE_KEY_SHARE", 20, 101}, - #endif - #ifdef SSL_R_NO_SUITABLE_SIGNATURE_ALGORITHM - {"NO_SUITABLE_SIGNATURE_ALGORITHM", ERR_LIB_SSL, SSL_R_NO_SUITABLE_SIGNATURE_ALGORITHM}, - #else - {"NO_SUITABLE_SIGNATURE_ALGORITHM", 20, 118}, - #endif - #ifdef SSL_R_NO_VALID_SCTS - {"NO_VALID_SCTS", ERR_LIB_SSL, SSL_R_NO_VALID_SCTS}, - #else - {"NO_VALID_SCTS", 20, 216}, - #endif - #ifdef SSL_R_NO_VERIFY_COOKIE_CALLBACK - {"NO_VERIFY_COOKIE_CALLBACK", ERR_LIB_SSL, SSL_R_NO_VERIFY_COOKIE_CALLBACK}, - #else - {"NO_VERIFY_COOKIE_CALLBACK", 20, 403}, - #endif - #ifdef SSL_R_NULL_SSL_CTX - {"NULL_SSL_CTX", ERR_LIB_SSL, SSL_R_NULL_SSL_CTX}, - #else - {"NULL_SSL_CTX", 20, 195}, - #endif - #ifdef SSL_R_NULL_SSL_METHOD_PASSED - {"NULL_SSL_METHOD_PASSED", ERR_LIB_SSL, SSL_R_NULL_SSL_METHOD_PASSED}, - #else - {"NULL_SSL_METHOD_PASSED", 20, 196}, - #endif - #ifdef SSL_R_OLD_SESSION_CIPHER_NOT_RETURNED - {"OLD_SESSION_CIPHER_NOT_RETURNED", ERR_LIB_SSL, SSL_R_OLD_SESSION_CIPHER_NOT_RETURNED}, - #else - {"OLD_SESSION_CIPHER_NOT_RETURNED", 20, 197}, - #endif - #ifdef SSL_R_OLD_SESSION_COMPRESSION_ALGORITHM_NOT_RETURNED - {"OLD_SESSION_COMPRESSION_ALGORITHM_NOT_RETURNED", ERR_LIB_SSL, SSL_R_OLD_SESSION_COMPRESSION_ALGORITHM_NOT_RETURNED}, - #else - {"OLD_SESSION_COMPRESSION_ALGORITHM_NOT_RETURNED", 20, 344}, - #endif - #ifdef SSL_R_ONLY_DTLS_1_2_ALLOWED_IN_SUITEB_MODE - {"ONLY_DTLS_1_2_ALLOWED_IN_SUITEB_MODE", ERR_LIB_SSL, SSL_R_ONLY_DTLS_1_2_ALLOWED_IN_SUITEB_MODE}, - #else - {"ONLY_DTLS_1_2_ALLOWED_IN_SUITEB_MODE", 20, 387}, - #endif - #ifdef SSL_R_ONLY_TLS_1_2_ALLOWED_IN_SUITEB_MODE - {"ONLY_TLS_1_2_ALLOWED_IN_SUITEB_MODE", ERR_LIB_SSL, SSL_R_ONLY_TLS_1_2_ALLOWED_IN_SUITEB_MODE}, - #else - {"ONLY_TLS_1_2_ALLOWED_IN_SUITEB_MODE", 20, 379}, - #endif - #ifdef SSL_R_ONLY_TLS_ALLOWED_IN_FIPS_MODE - 
{"ONLY_TLS_ALLOWED_IN_FIPS_MODE", ERR_LIB_SSL, SSL_R_ONLY_TLS_ALLOWED_IN_FIPS_MODE}, - #else - {"ONLY_TLS_ALLOWED_IN_FIPS_MODE", 20, 297}, - #endif - #ifdef SSL_R_OPAQUE_PRF_INPUT_TOO_LONG - {"OPAQUE_PRF_INPUT_TOO_LONG", ERR_LIB_SSL, SSL_R_OPAQUE_PRF_INPUT_TOO_LONG}, - #else - {"OPAQUE_PRF_INPUT_TOO_LONG", 20, 327}, - #endif - #ifdef SSL_R_OVERFLOW_ERROR - {"OVERFLOW_ERROR", ERR_LIB_SSL, SSL_R_OVERFLOW_ERROR}, - #else - {"OVERFLOW_ERROR", 20, 237}, - #endif - #ifdef SSL_R_PACKET_LENGTH_TOO_LONG - {"PACKET_LENGTH_TOO_LONG", ERR_LIB_SSL, SSL_R_PACKET_LENGTH_TOO_LONG}, - #else - {"PACKET_LENGTH_TOO_LONG", 20, 198}, - #endif - #ifdef SSL_R_PARSE_TLSEXT - {"PARSE_TLSEXT", ERR_LIB_SSL, SSL_R_PARSE_TLSEXT}, - #else - {"PARSE_TLSEXT", 20, 227}, - #endif - #ifdef SSL_R_PATH_TOO_LONG - {"PATH_TOO_LONG", ERR_LIB_SSL, SSL_R_PATH_TOO_LONG}, - #else - {"PATH_TOO_LONG", 20, 270}, - #endif - #ifdef SSL_R_PEER_DID_NOT_RETURN_A_CERTIFICATE - {"PEER_DID_NOT_RETURN_A_CERTIFICATE", ERR_LIB_SSL, SSL_R_PEER_DID_NOT_RETURN_A_CERTIFICATE}, - #else - {"PEER_DID_NOT_RETURN_A_CERTIFICATE", 20, 199}, - #endif - #ifdef SSL_R_PEM_NAME_BAD_PREFIX - {"PEM_NAME_BAD_PREFIX", ERR_LIB_SSL, SSL_R_PEM_NAME_BAD_PREFIX}, - #else - {"PEM_NAME_BAD_PREFIX", 20, 391}, - #endif - #ifdef SSL_R_PEM_NAME_TOO_SHORT - {"PEM_NAME_TOO_SHORT", ERR_LIB_SSL, SSL_R_PEM_NAME_TOO_SHORT}, - #else - {"PEM_NAME_TOO_SHORT", 20, 392}, - #endif - #ifdef SSL_R_PIPELINE_FAILURE - {"PIPELINE_FAILURE", ERR_LIB_SSL, SSL_R_PIPELINE_FAILURE}, - #else - {"PIPELINE_FAILURE", 20, 406}, - #endif - #ifdef SSL_R_POST_HANDSHAKE_AUTH_ENCODING_ERR - {"POST_HANDSHAKE_AUTH_ENCODING_ERR", ERR_LIB_SSL, SSL_R_POST_HANDSHAKE_AUTH_ENCODING_ERR}, - #else - {"POST_HANDSHAKE_AUTH_ENCODING_ERR", 20, 278}, - #endif - #ifdef SSL_R_PRIVATE_KEY_MISMATCH - {"PRIVATE_KEY_MISMATCH", ERR_LIB_SSL, SSL_R_PRIVATE_KEY_MISMATCH}, - #else - {"PRIVATE_KEY_MISMATCH", 20, 288}, - #endif - #ifdef SSL_R_PROTOCOL_IS_SHUTDOWN - {"PROTOCOL_IS_SHUTDOWN", ERR_LIB_SSL, SSL_R_PROTOCOL_IS_SHUTDOWN}, - #else - {"PROTOCOL_IS_SHUTDOWN", 20, 207}, - #endif - #ifdef SSL_R_PSK_IDENTITY_NOT_FOUND - {"PSK_IDENTITY_NOT_FOUND", ERR_LIB_SSL, SSL_R_PSK_IDENTITY_NOT_FOUND}, - #else - {"PSK_IDENTITY_NOT_FOUND", 20, 223}, - #endif - #ifdef SSL_R_PSK_NO_CLIENT_CB - {"PSK_NO_CLIENT_CB", ERR_LIB_SSL, SSL_R_PSK_NO_CLIENT_CB}, - #else - {"PSK_NO_CLIENT_CB", 20, 224}, - #endif - #ifdef SSL_R_PSK_NO_SERVER_CB - {"PSK_NO_SERVER_CB", ERR_LIB_SSL, SSL_R_PSK_NO_SERVER_CB}, - #else - {"PSK_NO_SERVER_CB", 20, 225}, - #endif - #ifdef SSL_R_READ_BIO_NOT_SET - {"READ_BIO_NOT_SET", ERR_LIB_SSL, SSL_R_READ_BIO_NOT_SET}, - #else - {"READ_BIO_NOT_SET", 20, 211}, - #endif - #ifdef SSL_R_READ_TIMEOUT_EXPIRED - {"READ_TIMEOUT_EXPIRED", ERR_LIB_SSL, SSL_R_READ_TIMEOUT_EXPIRED}, - #else - {"READ_TIMEOUT_EXPIRED", 20, 312}, - #endif - #ifdef SSL_R_RECORD_LENGTH_MISMATCH - {"RECORD_LENGTH_MISMATCH", ERR_LIB_SSL, SSL_R_RECORD_LENGTH_MISMATCH}, - #else - {"RECORD_LENGTH_MISMATCH", 20, 213}, - #endif - #ifdef SSL_R_RECORD_TOO_SMALL - {"RECORD_TOO_SMALL", ERR_LIB_SSL, SSL_R_RECORD_TOO_SMALL}, - #else - {"RECORD_TOO_SMALL", 20, 298}, - #endif - #ifdef SSL_R_RENEGOTIATE_EXT_TOO_LONG - {"RENEGOTIATE_EXT_TOO_LONG", ERR_LIB_SSL, SSL_R_RENEGOTIATE_EXT_TOO_LONG}, - #else - {"RENEGOTIATE_EXT_TOO_LONG", 20, 335}, - #endif - #ifdef SSL_R_RENEGOTIATION_ENCODING_ERR - {"RENEGOTIATION_ENCODING_ERR", ERR_LIB_SSL, SSL_R_RENEGOTIATION_ENCODING_ERR}, - #else - {"RENEGOTIATION_ENCODING_ERR", 20, 336}, - #endif - #ifdef SSL_R_RENEGOTIATION_MISMATCH - 
{"RENEGOTIATION_MISMATCH", ERR_LIB_SSL, SSL_R_RENEGOTIATION_MISMATCH}, - #else - {"RENEGOTIATION_MISMATCH", 20, 337}, - #endif - #ifdef SSL_R_REQUEST_PENDING - {"REQUEST_PENDING", ERR_LIB_SSL, SSL_R_REQUEST_PENDING}, - #else - {"REQUEST_PENDING", 20, 285}, - #endif - #ifdef SSL_R_REQUEST_SENT - {"REQUEST_SENT", ERR_LIB_SSL, SSL_R_REQUEST_SENT}, - #else - {"REQUEST_SENT", 20, 286}, - #endif - #ifdef SSL_R_REQUIRED_CIPHER_MISSING - {"REQUIRED_CIPHER_MISSING", ERR_LIB_SSL, SSL_R_REQUIRED_CIPHER_MISSING}, - #else - {"REQUIRED_CIPHER_MISSING", 20, 215}, - #endif - #ifdef SSL_R_REQUIRED_COMPRESSION_ALGORITHM_MISSING - {"REQUIRED_COMPRESSION_ALGORITHM_MISSING", ERR_LIB_SSL, SSL_R_REQUIRED_COMPRESSION_ALGORITHM_MISSING}, - #else - {"REQUIRED_COMPRESSION_ALGORITHM_MISSING", 20, 342}, - #endif - #ifdef SSL_R_SCSV_RECEIVED_WHEN_RENEGOTIATING - {"SCSV_RECEIVED_WHEN_RENEGOTIATING", ERR_LIB_SSL, SSL_R_SCSV_RECEIVED_WHEN_RENEGOTIATING}, - #else - {"SCSV_RECEIVED_WHEN_RENEGOTIATING", 20, 345}, - #endif - #ifdef SSL_R_SCT_VERIFICATION_FAILED - {"SCT_VERIFICATION_FAILED", ERR_LIB_SSL, SSL_R_SCT_VERIFICATION_FAILED}, - #else - {"SCT_VERIFICATION_FAILED", 20, 208}, - #endif - #ifdef SSL_R_SERVERHELLO_TLSEXT - {"SERVERHELLO_TLSEXT", ERR_LIB_SSL, SSL_R_SERVERHELLO_TLSEXT}, - #else - {"SERVERHELLO_TLSEXT", 20, 275}, - #endif - #ifdef SSL_R_SESSION_ID_CONTEXT_UNINITIALIZED - {"SESSION_ID_CONTEXT_UNINITIALIZED", ERR_LIB_SSL, SSL_R_SESSION_ID_CONTEXT_UNINITIALIZED}, - #else - {"SESSION_ID_CONTEXT_UNINITIALIZED", 20, 277}, - #endif - #ifdef SSL_R_SHUTDOWN_WHILE_IN_INIT - {"SHUTDOWN_WHILE_IN_INIT", ERR_LIB_SSL, SSL_R_SHUTDOWN_WHILE_IN_INIT}, - #else - {"SHUTDOWN_WHILE_IN_INIT", 20, 407}, - #endif - #ifdef SSL_R_SIGNATURE_ALGORITHMS_ERROR - {"SIGNATURE_ALGORITHMS_ERROR", ERR_LIB_SSL, SSL_R_SIGNATURE_ALGORITHMS_ERROR}, - #else - {"SIGNATURE_ALGORITHMS_ERROR", 20, 360}, - #endif - #ifdef SSL_R_SIGNATURE_FOR_NON_SIGNING_CERTIFICATE - {"SIGNATURE_FOR_NON_SIGNING_CERTIFICATE", ERR_LIB_SSL, SSL_R_SIGNATURE_FOR_NON_SIGNING_CERTIFICATE}, - #else - {"SIGNATURE_FOR_NON_SIGNING_CERTIFICATE", 20, 220}, - #endif - #ifdef SSL_R_SRP_A_CALC - {"SRP_A_CALC", ERR_LIB_SSL, SSL_R_SRP_A_CALC}, - #else - {"SRP_A_CALC", 20, 361}, - #endif - #ifdef SSL_R_SRTP_COULD_NOT_ALLOCATE_PROFILES - {"SRTP_COULD_NOT_ALLOCATE_PROFILES", ERR_LIB_SSL, SSL_R_SRTP_COULD_NOT_ALLOCATE_PROFILES}, - #else - {"SRTP_COULD_NOT_ALLOCATE_PROFILES", 20, 362}, - #endif - #ifdef SSL_R_SRTP_PROTECTION_PROFILE_LIST_TOO_LONG - {"SRTP_PROTECTION_PROFILE_LIST_TOO_LONG", ERR_LIB_SSL, SSL_R_SRTP_PROTECTION_PROFILE_LIST_TOO_LONG}, - #else - {"SRTP_PROTECTION_PROFILE_LIST_TOO_LONG", 20, 363}, - #endif - #ifdef SSL_R_SRTP_UNKNOWN_PROTECTION_PROFILE - {"SRTP_UNKNOWN_PROTECTION_PROFILE", ERR_LIB_SSL, SSL_R_SRTP_UNKNOWN_PROTECTION_PROFILE}, - #else - {"SRTP_UNKNOWN_PROTECTION_PROFILE", 20, 364}, - #endif - #ifdef SSL_R_SSL2_CONNECTION_ID_TOO_LONG - {"SSL2_CONNECTION_ID_TOO_LONG", ERR_LIB_SSL, SSL_R_SSL2_CONNECTION_ID_TOO_LONG}, - #else - {"SSL2_CONNECTION_ID_TOO_LONG", 20, 299}, - #endif - #ifdef SSL_R_SSL3_EXT_INVALID_ECPOINTFORMAT - {"SSL3_EXT_INVALID_ECPOINTFORMAT", ERR_LIB_SSL, SSL_R_SSL3_EXT_INVALID_ECPOINTFORMAT}, - #else - {"SSL3_EXT_INVALID_ECPOINTFORMAT", 20, 321}, - #endif - #ifdef SSL_R_SSL3_EXT_INVALID_MAX_FRAGMENT_LENGTH - {"SSL3_EXT_INVALID_MAX_FRAGMENT_LENGTH", ERR_LIB_SSL, SSL_R_SSL3_EXT_INVALID_MAX_FRAGMENT_LENGTH}, - #else - {"SSL3_EXT_INVALID_MAX_FRAGMENT_LENGTH", 20, 232}, - #endif - #ifdef SSL_R_SSL3_EXT_INVALID_SERVERNAME - {"SSL3_EXT_INVALID_SERVERNAME", 
ERR_LIB_SSL, SSL_R_SSL3_EXT_INVALID_SERVERNAME}, - #else - {"SSL3_EXT_INVALID_SERVERNAME", 20, 319}, - #endif - #ifdef SSL_R_SSL3_EXT_INVALID_SERVERNAME_TYPE - {"SSL3_EXT_INVALID_SERVERNAME_TYPE", ERR_LIB_SSL, SSL_R_SSL3_EXT_INVALID_SERVERNAME_TYPE}, - #else - {"SSL3_EXT_INVALID_SERVERNAME_TYPE", 20, 320}, - #endif - #ifdef SSL_R_SSL3_SESSION_ID_TOO_LONG - {"SSL3_SESSION_ID_TOO_LONG", ERR_LIB_SSL, SSL_R_SSL3_SESSION_ID_TOO_LONG}, - #else - {"SSL3_SESSION_ID_TOO_LONG", 20, 300}, - #endif - #ifdef SSL_R_SSLV3_ALERT_BAD_CERTIFICATE - {"SSLV3_ALERT_BAD_CERTIFICATE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_BAD_CERTIFICATE}, - #else - {"SSLV3_ALERT_BAD_CERTIFICATE", 20, 1042}, - #endif - #ifdef SSL_R_SSLV3_ALERT_BAD_RECORD_MAC - {"SSLV3_ALERT_BAD_RECORD_MAC", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_BAD_RECORD_MAC}, - #else - {"SSLV3_ALERT_BAD_RECORD_MAC", 20, 1020}, - #endif - #ifdef SSL_R_SSLV3_ALERT_CERTIFICATE_EXPIRED - {"SSLV3_ALERT_CERTIFICATE_EXPIRED", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_CERTIFICATE_EXPIRED}, - #else - {"SSLV3_ALERT_CERTIFICATE_EXPIRED", 20, 1045}, - #endif - #ifdef SSL_R_SSLV3_ALERT_CERTIFICATE_REVOKED - {"SSLV3_ALERT_CERTIFICATE_REVOKED", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_CERTIFICATE_REVOKED}, - #else - {"SSLV3_ALERT_CERTIFICATE_REVOKED", 20, 1044}, - #endif - #ifdef SSL_R_SSLV3_ALERT_CERTIFICATE_UNKNOWN - {"SSLV3_ALERT_CERTIFICATE_UNKNOWN", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_CERTIFICATE_UNKNOWN}, - #else - {"SSLV3_ALERT_CERTIFICATE_UNKNOWN", 20, 1046}, - #endif - #ifdef SSL_R_SSLV3_ALERT_DECOMPRESSION_FAILURE - {"SSLV3_ALERT_DECOMPRESSION_FAILURE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_DECOMPRESSION_FAILURE}, - #else - {"SSLV3_ALERT_DECOMPRESSION_FAILURE", 20, 1030}, - #endif - #ifdef SSL_R_SSLV3_ALERT_HANDSHAKE_FAILURE - {"SSLV3_ALERT_HANDSHAKE_FAILURE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_HANDSHAKE_FAILURE}, - #else - {"SSLV3_ALERT_HANDSHAKE_FAILURE", 20, 1040}, - #endif - #ifdef SSL_R_SSLV3_ALERT_ILLEGAL_PARAMETER - {"SSLV3_ALERT_ILLEGAL_PARAMETER", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_ILLEGAL_PARAMETER}, - #else - {"SSLV3_ALERT_ILLEGAL_PARAMETER", 20, 1047}, - #endif - #ifdef SSL_R_SSLV3_ALERT_NO_CERTIFICATE - {"SSLV3_ALERT_NO_CERTIFICATE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_NO_CERTIFICATE}, - #else - {"SSLV3_ALERT_NO_CERTIFICATE", 20, 1041}, - #endif - #ifdef SSL_R_SSLV3_ALERT_UNEXPECTED_MESSAGE - {"SSLV3_ALERT_UNEXPECTED_MESSAGE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_UNEXPECTED_MESSAGE}, - #else - {"SSLV3_ALERT_UNEXPECTED_MESSAGE", 20, 1010}, - #endif - #ifdef SSL_R_SSLV3_ALERT_UNSUPPORTED_CERTIFICATE - {"SSLV3_ALERT_UNSUPPORTED_CERTIFICATE", ERR_LIB_SSL, SSL_R_SSLV3_ALERT_UNSUPPORTED_CERTIFICATE}, - #else - {"SSLV3_ALERT_UNSUPPORTED_CERTIFICATE", 20, 1043}, - #endif - #ifdef SSL_R_SSL_COMMAND_SECTION_EMPTY - {"SSL_COMMAND_SECTION_EMPTY", ERR_LIB_SSL, SSL_R_SSL_COMMAND_SECTION_EMPTY}, - #else - {"SSL_COMMAND_SECTION_EMPTY", 20, 117}, - #endif - #ifdef SSL_R_SSL_COMMAND_SECTION_NOT_FOUND - {"SSL_COMMAND_SECTION_NOT_FOUND", ERR_LIB_SSL, SSL_R_SSL_COMMAND_SECTION_NOT_FOUND}, - #else - {"SSL_COMMAND_SECTION_NOT_FOUND", 20, 125}, - #endif - #ifdef SSL_R_SSL_CTX_HAS_NO_DEFAULT_SSL_VERSION - {"SSL_CTX_HAS_NO_DEFAULT_SSL_VERSION", ERR_LIB_SSL, SSL_R_SSL_CTX_HAS_NO_DEFAULT_SSL_VERSION}, - #else - {"SSL_CTX_HAS_NO_DEFAULT_SSL_VERSION", 20, 228}, - #endif - #ifdef SSL_R_SSL_HANDSHAKE_FAILURE - {"SSL_HANDSHAKE_FAILURE", ERR_LIB_SSL, SSL_R_SSL_HANDSHAKE_FAILURE}, - #else - {"SSL_HANDSHAKE_FAILURE", 20, 229}, - #endif - #ifdef SSL_R_SSL_LIBRARY_HAS_NO_CIPHERS - {"SSL_LIBRARY_HAS_NO_CIPHERS", ERR_LIB_SSL, 
SSL_R_SSL_LIBRARY_HAS_NO_CIPHERS}, - #else - {"SSL_LIBRARY_HAS_NO_CIPHERS", 20, 230}, - #endif - #ifdef SSL_R_SSL_NEGATIVE_LENGTH - {"SSL_NEGATIVE_LENGTH", ERR_LIB_SSL, SSL_R_SSL_NEGATIVE_LENGTH}, - #else - {"SSL_NEGATIVE_LENGTH", 20, 372}, - #endif - #ifdef SSL_R_SSL_SECTION_EMPTY - {"SSL_SECTION_EMPTY", ERR_LIB_SSL, SSL_R_SSL_SECTION_EMPTY}, - #else - {"SSL_SECTION_EMPTY", 20, 126}, - #endif - #ifdef SSL_R_SSL_SECTION_NOT_FOUND - {"SSL_SECTION_NOT_FOUND", ERR_LIB_SSL, SSL_R_SSL_SECTION_NOT_FOUND}, - #else - {"SSL_SECTION_NOT_FOUND", 20, 136}, - #endif - #ifdef SSL_R_SSL_SESSION_ID_CALLBACK_FAILED - {"SSL_SESSION_ID_CALLBACK_FAILED", ERR_LIB_SSL, SSL_R_SSL_SESSION_ID_CALLBACK_FAILED}, - #else - {"SSL_SESSION_ID_CALLBACK_FAILED", 20, 301}, - #endif - #ifdef SSL_R_SSL_SESSION_ID_CONFLICT - {"SSL_SESSION_ID_CONFLICT", ERR_LIB_SSL, SSL_R_SSL_SESSION_ID_CONFLICT}, - #else - {"SSL_SESSION_ID_CONFLICT", 20, 302}, - #endif - #ifdef SSL_R_SSL_SESSION_ID_CONTEXT_TOO_LONG - {"SSL_SESSION_ID_CONTEXT_TOO_LONG", ERR_LIB_SSL, SSL_R_SSL_SESSION_ID_CONTEXT_TOO_LONG}, - #else - {"SSL_SESSION_ID_CONTEXT_TOO_LONG", 20, 273}, - #endif - #ifdef SSL_R_SSL_SESSION_ID_HAS_BAD_LENGTH - {"SSL_SESSION_ID_HAS_BAD_LENGTH", ERR_LIB_SSL, SSL_R_SSL_SESSION_ID_HAS_BAD_LENGTH}, - #else - {"SSL_SESSION_ID_HAS_BAD_LENGTH", 20, 303}, - #endif - #ifdef SSL_R_SSL_SESSION_ID_TOO_LONG - {"SSL_SESSION_ID_TOO_LONG", ERR_LIB_SSL, SSL_R_SSL_SESSION_ID_TOO_LONG}, - #else - {"SSL_SESSION_ID_TOO_LONG", 20, 408}, - #endif - #ifdef SSL_R_SSL_SESSION_VERSION_MISMATCH - {"SSL_SESSION_VERSION_MISMATCH", ERR_LIB_SSL, SSL_R_SSL_SESSION_VERSION_MISMATCH}, - #else - {"SSL_SESSION_VERSION_MISMATCH", 20, 210}, - #endif - #ifdef SSL_R_STILL_IN_INIT - {"STILL_IN_INIT", ERR_LIB_SSL, SSL_R_STILL_IN_INIT}, - #else - {"STILL_IN_INIT", 20, 121}, - #endif - #ifdef SSL_R_TLSV13_ALERT_CERTIFICATE_REQUIRED - {"TLSV13_ALERT_CERTIFICATE_REQUIRED", ERR_LIB_SSL, SSL_R_TLSV13_ALERT_CERTIFICATE_REQUIRED}, - #else - {"TLSV13_ALERT_CERTIFICATE_REQUIRED", 20, 1116}, - #endif - #ifdef SSL_R_TLSV13_ALERT_MISSING_EXTENSION - {"TLSV13_ALERT_MISSING_EXTENSION", ERR_LIB_SSL, SSL_R_TLSV13_ALERT_MISSING_EXTENSION}, - #else - {"TLSV13_ALERT_MISSING_EXTENSION", 20, 1109}, - #endif - #ifdef SSL_R_TLSV1_ALERT_ACCESS_DENIED - {"TLSV1_ALERT_ACCESS_DENIED", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_ACCESS_DENIED}, - #else - {"TLSV1_ALERT_ACCESS_DENIED", 20, 1049}, - #endif - #ifdef SSL_R_TLSV1_ALERT_DECODE_ERROR - {"TLSV1_ALERT_DECODE_ERROR", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_DECODE_ERROR}, - #else - {"TLSV1_ALERT_DECODE_ERROR", 20, 1050}, - #endif - #ifdef SSL_R_TLSV1_ALERT_DECRYPTION_FAILED - {"TLSV1_ALERT_DECRYPTION_FAILED", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_DECRYPTION_FAILED}, - #else - {"TLSV1_ALERT_DECRYPTION_FAILED", 20, 1021}, - #endif - #ifdef SSL_R_TLSV1_ALERT_DECRYPT_ERROR - {"TLSV1_ALERT_DECRYPT_ERROR", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_DECRYPT_ERROR}, - #else - {"TLSV1_ALERT_DECRYPT_ERROR", 20, 1051}, - #endif - #ifdef SSL_R_TLSV1_ALERT_EXPORT_RESTRICTION - {"TLSV1_ALERT_EXPORT_RESTRICTION", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_EXPORT_RESTRICTION}, - #else - {"TLSV1_ALERT_EXPORT_RESTRICTION", 20, 1060}, - #endif - #ifdef SSL_R_TLSV1_ALERT_INAPPROPRIATE_FALLBACK - {"TLSV1_ALERT_INAPPROPRIATE_FALLBACK", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_INAPPROPRIATE_FALLBACK}, - #else - {"TLSV1_ALERT_INAPPROPRIATE_FALLBACK", 20, 1086}, - #endif - #ifdef SSL_R_TLSV1_ALERT_INSUFFICIENT_SECURITY - {"TLSV1_ALERT_INSUFFICIENT_SECURITY", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_INSUFFICIENT_SECURITY}, - #else - 
{"TLSV1_ALERT_INSUFFICIENT_SECURITY", 20, 1071}, - #endif - #ifdef SSL_R_TLSV1_ALERT_INTERNAL_ERROR - {"TLSV1_ALERT_INTERNAL_ERROR", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_INTERNAL_ERROR}, - #else - {"TLSV1_ALERT_INTERNAL_ERROR", 20, 1080}, - #endif - #ifdef SSL_R_TLSV1_ALERT_NO_RENEGOTIATION - {"TLSV1_ALERT_NO_RENEGOTIATION", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_NO_RENEGOTIATION}, - #else - {"TLSV1_ALERT_NO_RENEGOTIATION", 20, 1100}, - #endif - #ifdef SSL_R_TLSV1_ALERT_PROTOCOL_VERSION - {"TLSV1_ALERT_PROTOCOL_VERSION", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_PROTOCOL_VERSION}, - #else - {"TLSV1_ALERT_PROTOCOL_VERSION", 20, 1070}, - #endif - #ifdef SSL_R_TLSV1_ALERT_RECORD_OVERFLOW - {"TLSV1_ALERT_RECORD_OVERFLOW", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_RECORD_OVERFLOW}, - #else - {"TLSV1_ALERT_RECORD_OVERFLOW", 20, 1022}, - #endif - #ifdef SSL_R_TLSV1_ALERT_UNKNOWN_CA - {"TLSV1_ALERT_UNKNOWN_CA", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_UNKNOWN_CA}, - #else - {"TLSV1_ALERT_UNKNOWN_CA", 20, 1048}, - #endif - #ifdef SSL_R_TLSV1_ALERT_USER_CANCELLED - {"TLSV1_ALERT_USER_CANCELLED", ERR_LIB_SSL, SSL_R_TLSV1_ALERT_USER_CANCELLED}, - #else - {"TLSV1_ALERT_USER_CANCELLED", 20, 1090}, - #endif - #ifdef SSL_R_TLSV1_BAD_CERTIFICATE_HASH_VALUE - {"TLSV1_BAD_CERTIFICATE_HASH_VALUE", ERR_LIB_SSL, SSL_R_TLSV1_BAD_CERTIFICATE_HASH_VALUE}, - #else - {"TLSV1_BAD_CERTIFICATE_HASH_VALUE", 20, 1114}, - #endif - #ifdef SSL_R_TLSV1_BAD_CERTIFICATE_STATUS_RESPONSE - {"TLSV1_BAD_CERTIFICATE_STATUS_RESPONSE", ERR_LIB_SSL, SSL_R_TLSV1_BAD_CERTIFICATE_STATUS_RESPONSE}, - #else - {"TLSV1_BAD_CERTIFICATE_STATUS_RESPONSE", 20, 1113}, - #endif - #ifdef SSL_R_TLSV1_CERTIFICATE_UNOBTAINABLE - {"TLSV1_CERTIFICATE_UNOBTAINABLE", ERR_LIB_SSL, SSL_R_TLSV1_CERTIFICATE_UNOBTAINABLE}, - #else - {"TLSV1_CERTIFICATE_UNOBTAINABLE", 20, 1111}, - #endif - #ifdef SSL_R_TLSV1_UNRECOGNIZED_NAME - {"TLSV1_UNRECOGNIZED_NAME", ERR_LIB_SSL, SSL_R_TLSV1_UNRECOGNIZED_NAME}, - #else - {"TLSV1_UNRECOGNIZED_NAME", 20, 1112}, - #endif - #ifdef SSL_R_TLSV1_UNSUPPORTED_EXTENSION - {"TLSV1_UNSUPPORTED_EXTENSION", ERR_LIB_SSL, SSL_R_TLSV1_UNSUPPORTED_EXTENSION}, - #else - {"TLSV1_UNSUPPORTED_EXTENSION", 20, 1110}, - #endif - #ifdef SSL_R_TLS_HEARTBEAT_PEER_DOESNT_ACCEPT - {"TLS_HEARTBEAT_PEER_DOESNT_ACCEPT", ERR_LIB_SSL, SSL_R_TLS_HEARTBEAT_PEER_DOESNT_ACCEPT}, - #else - {"TLS_HEARTBEAT_PEER_DOESNT_ACCEPT", 20, 365}, - #endif - #ifdef SSL_R_TLS_HEARTBEAT_PENDING - {"TLS_HEARTBEAT_PENDING", ERR_LIB_SSL, SSL_R_TLS_HEARTBEAT_PENDING}, - #else - {"TLS_HEARTBEAT_PENDING", 20, 366}, - #endif - #ifdef SSL_R_TLS_ILLEGAL_EXPORTER_LABEL - {"TLS_ILLEGAL_EXPORTER_LABEL", ERR_LIB_SSL, SSL_R_TLS_ILLEGAL_EXPORTER_LABEL}, - #else - {"TLS_ILLEGAL_EXPORTER_LABEL", 20, 367}, - #endif - #ifdef SSL_R_TLS_INVALID_ECPOINTFORMAT_LIST - {"TLS_INVALID_ECPOINTFORMAT_LIST", ERR_LIB_SSL, SSL_R_TLS_INVALID_ECPOINTFORMAT_LIST}, - #else - {"TLS_INVALID_ECPOINTFORMAT_LIST", 20, 157}, - #endif - #ifdef SSL_R_TOO_MANY_KEY_UPDATES - {"TOO_MANY_KEY_UPDATES", ERR_LIB_SSL, SSL_R_TOO_MANY_KEY_UPDATES}, - #else - {"TOO_MANY_KEY_UPDATES", 20, 132}, - #endif - #ifdef SSL_R_TOO_MANY_WARN_ALERTS - {"TOO_MANY_WARN_ALERTS", ERR_LIB_SSL, SSL_R_TOO_MANY_WARN_ALERTS}, - #else - {"TOO_MANY_WARN_ALERTS", 20, 409}, - #endif - #ifdef SSL_R_TOO_MUCH_EARLY_DATA - {"TOO_MUCH_EARLY_DATA", ERR_LIB_SSL, SSL_R_TOO_MUCH_EARLY_DATA}, - #else - {"TOO_MUCH_EARLY_DATA", 20, 164}, - #endif - #ifdef SSL_R_UNABLE_TO_DECODE_ECDH_CERTS - {"UNABLE_TO_DECODE_ECDH_CERTS", ERR_LIB_SSL, SSL_R_UNABLE_TO_DECODE_ECDH_CERTS}, - #else - {"UNABLE_TO_DECODE_ECDH_CERTS", 
20, 313}, - #endif - #ifdef SSL_R_UNABLE_TO_FIND_ECDH_PARAMETERS - {"UNABLE_TO_FIND_ECDH_PARAMETERS", ERR_LIB_SSL, SSL_R_UNABLE_TO_FIND_ECDH_PARAMETERS}, - #else - {"UNABLE_TO_FIND_ECDH_PARAMETERS", 20, 314}, - #endif - #ifdef SSL_R_UNABLE_TO_FIND_PUBLIC_KEY_PARAMETERS - {"UNABLE_TO_FIND_PUBLIC_KEY_PARAMETERS", ERR_LIB_SSL, SSL_R_UNABLE_TO_FIND_PUBLIC_KEY_PARAMETERS}, - #else - {"UNABLE_TO_FIND_PUBLIC_KEY_PARAMETERS", 20, 239}, - #endif - #ifdef SSL_R_UNABLE_TO_LOAD_SSL3_MD5_ROUTINES - {"UNABLE_TO_LOAD_SSL3_MD5_ROUTINES", ERR_LIB_SSL, SSL_R_UNABLE_TO_LOAD_SSL3_MD5_ROUTINES}, - #else - {"UNABLE_TO_LOAD_SSL3_MD5_ROUTINES", 20, 242}, - #endif - #ifdef SSL_R_UNABLE_TO_LOAD_SSL3_SHA1_ROUTINES - {"UNABLE_TO_LOAD_SSL3_SHA1_ROUTINES", ERR_LIB_SSL, SSL_R_UNABLE_TO_LOAD_SSL3_SHA1_ROUTINES}, - #else - {"UNABLE_TO_LOAD_SSL3_SHA1_ROUTINES", 20, 243}, - #endif - #ifdef SSL_R_UNEXPECTED_CCS_MESSAGE - {"UNEXPECTED_CCS_MESSAGE", ERR_LIB_SSL, SSL_R_UNEXPECTED_CCS_MESSAGE}, - #else - {"UNEXPECTED_CCS_MESSAGE", 20, 262}, - #endif - #ifdef SSL_R_UNEXPECTED_END_OF_EARLY_DATA - {"UNEXPECTED_END_OF_EARLY_DATA", ERR_LIB_SSL, SSL_R_UNEXPECTED_END_OF_EARLY_DATA}, - #else - {"UNEXPECTED_END_OF_EARLY_DATA", 20, 178}, - #endif - #ifdef SSL_R_UNEXPECTED_MESSAGE - {"UNEXPECTED_MESSAGE", ERR_LIB_SSL, SSL_R_UNEXPECTED_MESSAGE}, - #else - {"UNEXPECTED_MESSAGE", 20, 244}, - #endif - #ifdef SSL_R_UNEXPECTED_RECORD - {"UNEXPECTED_RECORD", ERR_LIB_SSL, SSL_R_UNEXPECTED_RECORD}, - #else - {"UNEXPECTED_RECORD", 20, 245}, - #endif - #ifdef SSL_R_UNINITIALIZED - {"UNINITIALIZED", ERR_LIB_SSL, SSL_R_UNINITIALIZED}, - #else - {"UNINITIALIZED", 20, 276}, - #endif - #ifdef SSL_R_UNKNOWN_ALERT_TYPE - {"UNKNOWN_ALERT_TYPE", ERR_LIB_SSL, SSL_R_UNKNOWN_ALERT_TYPE}, - #else - {"UNKNOWN_ALERT_TYPE", 20, 246}, - #endif - #ifdef SSL_R_UNKNOWN_CERTIFICATE_TYPE - {"UNKNOWN_CERTIFICATE_TYPE", ERR_LIB_SSL, SSL_R_UNKNOWN_CERTIFICATE_TYPE}, - #else - {"UNKNOWN_CERTIFICATE_TYPE", 20, 247}, - #endif - #ifdef SSL_R_UNKNOWN_CIPHER_RETURNED - {"UNKNOWN_CIPHER_RETURNED", ERR_LIB_SSL, SSL_R_UNKNOWN_CIPHER_RETURNED}, - #else - {"UNKNOWN_CIPHER_RETURNED", 20, 248}, - #endif - #ifdef SSL_R_UNKNOWN_CIPHER_TYPE - {"UNKNOWN_CIPHER_TYPE", ERR_LIB_SSL, SSL_R_UNKNOWN_CIPHER_TYPE}, - #else - {"UNKNOWN_CIPHER_TYPE", 20, 249}, - #endif - #ifdef SSL_R_UNKNOWN_CMD_NAME - {"UNKNOWN_CMD_NAME", ERR_LIB_SSL, SSL_R_UNKNOWN_CMD_NAME}, - #else - {"UNKNOWN_CMD_NAME", 20, 386}, - #endif - #ifdef SSL_R_UNKNOWN_COMMAND - {"UNKNOWN_COMMAND", ERR_LIB_SSL, SSL_R_UNKNOWN_COMMAND}, - #else - {"UNKNOWN_COMMAND", 20, 139}, - #endif - #ifdef SSL_R_UNKNOWN_DIGEST - {"UNKNOWN_DIGEST", ERR_LIB_SSL, SSL_R_UNKNOWN_DIGEST}, - #else - {"UNKNOWN_DIGEST", 20, 368}, - #endif - #ifdef SSL_R_UNKNOWN_KEY_EXCHANGE_TYPE - {"UNKNOWN_KEY_EXCHANGE_TYPE", ERR_LIB_SSL, SSL_R_UNKNOWN_KEY_EXCHANGE_TYPE}, - #else - {"UNKNOWN_KEY_EXCHANGE_TYPE", 20, 250}, - #endif - #ifdef SSL_R_UNKNOWN_PKEY_TYPE - {"UNKNOWN_PKEY_TYPE", ERR_LIB_SSL, SSL_R_UNKNOWN_PKEY_TYPE}, - #else - {"UNKNOWN_PKEY_TYPE", 20, 251}, - #endif - #ifdef SSL_R_UNKNOWN_PROTOCOL - {"UNKNOWN_PROTOCOL", ERR_LIB_SSL, SSL_R_UNKNOWN_PROTOCOL}, - #else - {"UNKNOWN_PROTOCOL", 20, 252}, - #endif - #ifdef SSL_R_UNKNOWN_SSL_VERSION - {"UNKNOWN_SSL_VERSION", ERR_LIB_SSL, SSL_R_UNKNOWN_SSL_VERSION}, - #else - {"UNKNOWN_SSL_VERSION", 20, 254}, - #endif - #ifdef SSL_R_UNKNOWN_STATE - {"UNKNOWN_STATE", ERR_LIB_SSL, SSL_R_UNKNOWN_STATE}, - #else - {"UNKNOWN_STATE", 20, 255}, - #endif - #ifdef SSL_R_UNSAFE_LEGACY_RENEGOTIATION_DISABLED - 
{"UNSAFE_LEGACY_RENEGOTIATION_DISABLED", ERR_LIB_SSL, SSL_R_UNSAFE_LEGACY_RENEGOTIATION_DISABLED}, - #else - {"UNSAFE_LEGACY_RENEGOTIATION_DISABLED", 20, 338}, - #endif - #ifdef SSL_R_UNSOLICITED_EXTENSION - {"UNSOLICITED_EXTENSION", ERR_LIB_SSL, SSL_R_UNSOLICITED_EXTENSION}, - #else - {"UNSOLICITED_EXTENSION", 20, 217}, - #endif - #ifdef SSL_R_UNSUPPORTED_COMPRESSION_ALGORITHM - {"UNSUPPORTED_COMPRESSION_ALGORITHM", ERR_LIB_SSL, SSL_R_UNSUPPORTED_COMPRESSION_ALGORITHM}, - #else - {"UNSUPPORTED_COMPRESSION_ALGORITHM", 20, 257}, - #endif - #ifdef SSL_R_UNSUPPORTED_DIGEST_TYPE - {"UNSUPPORTED_DIGEST_TYPE", ERR_LIB_SSL, SSL_R_UNSUPPORTED_DIGEST_TYPE}, - #else - {"UNSUPPORTED_DIGEST_TYPE", 20, 326}, - #endif - #ifdef SSL_R_UNSUPPORTED_ELLIPTIC_CURVE - {"UNSUPPORTED_ELLIPTIC_CURVE", ERR_LIB_SSL, SSL_R_UNSUPPORTED_ELLIPTIC_CURVE}, - #else - {"UNSUPPORTED_ELLIPTIC_CURVE", 20, 315}, - #endif - #ifdef SSL_R_UNSUPPORTED_PROTOCOL - {"UNSUPPORTED_PROTOCOL", ERR_LIB_SSL, SSL_R_UNSUPPORTED_PROTOCOL}, - #else - {"UNSUPPORTED_PROTOCOL", 20, 258}, - #endif - #ifdef SSL_R_UNSUPPORTED_SSL_VERSION - {"UNSUPPORTED_SSL_VERSION", ERR_LIB_SSL, SSL_R_UNSUPPORTED_SSL_VERSION}, - #else - {"UNSUPPORTED_SSL_VERSION", 20, 259}, - #endif - #ifdef SSL_R_UNSUPPORTED_STATUS_TYPE - {"UNSUPPORTED_STATUS_TYPE", ERR_LIB_SSL, SSL_R_UNSUPPORTED_STATUS_TYPE}, - #else - {"UNSUPPORTED_STATUS_TYPE", 20, 329}, - #endif - #ifdef SSL_R_USE_SRTP_NOT_NEGOTIATED - {"USE_SRTP_NOT_NEGOTIATED", ERR_LIB_SSL, SSL_R_USE_SRTP_NOT_NEGOTIATED}, - #else - {"USE_SRTP_NOT_NEGOTIATED", 20, 369}, - #endif - #ifdef SSL_R_VERSION_TOO_HIGH - {"VERSION_TOO_HIGH", ERR_LIB_SSL, SSL_R_VERSION_TOO_HIGH}, - #else - {"VERSION_TOO_HIGH", 20, 166}, - #endif - #ifdef SSL_R_VERSION_TOO_LOW - {"VERSION_TOO_LOW", ERR_LIB_SSL, SSL_R_VERSION_TOO_LOW}, - #else - {"VERSION_TOO_LOW", 20, 396}, - #endif - #ifdef SSL_R_WRONG_CERTIFICATE_TYPE - {"WRONG_CERTIFICATE_TYPE", ERR_LIB_SSL, SSL_R_WRONG_CERTIFICATE_TYPE}, - #else - {"WRONG_CERTIFICATE_TYPE", 20, 383}, - #endif - #ifdef SSL_R_WRONG_CIPHER_RETURNED - {"WRONG_CIPHER_RETURNED", ERR_LIB_SSL, SSL_R_WRONG_CIPHER_RETURNED}, - #else - {"WRONG_CIPHER_RETURNED", 20, 261}, - #endif - #ifdef SSL_R_WRONG_CURVE - {"WRONG_CURVE", ERR_LIB_SSL, SSL_R_WRONG_CURVE}, - #else - {"WRONG_CURVE", 20, 378}, - #endif - #ifdef SSL_R_WRONG_SIGNATURE_LENGTH - {"WRONG_SIGNATURE_LENGTH", ERR_LIB_SSL, SSL_R_WRONG_SIGNATURE_LENGTH}, - #else - {"WRONG_SIGNATURE_LENGTH", 20, 264}, - #endif - #ifdef SSL_R_WRONG_SIGNATURE_SIZE - {"WRONG_SIGNATURE_SIZE", ERR_LIB_SSL, SSL_R_WRONG_SIGNATURE_SIZE}, - #else - {"WRONG_SIGNATURE_SIZE", 20, 265}, - #endif - #ifdef SSL_R_WRONG_SIGNATURE_TYPE - {"WRONG_SIGNATURE_TYPE", ERR_LIB_SSL, SSL_R_WRONG_SIGNATURE_TYPE}, - #else - {"WRONG_SIGNATURE_TYPE", 20, 370}, - #endif - #ifdef SSL_R_WRONG_SSL_VERSION - {"WRONG_SSL_VERSION", ERR_LIB_SSL, SSL_R_WRONG_SSL_VERSION}, - #else - {"WRONG_SSL_VERSION", 20, 266}, - #endif - #ifdef SSL_R_WRONG_VERSION_NUMBER - {"WRONG_VERSION_NUMBER", ERR_LIB_SSL, SSL_R_WRONG_VERSION_NUMBER}, - #else - {"WRONG_VERSION_NUMBER", 20, 267}, - #endif - #ifdef SSL_R_X509_LIB - {"X509_LIB", ERR_LIB_SSL, SSL_R_X509_LIB}, - #else - {"X509_LIB", 20, 268}, - #endif - #ifdef SSL_R_X509_VERIFICATION_SETUP_PROBLEMS - {"X509_VERIFICATION_SETUP_PROBLEMS", ERR_LIB_SSL, SSL_R_X509_VERIFICATION_SETUP_PROBLEMS}, - #else - {"X509_VERIFICATION_SETUP_PROBLEMS", 20, 269}, - #endif - #ifdef TS_R_BAD_PKCS7_TYPE - {"BAD_PKCS7_TYPE", ERR_LIB_TS, TS_R_BAD_PKCS7_TYPE}, - #else - {"BAD_PKCS7_TYPE", 47, 132}, - #endif - #ifdef 
TS_R_BAD_TYPE - {"BAD_TYPE", ERR_LIB_TS, TS_R_BAD_TYPE}, - #else - {"BAD_TYPE", 47, 133}, - #endif - #ifdef TS_R_CANNOT_LOAD_CERT - {"CANNOT_LOAD_CERT", ERR_LIB_TS, TS_R_CANNOT_LOAD_CERT}, - #else - {"CANNOT_LOAD_CERT", 47, 137}, - #endif - #ifdef TS_R_CANNOT_LOAD_KEY - {"CANNOT_LOAD_KEY", ERR_LIB_TS, TS_R_CANNOT_LOAD_KEY}, - #else - {"CANNOT_LOAD_KEY", 47, 138}, - #endif - #ifdef TS_R_CERTIFICATE_VERIFY_ERROR - {"CERTIFICATE_VERIFY_ERROR", ERR_LIB_TS, TS_R_CERTIFICATE_VERIFY_ERROR}, - #else - {"CERTIFICATE_VERIFY_ERROR", 47, 100}, - #endif - #ifdef TS_R_COULD_NOT_SET_ENGINE - {"COULD_NOT_SET_ENGINE", ERR_LIB_TS, TS_R_COULD_NOT_SET_ENGINE}, - #else - {"COULD_NOT_SET_ENGINE", 47, 127}, - #endif - #ifdef TS_R_COULD_NOT_SET_TIME - {"COULD_NOT_SET_TIME", ERR_LIB_TS, TS_R_COULD_NOT_SET_TIME}, - #else - {"COULD_NOT_SET_TIME", 47, 115}, - #endif - #ifdef TS_R_DETACHED_CONTENT - {"DETACHED_CONTENT", ERR_LIB_TS, TS_R_DETACHED_CONTENT}, - #else - {"DETACHED_CONTENT", 47, 134}, - #endif - #ifdef TS_R_ESS_ADD_SIGNING_CERT_ERROR - {"ESS_ADD_SIGNING_CERT_ERROR", ERR_LIB_TS, TS_R_ESS_ADD_SIGNING_CERT_ERROR}, - #else - {"ESS_ADD_SIGNING_CERT_ERROR", 47, 116}, - #endif - #ifdef TS_R_ESS_ADD_SIGNING_CERT_V2_ERROR - {"ESS_ADD_SIGNING_CERT_V2_ERROR", ERR_LIB_TS, TS_R_ESS_ADD_SIGNING_CERT_V2_ERROR}, - #else - {"ESS_ADD_SIGNING_CERT_V2_ERROR", 47, 139}, - #endif - #ifdef TS_R_ESS_SIGNING_CERTIFICATE_ERROR - {"ESS_SIGNING_CERTIFICATE_ERROR", ERR_LIB_TS, TS_R_ESS_SIGNING_CERTIFICATE_ERROR}, - #else - {"ESS_SIGNING_CERTIFICATE_ERROR", 47, 101}, - #endif - #ifdef TS_R_INVALID_NULL_POINTER - {"INVALID_NULL_POINTER", ERR_LIB_TS, TS_R_INVALID_NULL_POINTER}, - #else - {"INVALID_NULL_POINTER", 47, 102}, - #endif - #ifdef TS_R_INVALID_SIGNER_CERTIFICATE_PURPOSE - {"INVALID_SIGNER_CERTIFICATE_PURPOSE", ERR_LIB_TS, TS_R_INVALID_SIGNER_CERTIFICATE_PURPOSE}, - #else - {"INVALID_SIGNER_CERTIFICATE_PURPOSE", 47, 117}, - #endif - #ifdef TS_R_MESSAGE_IMPRINT_MISMATCH - {"MESSAGE_IMPRINT_MISMATCH", ERR_LIB_TS, TS_R_MESSAGE_IMPRINT_MISMATCH}, - #else - {"MESSAGE_IMPRINT_MISMATCH", 47, 103}, - #endif - #ifdef TS_R_NONCE_MISMATCH - {"NONCE_MISMATCH", ERR_LIB_TS, TS_R_NONCE_MISMATCH}, - #else - {"NONCE_MISMATCH", 47, 104}, - #endif - #ifdef TS_R_NONCE_NOT_RETURNED - {"NONCE_NOT_RETURNED", ERR_LIB_TS, TS_R_NONCE_NOT_RETURNED}, - #else - {"NONCE_NOT_RETURNED", 47, 105}, - #endif - #ifdef TS_R_NO_CONTENT - {"NO_CONTENT", ERR_LIB_TS, TS_R_NO_CONTENT}, - #else - {"NO_CONTENT", 47, 106}, - #endif - #ifdef TS_R_NO_TIME_STAMP_TOKEN - {"NO_TIME_STAMP_TOKEN", ERR_LIB_TS, TS_R_NO_TIME_STAMP_TOKEN}, - #else - {"NO_TIME_STAMP_TOKEN", 47, 107}, - #endif - #ifdef TS_R_PKCS7_ADD_SIGNATURE_ERROR - {"PKCS7_ADD_SIGNATURE_ERROR", ERR_LIB_TS, TS_R_PKCS7_ADD_SIGNATURE_ERROR}, - #else - {"PKCS7_ADD_SIGNATURE_ERROR", 47, 118}, - #endif - #ifdef TS_R_PKCS7_ADD_SIGNED_ATTR_ERROR - {"PKCS7_ADD_SIGNED_ATTR_ERROR", ERR_LIB_TS, TS_R_PKCS7_ADD_SIGNED_ATTR_ERROR}, - #else - {"PKCS7_ADD_SIGNED_ATTR_ERROR", 47, 119}, - #endif - #ifdef TS_R_PKCS7_TO_TS_TST_INFO_FAILED - {"PKCS7_TO_TS_TST_INFO_FAILED", ERR_LIB_TS, TS_R_PKCS7_TO_TS_TST_INFO_FAILED}, - #else - {"PKCS7_TO_TS_TST_INFO_FAILED", 47, 129}, - #endif - #ifdef TS_R_POLICY_MISMATCH - {"POLICY_MISMATCH", ERR_LIB_TS, TS_R_POLICY_MISMATCH}, - #else - {"POLICY_MISMATCH", 47, 108}, - #endif - #ifdef TS_R_PRIVATE_KEY_DOES_NOT_MATCH_CERTIFICATE - {"PRIVATE_KEY_DOES_NOT_MATCH_CERTIFICATE", ERR_LIB_TS, TS_R_PRIVATE_KEY_DOES_NOT_MATCH_CERTIFICATE}, - #else - {"PRIVATE_KEY_DOES_NOT_MATCH_CERTIFICATE", 47, 120}, - #endif - 
#ifdef TS_R_RESPONSE_SETUP_ERROR - {"RESPONSE_SETUP_ERROR", ERR_LIB_TS, TS_R_RESPONSE_SETUP_ERROR}, - #else - {"RESPONSE_SETUP_ERROR", 47, 121}, - #endif - #ifdef TS_R_SIGNATURE_FAILURE - {"SIGNATURE_FAILURE", ERR_LIB_TS, TS_R_SIGNATURE_FAILURE}, - #else - {"SIGNATURE_FAILURE", 47, 109}, - #endif - #ifdef TS_R_THERE_MUST_BE_ONE_SIGNER - {"THERE_MUST_BE_ONE_SIGNER", ERR_LIB_TS, TS_R_THERE_MUST_BE_ONE_SIGNER}, - #else - {"THERE_MUST_BE_ONE_SIGNER", 47, 110}, - #endif - #ifdef TS_R_TIME_SYSCALL_ERROR - {"TIME_SYSCALL_ERROR", ERR_LIB_TS, TS_R_TIME_SYSCALL_ERROR}, - #else - {"TIME_SYSCALL_ERROR", 47, 122}, - #endif - #ifdef TS_R_TOKEN_NOT_PRESENT - {"TOKEN_NOT_PRESENT", ERR_LIB_TS, TS_R_TOKEN_NOT_PRESENT}, - #else - {"TOKEN_NOT_PRESENT", 47, 130}, - #endif - #ifdef TS_R_TOKEN_PRESENT - {"TOKEN_PRESENT", ERR_LIB_TS, TS_R_TOKEN_PRESENT}, - #else - {"TOKEN_PRESENT", 47, 131}, - #endif - #ifdef TS_R_TSA_NAME_MISMATCH - {"TSA_NAME_MISMATCH", ERR_LIB_TS, TS_R_TSA_NAME_MISMATCH}, - #else - {"TSA_NAME_MISMATCH", 47, 111}, - #endif - #ifdef TS_R_TSA_UNTRUSTED - {"TSA_UNTRUSTED", ERR_LIB_TS, TS_R_TSA_UNTRUSTED}, - #else - {"TSA_UNTRUSTED", 47, 112}, - #endif - #ifdef TS_R_TST_INFO_SETUP_ERROR - {"TST_INFO_SETUP_ERROR", ERR_LIB_TS, TS_R_TST_INFO_SETUP_ERROR}, - #else - {"TST_INFO_SETUP_ERROR", 47, 123}, - #endif - #ifdef TS_R_TS_DATASIGN - {"TS_DATASIGN", ERR_LIB_TS, TS_R_TS_DATASIGN}, - #else - {"TS_DATASIGN", 47, 124}, - #endif - #ifdef TS_R_UNACCEPTABLE_POLICY - {"UNACCEPTABLE_POLICY", ERR_LIB_TS, TS_R_UNACCEPTABLE_POLICY}, - #else - {"UNACCEPTABLE_POLICY", 47, 125}, - #endif - #ifdef TS_R_UNSUPPORTED_MD_ALGORITHM - {"UNSUPPORTED_MD_ALGORITHM", ERR_LIB_TS, TS_R_UNSUPPORTED_MD_ALGORITHM}, - #else - {"UNSUPPORTED_MD_ALGORITHM", 47, 126}, - #endif - #ifdef TS_R_UNSUPPORTED_VERSION - {"UNSUPPORTED_VERSION", ERR_LIB_TS, TS_R_UNSUPPORTED_VERSION}, - #else - {"UNSUPPORTED_VERSION", 47, 113}, - #endif - #ifdef TS_R_VAR_BAD_VALUE - {"VAR_BAD_VALUE", ERR_LIB_TS, TS_R_VAR_BAD_VALUE}, - #else - {"VAR_BAD_VALUE", 47, 135}, - #endif - #ifdef TS_R_VAR_LOOKUP_FAILURE - {"VAR_LOOKUP_FAILURE", ERR_LIB_TS, TS_R_VAR_LOOKUP_FAILURE}, - #else - {"VAR_LOOKUP_FAILURE", 47, 136}, - #endif - #ifdef TS_R_WRONG_CONTENT_TYPE - {"WRONG_CONTENT_TYPE", ERR_LIB_TS, TS_R_WRONG_CONTENT_TYPE}, - #else - {"WRONG_CONTENT_TYPE", 47, 114}, - #endif - #ifdef UI_R_COMMON_OK_AND_CANCEL_CHARACTERS - {"COMMON_OK_AND_CANCEL_CHARACTERS", ERR_LIB_UI, UI_R_COMMON_OK_AND_CANCEL_CHARACTERS}, - #else - {"COMMON_OK_AND_CANCEL_CHARACTERS", 40, 104}, - #endif - #ifdef UI_R_INDEX_TOO_LARGE - {"INDEX_TOO_LARGE", ERR_LIB_UI, UI_R_INDEX_TOO_LARGE}, - #else - {"INDEX_TOO_LARGE", 40, 102}, - #endif - #ifdef UI_R_INDEX_TOO_SMALL - {"INDEX_TOO_SMALL", ERR_LIB_UI, UI_R_INDEX_TOO_SMALL}, - #else - {"INDEX_TOO_SMALL", 40, 103}, - #endif - #ifdef UI_R_NO_RESULT_BUFFER - {"NO_RESULT_BUFFER", ERR_LIB_UI, UI_R_NO_RESULT_BUFFER}, - #else - {"NO_RESULT_BUFFER", 40, 105}, - #endif - #ifdef UI_R_PROCESSING_ERROR - {"PROCESSING_ERROR", ERR_LIB_UI, UI_R_PROCESSING_ERROR}, - #else - {"PROCESSING_ERROR", 40, 107}, - #endif - #ifdef UI_R_RESULT_TOO_LARGE - {"RESULT_TOO_LARGE", ERR_LIB_UI, UI_R_RESULT_TOO_LARGE}, - #else - {"RESULT_TOO_LARGE", 40, 100}, - #endif - #ifdef UI_R_RESULT_TOO_SMALL - {"RESULT_TOO_SMALL", ERR_LIB_UI, UI_R_RESULT_TOO_SMALL}, - #else - {"RESULT_TOO_SMALL", 40, 101}, - #endif - #ifdef UI_R_SYSASSIGN_ERROR - {"SYSASSIGN_ERROR", ERR_LIB_UI, UI_R_SYSASSIGN_ERROR}, - #else - {"SYSASSIGN_ERROR", 40, 109}, - #endif - #ifdef UI_R_SYSDASSGN_ERROR - 
{"SYSDASSGN_ERROR", ERR_LIB_UI, UI_R_SYSDASSGN_ERROR}, - #else - {"SYSDASSGN_ERROR", 40, 110}, - #endif - #ifdef UI_R_SYSQIOW_ERROR - {"SYSQIOW_ERROR", ERR_LIB_UI, UI_R_SYSQIOW_ERROR}, - #else - {"SYSQIOW_ERROR", 40, 111}, - #endif - #ifdef UI_R_UNKNOWN_CONTROL_COMMAND - {"UNKNOWN_CONTROL_COMMAND", ERR_LIB_UI, UI_R_UNKNOWN_CONTROL_COMMAND}, - #else - {"UNKNOWN_CONTROL_COMMAND", 40, 106}, - #endif - #ifdef UI_R_UNKNOWN_TTYGET_ERRNO_VALUE - {"UNKNOWN_TTYGET_ERRNO_VALUE", ERR_LIB_UI, UI_R_UNKNOWN_TTYGET_ERRNO_VALUE}, - #else - {"UNKNOWN_TTYGET_ERRNO_VALUE", 40, 108}, - #endif - #ifdef UI_R_USER_DATA_DUPLICATION_UNSUPPORTED - {"USER_DATA_DUPLICATION_UNSUPPORTED", ERR_LIB_UI, UI_R_USER_DATA_DUPLICATION_UNSUPPORTED}, - #else - {"USER_DATA_DUPLICATION_UNSUPPORTED", 40, 112}, - #endif - #ifdef X509V3_R_BAD_IP_ADDRESS - {"BAD_IP_ADDRESS", ERR_LIB_X509V3, X509V3_R_BAD_IP_ADDRESS}, - #else - {"BAD_IP_ADDRESS", 34, 118}, - #endif - #ifdef X509V3_R_BAD_OBJECT - {"BAD_OBJECT", ERR_LIB_X509V3, X509V3_R_BAD_OBJECT}, - #else - {"BAD_OBJECT", 34, 119}, - #endif - #ifdef X509V3_R_BN_DEC2BN_ERROR - {"BN_DEC2BN_ERROR", ERR_LIB_X509V3, X509V3_R_BN_DEC2BN_ERROR}, - #else - {"BN_DEC2BN_ERROR", 34, 100}, - #endif - #ifdef X509V3_R_BN_TO_ASN1_INTEGER_ERROR - {"BN_TO_ASN1_INTEGER_ERROR", ERR_LIB_X509V3, X509V3_R_BN_TO_ASN1_INTEGER_ERROR}, - #else - {"BN_TO_ASN1_INTEGER_ERROR", 34, 101}, - #endif - #ifdef X509V3_R_DIRNAME_ERROR - {"DIRNAME_ERROR", ERR_LIB_X509V3, X509V3_R_DIRNAME_ERROR}, - #else - {"DIRNAME_ERROR", 34, 149}, - #endif - #ifdef X509V3_R_DISTPOINT_ALREADY_SET - {"DISTPOINT_ALREADY_SET", ERR_LIB_X509V3, X509V3_R_DISTPOINT_ALREADY_SET}, - #else - {"DISTPOINT_ALREADY_SET", 34, 160}, - #endif - #ifdef X509V3_R_DUPLICATE_ZONE_ID - {"DUPLICATE_ZONE_ID", ERR_LIB_X509V3, X509V3_R_DUPLICATE_ZONE_ID}, - #else - {"DUPLICATE_ZONE_ID", 34, 133}, - #endif - #ifdef X509V3_R_ERROR_CONVERTING_ZONE - {"ERROR_CONVERTING_ZONE", ERR_LIB_X509V3, X509V3_R_ERROR_CONVERTING_ZONE}, - #else - {"ERROR_CONVERTING_ZONE", 34, 131}, - #endif - #ifdef X509V3_R_ERROR_CREATING_EXTENSION - {"ERROR_CREATING_EXTENSION", ERR_LIB_X509V3, X509V3_R_ERROR_CREATING_EXTENSION}, - #else - {"ERROR_CREATING_EXTENSION", 34, 144}, - #endif - #ifdef X509V3_R_ERROR_IN_EXTENSION - {"ERROR_IN_EXTENSION", ERR_LIB_X509V3, X509V3_R_ERROR_IN_EXTENSION}, - #else - {"ERROR_IN_EXTENSION", 34, 128}, - #endif - #ifdef X509V3_R_EXPECTED_A_SECTION_NAME - {"EXPECTED_A_SECTION_NAME", ERR_LIB_X509V3, X509V3_R_EXPECTED_A_SECTION_NAME}, - #else - {"EXPECTED_A_SECTION_NAME", 34, 137}, - #endif - #ifdef X509V3_R_EXTENSION_EXISTS - {"EXTENSION_EXISTS", ERR_LIB_X509V3, X509V3_R_EXTENSION_EXISTS}, - #else - {"EXTENSION_EXISTS", 34, 145}, - #endif - #ifdef X509V3_R_EXTENSION_NAME_ERROR - {"EXTENSION_NAME_ERROR", ERR_LIB_X509V3, X509V3_R_EXTENSION_NAME_ERROR}, - #else - {"EXTENSION_NAME_ERROR", 34, 115}, - #endif - #ifdef X509V3_R_EXTENSION_NOT_FOUND - {"EXTENSION_NOT_FOUND", ERR_LIB_X509V3, X509V3_R_EXTENSION_NOT_FOUND}, - #else - {"EXTENSION_NOT_FOUND", 34, 102}, - #endif - #ifdef X509V3_R_EXTENSION_SETTING_NOT_SUPPORTED - {"EXTENSION_SETTING_NOT_SUPPORTED", ERR_LIB_X509V3, X509V3_R_EXTENSION_SETTING_NOT_SUPPORTED}, - #else - {"EXTENSION_SETTING_NOT_SUPPORTED", 34, 103}, - #endif - #ifdef X509V3_R_EXTENSION_VALUE_ERROR - {"EXTENSION_VALUE_ERROR", ERR_LIB_X509V3, X509V3_R_EXTENSION_VALUE_ERROR}, - #else - {"EXTENSION_VALUE_ERROR", 34, 116}, - #endif - #ifdef X509V3_R_ILLEGAL_EMPTY_EXTENSION - {"ILLEGAL_EMPTY_EXTENSION", ERR_LIB_X509V3, X509V3_R_ILLEGAL_EMPTY_EXTENSION}, - #else 
- {"ILLEGAL_EMPTY_EXTENSION", 34, 151}, - #endif - #ifdef X509V3_R_INCORRECT_POLICY_SYNTAX_TAG - {"INCORRECT_POLICY_SYNTAX_TAG", ERR_LIB_X509V3, X509V3_R_INCORRECT_POLICY_SYNTAX_TAG}, - #else - {"INCORRECT_POLICY_SYNTAX_TAG", 34, 152}, - #endif - #ifdef X509V3_R_INVALID_ASNUMBER - {"INVALID_ASNUMBER", ERR_LIB_X509V3, X509V3_R_INVALID_ASNUMBER}, - #else - {"INVALID_ASNUMBER", 34, 162}, - #endif - #ifdef X509V3_R_INVALID_ASRANGE - {"INVALID_ASRANGE", ERR_LIB_X509V3, X509V3_R_INVALID_ASRANGE}, - #else - {"INVALID_ASRANGE", 34, 163}, - #endif - #ifdef X509V3_R_INVALID_BOOLEAN_STRING - {"INVALID_BOOLEAN_STRING", ERR_LIB_X509V3, X509V3_R_INVALID_BOOLEAN_STRING}, - #else - {"INVALID_BOOLEAN_STRING", 34, 104}, - #endif - #ifdef X509V3_R_INVALID_EXTENSION_STRING - {"INVALID_EXTENSION_STRING", ERR_LIB_X509V3, X509V3_R_INVALID_EXTENSION_STRING}, - #else - {"INVALID_EXTENSION_STRING", 34, 105}, - #endif - #ifdef X509V3_R_INVALID_INHERITANCE - {"INVALID_INHERITANCE", ERR_LIB_X509V3, X509V3_R_INVALID_INHERITANCE}, - #else - {"INVALID_INHERITANCE", 34, 165}, - #endif - #ifdef X509V3_R_INVALID_IPADDRESS - {"INVALID_IPADDRESS", ERR_LIB_X509V3, X509V3_R_INVALID_IPADDRESS}, - #else - {"INVALID_IPADDRESS", 34, 166}, - #endif - #ifdef X509V3_R_INVALID_MULTIPLE_RDNS - {"INVALID_MULTIPLE_RDNS", ERR_LIB_X509V3, X509V3_R_INVALID_MULTIPLE_RDNS}, - #else - {"INVALID_MULTIPLE_RDNS", 34, 161}, - #endif - #ifdef X509V3_R_INVALID_NAME - {"INVALID_NAME", ERR_LIB_X509V3, X509V3_R_INVALID_NAME}, - #else - {"INVALID_NAME", 34, 106}, - #endif - #ifdef X509V3_R_INVALID_NULL_ARGUMENT - {"INVALID_NULL_ARGUMENT", ERR_LIB_X509V3, X509V3_R_INVALID_NULL_ARGUMENT}, - #else - {"INVALID_NULL_ARGUMENT", 34, 107}, - #endif - #ifdef X509V3_R_INVALID_NULL_NAME - {"INVALID_NULL_NAME", ERR_LIB_X509V3, X509V3_R_INVALID_NULL_NAME}, - #else - {"INVALID_NULL_NAME", 34, 108}, - #endif - #ifdef X509V3_R_INVALID_NULL_VALUE - {"INVALID_NULL_VALUE", ERR_LIB_X509V3, X509V3_R_INVALID_NULL_VALUE}, - #else - {"INVALID_NULL_VALUE", 34, 109}, - #endif - #ifdef X509V3_R_INVALID_NUMBER - {"INVALID_NUMBER", ERR_LIB_X509V3, X509V3_R_INVALID_NUMBER}, - #else - {"INVALID_NUMBER", 34, 140}, - #endif - #ifdef X509V3_R_INVALID_NUMBERS - {"INVALID_NUMBERS", ERR_LIB_X509V3, X509V3_R_INVALID_NUMBERS}, - #else - {"INVALID_NUMBERS", 34, 141}, - #endif - #ifdef X509V3_R_INVALID_OBJECT_IDENTIFIER - {"INVALID_OBJECT_IDENTIFIER", ERR_LIB_X509V3, X509V3_R_INVALID_OBJECT_IDENTIFIER}, - #else - {"INVALID_OBJECT_IDENTIFIER", 34, 110}, - #endif - #ifdef X509V3_R_INVALID_OPTION - {"INVALID_OPTION", ERR_LIB_X509V3, X509V3_R_INVALID_OPTION}, - #else - {"INVALID_OPTION", 34, 138}, - #endif - #ifdef X509V3_R_INVALID_POLICY_IDENTIFIER - {"INVALID_POLICY_IDENTIFIER", ERR_LIB_X509V3, X509V3_R_INVALID_POLICY_IDENTIFIER}, - #else - {"INVALID_POLICY_IDENTIFIER", 34, 134}, - #endif - #ifdef X509V3_R_INVALID_PROXY_POLICY_SETTING - {"INVALID_PROXY_POLICY_SETTING", ERR_LIB_X509V3, X509V3_R_INVALID_PROXY_POLICY_SETTING}, - #else - {"INVALID_PROXY_POLICY_SETTING", 34, 153}, - #endif - #ifdef X509V3_R_INVALID_PURPOSE - {"INVALID_PURPOSE", ERR_LIB_X509V3, X509V3_R_INVALID_PURPOSE}, - #else - {"INVALID_PURPOSE", 34, 146}, - #endif - #ifdef X509V3_R_INVALID_SAFI - {"INVALID_SAFI", ERR_LIB_X509V3, X509V3_R_INVALID_SAFI}, - #else - {"INVALID_SAFI", 34, 164}, - #endif - #ifdef X509V3_R_INVALID_SECTION - {"INVALID_SECTION", ERR_LIB_X509V3, X509V3_R_INVALID_SECTION}, - #else - {"INVALID_SECTION", 34, 135}, - #endif - #ifdef X509V3_R_INVALID_SYNTAX - {"INVALID_SYNTAX", ERR_LIB_X509V3, 
X509V3_R_INVALID_SYNTAX}, - #else - {"INVALID_SYNTAX", 34, 143}, - #endif - #ifdef X509V3_R_ISSUER_DECODE_ERROR - {"ISSUER_DECODE_ERROR", ERR_LIB_X509V3, X509V3_R_ISSUER_DECODE_ERROR}, - #else - {"ISSUER_DECODE_ERROR", 34, 126}, - #endif - #ifdef X509V3_R_MISSING_VALUE - {"MISSING_VALUE", ERR_LIB_X509V3, X509V3_R_MISSING_VALUE}, - #else - {"MISSING_VALUE", 34, 124}, - #endif - #ifdef X509V3_R_NEED_ORGANIZATION_AND_NUMBERS - {"NEED_ORGANIZATION_AND_NUMBERS", ERR_LIB_X509V3, X509V3_R_NEED_ORGANIZATION_AND_NUMBERS}, - #else - {"NEED_ORGANIZATION_AND_NUMBERS", 34, 142}, - #endif - #ifdef X509V3_R_NO_CONFIG_DATABASE - {"NO_CONFIG_DATABASE", ERR_LIB_X509V3, X509V3_R_NO_CONFIG_DATABASE}, - #else - {"NO_CONFIG_DATABASE", 34, 136}, - #endif - #ifdef X509V3_R_NO_ISSUER_CERTIFICATE - {"NO_ISSUER_CERTIFICATE", ERR_LIB_X509V3, X509V3_R_NO_ISSUER_CERTIFICATE}, - #else - {"NO_ISSUER_CERTIFICATE", 34, 121}, - #endif - #ifdef X509V3_R_NO_ISSUER_DETAILS - {"NO_ISSUER_DETAILS", ERR_LIB_X509V3, X509V3_R_NO_ISSUER_DETAILS}, - #else - {"NO_ISSUER_DETAILS", 34, 127}, - #endif - #ifdef X509V3_R_NO_POLICY_IDENTIFIER - {"NO_POLICY_IDENTIFIER", ERR_LIB_X509V3, X509V3_R_NO_POLICY_IDENTIFIER}, - #else - {"NO_POLICY_IDENTIFIER", 34, 139}, - #endif - #ifdef X509V3_R_NO_PROXY_CERT_POLICY_LANGUAGE_DEFINED - {"NO_PROXY_CERT_POLICY_LANGUAGE_DEFINED", ERR_LIB_X509V3, X509V3_R_NO_PROXY_CERT_POLICY_LANGUAGE_DEFINED}, - #else - {"NO_PROXY_CERT_POLICY_LANGUAGE_DEFINED", 34, 154}, - #endif - #ifdef X509V3_R_NO_PUBLIC_KEY - {"NO_PUBLIC_KEY", ERR_LIB_X509V3, X509V3_R_NO_PUBLIC_KEY}, - #else - {"NO_PUBLIC_KEY", 34, 114}, - #endif - #ifdef X509V3_R_NO_SUBJECT_DETAILS - {"NO_SUBJECT_DETAILS", ERR_LIB_X509V3, X509V3_R_NO_SUBJECT_DETAILS}, - #else - {"NO_SUBJECT_DETAILS", 34, 125}, - #endif - #ifdef X509V3_R_OPERATION_NOT_DEFINED - {"OPERATION_NOT_DEFINED", ERR_LIB_X509V3, X509V3_R_OPERATION_NOT_DEFINED}, - #else - {"OPERATION_NOT_DEFINED", 34, 148}, - #endif - #ifdef X509V3_R_OTHERNAME_ERROR - {"OTHERNAME_ERROR", ERR_LIB_X509V3, X509V3_R_OTHERNAME_ERROR}, - #else - {"OTHERNAME_ERROR", 34, 147}, - #endif - #ifdef X509V3_R_POLICY_LANGUAGE_ALREADY_DEFINED - {"POLICY_LANGUAGE_ALREADY_DEFINED", ERR_LIB_X509V3, X509V3_R_POLICY_LANGUAGE_ALREADY_DEFINED}, - #else - {"POLICY_LANGUAGE_ALREADY_DEFINED", 34, 155}, - #endif - #ifdef X509V3_R_POLICY_PATH_LENGTH - {"POLICY_PATH_LENGTH", ERR_LIB_X509V3, X509V3_R_POLICY_PATH_LENGTH}, - #else - {"POLICY_PATH_LENGTH", 34, 156}, - #endif - #ifdef X509V3_R_POLICY_PATH_LENGTH_ALREADY_DEFINED - {"POLICY_PATH_LENGTH_ALREADY_DEFINED", ERR_LIB_X509V3, X509V3_R_POLICY_PATH_LENGTH_ALREADY_DEFINED}, - #else - {"POLICY_PATH_LENGTH_ALREADY_DEFINED", 34, 157}, - #endif - #ifdef X509V3_R_POLICY_WHEN_PROXY_LANGUAGE_REQUIRES_NO_POLICY - {"POLICY_WHEN_PROXY_LANGUAGE_REQUIRES_NO_POLICY", ERR_LIB_X509V3, X509V3_R_POLICY_WHEN_PROXY_LANGUAGE_REQUIRES_NO_POLICY}, - #else - {"POLICY_WHEN_PROXY_LANGUAGE_REQUIRES_NO_POLICY", 34, 159}, - #endif - #ifdef X509V3_R_SECTION_NOT_FOUND - {"SECTION_NOT_FOUND", ERR_LIB_X509V3, X509V3_R_SECTION_NOT_FOUND}, - #else - {"SECTION_NOT_FOUND", 34, 150}, - #endif - #ifdef X509V3_R_UNABLE_TO_GET_ISSUER_DETAILS - {"UNABLE_TO_GET_ISSUER_DETAILS", ERR_LIB_X509V3, X509V3_R_UNABLE_TO_GET_ISSUER_DETAILS}, - #else - {"UNABLE_TO_GET_ISSUER_DETAILS", 34, 122}, - #endif - #ifdef X509V3_R_UNABLE_TO_GET_ISSUER_KEYID - {"UNABLE_TO_GET_ISSUER_KEYID", ERR_LIB_X509V3, X509V3_R_UNABLE_TO_GET_ISSUER_KEYID}, - #else - {"UNABLE_TO_GET_ISSUER_KEYID", 34, 123}, - #endif - #ifdef X509V3_R_UNKNOWN_BIT_STRING_ARGUMENT - 
{"UNKNOWN_BIT_STRING_ARGUMENT", ERR_LIB_X509V3, X509V3_R_UNKNOWN_BIT_STRING_ARGUMENT}, - #else - {"UNKNOWN_BIT_STRING_ARGUMENT", 34, 111}, - #endif - #ifdef X509V3_R_UNKNOWN_EXTENSION - {"UNKNOWN_EXTENSION", ERR_LIB_X509V3, X509V3_R_UNKNOWN_EXTENSION}, - #else - {"UNKNOWN_EXTENSION", 34, 129}, - #endif - #ifdef X509V3_R_UNKNOWN_EXTENSION_NAME - {"UNKNOWN_EXTENSION_NAME", ERR_LIB_X509V3, X509V3_R_UNKNOWN_EXTENSION_NAME}, - #else - {"UNKNOWN_EXTENSION_NAME", 34, 130}, - #endif - #ifdef X509V3_R_UNKNOWN_OPTION - {"UNKNOWN_OPTION", ERR_LIB_X509V3, X509V3_R_UNKNOWN_OPTION}, - #else - {"UNKNOWN_OPTION", 34, 120}, - #endif - #ifdef X509V3_R_UNSUPPORTED_OPTION - {"UNSUPPORTED_OPTION", ERR_LIB_X509V3, X509V3_R_UNSUPPORTED_OPTION}, - #else - {"UNSUPPORTED_OPTION", 34, 117}, - #endif - #ifdef X509V3_R_UNSUPPORTED_TYPE - {"UNSUPPORTED_TYPE", ERR_LIB_X509V3, X509V3_R_UNSUPPORTED_TYPE}, - #else - {"UNSUPPORTED_TYPE", 34, 167}, - #endif - #ifdef X509V3_R_USER_TOO_LONG - {"USER_TOO_LONG", ERR_LIB_X509V3, X509V3_R_USER_TOO_LONG}, - #else - {"USER_TOO_LONG", 34, 132}, - #endif - #ifdef X509_R_AKID_MISMATCH - {"AKID_MISMATCH", ERR_LIB_X509, X509_R_AKID_MISMATCH}, - #else - {"AKID_MISMATCH", 11, 110}, - #endif - #ifdef X509_R_BAD_SELECTOR - {"BAD_SELECTOR", ERR_LIB_X509, X509_R_BAD_SELECTOR}, - #else - {"BAD_SELECTOR", 11, 133}, - #endif - #ifdef X509_R_BAD_X509_FILETYPE - {"BAD_X509_FILETYPE", ERR_LIB_X509, X509_R_BAD_X509_FILETYPE}, - #else - {"BAD_X509_FILETYPE", 11, 100}, - #endif - #ifdef X509_R_BASE64_DECODE_ERROR - {"BASE64_DECODE_ERROR", ERR_LIB_X509, X509_R_BASE64_DECODE_ERROR}, - #else - {"BASE64_DECODE_ERROR", 11, 118}, - #endif - #ifdef X509_R_CANT_CHECK_DH_KEY - {"CANT_CHECK_DH_KEY", ERR_LIB_X509, X509_R_CANT_CHECK_DH_KEY}, - #else - {"CANT_CHECK_DH_KEY", 11, 114}, - #endif - #ifdef X509_R_CERT_ALREADY_IN_HASH_TABLE - {"CERT_ALREADY_IN_HASH_TABLE", ERR_LIB_X509, X509_R_CERT_ALREADY_IN_HASH_TABLE}, - #else - {"CERT_ALREADY_IN_HASH_TABLE", 11, 101}, - #endif - #ifdef X509_R_CRL_ALREADY_DELTA - {"CRL_ALREADY_DELTA", ERR_LIB_X509, X509_R_CRL_ALREADY_DELTA}, - #else - {"CRL_ALREADY_DELTA", 11, 127}, - #endif - #ifdef X509_R_CRL_VERIFY_FAILURE - {"CRL_VERIFY_FAILURE", ERR_LIB_X509, X509_R_CRL_VERIFY_FAILURE}, - #else - {"CRL_VERIFY_FAILURE", 11, 131}, - #endif - #ifdef X509_R_ERR_ASN1_LIB - {"ERR_ASN1_LIB", ERR_LIB_X509, X509_R_ERR_ASN1_LIB}, - #else - {"ERR_ASN1_LIB", 11, 102}, - #endif - #ifdef X509_R_IDP_MISMATCH - {"IDP_MISMATCH", ERR_LIB_X509, X509_R_IDP_MISMATCH}, - #else - {"IDP_MISMATCH", 11, 128}, - #endif - #ifdef X509_R_INVALID_ATTRIBUTES - {"INVALID_ATTRIBUTES", ERR_LIB_X509, X509_R_INVALID_ATTRIBUTES}, - #else - {"INVALID_ATTRIBUTES", 11, 138}, - #endif - #ifdef X509_R_INVALID_DIRECTORY - {"INVALID_DIRECTORY", ERR_LIB_X509, X509_R_INVALID_DIRECTORY}, - #else - {"INVALID_DIRECTORY", 11, 113}, - #endif - #ifdef X509_R_INVALID_FIELD_NAME - {"INVALID_FIELD_NAME", ERR_LIB_X509, X509_R_INVALID_FIELD_NAME}, - #else - {"INVALID_FIELD_NAME", 11, 119}, - #endif - #ifdef X509_R_INVALID_TRUST - {"INVALID_TRUST", ERR_LIB_X509, X509_R_INVALID_TRUST}, - #else - {"INVALID_TRUST", 11, 123}, - #endif - #ifdef X509_R_ISSUER_MISMATCH - {"ISSUER_MISMATCH", ERR_LIB_X509, X509_R_ISSUER_MISMATCH}, - #else - {"ISSUER_MISMATCH", 11, 129}, - #endif - #ifdef X509_R_KEY_TYPE_MISMATCH - {"KEY_TYPE_MISMATCH", ERR_LIB_X509, X509_R_KEY_TYPE_MISMATCH}, - #else - {"KEY_TYPE_MISMATCH", 11, 115}, - #endif - #ifdef X509_R_KEY_VALUES_MISMATCH - {"KEY_VALUES_MISMATCH", ERR_LIB_X509, X509_R_KEY_VALUES_MISMATCH}, - #else - 
{"KEY_VALUES_MISMATCH", 11, 116}, - #endif - #ifdef X509_R_LOADING_CERT_DIR - {"LOADING_CERT_DIR", ERR_LIB_X509, X509_R_LOADING_CERT_DIR}, - #else - {"LOADING_CERT_DIR", 11, 103}, - #endif - #ifdef X509_R_LOADING_DEFAULTS - {"LOADING_DEFAULTS", ERR_LIB_X509, X509_R_LOADING_DEFAULTS}, - #else - {"LOADING_DEFAULTS", 11, 104}, - #endif - #ifdef X509_R_METHOD_NOT_SUPPORTED - {"METHOD_NOT_SUPPORTED", ERR_LIB_X509, X509_R_METHOD_NOT_SUPPORTED}, - #else - {"METHOD_NOT_SUPPORTED", 11, 124}, - #endif - #ifdef X509_R_NAME_TOO_LONG - {"NAME_TOO_LONG", ERR_LIB_X509, X509_R_NAME_TOO_LONG}, - #else - {"NAME_TOO_LONG", 11, 134}, - #endif - #ifdef X509_R_NEWER_CRL_NOT_NEWER - {"NEWER_CRL_NOT_NEWER", ERR_LIB_X509, X509_R_NEWER_CRL_NOT_NEWER}, - #else - {"NEWER_CRL_NOT_NEWER", 11, 132}, - #endif - #ifdef X509_R_NO_CERTIFICATE_FOUND - {"NO_CERTIFICATE_FOUND", ERR_LIB_X509, X509_R_NO_CERTIFICATE_FOUND}, - #else - {"NO_CERTIFICATE_FOUND", 11, 135}, - #endif - #ifdef X509_R_NO_CERTIFICATE_OR_CRL_FOUND - {"NO_CERTIFICATE_OR_CRL_FOUND", ERR_LIB_X509, X509_R_NO_CERTIFICATE_OR_CRL_FOUND}, - #else - {"NO_CERTIFICATE_OR_CRL_FOUND", 11, 136}, - #endif - #ifdef X509_R_NO_CERT_SET_FOR_US_TO_VERIFY - {"NO_CERT_SET_FOR_US_TO_VERIFY", ERR_LIB_X509, X509_R_NO_CERT_SET_FOR_US_TO_VERIFY}, - #else - {"NO_CERT_SET_FOR_US_TO_VERIFY", 11, 105}, - #endif - #ifdef X509_R_NO_CRL_FOUND - {"NO_CRL_FOUND", ERR_LIB_X509, X509_R_NO_CRL_FOUND}, - #else - {"NO_CRL_FOUND", 11, 137}, - #endif - #ifdef X509_R_NO_CRL_NUMBER - {"NO_CRL_NUMBER", ERR_LIB_X509, X509_R_NO_CRL_NUMBER}, - #else - {"NO_CRL_NUMBER", 11, 130}, - #endif - #ifdef X509_R_PUBLIC_KEY_DECODE_ERROR - {"PUBLIC_KEY_DECODE_ERROR", ERR_LIB_X509, X509_R_PUBLIC_KEY_DECODE_ERROR}, - #else - {"PUBLIC_KEY_DECODE_ERROR", 11, 125}, - #endif - #ifdef X509_R_PUBLIC_KEY_ENCODE_ERROR - {"PUBLIC_KEY_ENCODE_ERROR", ERR_LIB_X509, X509_R_PUBLIC_KEY_ENCODE_ERROR}, - #else - {"PUBLIC_KEY_ENCODE_ERROR", 11, 126}, - #endif - #ifdef X509_R_SHOULD_RETRY - {"SHOULD_RETRY", ERR_LIB_X509, X509_R_SHOULD_RETRY}, - #else - {"SHOULD_RETRY", 11, 106}, - #endif - #ifdef X509_R_UNABLE_TO_FIND_PARAMETERS_IN_CHAIN - {"UNABLE_TO_FIND_PARAMETERS_IN_CHAIN", ERR_LIB_X509, X509_R_UNABLE_TO_FIND_PARAMETERS_IN_CHAIN}, - #else - {"UNABLE_TO_FIND_PARAMETERS_IN_CHAIN", 11, 107}, - #endif - #ifdef X509_R_UNABLE_TO_GET_CERTS_PUBLIC_KEY - {"UNABLE_TO_GET_CERTS_PUBLIC_KEY", ERR_LIB_X509, X509_R_UNABLE_TO_GET_CERTS_PUBLIC_KEY}, - #else - {"UNABLE_TO_GET_CERTS_PUBLIC_KEY", 11, 108}, - #endif - #ifdef X509_R_UNKNOWN_KEY_TYPE - {"UNKNOWN_KEY_TYPE", ERR_LIB_X509, X509_R_UNKNOWN_KEY_TYPE}, - #else - {"UNKNOWN_KEY_TYPE", 11, 117}, - #endif - #ifdef X509_R_UNKNOWN_NID - {"UNKNOWN_NID", ERR_LIB_X509, X509_R_UNKNOWN_NID}, - #else - {"UNKNOWN_NID", 11, 109}, - #endif - #ifdef X509_R_UNKNOWN_PURPOSE_ID - {"UNKNOWN_PURPOSE_ID", ERR_LIB_X509, X509_R_UNKNOWN_PURPOSE_ID}, - #else - {"UNKNOWN_PURPOSE_ID", 11, 121}, - #endif - #ifdef X509_R_UNKNOWN_TRUST_ID - {"UNKNOWN_TRUST_ID", ERR_LIB_X509, X509_R_UNKNOWN_TRUST_ID}, - #else - {"UNKNOWN_TRUST_ID", 11, 120}, - #endif - #ifdef X509_R_UNSUPPORTED_ALGORITHM - {"UNSUPPORTED_ALGORITHM", ERR_LIB_X509, X509_R_UNSUPPORTED_ALGORITHM}, - #else - {"UNSUPPORTED_ALGORITHM", 11, 111}, - #endif - #ifdef X509_R_WRONG_LOOKUP_TYPE - {"WRONG_LOOKUP_TYPE", ERR_LIB_X509, X509_R_WRONG_LOOKUP_TYPE}, - #else - {"WRONG_LOOKUP_TYPE", 11, 112}, - #endif - #ifdef X509_R_WRONG_TYPE - {"WRONG_TYPE", ERR_LIB_X509, X509_R_WRONG_TYPE}, - #else - {"WRONG_TYPE", 11, 122}, - #endif - { NULL } -}; diff --git a/Modules/_stat.c 
b/Modules/_stat.c index 80f8a92668976b..b43e79453f5b2f 100644 --- a/Modules/_stat.c +++ b/Modules/_stat.c @@ -202,6 +202,10 @@ typedef unsigned short mode_t; /* Names for file flags */ +#ifndef UF_SETTABLE +# define UF_SETTABLE 0x0000ffff +#endif + #ifndef UF_NODUMP # define UF_NODUMP 0x00000001 #endif @@ -226,10 +230,22 @@ typedef unsigned short mode_t; # define UF_COMPRESSED 0x00000020 #endif +#ifndef UF_TRACKED +# define UF_TRACKED 0x00000040 +#endif + +#ifndef UF_DATAVAULT +# define UF_DATAVAULT 0x00000080 +#endif + #ifndef UF_HIDDEN # define UF_HIDDEN 0x00008000 #endif +#ifndef SF_SETTABLE +# define SF_SETTABLE 0xffff0000 +#endif + #ifndef SF_ARCHIVED # define SF_ARCHIVED 0x00010000 #endif @@ -250,6 +266,30 @@ typedef unsigned short mode_t; # define SF_SNAPSHOT 0x00200000 #endif +#ifndef SF_FIRMLINK +# define SF_FIRMLINK 0x00800000 +#endif + +#ifndef SF_DATALESS +# define SF_DATALESS 0x40000000 +#endif + +#if defined(__APPLE__) && !defined(SF_SUPPORTED) + /* On older macOS versions the definition of SF_SUPPORTED is different + * from that on newer versions. + * + * Provide a consistent experience by redefining. + * + * None of bit bits set in the actual SF_SUPPORTED but not in this + * definition are defined on these versions of macOS. + */ +# undef SF_SETTABLE +# define SF_SUPPORTED 0x009f0000 +# define SF_SETTABLE 0x3fff0000 +# define SF_SYNTHETIC 0xc0000000 +#endif + + static mode_t _PyLong_AsMode_t(PyObject *op) { @@ -467,18 +507,29 @@ S_IWOTH: write by others\n\ S_IXOTH: execute by others\n\ \n" -"UF_NODUMP: do not dump file\n\ +"UF_SETTABLE: mask of owner changable flags\n\ +UF_NODUMP: do not dump file\n\ UF_IMMUTABLE: file may not be changed\n\ UF_APPEND: file may only be appended to\n\ UF_OPAQUE: directory is opaque when viewed through a union stack\n\ UF_NOUNLINK: file may not be renamed or deleted\n\ -UF_COMPRESSED: OS X: file is hfs-compressed\n\ -UF_HIDDEN: OS X: file should not be displayed\n\ +UF_COMPRESSED: macOS: file is hfs-compressed\n\ +UF_TRACKED: used for dealing with document IDs\n\ +UF_DATAVAULT: entitlement required for reading and writing\n\ +UF_HIDDEN: macOS: file should not be displayed\n\ +SF_SETTABLE: mask of super user changeable flags\n\ SF_ARCHIVED: file may be archived\n\ SF_IMMUTABLE: file may not be changed\n\ SF_APPEND: file may only be appended to\n\ +SF_RESTRICTED: entitlement required for writing\n\ SF_NOUNLINK: file may not be renamed or deleted\n\ SF_SNAPSHOT: file is a snapshot file\n\ +SF_FIRMLINK: file is a firmlink\n\ +SF_DATALESS: file is a dataless object\n\ +\n\ +On macOS:\n\ +SF_SUPPORTED: mask of super user supported flags\n\ +SF_SYNTHETIC: mask of read-only synthetic flags\n\ \n" "ST_MODE\n\ @@ -543,18 +594,32 @@ stat_exec(PyObject *module) ADD_INT_MACRO(module, S_IWOTH); ADD_INT_MACRO(module, S_IXOTH); + ADD_INT_MACRO(module, UF_SETTABLE); ADD_INT_MACRO(module, UF_NODUMP); ADD_INT_MACRO(module, UF_IMMUTABLE); ADD_INT_MACRO(module, UF_APPEND); ADD_INT_MACRO(module, UF_OPAQUE); ADD_INT_MACRO(module, UF_NOUNLINK); ADD_INT_MACRO(module, UF_COMPRESSED); + ADD_INT_MACRO(module, UF_TRACKED); + ADD_INT_MACRO(module, UF_DATAVAULT); ADD_INT_MACRO(module, UF_HIDDEN); + ADD_INT_MACRO(module, SF_SETTABLE); ADD_INT_MACRO(module, SF_ARCHIVED); ADD_INT_MACRO(module, SF_IMMUTABLE); ADD_INT_MACRO(module, SF_APPEND); ADD_INT_MACRO(module, SF_NOUNLINK); ADD_INT_MACRO(module, SF_SNAPSHOT); + ADD_INT_MACRO(module, SF_FIRMLINK); + ADD_INT_MACRO(module, SF_DATALESS); + +#ifdef SF_SUPPORTED + ADD_INT_MACRO(module, SF_SUPPORTED); +#endif +#ifdef SF_SYNTHETIC 
+ ADD_INT_MACRO(module, SF_SYNTHETIC); +#endif + const char* st_constants[] = { "ST_MODE", diff --git a/Modules/_suggestions.c b/Modules/_suggestions.c new file mode 100644 index 00000000000000..30b524d70c1211 --- /dev/null +++ b/Modules/_suggestions.c @@ -0,0 +1,63 @@ +#include "Python.h" +#include "pycore_pyerrors.h" +#include "clinic/_suggestions.c.h" + +/*[clinic input] +module _suggestions +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=e58d81fafad5637b]*/ + +/*[clinic input] +_suggestions._generate_suggestions + candidates: object + item: unicode + / +Returns the candidate in candidates that's closest to item +[clinic start generated code]*/ + +static PyObject * +_suggestions__generate_suggestions_impl(PyObject *module, + PyObject *candidates, PyObject *item) +/*[clinic end generated code: output=79be7b653ae5e7ca input=ba2a8dddc654e33a]*/ +{ + // Check if dir is a list + if (!PyList_Check(candidates)) { + PyErr_SetString(PyExc_TypeError, "candidates must be a list"); + return NULL; + } + + // Check if all elements in the list are Unicode + Py_ssize_t size = PyList_Size(candidates); + for (Py_ssize_t i = 0; i < size; ++i) { + PyObject *elem = PyList_GetItem(candidates, i); + if (!PyUnicode_Check(elem)) { + PyErr_SetString(PyExc_TypeError, "all elements in 'candidates' must be strings"); + return NULL; + } + } + + PyObject* result = _Py_CalculateSuggestions(candidates, item); + if (!result && !PyErr_Occurred()) { + Py_RETURN_NONE; + } + return result; +} + + +static PyMethodDef module_methods[] = { + _SUGGESTIONS__GENERATE_SUGGESTIONS_METHODDEF + {NULL, NULL, 0, NULL} // Sentinel +}; + +static struct PyModuleDef suggestions_module = { + PyModuleDef_HEAD_INIT, + "_suggestions", + NULL, + -1, + module_methods +}; + +PyMODINIT_FUNC PyInit__suggestions(void) { + return PyModule_Create(&suggestions_module); +} + diff --git a/Modules/_testcapi/gc.c b/Modules/_testcapi/gc.c index 829200ad12cd3c..f4feaaafbdc6cc 100644 --- a/Modules/_testcapi/gc.c +++ b/Modules/_testcapi/gc.c @@ -126,9 +126,7 @@ slot_tp_del(PyObject *self) * never happened. 
*/ { - Py_ssize_t refcnt = Py_REFCNT(self); - _Py_NewReferenceNoTotal(self); - Py_SET_REFCNT(self, refcnt); + _Py_ResurrectReference(self); } assert(!PyType_IS_GC(Py_TYPE(self)) || PyObject_GC_IsTracked(self)); } diff --git a/Modules/_testcapi/getargs.c b/Modules/_testcapi/getargs.c index 33e8af7d7bbb39..0d61d8c8969f82 100644 --- a/Modules/_testcapi/getargs.c +++ b/Modules/_testcapi/getargs.c @@ -56,9 +56,9 @@ parse_tuple_and_keywords(PyObject *self, PyObject *args) keywords[i] = PyBytes_AS_STRING(o); } else { - PyErr_Format(PyExc_ValueError, + PyErr_SetString(PyExc_ValueError, "parse_tuple_and_keywords: " - "keywords must be str or bytes", i); + "keywords must be str or bytes"); goto exit; } } diff --git a/Modules/_testcapi/vectorcall_limited.c b/Modules/_testcapi/vectorcall_limited.c index d7b8d33b7f7162..d7070d37bb9e9b 100644 --- a/Modules/_testcapi/vectorcall_limited.c +++ b/Modules/_testcapi/vectorcall_limited.c @@ -195,6 +195,6 @@ _PyTestCapi_Init_VectorcallLimited(PyObject *m) { if (PyModule_AddType(m, (PyTypeObject *)LimitedVectorCallClass) < 0) { return -1; } - + Py_DECREF(LimitedVectorCallClass); return 0; } diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index 6762c611fb12a2..6def680190b1a6 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -49,7 +49,7 @@ get_testcapi_state(PyObject *module) static PyObject * get_testerror(PyObject *self) { - testcapistate_t *state = get_testcapi_state((PyObject *)Py_TYPE(self)); + testcapistate_t *state = get_testcapi_state(self); return state->error; } @@ -112,12 +112,12 @@ test_sizeof_c_types(PyObject *self, PyObject *Py_UNUSED(ignored)) return (PyObject*)NULL; \ } #define IS_SIGNED(TYPE) (((TYPE)-1) < (TYPE)0) -#define CHECK_SIGNNESS(TYPE, SIGNED) \ - if (IS_SIGNED(TYPE) != SIGNED) { \ - PyErr_Format(get_testerror(self), \ - "%s signness is, instead of %i", \ - #TYPE, IS_SIGNED(TYPE), SIGNED); \ - return (PyObject*)NULL; \ +#define CHECK_SIGNNESS(TYPE, SIGNED) \ + if (IS_SIGNED(TYPE) != SIGNED) { \ + PyErr_Format(get_testerror(self), \ + "%s signness is %i, instead of %i", \ + #TYPE, IS_SIGNED(TYPE), SIGNED); \ + return (PyObject*)NULL; \ } /* integer types */ @@ -2409,6 +2409,32 @@ type_get_version(PyObject *self, PyObject *type) return res; } +static PyObject * +type_modified(PyObject *self, PyObject *type) +{ + if (!PyType_Check(type)) { + PyErr_SetString(PyExc_TypeError, "argument must be a type"); + return NULL; + } + PyType_Modified((PyTypeObject *)type); + Py_RETURN_NONE; +} + +// Circumvents standard version assignment machinery - use with caution and only on +// short-lived heap types +static PyObject * +type_assign_specific_version_unsafe(PyObject *self, PyObject *args) +{ + PyTypeObject *type; + unsigned int version; + if (!PyArg_ParseTuple(args, "Oi:type_assign_specific_version_unsafe", &type, &version)) { + return NULL; + } + assert(!PyType_HasFeature(type, Py_TPFLAGS_IMMUTABLETYPE)); + type->tp_version_tag = version; + type->tp_flags |= Py_TPFLAGS_VALID_VERSION_TAG; + Py_RETURN_NONE; +} static PyObject * type_assign_version(PyObject *self, PyObject *type) @@ -3342,6 +3368,9 @@ static PyMethodDef TestMethods[] = { {"test_py_is_macros", test_py_is_macros, METH_NOARGS}, {"test_py_is_funcs", test_py_is_funcs, METH_NOARGS}, {"type_get_version", type_get_version, METH_O, PyDoc_STR("type->tp_version_tag")}, + {"type_modified", type_modified, METH_O, PyDoc_STR("PyType_Modified")}, + {"type_assign_specific_version_unsafe", type_assign_specific_version_unsafe, METH_VARARGS, + 
PyDoc_STR("forcefully assign type->tp_version_tag")}, {"type_assign_version", type_assign_version, METH_O, PyDoc_STR("PyUnstable_Type_AssignVersionTag")}, {"type_get_tp_bases", type_get_tp_bases, METH_O}, {"type_get_tp_mro", type_get_tp_mro, METH_O}, @@ -3947,7 +3976,6 @@ PyInit__testcapi(void) testcapistate_t *state = get_testcapi_state(m); state->error = PyErr_NewException("_testcapi.error", NULL, NULL); - Py_INCREF(state->error); PyModule_AddObject(m, "error", state->error); if (PyType_Ready(&ContainerNoGC_type) < 0) { diff --git a/Modules/_winapi.c b/Modules/_winapi.c index 8c48b6f3ec6ef6..26302b559817b3 100644 --- a/Modules/_winapi.c +++ b/Modules/_winapi.c @@ -532,7 +532,12 @@ _winapi_CreateJunction_impl(PyObject *module, LPCWSTR src_path, { /* Privilege adjustment */ HANDLE token = NULL; - TOKEN_PRIVILEGES tp; + struct { + TOKEN_PRIVILEGES base; + /* overallocate by a few array elements */ + LUID_AND_ATTRIBUTES privs[4]; + } tp, previousTp; + int previousTpSize = 0; /* Reparse data buffer */ const USHORT prefix_len = 4; @@ -556,17 +561,21 @@ _winapi_CreateJunction_impl(PyObject *module, LPCWSTR src_path, /* Adjust privileges to allow rewriting directory entry as a junction point. */ - if (!OpenProcessToken(GetCurrentProcess(), TOKEN_ADJUST_PRIVILEGES, &token)) + if (!OpenProcessToken(GetCurrentProcess(), + TOKEN_ADJUST_PRIVILEGES | TOKEN_QUERY, &token)) { goto cleanup; + } - if (!LookupPrivilegeValue(NULL, SE_RESTORE_NAME, &tp.Privileges[0].Luid)) + if (!LookupPrivilegeValue(NULL, SE_RESTORE_NAME, &tp.base.Privileges[0].Luid)) { goto cleanup; + } - tp.PrivilegeCount = 1; - tp.Privileges[0].Attributes = SE_PRIVILEGE_ENABLED; - if (!AdjustTokenPrivileges(token, FALSE, &tp, sizeof(TOKEN_PRIVILEGES), - NULL, NULL)) + tp.base.PrivilegeCount = 1; + tp.base.Privileges[0].Attributes = SE_PRIVILEGE_ENABLED; + if (!AdjustTokenPrivileges(token, FALSE, &tp.base, sizeof(previousTp), + &previousTp.base, &previousTpSize)) { goto cleanup; + } if (GetFileAttributesW(src_path) == INVALID_FILE_ATTRIBUTES) goto cleanup; @@ -647,6 +656,11 @@ _winapi_CreateJunction_impl(PyObject *module, LPCWSTR src_path, cleanup: ret = GetLastError(); + if (previousTpSize) { + AdjustTokenPrivileges(token, FALSE, &previousTp.base, previousTpSize, + NULL, NULL); + } + if (token != NULL) CloseHandle(token); if (junction != NULL) @@ -774,12 +788,157 @@ gethandle(PyObject* obj, const char* name) return ret; } +static PyObject * +sortenvironmentkey(PyObject *module, PyObject *item) +{ + return _winapi_LCMapStringEx_impl(NULL, LOCALE_NAME_INVARIANT, + LCMAP_UPPERCASE, item); +} + +static PyMethodDef sortenvironmentkey_def = { + "sortenvironmentkey", _PyCFunction_CAST(sortenvironmentkey), METH_O, "", +}; + +static int +sort_environment_keys(PyObject *keys) +{ + PyObject *keyfunc = PyCFunction_New(&sortenvironmentkey_def, NULL); + if (keyfunc == NULL) { + return -1; + } + PyObject *kwnames = Py_BuildValue("(s)", "key"); + if (kwnames == NULL) { + Py_DECREF(keyfunc); + return -1; + } + PyObject *args[] = { keys, keyfunc }; + PyObject *ret = PyObject_VectorcallMethod(&_Py_ID(sort), args, 1, kwnames); + Py_DECREF(keyfunc); + Py_DECREF(kwnames); + if (ret == NULL) { + return -1; + } + Py_DECREF(ret); + + return 0; +} + +static int +compare_string_ordinal(PyObject *str1, PyObject *str2, int *result) +{ + wchar_t *s1 = PyUnicode_AsWideCharString(str1, NULL); + if (s1 == NULL) { + return -1; + } + wchar_t *s2 = PyUnicode_AsWideCharString(str2, NULL); + if (s2 == NULL) { + PyMem_Free(s1); + return -1; + } + *result = 
CompareStringOrdinal(s1, -1, s2, -1, TRUE); + PyMem_Free(s1); + PyMem_Free(s2); + return 0; +} + +static PyObject * +dedup_environment_keys(PyObject *keys) +{ + PyObject *result = PyList_New(0); + if (result == NULL) { + return NULL; + } + + // Iterate over the pre-ordered keys, check whether the current key is equal + // to the next key (ignoring case), if different, insert the current value + // into the result list. If they are equal, do nothing because we always + // want to keep the last inserted one. + for (Py_ssize_t i = 0; i < PyList_GET_SIZE(keys); i++) { + PyObject *key = PyList_GET_ITEM(keys, i); + + // The last key will always be kept. + if (i + 1 == PyList_GET_SIZE(keys)) { + if (PyList_Append(result, key) < 0) { + Py_DECREF(result); + return NULL; + } + continue; + } + + PyObject *next_key = PyList_GET_ITEM(keys, i + 1); + int compare_result; + if (compare_string_ordinal(key, next_key, &compare_result) < 0) { + Py_DECREF(result); + return NULL; + } + if (compare_result == CSTR_EQUAL) { + continue; + } + if (PyList_Append(result, key) < 0) { + Py_DECREF(result); + return NULL; + } + } + + return result; +} + +static PyObject * +normalize_environment(PyObject *environment) +{ + PyObject *keys = PyMapping_Keys(environment); + if (keys == NULL) { + return NULL; + } + + if (sort_environment_keys(keys) < 0) { + Py_DECREF(keys); + return NULL; + } + + PyObject *normalized_keys = dedup_environment_keys(keys); + Py_DECREF(keys); + if (normalized_keys == NULL) { + return NULL; + } + + PyObject *result = PyDict_New(); + if (result == NULL) { + Py_DECREF(normalized_keys); + return NULL; + } + + for (int i = 0; i < PyList_GET_SIZE(normalized_keys); i++) { + PyObject *key = PyList_GET_ITEM(normalized_keys, i); + PyObject *value = PyObject_GetItem(environment, key); + if (value == NULL) { + Py_DECREF(normalized_keys); + Py_DECREF(result); + return NULL; + } + + int ret = PyObject_SetItem(result, key, value); + Py_DECREF(value); + if (ret < 0) { + Py_DECREF(normalized_keys); + Py_DECREF(result); + return NULL; + } + } + + Py_DECREF(normalized_keys); + + return result; +} + static wchar_t * getenvironment(PyObject* environment) { Py_ssize_t i, envsize, totalsize; wchar_t *buffer = NULL, *p, *end; - PyObject *keys, *values; + PyObject *normalized_environment = NULL; + PyObject *keys = NULL; + PyObject *values = NULL; /* convert environment dictionary to windows environment string */ if (! 
PyMapping_Check(environment)) { @@ -788,11 +947,16 @@ getenvironment(PyObject* environment) return NULL; } - keys = PyMapping_Keys(environment); - if (!keys) { + normalized_environment = normalize_environment(environment); + if (normalize_environment == NULL) { return NULL; } - values = PyMapping_Values(environment); + + keys = PyMapping_Keys(normalized_environment); + if (!keys) { + goto error; + } + values = PyMapping_Values(normalized_environment); if (!values) { goto error; } @@ -884,6 +1048,7 @@ getenvironment(PyObject* environment) cleanup: error: + Py_XDECREF(normalized_environment); Py_XDECREF(keys); Py_XDECREF(values); return buffer; diff --git a/Modules/_zoneinfo.c b/Modules/_zoneinfo.c index 77644c3155bc33..fcd4af64df0be9 100644 --- a/Modules/_zoneinfo.c +++ b/Modules/_zoneinfo.c @@ -853,28 +853,19 @@ load_timedelta(zoneinfo_state *state, long seconds) if (pyoffset == NULL) { return NULL; } - rv = PyDict_GetItemWithError(state->TIMEDELTA_CACHE, pyoffset); - if (rv == NULL) { - if (PyErr_Occurred()) { - goto error; - } + if (PyDict_GetItemRef(state->TIMEDELTA_CACHE, pyoffset, &rv) == 0) { PyObject *tmp = PyDateTimeAPI->Delta_FromDelta( 0, seconds, 0, 1, PyDateTimeAPI->DeltaType); - if (tmp == NULL) { - goto error; + if (tmp != NULL) { + rv = PyDict_SetDefault(state->TIMEDELTA_CACHE, pyoffset, tmp); + Py_XINCREF(rv); + Py_DECREF(tmp); } - - rv = PyDict_SetDefault(state->TIMEDELTA_CACHE, pyoffset, tmp); - Py_DECREF(tmp); } - Py_XINCREF(rv); Py_DECREF(pyoffset); return rv; -error: - Py_DECREF(pyoffset); - return NULL; } /* Constructor for _ttinfo object - this starts by initializing the _ttinfo diff --git a/Modules/clinic/_pickle.c.h b/Modules/clinic/_pickle.c.h index 932ace190e6059..fb086925e3941d 100644 --- a/Modules/clinic/_pickle.c.h +++ b/Modules/clinic/_pickle.c.h @@ -266,6 +266,49 @@ _pickle_PicklerMemoProxy___reduce__(PicklerMemoProxyObject *self, PyObject *Py_U return _pickle_PicklerMemoProxy___reduce___impl(self); } +PyDoc_STRVAR(_pickle_Unpickler_persistent_load__doc__, +"persistent_load($self, pid, /)\n" +"--\n" +"\n"); + +#define _PICKLE_UNPICKLER_PERSISTENT_LOAD_METHODDEF \ + {"persistent_load", _PyCFunction_CAST(_pickle_Unpickler_persistent_load), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _pickle_Unpickler_persistent_load__doc__}, + +static PyObject * +_pickle_Unpickler_persistent_load_impl(UnpicklerObject *self, + PyTypeObject *cls, PyObject *pid); + +static PyObject * +_pickle_Unpickler_persistent_load(UnpicklerObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + # define KWTUPLE (PyObject *)&_Py_SINGLETON(tuple_empty) + #else + # define KWTUPLE NULL + #endif + + static const char * const _keywords[] = {"", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "persistent_load", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; + PyObject *pid; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + pid = args[0]; + return_value = _pickle_Unpickler_persistent_load_impl(self, cls, pid); + +exit: + return return_value; +} + PyDoc_STRVAR(_pickle_Unpickler_load__doc__, "load($self, /)\n" "--\n" @@ -1034,4 +1077,4 @@ _pickle_loads(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObjec exit: return return_value; } -/*[clinic end generated code: output=7f0564b5fb5410a8 input=a9049054013a1b77]*/ +/*[clinic end 
generated code: output=ebe78653233827a6 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_suggestions.c.h b/Modules/clinic/_suggestions.c.h new file mode 100644 index 00000000000000..51484b13d5af89 --- /dev/null +++ b/Modules/clinic/_suggestions.c.h @@ -0,0 +1,41 @@ +/*[clinic input] +preserve +[clinic start generated code]*/ + +#include "pycore_modsupport.h" // _PyArg_CheckPositional() + +PyDoc_STRVAR(_suggestions__generate_suggestions__doc__, +"_generate_suggestions($module, candidates, item, /)\n" +"--\n" +"\n" +"Returns the candidate in candidates that\'s closest to item"); + +#define _SUGGESTIONS__GENERATE_SUGGESTIONS_METHODDEF \ + {"_generate_suggestions", _PyCFunction_CAST(_suggestions__generate_suggestions), METH_FASTCALL, _suggestions__generate_suggestions__doc__}, + +static PyObject * +_suggestions__generate_suggestions_impl(PyObject *module, + PyObject *candidates, PyObject *item); + +static PyObject * +_suggestions__generate_suggestions(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + PyObject *candidates; + PyObject *item; + + if (!_PyArg_CheckPositional("_generate_suggestions", nargs, 2, 2)) { + goto exit; + } + candidates = args[0]; + if (!PyUnicode_Check(args[1])) { + _PyArg_BadArgument("_generate_suggestions", "argument 2", "str", args[1]); + goto exit; + } + item = args[1]; + return_value = _suggestions__generate_suggestions_impl(module, candidates, item); + +exit: + return return_value; +} +/*[clinic end generated code: output=1d8e963cdae30b13 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/gcmodule.c.h b/Modules/clinic/gcmodule.c.h index ad4469350447cb..d50d170589a2cd 100644 --- a/Modules/clinic/gcmodule.c.h +++ b/Modules/clinic/gcmodule.c.h @@ -214,6 +214,58 @@ gc_get_debug(PyObject *module, PyObject *Py_UNUSED(ignored)) return return_value; } +PyDoc_STRVAR(gc_set_threshold__doc__, +"set_threshold(threshold0, [threshold1, [threshold2]])\n" +"Set the collection thresholds (the collection frequency).\n" +"\n" +"Setting \'threshold0\' to zero disables collection."); + +#define GC_SET_THRESHOLD_METHODDEF \ + {"set_threshold", (PyCFunction)gc_set_threshold, METH_VARARGS, gc_set_threshold__doc__}, + +static PyObject * +gc_set_threshold_impl(PyObject *module, int threshold0, int group_right_1, + int threshold1, int group_right_2, int threshold2); + +static PyObject * +gc_set_threshold(PyObject *module, PyObject *args) +{ + PyObject *return_value = NULL; + int threshold0; + int group_right_1 = 0; + int threshold1 = 0; + int group_right_2 = 0; + int threshold2 = 0; + + switch (PyTuple_GET_SIZE(args)) { + case 1: + if (!PyArg_ParseTuple(args, "i:set_threshold", &threshold0)) { + goto exit; + } + break; + case 2: + if (!PyArg_ParseTuple(args, "ii:set_threshold", &threshold0, &threshold1)) { + goto exit; + } + group_right_1 = 1; + break; + case 3: + if (!PyArg_ParseTuple(args, "iii:set_threshold", &threshold0, &threshold1, &threshold2)) { + goto exit; + } + group_right_1 = 1; + group_right_2 = 1; + break; + default: + PyErr_SetString(PyExc_TypeError, "gc.set_threshold requires 1 to 3 arguments"); + goto exit; + } + return_value = gc_set_threshold_impl(module, threshold0, group_right_1, threshold1, group_right_2, threshold2); + +exit: + return return_value; +} + PyDoc_STRVAR(gc_get_threshold__doc__, "get_threshold($module, /)\n" "--\n" @@ -250,6 +302,76 @@ gc_get_count(PyObject *module, PyObject *Py_UNUSED(ignored)) return gc_get_count_impl(module); } +PyDoc_STRVAR(gc_get_referrers__doc__, +"get_referrers($module, /, 
*objs)\n" +"--\n" +"\n" +"Return the list of objects that directly refer to any of \'objs\'."); + +#define GC_GET_REFERRERS_METHODDEF \ + {"get_referrers", _PyCFunction_CAST(gc_get_referrers), METH_FASTCALL, gc_get_referrers__doc__}, + +static PyObject * +gc_get_referrers_impl(PyObject *module, PyObject *args); + +static PyObject * +gc_get_referrers(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + PyObject *__clinic_args = NULL; + + if (!_PyArg_CheckPositional("get_referrers", nargs, 0, PY_SSIZE_T_MAX)) { + goto exit; + } + __clinic_args = PyTuple_New(nargs - 0); + if (!__clinic_args) { + goto exit; + } + for (Py_ssize_t i = 0; i < nargs - 0; ++i) { + PyTuple_SET_ITEM(__clinic_args, i, Py_NewRef(args[0 + i])); + } + return_value = gc_get_referrers_impl(module, __clinic_args); + +exit: + Py_XDECREF(__clinic_args); + return return_value; +} + +PyDoc_STRVAR(gc_get_referents__doc__, +"get_referents($module, /, *objs)\n" +"--\n" +"\n" +"Return the list of objects that are directly referred to by \'objs\'."); + +#define GC_GET_REFERENTS_METHODDEF \ + {"get_referents", _PyCFunction_CAST(gc_get_referents), METH_FASTCALL, gc_get_referents__doc__}, + +static PyObject * +gc_get_referents_impl(PyObject *module, PyObject *args); + +static PyObject * +gc_get_referents(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + PyObject *__clinic_args = NULL; + + if (!_PyArg_CheckPositional("get_referents", nargs, 0, PY_SSIZE_T_MAX)) { + goto exit; + } + __clinic_args = PyTuple_New(nargs - 0); + if (!__clinic_args) { + goto exit; + } + for (Py_ssize_t i = 0; i < nargs - 0; ++i) { + PyTuple_SET_ITEM(__clinic_args, i, Py_NewRef(args[0 + i])); + } + return_value = gc_get_referents_impl(module, __clinic_args); + +exit: + Py_XDECREF(__clinic_args); + return return_value; +} + PyDoc_STRVAR(gc_get_objects__doc__, "get_objects($module, /, generation=None)\n" "--\n" @@ -425,4 +547,4 @@ gc_get_freeze_count(PyObject *module, PyObject *Py_UNUSED(ignored)) exit: return return_value; } -/*[clinic end generated code: output=5c345e7b4ce6085a input=a9049054013a1b77]*/ +/*[clinic end generated code: output=258f92524c1141fc input=a9049054013a1b77]*/ diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c index 2d1f381e622226..ffddef34ecce7a 100644 --- a/Modules/gcmodule.c +++ b/Modules/gcmodule.c @@ -1,1530 +1,27 @@ /* + * Python interface to the garbage collector. + * + * See Python/gc.c for the implementation of the garbage collector. + */ - Reference Cycle Garbage Collection - ================================== - - Neil Schemenauer - - Based on a post on the python-dev list. Ideas from Guido van Rossum, - Eric Tiedemann, and various others. - - http://www.arctrix.com/nas/python/gc/ - - The following mailing list threads provide a historical perspective on - the design of this module. Note that a fair amount of refinement has - occurred since those discussions. - - http://mail.python.org/pipermail/python-dev/2000-March/002385.html - http://mail.python.org/pipermail/python-dev/2000-March/002434.html - http://mail.python.org/pipermail/python-dev/2000-March/002497.html - - For a highlevel view of the collection process, read the collect - function. 
- -*/ - -#include "Python.h" -#include "pycore_ceval.h" // _Py_set_eval_breaker_bit() -#include "pycore_context.h" -#include "pycore_dict.h" // _PyDict_MaybeUntrack() -#include "pycore_initconfig.h" -#include "pycore_interp.h" // PyInterpreterState.gc -#include "pycore_object.h" -#include "pycore_pyerrors.h" -#include "pycore_pystate.h" // _PyThreadState_GET() -#include "pycore_weakref.h" // _PyWeakref_ClearRef() -#include "pydtrace.h" - -typedef struct _gc_runtime_state GCState; - -/*[clinic input] -module gc -[clinic start generated code]*/ -/*[clinic end generated code: output=da39a3ee5e6b4b0d input=b5c9690ecc842d79]*/ - - -#ifdef Py_DEBUG -# define GC_DEBUG -#endif - -#define GC_NEXT _PyGCHead_NEXT -#define GC_PREV _PyGCHead_PREV - -// update_refs() set this bit for all objects in current generation. -// subtract_refs() and move_unreachable() uses this to distinguish -// visited object is in GCing or not. -// -// move_unreachable() removes this flag from reachable objects. -// Only unreachable objects have this flag. -// -// No objects in interpreter have this flag after GC ends. -#define PREV_MASK_COLLECTING _PyGC_PREV_MASK_COLLECTING - -// Lowest bit of _gc_next is used for UNREACHABLE flag. -// -// This flag represents the object is in unreachable list in move_unreachable() -// -// Although this flag is used only in move_unreachable(), move_unreachable() -// doesn't clear this flag to skip unnecessary iteration. -// move_legacy_finalizers() removes this flag instead. -// Between them, unreachable list is not normal list and we can not use -// most gc_list_* functions for it. -#define NEXT_MASK_UNREACHABLE (1) - -#define AS_GC(op) _Py_AS_GC(op) -#define FROM_GC(gc) _Py_FROM_GC(gc) - -// Automatically choose the generation that needs collecting. 
-#define GENERATION_AUTO (-1) - -typedef enum { - // GC was triggered by heap allocation - _Py_GC_REASON_HEAP, - - // GC was called during shutdown - _Py_GC_REASON_SHUTDOWN, - - // GC was called by gc.collect() or PyGC_Collect() - _Py_GC_REASON_MANUAL -} _PyGC_Reason; - - -static inline int -gc_is_collecting(PyGC_Head *g) -{ - return (g->_gc_prev & PREV_MASK_COLLECTING) != 0; -} - -static inline void -gc_clear_collecting(PyGC_Head *g) -{ - g->_gc_prev &= ~PREV_MASK_COLLECTING; -} - -static inline Py_ssize_t -gc_get_refs(PyGC_Head *g) -{ - return (Py_ssize_t)(g->_gc_prev >> _PyGC_PREV_SHIFT); -} - -static inline void -gc_set_refs(PyGC_Head *g, Py_ssize_t refs) -{ - g->_gc_prev = (g->_gc_prev & ~_PyGC_PREV_MASK) - | ((uintptr_t)(refs) << _PyGC_PREV_SHIFT); -} - -static inline void -gc_reset_refs(PyGC_Head *g, Py_ssize_t refs) -{ - g->_gc_prev = (g->_gc_prev & _PyGC_PREV_MASK_FINALIZED) - | PREV_MASK_COLLECTING - | ((uintptr_t)(refs) << _PyGC_PREV_SHIFT); -} - -static inline void -gc_decref(PyGC_Head *g) -{ - _PyObject_ASSERT_WITH_MSG(FROM_GC(g), - gc_get_refs(g) > 0, - "refcount is too small"); - g->_gc_prev -= 1 << _PyGC_PREV_SHIFT; -} - -/* set for debugging information */ -#define DEBUG_STATS (1<<0) /* print collection statistics */ -#define DEBUG_COLLECTABLE (1<<1) /* print collectable objects */ -#define DEBUG_UNCOLLECTABLE (1<<2) /* print uncollectable objects */ -#define DEBUG_SAVEALL (1<<5) /* save all garbage in gc.garbage */ -#define DEBUG_LEAK DEBUG_COLLECTABLE | \ - DEBUG_UNCOLLECTABLE | \ - DEBUG_SAVEALL - -#define GEN_HEAD(gcstate, n) (&(gcstate)->generations[n].head) - - -static GCState * -get_gc_state(void) -{ - PyInterpreterState *interp = _PyInterpreterState_GET(); - return &interp->gc; -} - - -void -_PyGC_InitState(GCState *gcstate) -{ -#define INIT_HEAD(GEN) \ - do { \ - GEN.head._gc_next = (uintptr_t)&GEN.head; \ - GEN.head._gc_prev = (uintptr_t)&GEN.head; \ - } while (0) - - for (int i = 0; i < NUM_GENERATIONS; i++) { - assert(gcstate->generations[i].count == 0); - INIT_HEAD(gcstate->generations[i]); - }; - gcstate->generation0 = GEN_HEAD(gcstate, 0); - INIT_HEAD(gcstate->permanent_generation); - -#undef INIT_HEAD -} - - -PyStatus -_PyGC_Init(PyInterpreterState *interp) -{ - GCState *gcstate = &interp->gc; - - gcstate->garbage = PyList_New(0); - if (gcstate->garbage == NULL) { - return _PyStatus_NO_MEMORY(); - } - - gcstate->callbacks = PyList_New(0); - if (gcstate->callbacks == NULL) { - return _PyStatus_NO_MEMORY(); - } - - return _PyStatus_OK(); -} - - -/* -_gc_prev values ---------------- - -Between collections, _gc_prev is used for doubly linked list. - -Lowest two bits of _gc_prev are used for flags. -PREV_MASK_COLLECTING is used only while collecting and cleared before GC ends -or _PyObject_GC_UNTRACK() is called. - -During a collection, _gc_prev is temporary used for gc_refs, and the gc list -is singly linked until _gc_prev is restored. - -gc_refs - At the start of a collection, update_refs() copies the true refcount - to gc_refs, for each object in the generation being collected. - subtract_refs() then adjusts gc_refs so that it equals the number of - times an object is referenced directly from outside the generation - being collected. - -PREV_MASK_COLLECTING - Objects in generation being collected are marked PREV_MASK_COLLECTING in - update_refs(). - - -_gc_next values ---------------- - -_gc_next takes these values: - -0 - The object is not tracked - -!= 0 - Pointer to the next object in the GC list. 
- Additionally, lowest bit is used temporary for - NEXT_MASK_UNREACHABLE flag described below. - -NEXT_MASK_UNREACHABLE - move_unreachable() then moves objects not reachable (whether directly or - indirectly) from outside the generation into an "unreachable" set and - set this flag. - - Objects that are found to be reachable have gc_refs set to 1. - When this flag is set for the reachable object, the object must be in - "unreachable" set. - The flag is unset and the object is moved back to "reachable" set. - - move_legacy_finalizers() will remove this flag from "unreachable" set. -*/ - -/*** list functions ***/ - -static inline void -gc_list_init(PyGC_Head *list) -{ - // List header must not have flags. - // We can assign pointer by simple cast. - list->_gc_prev = (uintptr_t)list; - list->_gc_next = (uintptr_t)list; -} - -static inline int -gc_list_is_empty(PyGC_Head *list) -{ - return (list->_gc_next == (uintptr_t)list); -} - -/* Append `node` to `list`. */ -static inline void -gc_list_append(PyGC_Head *node, PyGC_Head *list) -{ - PyGC_Head *last = (PyGC_Head *)list->_gc_prev; - - // last <-> node - _PyGCHead_SET_PREV(node, last); - _PyGCHead_SET_NEXT(last, node); - - // node <-> list - _PyGCHead_SET_NEXT(node, list); - list->_gc_prev = (uintptr_t)node; -} - -/* Remove `node` from the gc list it's currently in. */ -static inline void -gc_list_remove(PyGC_Head *node) -{ - PyGC_Head *prev = GC_PREV(node); - PyGC_Head *next = GC_NEXT(node); - - _PyGCHead_SET_NEXT(prev, next); - _PyGCHead_SET_PREV(next, prev); - - node->_gc_next = 0; /* object is not currently tracked */ -} - -/* Move `node` from the gc list it's currently in (which is not explicitly - * named here) to the end of `list`. This is semantically the same as - * gc_list_remove(node) followed by gc_list_append(node, list). - */ -static void -gc_list_move(PyGC_Head *node, PyGC_Head *list) -{ - /* Unlink from current list. */ - PyGC_Head *from_prev = GC_PREV(node); - PyGC_Head *from_next = GC_NEXT(node); - _PyGCHead_SET_NEXT(from_prev, from_next); - _PyGCHead_SET_PREV(from_next, from_prev); - - /* Relink at end of new list. */ - // list must not have flags. So we can skip macros. - PyGC_Head *to_prev = (PyGC_Head*)list->_gc_prev; - _PyGCHead_SET_PREV(node, to_prev); - _PyGCHead_SET_NEXT(to_prev, node); - list->_gc_prev = (uintptr_t)node; - _PyGCHead_SET_NEXT(node, list); -} - -/* append list `from` onto list `to`; `from` becomes an empty list */ -static void -gc_list_merge(PyGC_Head *from, PyGC_Head *to) -{ - assert(from != to); - if (!gc_list_is_empty(from)) { - PyGC_Head *to_tail = GC_PREV(to); - PyGC_Head *from_head = GC_NEXT(from); - PyGC_Head *from_tail = GC_PREV(from); - assert(from_head != from); - assert(from_tail != from); - - _PyGCHead_SET_NEXT(to_tail, from_head); - _PyGCHead_SET_PREV(from_head, to_tail); - - _PyGCHead_SET_NEXT(from_tail, to); - _PyGCHead_SET_PREV(to, from_tail); - } - gc_list_init(from); -} - -static Py_ssize_t -gc_list_size(PyGC_Head *list) -{ - PyGC_Head *gc; - Py_ssize_t n = 0; - for (gc = GC_NEXT(list); gc != list; gc = GC_NEXT(gc)) { - n++; - } - return n; -} - -/* Walk the list and mark all objects as non-collecting */ -static inline void -gc_list_clear_collecting(PyGC_Head *collectable) -{ - PyGC_Head *gc; - for (gc = GC_NEXT(collectable); gc != collectable; gc = GC_NEXT(gc)) { - gc_clear_collecting(gc); - } -} - -/* Append objects in a GC list to a Python list. 
- * Return 0 if all OK, < 0 if error (out of memory for list) - */ -static int -append_objects(PyObject *py_list, PyGC_Head *gc_list) -{ - PyGC_Head *gc; - for (gc = GC_NEXT(gc_list); gc != gc_list; gc = GC_NEXT(gc)) { - PyObject *op = FROM_GC(gc); - if (op != py_list) { - if (PyList_Append(py_list, op)) { - return -1; /* exception */ - } - } - } - return 0; -} - -// Constants for validate_list's flags argument. -enum flagstates {collecting_clear_unreachable_clear, - collecting_clear_unreachable_set, - collecting_set_unreachable_clear, - collecting_set_unreachable_set}; - -#ifdef GC_DEBUG -// validate_list checks list consistency. And it works as document -// describing when flags are expected to be set / unset. -// `head` must be a doubly-linked gc list, although it's fine (expected!) if -// the prev and next pointers are "polluted" with flags. -// What's checked: -// - The `head` pointers are not polluted. -// - The objects' PREV_MASK_COLLECTING and NEXT_MASK_UNREACHABLE flags are all -// `set or clear, as specified by the 'flags' argument. -// - The prev and next pointers are mutually consistent. -static void -validate_list(PyGC_Head *head, enum flagstates flags) -{ - assert((head->_gc_prev & PREV_MASK_COLLECTING) == 0); - assert((head->_gc_next & NEXT_MASK_UNREACHABLE) == 0); - uintptr_t prev_value = 0, next_value = 0; - switch (flags) { - case collecting_clear_unreachable_clear: - break; - case collecting_set_unreachable_clear: - prev_value = PREV_MASK_COLLECTING; - break; - case collecting_clear_unreachable_set: - next_value = NEXT_MASK_UNREACHABLE; - break; - case collecting_set_unreachable_set: - prev_value = PREV_MASK_COLLECTING; - next_value = NEXT_MASK_UNREACHABLE; - break; - default: - assert(! "bad internal flags argument"); - } - PyGC_Head *prev = head; - PyGC_Head *gc = GC_NEXT(head); - while (gc != head) { - PyGC_Head *trueprev = GC_PREV(gc); - PyGC_Head *truenext = (PyGC_Head *)(gc->_gc_next & ~NEXT_MASK_UNREACHABLE); - assert(truenext != NULL); - assert(trueprev == prev); - assert((gc->_gc_prev & PREV_MASK_COLLECTING) == prev_value); - assert((gc->_gc_next & NEXT_MASK_UNREACHABLE) == next_value); - prev = gc; - gc = truenext; - } - assert(prev == GC_PREV(head)); -} -#else -#define validate_list(x, y) do{}while(0) -#endif - -/*** end of list stuff ***/ - - -/* Set all gc_refs = ob_refcnt. After this, gc_refs is > 0 and - * PREV_MASK_COLLECTING bit is set for all objects in containers. - */ -static void -update_refs(PyGC_Head *containers) -{ - PyGC_Head *next; - PyGC_Head *gc = GC_NEXT(containers); - - while (gc != containers) { - next = GC_NEXT(gc); - /* Move any object that might have become immortal to the - * permanent generation as the reference count is not accurately - * reflecting the actual number of live references to this object - */ - if (_Py_IsImmortal(FROM_GC(gc))) { - gc_list_move(gc, &get_gc_state()->permanent_generation.head); - gc = next; - continue; - } - gc_reset_refs(gc, Py_REFCNT(FROM_GC(gc))); - /* Python's cyclic gc should never see an incoming refcount - * of 0: if something decref'ed to 0, it should have been - * deallocated immediately at that time. - * Possible cause (if the assert triggers): a tp_dealloc - * routine left a gc-aware object tracked during its teardown - * phase, and did something-- or allowed something to happen -- - * that called back into Python. gc can trigger then, and may - * see the still-tracked dying object. Before this assert - * was added, such mistakes went on to allow gc to try to - * delete the object again. 
In a debug build, that caused - * a mysterious segfault, when _Py_ForgetReference tried - * to remove the object from the doubly-linked list of all - * objects a second time. In a release build, an actual - * double deallocation occurred, which leads to corruption - * of the allocator's internal bookkeeping pointers. That's - * so serious that maybe this should be a release-build - * check instead of an assert? - */ - _PyObject_ASSERT(FROM_GC(gc), gc_get_refs(gc) != 0); - gc = next; - } -} - -/* A traversal callback for subtract_refs. */ -static int -visit_decref(PyObject *op, void *parent) -{ - OBJECT_STAT_INC(object_visits); - _PyObject_ASSERT(_PyObject_CAST(parent), !_PyObject_IsFreed(op)); - - if (_PyObject_IS_GC(op)) { - PyGC_Head *gc = AS_GC(op); - /* We're only interested in gc_refs for objects in the - * generation being collected, which can be recognized - * because only they have positive gc_refs. - */ - if (gc_is_collecting(gc)) { - gc_decref(gc); - } - } - return 0; -} - -/* Subtract internal references from gc_refs. After this, gc_refs is >= 0 - * for all objects in containers, and is GC_REACHABLE for all tracked gc - * objects not in containers. The ones with gc_refs > 0 are directly - * reachable from outside containers, and so can't be collected. - */ -static void -subtract_refs(PyGC_Head *containers) -{ - traverseproc traverse; - PyGC_Head *gc = GC_NEXT(containers); - for (; gc != containers; gc = GC_NEXT(gc)) { - PyObject *op = FROM_GC(gc); - traverse = Py_TYPE(op)->tp_traverse; - (void) traverse(op, - visit_decref, - op); - } -} - -/* A traversal callback for move_unreachable. */ -static int -visit_reachable(PyObject *op, void *arg) -{ - PyGC_Head *reachable = arg; - OBJECT_STAT_INC(object_visits); - if (!_PyObject_IS_GC(op)) { - return 0; - } - - PyGC_Head *gc = AS_GC(op); - const Py_ssize_t gc_refs = gc_get_refs(gc); - - // Ignore objects in other generation. - // This also skips objects "to the left" of the current position in - // move_unreachable's scan of the 'young' list - they've already been - // traversed, and no longer have the PREV_MASK_COLLECTING flag. - if (! gc_is_collecting(gc)) { - return 0; - } - // It would be a logic error elsewhere if the collecting flag were set on - // an untracked object. - assert(gc->_gc_next != 0); - - if (gc->_gc_next & NEXT_MASK_UNREACHABLE) { - /* This had gc_refs = 0 when move_unreachable got - * to it, but turns out it's reachable after all. - * Move it back to move_unreachable's 'young' list, - * and move_unreachable will eventually get to it - * again. - */ - // Manually unlink gc from unreachable list because the list functions - // don't work right in the presence of NEXT_MASK_UNREACHABLE flags. - PyGC_Head *prev = GC_PREV(gc); - PyGC_Head *next = (PyGC_Head*)(gc->_gc_next & ~NEXT_MASK_UNREACHABLE); - _PyObject_ASSERT(FROM_GC(prev), - prev->_gc_next & NEXT_MASK_UNREACHABLE); - _PyObject_ASSERT(FROM_GC(next), - next->_gc_next & NEXT_MASK_UNREACHABLE); - prev->_gc_next = gc->_gc_next; // copy NEXT_MASK_UNREACHABLE - _PyGCHead_SET_PREV(next, prev); - - gc_list_append(gc, reachable); - gc_set_refs(gc, 1); - } - else if (gc_refs == 0) { - /* This is in move_unreachable's 'young' list, but - * the traversal hasn't yet gotten to it. All - * we need to do is tell move_unreachable that it's - * reachable. - */ - gc_set_refs(gc, 1); - } - /* Else there's nothing to do. - * If gc_refs > 0, it must be in move_unreachable's 'young' - * list, and move_unreachable will eventually get to it. 
- */ - else { - _PyObject_ASSERT_WITH_MSG(op, gc_refs > 0, "refcount is too small"); - } - return 0; -} - -/* Move the unreachable objects from young to unreachable. After this, - * all objects in young don't have PREV_MASK_COLLECTING flag and - * unreachable have the flag. - * All objects in young after this are directly or indirectly reachable - * from outside the original young; and all objects in unreachable are - * not. - * - * This function restores _gc_prev pointer. young and unreachable are - * doubly linked list after this function. - * But _gc_next in unreachable list has NEXT_MASK_UNREACHABLE flag. - * So we can not gc_list_* functions for unreachable until we remove the flag. - */ -static void -move_unreachable(PyGC_Head *young, PyGC_Head *unreachable) -{ - // previous elem in the young list, used for restore gc_prev. - PyGC_Head *prev = young; - PyGC_Head *gc = GC_NEXT(young); - - /* Invariants: all objects "to the left" of us in young are reachable - * (directly or indirectly) from outside the young list as it was at entry. - * - * All other objects from the original young "to the left" of us are in - * unreachable now, and have NEXT_MASK_UNREACHABLE. All objects to the - * left of us in 'young' now have been scanned, and no objects here - * or to the right have been scanned yet. - */ - - while (gc != young) { - if (gc_get_refs(gc)) { - /* gc is definitely reachable from outside the - * original 'young'. Mark it as such, and traverse - * its pointers to find any other objects that may - * be directly reachable from it. Note that the - * call to tp_traverse may append objects to young, - * so we have to wait until it returns to determine - * the next object to visit. - */ - PyObject *op = FROM_GC(gc); - traverseproc traverse = Py_TYPE(op)->tp_traverse; - _PyObject_ASSERT_WITH_MSG(op, gc_get_refs(gc) > 0, - "refcount is too small"); - // NOTE: visit_reachable may change gc->_gc_next when - // young->_gc_prev == gc. Don't do gc = GC_NEXT(gc) before! - (void) traverse(op, - visit_reachable, - (void *)young); - // relink gc_prev to prev element. - _PyGCHead_SET_PREV(gc, prev); - // gc is not COLLECTING state after here. - gc_clear_collecting(gc); - prev = gc; - } - else { - /* This *may* be unreachable. To make progress, - * assume it is. gc isn't directly reachable from - * any object we've already traversed, but may be - * reachable from an object we haven't gotten to yet. - * visit_reachable will eventually move gc back into - * young if that's so, and we'll see it again. - */ - // Move gc to unreachable. - // No need to gc->next->prev = prev because it is single linked. - prev->_gc_next = gc->_gc_next; - - // We can't use gc_list_append() here because we use - // NEXT_MASK_UNREACHABLE here. - PyGC_Head *last = GC_PREV(unreachable); - // NOTE: Since all objects in unreachable set has - // NEXT_MASK_UNREACHABLE flag, we set it unconditionally. - // But this may pollute the unreachable list head's 'next' pointer - // too. That's semantically senseless but expedient here - the - // damage is repaired when this function ends. - last->_gc_next = (NEXT_MASK_UNREACHABLE | (uintptr_t)gc); - _PyGCHead_SET_PREV(gc, last); - gc->_gc_next = (NEXT_MASK_UNREACHABLE | (uintptr_t)unreachable); - unreachable->_gc_prev = (uintptr_t)gc; - } - gc = (PyGC_Head*)prev->_gc_next; - } - // young->_gc_prev must be last element remained in the list. 
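As an aside, the net effect of update_refs(), subtract_refs() and move_unreachable() is easy to observe from Python using nothing but the public gc module; the sketch below (Node is just an illustrative class name) shows a two-object cycle being found once its external references are gone:

    import gc

    class Node:
        pass

    gc.disable()                # keep automatic collection out of the way
    a, b = Node(), Node()
    a.peer, b.peer = b, a       # two-object reference cycle
    del a, b                    # external refcounts drop, the cycle remains
    found = gc.collect()        # the pass described above finds it unreachable
    print(found >= 2)           # True: both Node instances (their __dict__s count too)
    gc.enable()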
- young->_gc_prev = (uintptr_t)prev; - // don't let the pollution of the list head's next pointer leak - unreachable->_gc_next &= ~NEXT_MASK_UNREACHABLE; -} - -static void -untrack_tuples(PyGC_Head *head) -{ - PyGC_Head *next, *gc = GC_NEXT(head); - while (gc != head) { - PyObject *op = FROM_GC(gc); - next = GC_NEXT(gc); - if (PyTuple_CheckExact(op)) { - _PyTuple_MaybeUntrack(op); - } - gc = next; - } -} - -/* Try to untrack all currently tracked dictionaries */ -static void -untrack_dicts(PyGC_Head *head) -{ - PyGC_Head *next, *gc = GC_NEXT(head); - while (gc != head) { - PyObject *op = FROM_GC(gc); - next = GC_NEXT(gc); - if (PyDict_CheckExact(op)) { - _PyDict_MaybeUntrack(op); - } - gc = next; - } -} - -/* Return true if object has a pre-PEP 442 finalization method. */ -static int -has_legacy_finalizer(PyObject *op) -{ - return Py_TYPE(op)->tp_del != NULL; -} - -/* Move the objects in unreachable with tp_del slots into `finalizers`. - * - * This function also removes NEXT_MASK_UNREACHABLE flag - * from _gc_next in unreachable. - */ -static void -move_legacy_finalizers(PyGC_Head *unreachable, PyGC_Head *finalizers) -{ - PyGC_Head *gc, *next; - assert((unreachable->_gc_next & NEXT_MASK_UNREACHABLE) == 0); - - /* March over unreachable. Move objects with finalizers into - * `finalizers`. - */ - for (gc = GC_NEXT(unreachable); gc != unreachable; gc = next) { - PyObject *op = FROM_GC(gc); - - _PyObject_ASSERT(op, gc->_gc_next & NEXT_MASK_UNREACHABLE); - gc->_gc_next &= ~NEXT_MASK_UNREACHABLE; - next = (PyGC_Head*)gc->_gc_next; - - if (has_legacy_finalizer(op)) { - gc_clear_collecting(gc); - gc_list_move(gc, finalizers); - } - } -} - -static inline void -clear_unreachable_mask(PyGC_Head *unreachable) -{ - /* Check that the list head does not have the unreachable bit set */ - assert(((uintptr_t)unreachable & NEXT_MASK_UNREACHABLE) == 0); - - PyGC_Head *gc, *next; - assert((unreachable->_gc_next & NEXT_MASK_UNREACHABLE) == 0); - for (gc = GC_NEXT(unreachable); gc != unreachable; gc = next) { - _PyObject_ASSERT((PyObject*)FROM_GC(gc), gc->_gc_next & NEXT_MASK_UNREACHABLE); - gc->_gc_next &= ~NEXT_MASK_UNREACHABLE; - next = (PyGC_Head*)gc->_gc_next; - } - validate_list(unreachable, collecting_set_unreachable_clear); -} - -/* A traversal callback for move_legacy_finalizer_reachable. */ -static int -visit_move(PyObject *op, void *arg) -{ - PyGC_Head *tolist = arg; - OBJECT_STAT_INC(object_visits); - if (_PyObject_IS_GC(op)) { - PyGC_Head *gc = AS_GC(op); - if (gc_is_collecting(gc)) { - gc_list_move(gc, tolist); - gc_clear_collecting(gc); - } - } - return 0; -} - -/* Move objects that are reachable from finalizers, from the unreachable set - * into finalizers set. - */ -static void -move_legacy_finalizer_reachable(PyGC_Head *finalizers) -{ - traverseproc traverse; - PyGC_Head *gc = GC_NEXT(finalizers); - for (; gc != finalizers; gc = GC_NEXT(gc)) { - /* Note that the finalizers list may grow during this. */ - traverse = Py_TYPE(FROM_GC(gc))->tp_traverse; - (void) traverse(FROM_GC(gc), - visit_move, - (void *)finalizers); - } -} - -/* Clear all weakrefs to unreachable objects, and if such a weakref has a - * callback, invoke it if necessary. Note that it's possible for such - * weakrefs to be outside the unreachable set -- indeed, those are precisely - * the weakrefs whose callbacks must be invoked. See gc_weakref.txt for - * overview & some details. Some weakrefs with callbacks may be reclaimed - * directly by this routine; the number reclaimed is the return value. 
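For what it is worth, the tuple untracking performed by _PyTuple_MaybeUntrack() above is visible through gc.is_tracked(); a rough sketch, with results as seen on current CPython builds:

    import gc

    t = tuple([1, 2, 3])        # built at runtime, so it starts out tracked
    print(gc.is_tracked(t))     # True
    gc.collect()                # untrack_tuples() sees only atomic contents
    print(gc.is_tracked(t))     # False
    u = ([],)                   # holds a container, so it must stay tracked
    gc.collect()
    print(gc.is_tracked(u))     # True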
Other - * weakrefs with callbacks may be moved into the `old` generation. Objects - * moved into `old` have gc_refs set to GC_REACHABLE; the objects remaining in - * unreachable are left at GC_TENTATIVELY_UNREACHABLE. When this returns, - * no object in `unreachable` is weakly referenced anymore. - */ -static int -handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old) -{ - PyGC_Head *gc; - PyObject *op; /* generally FROM_GC(gc) */ - PyWeakReference *wr; /* generally a cast of op */ - PyGC_Head wrcb_to_call; /* weakrefs with callbacks to call */ - PyGC_Head *next; - int num_freed = 0; - - gc_list_init(&wrcb_to_call); - - /* Clear all weakrefs to the objects in unreachable. If such a weakref - * also has a callback, move it into `wrcb_to_call` if the callback - * needs to be invoked. Note that we cannot invoke any callbacks until - * all weakrefs to unreachable objects are cleared, lest the callback - * resurrect an unreachable object via a still-active weakref. We - * make another pass over wrcb_to_call, invoking callbacks, after this - * pass completes. - */ - for (gc = GC_NEXT(unreachable); gc != unreachable; gc = next) { - PyWeakReference **wrlist; - - op = FROM_GC(gc); - next = GC_NEXT(gc); - - if (PyWeakref_Check(op)) { - /* A weakref inside the unreachable set must be cleared. If we - * allow its callback to execute inside delete_garbage(), it - * could expose objects that have tp_clear already called on - * them. Or, it could resurrect unreachable objects. One way - * this can happen is if some container objects do not implement - * tp_traverse. Then, wr_object can be outside the unreachable - * set but can be deallocated as a result of breaking the - * reference cycle. If we don't clear the weakref, the callback - * will run and potentially cause a crash. See bpo-38006 for - * one example. - */ - _PyWeakref_ClearRef((PyWeakReference *)op); - } - - if (! _PyType_SUPPORTS_WEAKREFS(Py_TYPE(op))) - continue; - - /* It supports weakrefs. Does it have any? - * - * This is never triggered for static types so we can avoid the - * (slightly) more costly _PyObject_GET_WEAKREFS_LISTPTR(). - */ - wrlist = _PyObject_GET_WEAKREFS_LISTPTR_FROM_OFFSET(op); - - /* `op` may have some weakrefs. March over the list, clear - * all the weakrefs, and move the weakrefs with callbacks - * that must be called into wrcb_to_call. - */ - for (wr = *wrlist; wr != NULL; wr = *wrlist) { - PyGC_Head *wrasgc; /* AS_GC(wr) */ - - /* _PyWeakref_ClearRef clears the weakref but leaves - * the callback pointer intact. Obscure: it also - * changes *wrlist. - */ - _PyObject_ASSERT((PyObject *)wr, wr->wr_object == op); - _PyWeakref_ClearRef(wr); - _PyObject_ASSERT((PyObject *)wr, wr->wr_object == Py_None); - if (wr->wr_callback == NULL) { - /* no callback */ - continue; - } - - /* Headache time. `op` is going away, and is weakly referenced by - * `wr`, which has a callback. Should the callback be invoked? If wr - * is also trash, no: - * - * 1. There's no need to call it. The object and the weakref are - * both going away, so it's legitimate to pretend the weakref is - * going away first. The user has to ensure a weakref outlives its - * referent if they want a guarantee that the wr callback will get - * invoked. - * - * 2. It may be catastrophic to call it. If the callback is also in - * cyclic trash (CT), then although the CT is unreachable from - * outside the current generation, CT may be reachable from the - * callback. Then the callback could resurrect insane objects. 
- * - * Since the callback is never needed and may be unsafe in this case, - * wr is simply left in the unreachable set. Note that because we - * already called _PyWeakref_ClearRef(wr), its callback will never - * trigger. - * - * OTOH, if wr isn't part of CT, we should invoke the callback: the - * weakref outlived the trash. Note that since wr isn't CT in this - * case, its callback can't be CT either -- wr acted as an external - * root to this generation, and therefore its callback did too. So - * nothing in CT is reachable from the callback either, so it's hard - * to imagine how calling it later could create a problem for us. wr - * is moved to wrcb_to_call in this case. - */ - if (gc_is_collecting(AS_GC((PyObject *)wr))) { - /* it should already have been cleared above */ - assert(wr->wr_object == Py_None); - continue; - } - - /* Create a new reference so that wr can't go away - * before we can process it again. - */ - Py_INCREF(wr); - - /* Move wr to wrcb_to_call, for the next pass. */ - wrasgc = AS_GC((PyObject *)wr); - assert(wrasgc != next); /* wrasgc is reachable, but - next isn't, so they can't - be the same */ - gc_list_move(wrasgc, &wrcb_to_call); - } - } - - /* Invoke the callbacks we decided to honor. It's safe to invoke them - * because they can't reference unreachable objects. - */ - while (! gc_list_is_empty(&wrcb_to_call)) { - PyObject *temp; - PyObject *callback; - - gc = (PyGC_Head*)wrcb_to_call._gc_next; - op = FROM_GC(gc); - _PyObject_ASSERT(op, PyWeakref_Check(op)); - wr = (PyWeakReference *)op; - callback = wr->wr_callback; - _PyObject_ASSERT(op, callback != NULL); - - /* copy-paste of weakrefobject.c's handle_callback() */ - temp = PyObject_CallOneArg(callback, (PyObject *)wr); - if (temp == NULL) - PyErr_WriteUnraisable(callback); - else - Py_DECREF(temp); - - /* Give up the reference we created in the first pass. When - * op's refcount hits 0 (which it may or may not do right now), - * op's tp_dealloc will decref op->wr_callback too. Note - * that the refcount probably will hit 0 now, and because this - * weakref was reachable to begin with, gc didn't already - * add it to its count of freed objects. Example: a reachable - * weak value dict maps some key to this reachable weakref. - * The callback removes this key->weakref mapping from the - * dict, leaving no other references to the weakref (excepting - * ours). - */ - Py_DECREF(op); - if (wrcb_to_call._gc_next == (uintptr_t)gc) { - /* object is still alive -- move it */ - gc_list_move(gc, old); - } - else { - ++num_freed; - } - } - - return num_freed; -} - -static void -debug_cycle(const char *msg, PyObject *op) -{ - PySys_FormatStderr("gc: %s <%s %p>\n", - msg, Py_TYPE(op)->tp_name, op); -} - -/* Handle uncollectable garbage (cycles with tp_del slots, and stuff reachable - * only from such cycles). - * If DEBUG_SAVEALL, all objects in finalizers are appended to the module - * garbage list (a Python list), else only the objects in finalizers with - * __del__ methods are appended to garbage. All objects in finalizers are - * merged into the old list regardless. 
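The policy spelled out above (clear every weakref to trash, but only invoke callbacks of weakrefs that are themselves outside the trash) can be demonstrated from Python; in this sketch Node and the lambdas are purely illustrative:

    import gc, weakref

    class Node:
        pass

    events = []

    # The weakref lives outside the dying cycle, so its callback runs.
    a, b = Node(), Node()
    a.peer, b.peer = b, a
    wr = weakref.ref(a, lambda ref: events.append("outside"))
    del a, b
    gc.collect()
    print(events)               # ['outside']

    # The weakref is itself trapped in the dying cycle, so its callback
    # is deliberately skipped.
    c, d = Node(), Node()
    c.peer, d.peer = d, c
    c.wr = weakref.ref(d, lambda ref: events.append("inside"))
    del c, d
    gc.collect()
    print(events)               # still ['outside']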
- */ -static void -handle_legacy_finalizers(PyThreadState *tstate, - GCState *gcstate, - PyGC_Head *finalizers, PyGC_Head *old) -{ - assert(!_PyErr_Occurred(tstate)); - assert(gcstate->garbage != NULL); - - PyGC_Head *gc = GC_NEXT(finalizers); - for (; gc != finalizers; gc = GC_NEXT(gc)) { - PyObject *op = FROM_GC(gc); - - if ((gcstate->debug & DEBUG_SAVEALL) || has_legacy_finalizer(op)) { - if (PyList_Append(gcstate->garbage, op) < 0) { - _PyErr_Clear(tstate); - break; - } - } - } - - gc_list_merge(finalizers, old); -} - -/* Run first-time finalizers (if any) on all the objects in collectable. - * Note that this may remove some (or even all) of the objects from the - * list, due to refcounts falling to 0. - */ -static void -finalize_garbage(PyThreadState *tstate, PyGC_Head *collectable) -{ - destructor finalize; - PyGC_Head seen; - - /* While we're going through the loop, `finalize(op)` may cause op, or - * other objects, to be reclaimed via refcounts falling to zero. So - * there's little we can rely on about the structure of the input - * `collectable` list across iterations. For safety, we always take the - * first object in that list and move it to a temporary `seen` list. - * If objects vanish from the `collectable` and `seen` lists we don't - * care. - */ - gc_list_init(&seen); - - while (!gc_list_is_empty(collectable)) { - PyGC_Head *gc = GC_NEXT(collectable); - PyObject *op = FROM_GC(gc); - gc_list_move(gc, &seen); - if (!_PyGCHead_FINALIZED(gc) && - (finalize = Py_TYPE(op)->tp_finalize) != NULL) { - _PyGCHead_SET_FINALIZED(gc); - Py_INCREF(op); - finalize(op); - assert(!_PyErr_Occurred(tstate)); - Py_DECREF(op); - } - } - gc_list_merge(&seen, collectable); -} - -/* Break reference cycles by clearing the containers involved. This is - * tricky business as the lists can be changing and we don't know which - * objects may be freed. It is possible I screwed something up here. - */ -static void -delete_garbage(PyThreadState *tstate, GCState *gcstate, - PyGC_Head *collectable, PyGC_Head *old) -{ - assert(!_PyErr_Occurred(tstate)); - - while (!gc_list_is_empty(collectable)) { - PyGC_Head *gc = GC_NEXT(collectable); - PyObject *op = FROM_GC(gc); - - _PyObject_ASSERT_WITH_MSG(op, Py_REFCNT(op) > 0, - "refcount is too small"); - - if (gcstate->debug & DEBUG_SAVEALL) { - assert(gcstate->garbage != NULL); - if (PyList_Append(gcstate->garbage, op) < 0) { - _PyErr_Clear(tstate); - } - } - else { - inquiry clear; - if ((clear = Py_TYPE(op)->tp_clear) != NULL) { - Py_INCREF(op); - (void) clear(op); - if (_PyErr_Occurred(tstate)) { - PyErr_FormatUnraisable("Exception ignored in tp_clear of %s", - Py_TYPE(op)->tp_name); - } - Py_DECREF(op); - } - } - if (GC_NEXT(collectable) == gc) { - /* object is still alive, move it, it may die later */ - gc_clear_collecting(gc); - gc_list_move(gc, old); - } - } -} - -/* Clear all free lists - * All free lists are cleared during the collection of the highest generation. - * Allocated items in the free list may keep a pymalloc arena occupied. - * Clearing the free lists may give back memory to the OS earlier. 
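The DEBUG_SAVEALL branches referenced here are simplest to see from Python: with the flag set, members of collected cycles land in gc.garbage instead of being freed. A minimal sketch:

    import gc

    class C:
        pass

    gc.set_debug(gc.DEBUG_SAVEALL)
    a, b = C(), C()
    a.ref, b.ref = b, a
    del a, b
    gc.collect()
    print(len(gc.garbage) >= 2)   # True: the cycle was saved, not deleted
    gc.garbage.clear()            # now let it really go
    gc.set_debug(0)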
- */ -static void -clear_freelists(PyInterpreterState *interp) -{ - _PyTuple_ClearFreeList(interp); - _PyFloat_ClearFreeList(interp); - _PyList_ClearFreeList(interp); - _PyDict_ClearFreeList(interp); - _PyAsyncGen_ClearFreeLists(interp); - _PyContext_ClearFreeList(interp); -} - -// Show stats for objects in each generations -static void -show_stats_each_generations(GCState *gcstate) -{ - char buf[100]; - size_t pos = 0; - - for (int i = 0; i < NUM_GENERATIONS && pos < sizeof(buf); i++) { - pos += PyOS_snprintf(buf+pos, sizeof(buf)-pos, - " %zd", - gc_list_size(GEN_HEAD(gcstate, i))); - } - - PySys_FormatStderr( - "gc: objects in each generation:%s\n" - "gc: objects in permanent generation: %zd\n", - buf, gc_list_size(&gcstate->permanent_generation.head)); -} - -/* Deduce which objects among "base" are unreachable from outside the list - and move them to 'unreachable'. The process consist in the following steps: - -1. Copy all reference counts to a different field (gc_prev is used to hold - this copy to save memory). -2. Traverse all objects in "base" and visit all referred objects using - "tp_traverse" and for every visited object, subtract 1 to the reference - count (the one that we copied in the previous step). After this step, all - objects that can be reached directly from outside must have strictly positive - reference count, while all unreachable objects must have a count of exactly 0. -3. Identify all unreachable objects (the ones with 0 reference count) and move - them to the "unreachable" list. This step also needs to move back to "base" all - objects that were initially marked as unreachable but are referred transitively - by the reachable objects (the ones with strictly positive reference count). - -Contracts: - - * The "base" has to be a valid list with no mask set. - - * The "unreachable" list must be uninitialized (this function calls - gc_list_init over 'unreachable'). - -IMPORTANT: This function leaves 'unreachable' with the NEXT_MASK_UNREACHABLE -flag set but it does not clear it to skip unnecessary iteration. Before the -flag is cleared (for example, by using 'clear_unreachable_mask' function or -by a call to 'move_legacy_finalizers'), the 'unreachable' list is not a normal -list and we can not use most gc_list_* functions for it. */ -static inline void -deduce_unreachable(PyGC_Head *base, PyGC_Head *unreachable) { - validate_list(base, collecting_clear_unreachable_clear); - /* Using ob_refcnt and gc_refs, calculate which objects in the - * container set are reachable from outside the set (i.e., have a - * refcount greater than 0 when all the references within the - * set are taken into account). - */ - update_refs(base); // gc_prev is used for gc_refs - subtract_refs(base); - - /* Leave everything reachable from outside base in base, and move - * everything else (in base) to unreachable. - * - * NOTE: This used to move the reachable objects into a reachable - * set instead. But most things usually turn out to be reachable, - * so it's more efficient to move the unreachable things. It "sounds slick" - * to move the unreachable objects, until you think about it - the reason it - * pays isn't actually obvious. - * - * Suppose we create objects A, B, C in that order. They appear in the young - * generation in the same order. If B points to A, and C to B, and C is - * reachable from outside, then the adjusted refcounts will be 0, 0, and 1 - * respectively. - * - * When move_unreachable finds A, A is moved to the unreachable list. 
The - * same for B when it's first encountered. Then C is traversed, B is moved - * _back_ to the reachable list. B is eventually traversed, and then A is - * moved back to the reachable list. - * - * So instead of not moving at all, the reachable objects B and A are moved - * twice each. Why is this a win? A straightforward algorithm to move the - * reachable objects instead would move A, B, and C once each. - * - * The key is that this dance leaves the objects in order C, B, A - it's - * reversed from the original order. On all _subsequent_ scans, none of - * them will move. Since most objects aren't in cycles, this can save an - * unbounded number of moves across an unbounded number of later collections. - * It can cost more only the first time the chain is scanned. - * - * Drawback: move_unreachable is also used to find out what's still trash - * after finalizers may resurrect objects. In _that_ case most unreachable - * objects will remain unreachable, so it would be more efficient to move - * the reachable objects instead. But this is a one-time cost, probably not - * worth complicating the code to speed just a little. - */ - gc_list_init(unreachable); - move_unreachable(base, unreachable); // gc_prev is pointer again - validate_list(base, collecting_clear_unreachable_clear); - validate_list(unreachable, collecting_set_unreachable_set); -} - -/* Handle objects that may have resurrected after a call to 'finalize_garbage', moving - them to 'old_generation' and placing the rest on 'still_unreachable'. - - Contracts: - * After this function 'unreachable' must not be used anymore and 'still_unreachable' - will contain the objects that did not resurrect. - - * The "still_unreachable" list must be uninitialized (this function calls - gc_list_init over 'still_unreachable'). - -IMPORTANT: After a call to this function, the 'still_unreachable' set will have the -PREV_MARK_COLLECTING set, but the objects in this set are going to be removed so -we can skip the expense of clearing the flag to avoid extra iteration. */ -static inline void -handle_resurrected_objects(PyGC_Head *unreachable, PyGC_Head* still_unreachable, - PyGC_Head *old_generation) -{ - // Remove the PREV_MASK_COLLECTING from unreachable - // to prepare it for a new call to 'deduce_unreachable' - gc_list_clear_collecting(unreachable); - - // After the call to deduce_unreachable, the 'still_unreachable' set will - // have the PREV_MARK_COLLECTING set, but the objects are going to be - // removed so we can skip the expense of clearing the flag. - PyGC_Head* resurrected = unreachable; - deduce_unreachable(resurrected, still_unreachable); - clear_unreachable_mask(still_unreachable); - - // Move the resurrected objects to the old generation for future collection. 
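Resurrection, which handle_resurrected_objects() deals with below, happens when a finalizer creates a fresh external reference to an object that had already been deemed unreachable; such objects must survive the collection and their finalizer must not run twice. A Python-level sketch (Phoenix is an illustrative name):

    import gc

    survivors = []

    class Phoenix:
        def __del__(self):
            survivors.append(self)   # resurrect via a new external reference

    a, b = Phoenix(), Phoenix()
    a.peer, b.peer = b, a
    del a, b
    gc.collect()
    print(len(survivors))            # 2: both finalizers ran, both objects survived
    survivors.clear()
    gc.collect()                     # finalizers are not run a second time
    print(len(survivors))            # 0: the cycle is now simply freed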
- gc_list_merge(resurrected, old_generation); -} - - -/* Invoke progress callbacks to notify clients that garbage collection - * is starting or stopping - */ -static void -invoke_gc_callback(PyThreadState *tstate, const char *phase, - int generation, Py_ssize_t collected, - Py_ssize_t uncollectable) -{ - assert(!_PyErr_Occurred(tstate)); - - /* we may get called very early */ - GCState *gcstate = &tstate->interp->gc; - if (gcstate->callbacks == NULL) { - return; - } - - /* The local variable cannot be rebound, check it for sanity */ - assert(PyList_CheckExact(gcstate->callbacks)); - PyObject *info = NULL; - if (PyList_GET_SIZE(gcstate->callbacks) != 0) { - info = Py_BuildValue("{sisnsn}", - "generation", generation, - "collected", collected, - "uncollectable", uncollectable); - if (info == NULL) { - PyErr_FormatUnraisable("Exception ignored on invoking gc callbacks"); - return; - } - } - - PyObject *phase_obj = PyUnicode_FromString(phase); - if (phase_obj == NULL) { - Py_XDECREF(info); - PyErr_FormatUnraisable("Exception ignored on invoking gc callbacks"); - return; - } - - PyObject *stack[] = {phase_obj, info}; - for (Py_ssize_t i=0; icallbacks); i++) { - PyObject *r, *cb = PyList_GET_ITEM(gcstate->callbacks, i); - Py_INCREF(cb); /* make sure cb doesn't go away */ - r = PyObject_Vectorcall(cb, stack, 2, NULL); - if (r == NULL) { - PyErr_WriteUnraisable(cb); - } - else { - Py_DECREF(r); - } - Py_DECREF(cb); - } - Py_DECREF(phase_obj); - Py_XDECREF(info); - assert(!_PyErr_Occurred(tstate)); -} - - -/* Find the oldest generation (highest numbered) where the count - * exceeds the threshold. Objects in the that generation and - * generations younger than it will be collected. */ -static int -gc_select_generation(GCState *gcstate) -{ - for (int i = NUM_GENERATIONS-1; i >= 0; i--) { - if (gcstate->generations[i].count > gcstate->generations[i].threshold) { - /* Avoid quadratic performance degradation in number - of tracked objects (see also issue #4074): - - To limit the cost of garbage collection, there are two strategies; - - make each collection faster, e.g. by scanning fewer objects - - do less collections - This heuristic is about the latter strategy. - - In addition to the various configurable thresholds, we only trigger a - full collection if the ratio - - long_lived_pending / long_lived_total - - is above a given value (hardwired to 25%). - - The reason is that, while "non-full" collections (i.e., collections of - the young and middle generations) will always examine roughly the same - number of objects -- determined by the aforementioned thresholds --, - the cost of a full collection is proportional to the total number of - long-lived objects, which is virtually unbounded. - - Indeed, it has been remarked that doing a full collection every - of object creations entails a dramatic performance - degradation in workloads which consist in creating and storing lots of - long-lived objects (e.g. building a large list of GC-tracked objects would - show quadratic performance, instead of linear as expected: see issue #4074). - - Using the above ratio, instead, yields amortized linear performance in - the total number of objects (the effect of which can be summarized - thusly: "each full garbage collection is more and more costly as the - number of objects grows, but we do fewer and fewer of them"). - - This heuristic was suggested by Martin von Löwis on python-dev in - June 2008. 
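invoke_gc_callback() above is the hook behind gc.callbacks, and the generation thresholds and counters that drive gc_select_generation() are exposed as gc.get_threshold() and gc.get_count(). A short usage sketch:

    import gc

    def on_gc(phase, info):
        # phase is "start" or "stop"; info carries the stats built above
        print(phase, info["generation"], info["collected"], info["uncollectable"])

    gc.callbacks.append(on_gc)
    gc.collect(1)                # collect generations 0 and 1 only
    gc.callbacks.remove(on_gc)

    print(gc.get_threshold())    # e.g. (700, 10, 10)
    print(gc.get_count())        # pending counts per generation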
His original analysis and proposal can be found at: - http://mail.python.org/pipermail/python-dev/2008-June/080579.html - */ - if (i == NUM_GENERATIONS - 1 - && gcstate->long_lived_pending < gcstate->long_lived_total / 4) - continue; - return i; - } - } - return -1; -} +#include "Python.h" +#include "pycore_gc.h" +#include "pycore_object.h" // _PyObject_IS_GC() +#include "pycore_pystate.h" // _PyInterpreterState_GET() +typedef struct _gc_runtime_state GCState; -/* This is the main function. Read this to understand how the - * collection process works. */ -static Py_ssize_t -gc_collect_main(PyThreadState *tstate, int generation, _PyGC_Reason reason) +static GCState * +get_gc_state(void) { - int i; - Py_ssize_t m = 0; /* # objects collected */ - Py_ssize_t n = 0; /* # unreachable objects that couldn't be collected */ - PyGC_Head *young; /* the generation we are examining */ - PyGC_Head *old; /* next older generation */ - PyGC_Head unreachable; /* non-problematic unreachable trash */ - PyGC_Head finalizers; /* objects with, & reachable from, __del__ */ - PyGC_Head *gc; - _PyTime_t t1 = 0; /* initialize to prevent a compiler warning */ - GCState *gcstate = &tstate->interp->gc; - - // gc_collect_main() must not be called before _PyGC_Init - // or after _PyGC_Fini() - assert(gcstate->garbage != NULL); - assert(!_PyErr_Occurred(tstate)); - - int expected = 0; - if (!_Py_atomic_compare_exchange_int(&gcstate->collecting, &expected, 1)) { - // Don't start a garbage collection if one is already in progress. - return 0; - } - - if (generation == GENERATION_AUTO) { - // Select the oldest generation that needs collecting. We will collect - // objects from that generation and all generations younger than it. - generation = gc_select_generation(gcstate); - if (generation < 0) { - // No generation needs to be collected. - _Py_atomic_store_int(&gcstate->collecting, 0); - return 0; - } - } - - assert(generation >= 0 && generation < NUM_GENERATIONS); - -#ifdef Py_STATS - if (_Py_stats) { - _Py_stats->object_stats.object_visits = 0; - } -#endif - GC_STAT_ADD(generation, collections, 1); - - if (reason != _Py_GC_REASON_SHUTDOWN) { - invoke_gc_callback(tstate, "start", generation, 0, 0); - } - - if (gcstate->debug & DEBUG_STATS) { - PySys_WriteStderr("gc: collecting generation %d...\n", generation); - show_stats_each_generations(gcstate); - t1 = _PyTime_GetPerfCounter(); - } - - if (PyDTrace_GC_START_ENABLED()) - PyDTrace_GC_START(generation); - - /* update collection and allocation counters */ - if (generation+1 < NUM_GENERATIONS) - gcstate->generations[generation+1].count += 1; - for (i = 0; i <= generation; i++) - gcstate->generations[i].count = 0; - - /* merge younger generations with one we are currently collecting */ - for (i = 0; i < generation; i++) { - gc_list_merge(GEN_HEAD(gcstate, i), GEN_HEAD(gcstate, generation)); - } - - /* handy references */ - young = GEN_HEAD(gcstate, generation); - if (generation < NUM_GENERATIONS-1) - old = GEN_HEAD(gcstate, generation+1); - else - old = young; - validate_list(old, collecting_clear_unreachable_clear); - - deduce_unreachable(young, &unreachable); - - untrack_tuples(young); - /* Move reachable objects to next generation. */ - if (young != old) { - if (generation == NUM_GENERATIONS - 2) { - gcstate->long_lived_pending += gc_list_size(young); - } - gc_list_merge(young, old); - } - else { - /* We only un-track dicts in full collections, to avoid quadratic - dict build-up. See issue #14775. 
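Like the tuple case earlier, the dict untracking mentioned in this comment can be watched from Python, keeping in mind that it only happens during full collections and that the exact behaviour may differ between versions:

    import gc

    d = {}
    print(gc.is_tracked(d))     # False: empty dicts start out untracked
    d["k"] = []
    print(gc.is_tracked(d))     # True: it now references a container
    del d["k"]
    gc.collect()                # full collection, _PyDict_MaybeUntrack() runs
    print(gc.is_tracked(d))     # False again on current builds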
*/ - untrack_dicts(young); - gcstate->long_lived_pending = 0; - gcstate->long_lived_total = gc_list_size(young); - } - - /* All objects in unreachable are trash, but objects reachable from - * legacy finalizers (e.g. tp_del) can't safely be deleted. - */ - gc_list_init(&finalizers); - // NEXT_MASK_UNREACHABLE is cleared here. - // After move_legacy_finalizers(), unreachable is normal list. - move_legacy_finalizers(&unreachable, &finalizers); - /* finalizers contains the unreachable objects with a legacy finalizer; - * unreachable objects reachable *from* those are also uncollectable, - * and we move those into the finalizers list too. - */ - move_legacy_finalizer_reachable(&finalizers); - - validate_list(&finalizers, collecting_clear_unreachable_clear); - validate_list(&unreachable, collecting_set_unreachable_clear); - - /* Print debugging information. */ - if (gcstate->debug & DEBUG_COLLECTABLE) { - for (gc = GC_NEXT(&unreachable); gc != &unreachable; gc = GC_NEXT(gc)) { - debug_cycle("collectable", FROM_GC(gc)); - } - } - - /* Clear weakrefs and invoke callbacks as necessary. */ - m += handle_weakrefs(&unreachable, old); - - validate_list(old, collecting_clear_unreachable_clear); - validate_list(&unreachable, collecting_set_unreachable_clear); - - /* Call tp_finalize on objects which have one. */ - finalize_garbage(tstate, &unreachable); - - /* Handle any objects that may have resurrected after the call - * to 'finalize_garbage' and continue the collection with the - * objects that are still unreachable */ - PyGC_Head final_unreachable; - handle_resurrected_objects(&unreachable, &final_unreachable, old); - - /* Call tp_clear on objects in the final_unreachable set. This will cause - * the reference cycles to be broken. It may also cause some objects - * in finalizers to be freed. - */ - m += gc_list_size(&final_unreachable); - delete_garbage(tstate, gcstate, &final_unreachable, old); - - /* Collect statistics on uncollectable objects found and print - * debugging information. */ - for (gc = GC_NEXT(&finalizers); gc != &finalizers; gc = GC_NEXT(gc)) { - n++; - if (gcstate->debug & DEBUG_UNCOLLECTABLE) - debug_cycle("uncollectable", FROM_GC(gc)); - } - if (gcstate->debug & DEBUG_STATS) { - double d = _PyTime_AsSecondsDouble(_PyTime_GetPerfCounter() - t1); - PySys_WriteStderr( - "gc: done, %zd unreachable, %zd uncollectable, %.4fs elapsed\n", - n+m, n, d); - } - - /* Append instances in the uncollectable set to a Python - * reachable list of garbage. The programmer has to deal with - * this if they insist on creating this type of structure. 
- */ - handle_legacy_finalizers(tstate, gcstate, &finalizers, old); - validate_list(old, collecting_clear_unreachable_clear); - - /* Clear free list only during the collection of the highest - * generation */ - if (generation == NUM_GENERATIONS-1) { - clear_freelists(tstate->interp); - } - - if (_PyErr_Occurred(tstate)) { - if (reason == _Py_GC_REASON_SHUTDOWN) { - _PyErr_Clear(tstate); - } - else { - PyErr_FormatUnraisable("Exception ignored in garbage collection"); - } - } - - /* Update stats */ - struct gc_generation_stats *stats = &gcstate->generation_stats[generation]; - stats->collections++; - stats->collected += m; - stats->uncollectable += n; - - GC_STAT_ADD(generation, objects_collected, m); -#ifdef Py_STATS - if (_Py_stats) { - GC_STAT_ADD(generation, object_visits, - _Py_stats->object_stats.object_visits); - _Py_stats->object_stats.object_visits = 0; - } -#endif - - if (PyDTrace_GC_DONE_ENABLED()) { - PyDTrace_GC_DONE(n + m); - } - - if (reason != _Py_GC_REASON_SHUTDOWN) { - invoke_gc_callback(tstate, "stop", generation, m, n); - } - - assert(!_PyErr_Occurred(tstate)); - _Py_atomic_store_int(&gcstate->collecting, 0); - return n + m; + PyInterpreterState *interp = _PyInterpreterState_GET(); + return &interp->gc; } +/*[clinic input] +module gc +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=b5c9690ecc842d79]*/ #include "clinic/gcmodule.c.h" /*[clinic input] @@ -1593,7 +90,7 @@ gc_collect_impl(PyObject *module, int generation) return -1; } - return gc_collect_main(tstate, generation, _Py_GC_REASON_MANUAL); + return _PyGC_Collect(tstate, generation, _Py_GC_REASON_MANUAL); } /*[clinic input] @@ -1637,24 +134,41 @@ gc_get_debug_impl(PyObject *module) return gcstate->debug; } -PyDoc_STRVAR(gc_set_thresh__doc__, -"set_threshold(threshold0, [threshold1, threshold2]) -> None\n" -"\n" -"Sets the collection thresholds. Setting threshold0 to zero disables\n" -"collection.\n"); +/*[clinic input] +gc.set_threshold + + threshold0: int + [ + threshold1: int + [ + threshold2: int + ] + ] + / + +Set the collection thresholds (the collection frequency). + +Setting 'threshold0' to zero disables collection. 
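For reference, the Python-level behaviour documented by this clinic docstring, where threshold1 and threshold2 are optional and a threshold0 of zero disables automatic collection:

    import gc

    gc.set_threshold(700, 10, 10)   # all three generations at once
    print(gc.get_threshold())       # (700, 10, 10)
    gc.set_threshold(0)             # automatic collection is now off
    gc.set_threshold(700)           # only threshold0 needs to be given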
+[clinic start generated code]*/ static PyObject * -gc_set_threshold(PyObject *self, PyObject *args) +gc_set_threshold_impl(PyObject *module, int threshold0, int group_right_1, + int threshold1, int group_right_2, int threshold2) +/*[clinic end generated code: output=2e3c7c7dd59060f3 input=0d9612db50984eec]*/ { GCState *gcstate = get_gc_state(); - if (!PyArg_ParseTuple(args, "i|ii:set_threshold", - &gcstate->generations[0].threshold, - &gcstate->generations[1].threshold, - &gcstate->generations[2].threshold)) - return NULL; - for (int i = 3; i < NUM_GENERATIONS; i++) { + + gcstate->generations[0].threshold = threshold0; + if (group_right_1) { + gcstate->generations[1].threshold = threshold1; + } + if (group_right_2) { + gcstate->generations[2].threshold = threshold2; + /* generations higher than 2 get the same threshold */ - gcstate->generations[i].threshold = gcstate->generations[2].threshold; + for (int i = 3; i < NUM_GENERATIONS; i++) { + gcstate->generations[i].threshold = gcstate->generations[2].threshold; + } } Py_RETURN_NONE; } @@ -1693,60 +207,24 @@ gc_get_count_impl(PyObject *module) gcstate->generations[2].count); } -static int -referrersvisit(PyObject* obj, void *arg) -{ - PyObject *objs = arg; - Py_ssize_t i; - for (i = 0; i < PyTuple_GET_SIZE(objs); i++) - if (PyTuple_GET_ITEM(objs, i) == obj) - return 1; - return 0; -} +/*[clinic input] +gc.get_referrers -static int -gc_referrers_for(PyObject *objs, PyGC_Head *list, PyObject *resultlist) -{ - PyGC_Head *gc; - PyObject *obj; - traverseproc traverse; - for (gc = GC_NEXT(list); gc != list; gc = GC_NEXT(gc)) { - obj = FROM_GC(gc); - traverse = Py_TYPE(obj)->tp_traverse; - if (obj == objs || obj == resultlist) - continue; - if (traverse(obj, referrersvisit, objs)) { - if (PyList_Append(resultlist, obj) < 0) - return 0; /* error */ - } - } - return 1; /* no error */ -} + *objs as args: object -PyDoc_STRVAR(gc_get_referrers__doc__, -"get_referrers(*objs) -> list\n\ -Return the list of objects that directly refer to any of objs."); +Return the list of objects that directly refer to any of 'objs'. +[clinic start generated code]*/ static PyObject * -gc_get_referrers(PyObject *self, PyObject *args) +gc_get_referrers_impl(PyObject *module, PyObject *args) +/*[clinic end generated code: output=296a09587f6a86b5 input=bae96961b14a0922]*/ { if (PySys_Audit("gc.get_referrers", "(O)", args) < 0) { return NULL; } - PyObject *result = PyList_New(0); - if (!result) { - return NULL; - } - - GCState *gcstate = get_gc_state(); - for (int i = 0; i < NUM_GENERATIONS; i++) { - if (!(gc_referrers_for(args, GEN_HEAD(gcstate, i), result))) { - Py_DECREF(result); - return NULL; - } - } - return result; + PyInterpreterState *interp = _PyInterpreterState_GET(); + return _PyGC_GetReferrers(interp, args); } /* Append obj to list; return true if error (out of memory), false if OK. */ @@ -1757,12 +235,17 @@ referentsvisit(PyObject *obj, void *arg) return PyList_Append(list, obj) < 0; } -PyDoc_STRVAR(gc_get_referents__doc__, -"get_referents(*objs) -> list\n\ -Return the list of objects that are directly referred to by objs."); +/*[clinic input] +gc.get_referents + + *objs as args: object + +Return the list of objects that are directly referred to by 'objs'. 
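A small sketch of the two introspection functions being converted here; identity checks are used because the returned lists come back in no particular order:

    import gc

    inner = []
    outer = {"payload": inner}

    print(any(r is outer for r in gc.get_referrers(inner)))   # True
    print(any(o is inner for o in gc.get_referents(outer)))   # True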
+[clinic start generated code]*/ static PyObject * -gc_get_referents(PyObject *self, PyObject *args) +gc_get_referents_impl(PyObject *module, PyObject *args) +/*[clinic end generated code: output=d47dc02cefd06fe8 input=b3ceab0c34038cbf]*/ { Py_ssize_t i; if (PySys_Audit("gc.get_referents", "(O)", args) < 0) { @@ -1805,54 +288,25 @@ static PyObject * gc_get_objects_impl(PyObject *module, Py_ssize_t generation) /*[clinic end generated code: output=48b35fea4ba6cb0e input=ef7da9df9806754c]*/ { - PyThreadState *tstate = _PyThreadState_GET(); - int i; - PyObject* result; - GCState *gcstate = &tstate->interp->gc; - if (PySys_Audit("gc.get_objects", "n", generation) < 0) { return NULL; } - result = PyList_New(0); - if (result == NULL) { - return NULL; - } - - /* If generation is passed, we extract only that generation */ - if (generation != -1) { - if (generation >= NUM_GENERATIONS) { - _PyErr_Format(tstate, PyExc_ValueError, - "generation parameter must be less than the number of " - "available generations (%i)", - NUM_GENERATIONS); - goto error; - } - - if (generation < 0) { - _PyErr_SetString(tstate, PyExc_ValueError, - "generation parameter cannot be negative"); - goto error; - } - - if (append_objects(result, GEN_HEAD(gcstate, generation))) { - goto error; - } - - return result; + if (generation >= NUM_GENERATIONS) { + return PyErr_Format(PyExc_ValueError, + "generation parameter must be less than the number of " + "available generations (%i)", + NUM_GENERATIONS); } - /* If generation is not passed or None, get all objects from all generations */ - for (i = 0; i < NUM_GENERATIONS; i++) { - if (append_objects(result, GEN_HEAD(gcstate, i))) { - goto error; - } + if (generation < -1) { + PyErr_SetString(PyExc_ValueError, + "generation parameter cannot be negative"); + return NULL; } - return result; -error: - Py_DECREF(result); - return NULL; + PyInterpreterState *interp = _PyInterpreterState_GET(); + return _PyGC_GetObjects(interp, generation); } /*[clinic input] @@ -1960,11 +414,8 @@ static PyObject * gc_freeze_impl(PyObject *module) /*[clinic end generated code: output=502159d9cdc4c139 input=b602b16ac5febbe5]*/ { - GCState *gcstate = get_gc_state(); - for (int i = 0; i < NUM_GENERATIONS; ++i) { - gc_list_merge(GEN_HEAD(gcstate, i), &gcstate->permanent_generation.head); - gcstate->generations[i].count = 0; - } + PyInterpreterState *interp = _PyInterpreterState_GET(); + _PyGC_Freeze(interp); Py_RETURN_NONE; } @@ -1980,9 +431,8 @@ static PyObject * gc_unfreeze_impl(PyObject *module) /*[clinic end generated code: output=1c15f2043b25e169 input=2dd52b170f4cef6c]*/ { - GCState *gcstate = get_gc_state(); - gc_list_merge(&gcstate->permanent_generation.head, - GEN_HEAD(gcstate, NUM_GENERATIONS-1)); + PyInterpreterState *interp = _PyInterpreterState_GET(); + _PyGC_Unfreeze(interp); Py_RETURN_NONE; } @@ -1996,8 +446,8 @@ static Py_ssize_t gc_get_freeze_count_impl(PyObject *module) /*[clinic end generated code: output=61cbd9f43aa032e1 input=45ffbc65cfe2a6ed]*/ { - GCState *gcstate = get_gc_state(); - return gc_list_size(&gcstate->permanent_generation.head); + PyInterpreterState *interp = _PyInterpreterState_GET(); + return _PyGC_GetFreezeCount(interp); } @@ -2030,17 +480,15 @@ static PyMethodDef GcMethods[] = { GC_SET_DEBUG_METHODDEF GC_GET_DEBUG_METHODDEF GC_GET_COUNT_METHODDEF - {"set_threshold", gc_set_threshold, METH_VARARGS, gc_set_thresh__doc__}, + GC_SET_THRESHOLD_METHODDEF GC_GET_THRESHOLD_METHODDEF GC_COLLECT_METHODDEF GC_GET_OBJECTS_METHODDEF GC_GET_STATS_METHODDEF GC_IS_TRACKED_METHODDEF 
GC_IS_FINALIZED_METHODDEF - {"get_referrers", gc_get_referrers, METH_VARARGS, - gc_get_referrers__doc__}, - {"get_referents", gc_get_referents, METH_VARARGS, - gc_get_referents__doc__}, + GC_GET_REFERRERS_METHODDEF + GC_GET_REFERENTS_METHODDEF GC_FREEZE_METHODDEF GC_UNFREEZE_METHODDEF GC_GET_FREEZE_COUNT_METHODDEF @@ -2063,7 +511,7 @@ gcmodule_exec(PyObject *module) return -1; } -#define ADD_INT(NAME) if (PyModule_AddIntConstant(module, #NAME, NAME) < 0) { return -1; } +#define ADD_INT(NAME) if (PyModule_AddIntConstant(module, #NAME, _PyGC_ ## NAME) < 0) { return -1; } ADD_INT(DEBUG_STATS); ADD_INT(DEBUG_COLLECTABLE); ADD_INT(DEBUG_UNCOLLECTABLE); @@ -2093,353 +541,3 @@ PyInit_gc(void) { return PyModuleDef_Init(&gcmodule); } - -/* C API for controlling the state of the garbage collector */ -int -PyGC_Enable(void) -{ - GCState *gcstate = get_gc_state(); - int old_state = gcstate->enabled; - gcstate->enabled = 1; - return old_state; -} - -int -PyGC_Disable(void) -{ - GCState *gcstate = get_gc_state(); - int old_state = gcstate->enabled; - gcstate->enabled = 0; - return old_state; -} - -int -PyGC_IsEnabled(void) -{ - GCState *gcstate = get_gc_state(); - return gcstate->enabled; -} - -/* Public API to invoke gc.collect() from C */ -Py_ssize_t -PyGC_Collect(void) -{ - PyThreadState *tstate = _PyThreadState_GET(); - GCState *gcstate = &tstate->interp->gc; - - if (!gcstate->enabled) { - return 0; - } - - Py_ssize_t n; - PyObject *exc = _PyErr_GetRaisedException(tstate); - n = gc_collect_main(tstate, NUM_GENERATIONS - 1, _Py_GC_REASON_MANUAL); - _PyErr_SetRaisedException(tstate, exc); - - return n; -} - -Py_ssize_t -_PyGC_CollectNoFail(PyThreadState *tstate) -{ - /* Ideally, this function is only called on interpreter shutdown, - and therefore not recursively. Unfortunately, when there are daemon - threads, a daemon thread can start a cyclic garbage collection - during interpreter shutdown (and then never finish it). - See http://bugs.python.org/issue8713#msg195178 for an example. - */ - return gc_collect_main(tstate, NUM_GENERATIONS - 1, _Py_GC_REASON_SHUTDOWN); -} - -void -_PyGC_DumpShutdownStats(PyInterpreterState *interp) -{ - GCState *gcstate = &interp->gc; - if (!(gcstate->debug & DEBUG_SAVEALL) - && gcstate->garbage != NULL && PyList_GET_SIZE(gcstate->garbage) > 0) { - const char *message; - if (gcstate->debug & DEBUG_UNCOLLECTABLE) - message = "gc: %zd uncollectable objects at " \ - "shutdown"; - else - message = "gc: %zd uncollectable objects at " \ - "shutdown; use gc.set_debug(gc.DEBUG_UNCOLLECTABLE) to list them"; - /* PyErr_WarnFormat does too many things and we are at shutdown, - the warnings module's dependencies (e.g. linecache) may be gone - already. */ - if (PyErr_WarnExplicitFormat(PyExc_ResourceWarning, "gc", 0, - "gc", NULL, message, - PyList_GET_SIZE(gcstate->garbage))) - PyErr_WriteUnraisable(NULL); - if (gcstate->debug & DEBUG_UNCOLLECTABLE) { - PyObject *repr = NULL, *bytes = NULL; - repr = PyObject_Repr(gcstate->garbage); - if (!repr || !(bytes = PyUnicode_EncodeFSDefault(repr))) - PyErr_WriteUnraisable(gcstate->garbage); - else { - PySys_WriteStderr( - " %s\n", - PyBytes_AS_STRING(bytes) - ); - } - Py_XDECREF(repr); - Py_XDECREF(bytes); - } - } -} - - -void -_PyGC_Fini(PyInterpreterState *interp) -{ - GCState *gcstate = &interp->gc; - Py_CLEAR(gcstate->garbage); - Py_CLEAR(gcstate->callbacks); - - /* We expect that none of this interpreters objects are shared - with other interpreters. - See https://github.com/python/cpython/issues/90228. 
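The freeze API whose C implementation moves out of this file is typically used to park long-lived objects in the permanent generation so later collections skip them (for instance right before fork(), to protect copy-on-write pages). A minimal sketch:

    import gc

    gc.collect()                       # tidy up first
    gc.freeze()                        # move every tracked object to the permanent generation
    print(gc.get_freeze_count() > 0)   # True
    # ... typically os.fork() would happen here ...
    gc.unfreeze()                      # move them back into the oldest generation
    print(gc.get_freeze_count())       # 0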
*/ -} - -/* for debugging */ -void -_PyGC_Dump(PyGC_Head *g) -{ - _PyObject_Dump(FROM_GC(g)); -} - - -#ifdef Py_DEBUG -static int -visit_validate(PyObject *op, void *parent_raw) -{ - PyObject *parent = _PyObject_CAST(parent_raw); - if (_PyObject_IsFreed(op)) { - _PyObject_ASSERT_FAILED_MSG(parent, - "PyObject_GC_Track() object is not valid"); - } - return 0; -} -#endif - - -/* extension modules might be compiled with GC support so these - functions must always be available */ - -void -PyObject_GC_Track(void *op_raw) -{ - PyObject *op = _PyObject_CAST(op_raw); - if (_PyObject_GC_IS_TRACKED(op)) { - _PyObject_ASSERT_FAILED_MSG(op, - "object already tracked " - "by the garbage collector"); - } - _PyObject_GC_TRACK(op); - -#ifdef Py_DEBUG - /* Check that the object is valid: validate objects traversed - by tp_traverse() */ - traverseproc traverse = Py_TYPE(op)->tp_traverse; - (void)traverse(op, visit_validate, op); -#endif -} - -void -PyObject_GC_UnTrack(void *op_raw) -{ - PyObject *op = _PyObject_CAST(op_raw); - /* Obscure: the Py_TRASHCAN mechanism requires that we be able to - * call PyObject_GC_UnTrack twice on an object. - */ - if (_PyObject_GC_IS_TRACKED(op)) { - _PyObject_GC_UNTRACK(op); - } -} - -int -PyObject_IS_GC(PyObject *obj) -{ - return _PyObject_IS_GC(obj); -} - -void -_Py_ScheduleGC(PyInterpreterState *interp) -{ - _Py_set_eval_breaker_bit(interp, _PY_GC_SCHEDULED_BIT, 1); -} - -void -_PyObject_GC_Link(PyObject *op) -{ - PyGC_Head *g = AS_GC(op); - assert(((uintptr_t)g & (sizeof(uintptr_t)-1)) == 0); // g must be correctly aligned - - PyThreadState *tstate = _PyThreadState_GET(); - GCState *gcstate = &tstate->interp->gc; - g->_gc_next = 0; - g->_gc_prev = 0; - gcstate->generations[0].count++; /* number of allocated GC objects */ - if (gcstate->generations[0].count > gcstate->generations[0].threshold && - gcstate->enabled && - gcstate->generations[0].threshold && - !_Py_atomic_load_int_relaxed(&gcstate->collecting) && - !_PyErr_Occurred(tstate)) - { - _Py_ScheduleGC(tstate->interp); - } -} - -void -_Py_RunGC(PyThreadState *tstate) -{ - gc_collect_main(tstate, GENERATION_AUTO, _Py_GC_REASON_HEAP); -} - -static PyObject * -gc_alloc(size_t basicsize, size_t presize) -{ - PyThreadState *tstate = _PyThreadState_GET(); - if (basicsize > PY_SSIZE_T_MAX - presize) { - return _PyErr_NoMemory(tstate); - } - size_t size = presize + basicsize; - char *mem = PyObject_Malloc(size); - if (mem == NULL) { - return _PyErr_NoMemory(tstate); - } - ((PyObject **)mem)[0] = NULL; - ((PyObject **)mem)[1] = NULL; - PyObject *op = (PyObject *)(mem + presize); - _PyObject_GC_Link(op); - return op; -} - -PyObject * -_PyObject_GC_New(PyTypeObject *tp) -{ - size_t presize = _PyType_PreHeaderSize(tp); - PyObject *op = gc_alloc(_PyObject_SIZE(tp), presize); - if (op == NULL) { - return NULL; - } - _PyObject_Init(op, tp); - return op; -} - -PyVarObject * -_PyObject_GC_NewVar(PyTypeObject *tp, Py_ssize_t nitems) -{ - PyVarObject *op; - - if (nitems < 0) { - PyErr_BadInternalCall(); - return NULL; - } - size_t presize = _PyType_PreHeaderSize(tp); - size_t size = _PyObject_VAR_SIZE(tp, nitems); - op = (PyVarObject *)gc_alloc(size, presize); - if (op == NULL) { - return NULL; - } - _PyObject_InitVar(op, tp, nitems); - return op; -} - -PyObject * -PyUnstable_Object_GC_NewWithExtraData(PyTypeObject *tp, size_t extra_size) -{ - size_t presize = _PyType_PreHeaderSize(tp); - PyObject *op = gc_alloc(_PyObject_SIZE(tp) + extra_size, presize); - if (op == NULL) { - return NULL; - } - memset(op, 0, _PyObject_SIZE(tp) + 
extra_size); - _PyObject_Init(op, tp); - return op; -} - -PyVarObject * -_PyObject_GC_Resize(PyVarObject *op, Py_ssize_t nitems) -{ - const size_t basicsize = _PyObject_VAR_SIZE(Py_TYPE(op), nitems); - const size_t presize = _PyType_PreHeaderSize(((PyObject *)op)->ob_type); - _PyObject_ASSERT((PyObject *)op, !_PyObject_GC_IS_TRACKED(op)); - if (basicsize > (size_t)PY_SSIZE_T_MAX - presize) { - return (PyVarObject *)PyErr_NoMemory(); - } - char *mem = (char *)op - presize; - mem = (char *)PyObject_Realloc(mem, presize + basicsize); - if (mem == NULL) { - return (PyVarObject *)PyErr_NoMemory(); - } - op = (PyVarObject *) (mem + presize); - Py_SET_SIZE(op, nitems); - return op; -} - -void -PyObject_GC_Del(void *op) -{ - size_t presize = _PyType_PreHeaderSize(((PyObject *)op)->ob_type); - PyGC_Head *g = AS_GC(op); - if (_PyObject_GC_IS_TRACKED(op)) { - gc_list_remove(g); -#ifdef Py_DEBUG - PyObject *exc = PyErr_GetRaisedException(); - if (PyErr_WarnExplicitFormat(PyExc_ResourceWarning, "gc", 0, - "gc", NULL, "Object of type %s is not untracked before destruction", - ((PyObject*)op)->ob_type->tp_name)) { - PyErr_WriteUnraisable(NULL); - } - PyErr_SetRaisedException(exc); -#endif - } - GCState *gcstate = get_gc_state(); - if (gcstate->generations[0].count > 0) { - gcstate->generations[0].count--; - } - PyObject_Free(((char *)op)-presize); -} - -int -PyObject_GC_IsTracked(PyObject* obj) -{ - if (_PyObject_IS_GC(obj) && _PyObject_GC_IS_TRACKED(obj)) { - return 1; - } - return 0; -} - -int -PyObject_GC_IsFinalized(PyObject *obj) -{ - if (_PyObject_IS_GC(obj) && _PyGC_FINALIZED(obj)) { - return 1; - } - return 0; -} - -void -PyUnstable_GC_VisitObjects(gcvisitobjects_t callback, void *arg) -{ - size_t i; - GCState *gcstate = get_gc_state(); - int origenstate = gcstate->enabled; - gcstate->enabled = 0; - for (i = 0; i < NUM_GENERATIONS; i++) { - PyGC_Head *gc_list, *gc; - gc_list = GEN_HEAD(gcstate, i); - for (gc = GC_NEXT(gc_list); gc != gc_list; gc = GC_NEXT(gc)) { - PyObject *op = FROM_GC(gc); - Py_INCREF(op); - int res = callback(op, arg); - Py_DECREF(op); - if (!res) { - goto done; - } - } - } -done: - gcstate->enabled = origenstate; -} diff --git a/Modules/mathmodule.c b/Modules/mathmodule.c index 6cd61e9ab75424..2a796c1c55d2f0 100644 --- a/Modules/mathmodule.c +++ b/Modules/mathmodule.c @@ -759,13 +759,17 @@ m_log10(double x) static PyObject * math_gcd(PyObject *module, PyObject * const *args, Py_ssize_t nargs) { - PyObject *res, *x; - Py_ssize_t i; + // Fast-path for the common case: gcd(int, int) + if (nargs == 2 && PyLong_CheckExact(args[0]) && PyLong_CheckExact(args[1])) + { + return _PyLong_GCD(args[0], args[1]); + } if (nargs == 0) { return PyLong_FromLong(0); } - res = PyNumber_Index(args[0]); + + PyObject *res = PyNumber_Index(args[0]); if (res == NULL) { return NULL; } @@ -775,8 +779,8 @@ math_gcd(PyObject *module, PyObject * const *args, Py_ssize_t nargs) } PyObject *one = _PyLong_GetOne(); // borrowed ref - for (i = 1; i < nargs; i++) { - x = _PyNumber_Index(args[i]); + for (Py_ssize_t i = 1; i < nargs; i++) { + PyObject *x = _PyNumber_Index(args[i]); if (x == NULL) { Py_DECREF(res); return NULL; diff --git a/Modules/mmapmodule.c b/Modules/mmapmodule.c index 66ed0b8efb775c..0cce7c27f9b16a 100644 --- a/Modules/mmapmodule.c +++ b/Modules/mmapmodule.c @@ -32,10 +32,6 @@ # include // close() #endif -// to support MS_WINDOWS_SYSTEM OpenFileMappingA / CreateFileMappingA -// need to be replaced with OpenFileMappingW / CreateFileMappingW -#if !defined(MS_WINDOWS) || 
defined(MS_WINDOWS_DESKTOP) || defined(MS_WINDOWS_GAMES) - #ifndef MS_WINDOWS #define UNIX # ifdef HAVE_FCNTL_H @@ -116,11 +112,12 @@ typedef struct { #ifdef MS_WINDOWS HANDLE map_handle; HANDLE file_handle; - char * tagname; + wchar_t * tagname; #endif #ifdef UNIX int fd; + _Bool trackfd; #endif PyObject *weakreflist; @@ -397,6 +394,13 @@ is_resizeable(mmap_object *self) "mmap can't resize with extant buffers exported."); return 0; } +#ifdef UNIX + if (!self->trackfd) { + PyErr_SetString(PyExc_ValueError, + "mmap can't resize with trackfd=False."); + return 0; + } +#endif if ((self->access == ACCESS_WRITE) || (self->access == ACCESS_DEFAULT)) return 1; PyErr_Format(PyExc_TypeError, @@ -534,7 +538,7 @@ mmap_resize_method(mmap_object *self, CloseHandle(self->map_handle); /* if the file mapping still exists, it cannot be resized. */ if (self->tagname) { - self->map_handle = OpenFileMappingA(FILE_MAP_WRITE, FALSE, + self->map_handle = OpenFileMappingW(FILE_MAP_WRITE, FALSE, self->tagname); if (self->map_handle) { PyErr_SetFromWindowsErr(ERROR_USER_MAPPED_FILE); @@ -563,7 +567,7 @@ mmap_resize_method(mmap_object *self, /* create a new file mapping and map a new view */ /* FIXME: call CreateFileMappingW with wchar_t tagname */ - self->map_handle = CreateFileMappingA( + self->map_handle = CreateFileMappingW( self->file_handle, NULL, PAGE_READWRITE, @@ -845,7 +849,7 @@ mmap__sizeof__method(mmap_object *self, void *Py_UNUSED(ignored)) { size_t res = _PyObject_SIZE(Py_TYPE(self)); if (self->tagname) { - res += strlen(self->tagname) + 1; + res += (wcslen(self->tagname) + 1) * sizeof(self->tagname[0]); } return PyLong_FromSize_t(res); } @@ -1158,7 +1162,7 @@ is 0, the maximum length of the map is the current size of the file,\n\ except that if the file is empty Windows raises an exception (you cannot\n\ create an empty mapping on Windows).\n\ \n\ -Unix: mmap(fileno, length[, flags[, prot[, access[, offset]]]])\n\ +Unix: mmap(fileno, length[, flags[, prot[, access[, offset[, trackfd]]]]])\n\ \n\ Maps length bytes from the file specified by the file descriptor fileno,\n\ and returns a mmap object. If length is 0, the maximum length of the map\n\ @@ -1225,15 +1229,17 @@ new_mmap_object(PyTypeObject *type, PyObject *args, PyObject *kwdict) off_t offset = 0; int fd, flags = MAP_SHARED, prot = PROT_WRITE | PROT_READ; int devzero = -1; - int access = (int)ACCESS_DEFAULT; + int access = (int)ACCESS_DEFAULT, trackfd = 1; static char *keywords[] = {"fileno", "length", "flags", "prot", - "access", "offset", NULL}; + "access", "offset", "trackfd", NULL}; - if (!PyArg_ParseTupleAndKeywords(args, kwdict, "in|iii" _Py_PARSE_OFF_T, keywords, + if (!PyArg_ParseTupleAndKeywords(args, kwdict, + "in|iii" _Py_PARSE_OFF_T "$p", keywords, &fd, &map_size, &flags, &prot, - &access, &offset)) + &access, &offset, &trackfd)) { return NULL; + } if (map_size < 0) { PyErr_SetString(PyExc_OverflowError, "memory mapped length must be positive"); @@ -1329,6 +1335,7 @@ new_mmap_object(PyTypeObject *type, PyObject *args, PyObject *kwdict) m_obj->weakreflist = NULL; m_obj->exports = 0; m_obj->offset = offset; + m_obj->trackfd = trackfd; if (fd == -1) { m_obj->fd = -1; /* Assume the caller wants to map anonymous memory. 
@@ -1354,13 +1361,16 @@ new_mmap_object(PyTypeObject *type, PyObject *args, PyObject *kwdict) } #endif } - else { + else if (trackfd) { m_obj->fd = _Py_dup(fd); if (m_obj->fd == -1) { Py_DECREF(m_obj); return NULL; } } + else { + m_obj->fd = -1; + } Py_BEGIN_ALLOW_THREADS m_obj->data = mmap(NULL, map_size, prot, flags, fd, offset); @@ -1400,7 +1410,7 @@ new_mmap_object(PyTypeObject *type, PyObject *args, PyObject *kwdict) DWORD off_lo; /* lower 32 bits of offset */ DWORD size_hi; /* upper 32 bits of size */ DWORD size_lo; /* lower 32 bits of size */ - const char *tagname = ""; + PyObject *tagname = Py_None; DWORD dwErr = 0; int fileno; HANDLE fh = 0; @@ -1410,7 +1420,7 @@ new_mmap_object(PyTypeObject *type, PyObject *args, PyObject *kwdict) "tagname", "access", "offset", NULL }; - if (!PyArg_ParseTupleAndKeywords(args, kwdict, "in|ziL", keywords, + if (!PyArg_ParseTupleAndKeywords(args, kwdict, "in|OiL", keywords, &fileno, &map_size, &tagname, &access, &offset)) { return NULL; @@ -1543,17 +1553,19 @@ new_mmap_object(PyTypeObject *type, PyObject *args, PyObject *kwdict) m_obj->weakreflist = NULL; m_obj->exports = 0; /* set the tag name */ - if (tagname != NULL && *tagname != '\0') { - m_obj->tagname = PyMem_Malloc(strlen(tagname)+1); + if (!Py_IsNone(tagname)) { + if (!PyUnicode_Check(tagname)) { + Py_DECREF(m_obj); + return PyErr_Format(PyExc_TypeError, "expected str or None for " + "'tagname', not %.200s", + Py_TYPE(tagname)->tp_name); + } + m_obj->tagname = PyUnicode_AsWideCharString(tagname, NULL); if (m_obj->tagname == NULL) { - PyErr_NoMemory(); Py_DECREF(m_obj); return NULL; } - strcpy(m_obj->tagname, tagname); } - else - m_obj->tagname = NULL; m_obj->access = (access_mode)access; size_hi = (DWORD)(size >> 32); @@ -1562,7 +1574,7 @@ new_mmap_object(PyTypeObject *type, PyObject *args, PyObject *kwdict) off_lo = (DWORD)(offset & 0xFFFFFFFF); /* For files, it would be sufficient to pass 0 as size. For anonymous maps, we have to pass the size explicitly. 
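The trackfd flag wired in above is keyword-only and Unix-only; with trackfd=False the mapping keeps no private duplicate of the file descriptor, and resize() is refused with the ValueError added to is_resizeable(). A sketch of the intended usage once this change is in (the temporary file is only for illustration):

    import mmap, os, tempfile

    fd, path = tempfile.mkstemp()
    os.ftruncate(fd, 4096)

    m = mmap.mmap(fd, 4096, trackfd=False)   # no duplicate of fd is kept
    m[:5] = b"hello"
    try:
        m.resize(8192)
    except ValueError as exc:
        print(exc)                           # mmap can't resize with trackfd=False.
    finally:
        m.close()
        os.close(fd)
        os.unlink(path)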
*/ - m_obj->map_handle = CreateFileMappingA(m_obj->file_handle, + m_obj->map_handle = CreateFileMappingW(m_obj->file_handle, NULL, flProtect, size_hi, @@ -1655,6 +1667,39 @@ mmap_exec(PyObject *module) #endif #ifdef MAP_CONCEAL ADD_INT_MACRO(module, MAP_CONCEAL); +#endif +#ifdef MAP_NORESERVE + ADD_INT_MACRO(module, MAP_NORESERVE); +#endif +#ifdef MAP_NOEXTEND + ADD_INT_MACRO(module, MAP_NOEXTEND); +#endif +#ifdef MAP_HASSEMAPHORE + ADD_INT_MACRO(module, MAP_HASSEMAPHORE); +#endif +#ifdef MAP_NOCACHE + ADD_INT_MACRO(module, MAP_NOCACHE); +#endif +#ifdef MAP_JIT + ADD_INT_MACRO(module, MAP_JIT); +#endif +#ifdef MAP_RESILIENT_CODESIGN + ADD_INT_MACRO(module, MAP_RESILIENT_CODESIGN); +#endif +#ifdef MAP_RESILIENT_MEDIA + ADD_INT_MACRO(module, MAP_RESILIENT_MEDIA); +#endif +#ifdef MAP_32BIT + ADD_INT_MACRO(module, MAP_32BIT); +#endif +#ifdef MAP_TRANSLATED_ALLOW_EXECUTE + ADD_INT_MACRO(module, MAP_TRANSLATED_ALLOW_EXECUTE); +#endif +#ifdef MAP_UNIX03 + ADD_INT_MACRO(module, MAP_UNIX03); +#endif +#ifdef MAP_TPRO + ADD_INT_MACRO(module, MAP_TPRO); #endif if (PyModule_AddIntConstant(module, "PAGESIZE", (long)my_getpagesize()) < 0 ) { return -1; @@ -1771,5 +1816,3 @@ PyInit_mmap(void) { return PyModuleDef_Init(&mmapmodule); } - -#endif /* !MS_WINDOWS || MS_WINDOWS_DESKTOP || MS_WINDOWS_GAMES */ diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 39b1f3cb7b2b9b..007fc1cb116f84 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -1886,8 +1886,9 @@ win32_xstat_slow_impl(const wchar_t *path, struct _Py_stat_struct *result, HANDLE hFile; BY_HANDLE_FILE_INFORMATION fileInfo; FILE_BASIC_INFO basicInfo; + FILE_BASIC_INFO *pBasicInfo = NULL; FILE_ID_INFO idInfo; - FILE_ID_INFO *pIdInfo = &idInfo; + FILE_ID_INFO *pIdInfo = NULL; FILE_ATTRIBUTE_TAG_INFO tagInfo = { 0 }; DWORD fileType, error; BOOL isUnhandledTag = FALSE; @@ -2038,14 +2039,17 @@ win32_xstat_slow_impl(const wchar_t *path, struct _Py_stat_struct *result, retval = -1; goto cleanup; } - } - if (!GetFileInformationByHandleEx(hFile, FileIdInfo, &idInfo, sizeof(idInfo))) { - /* Failed to get FileIdInfo, so do not pass it along */ - pIdInfo = NULL; + /* Successfully got FileBasicInfo, so we'll pass it along */ + pBasicInfo = &basicInfo; + + if (GetFileInformationByHandleEx(hFile, FileIdInfo, &idInfo, sizeof(idInfo))) { + /* Successfully got FileIdInfo, so pass it along */ + pIdInfo = &idInfo; + } } - _Py_attribute_data_to_stat(&fileInfo, tagInfo.ReparseTag, &basicInfo, pIdInfo, result); + _Py_attribute_data_to_stat(&fileInfo, tagInfo.ReparseTag, pBasicInfo, pIdInfo, result); update_st_mode_from_path(path, fileInfo.dwFileAttributes, result); cleanup: @@ -11578,8 +11582,8 @@ os_pipe_impl(PyObject *module) Py_BEGIN_ALLOW_THREADS ok = CreatePipe(&read, &write, &attr, 0); if (ok) { - fds[0] = _Py_open_osfhandle_noraise(read, _O_RDONLY); - fds[1] = _Py_open_osfhandle_noraise(write, _O_WRONLY); + fds[0] = _Py_open_osfhandle_noraise(read, _O_RDONLY | _O_NOINHERIT); + fds[1] = _Py_open_osfhandle_noraise(write, _O_WRONLY | _O_NOINHERIT); if (fds[0] == -1 || fds[1] == -1) { CloseHandle(read); CloseHandle(write); diff --git a/Modules/timemodule.c b/Modules/timemodule.c index b3fe175d9b184a..2b0d3900dbddd6 100644 --- a/Modules/timemodule.c +++ b/Modules/timemodule.c @@ -1993,20 +1993,16 @@ time_exec(PyObject *module) return -1; } #endif - #ifdef CLOCK_MONOTONIC - if (PyModule_AddIntMacro(module, CLOCK_MONOTONIC) < 0) { return -1; } - #endif #ifdef CLOCK_MONOTONIC_RAW if (PyModule_AddIntMacro(module, CLOCK_MONOTONIC_RAW) < 0) { return 
-1; } #endif - #ifdef CLOCK_HIGHRES if (PyModule_AddIntMacro(module, CLOCK_HIGHRES) < 0) { return -1; @@ -2017,7 +2013,6 @@ time_exec(PyObject *module) return -1; } #endif - #ifdef CLOCK_THREAD_CPUTIME_ID if (PyModule_AddIntMacro(module, CLOCK_THREAD_CPUTIME_ID) < 0) { return -1; @@ -2044,10 +2039,19 @@ time_exec(PyObject *module) } #endif #ifdef CLOCK_UPTIME_RAW - if (PyModule_AddIntMacro(module, CLOCK_UPTIME_RAW) < 0) { return -1; } +#endif +#ifdef CLOCK_MONOTONIC_RAW_APPROX + if (PyModule_AddIntMacro(module, CLOCK_MONOTONIC_RAW_APPROX) < 0) { + return -1; + } +#endif +#ifdef CLOCK_UPTIME_RAW_APPROX + if (PyModule_AddIntMacro(module, CLOCK_UPTIME_RAW_APPROX) < 0) { + return -1; + } #endif } diff --git a/Objects/clinic/funcobject.c.h b/Objects/clinic/funcobject.c.h index 138f87716acbf7..8f20bda26438cf 100644 --- a/Objects/clinic/funcobject.c.h +++ b/Objects/clinic/funcobject.c.h @@ -9,7 +9,8 @@ preserve #include "pycore_modsupport.h" // _PyArg_UnpackKeywords() PyDoc_STRVAR(func_new__doc__, -"function(code, globals, name=None, argdefs=None, closure=None)\n" +"function(code, globals, name=None, argdefs=None, closure=None,\n" +" kwdefaults=None)\n" "--\n" "\n" "Create a function object.\n" @@ -23,11 +24,14 @@ PyDoc_STRVAR(func_new__doc__, " argdefs\n" " a tuple that specifies the default argument values\n" " closure\n" -" a tuple that supplies the bindings for free variables"); +" a tuple that supplies the bindings for free variables\n" +" kwdefaults\n" +" a dictionary that specifies the default keyword argument values"); static PyObject * func_new_impl(PyTypeObject *type, PyCodeObject *code, PyObject *globals, - PyObject *name, PyObject *defaults, PyObject *closure); + PyObject *name, PyObject *defaults, PyObject *closure, + PyObject *kwdefaults); static PyObject * func_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) @@ -35,14 +39,14 @@ func_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) - #define NUM_KEYWORDS 5 + #define NUM_KEYWORDS 6 static struct { PyGC_Head _this_is_not_used; PyObject_VAR_HEAD PyObject *ob_item[NUM_KEYWORDS]; } _kwtuple = { .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) - .ob_item = { &_Py_ID(code), &_Py_ID(globals), &_Py_ID(name), &_Py_ID(argdefs), &_Py_ID(closure), }, + .ob_item = { &_Py_ID(code), &_Py_ID(globals), &_Py_ID(name), &_Py_ID(argdefs), &_Py_ID(closure), &_Py_ID(kwdefaults), }, }; #undef NUM_KEYWORDS #define KWTUPLE (&_kwtuple.ob_base.ob_base) @@ -51,14 +55,14 @@ func_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) # define KWTUPLE NULL #endif // !Py_BUILD_CORE - static const char * const _keywords[] = {"code", "globals", "name", "argdefs", "closure", NULL}; + static const char * const _keywords[] = {"code", "globals", "name", "argdefs", "closure", "kwdefaults", NULL}; static _PyArg_Parser _parser = { .keywords = _keywords, .fname = "function", .kwtuple = KWTUPLE, }; #undef KWTUPLE - PyObject *argsbuf[5]; + PyObject *argsbuf[6]; PyObject * const *fastargs; Py_ssize_t nargs = PyTuple_GET_SIZE(args); Py_ssize_t noptargs = nargs + (kwargs ? 
PyDict_GET_SIZE(kwargs) : 0) - 2; @@ -67,8 +71,9 @@ func_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) PyObject *name = Py_None; PyObject *defaults = Py_None; PyObject *closure = Py_None; + PyObject *kwdefaults = Py_None; - fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser, 2, 5, 0, argsbuf); + fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser, 2, 6, 0, argsbuf); if (!fastargs) { goto exit; } @@ -97,11 +102,17 @@ func_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) goto skip_optional_pos; } } - closure = fastargs[4]; + if (fastargs[4]) { + closure = fastargs[4]; + if (!--noptargs) { + goto skip_optional_pos; + } + } + kwdefaults = fastargs[5]; skip_optional_pos: - return_value = func_new_impl(type, code, globals, name, defaults, closure); + return_value = func_new_impl(type, code, globals, name, defaults, closure, kwdefaults); exit: return return_value; } -/*[clinic end generated code: output=ff7b995500d2bee6 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=10947342188f38a9 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/listobject.c.h b/Objects/clinic/listobject.c.h index 54e6060451f3ff..a61550a49b66fc 100644 --- a/Objects/clinic/listobject.c.h +++ b/Objects/clinic/listobject.c.h @@ -7,6 +7,7 @@ preserve # include "pycore_runtime.h" // _Py_ID() #endif #include "pycore_abstract.h" // _PyNumber_Index() +#include "pycore_critical_section.h"// Py_BEGIN_CRITICAL_SECTION() #include "pycore_modsupport.h" // _PyArg_CheckPositional() PyDoc_STRVAR(list_insert__doc__, @@ -44,7 +45,9 @@ list_insert(PyListObject *self, PyObject *const *args, Py_ssize_t nargs) index = ival; } object = args[1]; + Py_BEGIN_CRITICAL_SECTION(self); return_value = list_insert_impl(self, index, object); + Py_END_CRITICAL_SECTION(); exit: return return_value; @@ -65,7 +68,13 @@ py_list_clear_impl(PyListObject *self); static PyObject * py_list_clear(PyListObject *self, PyObject *Py_UNUSED(ignored)) { - return py_list_clear_impl(self); + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = py_list_clear_impl(self); + Py_END_CRITICAL_SECTION(); + + return return_value; } PyDoc_STRVAR(list_copy__doc__, @@ -83,7 +92,13 @@ list_copy_impl(PyListObject *self); static PyObject * list_copy(PyListObject *self, PyObject *Py_UNUSED(ignored)) { - return list_copy_impl(self); + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = list_copy_impl(self); + Py_END_CRITICAL_SECTION(); + + return return_value; } PyDoc_STRVAR(list_append__doc__, @@ -95,6 +110,21 @@ PyDoc_STRVAR(list_append__doc__, #define LIST_APPEND_METHODDEF \ {"append", (PyCFunction)list_append, METH_O, list_append__doc__}, +static PyObject * +list_append_impl(PyListObject *self, PyObject *object); + +static PyObject * +list_append(PyListObject *self, PyObject *object) +{ + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = list_append_impl(self, object); + Py_END_CRITICAL_SECTION(); + + return return_value; +} + PyDoc_STRVAR(py_list_extend__doc__, "extend($self, iterable, /)\n" "--\n" @@ -104,6 +134,21 @@ PyDoc_STRVAR(py_list_extend__doc__, #define PY_LIST_EXTEND_METHODDEF \ {"extend", (PyCFunction)py_list_extend, METH_O, py_list_extend__doc__}, +static PyObject * +py_list_extend_impl(PyListObject *self, PyObject *iterable); + +static PyObject * +py_list_extend(PyListObject *self, PyObject *iterable) +{ + PyObject *return_value = NULL; + + 
Py_BEGIN_CRITICAL_SECTION2(self, iterable); + return_value = py_list_extend_impl(self, iterable); + Py_END_CRITICAL_SECTION2(); + + return return_value; +} + PyDoc_STRVAR(list_pop__doc__, "pop($self, index=-1, /)\n" "--\n" @@ -143,7 +188,9 @@ list_pop(PyListObject *self, PyObject *const *args, Py_ssize_t nargs) index = ival; } skip_optional: + Py_BEGIN_CRITICAL_SECTION(self); return_value = list_pop_impl(self, index); + Py_END_CRITICAL_SECTION(); exit: return return_value; @@ -242,7 +289,13 @@ list_reverse_impl(PyListObject *self); static PyObject * list_reverse(PyListObject *self, PyObject *Py_UNUSED(ignored)) { - return list_reverse_impl(self); + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = list_reverse_impl(self); + Py_END_CRITICAL_SECTION(); + + return return_value; } PyDoc_STRVAR(list_index__doc__, @@ -311,6 +364,21 @@ PyDoc_STRVAR(list_remove__doc__, #define LIST_REMOVE_METHODDEF \ {"remove", (PyCFunction)list_remove, METH_O, list_remove__doc__}, +static PyObject * +list_remove_impl(PyListObject *self, PyObject *value); + +static PyObject * +list_remove(PyListObject *self, PyObject *value) +{ + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = list_remove_impl(self, value); + Py_END_CRITICAL_SECTION(); + + return return_value; +} + PyDoc_STRVAR(list___init____doc__, "list(iterable=(), /)\n" "--\n" @@ -384,4 +452,4 @@ list___reversed__(PyListObject *self, PyObject *Py_UNUSED(ignored)) { return list___reversed___impl(self); } -/*[clinic end generated code: output=f2d7b63119464ff4 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=26dfb2c9846348f9 input=a9049054013a1b77]*/ diff --git a/Objects/complexobject.c b/Objects/complexobject.c index 0e96f54584677c..d8b0e84da5df4a 100644 --- a/Objects/complexobject.c +++ b/Objects/complexobject.c @@ -256,26 +256,51 @@ PyComplex_FromDoubles(double real, double imag) return PyComplex_FromCComplex(c); } +static PyObject * try_complex_special_method(PyObject *); + double PyComplex_RealAsDouble(PyObject *op) { + double real = -1.0; + if (PyComplex_Check(op)) { - return ((PyComplexObject *)op)->cval.real; + real = ((PyComplexObject *)op)->cval.real; } else { - return PyFloat_AsDouble(op); + PyObject* newop = try_complex_special_method(op); + if (newop) { + real = ((PyComplexObject *)newop)->cval.real; + Py_DECREF(newop); + } else if (!PyErr_Occurred()) { + real = PyFloat_AsDouble(op); + } } + + return real; } double PyComplex_ImagAsDouble(PyObject *op) { + double imag = -1.0; + if (PyComplex_Check(op)) { - return ((PyComplexObject *)op)->cval.imag; + imag = ((PyComplexObject *)op)->cval.imag; } else { - return 0.0; + PyObject* newop = try_complex_special_method(op); + if (newop) { + imag = ((PyComplexObject *)newop)->cval.imag; + Py_DECREF(newop); + } else if (!PyErr_Occurred()) { + PyFloat_AsDouble(op); + if (!PyErr_Occurred()) { + imag = 0.0; + } + } } + + return imag; } static PyObject * diff --git a/Objects/floatobject.c b/Objects/floatobject.c index 364cf1553bb5d4..912c450a5e1055 100644 --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -26,17 +26,13 @@ class float "PyObject *" "&PyFloat_Type" #include "clinic/floatobject.c.h" -#ifndef PyFloat_MAXFREELIST -# define PyFloat_MAXFREELIST 100 -#endif - - -#if PyFloat_MAXFREELIST > 0 +#ifdef WITH_FREELISTS static struct _Py_float_state * get_float_state(void) { - PyInterpreterState *interp = _PyInterpreterState_GET(); - return &interp->float_state; + _PyFreeListState *state = _PyFreeListState_GET(); + 
assert(state != NULL); + return &state->float_state; } #endif @@ -132,14 +128,10 @@ PyObject * PyFloat_FromDouble(double fval) { PyFloatObject *op; -#if PyFloat_MAXFREELIST > 0 +#ifdef WITH_FREELISTS struct _Py_float_state *state = get_float_state(); op = state->free_list; if (op != NULL) { -#ifdef Py_DEBUG - // PyFloat_FromDouble() must not be called after _PyFloat_Fini() - assert(state->numfree != -1); -#endif state->free_list = (PyFloatObject *) Py_TYPE(op); state->numfree--; OBJECT_STAT_INC(from_freelist); @@ -252,13 +244,9 @@ _PyFloat_ExactDealloc(PyObject *obj) { assert(PyFloat_CheckExact(obj)); PyFloatObject *op = (PyFloatObject *)obj; -#if PyFloat_MAXFREELIST > 0 +#ifdef WITH_FREELISTS struct _Py_float_state *state = get_float_state(); -#ifdef Py_DEBUG - // float_dealloc() must not be called after _PyFloat_Fini() - assert(state->numfree != -1); -#endif - if (state->numfree >= PyFloat_MAXFREELIST) { + if (state->numfree >= PyFloat_MAXFREELIST || state->numfree < 0) { PyObject_Free(op); return; } @@ -275,7 +263,7 @@ static void float_dealloc(PyObject *op) { assert(PyFloat_Check(op)); -#if PyFloat_MAXFREELIST > 0 +#ifdef WITH_FREELISTS if (PyFloat_CheckExact(op)) { _PyFloat_ExactDealloc(op); } @@ -2002,10 +1990,10 @@ _PyFloat_InitTypes(PyInterpreterState *interp) } void -_PyFloat_ClearFreeList(PyInterpreterState *interp) +_PyFloat_ClearFreeList(_PyFreeListState *freelist_state, int is_finalization) { -#if PyFloat_MAXFREELIST > 0 - struct _Py_float_state *state = &interp->float_state; +#ifdef WITH_FREELISTS + struct _Py_float_state *state = &freelist_state->float_state; PyFloatObject *f = state->free_list; while (f != NULL) { PyFloatObject *next = (PyFloatObject*) Py_TYPE(f); @@ -2013,18 +2001,19 @@ _PyFloat_ClearFreeList(PyInterpreterState *interp) f = next; } state->free_list = NULL; - state->numfree = 0; + if (is_finalization) { + state->numfree = -1; + } + else { + state->numfree = 0; + } #endif } void -_PyFloat_Fini(PyInterpreterState *interp) +_PyFloat_Fini(_PyFreeListState *state) { - _PyFloat_ClearFreeList(interp); -#if defined(Py_DEBUG) && PyFloat_MAXFREELIST > 0 - struct _Py_float_state *state = &interp->float_state; - state->numfree = -1; -#endif + _PyFloat_ClearFreeList(state, 1); } void @@ -2037,7 +2026,7 @@ _PyFloat_FiniType(PyInterpreterState *interp) void _PyFloat_DebugMallocStats(FILE *out) { -#if PyFloat_MAXFREELIST > 0 +#ifdef WITH_FREELISTS struct _Py_float_state *state = get_float_state(); _PyDebugAllocatorStats(out, "free PyFloatObject", diff --git a/Objects/funcobject.c b/Objects/funcobject.c index 4d88dd2229295d..2620dc69bfd79b 100644 --- a/Objects/funcobject.c +++ b/Objects/funcobject.c @@ -809,14 +809,17 @@ function.__new__ as func_new a tuple that specifies the default argument values closure: object = None a tuple that supplies the bindings for free variables + kwdefaults: object = None + a dictionary that specifies the default keyword argument values Create a function object. 
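Editor's note: the clinic declaration above (together with the matching `func_new_impl` change further down) lets `function.__new__` accept keyword-only defaults directly. A short sketch of what that enables through `types.FunctionType`, assuming the constructor keeps these parameter names:

```python
import types

def template(a, *, b=0):
    return a + b

# Rebuild the function with different keyword-only defaults up front,
# instead of assigning __kwdefaults__ after construction.
clone = types.FunctionType(
    template.__code__,
    globals(),
    name="clone",
    argdefs=template.__defaults__,
    closure=template.__closure__,
    kwdefaults={"b": 10},
)
print(clone(1))              # 11
print(clone.__kwdefaults__)  # {'b': 10}
```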
[clinic start generated code]*/ static PyObject * func_new_impl(PyTypeObject *type, PyCodeObject *code, PyObject *globals, - PyObject *name, PyObject *defaults, PyObject *closure) -/*[clinic end generated code: output=99c6d9da3a24e3be input=93611752fc2daf11]*/ + PyObject *name, PyObject *defaults, PyObject *closure, + PyObject *kwdefaults) +/*[clinic end generated code: output=de72f4c22ac57144 input=20c9c9f04ad2d3f2]*/ { PyFunctionObject *newfunc; Py_ssize_t nclosure; @@ -843,6 +846,11 @@ func_new_impl(PyTypeObject *type, PyCodeObject *code, PyObject *globals, return NULL; } } + if (kwdefaults != Py_None && !PyDict_Check(kwdefaults)) { + PyErr_SetString(PyExc_TypeError, + "arg 6 (kwdefaults) must be None or dict"); + return NULL; + } /* check that the closure is well-formed */ nclosure = closure == Py_None ? 0 : PyTuple_GET_SIZE(closure); @@ -879,6 +887,9 @@ func_new_impl(PyTypeObject *type, PyCodeObject *code, PyObject *globals, if (closure != Py_None) { newfunc->func_closure = Py_NewRef(closure); } + if (kwdefaults != Py_None) { + newfunc->func_kwdefaults = Py_NewRef(kwdefaults); + } return (PyObject *)newfunc; } diff --git a/Objects/genobject.c b/Objects/genobject.c index 9614713883741c..e9aeb7ab9a9fa8 100644 --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -6,6 +6,7 @@ #include "pycore_call.h" // _PyObject_CallNoArgs() #include "pycore_ceval.h" // _PyEval_EvalFrame() #include "pycore_frame.h" // _PyInterpreterFrame +#include "pycore_gc.h" // _PyGC_CLEAR_FINALIZED() #include "pycore_genobject.h" // struct _Py_async_gen_state #include "pycore_modsupport.h" // _PyArg_CheckPositional() #include "pycore_object.h" // _PyObject_GC_UNTRACK() @@ -1627,12 +1628,12 @@ PyTypeObject PyAsyncGen_Type = { }; -#if _PyAsyncGen_MAXFREELIST > 0 +#ifdef WITH_FREELISTS static struct _Py_async_gen_state * get_async_gen_state(void) { - PyInterpreterState *interp = _PyInterpreterState_GET(); - return &interp->async_gen; + _PyFreeListState *state = _PyFreeListState_GET(); + return &state->async_gen_state; } #endif @@ -1655,36 +1656,36 @@ PyAsyncGen_New(PyFrameObject *f, PyObject *name, PyObject *qualname) void -_PyAsyncGen_ClearFreeLists(PyInterpreterState *interp) +_PyAsyncGen_ClearFreeLists(_PyFreeListState *freelist_state, int is_finalization) { -#if _PyAsyncGen_MAXFREELIST > 0 - struct _Py_async_gen_state *state = &interp->async_gen; +#ifdef WITH_FREELISTS + struct _Py_async_gen_state *state = &freelist_state->async_gen_state; - while (state->value_numfree) { + while (state->value_numfree > 0) { _PyAsyncGenWrappedValue *o; o = state->value_freelist[--state->value_numfree]; assert(_PyAsyncGenWrappedValue_CheckExact(o)); PyObject_GC_Del(o); } - while (state->asend_numfree) { + while (state->asend_numfree > 0) { PyAsyncGenASend *o; o = state->asend_freelist[--state->asend_numfree]; assert(Py_IS_TYPE(o, &_PyAsyncGenASend_Type)); PyObject_GC_Del(o); } + + if (is_finalization) { + state->value_numfree = -1; + state->asend_numfree = -1; + } #endif } void -_PyAsyncGen_Fini(PyInterpreterState *interp) +_PyAsyncGen_Fini(_PyFreeListState *state) { - _PyAsyncGen_ClearFreeLists(interp); -#if defined(Py_DEBUG) && _PyAsyncGen_MAXFREELIST > 0 - struct _Py_async_gen_state *state = &interp->async_gen; - state->value_numfree = -1; - state->asend_numfree = -1; -#endif + _PyAsyncGen_ClearFreeLists(state, 1); } @@ -1731,14 +1732,11 @@ async_gen_asend_dealloc(PyAsyncGenASend *o) _PyObject_GC_UNTRACK((PyObject *)o); Py_CLEAR(o->ags_gen); Py_CLEAR(o->ags_sendval); -#if _PyAsyncGen_MAXFREELIST > 0 +#ifdef WITH_FREELISTS struct 
_Py_async_gen_state *state = get_async_gen_state(); -#ifdef Py_DEBUG - // async_gen_asend_dealloc() must not be called after _PyAsyncGen_Fini() - assert(state->asend_numfree != -1); -#endif - if (state->asend_numfree < _PyAsyncGen_MAXFREELIST) { + if (state->asend_numfree >= 0 && state->asend_numfree < _PyAsyncGen_MAXFREELIST) { assert(PyAsyncGenASend_CheckExact(o)); + _PyGC_CLEAR_FINALIZED((PyObject *)o); state->asend_freelist[state->asend_numfree++] = o; } else @@ -1904,13 +1902,9 @@ static PyObject * async_gen_asend_new(PyAsyncGenObject *gen, PyObject *sendval) { PyAsyncGenASend *o; -#if _PyAsyncGen_MAXFREELIST > 0 +#ifdef WITH_FREELISTS struct _Py_async_gen_state *state = get_async_gen_state(); -#ifdef Py_DEBUG - // async_gen_asend_new() must not be called after _PyAsyncGen_Fini() - assert(state->asend_numfree != -1); -#endif - if (state->asend_numfree) { + if (state->asend_numfree > 0) { state->asend_numfree--; o = state->asend_freelist[state->asend_numfree]; _Py_NewReference((PyObject *)o); @@ -1943,13 +1937,9 @@ async_gen_wrapped_val_dealloc(_PyAsyncGenWrappedValue *o) { _PyObject_GC_UNTRACK((PyObject *)o); Py_CLEAR(o->agw_val); -#if _PyAsyncGen_MAXFREELIST > 0 +#ifdef WITH_FREELISTS struct _Py_async_gen_state *state = get_async_gen_state(); -#ifdef Py_DEBUG - // async_gen_wrapped_val_dealloc() must not be called after _PyAsyncGen_Fini() - assert(state->value_numfree != -1); -#endif - if (state->value_numfree < _PyAsyncGen_MAXFREELIST) { + if (state->value_numfree >= 0 && state->value_numfree < _PyAsyncGen_MAXFREELIST) { assert(_PyAsyncGenWrappedValue_CheckExact(o)); state->value_freelist[state->value_numfree++] = o; OBJECT_STAT_INC(to_freelist); @@ -2020,13 +2010,9 @@ _PyAsyncGenValueWrapperNew(PyThreadState *tstate, PyObject *val) _PyAsyncGenWrappedValue *o; assert(val); -#if _PyAsyncGen_MAXFREELIST > 0 - struct _Py_async_gen_state *state = &tstate->interp->async_gen; -#ifdef Py_DEBUG - // _PyAsyncGenValueWrapperNew() must not be called after _PyAsyncGen_Fini() - assert(state->value_numfree != -1); -#endif - if (state->value_numfree) { +#ifdef WITH_FREELISTS + struct _Py_async_gen_state *state = get_async_gen_state(); + if (state->value_numfree > 0) { state->value_numfree--; o = state->value_freelist[state->value_numfree]; OBJECT_STAT_INC(from_freelist); diff --git a/Objects/listobject.c b/Objects/listobject.c index dfb8cd2b106511..401d1026133f4e 100644 --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -20,12 +20,13 @@ class list "PyListObject *" "&PyList_Type" _Py_DECLARE_STR(list_err, "list index out of range"); -#if PyList_MAXFREELIST > 0 +#ifdef WITH_FREELISTS static struct _Py_list_state * get_list_state(void) { - PyInterpreterState *interp = _PyInterpreterState_GET(); - return &interp->list; + _PyFreeListState *state = _PyFreeListState_GET(); + assert(state != NULL); + return &state->list_state; } #endif @@ -120,33 +121,32 @@ list_preallocate_exact(PyListObject *self, Py_ssize_t size) } void -_PyList_ClearFreeList(PyInterpreterState *interp) +_PyList_ClearFreeList(_PyFreeListState *freelist_state, int is_finalization) { -#if PyList_MAXFREELIST > 0 - struct _Py_list_state *state = &interp->list; - while (state->numfree) { +#ifdef WITH_FREELISTS + struct _Py_list_state *state = &freelist_state->list_state; + while (state->numfree > 0) { PyListObject *op = state->free_list[--state->numfree]; assert(PyList_CheckExact(op)); PyObject_GC_Del(op); } + if (is_finalization) { + state->numfree = -1; + } #endif } void -_PyList_Fini(PyInterpreterState *interp) 
+_PyList_Fini(_PyFreeListState *state) { - _PyList_ClearFreeList(interp); -#if defined(Py_DEBUG) && PyList_MAXFREELIST > 0 - struct _Py_list_state *state = &interp->list; - state->numfree = -1; -#endif + _PyList_ClearFreeList(state, 1); } /* Print summary info about the state of the optimized allocator */ void _PyList_DebugMallocStats(FILE *out) { -#if PyList_MAXFREELIST > 0 +#ifdef WITH_FREELISTS struct _Py_list_state *state = get_list_state(); _PyDebugAllocatorStats(out, "free PyListObject", @@ -164,13 +164,9 @@ PyList_New(Py_ssize_t size) return NULL; } -#if PyList_MAXFREELIST > 0 +#ifdef WITH_FREELISTS struct _Py_list_state *state = get_list_state(); -#ifdef Py_DEBUG - // PyList_New() must not be called after _PyList_Fini() - assert(state->numfree != -1); -#endif - if (PyList_MAXFREELIST && state->numfree) { + if (PyList_MAXFREELIST && state->numfree > 0) { state->numfree--; op = state->free_list[state->numfree]; OBJECT_STAT_INC(from_freelist); @@ -267,15 +263,22 @@ PyList_SetItem(PyObject *op, Py_ssize_t i, PyErr_BadInternalCall(); return -1; } - if (!valid_index(i, Py_SIZE(op))) { + int ret; + PyListObject *self = ((PyListObject *)op); + Py_BEGIN_CRITICAL_SECTION(self); + if (!valid_index(i, Py_SIZE(self))) { Py_XDECREF(newitem); PyErr_SetString(PyExc_IndexError, "list assignment index out of range"); - return -1; + ret = -1; + goto end; } - p = ((PyListObject *)op) -> ob_item + i; + p = self->ob_item + i; Py_XSETREF(*p, newitem); - return 0; + ret = 0; +end: + Py_END_CRITICAL_SECTION(); + return ret; } static int @@ -313,7 +316,12 @@ PyList_Insert(PyObject *op, Py_ssize_t where, PyObject *newitem) PyErr_BadInternalCall(); return -1; } - return ins1((PyListObject *)op, where, newitem); + PyListObject *self = (PyListObject *)op; + int err; + Py_BEGIN_CRITICAL_SECTION(self); + err = ins1(self, where, newitem); + Py_END_CRITICAL_SECTION(); + return err; } /* internal, used by _PyList_AppendTakeRef */ @@ -360,13 +368,9 @@ list_dealloc(PyObject *self) } PyMem_Free(op->ob_item); } -#if PyList_MAXFREELIST > 0 +#ifdef WITH_FREELISTS struct _Py_list_state *state = get_list_state(); -#ifdef Py_DEBUG - // list_dealloc() must not be called after _PyList_Fini() - assert(state->numfree != -1); -#endif - if (state->numfree < PyList_MAXFREELIST && PyList_CheckExact(op)) { + if (state->numfree < PyList_MAXFREELIST && state->numfree >= 0 && PyList_CheckExact(op)) { state->free_list[state->numfree++] = op; OBJECT_STAT_INC(to_freelist); } @@ -798,6 +802,7 @@ list_ass_item(PyObject *aa, Py_ssize_t i, PyObject *v) } /*[clinic input] +@critical_section list.insert index: Py_ssize_t @@ -809,14 +814,16 @@ Insert object before index. static PyObject * list_insert_impl(PyListObject *self, Py_ssize_t index, PyObject *object) -/*[clinic end generated code: output=7f35e32f60c8cb78 input=858514cf894c7eab]*/ +/*[clinic end generated code: output=7f35e32f60c8cb78 input=b1987ca998a4ae2d]*/ { - if (ins1(self, index, object) == 0) + if (ins1(self, index, object) == 0) { Py_RETURN_NONE; + } return NULL; } /*[clinic input] +@critical_section list.clear as py_list_clear Remove all items from list. @@ -824,13 +831,14 @@ Remove all items from list. static PyObject * py_list_clear_impl(PyListObject *self) -/*[clinic end generated code: output=83726743807e3518 input=378711e10f545c53]*/ +/*[clinic end generated code: output=83726743807e3518 input=e285b7f09051a9ba]*/ { list_clear(self); Py_RETURN_NONE; } /*[clinic input] +@critical_section list.copy Return a shallow copy of the list. 
@@ -838,12 +846,13 @@ Return a shallow copy of the list. static PyObject * list_copy_impl(PyListObject *self) -/*[clinic end generated code: output=ec6b72d6209d418e input=6453ab159e84771f]*/ +/*[clinic end generated code: output=ec6b72d6209d418e input=81c54b0c7bb4f73d]*/ { return list_slice(self, 0, Py_SIZE(self)); } /*[clinic input] +@critical_section list.append object: object @@ -853,8 +862,8 @@ Append object to the end of the list. [clinic start generated code]*/ static PyObject * -list_append(PyListObject *self, PyObject *object) -/*[clinic end generated code: output=7c096003a29c0eae input=43a3fe48a7066e91]*/ +list_append_impl(PyListObject *self, PyObject *object) +/*[clinic end generated code: output=78423561d92ed405 input=122b0853de54004f]*/ { if (_PyList_AppendTakeRef(self, Py_NewRef(object)) < 0) { return NULL; @@ -1012,6 +1021,7 @@ _PyList_Extend(PyListObject *self, PyObject *iterable) /*[clinic input] +@critical_section self iterable list.extend as py_list_extend iterable: object @@ -1021,8 +1031,8 @@ Extend list by appending elements from the iterable. [clinic start generated code]*/ static PyObject * -py_list_extend(PyListObject *self, PyObject *iterable) -/*[clinic end generated code: output=b8e0bff0ceae2abd input=9a8376a8633ed3ba]*/ +py_list_extend_impl(PyListObject *self, PyObject *iterable) +/*[clinic end generated code: output=a2f115ceace2c845 input=1d42175414e1a5f3]*/ { return _PyList_Extend(self, iterable); } @@ -1062,6 +1072,7 @@ list_inplace_concat(PyObject *_self, PyObject *other) } /*[clinic input] +@critical_section list.pop index: Py_ssize_t = -1 @@ -1074,7 +1085,7 @@ Raises IndexError if list is empty or index is out of range. static PyObject * list_pop_impl(PyListObject *self, Py_ssize_t index) -/*[clinic end generated code: output=6bd69dcb3f17eca8 input=b83675976f329e6f]*/ +/*[clinic end generated code: output=6bd69dcb3f17eca8 input=c269141068ae4b8f]*/ { PyObject *v; int status; @@ -2593,6 +2604,7 @@ PyList_Sort(PyObject *v) } /*[clinic input] +@critical_section list.reverse Reverse *IN PLACE*. @@ -2600,7 +2612,7 @@ Reverse *IN PLACE*. static PyObject * list_reverse_impl(PyListObject *self) -/*[clinic end generated code: output=482544fc451abea9 input=eefd4c3ae1bc9887]*/ +/*[clinic end generated code: output=482544fc451abea9 input=04ac8e0c6a66e4d9]*/ { if (Py_SIZE(self) > 1) reverse_slice(self->ob_item, self->ob_item + Py_SIZE(self)); @@ -2616,8 +2628,11 @@ PyList_Reverse(PyObject *v) PyErr_BadInternalCall(); return -1; } - if (Py_SIZE(self) > 1) + Py_BEGIN_CRITICAL_SECTION(self); + if (Py_SIZE(self) > 1) { reverse_slice(self->ob_item, self->ob_item + Py_SIZE(self)); + } + Py_END_CRITICAL_SECTION() return 0; } @@ -2628,7 +2643,12 @@ PyList_AsTuple(PyObject *v) PyErr_BadInternalCall(); return NULL; } - return _PyTuple_FromArray(((PyListObject *)v)->ob_item, Py_SIZE(v)); + PyObject *ret; + PyListObject *self = (PyListObject *)v; + Py_BEGIN_CRITICAL_SECTION(self); + ret = _PyTuple_FromArray(self->ob_item, Py_SIZE(v)); + Py_END_CRITICAL_SECTION(); + return ret; } PyObject * @@ -2730,6 +2750,7 @@ list_count(PyListObject *self, PyObject *value) } /*[clinic input] +@critical_section list.remove value: object @@ -2741,8 +2762,8 @@ Raises ValueError if the value is not present. 
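Editor's note: the generated wrappers and `@critical_section` annotations in these list hunks make each method acquire the per-object lock in the free-threaded build; with the GIL nothing observable changes. A sketch of the invariant that locking is meant to preserve, assuming a `--disable-gil` interpreter (thread and item counts are arbitrary):

```python
import threading

items = []

def append_many(n=10_000):
    for i in range(n):
        items.append(i)   # each append runs under the per-list lock

threads = [threading.Thread(target=append_many) for _ in range(4)]
for t in threads:
    t.start()
for t in threads:
    t.join()

assert len(items) == 4 * 10_000   # no lost appends even without the GIL
```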
[clinic start generated code]*/ static PyObject * -list_remove(PyListObject *self, PyObject *value) -/*[clinic end generated code: output=f087e1951a5e30d1 input=2dc2ba5bb2fb1f82]*/ +list_remove_impl(PyListObject *self, PyObject *value) +/*[clinic end generated code: output=b9b76a6633b18778 input=26c813dbb95aa93b]*/ { Py_ssize_t i; @@ -2776,7 +2797,7 @@ list_traverse(PyObject *self, visitproc visit, void *arg) } static PyObject * -list_richcompare(PyObject *v, PyObject *w, int op) +list_richcompare_impl(PyObject *v, PyObject *w, int op) { PyListObject *vl, *wl; Py_ssize_t i; @@ -2831,6 +2852,16 @@ list_richcompare(PyObject *v, PyObject *w, int op) return PyObject_RichCompare(vl->ob_item[i], wl->ob_item[i], op); } +static PyObject * +list_richcompare(PyObject *v, PyObject *w, int op) +{ + PyObject *ret; + Py_BEGIN_CRITICAL_SECTION2(v, w); + ret = list_richcompare_impl(v, w, op); + Py_END_CRITICAL_SECTION2() + return ret; +} + /*[clinic input] list.__init__ diff --git a/Objects/longobject.c b/Objects/longobject.c index fae70dd13bb18a..e655ba19e8f1c1 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -6171,7 +6171,7 @@ long_vectorcall(PyObject *type, PyObject * const*args, return long_new_impl(_PyType_CAST(type), args[0], args[1]); default: return PyErr_Format(PyExc_TypeError, - "int expected at most 2 argument%s, got %zd", + "int expected at most 2 arguments, got %zd", nargs); } } diff --git a/Objects/mimalloc/heap.c b/Objects/mimalloc/heap.c index c50e3b05590b6f..6468999a7d5766 100644 --- a/Objects/mimalloc/heap.c +++ b/Objects/mimalloc/heap.c @@ -209,7 +209,7 @@ mi_heap_t* mi_heap_get_backing(void) { return bheap; } -void _mi_heap_init_ex(mi_heap_t* heap, mi_tld_t* tld, mi_arena_id_t arena_id) +void _mi_heap_init_ex(mi_heap_t* heap, mi_tld_t* tld, mi_arena_id_t arena_id, bool no_reclaim, uint8_t tag) { _mi_memcpy_aligned(heap, &_mi_heap_empty, sizeof(mi_heap_t)); heap->tld = tld; @@ -224,17 +224,19 @@ void _mi_heap_init_ex(mi_heap_t* heap, mi_tld_t* tld, mi_arena_id_t arena_id) heap->cookie = _mi_heap_random_next(heap) | 1; heap->keys[0] = _mi_heap_random_next(heap); heap->keys[1] = _mi_heap_random_next(heap); + heap->no_reclaim = no_reclaim; + heap->tag = tag; + // push on the thread local heaps list + heap->next = heap->tld->heaps; + heap->tld->heaps = heap; } mi_decl_nodiscard mi_heap_t* mi_heap_new_in_arena(mi_arena_id_t arena_id) { mi_heap_t* bheap = mi_heap_get_backing(); mi_heap_t* heap = mi_heap_malloc_tp(bheap, mi_heap_t); // todo: OS allocate in secure mode? if (heap == NULL) return NULL; - _mi_heap_init_ex(heap, bheap->tld, arena_id); - heap->no_reclaim = true; // don't reclaim abandoned pages or otherwise destroy is unsafe - // push on the thread local heaps list - heap->next = heap->tld->heaps; - heap->tld->heaps = heap; + // don't reclaim abandoned pages or otherwise destroy is unsafe + _mi_heap_init_ex(heap, bheap->tld, arena_id, true, 0); return heap; } diff --git a/Objects/mimalloc/init.c b/Objects/mimalloc/init.c index 376e14b49b7c7b..5897f0512f8ef9 100644 --- a/Objects/mimalloc/init.c +++ b/Objects/mimalloc/init.c @@ -14,7 +14,7 @@ terms of the MIT license. 
A copy of the license can be found in the file // Empty page used to initialize the small free pages array const mi_page_t _mi_page_empty = { - 0, false, false, false, + 0, false, false, false, 0, 0, // capacity 0, // reserved capacity { 0 }, // flags @@ -121,7 +121,8 @@ mi_decl_cache_align const mi_heap_t _mi_heap_empty = { 0, // page count MI_BIN_FULL, 0, // page retired min/max NULL, // next - false + false, + 0 }; #define tld_empty_stats ((mi_stats_t*)((uint8_t*)&tld_empty + offsetof(mi_tld_t,stats))) @@ -131,7 +132,7 @@ mi_decl_cache_align static const mi_tld_t tld_empty = { 0, false, NULL, NULL, - { MI_SEGMENT_SPAN_QUEUES_EMPTY, 0, 0, 0, 0, tld_empty_stats, tld_empty_os }, // segments + { MI_SEGMENT_SPAN_QUEUES_EMPTY, 0, 0, 0, 0, tld_empty_stats, tld_empty_os, &_mi_abandoned_default }, // segments { 0, tld_empty_stats }, // os { MI_STATS_NULL } // stats }; @@ -148,7 +149,7 @@ extern mi_heap_t _mi_heap_main; static mi_tld_t tld_main = { 0, false, &_mi_heap_main, & _mi_heap_main, - { MI_SEGMENT_SPAN_QUEUES_EMPTY, 0, 0, 0, 0, &tld_main.stats, &tld_main.os }, // segments + { MI_SEGMENT_SPAN_QUEUES_EMPTY, 0, 0, 0, 0, &tld_main.stats, &tld_main.os, &_mi_abandoned_default }, // segments { 0, &tld_main.stats }, // os { MI_STATS_NULL } // stats }; @@ -298,7 +299,7 @@ static bool _mi_heap_init(void) { if (td == NULL) return false; _mi_tld_init(&td->tld, &td->heap); - _mi_heap_init_ex(&td->heap, &td->tld, _mi_arena_id_none()); + _mi_heap_init_ex(&td->heap, &td->tld, _mi_arena_id_none(), false, 0); _mi_heap_set_default_direct(&td->heap); } return false; @@ -308,9 +309,9 @@ void _mi_tld_init(mi_tld_t* tld, mi_heap_t* bheap) { _mi_memcpy_aligned(tld, &tld_empty, sizeof(*tld)); tld->segments.stats = &tld->stats; tld->segments.os = &tld->os; + tld->segments.abandoned = &_mi_abandoned_default; tld->os.stats = &tld->stats; tld->heap_backing = bheap; - tld->heaps = bheap; } // Free the thread local default heap (called from `mi_thread_done`) diff --git a/Objects/mimalloc/page.c b/Objects/mimalloc/page.c index 4610cf27afff75..8f0ce920156e04 100644 --- a/Objects/mimalloc/page.c +++ b/Objects/mimalloc/page.c @@ -660,6 +660,7 @@ static void mi_page_init(mi_heap_t* heap, mi_page_t* page, size_t block_size, mi mi_assert_internal(block_size > 0); // set fields mi_page_set_heap(page, heap); + page->tag = heap->tag; page->xblock_size = (block_size < MI_HUGE_BLOCK_SIZE ? (uint32_t)block_size : MI_HUGE_BLOCK_SIZE); // initialize before _mi_segment_page_start size_t page_size; const void* page_start = _mi_segment_page_start(segment, page, &page_size); diff --git a/Objects/mimalloc/prim/unix/prim.c b/Objects/mimalloc/prim/unix/prim.c index 2152017e01fb43..ec8447ab40d70c 100644 --- a/Objects/mimalloc/prim/unix/prim.c +++ b/Objects/mimalloc/prim/unix/prim.c @@ -50,7 +50,7 @@ terms of the MIT license. 
A copy of the license can be found in the file #include #endif -#if !defined(__HAIKU__) && !defined(__APPLE__) && !defined(__CYGWIN__) && !defined(_AIX) && !defined(__FreeBSD__) +#if !defined(__HAIKU__) && !defined(__APPLE__) && !defined(__CYGWIN__) && !defined(_AIX) && !defined(__FreeBSD__) && !defined(__sun) #define MI_HAS_SYSCALL_H #include #endif @@ -76,7 +76,7 @@ static int mi_prim_access(const char *fpath, int mode) { return syscall(SYS_access,fpath,mode); } -#elif !defined(__APPLE__) && !defined(_AIX) && !defined(__FreeBSD__) // avoid unused warnings +#elif !defined(__APPLE__) && !defined(_AIX) && !defined(__FreeBSD__) && !defined(__sun) // avoid unused warnings static int mi_prim_open(const char* fpath, int open_flags) { return open(fpath,open_flags); @@ -310,7 +310,7 @@ static void* unix_mmap(void* addr, size_t size, size_t try_alignment, int protec #elif defined(__sun) if (allow_large && _mi_os_use_large_page(size, try_alignment)) { struct memcntl_mha cmd = {0}; - cmd.mha_pagesize = large_os_page_size; + cmd.mha_pagesize = 2*MI_MiB; cmd.mha_cmd = MHA_MAPSIZE_VA; if (memcntl((caddr_t)p, size, MC_HAT_ADVISE, (caddr_t)&cmd, 0, 0) == 0) { *is_large = true; diff --git a/Objects/mimalloc/segment.c b/Objects/mimalloc/segment.c index 033e0f97c36c14..d9b39b03fd6c5f 100644 --- a/Objects/mimalloc/segment.c +++ b/Objects/mimalloc/segment.c @@ -395,7 +395,7 @@ static void mi_segment_os_free(mi_segment_t* segment, mi_segments_tld_t* tld) { const size_t size = mi_segment_size(segment); const size_t csize = _mi_commit_mask_committed_size(&segment->commit_mask, size); - _mi_abandoned_await_readers(); // wait until safe to free + _mi_abandoned_await_readers(tld->abandoned); // wait until safe to free _mi_arena_free(segment, mi_segment_size(segment), csize, segment->memid, tld->stats); } @@ -1059,7 +1059,6 @@ would be spread among all other segments in the arenas. // Use the bottom 20-bits (on 64-bit) of the aligned segment pointers // to put in a tag that increments on update to avoid the A-B-A problem. #define MI_TAGGED_MASK MI_SEGMENT_MASK -typedef uintptr_t mi_tagged_segment_t; static mi_segment_t* mi_tagged_segment_ptr(mi_tagged_segment_t ts) { return (mi_segment_t*)(ts & ~MI_TAGGED_MASK); @@ -1071,55 +1070,40 @@ static mi_tagged_segment_t mi_tagged_segment(mi_segment_t* segment, mi_tagged_se return ((uintptr_t)segment | tag); } -// This is a list of visited abandoned pages that were full at the time. -// this list migrates to `abandoned` when that becomes NULL. The use of -// this list reduces contention and the rate at which segments are visited. -static mi_decl_cache_align _Atomic(mi_segment_t*) abandoned_visited; // = NULL - -// The abandoned page list (tagged as it supports pop) -static mi_decl_cache_align _Atomic(mi_tagged_segment_t) abandoned; // = NULL - -// Maintain these for debug purposes (these counts may be a bit off) -static mi_decl_cache_align _Atomic(size_t) abandoned_count; -static mi_decl_cache_align _Atomic(size_t) abandoned_visited_count; - -// We also maintain a count of current readers of the abandoned list -// in order to prevent resetting/decommitting segment memory if it might -// still be read. 
-static mi_decl_cache_align _Atomic(size_t) abandoned_readers; // = 0 +mi_abandoned_pool_t _mi_abandoned_default; // Push on the visited list -static void mi_abandoned_visited_push(mi_segment_t* segment) { +static void mi_abandoned_visited_push(mi_abandoned_pool_t *pool, mi_segment_t* segment) { mi_assert_internal(segment->thread_id == 0); mi_assert_internal(mi_atomic_load_ptr_relaxed(mi_segment_t,&segment->abandoned_next) == NULL); mi_assert_internal(segment->next == NULL); mi_assert_internal(segment->used > 0); - mi_segment_t* anext = mi_atomic_load_ptr_relaxed(mi_segment_t, &abandoned_visited); + mi_segment_t* anext = mi_atomic_load_ptr_relaxed(mi_segment_t, &pool->abandoned_visited); do { mi_atomic_store_ptr_release(mi_segment_t, &segment->abandoned_next, anext); - } while (!mi_atomic_cas_ptr_weak_release(mi_segment_t, &abandoned_visited, &anext, segment)); - mi_atomic_increment_relaxed(&abandoned_visited_count); + } while (!mi_atomic_cas_ptr_weak_release(mi_segment_t, &pool->abandoned_visited, &anext, segment)); + mi_atomic_increment_relaxed(&pool->abandoned_visited_count); } // Move the visited list to the abandoned list. -static bool mi_abandoned_visited_revisit(void) +static bool mi_abandoned_visited_revisit(mi_abandoned_pool_t *pool) { // quick check if the visited list is empty - if (mi_atomic_load_ptr_relaxed(mi_segment_t, &abandoned_visited) == NULL) return false; + if (mi_atomic_load_ptr_relaxed(mi_segment_t, &pool->abandoned_visited) == NULL) return false; // grab the whole visited list - mi_segment_t* first = mi_atomic_exchange_ptr_acq_rel(mi_segment_t, &abandoned_visited, NULL); + mi_segment_t* first = mi_atomic_exchange_ptr_acq_rel(mi_segment_t, &pool->abandoned_visited, NULL); if (first == NULL) return false; // first try to swap directly if the abandoned list happens to be NULL mi_tagged_segment_t afirst; - mi_tagged_segment_t ts = mi_atomic_load_relaxed(&abandoned); + mi_tagged_segment_t ts = mi_atomic_load_relaxed(&pool->abandoned); if (mi_tagged_segment_ptr(ts)==NULL) { - size_t count = mi_atomic_load_relaxed(&abandoned_visited_count); + size_t count = mi_atomic_load_relaxed(&pool->abandoned_visited_count); afirst = mi_tagged_segment(first, ts); - if (mi_atomic_cas_strong_acq_rel(&abandoned, &ts, afirst)) { - mi_atomic_add_relaxed(&abandoned_count, count); - mi_atomic_sub_relaxed(&abandoned_visited_count, count); + if (mi_atomic_cas_strong_acq_rel(&pool->abandoned, &ts, afirst)) { + mi_atomic_add_relaxed(&pool->abandoned_count, count); + mi_atomic_sub_relaxed(&pool->abandoned_visited_count, count); return true; } } @@ -1133,51 +1117,51 @@ static bool mi_abandoned_visited_revisit(void) // and atomically prepend to the abandoned list // (no need to increase the readers as we don't access the abandoned segments) - mi_tagged_segment_t anext = mi_atomic_load_relaxed(&abandoned); + mi_tagged_segment_t anext = mi_atomic_load_relaxed(&pool->abandoned); size_t count; do { - count = mi_atomic_load_relaxed(&abandoned_visited_count); + count = mi_atomic_load_relaxed(&pool->abandoned_visited_count); mi_atomic_store_ptr_release(mi_segment_t, &last->abandoned_next, mi_tagged_segment_ptr(anext)); afirst = mi_tagged_segment(first, anext); - } while (!mi_atomic_cas_weak_release(&abandoned, &anext, afirst)); - mi_atomic_add_relaxed(&abandoned_count, count); - mi_atomic_sub_relaxed(&abandoned_visited_count, count); + } while (!mi_atomic_cas_weak_release(&pool->abandoned, &anext, afirst)); + mi_atomic_add_relaxed(&pool->abandoned_count, count); + 
mi_atomic_sub_relaxed(&pool->abandoned_visited_count, count); return true; } // Push on the abandoned list. -static void mi_abandoned_push(mi_segment_t* segment) { +static void mi_abandoned_push(mi_abandoned_pool_t* pool, mi_segment_t* segment) { mi_assert_internal(segment->thread_id == 0); mi_assert_internal(mi_atomic_load_ptr_relaxed(mi_segment_t, &segment->abandoned_next) == NULL); mi_assert_internal(segment->next == NULL); mi_assert_internal(segment->used > 0); mi_tagged_segment_t next; - mi_tagged_segment_t ts = mi_atomic_load_relaxed(&abandoned); + mi_tagged_segment_t ts = mi_atomic_load_relaxed(&pool->abandoned); do { mi_atomic_store_ptr_release(mi_segment_t, &segment->abandoned_next, mi_tagged_segment_ptr(ts)); next = mi_tagged_segment(segment, ts); - } while (!mi_atomic_cas_weak_release(&abandoned, &ts, next)); - mi_atomic_increment_relaxed(&abandoned_count); + } while (!mi_atomic_cas_weak_release(&pool->abandoned, &ts, next)); + mi_atomic_increment_relaxed(&pool->abandoned_count); } // Wait until there are no more pending reads on segments that used to be in the abandoned list // called for example from `arena.c` before decommitting -void _mi_abandoned_await_readers(void) { +void _mi_abandoned_await_readers(mi_abandoned_pool_t* pool) { size_t n; do { - n = mi_atomic_load_acquire(&abandoned_readers); + n = mi_atomic_load_acquire(&pool->abandoned_readers); if (n != 0) mi_atomic_yield(); } while (n != 0); } // Pop from the abandoned list -static mi_segment_t* mi_abandoned_pop(void) { +static mi_segment_t* mi_abandoned_pop(mi_abandoned_pool_t* pool) { mi_segment_t* segment; // Check efficiently if it is empty (or if the visited list needs to be moved) - mi_tagged_segment_t ts = mi_atomic_load_relaxed(&abandoned); + mi_tagged_segment_t ts = mi_atomic_load_relaxed(&pool->abandoned); segment = mi_tagged_segment_ptr(ts); if mi_likely(segment == NULL) { - if mi_likely(!mi_abandoned_visited_revisit()) { // try to swap in the visited list on NULL + if mi_likely(!mi_abandoned_visited_revisit(pool)) { // try to swap in the visited list on NULL return NULL; } } @@ -1186,20 +1170,20 @@ static mi_segment_t* mi_abandoned_pop(void) { // a segment to be decommitted while a read is still pending, // and a tagged pointer to prevent A-B-A link corruption. 
// (this is called from `region.c:_mi_mem_free` for example) - mi_atomic_increment_relaxed(&abandoned_readers); // ensure no segment gets decommitted + mi_atomic_increment_relaxed(&pool->abandoned_readers); // ensure no segment gets decommitted mi_tagged_segment_t next = 0; - ts = mi_atomic_load_acquire(&abandoned); + ts = mi_atomic_load_acquire(&pool->abandoned); do { segment = mi_tagged_segment_ptr(ts); if (segment != NULL) { mi_segment_t* anext = mi_atomic_load_ptr_relaxed(mi_segment_t, &segment->abandoned_next); next = mi_tagged_segment(anext, ts); // note: reads the segment's `abandoned_next` field so should not be decommitted } - } while (segment != NULL && !mi_atomic_cas_weak_acq_rel(&abandoned, &ts, next)); - mi_atomic_decrement_relaxed(&abandoned_readers); // release reader lock + } while (segment != NULL && !mi_atomic_cas_weak_acq_rel(&pool->abandoned, &ts, next)); + mi_atomic_decrement_relaxed(&pool->abandoned_readers); // release reader lock if (segment != NULL) { mi_atomic_store_ptr_release(mi_segment_t, &segment->abandoned_next, NULL); - mi_atomic_decrement_relaxed(&abandoned_count); + mi_atomic_decrement_relaxed(&pool->abandoned_count); } return segment; } @@ -1237,7 +1221,7 @@ static void mi_segment_abandon(mi_segment_t* segment, mi_segments_tld_t* tld) { segment->thread_id = 0; mi_atomic_store_ptr_release(mi_segment_t, &segment->abandoned_next, NULL); segment->abandoned_visits = 1; // from 0 to 1 to signify it is abandoned - mi_abandoned_push(segment); + mi_abandoned_push(tld->abandoned, segment); } void _mi_segment_page_abandon(mi_page_t* page, mi_segments_tld_t* tld) { @@ -1315,6 +1299,18 @@ static bool mi_segment_check_free(mi_segment_t* segment, size_t slices_needed, s return has_page; } +static mi_heap_t* mi_heap_by_tag(mi_heap_t* heap, uint8_t tag) { + if (heap->tag == tag) { + return heap; + } + for (mi_heap_t *curr = heap->tld->heaps; curr != NULL; curr = curr->next) { + if (curr->tag == tag) { + return curr; + } + } + return NULL; +} + // Reclaim an abandoned segment; returns NULL if the segment was freed // set `right_page_reclaimed` to `true` if it reclaimed a page of the right `block_size` that was not full. 
static mi_segment_t* mi_segment_reclaim(mi_segment_t* segment, mi_heap_t* heap, size_t requested_block_size, bool* right_page_reclaimed, mi_segments_tld_t* tld) { @@ -1337,6 +1333,7 @@ static mi_segment_t* mi_segment_reclaim(mi_segment_t* segment, mi_heap_t* heap, if (mi_slice_is_used(slice)) { // in use: reclaim the page in our heap mi_page_t* page = mi_slice_to_page(slice); + mi_heap_t* target_heap = mi_heap_by_tag(heap, page->tag); mi_assert_internal(page->is_committed); mi_assert_internal(mi_page_thread_free_flag(page)==MI_NEVER_DELAYED_FREE); mi_assert_internal(mi_page_heap(page) == NULL); @@ -1344,7 +1341,7 @@ static mi_segment_t* mi_segment_reclaim(mi_segment_t* segment, mi_heap_t* heap, _mi_stat_decrease(&tld->stats->pages_abandoned, 1); segment->abandoned--; // set the heap again and allow delayed free again - mi_page_set_heap(page, heap); + mi_page_set_heap(page, target_heap); _mi_page_use_delayed_free(page, MI_USE_DELAYED_FREE, true); // override never (after heap is set) _mi_page_free_collect(page, false); // ensure used count is up to date if (mi_page_all_free(page)) { @@ -1353,8 +1350,9 @@ static mi_segment_t* mi_segment_reclaim(mi_segment_t* segment, mi_heap_t* heap, } else { // otherwise reclaim it into the heap - _mi_page_reclaim(heap, page); - if (requested_block_size == page->xblock_size && mi_page_has_any_available(page)) { + _mi_page_reclaim(target_heap, page); + if (requested_block_size == page->xblock_size && mi_page_has_any_available(page) && + heap == target_heap) { if (right_page_reclaimed != NULL) { *right_page_reclaimed = true; } } } @@ -1381,7 +1379,7 @@ static mi_segment_t* mi_segment_reclaim(mi_segment_t* segment, mi_heap_t* heap, void _mi_abandoned_reclaim_all(mi_heap_t* heap, mi_segments_tld_t* tld) { mi_segment_t* segment; - while ((segment = mi_abandoned_pop()) != NULL) { + while ((segment = mi_abandoned_pop(tld->abandoned)) != NULL) { mi_segment_reclaim(segment, heap, 0, NULL, tld); } } @@ -1391,7 +1389,7 @@ static mi_segment_t* mi_segment_try_reclaim(mi_heap_t* heap, size_t needed_slice *reclaimed = false; mi_segment_t* segment; long max_tries = mi_option_get_clamp(mi_option_max_segment_reclaim, 8, 1024); // limit the work to bound allocation times - while ((max_tries-- > 0) && ((segment = mi_abandoned_pop()) != NULL)) { + while ((max_tries-- > 0) && ((segment = mi_abandoned_pop(tld->abandoned)) != NULL)) { segment->abandoned_visits++; // todo: an arena exclusive heap will potentially visit many abandoned unsuitable segments // and push them into the visited list and use many tries. Perhaps we can skip non-suitable ones in a better way? @@ -1418,7 +1416,7 @@ static mi_segment_t* mi_segment_try_reclaim(mi_heap_t* heap, size_t needed_slice else { // otherwise, push on the visited list so it gets not looked at too quickly again mi_segment_try_purge(segment, true /* force? */, tld->stats); // force purge if needed as we may not visit soon again - mi_abandoned_visited_push(segment); + mi_abandoned_visited_push(tld->abandoned, segment); } } return NULL; @@ -1428,11 +1426,12 @@ static mi_segment_t* mi_segment_try_reclaim(mi_heap_t* heap, size_t needed_slice void _mi_abandoned_collect(mi_heap_t* heap, bool force, mi_segments_tld_t* tld) { mi_segment_t* segment; + mi_abandoned_pool_t* pool = tld->abandoned; int max_tries = (force ? 
16*1024 : 1024); // limit latency if (force) { - mi_abandoned_visited_revisit(); + mi_abandoned_visited_revisit(pool); } - while ((max_tries-- > 0) && ((segment = mi_abandoned_pop()) != NULL)) { + while ((max_tries-- > 0) && ((segment = mi_abandoned_pop(pool)) != NULL)) { mi_segment_check_free(segment,0,0,tld); // try to free up pages (due to concurrent frees) if (segment->used == 0) { // free the segment (by forced reclaim) to make it available to other threads. @@ -1444,7 +1443,7 @@ void _mi_abandoned_collect(mi_heap_t* heap, bool force, mi_segments_tld_t* tld) // otherwise, purge if needed and push on the visited list // note: forced purge can be expensive if many threads are destroyed/created as in mstress. mi_segment_try_purge(segment, force, tld->stats); - mi_abandoned_visited_push(segment); + mi_abandoned_visited_push(pool, segment); } } } diff --git a/Objects/object.c b/Objects/object.c index d970a26756173b..587c5528c01345 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -509,9 +509,7 @@ PyObject_CallFinalizerFromDealloc(PyObject *self) /* tp_finalize resurrected it! Make it look like the original Py_DECREF * never happened. */ - Py_ssize_t refcnt = Py_REFCNT(self); - _Py_NewReferenceNoTotal(self); - Py_SET_REFCNT(self, refcnt); + _Py_ResurrectReference(self); _PyObject_ASSERT(self, (!_PyType_IS_GC(Py_TYPE(self)) @@ -2389,6 +2387,17 @@ _Py_NewReferenceNoTotal(PyObject *op) new_reference(op); } +void +_Py_ResurrectReference(PyObject *op) +{ + if (_PyRuntime.tracemalloc.config.tracing) { + _PyTraceMalloc_NewReference(op); + } +#ifdef Py_TRACE_REFS + _Py_AddToAllObjects(op); +#endif +} + #ifdef Py_TRACE_REFS void diff --git a/Objects/object_layout.md b/Objects/object_layout.md index 3f7d72eb22f224..4f379bed8d77e2 100644 --- a/Objects/object_layout.md +++ b/Objects/object_layout.md @@ -7,7 +7,7 @@ Each Python object starts with two fields: * ob_refcnt * ob_type -which the form the header common to all Python objects, for all versions, +which form the header common to all Python objects, for all versions, and hold the reference count and class of the object, respectively. ## Pre-header @@ -36,7 +36,7 @@ and the ``dict`` field points to the dictionary. ## 3.12 pre-header -In 3.12 the pointer to the list of weak references is added to the +In 3.12, the pointer to the list of weak references is added to the pre-header. In order to make space for it, the ``dict`` and ``values`` pointers are combined into a single tagged pointer: @@ -62,7 +62,7 @@ the values pointer, to enable the (legacy) C-API function * ob_refcnt * ob_type -For a "normal" Python object, that is one that doesn't inherit from a builtin +For a "normal" Python object, one that doesn't inherit from a builtin class or have slots, the header and pre-header form the entire object. 
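Editor's note: since the object_layout.md hunk above describes a "normal" instance as nothing but the header plus the pre-header, with `__dict__` and the values array reached through a tagged pointer, here is a small, non-authoritative way to poke at that from Python (exact sizes vary by version and build):

```python
import sys

class Plain:
    pass

p = Plain()

# Only the header/pre-header is allocated up front; the instance dictionary
# is materialized lazily when it is first needed.
print(sys.getsizeof(object()), sys.getsizeof(p))
p.x = 1              # first attribute write populates the values storage
print(p.__dict__)    # {'x': 1}
```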
![Layout of "normal" object in 3.12](./object_layout_312.png) diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c index 883adcb1c19b6e..16d5bcb53e7eb7 100644 --- a/Objects/obmalloc.c +++ b/Objects/obmalloc.c @@ -439,12 +439,14 @@ set_up_allocators_unlocked(PyMemAllocatorName allocator) (void)set_default_allocator_unlocked(PYMEM_DOMAIN_RAW, pydebug, NULL); (void)set_default_allocator_unlocked(PYMEM_DOMAIN_MEM, pydebug, NULL); (void)set_default_allocator_unlocked(PYMEM_DOMAIN_OBJ, pydebug, NULL); + _PyRuntime.allocators.is_debug_enabled = pydebug; break; case PYMEM_ALLOCATOR_DEBUG: (void)set_default_allocator_unlocked(PYMEM_DOMAIN_RAW, 1, NULL); (void)set_default_allocator_unlocked(PYMEM_DOMAIN_MEM, 1, NULL); (void)set_default_allocator_unlocked(PYMEM_DOMAIN_OBJ, 1, NULL); + _PyRuntime.allocators.is_debug_enabled = 1; break; #ifdef WITH_PYMALLOC @@ -458,7 +460,9 @@ set_up_allocators_unlocked(PyMemAllocatorName allocator) set_allocator_unlocked(PYMEM_DOMAIN_MEM, &pymalloc); set_allocator_unlocked(PYMEM_DOMAIN_OBJ, &pymalloc); - if (allocator == PYMEM_ALLOCATOR_PYMALLOC_DEBUG) { + int is_debug = (allocator == PYMEM_ALLOCATOR_PYMALLOC_DEBUG); + _PyRuntime.allocators.is_debug_enabled = is_debug; + if (is_debug) { set_up_debug_hooks_unlocked(); } break; @@ -477,7 +481,9 @@ set_up_allocators_unlocked(PyMemAllocatorName allocator) PyMemAllocatorEx objmalloc = MIMALLOC_OBJALLOC; set_allocator_unlocked(PYMEM_DOMAIN_OBJ, &objmalloc); - if (allocator == PYMEM_ALLOCATOR_MIMALLOC_DEBUG) { + int is_debug = (allocator == PYMEM_ALLOCATOR_MIMALLOC_DEBUG); + _PyRuntime.allocators.is_debug_enabled = is_debug; + if (is_debug) { set_up_debug_hooks_unlocked(); } @@ -493,7 +499,9 @@ set_up_allocators_unlocked(PyMemAllocatorName allocator) set_allocator_unlocked(PYMEM_DOMAIN_MEM, &malloc_alloc); set_allocator_unlocked(PYMEM_DOMAIN_OBJ, &malloc_alloc); - if (allocator == PYMEM_ALLOCATOR_MALLOC_DEBUG) { + int is_debug = (allocator == PYMEM_ALLOCATOR_MALLOC_DEBUG); + _PyRuntime.allocators.is_debug_enabled = is_debug; + if (is_debug) { set_up_debug_hooks_unlocked(); } break; @@ -604,13 +612,13 @@ _PyMem_GetCurrentAllocatorName(void) } -#ifdef WITH_PYMALLOC -static int +int _PyMem_DebugEnabled(void) { - return (_PyObject.malloc == _PyMem_DebugMalloc); + return _PyRuntime.allocators.is_debug_enabled; } +#ifdef WITH_PYMALLOC static int _PyMem_PymallocEnabled(void) { @@ -626,12 +634,16 @@ _PyMem_PymallocEnabled(void) static int _PyMem_MimallocEnabled(void) { +#ifdef Py_GIL_DISABLED + return 1; +#else if (_PyMem_DebugEnabled()) { return (_PyMem_Debug.obj.alloc.malloc == _PyObject_MiMalloc); } else { return (_PyObject.malloc == _PyObject_MiMalloc); } +#endif } #endif // WITH_MIMALLOC @@ -691,6 +703,7 @@ set_up_debug_hooks_unlocked(void) set_up_debug_hooks_domain_unlocked(PYMEM_DOMAIN_RAW); set_up_debug_hooks_domain_unlocked(PYMEM_DOMAIN_MEM); set_up_debug_hooks_domain_unlocked(PYMEM_DOMAIN_OBJ); + _PyRuntime.allocators.is_debug_enabled = 1; } void @@ -1041,20 +1054,35 @@ static bool count_blocks( *(size_t *)allocated_blocks += area->used; return 1; } + +static Py_ssize_t +get_mimalloc_allocated_blocks(PyInterpreterState *interp) +{ + size_t allocated_blocks = 0; +#ifdef Py_GIL_DISABLED + for (PyThreadState *t = interp->threads.head; t != NULL; t = t->next) { + _PyThreadStateImpl *tstate = (_PyThreadStateImpl *)t; + for (int i = 0; i < _Py_MIMALLOC_HEAP_COUNT; i++) { + mi_heap_t *heap = &tstate->mimalloc.heaps[i]; + mi_heap_visit_blocks(heap, false, &count_blocks, &allocated_blocks); + } + } + // TODO(sgross): count blocks 
in abandoned segments. +#else + // TODO(sgross): this only counts the current thread's blocks. + mi_heap_t *heap = mi_heap_get_default(); + mi_heap_visit_blocks(heap, false, &count_blocks, &allocated_blocks); +#endif + return allocated_blocks; +} #endif Py_ssize_t _PyInterpreterState_GetAllocatedBlocks(PyInterpreterState *interp) { #ifdef WITH_MIMALLOC - // TODO(sgross): this only counts the current thread's blocks. if (_PyMem_MimallocEnabled()) { - size_t allocated_blocks = 0; - - mi_heap_t *heap = mi_heap_get_default(); - mi_heap_visit_blocks(heap, false, &count_blocks, &allocated_blocks); - - return allocated_blocks; + return get_mimalloc_allocated_blocks(interp); } #endif @@ -1105,7 +1133,7 @@ _PyInterpreterState_FinalizeAllocatedBlocks(PyInterpreterState *interp) static Py_ssize_t get_num_global_allocated_blocks(_PyRuntimeState *); -/* We preserve the number of blockss leaked during runtime finalization, +/* We preserve the number of blocks leaked during runtime finalization, so they can be reported if the runtime is initialized again. */ // XXX We don't lose any information by dropping this, // so we should consider doing so. @@ -1121,16 +1149,6 @@ _Py_FinalizeAllocatedBlocks(_PyRuntimeState *runtime) static Py_ssize_t get_num_global_allocated_blocks(_PyRuntimeState *runtime) { -#ifdef WITH_MIMALLOC - if (_PyMem_MimallocEnabled()) { - size_t allocated_blocks = 0; - - mi_heap_t *heap = mi_heap_get_default(); - mi_heap_visit_blocks(heap, false, &count_blocks, &allocated_blocks); - - return allocated_blocks; - } -#endif Py_ssize_t total = 0; if (_PyRuntimeState_GetFinalizing(runtime) != NULL) { PyInterpreterState *interp = _PyInterpreterState_Main(); diff --git a/Objects/sliceobject.c b/Objects/sliceobject.c index a3ed0c096d84ed..440c1da30620c3 100644 --- a/Objects/sliceobject.c +++ b/Objects/sliceobject.c @@ -103,14 +103,22 @@ PyObject _Py_EllipsisObject = _PyObject_HEAD_INIT(&PyEllipsis_Type); /* Slice object implementation */ - -void _PySlice_Fini(PyInterpreterState *interp) +void _PySlice_ClearCache(_PyFreeListState *state) { - PySliceObject *obj = interp->slice_cache; +#ifdef WITH_FREELISTS + PySliceObject *obj = state->slice_state.slice_cache; if (obj != NULL) { - interp->slice_cache = NULL; + state->slice_state.slice_cache = NULL; PyObject_GC_Del(obj); } +#endif +} + +void _PySlice_Fini(_PyFreeListState *state) +{ +#ifdef WITH_FREELISTS + _PySlice_ClearCache(state); +#endif } /* start, stop, and step are python objects with None indicating no @@ -121,15 +129,17 @@ static PySliceObject * _PyBuildSlice_Consume2(PyObject *start, PyObject *stop, PyObject *step) { assert(start != NULL && stop != NULL && step != NULL); - - PyInterpreterState *interp = _PyInterpreterState_GET(); PySliceObject *obj; - if (interp->slice_cache != NULL) { - obj = interp->slice_cache; - interp->slice_cache = NULL; +#ifdef WITH_FREELISTS + _PyFreeListState *state = _PyFreeListState_GET(); + if (state->slice_state.slice_cache != NULL) { + obj = state->slice_state.slice_cache; + state->slice_state.slice_cache = NULL; _Py_NewReference((PyObject *)obj); } - else { + else +#endif + { obj = PyObject_GC_New(PySliceObject, &PySlice_Type); if (obj == NULL) { goto error; @@ -354,15 +364,18 @@ Create a slice object. This is used for extended slicing (e.g. 
a[0:10:2])."); static void slice_dealloc(PySliceObject *r) { - PyInterpreterState *interp = _PyInterpreterState_GET(); _PyObject_GC_UNTRACK(r); Py_DECREF(r->step); Py_DECREF(r->start); Py_DECREF(r->stop); - if (interp->slice_cache == NULL) { - interp->slice_cache = r; +#ifdef WITH_FREELISTS + _PyFreeListState *state = _PyFreeListState_GET(); + if (state->slice_state.slice_cache == NULL) { + state->slice_state.slice_cache = r; } - else { + else +#endif + { PyObject_GC_Del(r); } } diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index d567839c5e3a0b..e1b8e4004c6163 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -962,18 +962,18 @@ _PyTuple_Resize(PyObject **pv, Py_ssize_t newsize) } -static void maybe_freelist_clear(PyInterpreterState *, int); +static void maybe_freelist_clear(_PyFreeListState *, int); void -_PyTuple_Fini(PyInterpreterState *interp) +_PyTuple_Fini(_PyFreeListState *state) { - maybe_freelist_clear(interp, 1); + maybe_freelist_clear(state, 1); } void -_PyTuple_ClearFreeList(PyInterpreterState *interp) +_PyTuple_ClearFreeList(_PyFreeListState *state, int is_finalization) { - maybe_freelist_clear(interp, 0); + maybe_freelist_clear(state, is_finalization); } /*********************** Tuple Iterator **************************/ @@ -1125,18 +1125,14 @@ tuple_iter(PyObject *seq) * freelists * *************/ -#define STATE (interp->tuple) +#define STATE (state->tuple_state) #define FREELIST_FINALIZED (STATE.numfree[0] < 0) static inline PyTupleObject * maybe_freelist_pop(Py_ssize_t size) { -#if PyTuple_NFREELISTS > 0 - PyInterpreterState *interp = _PyInterpreterState_GET(); -#ifdef Py_DEBUG - /* maybe_freelist_pop() must not be called after maybe_freelist_fini(). */ - assert(!FREELIST_FINALIZED); -#endif +#ifdef WITH_FREELISTS + _PyFreeListState *state = _PyFreeListState_GET(); if (size == 0) { return NULL; } @@ -1169,18 +1165,15 @@ maybe_freelist_pop(Py_ssize_t size) static inline int maybe_freelist_push(PyTupleObject *op) { -#if PyTuple_NFREELISTS > 0 - PyInterpreterState *interp = _PyInterpreterState_GET(); -#ifdef Py_DEBUG - /* maybe_freelist_push() must not be called after maybe_freelist_fini(). 
*/ - assert(!FREELIST_FINALIZED); -#endif +#ifdef WITH_FREELISTS + _PyFreeListState *state = _PyFreeListState_GET(); if (Py_SIZE(op) == 0) { return 0; } Py_ssize_t index = Py_SIZE(op) - 1; if (index < PyTuple_NFREELISTS && STATE.numfree[index] < PyTuple_MAXFREELIST + && STATE.numfree[index] >= 0 && Py_IS_TYPE(op, &PyTuple_Type)) { /* op is the head of a linked list, with the first item @@ -1196,9 +1189,9 @@ maybe_freelist_push(PyTupleObject *op) } static void -maybe_freelist_clear(PyInterpreterState *interp, int fini) +maybe_freelist_clear(_PyFreeListState *state, int fini) { -#if PyTuple_NFREELISTS > 0 +#ifdef WITH_FREELISTS for (Py_ssize_t i = 0; i < PyTuple_NFREELISTS; i++) { PyTupleObject *p = STATE.free_list[i]; STATE.free_list[i] = NULL; @@ -1216,8 +1209,8 @@ maybe_freelist_clear(PyInterpreterState *interp, int fini) void _PyTuple_DebugMallocStats(FILE *out) { -#if PyTuple_NFREELISTS > 0 - PyInterpreterState *interp = _PyInterpreterState_GET(); +#ifdef WITH_FREELISTS + _PyFreeListState *state = _PyFreeListState_GET(); for (int i = 0; i < PyTuple_NFREELISTS; i++) { int len = i + 1; char buf[128]; diff --git a/PC/config.c b/PC/config.c index f754ce6d3b057b..5eff2f5b2310bb 100644 --- a/PC/config.c +++ b/PC/config.c @@ -44,9 +44,7 @@ extern PyObject* PyInit__collections(void); extern PyObject* PyInit__heapq(void); extern PyObject* PyInit__bisect(void); extern PyObject* PyInit__symtable(void); -#if defined(MS_WINDOWS_DESKTOP) || defined(MS_WINDOWS_GAMES) extern PyObject* PyInit_mmap(void); -#endif extern PyObject* PyInit__csv(void); extern PyObject* PyInit__sre(void); #if defined(MS_WINDOWS_DESKTOP) || defined(MS_WINDOWS_SYSTEM) || defined(MS_WINDOWS_GAMES) diff --git a/PC/layout/main.py b/PC/layout/main.py index accfd51dd978fb..d176b272f1c19d 100644 --- a/PC/layout/main.py +++ b/PC/layout/main.py @@ -41,7 +41,7 @@ VENV_DIRS_ONLY = FileNameSet("venv", "ensurepip") -EXCLUDE_FROM_PYDS = FileStemSet("python*", "pyshellext", "vcruntime*") +EXCLUDE_FROM_DLLS = FileStemSet("python*", "pyshellext", "vcruntime*") EXCLUDE_FROM_LIB = FileNameSet("*.pyc", "__pycache__", "*.pickle") EXCLUDE_FROM_PACKAGED_LIB = FileNameSet("readme.txt") EXCLUDE_FROM_COMPILE = FileNameSet("badsyntax_*", "bad_*") @@ -126,9 +126,9 @@ def in_build(f, dest="", new_name=None): n = new_name or n src = ns.build / f if ns.debug and src not in REQUIRED_DLLS: - if not src.stem.endswith("_d"): + if not "_d." in src.name: src = src.parent / (src.stem + "_d" + src.suffix) - if not n.endswith("_d"): + if "_d." not in f: n += "_d" f = n + "." + x yield dest + n + "." + x, src @@ -141,17 +141,45 @@ def in_build(f, dest="", new_name=None): if lib.is_file(): yield "libs/" + n + ".lib", lib + source = "python.exe" + sourcew = "pythonw.exe" + alias = [ + "python", + "python{}".format(VER_MAJOR) if ns.include_alias3 else "", + "python{}".format(VER_DOT) if ns.include_alias3x else "", + ] + aliasw = [ + "pythonw", + "pythonw{}".format(VER_MAJOR) if ns.include_alias3 else "", + "pythonw{}".format(VER_DOT) if ns.include_alias3x else "", + ] if ns.include_appxmanifest: - yield from in_build("python_uwp.exe", new_name="python{}".format(VER_DOT)) - yield from in_build("pythonw_uwp.exe", new_name="pythonw{}".format(VER_DOT)) - # For backwards compatibility, but we don't reference these ourselves. 
- yield from in_build("python_uwp.exe", new_name="python") - yield from in_build("pythonw_uwp.exe", new_name="pythonw") + source = "python_uwp.exe" + sourcew = "pythonw_uwp.exe" + elif ns.include_freethreaded: + source = "python{}t.exe".format(VER_DOT) + sourcew = "pythonw{}t.exe".format(VER_DOT) + if not ns.include_alias: + alias = [] + aliasw = [] + alias.extend([ + "python{}t".format(VER_DOT), + "python{}t".format(VER_MAJOR) if ns.include_alias3 else None, + ]) + aliasw.extend([ + "pythonw{}t".format(VER_DOT), + "pythonw{}t".format(VER_MAJOR) if ns.include_alias3 else None, + ]) + + for a in filter(None, alias): + yield from in_build(source, new_name=a) + for a in filter(None, aliasw): + yield from in_build(sourcew, new_name=a) + + if ns.include_freethreaded: + yield from in_build(FREETHREADED_PYTHON_DLL_NAME) else: - yield from in_build("python.exe", new_name="python") - yield from in_build("pythonw.exe", new_name="pythonw") - - yield from in_build(PYTHON_DLL_NAME) + yield from in_build(PYTHON_DLL_NAME) if ns.include_launchers and ns.include_appxmanifest: if ns.include_pip: @@ -160,7 +188,10 @@ def in_build(f, dest="", new_name=None): yield from in_build("pythonw_uwp.exe", new_name="idle{}".format(VER_DOT)) if ns.include_stable: - yield from in_build(PYTHON_STABLE_DLL_NAME) + if ns.include_freethreaded: + yield from in_build(FREETHREADED_PYTHON_STABLE_DLL_NAME) + else: + yield from in_build(PYTHON_STABLE_DLL_NAME) found_any = False for dest, src in rglob(ns.build, "vcruntime*.dll"): @@ -171,16 +202,28 @@ def in_build(f, dest="", new_name=None): yield "LICENSE.txt", ns.build / "LICENSE.txt" - for dest, src in rglob(ns.build, ("*.pyd", "*.dll")): - if src.stem.endswith("_d") != bool(ns.debug) and src not in REQUIRED_DLLS: - continue - if src in EXCLUDE_FROM_PYDS: - continue + for dest, src in rglob(ns.build, "*.pyd"): + if ns.include_freethreaded: + if not src.match("*.cp*t-win*.pyd"): + continue + if bool(src.match("*_d.cp*.pyd")) != bool(ns.debug): + continue + else: + if src.match("*.cp*t-win*.pyd"): + continue + if bool(src.match("*_d.pyd")) != bool(ns.debug): + continue if src in TEST_PYDS_ONLY and not ns.include_tests: continue if src in TCLTK_PYDS_ONLY and not ns.include_tcltk: continue + yield from in_build(src.name, dest="" if ns.flat_dlls else "DLLs/") + for dest, src in rglob(ns.build, "*.dll"): + if src.stem.endswith("_d") != bool(ns.debug) and src not in REQUIRED_DLLS: + continue + if src in EXCLUDE_FROM_DLLS: + continue yield from in_build(src.name, dest="" if ns.flat_dlls else "DLLs/") if ns.zip_lib: @@ -191,8 +234,12 @@ def in_build(f, dest="", new_name=None): yield "Lib/{}".format(dest), src if ns.include_venv: - yield from in_build("venvlauncher.exe", "Lib/venv/scripts/nt/", "python") - yield from in_build("venvwlauncher.exe", "Lib/venv/scripts/nt/", "pythonw") + if ns.include_freethreaded: + yield from in_build("venvlaunchert.exe", "Lib/venv/scripts/nt/") + yield from in_build("venvwlaunchert.exe", "Lib/venv/scripts/nt/") + else: + yield from in_build("venvlauncher.exe", "Lib/venv/scripts/nt/") + yield from in_build("venvwlauncher.exe", "Lib/venv/scripts/nt/") if ns.include_tools: @@ -208,7 +255,6 @@ def _c(d): yield PYTHON_PTH_NAME, ns.temp / PYTHON_PTH_NAME if ns.include_dev: - for dest, src in rglob(ns.source / "Include", "**/*.h"): yield "include/{}".format(dest), src yield "include/pyconfig.h", ns.build / "pyconfig.h" @@ -552,7 +598,6 @@ def main(): ns.source = ns.source or (Path(__file__).resolve().parent.parent.parent) ns.build = ns.build or 
Path(sys.executable).parent - ns.temp = ns.temp or Path(tempfile.mkdtemp()) ns.doc_build = ns.doc_build or (ns.source / "Doc" / "build") if not ns.source.is_absolute(): ns.source = (Path.cwd() / ns.source).resolve() @@ -565,7 +610,12 @@ def main(): if ns.include_cat and not ns.include_cat.is_absolute(): ns.include_cat = (Path.cwd() / ns.include_cat).resolve() if not ns.arch: - ns.arch = "amd64" if sys.maxsize > 2 ** 32 else "win32" + if sys.winver.endswith("-arm64"): + ns.arch = "arm64" + elif sys.winver.endswith("-32"): + ns.arch = "win32" + else: + ns.arch = "amd64" if ns.copy and not ns.copy.is_absolute(): ns.copy = (Path.cwd() / ns.copy).resolve() @@ -574,6 +624,14 @@ def main(): if ns.catalog and not ns.catalog.is_absolute(): ns.catalog = (Path.cwd() / ns.catalog).resolve() + if not ns.temp: + # Put temp on a Dev Drive for speed if we're copying to one. + # If not, the regular temp dir will have to do. + if ns.copy and getattr(os.path, "isdevdrive", lambda d: False)(ns.copy): + ns.temp = ns.copy.with_name(ns.copy.name + "_temp") + else: + ns.temp = Path(tempfile.mkdtemp()) + configure_logger(ns) log_info( @@ -602,6 +660,12 @@ def main(): log_warning("Assuming --include-tcltk to support --include-idle") ns.include_tcltk = True + if not (ns.include_alias or ns.include_alias3 or ns.include_alias3x): + if ns.include_freethreaded: + ns.include_alias3x = True + else: + ns.include_alias = True + try: generate_source_files(ns) files = list(get_layout(ns)) diff --git a/PC/layout/support/constants.py b/PC/layout/support/constants.py index 8195c3dc30cdc7..ae22aa16ebfa5d 100644 --- a/PC/layout/support/constants.py +++ b/PC/layout/support/constants.py @@ -39,3 +39,6 @@ def _get_suffix(field4): PYTHON_CHM_NAME = "python{}{}{}{}.chm".format( VER_MAJOR, VER_MINOR, VER_MICRO, VER_SUFFIX ) + +FREETHREADED_PYTHON_DLL_NAME = "python{}{}t.dll".format(VER_MAJOR, VER_MINOR) +FREETHREADED_PYTHON_STABLE_DLL_NAME = "python{}t.dll".format(VER_MAJOR) diff --git a/PC/layout/support/nuspec.py b/PC/layout/support/nuspec.py index dbcb713ef9d0c0..a87e0bea049427 100644 --- a/PC/layout/support/nuspec.py +++ b/PC/layout/support/nuspec.py @@ -24,6 +24,10 @@ amd64=("64-bit", "python", "Python"), arm32=("ARM", "pythonarm", "Python (ARM)"), arm64=("ARM64", "pythonarm64", "Python (ARM64)"), + win32t=("32-bit free-threaded", "pythonx86-freethreaded", "Python (32-bit, free-threaded)"), + amd64t=("64-bit free-threaded", "python-freethreaded", "Python (free-threaded)"), + arm32t=("ARM free-threaded", "pythonarm-freethreaded", "Python (ARM, free-threaded)"), + arm64t=("ARM64 free-threaded", "pythonarm64-freethreaded", "Python (ARM64, free-threaded)"), ) if not NUSPEC_DATA["PYTHON_VERSION"]: @@ -58,7 +62,10 @@ def _get_nuspec_data_overrides(ns): - for k, v in zip(NUSPEC_PLATFORM_DATA["_keys"], NUSPEC_PLATFORM_DATA[ns.arch]): + arch = ns.arch + if ns.include_freethreaded: + arch += "t" + for k, v in zip(NUSPEC_PLATFORM_DATA["_keys"], NUSPEC_PLATFORM_DATA[arch]): ev = os.getenv("PYTHON_NUSPEC_" + k) if ev: yield k, ev diff --git a/PC/layout/support/options.py b/PC/layout/support/options.py index 60256fb32fe329..f1a8eb0b317744 100644 --- a/PC/layout/support/options.py +++ b/PC/layout/support/options.py @@ -32,6 +32,10 @@ def public(f): "nuspec": {"help": "a python.nuspec file"}, "chm": {"help": "the CHM documentation"}, "html-doc": {"help": "the HTML documentation"}, + "freethreaded": {"help": "freethreaded binaries", "not-in-all": True}, + "alias": {"help": "aliased python.exe entry-point binaries"}, + "alias3": {"help": "aliased 
python3.exe entry-point binaries"}, + "alias3x": {"help": "aliased python3.x.exe entry-point binaries"}, } @@ -47,6 +51,8 @@ def public(f): "dev", "launchers", "appxmanifest", + "alias", + "alias3x", # XXX: Disabled for now "precompile", ], }, @@ -59,9 +65,10 @@ def public(f): "venv", "props", "nuspec", + "alias", ], }, - "iot": {"help": "Windows IoT Core", "options": ["stable", "pip"]}, + "iot": {"help": "Windows IoT Core", "options": ["alias", "stable", "pip"]}, "default": { "help": "development kit package", "options": [ @@ -74,11 +81,19 @@ def public(f): "dev", "symbols", "html-doc", + "alias", ], }, "embed": { "help": "embeddable package", - "options": ["stable", "zip-lib", "flat-dlls", "underpth", "precompile"], + "options": [ + "alias", + "stable", + "zip-lib", + "flat-dlls", + "underpth", + "precompile", + ], }, } diff --git a/PC/pyconfig.h.in b/PC/pyconfig.h.in index d8f0a6be69c21a..8bbf877a5bb5ed 100644 --- a/PC/pyconfig.h.in +++ b/PC/pyconfig.h.in @@ -94,6 +94,9 @@ WIN32 is still required for the locale module. #endif #endif /* Py_BUILD_CORE || Py_BUILD_CORE_BUILTIN || Py_BUILD_CORE_MODULE */ +/* Define to 1 if you want to disable the GIL */ +#undef Py_GIL_DISABLED + /* Compiler specific defines */ /* ------------------------------------------------------------------------*/ @@ -305,8 +308,16 @@ Py_NO_ENABLE_SHARED to find out. Also support MS_NO_COREDLL for b/w compat */ /* not building the core - must be an ext */ # if defined(_MSC_VER) /* So MSVC users need not specify the .lib - file in their Makefile (other compilers are - generally taken care of by distutils.) */ + file in their Makefile */ +# if defined(Py_GIL_DISABLED) +# if defined(_DEBUG) +# pragma comment(lib,"python313t_d.lib") +# elif defined(Py_LIMITED_API) +# pragma comment(lib,"python3t.lib") +# else +# pragma comment(lib,"python313t.lib") +# endif /* _DEBUG */ +# else /* Py_GIL_DISABLED */ # if defined(_DEBUG) # pragma comment(lib,"python313_d.lib") # elif defined(Py_LIMITED_API) @@ -314,6 +325,7 @@ Py_NO_ENABLE_SHARED to find out. Also support MS_NO_COREDLL for b/w compat */ # else # pragma comment(lib,"python313.lib") # endif /* _DEBUG */ +# endif /* Py_GIL_DISABLED */ # endif /* _MSC_VER */ # endif /* Py_BUILD_CORE */ #endif /* MS_COREDLL */ @@ -739,7 +751,4 @@ Py_NO_ENABLE_SHARED to find out. Also support MS_NO_COREDLL for b/w compat */ /* Define if libssl has X509_VERIFY_PARAM_set1_host and related function */ #define HAVE_X509_VERIFY_PARAM_SET1_HOST 1 -/* Define if you want to disable the GIL */ -#undef Py_GIL_DISABLED - #endif /* !Py_CONFIG_H */ diff --git a/PC/python_ver_rc.h b/PC/python_ver_rc.h index 5b55b810cd2152..08509f96ed1db8 100644 --- a/PC/python_ver_rc.h +++ b/PC/python_ver_rc.h @@ -5,7 +5,7 @@ #include "winver.h" #define PYTHON_COMPANY "Python Software Foundation" -#define PYTHON_COPYRIGHT "Copyright \xA9 2001-2023 Python Software Foundation. Copyright \xA9 2000 BeOpen.com. Copyright \xA9 1995-2001 CNRI. Copyright \xA9 1991-1995 SMC." +#define PYTHON_COPYRIGHT "Copyright \xA9 2001-2024 Python Software Foundation. Copyright \xA9 2000 BeOpen.com. Copyright \xA9 1995-2001 CNRI. Copyright \xA9 1991-1995 SMC." #define MS_WINDOWS #include "modsupport.h" diff --git a/PC/venvlauncher.c b/PC/venvlauncher.c new file mode 100644 index 00000000000000..fe97d32e93b5f6 --- /dev/null +++ b/PC/venvlauncher.c @@ -0,0 +1,510 @@ +/* + * venv redirector for Windows + * + * This launcher looks for a nearby pyvenv.cfg to find the correct home + * directory, and then launches the original Python executable from it. 
+ * The name of this executable is passed as argv[0]. + */ + +#define __STDC_WANT_LIB_EXT1__ 1 + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#define MS_WINDOWS +#include "patchlevel.h" + +#define MAXLEN PATHCCH_MAX_CCH +#define MSGSIZE 1024 + +#define RC_NO_STD_HANDLES 100 +#define RC_CREATE_PROCESS 101 +#define RC_NO_PYTHON 103 +#define RC_NO_MEMORY 104 +#define RC_NO_VENV_CFG 106 +#define RC_BAD_VENV_CFG 107 +#define RC_NO_COMMANDLINE 108 +#define RC_INTERNAL_ERROR 109 + +// This should always be defined when we build for real, +// but it's handy to have a definition for quick testing +#ifndef EXENAME +#define EXENAME L"python.exe" +#endif + +#ifndef CFGNAME +#define CFGNAME L"pyvenv.cfg" +#endif + +static FILE * log_fp = NULL; + +void +debug(wchar_t * format, ...) +{ + va_list va; + + if (log_fp != NULL) { + wchar_t buffer[MAXLEN]; + int r = 0; + va_start(va, format); + r = vswprintf_s(buffer, MAXLEN, format, va); + va_end(va); + + if (r <= 0) { + return; + } + fwprintf(log_fp, L"%ls\n", buffer); + while (r && isspace(buffer[r])) { + buffer[r--] = L'\0'; + } + if (buffer[0]) { + OutputDebugStringW(buffer); + } + } +} + + +void +formatWinerror(int rc, wchar_t * message, int size) +{ + FormatMessageW( + FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS, + NULL, rc, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), + message, size, NULL); +} + + +void +winerror(int err, wchar_t * format, ... ) +{ + va_list va; + wchar_t message[MSGSIZE]; + wchar_t win_message[MSGSIZE]; + int len; + + if (err == 0) { + err = GetLastError(); + } + + va_start(va, format); + len = _vsnwprintf_s(message, MSGSIZE, _TRUNCATE, format, va); + va_end(va); + + formatWinerror(err, win_message, MSGSIZE); + if (len >= 0) { + _snwprintf_s(&message[len], MSGSIZE - len, _TRUNCATE, L": %ls", + win_message); + } + +#if !defined(_WINDOWS) + fwprintf(stderr, L"%ls\n", message); +#else + MessageBoxW(NULL, message, L"Python venv launcher is sorry to say ...", + MB_OK); +#endif +} + + +void +error(wchar_t * format, ... 
) +{ + va_list va; + wchar_t message[MSGSIZE]; + + va_start(va, format); + _vsnwprintf_s(message, MSGSIZE, _TRUNCATE, format, va); + va_end(va); + +#if !defined(_WINDOWS) + fwprintf(stderr, L"%ls\n", message); +#else + MessageBoxW(NULL, message, L"Python venv launcher is sorry to say ...", + MB_OK); +#endif +} + + +bool +isEnvVarSet(const wchar_t *name) +{ + /* only looking for non-empty, which means at least one character + and the null terminator */ + return GetEnvironmentVariableW(name, NULL, 0) >= 2; +} + + +bool +join(wchar_t *buffer, size_t bufferLength, const wchar_t *fragment) +{ + if (SUCCEEDED(PathCchCombineEx(buffer, bufferLength, buffer, fragment, PATHCCH_ALLOW_LONG_PATHS))) { + return true; + } + return false; +} + + +bool +split_parent(wchar_t *buffer, size_t bufferLength) +{ + return SUCCEEDED(PathCchRemoveFileSpec(buffer, bufferLength)); +} + + +/* + * Path calculation + */ + +int +calculate_pyvenvcfg_path(wchar_t *pyvenvcfg_path, size_t maxlen) +{ + if (!pyvenvcfg_path) { + error(L"invalid buffer provided"); + return RC_INTERNAL_ERROR; + } + if ((DWORD)maxlen != maxlen) { + error(L"path buffer is too large"); + return RC_INTERNAL_ERROR; + } + if (!GetModuleFileNameW(NULL, pyvenvcfg_path, (DWORD)maxlen)) { + winerror(GetLastError(), L"failed to read executable directory"); + return RC_NO_COMMANDLINE; + } + // Remove 'python.exe' from our path + if (!split_parent(pyvenvcfg_path, maxlen)) { + error(L"failed to remove segment from '%ls'", pyvenvcfg_path); + return RC_NO_COMMANDLINE; + } + // Replace with 'pyvenv.cfg' + if (!join(pyvenvcfg_path, maxlen, CFGNAME)) { + error(L"failed to append '%ls' to '%ls'", CFGNAME, pyvenvcfg_path); + return RC_NO_MEMORY; + } + // If it exists, return + if (GetFileAttributesW(pyvenvcfg_path) != INVALID_FILE_ATTRIBUTES) { + return 0; + } + // Otherwise, remove 'pyvenv.cfg' and (probably) 'Scripts' + if (!split_parent(pyvenvcfg_path, maxlen) || + !split_parent(pyvenvcfg_path, maxlen)) { + error(L"failed to remove segments from '%ls'", pyvenvcfg_path); + return RC_NO_COMMANDLINE; + } + // Replace 'pyvenv.cfg' + if (!join(pyvenvcfg_path, maxlen, CFGNAME)) { + error(L"failed to append '%ls' to '%ls'", CFGNAME, pyvenvcfg_path); + return RC_NO_MEMORY; + } + // If it exists, return + if (GetFileAttributesW(pyvenvcfg_path) != INVALID_FILE_ATTRIBUTES) { + return 0; + } + // Otherwise, we fail + winerror(GetLastError(), L"failed to locate %ls", CFGNAME); + return RC_NO_VENV_CFG; +} + + +/* + * pyvenv.cfg parsing + */ + +static int +find_home_value(const char *buffer, DWORD maxlen, const char **start, DWORD *length) +{ + if (!buffer || !start || !length) { + error(L"invalid find_home_value parameters()"); + return 0; + } + for (const char *s = strstr(buffer, "home"); + s && ((ptrdiff_t)s - (ptrdiff_t)buffer) < maxlen; + s = strstr(s + 1, "\nhome") + ) { + if (*s == '\n') { + ++s; + } + for (int i = 4; i > 0 && *s; --i, ++s); + + while (*s && iswspace(*s)) { + ++s; + } + if (*s != L'=') { + continue; + } + + do { + ++s; + } while (*s && iswspace(*s)); + + *start = s; + char *nl = strchr(s, '\n'); + if (nl) { + while (nl != s && iswspace(nl[-1])) { + --nl; + } + *length = (DWORD)((ptrdiff_t)nl - (ptrdiff_t)s); + } else { + *length = (DWORD)strlen(s); + } + return 1; + } + return 0; +} + + +int +read_home(const wchar_t *pyvenv_cfg, wchar_t *home_path, size_t maxlen) +{ + HANDLE hFile = CreateFileW(pyvenv_cfg, GENERIC_READ, + FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE, + NULL, OPEN_EXISTING, 0, NULL); + + if (hFile == INVALID_HANDLE_VALUE) { + 
winerror(GetLastError(), L"failed to open '%ls'", pyvenv_cfg); + return RC_BAD_VENV_CFG; + } + + // 8192 characters ought to be enough for anyone + // (doubled compared to the old implementation!) + char buffer[8192]; + DWORD len; + if (!ReadFile(hFile, buffer, sizeof(buffer) - 1, &len, NULL)) { + winerror(GetLastError(), L"failed to read '%ls'", pyvenv_cfg); + CloseHandle(hFile); + return RC_BAD_VENV_CFG; + } + CloseHandle(hFile); + // Ensure null termination + buffer[len] = '\0'; + + char *home; + DWORD home_len; + if (!find_home_value(buffer, sizeof(buffer), &home, &home_len)) { + error(L"no home= specified in '%ls'", pyvenv_cfg); + return RC_BAD_VENV_CFG; + } + + if ((DWORD)maxlen != maxlen) { + maxlen = 8192; + } + len = MultiByteToWideChar(CP_UTF8, 0, home, home_len, home_path, (DWORD)maxlen); + if (!len) { + winerror(GetLastError(), L"failed to decode home setting in '%ls'", pyvenv_cfg); + return RC_BAD_VENV_CFG; + } + home_path[len] = L'\0'; + + return 0; +} + + +int +locate_python(wchar_t *path, size_t maxlen) +{ + if (!join(path, maxlen, EXENAME)) { + error(L"failed to append %ls to '%ls'", EXENAME, path); + return RC_NO_MEMORY; + } + + if (GetFileAttributesW(path) == INVALID_FILE_ATTRIBUTES) { + winerror(GetLastError(), L"did not find executable at '%ls'", path); + return RC_NO_PYTHON; + } + + return 0; +} + + +int +smuggle_path() +{ + wchar_t buffer[MAXLEN]; + // We could use argv[0], but that may be wrong in certain rare cases (if the + // user is doing something weird like symlinks to venv redirectors), and + // what we _really_ want is the directory of the venv. We always copy the + // redirectors, so if we've made the venv, this will be correct. + DWORD len = GetModuleFileNameW(NULL, buffer, MAXLEN); + if (!len) { + winerror(GetLastError(), L"Failed to get own executable path"); + return RC_INTERNAL_ERROR; + } + buffer[len] = L'\0'; + debug(L"Setting __PYVENV_LAUNCHER__ = '%s'", buffer); + + if (!SetEnvironmentVariableW(L"__PYVENV_LAUNCHER__", buffer)) { + winerror(GetLastError(), L"Failed to set launcher environment"); + return RC_INTERNAL_ERROR; + } + + return 0; +} + +/* + * Process creation + */ + +static BOOL +safe_duplicate_handle(HANDLE in, HANDLE * pout, const wchar_t *name) +{ + BOOL ok; + HANDLE process = GetCurrentProcess(); + DWORD rc; + + *pout = NULL; + ok = DuplicateHandle(process, in, process, pout, 0, TRUE, + DUPLICATE_SAME_ACCESS); + if (!ok) { + rc = GetLastError(); + if (rc == ERROR_INVALID_HANDLE) { + debug(L"DuplicateHandle(%ls) returned ERROR_INVALID_HANDLE\n", name); + ok = TRUE; + } + else { + debug(L"DuplicateHandle(%ls) returned %d\n", name, rc); + } + } + return ok; +} + +static BOOL WINAPI +ctrl_c_handler(DWORD code) +{ + return TRUE; /* We just ignore all control events. */ +} + +static int +launch(const wchar_t *executable, wchar_t *cmdline) +{ + HANDLE job; + JOBOBJECT_EXTENDED_LIMIT_INFORMATION info; + DWORD rc; + BOOL ok; + STARTUPINFOW si; + PROCESS_INFORMATION pi; + +#if defined(_WINDOWS) + /* + When explorer launches a Windows (GUI) application, it displays + the "app starting" (the "pointer + hourglass") cursor for a number + of seconds, or until the app does something UI-ish (eg, creating a + window, or fetching a message). As this launcher doesn't do this + directly, that cursor remains even after the child process does these + things. We avoid that by doing a simple post+get message. 
+ See http://bugs.python.org/issue17290 + */ + MSG msg; + + PostMessage(0, 0, 0, 0); + GetMessage(&msg, 0, 0, 0); +#endif + + debug(L"run_child: about to run '%ls' with '%ls'\n", executable, cmdline); + job = CreateJobObject(NULL, NULL); + ok = QueryInformationJobObject(job, JobObjectExtendedLimitInformation, + &info, sizeof(info), &rc); + if (!ok || (rc != sizeof(info)) || !job) { + winerror(GetLastError(), L"Job information querying failed"); + return RC_CREATE_PROCESS; + } + info.BasicLimitInformation.LimitFlags |= JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE | + JOB_OBJECT_LIMIT_SILENT_BREAKAWAY_OK; + ok = SetInformationJobObject(job, JobObjectExtendedLimitInformation, &info, + sizeof(info)); + if (!ok) { + winerror(GetLastError(), L"Job information setting failed"); + return RC_CREATE_PROCESS; + } + memset(&si, 0, sizeof(si)); + GetStartupInfoW(&si); + ok = safe_duplicate_handle(GetStdHandle(STD_INPUT_HANDLE), &si.hStdInput, L"stdin"); + if (!ok) { + return RC_NO_STD_HANDLES; + } + ok = safe_duplicate_handle(GetStdHandle(STD_OUTPUT_HANDLE), &si.hStdOutput, L"stdout"); + if (!ok) { + return RC_NO_STD_HANDLES; + } + ok = safe_duplicate_handle(GetStdHandle(STD_ERROR_HANDLE), &si.hStdError, L"stderr"); + if (!ok) { + return RC_NO_STD_HANDLES; + } + + ok = SetConsoleCtrlHandler(ctrl_c_handler, TRUE); + if (!ok) { + winerror(GetLastError(), L"control handler setting failed"); + return RC_CREATE_PROCESS; + } + + si.dwFlags = STARTF_USESTDHANDLES; + ok = CreateProcessW(executable, cmdline, NULL, NULL, TRUE, + 0, NULL, NULL, &si, &pi); + if (!ok) { + winerror(GetLastError(), L"Unable to create process using '%ls'", cmdline); + return RC_CREATE_PROCESS; + } + AssignProcessToJobObject(job, pi.hProcess); + CloseHandle(pi.hThread); + WaitForSingleObjectEx(pi.hProcess, INFINITE, FALSE); + ok = GetExitCodeProcess(pi.hProcess, &rc); + if (!ok) { + winerror(GetLastError(), L"Failed to get exit code of process"); + return RC_CREATE_PROCESS; + } + debug(L"child process exit code: %d", rc); + return rc; +} + + +int +process(int argc, wchar_t ** argv) +{ + int exitCode; + wchar_t pyvenvcfg_path[MAXLEN]; + wchar_t home_path[MAXLEN]; + + if (isEnvVarSet(L"PYLAUNCHER_DEBUG")) { + setvbuf(stderr, (char *)NULL, _IONBF, 0); + log_fp = stderr; + } + + exitCode = calculate_pyvenvcfg_path(pyvenvcfg_path, MAXLEN); + if (exitCode) return exitCode; + + exitCode = read_home(pyvenvcfg_path, home_path, MAXLEN); + if (exitCode) return exitCode; + + exitCode = locate_python(home_path, MAXLEN); + if (exitCode) return exitCode; + + // We do not update argv[0] to point at the target runtime, and so we do not + // pass through our original argv[0] in an environment variable. 
+ //exitCode = smuggle_path(); + //if (exitCode) return exitCode; + + exitCode = launch(home_path, GetCommandLineW()); + return exitCode; +} + + +#if defined(_WINDOWS) + +int WINAPI wWinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, + LPWSTR lpstrCmd, int nShow) +{ + return process(__argc, __wargv); +} + +#else + +int cdecl wmain(int argc, wchar_t ** argv) +{ + return process(argc, argv); +} + +#endif diff --git a/PCbuild/_asyncio.vcxproj b/PCbuild/_asyncio.vcxproj index ed1e1bc0a420dc..76b0ffd660dba0 100644 --- a/PCbuild/_asyncio.vcxproj +++ b/PCbuild/_asyncio.vcxproj @@ -79,7 +79,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/_bz2.vcxproj b/PCbuild/_bz2.vcxproj index 3fe95fbf83993a..e0dc6ec187a08d 100644 --- a/PCbuild/_bz2.vcxproj +++ b/PCbuild/_bz2.vcxproj @@ -80,7 +80,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/_ctypes.vcxproj b/PCbuild/_ctypes.vcxproj index 253da31e9ce182..63d5fa49cd4e17 100644 --- a/PCbuild/_ctypes.vcxproj +++ b/PCbuild/_ctypes.vcxproj @@ -79,7 +79,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/_ctypes_test.vcxproj b/PCbuild/_ctypes_test.vcxproj index 8a01e743a4d86f..97354739c09834 100644 --- a/PCbuild/_ctypes_test.vcxproj +++ b/PCbuild/_ctypes_test.vcxproj @@ -80,7 +80,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/_decimal.vcxproj b/PCbuild/_decimal.vcxproj index 0916f1a2d37887..490d7df87eb1c6 100644 --- a/PCbuild/_decimal.vcxproj +++ b/PCbuild/_decimal.vcxproj @@ -79,7 +79,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/_elementtree.vcxproj b/PCbuild/_elementtree.vcxproj index 8da5244bac0cb6..8c9c0e42f7fe3e 100644 --- a/PCbuild/_elementtree.vcxproj +++ b/PCbuild/_elementtree.vcxproj @@ -79,7 +79,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/_freeze_module.vcxproj b/PCbuild/_freeze_module.vcxproj index 292bfa76519507..610581bc96cb1a 100644 --- a/PCbuild/_freeze_module.vcxproj +++ b/PCbuild/_freeze_module.vcxproj @@ -207,6 +207,9 @@ + + + diff --git a/PCbuild/_freeze_module.vcxproj.filters b/PCbuild/_freeze_module.vcxproj.filters index 1c5a6d623f4dad..3141913c043869 100644 --- a/PCbuild/_freeze_module.vcxproj.filters +++ b/PCbuild/_freeze_module.vcxproj.filters @@ -166,6 +166,15 @@ Source Files + + Source Files + + + Source Files + + + Source Files + Source Files diff --git a/PCbuild/_hashlib.vcxproj b/PCbuild/_hashlib.vcxproj index 6dad8183c57ae3..2cd205224bc089 100644 --- a/PCbuild/_hashlib.vcxproj +++ b/PCbuild/_hashlib.vcxproj @@ -79,7 +79,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/_lzma.vcxproj b/PCbuild/_lzma.vcxproj index fe076a6fc57168..40107d4b76cd53 100644 --- a/PCbuild/_lzma.vcxproj +++ b/PCbuild/_lzma.vcxproj @@ -79,7 +79,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/_multiprocessing.vcxproj b/PCbuild/_multiprocessing.vcxproj index 77b6bfc8e1e483..a65397f532aa86 100644 --- a/PCbuild/_multiprocessing.vcxproj +++ b/PCbuild/_multiprocessing.vcxproj @@ -79,7 +79,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/_overlapped.vcxproj b/PCbuild/_overlapped.vcxproj index 9e60d3b5db336c..224bf05d5303a0 100644 --- a/PCbuild/_overlapped.vcxproj +++ b/PCbuild/_overlapped.vcxproj @@ -79,7 +79,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/_queue.vcxproj b/PCbuild/_queue.vcxproj index 8065b235851686..80a1c3c6a4ad3e 100644 --- a/PCbuild/_queue.vcxproj +++ b/PCbuild/_queue.vcxproj @@ -79,7 +79,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/_socket.vcxproj b/PCbuild/_socket.vcxproj index 78fa4d6729abb9..41af0895921bbb 100644 --- a/PCbuild/_socket.vcxproj +++ b/PCbuild/_socket.vcxproj @@ -79,7 +79,7 
@@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/_sqlite3.vcxproj b/PCbuild/_sqlite3.vcxproj index 57c7413671e54e..9ae0a0fc3a009d 100644 --- a/PCbuild/_sqlite3.vcxproj +++ b/PCbuild/_sqlite3.vcxproj @@ -79,7 +79,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/_ssl.vcxproj b/PCbuild/_ssl.vcxproj index 226ff506f8c62b..d4e1affab031d7 100644 --- a/PCbuild/_ssl.vcxproj +++ b/PCbuild/_ssl.vcxproj @@ -79,7 +79,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/_testbuffer.vcxproj b/PCbuild/_testbuffer.vcxproj index 917d7ae50feb14..4e721e8ce09f0c 100644 --- a/PCbuild/_testbuffer.vcxproj +++ b/PCbuild/_testbuffer.vcxproj @@ -80,7 +80,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/_testcapi.vcxproj b/PCbuild/_testcapi.vcxproj index 1c15541d3ec735..6911aacab29b97 100644 --- a/PCbuild/_testcapi.vcxproj +++ b/PCbuild/_testcapi.vcxproj @@ -80,7 +80,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/_testclinic.vcxproj b/PCbuild/_testclinic.vcxproj index e319b3c0f42e0f..ef981332c6ab03 100644 --- a/PCbuild/_testclinic.vcxproj +++ b/PCbuild/_testclinic.vcxproj @@ -80,7 +80,7 @@ - .pyd + $(PyStdlibPydExt) @@ -107,4 +107,4 @@ - \ No newline at end of file + diff --git a/PCbuild/_testclinic_limited.vcxproj b/PCbuild/_testclinic_limited.vcxproj index b00b2be491b423..183a55080e8693 100644 --- a/PCbuild/_testclinic_limited.vcxproj +++ b/PCbuild/_testclinic_limited.vcxproj @@ -79,7 +79,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/_testconsole.vcxproj b/PCbuild/_testconsole.vcxproj index 5d7e14eff10294..69d312b17a5a62 100644 --- a/PCbuild/_testconsole.vcxproj +++ b/PCbuild/_testconsole.vcxproj @@ -80,7 +80,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/_testimportmultiple.vcxproj b/PCbuild/_testimportmultiple.vcxproj index 6d80d5779f24d8..c35ac83c1c739f 100644 --- a/PCbuild/_testimportmultiple.vcxproj +++ b/PCbuild/_testimportmultiple.vcxproj @@ -80,7 +80,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/_testinternalcapi.vcxproj b/PCbuild/_testinternalcapi.vcxproj index 558f66ca95cd33..a825cac9138674 100644 --- a/PCbuild/_testinternalcapi.vcxproj +++ b/PCbuild/_testinternalcapi.vcxproj @@ -80,7 +80,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/_testmultiphase.vcxproj b/PCbuild/_testmultiphase.vcxproj index 430eb528cc3927..e730fe308ab835 100644 --- a/PCbuild/_testmultiphase.vcxproj +++ b/PCbuild/_testmultiphase.vcxproj @@ -80,7 +80,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/_testsinglephase.vcxproj b/PCbuild/_testsinglephase.vcxproj index fb4bcd953923f8..bf4dabf66c1040 100644 --- a/PCbuild/_testsinglephase.vcxproj +++ b/PCbuild/_testsinglephase.vcxproj @@ -80,7 +80,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/_tkinter.vcxproj b/PCbuild/_tkinter.vcxproj index 30cedcbb43de76..117488a01621cc 100644 --- a/PCbuild/_tkinter.vcxproj +++ b/PCbuild/_tkinter.vcxproj @@ -79,7 +79,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/_uuid.vcxproj b/PCbuild/_uuid.vcxproj index 2437b7eb2d9399..50d81cc7916dbd 100644 --- a/PCbuild/_uuid.vcxproj +++ b/PCbuild/_uuid.vcxproj @@ -80,7 +80,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/_wmi.vcxproj b/PCbuild/_wmi.vcxproj index c1914a3fa5a1bf..22fa8960982528 100644 --- a/PCbuild/_wmi.vcxproj +++ b/PCbuild/_wmi.vcxproj @@ -80,7 +80,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/_zoneinfo.vcxproj b/PCbuild/_zoneinfo.vcxproj index 6e6389c3773397..47b5bfa5b8815a 100644 --- a/PCbuild/_zoneinfo.vcxproj +++ b/PCbuild/_zoneinfo.vcxproj @@ -79,7 +79,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/pyexpat.vcxproj 
b/PCbuild/pyexpat.vcxproj index 001f8afd89b9e9..dc9161a8b290f9 100644 --- a/PCbuild/pyexpat.vcxproj +++ b/PCbuild/pyexpat.vcxproj @@ -79,7 +79,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/pyproject.props b/PCbuild/pyproject.props index d69b43b0406ce0..fd5fbc9e910eee 100644 --- a/PCbuild/pyproject.props +++ b/PCbuild/pyproject.props @@ -12,7 +12,7 @@ $(IntDir.Replace(`\\`, `\`)) $(Py_IntDir)\$(MajorVersionNumber)$(MinorVersionNumber)$(ArchName)_$(Configuration)\pythoncore\ - $(Py_IntDir)\$(MajorVersionNumber)$(MinorVersionNumber)_frozen_$(Configuration)\ + $(Py_IntDir)\$(MajorVersionNumber)$(MinorVersionNumber)_frozen\ $(ProjectName) $(TargetName)$(PyDebugExt) false @@ -24,11 +24,19 @@ false + + $(TargetName)$(TargetExt) + <_TargetNameSep>$(TargetNameExt.LastIndexOf(`.`)) + $(TargetNameExt.Substring(0, $(_TargetNameSep))) + $(TargetNameExt.Substring($(_TargetNameSep))) + + <_VCToolsVersion>$([System.Version]::Parse(`$(VCToolsVersion)`).Major).$([System.Version]::Parse(`$(VCToolsVersion)`).Minor) true + true @@ -37,7 +45,7 @@ <_PlatformPreprocessorDefinition>_WIN32; <_PlatformPreprocessorDefinition Condition="$(Platform) == 'x64'">_WIN64; <_PlatformPreprocessorDefinition Condition="$(Platform) == 'x64' and $(PlatformToolset) != 'ClangCL'">_M_X64;$(_PlatformPreprocessorDefinition) - <_Py3NamePreprocessorDefinition>PY3_DLLNAME=L"$(Py3DllName)"; + <_Py3NamePreprocessorDefinition>PY3_DLLNAME=L"$(Py3DllName)$(PyDebugExt)"; @@ -62,6 +70,7 @@ -Wno-deprecated-non-prototype -Wno-unused-label -Wno-pointer-sign -Wno-incompatible-pointer-types-discards-qualifiers -Wno-unused-function %(AdditionalOptions) -flto %(AdditionalOptions) -d2pattern-opt-disable:-932189325 %(AdditionalOptions) + -d2ssa-patterns-all- %(AdditionalOptions) /sourceDependencies "$(IntDir.Trim(`\`))" %(AdditionalOptions) @@ -156,8 +165,8 @@ public override bool Execute() { - - + + $([System.IO.Path]::GetFullPath($(MSBuildThisFileDirectory)\..\)) $(PySourcePath)\ - + $(PySourcePath)PCbuild\win32\ $(Py_OutDir)\win32\ @@ -52,7 +52,7 @@ $(PySourcePath)PCbuild\$(ArchName)\ $(BuildPath)\ $(BuildPath)instrumented\ - + ..\\.. ..\\..\\.. 
@@ -84,22 +84,19 @@ _d - + -test - + -32 -arm32 -arm64 - - - $(BuildPath)python$(PyDebugExt).exe true - + true @@ -141,7 +138,7 @@ @@ -223,22 +220,55 @@ )) )) $([msbuild]::Add($(Field3Value), 9000)) - + + + python$(MajorVersionNumber).$(MinorVersionNumber)t + python + $(BuildPath)$(PyExeName)$(PyDebugExt).exe + pythonw$(MajorVersionNumber).$(MinorVersionNumber)t + pythonw + - python$(MajorVersionNumber)$(MinorVersionNumber)$(PyDebugExt) + python$(MajorVersionNumber)$(MinorVersionNumber)t$(PyDebugExt) + python$(MajorVersionNumber)$(MinorVersionNumber)$(PyDebugExt) - python3$(PyDebugExt) + python3t + python3 .cp$(MajorVersionNumber)$(MinorVersionNumber)-win32 .cp$(MajorVersionNumber)$(MinorVersionNumber)-win_arm32 .cp$(MajorVersionNumber)$(MinorVersionNumber)-win_arm64 .cp$(MajorVersionNumber)$(MinorVersionNumber)-win_amd64 - + $(MajorVersionNumber).$(MinorVersionNumber)$(PyArchExt)$(PyTestExt) + + + .cp$(MajorVersionNumber)$(MinorVersionNumber)t-win32 + .cp$(MajorVersionNumber)$(MinorVersionNumber)t-win_arm32 + .cp$(MajorVersionNumber)$(MinorVersionNumber)t-win_arm64 + .cp$(MajorVersionNumber)$(MinorVersionNumber)t-win_amd64 + + + $(MajorVersionNumber).$(MinorVersionNumber)t$(PyArchExt)$(PyTestExt) + + + + + .pyd - + + + $(FreethreadedPydTag) + + + $(PydTag).pyd + + + $(FreethreadedSysWinVer) + + diff --git a/PCbuild/python.vcxproj b/PCbuild/python.vcxproj index 8b733865962373..1e5ab877488e4a 100644 --- a/PCbuild/python.vcxproj +++ b/PCbuild/python.vcxproj @@ -72,6 +72,7 @@ + $(PyExeName) Application false MultiByte @@ -94,8 +95,11 @@ Console - 2000000 + 2000000 12000000 + 12000000 + + 3000000 @@ -129,7 +133,7 @@ +"$(OutDir)$(PyExeName)$(PyDebugExt).exe" "$(PySourcePath)PC\validate_ucrtbase.py" $(UcrtName)' ContinueOnError="true" /> @@ -142,7 +146,7 @@ set PYTHONPATH=$(PySourcePath)Lib @echo Running $(Configuration)^|$(Platform) interpreter... 
@setlocal @set PYTHONHOME=$(PySourcePath) -@"$(OutDir)python$(PyDebugExt).exe" %* +@"$(OutDir)$(PyExeName)$(PyDebugExt).exe" %* <_ExistingContent Condition="Exists('$(PySourcePath)python.bat')">$([System.IO.File]::ReadAllText('$(PySourcePath)python.bat')) diff --git a/PCbuild/python3dll.vcxproj b/PCbuild/python3dll.vcxproj index ec22e6fc76e584..235ea1cf9d33fb 100644 --- a/PCbuild/python3dll.vcxproj +++ b/PCbuild/python3dll.vcxproj @@ -70,12 +70,12 @@ {885D4898-D08D-4091-9C40-C700CFE3FC5A} python3dll Win32Proj - python3 false + $(Py3DllName) DynamicLibrary diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index be5b34220aa0bc..64738b1bbf235d 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -231,6 +231,7 @@ + @@ -294,7 +295,6 @@ - @@ -424,6 +424,7 @@ + @@ -566,6 +567,9 @@ $(GeneratedFrozenModulesDir)Python;%(AdditionalIncludeDirectories) + + + diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index a96ca24cf08b66..b37ca2dfed55ab 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -618,6 +618,12 @@ Include\internal + + Include\internal + + + Include\internal + Include\internal @@ -798,9 +804,6 @@ Include\internal - - Include\internal - Include\internal\mimalloc @@ -932,6 +935,9 @@ Modules + + Modules + Modules @@ -1280,6 +1286,15 @@ Python + + Python + + + Python + + + Python + Python diff --git a/PCbuild/pythonw.vcxproj b/PCbuild/pythonw.vcxproj index e23635e5ea9411..d6cf0c97dedb09 100644 --- a/PCbuild/pythonw.vcxproj +++ b/PCbuild/pythonw.vcxproj @@ -73,6 +73,7 @@ + $(PyWExeName) Application false @@ -89,8 +90,11 @@ - 2000000 - 8000000 + 2000000 + 12000000 + 12000000 + + 3000000 diff --git a/PCbuild/rt.bat b/PCbuild/rt.bat index 332ba5edcf4082..293f99ae135faa 100644 --- a/PCbuild/rt.bat +++ b/PCbuild/rt.bat @@ -9,6 +9,7 @@ rem -q runs the tests just once, and without deleting .pyc files. rem -p or -win32, -x64, -arm32, -arm64 rem Run the specified architecture of python (or python_d if -d rem was specified). If omitted, uses %PREFIX% if set or 64-bit. +rem --disable-gil Run free-threaded build. rem All leading instances of these switches are shifted off, and rem whatever remains (up to 9 arguments) is passed to regrtest.py. 
rem For example, @@ -29,6 +30,7 @@ rem rt -u "network,largefile" setlocal set pcbuild=%~dp0 +set pyname=python set suffix= set qmode= set dashO= @@ -39,15 +41,18 @@ set exe= if "%1"=="-O" (set dashO=-O) & shift & goto CheckOpts if "%1"=="-q" (set qmode=yes) & shift & goto CheckOpts if "%1"=="-d" (set suffix=_d) & shift & goto CheckOpts +rem HACK: Need some way to infer the version number in this script +if "%1"=="--disable-gil" (set pyname=python3.13t) & shift & goto CheckOpts if "%1"=="-win32" (set prefix=%pcbuild%win32) & shift & goto CheckOpts if "%1"=="-x64" (set prefix=%pcbuild%amd64) & shift & goto CheckOpts +if "%1"=="-amd64" (set prefix=%pcbuild%amd64) & shift & goto CheckOpts if "%1"=="-arm64" (set prefix=%pcbuild%arm64) & shift & goto CheckOpts if "%1"=="-arm32" (set prefix=%pcbuild%arm32) & shift & goto CheckOpts if "%1"=="-p" (call :SetPlatform %~2) & shift & shift & goto CheckOpts if NOT "%1"=="" (set regrtestargs=%regrtestargs% %1) & shift & goto CheckOpts if not defined prefix set prefix=%pcbuild%amd64 -set exe=%prefix%\python%suffix%.exe +set exe=%prefix%\%pyname%%suffix%.exe set cmd="%exe%" %dashO% -m test %regrtestargs% if defined qmode goto Qmode diff --git a/PCbuild/select.vcxproj b/PCbuild/select.vcxproj index 750a713949919a..d7448fd4d72380 100644 --- a/PCbuild/select.vcxproj +++ b/PCbuild/select.vcxproj @@ -78,7 +78,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/sqlite3.vcxproj b/PCbuild/sqlite3.vcxproj index c502d51833b91a..6bcc4e913c8e77 100644 --- a/PCbuild/sqlite3.vcxproj +++ b/PCbuild/sqlite3.vcxproj @@ -69,12 +69,12 @@ {A1A295E5-463C-437F-81CA-1F32367685DA} sqlite3 - .pyd false + $(PyStdlibPydExt) DynamicLibrary NotSet diff --git a/PCbuild/unicodedata.vcxproj b/PCbuild/unicodedata.vcxproj index addef753359ed6..781f938e2ab78e 100644 --- a/PCbuild/unicodedata.vcxproj +++ b/PCbuild/unicodedata.vcxproj @@ -79,7 +79,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/venvlauncher.vcxproj b/PCbuild/venvlauncher.vcxproj index 123e84ec4e3682..1193e032245c94 100644 --- a/PCbuild/venvlauncher.vcxproj +++ b/PCbuild/venvlauncher.vcxproj @@ -69,12 +69,13 @@ {494BAC80-A60C-43A9-99E7-ACB691CE2C4D} venvlauncher - venvlauncher false + venvlauncher + $(TargetName)t Application MultiByte @@ -91,19 +92,19 @@ - _CONSOLE;VENV_REDIRECT;%(PreprocessorDefinitions) + EXENAME=L"$(PyExeName)$(PyDebugExt).exe";_CONSOLE;%(PreprocessorDefinitions) MultiThreaded PY_ICON;%(PreprocessorDefinitions) - version.lib;%(AdditionalDependencies) + pathcch.lib;%(AdditionalDependencies) Console - + diff --git a/PCbuild/venvlauncher.vcxproj.filters b/PCbuild/venvlauncher.vcxproj.filters index ec13936bf6cb7e..56a0f005a3fa2a 100644 --- a/PCbuild/venvlauncher.vcxproj.filters +++ b/PCbuild/venvlauncher.vcxproj.filters @@ -19,7 +19,7 @@ - + Source Files diff --git a/PCbuild/venvwlauncher.vcxproj b/PCbuild/venvwlauncher.vcxproj index b8504d5d08e52f..1b61718201367f 100644 --- a/PCbuild/venvwlauncher.vcxproj +++ b/PCbuild/venvwlauncher.vcxproj @@ -69,12 +69,13 @@ {FDB84CBB-2FB6-47C8-A2D6-091E0833239D} venvwlauncher - venvwlauncher false + venvwlauncher + $(TargetName)t Application MultiByte @@ -91,19 +92,19 @@ - _WINDOWS;VENV_REDIRECT;%(PreprocessorDefinitions) + EXENAME=L"$(PyExeName)$(PyDebugExt).exe";_WINDOWS;%(PreprocessorDefinitions) MultiThreaded PYW_ICON;%(PreprocessorDefinitions) - version.lib;%(AdditionalDependencies) + pathcch.lib;%(AdditionalDependencies) Windows - + diff --git a/PCbuild/venvwlauncher.vcxproj.filters b/PCbuild/venvwlauncher.vcxproj.filters index 8addc13e977e7a..61a514395e82dc 
100644 --- a/PCbuild/venvwlauncher.vcxproj.filters +++ b/PCbuild/venvwlauncher.vcxproj.filters @@ -9,7 +9,7 @@ - + Source Files diff --git a/PCbuild/winsound.vcxproj b/PCbuild/winsound.vcxproj index 32cedc9b444902..c26029b15a339f 100644 --- a/PCbuild/winsound.vcxproj +++ b/PCbuild/winsound.vcxproj @@ -80,7 +80,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/xxlimited.vcxproj b/PCbuild/xxlimited.vcxproj index 1c776fb0da3e72..093e6920c0b76c 100644 --- a/PCbuild/xxlimited.vcxproj +++ b/PCbuild/xxlimited.vcxproj @@ -80,7 +80,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/PCbuild/xxlimited_35.vcxproj b/PCbuild/xxlimited_35.vcxproj index dd830b3b6aaa91..3f4d4463f24af0 100644 --- a/PCbuild/xxlimited_35.vcxproj +++ b/PCbuild/xxlimited_35.vcxproj @@ -80,7 +80,7 @@ - .pyd + $(PyStdlibPydExt) diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py index 4bb337349748cf..ce92672bf00776 100755 --- a/Parser/asdl_c.py +++ b/Parser/asdl_c.py @@ -1388,15 +1388,14 @@ class PartingShots(StaticVisitor): int starting_recursion_depth; /* Be careful here to prevent overflow. */ - int COMPILER_STACK_FRAME_SCALE = 2; PyThreadState *tstate = _PyThreadState_GET(); if (!tstate) { return NULL; } struct validator vstate; - vstate.recursion_limit = Py_C_RECURSION_LIMIT * COMPILER_STACK_FRAME_SCALE; + vstate.recursion_limit = Py_C_RECURSION_LIMIT; int recursion_depth = Py_C_RECURSION_LIMIT - tstate->c_recursion_remaining; - starting_recursion_depth = recursion_depth * COMPILER_STACK_FRAME_SCALE; + starting_recursion_depth = recursion_depth; vstate.recursion_depth = starting_recursion_depth; PyObject *result = ast2obj_mod(state, &vstate, t); diff --git a/Parser/lexer/lexer.c b/Parser/lexer/lexer.c index ea4bdf7ce4a24c..ebf7686773ff45 100644 --- a/Parser/lexer/lexer.c +++ b/Parser/lexer/lexer.c @@ -1355,9 +1355,13 @@ tok_get_fstring_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct tok->lineno = the_current_tok->f_string_line_start; if (current_tok->f_string_quote_size == 3) { - return MAKE_TOKEN(_PyTokenizer_syntaxerror(tok, + _PyTokenizer_syntaxerror(tok, "unterminated triple-quoted f-string literal" - " (detected at line %d)", start)); + " (detected at line %d)", start); + if (c != '\n') { + tok->done = E_EOFS; + } + return MAKE_TOKEN(ERRORTOKEN); } else { return MAKE_TOKEN(_PyTokenizer_syntaxerror(tok, diff --git a/Parser/parser.c b/Parser/parser.c index 9006efb09a59fa..779b18e9650e9f 100644 --- a/Parser/parser.c +++ b/Parser/parser.c @@ -21,54 +21,54 @@ static KeywordToken *reserved_keywords[] = { (KeywordToken[]) {{NULL, -1}}, (KeywordToken[]) {{NULL, -1}}, (KeywordToken[]) { - {"if", 658}, - {"as", 656}, - {"in", 669}, + {"if", 661}, + {"as", 659}, + {"in", 672}, {"or", 581}, {"is", 589}, {NULL, -1}, }, (KeywordToken[]) { - {"del", 613}, - {"def", 671}, - {"for", 668}, - {"try", 640}, + {"del", 616}, + {"def", 674}, + {"for", 671}, + {"try", 643}, {"and", 582}, {"not", 588}, {NULL, -1}, }, (KeywordToken[]) { - {"from", 618}, + {"from", 621}, {"pass", 504}, - {"with", 631}, - {"elif", 660}, - {"else", 661}, - {"None", 611}, - {"True", 610}, + {"with", 634}, + {"elif", 663}, + {"else", 664}, + {"None", 614}, + {"True", 613}, {NULL, -1}, }, (KeywordToken[]) { {"raise", 525}, {"yield", 580}, {"break", 508}, - {"async", 670}, - {"class", 673}, - {"while", 663}, - {"False", 612}, + {"async", 673}, + {"class", 676}, + {"while", 666}, + {"False", 615}, {"await", 590}, {NULL, -1}, }, (KeywordToken[]) { {"return", 522}, - {"import", 617}, + {"import", 620}, {"assert", 529}, {"global", 526}, - {"except", 653}, - 
{"lambda", 609}, + {"except", 656}, + {"lambda", 612}, {NULL, -1}, }, (KeywordToken[]) { - {"finally", 649}, + {"finally", 652}, {NULL, -1}, }, (KeywordToken[]) { @@ -451,33 +451,33 @@ static char *soft_keywords[] = { #define _loop0_120_type 1364 #define _loop0_121_type 1365 #define _tmp_122_type 1366 -#define _loop0_124_type 1367 -#define _gather_123_type 1368 -#define _tmp_125_type 1369 -#define _loop0_127_type 1370 -#define _gather_126_type 1371 -#define _loop0_129_type 1372 -#define _gather_128_type 1373 -#define _loop0_131_type 1374 -#define _gather_130_type 1375 -#define _loop0_133_type 1376 -#define _gather_132_type 1377 -#define _loop0_134_type 1378 -#define _loop0_136_type 1379 -#define _gather_135_type 1380 -#define _loop1_137_type 1381 -#define _tmp_138_type 1382 -#define _loop0_140_type 1383 -#define _gather_139_type 1384 -#define _loop0_142_type 1385 -#define _gather_141_type 1386 -#define _loop0_144_type 1387 -#define _gather_143_type 1388 -#define _loop0_146_type 1389 -#define _gather_145_type 1390 -#define _loop0_148_type 1391 -#define _gather_147_type 1392 -#define _tmp_149_type 1393 +#define _tmp_123_type 1367 +#define _loop0_125_type 1368 +#define _gather_124_type 1369 +#define _tmp_126_type 1370 +#define _loop0_128_type 1371 +#define _gather_127_type 1372 +#define _loop0_130_type 1373 +#define _gather_129_type 1374 +#define _loop0_132_type 1375 +#define _gather_131_type 1376 +#define _loop0_134_type 1377 +#define _gather_133_type 1378 +#define _loop0_135_type 1379 +#define _loop0_137_type 1380 +#define _gather_136_type 1381 +#define _loop1_138_type 1382 +#define _tmp_139_type 1383 +#define _loop0_141_type 1384 +#define _gather_140_type 1385 +#define _loop0_143_type 1386 +#define _gather_142_type 1387 +#define _loop0_145_type 1388 +#define _gather_144_type 1389 +#define _loop0_147_type 1390 +#define _gather_146_type 1391 +#define _loop0_149_type 1392 +#define _gather_148_type 1393 #define _tmp_150_type 1394 #define _tmp_151_type 1395 #define _tmp_152_type 1396 @@ -489,65 +489,65 @@ static char *soft_keywords[] = { #define _tmp_158_type 1402 #define _tmp_159_type 1403 #define _tmp_160_type 1404 -#define _loop0_161_type 1405 +#define _tmp_161_type 1405 #define _loop0_162_type 1406 #define _loop0_163_type 1407 -#define _tmp_164_type 1408 +#define _loop0_164_type 1408 #define _tmp_165_type 1409 #define _tmp_166_type 1410 #define _tmp_167_type 1411 #define _tmp_168_type 1412 -#define _loop0_169_type 1413 +#define _tmp_169_type 1413 #define _loop0_170_type 1414 #define _loop0_171_type 1415 -#define _loop1_172_type 1416 -#define _tmp_173_type 1417 -#define _loop0_174_type 1418 -#define _tmp_175_type 1419 -#define _loop0_176_type 1420 -#define _loop1_177_type 1421 -#define _tmp_178_type 1422 +#define _loop0_172_type 1416 +#define _loop1_173_type 1417 +#define _tmp_174_type 1418 +#define _loop0_175_type 1419 +#define _tmp_176_type 1420 +#define _loop0_177_type 1421 +#define _loop1_178_type 1422 #define _tmp_179_type 1423 #define _tmp_180_type 1424 -#define _loop0_181_type 1425 -#define _tmp_182_type 1426 +#define _tmp_181_type 1425 +#define _loop0_182_type 1426 #define _tmp_183_type 1427 -#define _loop1_184_type 1428 -#define _tmp_185_type 1429 -#define _loop0_186_type 1430 +#define _tmp_184_type 1428 +#define _loop1_185_type 1429 +#define _tmp_186_type 1430 #define _loop0_187_type 1431 #define _loop0_188_type 1432 -#define _loop0_190_type 1433 -#define _gather_189_type 1434 -#define _tmp_191_type 1435 -#define _loop0_192_type 1436 -#define _tmp_193_type 1437 -#define 
_loop0_194_type 1438 -#define _loop1_195_type 1439 +#define _loop0_189_type 1433 +#define _loop0_191_type 1434 +#define _gather_190_type 1435 +#define _tmp_192_type 1436 +#define _loop0_193_type 1437 +#define _tmp_194_type 1438 +#define _loop0_195_type 1439 #define _loop1_196_type 1440 -#define _tmp_197_type 1441 +#define _loop1_197_type 1441 #define _tmp_198_type 1442 -#define _loop0_199_type 1443 -#define _tmp_200_type 1444 +#define _tmp_199_type 1443 +#define _loop0_200_type 1444 #define _tmp_201_type 1445 #define _tmp_202_type 1446 -#define _loop0_204_type 1447 -#define _gather_203_type 1448 -#define _loop0_206_type 1449 -#define _gather_205_type 1450 -#define _loop0_208_type 1451 -#define _gather_207_type 1452 -#define _loop0_210_type 1453 -#define _gather_209_type 1454 -#define _loop0_212_type 1455 -#define _gather_211_type 1456 -#define _tmp_213_type 1457 -#define _loop0_214_type 1458 -#define _loop1_215_type 1459 -#define _tmp_216_type 1460 -#define _loop0_217_type 1461 -#define _loop1_218_type 1462 -#define _tmp_219_type 1463 +#define _tmp_203_type 1447 +#define _loop0_205_type 1448 +#define _gather_204_type 1449 +#define _loop0_207_type 1450 +#define _gather_206_type 1451 +#define _loop0_209_type 1452 +#define _gather_208_type 1453 +#define _loop0_211_type 1454 +#define _gather_210_type 1455 +#define _loop0_213_type 1456 +#define _gather_212_type 1457 +#define _tmp_214_type 1458 +#define _loop0_215_type 1459 +#define _loop1_216_type 1460 +#define _tmp_217_type 1461 +#define _loop0_218_type 1462 +#define _loop1_219_type 1463 #define _tmp_220_type 1464 #define _tmp_221_type 1465 #define _tmp_222_type 1466 @@ -557,9 +557,9 @@ static char *soft_keywords[] = { #define _tmp_226_type 1470 #define _tmp_227_type 1471 #define _tmp_228_type 1472 -#define _loop0_230_type 1473 -#define _gather_229_type 1474 -#define _tmp_231_type 1475 +#define _tmp_229_type 1473 +#define _loop0_231_type 1474 +#define _gather_230_type 1475 #define _tmp_232_type 1476 #define _tmp_233_type 1477 #define _tmp_234_type 1478 @@ -572,8 +572,8 @@ static char *soft_keywords[] = { #define _tmp_241_type 1485 #define _tmp_242_type 1486 #define _tmp_243_type 1487 -#define _loop0_244_type 1488 -#define _tmp_245_type 1489 +#define _tmp_244_type 1488 +#define _loop0_245_type 1489 #define _tmp_246_type 1490 #define _tmp_247_type 1491 #define _tmp_248_type 1492 @@ -589,7 +589,7 @@ static char *soft_keywords[] = { #define _tmp_258_type 1502 #define _tmp_259_type 1503 #define _tmp_260_type 1504 -#define _tmp_261_type 1505 +#define _loop0_261_type 1505 #define _tmp_262_type 1506 #define _tmp_263_type 1507 #define _tmp_264_type 1508 @@ -602,14 +602,17 @@ static char *soft_keywords[] = { #define _tmp_271_type 1515 #define _tmp_272_type 1516 #define _tmp_273_type 1517 -#define _loop0_275_type 1518 -#define _gather_274_type 1519 +#define _tmp_274_type 1518 +#define _tmp_275_type 1519 #define _tmp_276_type 1520 -#define _tmp_277_type 1521 -#define _tmp_278_type 1522 +#define _loop0_278_type 1521 +#define _gather_277_type 1522 #define _tmp_279_type 1523 #define _tmp_280_type 1524 #define _tmp_281_type 1525 +#define _tmp_282_type 1526 +#define _tmp_283_type 1527 +#define _tmp_284_type 1528 static mod_ty file_rule(Parser *p); static mod_ty interactive_rule(Parser *p); @@ -978,33 +981,33 @@ static asdl_seq *_loop1_119_rule(Parser *p); static asdl_seq *_loop0_120_rule(Parser *p); static asdl_seq *_loop0_121_rule(Parser *p); static void *_tmp_122_rule(Parser *p); -static asdl_seq *_loop0_124_rule(Parser *p); -static asdl_seq 
*_gather_123_rule(Parser *p); -static void *_tmp_125_rule(Parser *p); -static asdl_seq *_loop0_127_rule(Parser *p); -static asdl_seq *_gather_126_rule(Parser *p); -static asdl_seq *_loop0_129_rule(Parser *p); -static asdl_seq *_gather_128_rule(Parser *p); -static asdl_seq *_loop0_131_rule(Parser *p); -static asdl_seq *_gather_130_rule(Parser *p); -static asdl_seq *_loop0_133_rule(Parser *p); -static asdl_seq *_gather_132_rule(Parser *p); +static void *_tmp_123_rule(Parser *p); +static asdl_seq *_loop0_125_rule(Parser *p); +static asdl_seq *_gather_124_rule(Parser *p); +static void *_tmp_126_rule(Parser *p); +static asdl_seq *_loop0_128_rule(Parser *p); +static asdl_seq *_gather_127_rule(Parser *p); +static asdl_seq *_loop0_130_rule(Parser *p); +static asdl_seq *_gather_129_rule(Parser *p); +static asdl_seq *_loop0_132_rule(Parser *p); +static asdl_seq *_gather_131_rule(Parser *p); static asdl_seq *_loop0_134_rule(Parser *p); -static asdl_seq *_loop0_136_rule(Parser *p); -static asdl_seq *_gather_135_rule(Parser *p); -static asdl_seq *_loop1_137_rule(Parser *p); -static void *_tmp_138_rule(Parser *p); -static asdl_seq *_loop0_140_rule(Parser *p); -static asdl_seq *_gather_139_rule(Parser *p); -static asdl_seq *_loop0_142_rule(Parser *p); -static asdl_seq *_gather_141_rule(Parser *p); -static asdl_seq *_loop0_144_rule(Parser *p); -static asdl_seq *_gather_143_rule(Parser *p); -static asdl_seq *_loop0_146_rule(Parser *p); -static asdl_seq *_gather_145_rule(Parser *p); -static asdl_seq *_loop0_148_rule(Parser *p); -static asdl_seq *_gather_147_rule(Parser *p); -static void *_tmp_149_rule(Parser *p); +static asdl_seq *_gather_133_rule(Parser *p); +static asdl_seq *_loop0_135_rule(Parser *p); +static asdl_seq *_loop0_137_rule(Parser *p); +static asdl_seq *_gather_136_rule(Parser *p); +static asdl_seq *_loop1_138_rule(Parser *p); +static void *_tmp_139_rule(Parser *p); +static asdl_seq *_loop0_141_rule(Parser *p); +static asdl_seq *_gather_140_rule(Parser *p); +static asdl_seq *_loop0_143_rule(Parser *p); +static asdl_seq *_gather_142_rule(Parser *p); +static asdl_seq *_loop0_145_rule(Parser *p); +static asdl_seq *_gather_144_rule(Parser *p); +static asdl_seq *_loop0_147_rule(Parser *p); +static asdl_seq *_gather_146_rule(Parser *p); +static asdl_seq *_loop0_149_rule(Parser *p); +static asdl_seq *_gather_148_rule(Parser *p); static void *_tmp_150_rule(Parser *p); static void *_tmp_151_rule(Parser *p); static void *_tmp_152_rule(Parser *p); @@ -1016,65 +1019,65 @@ static void *_tmp_157_rule(Parser *p); static void *_tmp_158_rule(Parser *p); static void *_tmp_159_rule(Parser *p); static void *_tmp_160_rule(Parser *p); -static asdl_seq *_loop0_161_rule(Parser *p); +static void *_tmp_161_rule(Parser *p); static asdl_seq *_loop0_162_rule(Parser *p); static asdl_seq *_loop0_163_rule(Parser *p); -static void *_tmp_164_rule(Parser *p); +static asdl_seq *_loop0_164_rule(Parser *p); static void *_tmp_165_rule(Parser *p); static void *_tmp_166_rule(Parser *p); static void *_tmp_167_rule(Parser *p); static void *_tmp_168_rule(Parser *p); -static asdl_seq *_loop0_169_rule(Parser *p); +static void *_tmp_169_rule(Parser *p); static asdl_seq *_loop0_170_rule(Parser *p); static asdl_seq *_loop0_171_rule(Parser *p); -static asdl_seq *_loop1_172_rule(Parser *p); -static void *_tmp_173_rule(Parser *p); -static asdl_seq *_loop0_174_rule(Parser *p); -static void *_tmp_175_rule(Parser *p); -static asdl_seq *_loop0_176_rule(Parser *p); -static asdl_seq *_loop1_177_rule(Parser *p); -static void *_tmp_178_rule(Parser *p); 
+static asdl_seq *_loop0_172_rule(Parser *p); +static asdl_seq *_loop1_173_rule(Parser *p); +static void *_tmp_174_rule(Parser *p); +static asdl_seq *_loop0_175_rule(Parser *p); +static void *_tmp_176_rule(Parser *p); +static asdl_seq *_loop0_177_rule(Parser *p); +static asdl_seq *_loop1_178_rule(Parser *p); static void *_tmp_179_rule(Parser *p); static void *_tmp_180_rule(Parser *p); -static asdl_seq *_loop0_181_rule(Parser *p); -static void *_tmp_182_rule(Parser *p); +static void *_tmp_181_rule(Parser *p); +static asdl_seq *_loop0_182_rule(Parser *p); static void *_tmp_183_rule(Parser *p); -static asdl_seq *_loop1_184_rule(Parser *p); -static void *_tmp_185_rule(Parser *p); -static asdl_seq *_loop0_186_rule(Parser *p); +static void *_tmp_184_rule(Parser *p); +static asdl_seq *_loop1_185_rule(Parser *p); +static void *_tmp_186_rule(Parser *p); static asdl_seq *_loop0_187_rule(Parser *p); static asdl_seq *_loop0_188_rule(Parser *p); -static asdl_seq *_loop0_190_rule(Parser *p); -static asdl_seq *_gather_189_rule(Parser *p); -static void *_tmp_191_rule(Parser *p); -static asdl_seq *_loop0_192_rule(Parser *p); -static void *_tmp_193_rule(Parser *p); -static asdl_seq *_loop0_194_rule(Parser *p); -static asdl_seq *_loop1_195_rule(Parser *p); +static asdl_seq *_loop0_189_rule(Parser *p); +static asdl_seq *_loop0_191_rule(Parser *p); +static asdl_seq *_gather_190_rule(Parser *p); +static void *_tmp_192_rule(Parser *p); +static asdl_seq *_loop0_193_rule(Parser *p); +static void *_tmp_194_rule(Parser *p); +static asdl_seq *_loop0_195_rule(Parser *p); static asdl_seq *_loop1_196_rule(Parser *p); -static void *_tmp_197_rule(Parser *p); +static asdl_seq *_loop1_197_rule(Parser *p); static void *_tmp_198_rule(Parser *p); -static asdl_seq *_loop0_199_rule(Parser *p); -static void *_tmp_200_rule(Parser *p); +static void *_tmp_199_rule(Parser *p); +static asdl_seq *_loop0_200_rule(Parser *p); static void *_tmp_201_rule(Parser *p); static void *_tmp_202_rule(Parser *p); -static asdl_seq *_loop0_204_rule(Parser *p); -static asdl_seq *_gather_203_rule(Parser *p); -static asdl_seq *_loop0_206_rule(Parser *p); -static asdl_seq *_gather_205_rule(Parser *p); -static asdl_seq *_loop0_208_rule(Parser *p); -static asdl_seq *_gather_207_rule(Parser *p); -static asdl_seq *_loop0_210_rule(Parser *p); -static asdl_seq *_gather_209_rule(Parser *p); -static asdl_seq *_loop0_212_rule(Parser *p); -static asdl_seq *_gather_211_rule(Parser *p); -static void *_tmp_213_rule(Parser *p); -static asdl_seq *_loop0_214_rule(Parser *p); -static asdl_seq *_loop1_215_rule(Parser *p); -static void *_tmp_216_rule(Parser *p); -static asdl_seq *_loop0_217_rule(Parser *p); -static asdl_seq *_loop1_218_rule(Parser *p); -static void *_tmp_219_rule(Parser *p); +static void *_tmp_203_rule(Parser *p); +static asdl_seq *_loop0_205_rule(Parser *p); +static asdl_seq *_gather_204_rule(Parser *p); +static asdl_seq *_loop0_207_rule(Parser *p); +static asdl_seq *_gather_206_rule(Parser *p); +static asdl_seq *_loop0_209_rule(Parser *p); +static asdl_seq *_gather_208_rule(Parser *p); +static asdl_seq *_loop0_211_rule(Parser *p); +static asdl_seq *_gather_210_rule(Parser *p); +static asdl_seq *_loop0_213_rule(Parser *p); +static asdl_seq *_gather_212_rule(Parser *p); +static void *_tmp_214_rule(Parser *p); +static asdl_seq *_loop0_215_rule(Parser *p); +static asdl_seq *_loop1_216_rule(Parser *p); +static void *_tmp_217_rule(Parser *p); +static asdl_seq *_loop0_218_rule(Parser *p); +static asdl_seq *_loop1_219_rule(Parser *p); static void 
*_tmp_220_rule(Parser *p); static void *_tmp_221_rule(Parser *p); static void *_tmp_222_rule(Parser *p); @@ -1084,9 +1087,9 @@ static void *_tmp_225_rule(Parser *p); static void *_tmp_226_rule(Parser *p); static void *_tmp_227_rule(Parser *p); static void *_tmp_228_rule(Parser *p); -static asdl_seq *_loop0_230_rule(Parser *p); -static asdl_seq *_gather_229_rule(Parser *p); -static void *_tmp_231_rule(Parser *p); +static void *_tmp_229_rule(Parser *p); +static asdl_seq *_loop0_231_rule(Parser *p); +static asdl_seq *_gather_230_rule(Parser *p); static void *_tmp_232_rule(Parser *p); static void *_tmp_233_rule(Parser *p); static void *_tmp_234_rule(Parser *p); @@ -1099,8 +1102,8 @@ static void *_tmp_240_rule(Parser *p); static void *_tmp_241_rule(Parser *p); static void *_tmp_242_rule(Parser *p); static void *_tmp_243_rule(Parser *p); -static asdl_seq *_loop0_244_rule(Parser *p); -static void *_tmp_245_rule(Parser *p); +static void *_tmp_244_rule(Parser *p); +static asdl_seq *_loop0_245_rule(Parser *p); static void *_tmp_246_rule(Parser *p); static void *_tmp_247_rule(Parser *p); static void *_tmp_248_rule(Parser *p); @@ -1116,7 +1119,7 @@ static void *_tmp_257_rule(Parser *p); static void *_tmp_258_rule(Parser *p); static void *_tmp_259_rule(Parser *p); static void *_tmp_260_rule(Parser *p); -static void *_tmp_261_rule(Parser *p); +static asdl_seq *_loop0_261_rule(Parser *p); static void *_tmp_262_rule(Parser *p); static void *_tmp_263_rule(Parser *p); static void *_tmp_264_rule(Parser *p); @@ -1129,14 +1132,17 @@ static void *_tmp_270_rule(Parser *p); static void *_tmp_271_rule(Parser *p); static void *_tmp_272_rule(Parser *p); static void *_tmp_273_rule(Parser *p); -static asdl_seq *_loop0_275_rule(Parser *p); -static asdl_seq *_gather_274_rule(Parser *p); +static void *_tmp_274_rule(Parser *p); +static void *_tmp_275_rule(Parser *p); static void *_tmp_276_rule(Parser *p); -static void *_tmp_277_rule(Parser *p); -static void *_tmp_278_rule(Parser *p); +static asdl_seq *_loop0_278_rule(Parser *p); +static asdl_seq *_gather_277_rule(Parser *p); static void *_tmp_279_rule(Parser *p); static void *_tmp_280_rule(Parser *p); static void *_tmp_281_rule(Parser *p); +static void *_tmp_282_rule(Parser *p); +static void *_tmp_283_rule(Parser *p); +static void *_tmp_284_rule(Parser *p); // file: statements? 
$ @@ -1875,7 +1881,7 @@ simple_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'del' del_stmt")); stmt_ty del_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 613) // token='del' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 616) // token='del' && (del_stmt_var = del_stmt_rule(p)) // del_stmt ) @@ -2115,7 +2121,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'if' if_stmt")); stmt_ty if_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 658) // token='if' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 661) // token='if' && (if_stmt_var = if_stmt_rule(p)) // if_stmt ) @@ -2199,7 +2205,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'try' try_stmt")); stmt_ty try_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 640) // token='try' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 643) // token='try' && (try_stmt_var = try_stmt_rule(p)) // try_stmt ) @@ -2220,7 +2226,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'while' while_stmt")); stmt_ty while_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 663) // token='while' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 666) // token='while' && (while_stmt_var = while_stmt_rule(p)) // while_stmt ) @@ -3205,7 +3211,7 @@ del_stmt_rule(Parser *p) Token * _keyword; asdl_expr_seq* a; if ( - (_keyword = _PyPegen_expect_token(p, 613)) // token='del' + (_keyword = _PyPegen_expect_token(p, 616)) // token='del' && (a = del_targets_rule(p)) // del_targets && @@ -3494,7 +3500,7 @@ import_name_rule(Parser *p) Token * _keyword; asdl_alias_seq* a; if ( - (_keyword = _PyPegen_expect_token(p, 617)) // token='import' + (_keyword = _PyPegen_expect_token(p, 620)) // token='import' && (a = dotted_as_names_rule(p)) // dotted_as_names ) @@ -3563,13 +3569,13 @@ import_from_rule(Parser *p) expr_ty b; asdl_alias_seq* c; if ( - (_keyword = _PyPegen_expect_token(p, 618)) // token='from' + (_keyword = _PyPegen_expect_token(p, 621)) // token='from' && (a = _loop0_24_rule(p)) // (('.' | '...'))* && (b = dotted_name_rule(p)) // dotted_name && - (_keyword_1 = _PyPegen_expect_token(p, 617)) // token='import' + (_keyword_1 = _PyPegen_expect_token(p, 620)) // token='import' && (c = import_from_targets_rule(p)) // import_from_targets ) @@ -3607,11 +3613,11 @@ import_from_rule(Parser *p) asdl_seq * a; asdl_alias_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 618)) // token='from' + (_keyword = _PyPegen_expect_token(p, 621)) // token='from' && (a = _loop1_25_rule(p)) // (('.' 
| '...'))+ && - (_keyword_1 = _PyPegen_expect_token(p, 617)) // token='import' + (_keyword_1 = _PyPegen_expect_token(p, 620)) // token='import' && (b = import_from_targets_rule(p)) // import_from_targets ) @@ -4360,7 +4366,7 @@ class_def_raw_rule(Parser *p) asdl_stmt_seq* c; void *t; if ( - (_keyword = _PyPegen_expect_token(p, 673)) // token='class' + (_keyword = _PyPegen_expect_token(p, 676)) // token='class' && (a = _PyPegen_name_token(p)) // NAME && @@ -4527,7 +4533,7 @@ function_def_raw_rule(Parser *p) void *t; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 671)) // token='def' + (_keyword = _PyPegen_expect_token(p, 674)) // token='def' && (n = _PyPegen_name_token(p)) // NAME && @@ -4588,9 +4594,9 @@ function_def_raw_rule(Parser *p) void *t; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 670)) // token='async' + (_keyword = _PyPegen_expect_token(p, 673)) // token='async' && - (_keyword_1 = _PyPegen_expect_token(p, 671)) // token='def' + (_keyword_1 = _PyPegen_expect_token(p, 674)) // token='def' && (n = _PyPegen_name_token(p)) // NAME && @@ -5928,7 +5934,7 @@ if_stmt_rule(Parser *p) asdl_stmt_seq* b; stmt_ty c; if ( - (_keyword = _PyPegen_expect_token(p, 658)) // token='if' + (_keyword = _PyPegen_expect_token(p, 661)) // token='if' && (a = named_expression_rule(p)) // named_expression && @@ -5973,7 +5979,7 @@ if_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 658)) // token='if' + (_keyword = _PyPegen_expect_token(p, 661)) // token='if' && (a = named_expression_rule(p)) // named_expression && @@ -6068,7 +6074,7 @@ elif_stmt_rule(Parser *p) asdl_stmt_seq* b; stmt_ty c; if ( - (_keyword = _PyPegen_expect_token(p, 660)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 663)) // token='elif' && (a = named_expression_rule(p)) // named_expression && @@ -6113,7 +6119,7 @@ elif_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 660)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 663)) // token='elif' && (a = named_expression_rule(p)) // named_expression && @@ -6194,7 +6200,7 @@ else_block_rule(Parser *p) Token * _literal; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 661)) // token='else' + (_keyword = _PyPegen_expect_token(p, 664)) // token='else' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -6273,7 +6279,7 @@ while_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 663)) // token='while' + (_keyword = _PyPegen_expect_token(p, 666)) // token='while' && (a = named_expression_rule(p)) // named_expression && @@ -6373,11 +6379,11 @@ for_stmt_rule(Parser *p) expr_ty t; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 668)) // token='for' + (_keyword = _PyPegen_expect_token(p, 671)) // token='for' && (t = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 669)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 672)) // token='in' && (_cut_var = 1) && @@ -6435,13 +6441,13 @@ for_stmt_rule(Parser *p) expr_ty t; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 670)) // token='async' + (_keyword = _PyPegen_expect_token(p, 673)) // token='async' && - (_keyword_1 = _PyPegen_expect_token(p, 668)) // token='for' + (_keyword_1 = _PyPegen_expect_token(p, 671)) // token='for' && (t = star_targets_rule(p)) // star_targets && - (_keyword_2 = _PyPegen_expect_token(p, 669)) // token='in' + (_keyword_2 = _PyPegen_expect_token(p, 672)) // token='in' && (_cut_var = 
1) && @@ -6570,7 +6576,7 @@ with_stmt_rule(Parser *p) asdl_stmt_seq* b; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 631)) // token='with' + (_keyword = _PyPegen_expect_token(p, 634)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && @@ -6621,7 +6627,7 @@ with_stmt_rule(Parser *p) asdl_stmt_seq* b; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 631)) // token='with' + (_keyword = _PyPegen_expect_token(p, 634)) // token='with' && (a = (asdl_withitem_seq*)_gather_53_rule(p)) // ','.with_item+ && @@ -6670,9 +6676,9 @@ with_stmt_rule(Parser *p) asdl_withitem_seq* a; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 670)) // token='async' + (_keyword = _PyPegen_expect_token(p, 673)) // token='async' && - (_keyword_1 = _PyPegen_expect_token(p, 631)) // token='with' + (_keyword_1 = _PyPegen_expect_token(p, 634)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && @@ -6722,9 +6728,9 @@ with_stmt_rule(Parser *p) asdl_stmt_seq* b; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 670)) // token='async' + (_keyword = _PyPegen_expect_token(p, 673)) // token='async' && - (_keyword_1 = _PyPegen_expect_token(p, 631)) // token='with' + (_keyword_1 = _PyPegen_expect_token(p, 634)) // token='with' && (a = (asdl_withitem_seq*)_gather_57_rule(p)) // ','.with_item+ && @@ -6810,7 +6816,7 @@ with_item_rule(Parser *p) if ( (e = expression_rule(p)) // expression && - (_keyword = _PyPegen_expect_token(p, 656)) // token='as' + (_keyword = _PyPegen_expect_token(p, 659)) // token='as' && (t = star_target_rule(p)) // star_target && @@ -6935,7 +6941,7 @@ try_stmt_rule(Parser *p) asdl_stmt_seq* b; asdl_stmt_seq* f; if ( - (_keyword = _PyPegen_expect_token(p, 640)) // token='try' + (_keyword = _PyPegen_expect_token(p, 643)) // token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -6979,7 +6985,7 @@ try_stmt_rule(Parser *p) asdl_excepthandler_seq* ex; void *f; if ( - (_keyword = _PyPegen_expect_token(p, 640)) // token='try' + (_keyword = _PyPegen_expect_token(p, 643)) // token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -7027,7 +7033,7 @@ try_stmt_rule(Parser *p) asdl_excepthandler_seq* ex; void *f; if ( - (_keyword = _PyPegen_expect_token(p, 640)) // token='try' + (_keyword = _PyPegen_expect_token(p, 643)) // token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -7125,7 +7131,7 @@ except_block_rule(Parser *p) expr_ty e; void *t; if ( - (_keyword = _PyPegen_expect_token(p, 653)) // token='except' + (_keyword = _PyPegen_expect_token(p, 656)) // token='except' && (e = expression_rule(p)) // expression && @@ -7168,7 +7174,7 @@ except_block_rule(Parser *p) Token * _literal; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 653)) // token='except' + (_keyword = _PyPegen_expect_token(p, 656)) // token='except' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -7279,7 +7285,7 @@ except_star_block_rule(Parser *p) expr_ty e; void *t; if ( - (_keyword = _PyPegen_expect_token(p, 653)) // token='except' + (_keyword = _PyPegen_expect_token(p, 656)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -7381,7 +7387,7 @@ finally_block_rule(Parser *p) Token * _literal; asdl_stmt_seq* a; if ( - (_keyword = _PyPegen_expect_token(p, 649)) // token='finally' + (_keyword = _PyPegen_expect_token(p, 652)) // token='finally' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // 
forced_token=':' && @@ -7689,7 +7695,7 @@ guard_rule(Parser *p) Token * _keyword; expr_ty guard; if ( - (_keyword = _PyPegen_expect_token(p, 658)) // token='if' + (_keyword = _PyPegen_expect_token(p, 661)) // token='if' && (guard = named_expression_rule(p)) // named_expression ) @@ -7884,7 +7890,7 @@ as_pattern_rule(Parser *p) if ( (pattern = or_pattern_rule(p)) // or_pattern && - (_keyword = _PyPegen_expect_token(p, 656)) // token='as' + (_keyword = _PyPegen_expect_token(p, 659)) // token='as' && (target = pattern_capture_target_rule(p)) // pattern_capture_target ) @@ -8318,7 +8324,7 @@ literal_pattern_rule(Parser *p) D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 611)) // token='None' + (_keyword = _PyPegen_expect_token(p, 614)) // token='None' ) { D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); @@ -8351,7 +8357,7 @@ literal_pattern_rule(Parser *p) D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 610)) // token='True' + (_keyword = _PyPegen_expect_token(p, 613)) // token='True' ) { D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); @@ -8384,7 +8390,7 @@ literal_pattern_rule(Parser *p) D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 612)) // token='False' + (_keyword = _PyPegen_expect_token(p, 615)) // token='False' ) { D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); @@ -8510,7 +8516,7 @@ literal_expr_rule(Parser *p) D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 611)) // token='None' + (_keyword = _PyPegen_expect_token(p, 614)) // token='None' ) { D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); @@ -8543,7 +8549,7 @@ literal_expr_rule(Parser *p) D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 610)) // token='True' + (_keyword = _PyPegen_expect_token(p, 613)) // token='True' ) { D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); @@ -8576,7 +8582,7 @@ literal_expr_rule(Parser *p) D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 612)) // token='False' + (_keyword = _PyPegen_expect_token(p, 615)) // token='False' ) { D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); @@ -11107,11 +11113,11 @@ expression_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 658)) // token='if' + (_keyword = _PyPegen_expect_token(p, 661)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (_keyword_1 = _PyPegen_expect_token(p, 661)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 664)) // token='else' && (c = expression_rule(p)) // expression ) @@ -11217,7 +11223,7 @@ yield_expr_rule(Parser *p) if ( (_keyword = _PyPegen_expect_token(p, 580)) // 
token='yield' && - (_keyword_1 = _PyPegen_expect_token(p, 618)) // token='from' + (_keyword_1 = _PyPegen_expect_token(p, 621)) // token='from' && (a = expression_rule(p)) // expression ) @@ -12649,7 +12655,7 @@ notin_bitwise_or_rule(Parser *p) if ( (_keyword = _PyPegen_expect_token(p, 588)) // token='not' && - (_keyword_1 = _PyPegen_expect_token(p, 669)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 672)) // token='in' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -12695,7 +12701,7 @@ in_bitwise_or_rule(Parser *p) Token * _keyword; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 669)) // token='in' + (_keyword = _PyPegen_expect_token(p, 672)) // token='in' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -14611,7 +14617,7 @@ atom_rule(Parser *p) D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 610)) // token='True' + (_keyword = _PyPegen_expect_token(p, 613)) // token='True' ) { D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); @@ -14644,7 +14650,7 @@ atom_rule(Parser *p) D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 612)) // token='False' + (_keyword = _PyPegen_expect_token(p, 615)) // token='False' ) { D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); @@ -14677,7 +14683,7 @@ atom_rule(Parser *p) D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 611)) // token='None' + (_keyword = _PyPegen_expect_token(p, 614)) // token='None' ) { D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); @@ -14945,7 +14951,7 @@ lambdef_rule(Parser *p) void *a; expr_ty b; if ( - (_keyword = _PyPegen_expect_token(p, 609)) // token='lambda' + (_keyword = _PyPegen_expect_token(p, 612)) // token='lambda' && (a = lambda_params_rule(p), !p->error_indicator) // lambda_params? && @@ -16848,6 +16854,7 @@ for_if_clauses_rule(Parser *p) // for_if_clause: // | 'async' 'for' star_targets 'in' ~ disjunction (('if' disjunction))* // | 'for' star_targets 'in' ~ disjunction (('if' disjunction))* +// | 'async'? 'for' (bitwise_or ((',' bitwise_or))* ','?) !'in' // | invalid_for_target static comprehension_ty for_if_clause_rule(Parser *p) @@ -16875,13 +16882,13 @@ for_if_clause_rule(Parser *p) expr_ty b; asdl_expr_seq* c; if ( - (_keyword = _PyPegen_expect_token(p, 670)) // token='async' + (_keyword = _PyPegen_expect_token(p, 673)) // token='async' && - (_keyword_1 = _PyPegen_expect_token(p, 668)) // token='for' + (_keyword_1 = _PyPegen_expect_token(p, 671)) // token='for' && (a = star_targets_rule(p)) // star_targets && - (_keyword_2 = _PyPegen_expect_token(p, 669)) // token='in' + (_keyword_2 = _PyPegen_expect_token(p, 672)) // token='in' && (_cut_var = 1) && @@ -16920,11 +16927,11 @@ for_if_clause_rule(Parser *p) expr_ty b; asdl_expr_seq* c; if ( - (_keyword = _PyPegen_expect_token(p, 668)) // token='for' + (_keyword = _PyPegen_expect_token(p, 671)) // token='for' && (a = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 669)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 672)) // token='in' && (_cut_var = 1) && @@ -16950,6 +16957,39 @@ for_if_clause_rule(Parser *p) return NULL; } } + { // 'async'? 
'for' (bitwise_or ((',' bitwise_or))* ','?) !'in' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> for_if_clause[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'? 'for' (bitwise_or ((',' bitwise_or))* ','?) !'in'")); + Token * _keyword; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + void *_tmp_122_var; + if ( + (_opt_var = _PyPegen_expect_token(p, 673), !p->error_indicator) // 'async'? + && + (_keyword = _PyPegen_expect_token(p, 671)) // token='for' + && + (_tmp_122_var = _tmp_122_rule(p)) // bitwise_or ((',' bitwise_or))* ','? + && + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 672) // token='in' + ) + { + D(fprintf(stderr, "%*c+ for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'? 'for' (bitwise_or ((',' bitwise_or))* ','?) !'in'")); + _res = RAISE_SYNTAX_ERROR ( "'in' expected after for-loop variables" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s for_if_clause[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async'? 'for' (bitwise_or ((',' bitwise_or))* ','?) !'in'")); + } if (p->call_invalid_rules) { // invalid_for_target if (p->error_indicator) { p->level--; @@ -17190,7 +17230,7 @@ genexp_rule(Parser *p) if ( (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = _tmp_122_rule(p)) // assignment_expression | expression !':=' + (a = _tmp_123_rule(p)) // assignment_expression | expression !':=' && (b = for_if_clauses_rule(p)) // for_if_clauses && @@ -17439,9 +17479,9 @@ args_rule(Parser *p) asdl_expr_seq* a; void *b; if ( - (a = (asdl_expr_seq*)_gather_123_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ + (a = (asdl_expr_seq*)_gather_124_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ && - (b = _tmp_125_rule(p), !p->error_indicator) // [',' kwargs] + (b = _tmp_126_rule(p), !p->error_indicator) // [',' kwargs] ) { D(fprintf(stderr, "%*c+ args[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.(starred_expression | (assignment_expression | expression !':=') !'=')+ [',' kwargs]")); @@ -17531,11 +17571,11 @@ kwargs_rule(Parser *p) asdl_seq * a; asdl_seq * b; if ( - (a = _gather_126_rule(p)) // ','.kwarg_or_starred+ + (a = _gather_127_rule(p)) // ','.kwarg_or_starred+ && (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (b = _gather_128_rule(p)) // ','.kwarg_or_double_starred+ + (b = _gather_129_rule(p)) // ','.kwarg_or_double_starred+ ) { D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+")); @@ -17557,13 +17597,13 @@ kwargs_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> kwargs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+")); - asdl_seq * _gather_130_var; + asdl_seq * _gather_131_var; if ( - (_gather_130_var = _gather_130_rule(p)) // ','.kwarg_or_starred+ + (_gather_131_var = _gather_131_rule(p)) // ','.kwarg_or_starred+ ) { D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+")); - _res = _gather_130_var; + _res = _gather_131_var; goto done; } p->mark = _mark; @@ -17576,13 +17616,13 @@ kwargs_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> kwargs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_double_starred+")); - 
asdl_seq * _gather_132_var; + asdl_seq * _gather_133_var; if ( - (_gather_132_var = _gather_132_rule(p)) // ','.kwarg_or_double_starred+ + (_gather_133_var = _gather_133_rule(p)) // ','.kwarg_or_double_starred+ ) { D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_double_starred+")); - _res = _gather_132_var; + _res = _gather_133_var; goto done; } p->mark = _mark; @@ -17971,7 +18011,7 @@ star_targets_rule(Parser *p) if ( (a = star_target_rule(p)) // star_target && - (b = _loop0_134_rule(p)) // ((',' star_target))* + (b = _loop0_135_rule(p)) // ((',' star_target))* && (_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? ) @@ -18027,7 +18067,7 @@ star_targets_list_seq_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings asdl_expr_seq* a; if ( - (a = (asdl_expr_seq*)_gather_135_rule(p)) // ','.star_target+ + (a = (asdl_expr_seq*)_gather_136_rule(p)) // ','.star_target+ && (_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? ) @@ -18077,7 +18117,7 @@ star_targets_tuple_seq_rule(Parser *p) if ( (a = star_target_rule(p)) // star_target && - (b = _loop1_137_rule(p)) // ((',' star_target))+ + (b = _loop1_138_rule(p)) // ((',' star_target))+ && (_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? ) @@ -18165,7 +18205,7 @@ star_target_rule(Parser *p) if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = _tmp_138_rule(p)) // !'*' star_target + (a = _tmp_139_rule(p)) // !'*' star_target ) { D(fprintf(stderr, "%*c+ star_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (!'*' star_target)")); @@ -19088,7 +19128,7 @@ del_targets_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings asdl_expr_seq* a; if ( - (a = (asdl_expr_seq*)_gather_139_rule(p)) // ','.del_target+ + (a = (asdl_expr_seq*)_gather_140_rule(p)) // ','.del_target+ && (_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? 
) @@ -19446,7 +19486,7 @@ type_expressions_rule(Parser *p) expr_ty b; expr_ty c; if ( - (a = _gather_141_rule(p)) // ','.expression+ + (a = _gather_142_rule(p)) // ','.expression+ && (_literal = _PyPegen_expect_token(p, 12)) // token=',' && @@ -19485,7 +19525,7 @@ type_expressions_rule(Parser *p) asdl_seq * a; expr_ty b; if ( - (a = _gather_143_rule(p)) // ','.expression+ + (a = _gather_144_rule(p)) // ','.expression+ && (_literal = _PyPegen_expect_token(p, 12)) // token=',' && @@ -19518,7 +19558,7 @@ type_expressions_rule(Parser *p) asdl_seq * a; expr_ty b; if ( - (a = _gather_145_rule(p)) // ','.expression+ + (a = _gather_146_rule(p)) // ','.expression+ && (_literal = _PyPegen_expect_token(p, 12)) // token=',' && @@ -19638,7 +19678,7 @@ type_expressions_rule(Parser *p) D(fprintf(stderr, "%*c> type_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.expression+")); asdl_expr_seq* a; if ( - (a = (asdl_expr_seq*)_gather_147_rule(p)) // ','.expression+ + (a = (asdl_expr_seq*)_gather_148_rule(p)) // ','.expression+ ) { D(fprintf(stderr, "%*c+ type_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.expression+")); @@ -19689,7 +19729,7 @@ func_type_comment_rule(Parser *p) && (t = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' && - _PyPegen_lookahead(1, _tmp_149_rule, p) + _PyPegen_lookahead(1, _tmp_150_rule, p) ) { D(fprintf(stderr, "%*c+ func_type_comment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE TYPE_COMMENT &(NEWLINE INDENT)")); @@ -19776,10 +19816,10 @@ invalid_arguments_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_arguments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs) | kwargs) ',' '*'")); Token * _literal; - void *_tmp_150_var; + void *_tmp_151_var; Token * b; if ( - (_tmp_150_var = _tmp_150_rule(p)) // (','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs) | kwargs + (_tmp_151_var = _tmp_151_rule(p)) // (','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs) | kwargs && (_literal = _PyPegen_expect_token(p, 12)) // token=',' && @@ -19817,7 +19857,7 @@ invalid_arguments_rule(Parser *p) && (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_opt_var = _tmp_151_rule(p), !p->error_indicator) // [args | expression for_if_clauses] + (_opt_var = _tmp_152_rule(p), !p->error_indicator) // [args | expression for_if_clauses] ) { D(fprintf(stderr, "%*c+ invalid_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses ',' [args | expression for_if_clauses]")); @@ -19877,13 +19917,13 @@ invalid_arguments_rule(Parser *p) expr_ty a; Token * b; if ( - (_opt_var = _tmp_152_rule(p), !p->error_indicator) // [(args ',')] + (_opt_var = _tmp_153_rule(p), !p->error_indicator) // [(args ',')] && (a = _PyPegen_name_token(p)) // NAME && (b = _PyPegen_expect_token(p, 22)) // token='=' && - _PyPegen_lookahead(1, _tmp_153_rule, p) + _PyPegen_lookahead(1, _tmp_154_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "[(args ',')] NAME '=' &(',' | ')')")); @@ -20021,7 +20061,7 @@ invalid_kwarg_rule(Parser *p) Token* a; Token * b; if ( - (a = (Token*)_tmp_154_rule(p)) // 'True' | 'False' | 'None' + (a = (Token*)_tmp_155_rule(p)) // 'True' | 'False' | 'None' && (b = _PyPegen_expect_token(p, 22)) // token='=' ) @@ -20081,7 +20121,7 @@ invalid_kwarg_rule(Parser *p) expr_ty a; Token * b; 
if ( - _PyPegen_lookahead(0, _tmp_155_rule, p) + _PyPegen_lookahead(0, _tmp_156_rule, p) && (a = expression_rule(p)) // expression && @@ -20184,11 +20224,11 @@ expression_without_invalid_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 658)) // token='if' + (_keyword = _PyPegen_expect_token(p, 661)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (_keyword_1 = _PyPegen_expect_token(p, 661)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 664)) // token='else' && (c = expression_rule(p)) // expression ) @@ -20337,7 +20377,7 @@ invalid_expression_rule(Parser *p) expr_ty a; expr_ty b; if ( - _PyPegen_lookahead(0, _tmp_156_rule, p) + _PyPegen_lookahead(0, _tmp_157_rule, p) && (a = disjunction_rule(p)) // disjunction && @@ -20369,11 +20409,11 @@ invalid_expression_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 658)) // token='if' + (_keyword = _PyPegen_expect_token(p, 661)) // token='if' && (b = disjunction_rule(p)) // disjunction && - _PyPegen_lookahead(0, _tmp_157_rule, p) + _PyPegen_lookahead(0, _tmp_158_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "disjunction 'if' disjunction !('else' | ':')")); @@ -20400,7 +20440,7 @@ invalid_expression_rule(Parser *p) Token * a; Token * b; if ( - (a = _PyPegen_expect_token(p, 609)) // token='lambda' + (a = _PyPegen_expect_token(p, 612)) // token='lambda' && (_opt_var = lambda_params_rule(p), !p->error_indicator) // lambda_params? && @@ -20494,7 +20534,7 @@ invalid_named_expression_rule(Parser *p) && (b = bitwise_or_rule(p)) // bitwise_or && - _PyPegen_lookahead(0, _tmp_158_rule, p) + _PyPegen_lookahead(0, _tmp_159_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '=' bitwise_or !('=' | ':=')")); @@ -20520,7 +20560,7 @@ invalid_named_expression_rule(Parser *p) Token * b; expr_ty bitwise_or_var; if ( - _PyPegen_lookahead(0, _tmp_159_rule, p) + _PyPegen_lookahead(0, _tmp_160_rule, p) && (a = bitwise_or_rule(p)) // bitwise_or && @@ -20528,7 +20568,7 @@ invalid_named_expression_rule(Parser *p) && (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or && - _PyPegen_lookahead(0, _tmp_160_rule, p) + _PyPegen_lookahead(0, _tmp_161_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!(list | tuple | genexp | 'True' | 'None' | 'False') bitwise_or '=' bitwise_or !('=' | ':=')")); @@ -20608,7 +20648,7 @@ invalid_assignment_rule(Parser *p) D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions* ':' expression")); Token * _literal; Token * _literal_1; - asdl_seq * _loop0_161_var; + asdl_seq * _loop0_162_var; expr_ty a; expr_ty expression_var; if ( @@ -20616,7 +20656,7 @@ invalid_assignment_rule(Parser *p) && (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_loop0_161_var = _loop0_161_rule(p)) // star_named_expressions* + (_loop0_162_var = _loop0_162_rule(p)) // star_named_expressions* && (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' && @@ -20673,10 +20713,10 @@ invalid_assignment_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* star_expressions '='")); Token * _literal; - asdl_seq * _loop0_162_var; + asdl_seq * _loop0_163_var; expr_ty a; if ( - 
(_loop0_162_var = _loop0_162_rule(p)) // ((star_targets '='))* + (_loop0_163_var = _loop0_163_rule(p)) // ((star_targets '='))* && (a = star_expressions_rule(p)) // star_expressions && @@ -20703,10 +20743,10 @@ invalid_assignment_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* yield_expr '='")); Token * _literal; - asdl_seq * _loop0_163_var; + asdl_seq * _loop0_164_var; expr_ty a; if ( - (_loop0_163_var = _loop0_163_rule(p)) // ((star_targets '='))* + (_loop0_164_var = _loop0_164_rule(p)) // ((star_targets '='))* && (a = yield_expr_rule(p)) // yield_expr && @@ -20732,7 +20772,7 @@ invalid_assignment_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)")); - void *_tmp_164_var; + void *_tmp_165_var; expr_ty a; AugOperator* augassign_var; if ( @@ -20740,7 +20780,7 @@ invalid_assignment_rule(Parser *p) && (augassign_var = augassign_rule(p)) // augassign && - (_tmp_164_var = _tmp_164_rule(p)) // yield_expr | star_expressions + (_tmp_165_var = _tmp_165_rule(p)) // yield_expr | star_expressions ) { D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)")); @@ -20871,7 +20911,7 @@ invalid_del_stmt_rule(Parser *p) Token * _keyword; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 613)) // token='del' + (_keyword = _PyPegen_expect_token(p, 616)) // token='del' && (a = star_expressions_rule(p)) // star_expressions ) @@ -20962,11 +21002,11 @@ invalid_comprehension_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '(' | '{') starred_expression for_if_clauses")); - void *_tmp_165_var; + void *_tmp_166_var; expr_ty a; asdl_comprehension_seq* for_if_clauses_var; if ( - (_tmp_165_var = _tmp_165_rule(p)) // '[' | '(' | '{' + (_tmp_166_var = _tmp_166_rule(p)) // '[' | '(' | '{' && (a = starred_expression_rule(p)) // starred_expression && @@ -20993,12 +21033,12 @@ invalid_comprehension_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '{') star_named_expression ',' star_named_expressions for_if_clauses")); Token * _literal; - void *_tmp_166_var; + void *_tmp_167_var; expr_ty a; asdl_expr_seq* b; asdl_comprehension_seq* for_if_clauses_var; if ( - (_tmp_166_var = _tmp_166_rule(p)) // '[' | '{' + (_tmp_167_var = _tmp_167_rule(p)) // '[' | '{' && (a = star_named_expression_rule(p)) // star_named_expression && @@ -21028,12 +21068,12 @@ invalid_comprehension_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '{') star_named_expression ',' for_if_clauses")); - void *_tmp_167_var; + void *_tmp_168_var; expr_ty a; Token * b; asdl_comprehension_seq* for_if_clauses_var; if ( - (_tmp_167_var = _tmp_167_rule(p)) // '[' | '{' + (_tmp_168_var = _tmp_168_rule(p)) // '[' | '{' && (a = star_named_expression_rule(p)) // star_named_expression && @@ -21168,13 +21208,13 @@ invalid_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(slash_no_default | slash_with_default) param_maybe_default* '/'")); - asdl_seq * _loop0_169_var; - void *_tmp_168_var; + asdl_seq * _loop0_170_var; + void *_tmp_169_var; Token * a; if 
( - (_tmp_168_var = _tmp_168_rule(p)) // slash_no_default | slash_with_default + (_tmp_169_var = _tmp_169_rule(p)) // slash_no_default | slash_with_default && - (_loop0_169_var = _loop0_169_rule(p)) // param_maybe_default* + (_loop0_170_var = _loop0_170_rule(p)) // param_maybe_default* && (a = _PyPegen_expect_token(p, 17)) // token='/' ) @@ -21198,7 +21238,7 @@ invalid_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default? param_no_default* invalid_parameters_helper param_no_default")); - asdl_seq * _loop0_170_var; + asdl_seq * _loop0_171_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings arg_ty a; @@ -21206,7 +21246,7 @@ invalid_parameters_rule(Parser *p) if ( (_opt_var = slash_no_default_rule(p), !p->error_indicator) // slash_no_default? && - (_loop0_170_var = _loop0_170_rule(p)) // param_no_default* + (_loop0_171_var = _loop0_171_rule(p)) // param_no_default* && (invalid_parameters_helper_var = invalid_parameters_helper_rule(p)) // invalid_parameters_helper && @@ -21232,18 +21272,18 @@ invalid_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default* '(' param_no_default+ ','? ')'")); - asdl_seq * _loop0_171_var; - asdl_seq * _loop1_172_var; + asdl_seq * _loop0_172_var; + asdl_seq * _loop1_173_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings Token * a; Token * b; if ( - (_loop0_171_var = _loop0_171_rule(p)) // param_no_default* + (_loop0_172_var = _loop0_172_rule(p)) // param_no_default* && (a = _PyPegen_expect_token(p, 7)) // token='(' && - (_loop1_172_var = _loop1_172_rule(p)) // param_no_default+ + (_loop1_173_var = _loop1_173_rule(p)) // param_no_default+ && (_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? 
&& @@ -21270,22 +21310,22 @@ invalid_parameters_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "[(slash_no_default | slash_with_default)] param_maybe_default* '*' (',' | param_no_default) param_maybe_default* '/'")); Token * _literal; - asdl_seq * _loop0_174_var; - asdl_seq * _loop0_176_var; + asdl_seq * _loop0_175_var; + asdl_seq * _loop0_177_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings - void *_tmp_175_var; + void *_tmp_176_var; Token * a; if ( - (_opt_var = _tmp_173_rule(p), !p->error_indicator) // [(slash_no_default | slash_with_default)] + (_opt_var = _tmp_174_rule(p), !p->error_indicator) // [(slash_no_default | slash_with_default)] && - (_loop0_174_var = _loop0_174_rule(p)) // param_maybe_default* + (_loop0_175_var = _loop0_175_rule(p)) // param_maybe_default* && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_175_var = _tmp_175_rule(p)) // ',' | param_no_default + (_tmp_176_var = _tmp_176_rule(p)) // ',' | param_no_default && - (_loop0_176_var = _loop0_176_rule(p)) // param_maybe_default* + (_loop0_177_var = _loop0_177_rule(p)) // param_maybe_default* && (a = _PyPegen_expect_token(p, 17)) // token='/' ) @@ -21310,10 +21350,10 @@ invalid_parameters_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default+ '/' '*'")); Token * _literal; - asdl_seq * _loop1_177_var; + asdl_seq * _loop1_178_var; Token * a; if ( - (_loop1_177_var = _loop1_177_rule(p)) // param_maybe_default+ + (_loop1_178_var = _loop1_178_rule(p)) // param_maybe_default+ && (_literal = _PyPegen_expect_token(p, 17)) // token='/' && @@ -21362,7 +21402,7 @@ invalid_default_rule(Parser *p) if ( (a = _PyPegen_expect_token(p, 22)) // token='=' && - _PyPegen_lookahead(1, _tmp_178_rule, p) + _PyPegen_lookahead(1, _tmp_179_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' &(')' | ',')")); @@ -21407,12 +21447,12 @@ invalid_star_etc_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))")); - void *_tmp_179_var; + void *_tmp_180_var; Token * a; if ( (a = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_179_var = _tmp_179_rule(p)) // ')' | ',' (')' | '**') + (_tmp_180_var = _tmp_180_rule(p)) // ')' | ',' (')' | '**') ) { D(fprintf(stderr, "%*c+ invalid_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))")); @@ -21495,20 +21535,20 @@ invalid_star_etc_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (param_no_default | ',') param_maybe_default* '*' (param_no_default | ',')")); Token * _literal; - asdl_seq * _loop0_181_var; - void *_tmp_180_var; - void *_tmp_182_var; + asdl_seq * _loop0_182_var; + void *_tmp_181_var; + void *_tmp_183_var; Token * a; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_180_var = _tmp_180_rule(p)) // param_no_default | ',' + (_tmp_181_var = _tmp_181_rule(p)) // param_no_default | ',' && - (_loop0_181_var = _loop0_181_rule(p)) // param_maybe_default* + (_loop0_182_var = _loop0_182_rule(p)) // param_maybe_default* && (a = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_182_var = _tmp_182_rule(p)) // param_no_default | ',' + (_tmp_183_var = _tmp_183_rule(p)) // param_no_default | ',' ) { D(fprintf(stderr, "%*c+ invalid_star_etc[%d-%d]: %s 
succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (param_no_default | ',') param_maybe_default* '*' (param_no_default | ',')")); @@ -21623,7 +21663,7 @@ invalid_kwds_rule(Parser *p) && (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && - (a = (Token*)_tmp_183_rule(p)) // '*' | '**' | '/' + (a = (Token*)_tmp_184_rule(p)) // '*' | '**' | '/' ) { D(fprintf(stderr, "%*c+ invalid_kwds[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' param ',' ('*' | '**' | '/')")); @@ -21688,13 +21728,13 @@ invalid_parameters_helper_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_parameters_helper[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default+")); - asdl_seq * _loop1_184_var; + asdl_seq * _loop1_185_var; if ( - (_loop1_184_var = _loop1_184_rule(p)) // param_with_default+ + (_loop1_185_var = _loop1_185_rule(p)) // param_with_default+ ) { D(fprintf(stderr, "%*c+ invalid_parameters_helper[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_with_default+")); - _res = _loop1_184_var; + _res = _loop1_185_var; goto done; } p->mark = _mark; @@ -21759,13 +21799,13 @@ invalid_lambda_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(lambda_slash_no_default | lambda_slash_with_default) lambda_param_maybe_default* '/'")); - asdl_seq * _loop0_186_var; - void *_tmp_185_var; + asdl_seq * _loop0_187_var; + void *_tmp_186_var; Token * a; if ( - (_tmp_185_var = _tmp_185_rule(p)) // lambda_slash_no_default | lambda_slash_with_default + (_tmp_186_var = _tmp_186_rule(p)) // lambda_slash_no_default | lambda_slash_with_default && - (_loop0_186_var = _loop0_186_rule(p)) // lambda_param_maybe_default* + (_loop0_187_var = _loop0_187_rule(p)) // lambda_param_maybe_default* && (a = _PyPegen_expect_token(p, 17)) // token='/' ) @@ -21789,7 +21829,7 @@ invalid_lambda_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default? lambda_param_no_default* invalid_lambda_parameters_helper lambda_param_no_default")); - asdl_seq * _loop0_187_var; + asdl_seq * _loop0_188_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings arg_ty a; @@ -21797,7 +21837,7 @@ invalid_lambda_parameters_rule(Parser *p) if ( (_opt_var = lambda_slash_no_default_rule(p), !p->error_indicator) // lambda_slash_no_default? && - (_loop0_187_var = _loop0_187_rule(p)) // lambda_param_no_default* + (_loop0_188_var = _loop0_188_rule(p)) // lambda_param_no_default* && (invalid_lambda_parameters_helper_var = invalid_lambda_parameters_helper_rule(p)) // invalid_lambda_parameters_helper && @@ -21823,18 +21863,18 @@ invalid_lambda_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* '(' ','.lambda_param+ ','? ')'")); - asdl_seq * _gather_189_var; - asdl_seq * _loop0_188_var; + asdl_seq * _gather_190_var; + asdl_seq * _loop0_189_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings Token * a; Token * b; if ( - (_loop0_188_var = _loop0_188_rule(p)) // lambda_param_no_default* + (_loop0_189_var = _loop0_189_rule(p)) // lambda_param_no_default* && (a = _PyPegen_expect_token(p, 7)) // token='(' && - (_gather_189_var = _gather_189_rule(p)) // ','.lambda_param+ + (_gather_190_var = _gather_190_rule(p)) // ','.lambda_param+ && (_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? 
&& @@ -21861,22 +21901,22 @@ invalid_lambda_parameters_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "[(lambda_slash_no_default | lambda_slash_with_default)] lambda_param_maybe_default* '*' (',' | lambda_param_no_default) lambda_param_maybe_default* '/'")); Token * _literal; - asdl_seq * _loop0_192_var; - asdl_seq * _loop0_194_var; + asdl_seq * _loop0_193_var; + asdl_seq * _loop0_195_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings - void *_tmp_193_var; + void *_tmp_194_var; Token * a; if ( - (_opt_var = _tmp_191_rule(p), !p->error_indicator) // [(lambda_slash_no_default | lambda_slash_with_default)] + (_opt_var = _tmp_192_rule(p), !p->error_indicator) // [(lambda_slash_no_default | lambda_slash_with_default)] && - (_loop0_192_var = _loop0_192_rule(p)) // lambda_param_maybe_default* + (_loop0_193_var = _loop0_193_rule(p)) // lambda_param_maybe_default* && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_193_var = _tmp_193_rule(p)) // ',' | lambda_param_no_default + (_tmp_194_var = _tmp_194_rule(p)) // ',' | lambda_param_no_default && - (_loop0_194_var = _loop0_194_rule(p)) // lambda_param_maybe_default* + (_loop0_195_var = _loop0_195_rule(p)) // lambda_param_maybe_default* && (a = _PyPegen_expect_token(p, 17)) // token='/' ) @@ -21901,10 +21941,10 @@ invalid_lambda_parameters_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default+ '/' '*'")); Token * _literal; - asdl_seq * _loop1_195_var; + asdl_seq * _loop1_196_var; Token * a; if ( - (_loop1_195_var = _loop1_195_rule(p)) // lambda_param_maybe_default+ + (_loop1_196_var = _loop1_196_rule(p)) // lambda_param_maybe_default+ && (_literal = _PyPegen_expect_token(p, 17)) // token='/' && @@ -21975,13 +22015,13 @@ invalid_lambda_parameters_helper_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_lambda_parameters_helper[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+")); - asdl_seq * _loop1_196_var; + asdl_seq * _loop1_197_var; if ( - (_loop1_196_var = _loop1_196_rule(p)) // lambda_param_with_default+ + (_loop1_197_var = _loop1_197_rule(p)) // lambda_param_with_default+ ) { D(fprintf(stderr, "%*c+ invalid_lambda_parameters_helper[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+")); - _res = _loop1_196_var; + _res = _loop1_197_var; goto done; } p->mark = _mark; @@ -22017,11 +22057,11 @@ invalid_lambda_star_etc_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))")); Token * _literal; - void *_tmp_197_var; + void *_tmp_198_var; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_197_var = _tmp_197_rule(p)) // ':' | ',' (':' | '**') + (_tmp_198_var = _tmp_198_rule(p)) // ':' | ',' (':' | '**') ) { D(fprintf(stderr, "%*c+ invalid_lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))")); @@ -22074,20 +22114,20 @@ invalid_lambda_star_etc_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (lambda_param_no_default | ',') lambda_param_maybe_default* '*' (lambda_param_no_default | ',')")); Token * _literal; - asdl_seq * _loop0_199_var; - void *_tmp_198_var; - void *_tmp_200_var; + asdl_seq * _loop0_200_var; + void *_tmp_199_var; + void *_tmp_201_var; Token * a; if ( (_literal 
= _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_198_var = _tmp_198_rule(p)) // lambda_param_no_default | ',' + (_tmp_199_var = _tmp_199_rule(p)) // lambda_param_no_default | ',' && - (_loop0_199_var = _loop0_199_rule(p)) // lambda_param_maybe_default* + (_loop0_200_var = _loop0_200_rule(p)) // lambda_param_maybe_default* && (a = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_200_var = _tmp_200_rule(p)) // lambda_param_no_default | ',' + (_tmp_201_var = _tmp_201_rule(p)) // lambda_param_no_default | ',' ) { D(fprintf(stderr, "%*c+ invalid_lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (lambda_param_no_default | ',') lambda_param_maybe_default* '*' (lambda_param_no_default | ',')")); @@ -22205,7 +22245,7 @@ invalid_lambda_kwds_rule(Parser *p) && (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && - (a = (Token*)_tmp_201_rule(p)) // '*' | '**' | '/' + (a = (Token*)_tmp_202_rule(p)) // '*' | '**' | '/' ) { D(fprintf(stderr, "%*c+ invalid_lambda_kwds[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' lambda_param ',' ('*' | '**' | '/')")); @@ -22307,11 +22347,11 @@ invalid_with_item_rule(Parser *p) if ( (expression_var = expression_rule(p)) // expression && - (_keyword = _PyPegen_expect_token(p, 656)) // token='as' + (_keyword = _PyPegen_expect_token(p, 659)) // token='as' && (a = expression_rule(p)) // expression && - _PyPegen_lookahead(1, _tmp_202_rule, p) + _PyPegen_lookahead(1, _tmp_203_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_with_item[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression 'as' expression &(',' | ')' | ':')")); @@ -22357,9 +22397,9 @@ invalid_for_target_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings expr_ty a; if ( - (_opt_var = _PyPegen_expect_token(p, 670), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 673), !p->error_indicator) // 'async'? 
&& - (_keyword = _PyPegen_expect_token(p, 668)) // token='for' + (_keyword = _PyPegen_expect_token(p, 671)) // token='for' && (a = star_expressions_rule(p)) // star_expressions ) @@ -22484,16 +22524,16 @@ invalid_import_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_import[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'import' ','.dotted_name+ 'from' dotted_name")); - asdl_seq * _gather_203_var; + asdl_seq * _gather_204_var; Token * _keyword; Token * a; expr_ty dotted_name_var; if ( - (a = _PyPegen_expect_token(p, 617)) // token='import' + (a = _PyPegen_expect_token(p, 620)) // token='import' && - (_gather_203_var = _gather_203_rule(p)) // ','.dotted_name+ + (_gather_204_var = _gather_204_rule(p)) // ','.dotted_name+ && - (_keyword = _PyPegen_expect_token(p, 618)) // token='from' + (_keyword = _PyPegen_expect_token(p, 621)) // token='from' && (dotted_name_var = dotted_name_rule(p)) // dotted_name ) @@ -22589,7 +22629,7 @@ invalid_compound_stmt_rule(Parser *p) Token * a; expr_ty named_expression_var; if ( - (a = _PyPegen_expect_token(p, 660)) // token='elif' + (a = _PyPegen_expect_token(p, 663)) // token='elif' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -22618,7 +22658,7 @@ invalid_compound_stmt_rule(Parser *p) Token * _literal; Token * a; if ( - (a = _PyPegen_expect_token(p, 661)) // token='else' + (a = _PyPegen_expect_token(p, 664)) // token='else' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) @@ -22663,17 +22703,17 @@ invalid_with_stmt_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'? 'with' ','.(expression ['as' star_target])+ NEWLINE")); - asdl_seq * _gather_205_var; + asdl_seq * _gather_206_var; Token * _keyword; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, 670), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 673), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 631)) // token='with' + (_keyword = _PyPegen_expect_token(p, 634)) // token='with' && - (_gather_205_var = _gather_205_rule(p)) // ','.(expression ['as' star_target])+ + (_gather_206_var = _gather_206_rule(p)) // ','.(expression ['as' star_target])+ && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) @@ -22697,7 +22737,7 @@ invalid_with_stmt_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE")); - asdl_seq * _gather_207_var; + asdl_seq * _gather_208_var; Token * _keyword; Token * _literal; Token * _literal_1; @@ -22707,13 +22747,13 @@ invalid_with_stmt_rule(Parser *p) UNUSED(_opt_var_1); // Silence compiler warnings Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, 670), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 673), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 631)) // token='with' + (_keyword = _PyPegen_expect_token(p, 634)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (_gather_207_var = _gather_207_rule(p)) // ','.(expressions ['as' star_target])+ + (_gather_208_var = _gather_208_rule(p)) // ','.(expressions ['as' star_target])+ && (_opt_var_1 = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? 
&& @@ -22762,18 +22802,18 @@ invalid_with_stmt_indent_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_with_stmt_indent[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT")); - asdl_seq * _gather_209_var; + asdl_seq * _gather_210_var; Token * _literal; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings Token * a; Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, 670), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 673), !p->error_indicator) // 'async'? && - (a = _PyPegen_expect_token(p, 631)) // token='with' + (a = _PyPegen_expect_token(p, 634)) // token='with' && - (_gather_209_var = _gather_209_rule(p)) // ','.(expression ['as' star_target])+ + (_gather_210_var = _gather_210_rule(p)) // ','.(expression ['as' star_target])+ && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -22801,7 +22841,7 @@ invalid_with_stmt_indent_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_with_stmt_indent[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT")); - asdl_seq * _gather_211_var; + asdl_seq * _gather_212_var; Token * _literal; Token * _literal_1; Token * _literal_2; @@ -22812,13 +22852,13 @@ invalid_with_stmt_indent_rule(Parser *p) Token * a; Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, 670), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 673), !p->error_indicator) // 'async'? && - (a = _PyPegen_expect_token(p, 631)) // token='with' + (a = _PyPegen_expect_token(p, 634)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (_gather_211_var = _gather_211_rule(p)) // ','.(expressions ['as' star_target])+ + (_gather_212_var = _gather_212_rule(p)) // ','.(expressions ['as' star_target])+ && (_opt_var_1 = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? 
&& @@ -22877,7 +22917,7 @@ invalid_try_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 640)) // token='try' + (a = _PyPegen_expect_token(p, 643)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -22909,13 +22949,13 @@ invalid_try_stmt_rule(Parser *p) Token * _literal; asdl_stmt_seq* block_var; if ( - (_keyword = _PyPegen_expect_token(p, 640)) // token='try' + (_keyword = _PyPegen_expect_token(p, 643)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (block_var = block_rule(p)) // block && - _PyPegen_lookahead(0, _tmp_213_rule, p) + _PyPegen_lookahead(0, _tmp_214_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_try_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'try' ':' block !('except' | 'finally')")); @@ -22940,29 +22980,29 @@ invalid_try_stmt_rule(Parser *p) Token * _keyword; Token * _literal; Token * _literal_1; - asdl_seq * _loop0_214_var; - asdl_seq * _loop1_215_var; + asdl_seq * _loop0_215_var; + asdl_seq * _loop1_216_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings Token * a; Token * b; expr_ty expression_var; if ( - (_keyword = _PyPegen_expect_token(p, 640)) // token='try' + (_keyword = _PyPegen_expect_token(p, 643)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (_loop0_214_var = _loop0_214_rule(p)) // block* + (_loop0_215_var = _loop0_215_rule(p)) // block* && - (_loop1_215_var = _loop1_215_rule(p)) // except_block+ + (_loop1_216_var = _loop1_216_rule(p)) // except_block+ && - (a = _PyPegen_expect_token(p, 653)) // token='except' + (a = _PyPegen_expect_token(p, 656)) // token='except' && (b = _PyPegen_expect_token(p, 16)) // token='*' && (expression_var = expression_rule(p)) // expression && - (_opt_var = _tmp_216_rule(p), !p->error_indicator) // ['as' NAME] + (_opt_var = _tmp_217_rule(p), !p->error_indicator) // ['as' NAME] && (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' ) @@ -22989,23 +23029,23 @@ invalid_try_stmt_rule(Parser *p) Token * _keyword; Token * _literal; Token * _literal_1; - asdl_seq * _loop0_217_var; - asdl_seq * _loop1_218_var; + asdl_seq * _loop0_218_var; + asdl_seq * _loop1_219_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings Token * a; if ( - (_keyword = _PyPegen_expect_token(p, 640)) // token='try' + (_keyword = _PyPegen_expect_token(p, 643)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (_loop0_217_var = _loop0_217_rule(p)) // block* + (_loop0_218_var = _loop0_218_rule(p)) // block* && - (_loop1_218_var = _loop1_218_rule(p)) // except_star_block+ + (_loop1_219_var = _loop1_219_rule(p)) // except_star_block+ && - (a = _PyPegen_expect_token(p, 653)) // token='except' + (a = _PyPegen_expect_token(p, 656)) // token='except' && - (_opt_var = _tmp_219_rule(p), !p->error_indicator) // [expression ['as' NAME]] + (_opt_var = _tmp_220_rule(p), !p->error_indicator) // [expression ['as' NAME]] && (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' ) @@ -23062,7 +23102,7 @@ invalid_except_stmt_rule(Parser *p) expr_ty a; expr_ty expressions_var; if ( - (_keyword = _PyPegen_expect_token(p, 653)) // token='except' + (_keyword = _PyPegen_expect_token(p, 656)) // token='except' && (_opt_var = _PyPegen_expect_token(p, 16), !p->error_indicator) // '*'? 
&& @@ -23072,7 +23112,7 @@ invalid_except_stmt_rule(Parser *p) && (expressions_var = expressions_rule(p)) // expressions && - (_opt_var_1 = _tmp_220_rule(p), !p->error_indicator) // ['as' NAME] + (_opt_var_1 = _tmp_221_rule(p), !p->error_indicator) // ['as' NAME] && (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' ) @@ -23104,13 +23144,13 @@ invalid_except_stmt_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 653)) // token='except' + (a = _PyPegen_expect_token(p, 656)) // token='except' && (_opt_var = _PyPegen_expect_token(p, 16), !p->error_indicator) // '*'? && (expression_var = expression_rule(p)) // expression && - (_opt_var_1 = _tmp_221_rule(p), !p->error_indicator) // ['as' NAME] + (_opt_var_1 = _tmp_222_rule(p), !p->error_indicator) // ['as' NAME] && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) @@ -23137,7 +23177,7 @@ invalid_except_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 653)) // token='except' + (a = _PyPegen_expect_token(p, 656)) // token='except' && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) @@ -23162,14 +23202,14 @@ invalid_except_stmt_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_except_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' '*' (NEWLINE | ':')")); Token * _literal; - void *_tmp_222_var; + void *_tmp_223_var; Token * a; if ( - (a = _PyPegen_expect_token(p, 653)) // token='except' + (a = _PyPegen_expect_token(p, 656)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_222_var = _tmp_222_rule(p)) // NEWLINE | ':' + (_tmp_223_var = _tmp_223_rule(p)) // NEWLINE | ':' ) { D(fprintf(stderr, "%*c+ invalid_except_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' '*' (NEWLINE | ':')")); @@ -23214,7 +23254,7 @@ invalid_finally_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 649)) // token='finally' + (a = _PyPegen_expect_token(p, 652)) // token='finally' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -23270,11 +23310,11 @@ invalid_except_stmt_indent_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 653)) // token='except' + (a = _PyPegen_expect_token(p, 656)) // token='except' && (expression_var = expression_rule(p)) // expression && - (_opt_var = _tmp_223_rule(p), !p->error_indicator) // ['as' NAME] + (_opt_var = _tmp_224_rule(p), !p->error_indicator) // ['as' NAME] && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -23306,7 +23346,7 @@ invalid_except_stmt_indent_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 653)) // token='except' + (a = _PyPegen_expect_token(p, 656)) // token='except' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -23362,13 +23402,13 @@ invalid_except_star_stmt_indent_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 653)) // token='except' + (a = _PyPegen_expect_token(p, 656)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (expression_var = expression_rule(p)) // expression && - (_opt_var = _tmp_224_rule(p), !p->error_indicator) // ['as' NAME] + (_opt_var = _tmp_225_rule(p), !p->error_indicator) // ['as' NAME] && (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' && @@ -23601,7 +23641,7 @@ invalid_as_pattern_rule(Parser *p) if ( (or_pattern_var = or_pattern_rule(p)) // 
or_pattern && - (_keyword = _PyPegen_expect_token(p, 656)) // token='as' + (_keyword = _PyPegen_expect_token(p, 659)) // token='as' && (a = _PyPegen_expect_soft_keyword(p, "_")) // soft_keyword='"_"' ) @@ -23631,7 +23671,7 @@ invalid_as_pattern_rule(Parser *p) if ( (or_pattern_var = or_pattern_rule(p)) // or_pattern && - (_keyword = _PyPegen_expect_token(p, 656)) // token='as' + (_keyword = _PyPegen_expect_token(p, 659)) // token='as' && _PyPegen_lookahead_with_name(0, _PyPegen_name_token, p) && @@ -23732,7 +23772,7 @@ invalid_class_argument_pattern_rule(Parser *p) asdl_pattern_seq* a; asdl_seq* keyword_patterns_var; if ( - (_opt_var = _tmp_225_rule(p), !p->error_indicator) // [positional_patterns ','] + (_opt_var = _tmp_226_rule(p), !p->error_indicator) // [positional_patterns ','] && (keyword_patterns_var = keyword_patterns_rule(p)) // keyword_patterns && @@ -23785,7 +23825,7 @@ invalid_if_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 658)) // token='if' + (_keyword = _PyPegen_expect_token(p, 661)) // token='if' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -23816,7 +23856,7 @@ invalid_if_stmt_rule(Parser *p) expr_ty a_1; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 658)) // token='if' + (a = _PyPegen_expect_token(p, 661)) // token='if' && (a_1 = named_expression_rule(p)) // named_expression && @@ -23871,7 +23911,7 @@ invalid_elif_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 660)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 663)) // token='elif' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -23902,7 +23942,7 @@ invalid_elif_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 660)) // token='elif' + (a = _PyPegen_expect_token(p, 663)) // token='elif' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -23955,7 +23995,7 @@ invalid_else_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 661)) // token='else' + (a = _PyPegen_expect_token(p, 664)) // token='else' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -24008,7 +24048,7 @@ invalid_while_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 663)) // token='while' + (_keyword = _PyPegen_expect_token(p, 666)) // token='while' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -24039,7 +24079,7 @@ invalid_while_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 663)) // token='while' + (a = _PyPegen_expect_token(p, 666)) // token='while' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -24098,13 +24138,13 @@ invalid_for_stmt_rule(Parser *p) expr_ty star_expressions_var; expr_ty star_targets_var; if ( - (_opt_var = _PyPegen_expect_token(p, 670), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 673), !p->error_indicator) // 'async'? 
&& - (_keyword = _PyPegen_expect_token(p, 668)) // token='for' + (_keyword = _PyPegen_expect_token(p, 671)) // token='for' && (star_targets_var = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 669)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 672)) // token='in' && (star_expressions_var = star_expressions_rule(p)) // star_expressions && @@ -24139,13 +24179,13 @@ invalid_for_stmt_rule(Parser *p) expr_ty star_expressions_var; expr_ty star_targets_var; if ( - (_opt_var = _PyPegen_expect_token(p, 670), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 673), !p->error_indicator) // 'async'? && - (a = _PyPegen_expect_token(p, 668)) // token='for' + (a = _PyPegen_expect_token(p, 671)) // token='for' && (star_targets_var = star_targets_rule(p)) // star_targets && - (_keyword = _PyPegen_expect_token(p, 669)) // token='in' + (_keyword = _PyPegen_expect_token(p, 672)) // token='in' && (star_expressions_var = star_expressions_rule(p)) // star_expressions && @@ -24210,9 +24250,9 @@ invalid_def_raw_rule(Parser *p) expr_ty name_var; Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, 670), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 673), !p->error_indicator) // 'async'? && - (a = _PyPegen_expect_token(p, 671)) // token='def' + (a = _PyPegen_expect_token(p, 674)) // token='def' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -24224,7 +24264,7 @@ invalid_def_raw_rule(Parser *p) && (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (_opt_var_3 = _tmp_226_rule(p), !p->error_indicator) // ['->' expression] + (_opt_var_3 = _tmp_227_rule(p), !p->error_indicator) // ['->' expression] && (_literal_2 = _PyPegen_expect_token(p, 11)) // token=':' && @@ -24281,13 +24321,13 @@ invalid_class_def_raw_rule(Parser *p) expr_ty name_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 673)) // token='class' + (_keyword = _PyPegen_expect_token(p, 676)) // token='class' && (name_var = _PyPegen_name_token(p)) // NAME && (_opt_var = type_params_rule(p), !p->error_indicator) // type_params? && - (_opt_var_1 = _tmp_227_rule(p), !p->error_indicator) // ['(' arguments? ')'] + (_opt_var_1 = _tmp_228_rule(p), !p->error_indicator) // ['(' arguments? ')'] && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) @@ -24320,13 +24360,13 @@ invalid_class_def_raw_rule(Parser *p) expr_ty name_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 673)) // token='class' + (a = _PyPegen_expect_token(p, 676)) // token='class' && (name_var = _PyPegen_name_token(p)) // NAME && (_opt_var = type_params_rule(p), !p->error_indicator) // type_params? && - (_opt_var_1 = _tmp_228_rule(p), !p->error_indicator) // ['(' arguments? ')'] + (_opt_var_1 = _tmp_229_rule(p), !p->error_indicator) // ['(' arguments? 
')'] && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -24376,11 +24416,11 @@ invalid_double_starred_kvpairs_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_double_starred_kvpairs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.double_starred_kvpair+ ',' invalid_kvpair")); - asdl_seq * _gather_229_var; + asdl_seq * _gather_230_var; Token * _literal; void *invalid_kvpair_var; if ( - (_gather_229_var = _gather_229_rule(p)) // ','.double_starred_kvpair+ + (_gather_230_var = _gather_230_rule(p)) // ','.double_starred_kvpair+ && (_literal = _PyPegen_expect_token(p, 12)) // token=',' && @@ -24388,7 +24428,7 @@ invalid_double_starred_kvpairs_rule(Parser *p) ) { D(fprintf(stderr, "%*c+ invalid_double_starred_kvpairs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.double_starred_kvpair+ ',' invalid_kvpair")); - _res = _PyPegen_dummy_name(p, _gather_229_var, _literal, invalid_kvpair_var); + _res = _PyPegen_dummy_name(p, _gather_230_var, _literal, invalid_kvpair_var); goto done; } p->mark = _mark; @@ -24441,7 +24481,7 @@ invalid_double_starred_kvpairs_rule(Parser *p) && (a = _PyPegen_expect_token(p, 11)) // token=':' && - _PyPegen_lookahead(1, _tmp_231_rule, p) + _PyPegen_lookahead(1, _tmp_232_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_double_starred_kvpairs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ':' &('}' | ',')")); @@ -24551,7 +24591,7 @@ invalid_kvpair_rule(Parser *p) && (a = _PyPegen_expect_token(p, 11)) // token=':' && - _PyPegen_lookahead(1, _tmp_232_rule, p) + _PyPegen_lookahead(1, _tmp_233_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_kvpair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ':' &('}' | ',')")); @@ -24767,7 +24807,7 @@ invalid_replacement_field_rule(Parser *p) if ( (_literal = _PyPegen_expect_token(p, 25)) // token='{' && - _PyPegen_lookahead(0, _tmp_233_rule, p) + _PyPegen_lookahead(0, _tmp_234_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' !(yield_expr | star_expressions)")); @@ -24790,13 +24830,13 @@ invalid_replacement_field_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) !('=' | '!' | ':' | '}')")); Token * _literal; - void *_tmp_234_var; + void *_tmp_235_var; if ( (_literal = _PyPegen_expect_token(p, 25)) // token='{' && - (_tmp_234_var = _tmp_234_rule(p)) // yield_expr | star_expressions + (_tmp_235_var = _tmp_235_rule(p)) // yield_expr | star_expressions && - _PyPegen_lookahead(0, _tmp_235_rule, p) + _PyPegen_lookahead(0, _tmp_236_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) !('=' | '!' | ':' | '}')")); @@ -24820,15 +24860,15 @@ invalid_replacement_field_rule(Parser *p) D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '=' !('!' 
| ':' | '}')")); Token * _literal; Token * _literal_1; - void *_tmp_236_var; + void *_tmp_237_var; if ( (_literal = _PyPegen_expect_token(p, 25)) // token='{' && - (_tmp_236_var = _tmp_236_rule(p)) // yield_expr | star_expressions + (_tmp_237_var = _tmp_237_rule(p)) // yield_expr | star_expressions && (_literal_1 = _PyPegen_expect_token(p, 22)) // token='=' && - _PyPegen_lookahead(0, _tmp_237_rule, p) + _PyPegen_lookahead(0, _tmp_238_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '=' !('!' | ':' | '}')")); @@ -24853,12 +24893,12 @@ invalid_replacement_field_rule(Parser *p) Token * _literal; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings - void *_tmp_238_var; + void *_tmp_239_var; void *invalid_conversion_character_var; if ( (_literal = _PyPegen_expect_token(p, 25)) // token='{' && - (_tmp_238_var = _tmp_238_rule(p)) // yield_expr | star_expressions + (_tmp_239_var = _tmp_239_rule(p)) // yield_expr | star_expressions && (_opt_var = _PyPegen_expect_token(p, 22), !p->error_indicator) // '='? && @@ -24866,7 +24906,7 @@ invalid_replacement_field_rule(Parser *p) ) { D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? invalid_conversion_character")); - _res = _PyPegen_dummy_name(p, _literal, _tmp_238_var, _opt_var, invalid_conversion_character_var); + _res = _PyPegen_dummy_name(p, _literal, _tmp_239_var, _opt_var, invalid_conversion_character_var); goto done; } p->mark = _mark; @@ -24884,17 +24924,17 @@ invalid_replacement_field_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings void *_opt_var_1; UNUSED(_opt_var_1); // Silence compiler warnings - void *_tmp_239_var; + void *_tmp_240_var; if ( (_literal = _PyPegen_expect_token(p, 25)) // token='{' && - (_tmp_239_var = _tmp_239_rule(p)) // yield_expr | star_expressions + (_tmp_240_var = _tmp_240_rule(p)) // yield_expr | star_expressions && (_opt_var = _PyPegen_expect_token(p, 22), !p->error_indicator) // '='? && - (_opt_var_1 = _tmp_240_rule(p), !p->error_indicator) // ['!' NAME] + (_opt_var_1 = _tmp_241_rule(p), !p->error_indicator) // ['!' NAME] && - _PyPegen_lookahead(0, _tmp_241_rule, p) + _PyPegen_lookahead(0, _tmp_242_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] !(':' | '}')")); @@ -24918,24 +24958,24 @@ invalid_replacement_field_rule(Parser *p) D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] ':' fstring_format_spec* !'}'")); Token * _literal; Token * _literal_1; - asdl_seq * _loop0_244_var; + asdl_seq * _loop0_245_var; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings void *_opt_var_1; UNUSED(_opt_var_1); // Silence compiler warnings - void *_tmp_242_var; + void *_tmp_243_var; if ( (_literal = _PyPegen_expect_token(p, 25)) // token='{' && - (_tmp_242_var = _tmp_242_rule(p)) // yield_expr | star_expressions + (_tmp_243_var = _tmp_243_rule(p)) // yield_expr | star_expressions && (_opt_var = _PyPegen_expect_token(p, 22), !p->error_indicator) // '='? && - (_opt_var_1 = _tmp_243_rule(p), !p->error_indicator) // ['!' NAME] + (_opt_var_1 = _tmp_244_rule(p), !p->error_indicator) // ['!' 
NAME] && (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' && - (_loop0_244_var = _loop0_244_rule(p)) // fstring_format_spec* + (_loop0_245_var = _loop0_245_rule(p)) // fstring_format_spec* && _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 26) // token='}' ) @@ -24964,15 +25004,15 @@ invalid_replacement_field_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings void *_opt_var_1; UNUSED(_opt_var_1); // Silence compiler warnings - void *_tmp_245_var; + void *_tmp_246_var; if ( (_literal = _PyPegen_expect_token(p, 25)) // token='{' && - (_tmp_245_var = _tmp_245_rule(p)) // yield_expr | star_expressions + (_tmp_246_var = _tmp_246_rule(p)) // yield_expr | star_expressions && (_opt_var = _PyPegen_expect_token(p, 22), !p->error_indicator) // '='? && - (_opt_var_1 = _tmp_246_rule(p), !p->error_indicator) // ['!' NAME] + (_opt_var_1 = _tmp_247_rule(p), !p->error_indicator) // ['!' NAME] && _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 26) // token='}' ) @@ -25019,7 +25059,7 @@ invalid_conversion_character_rule(Parser *p) if ( (_literal = _PyPegen_expect_token(p, 54)) // token='!' && - _PyPegen_lookahead(1, _tmp_247_rule, p) + _PyPegen_lookahead(1, _tmp_248_rule, p) ) { D(fprintf(stderr, "%*c+ invalid_conversion_character[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!' &(':' | '}')")); @@ -25411,7 +25451,7 @@ _tmp_6_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'import'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 617)) // token='import' + (_keyword = _PyPegen_expect_token(p, 620)) // token='import' ) { D(fprintf(stderr, "%*c+ _tmp_6[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'import'")); @@ -25430,7 +25470,7 @@ _tmp_6_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 618)) // token='from' + (_keyword = _PyPegen_expect_token(p, 621)) // token='from' ) { D(fprintf(stderr, "%*c+ _tmp_6[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from'")); @@ -25468,7 +25508,7 @@ _tmp_7_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'def'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 671)) // token='def' + (_keyword = _PyPegen_expect_token(p, 674)) // token='def' ) { D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'def'")); @@ -25506,7 +25546,7 @@ _tmp_7_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 670)) // token='async' + (_keyword = _PyPegen_expect_token(p, 673)) // token='async' ) { D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'")); @@ -25544,7 +25584,7 @@ _tmp_8_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'class'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 673)) // token='class' + (_keyword = _PyPegen_expect_token(p, 676)) // token='class' ) { D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'class'")); @@ -25601,7 +25641,7 @@ _tmp_9_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 631)) // token='with' + (_keyword = _PyPegen_expect_token(p, 634)) // 
token='with' ) { D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with'")); @@ -25620,7 +25660,7 @@ _tmp_9_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 670)) // token='async' + (_keyword = _PyPegen_expect_token(p, 673)) // token='async' ) { D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'")); @@ -25658,7 +25698,7 @@ _tmp_10_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 668)) // token='for' + (_keyword = _PyPegen_expect_token(p, 671)) // token='for' ) { D(fprintf(stderr, "%*c+ _tmp_10[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for'")); @@ -25677,7 +25717,7 @@ _tmp_10_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 670)) // token='async' + (_keyword = _PyPegen_expect_token(p, 673)) // token='async' ) { D(fprintf(stderr, "%*c+ _tmp_10[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'")); @@ -25882,12 +25922,12 @@ _loop1_14_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_14[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_248_var; + void *_tmp_249_var; while ( - (_tmp_248_var = _tmp_248_rule(p)) // star_targets '=' + (_tmp_249_var = _tmp_249_rule(p)) // star_targets '=' ) { - _res = _tmp_248_var; + _res = _tmp_249_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -26062,7 +26102,7 @@ _tmp_17_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 618)) // token='from' + (_keyword = _PyPegen_expect_token(p, 621)) // token='from' && (z = expression_rule(p)) // expression ) @@ -26451,12 +26491,12 @@ _loop0_24_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_24[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); - void *_tmp_249_var; + void *_tmp_250_var; while ( - (_tmp_249_var = _tmp_249_rule(p)) // '.' | '...' + (_tmp_250_var = _tmp_250_rule(p)) // '.' | '...' ) { - _res = _tmp_249_var; + _res = _tmp_250_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -26518,12 +26558,12 @@ _loop1_25_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_25[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); - void *_tmp_250_var; + void *_tmp_251_var; while ( - (_tmp_250_var = _tmp_250_rule(p)) // '.' | '...' + (_tmp_251_var = _tmp_251_rule(p)) // '.' | '...' 
) { - _res = _tmp_250_var; + _res = _tmp_251_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -26701,7 +26741,7 @@ _tmp_28_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 656)) // token='as' + (_keyword = _PyPegen_expect_token(p, 659)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) @@ -26864,7 +26904,7 @@ _tmp_31_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 656)) // token='as' + (_keyword = _PyPegen_expect_token(p, 659)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) @@ -26916,12 +26956,12 @@ _loop1_32_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_32[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('@' named_expression NEWLINE)")); - void *_tmp_251_var; + void *_tmp_252_var; while ( - (_tmp_251_var = _tmp_251_rule(p)) // '@' named_expression NEWLINE + (_tmp_252_var = _tmp_252_rule(p)) // '@' named_expression NEWLINE ) { - _res = _tmp_251_var; + _res = _tmp_252_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -28851,7 +28891,7 @@ _tmp_62_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 656)) // token='as' + (_keyword = _PyPegen_expect_token(p, 659)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) @@ -28897,7 +28937,7 @@ _tmp_63_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 656)) // token='as' + (_keyword = _PyPegen_expect_token(p, 659)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) @@ -30046,12 +30086,12 @@ _loop1_82_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_82[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' expression)")); - void *_tmp_252_var; + void *_tmp_253_var; while ( - (_tmp_252_var = _tmp_252_rule(p)) // ',' expression + (_tmp_253_var = _tmp_253_rule(p)) // ',' expression ) { - _res = _tmp_252_var; + _res = _tmp_253_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -30118,12 +30158,12 @@ _loop1_83_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_83[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_expression)")); - void *_tmp_253_var; + void *_tmp_254_var; while ( - (_tmp_253_var = _tmp_253_rule(p)) // ',' star_expression + (_tmp_254_var = _tmp_254_rule(p)) // ',' star_expression ) { - _res = _tmp_253_var; + _res = _tmp_254_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -30307,12 +30347,12 @@ _loop1_86_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_86[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('or' conjunction)")); - void *_tmp_254_var; + void *_tmp_255_var; while ( - (_tmp_254_var = _tmp_254_rule(p)) // 'or' conjunction + (_tmp_255_var = _tmp_255_rule(p)) // 'or' conjunction ) { - _res = _tmp_254_var; + _res = _tmp_255_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -30379,12 +30419,12 @@ _loop1_87_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_87[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('and' inversion)")); - void *_tmp_255_var; + void *_tmp_256_var; while ( - (_tmp_255_var = 
_tmp_255_rule(p)) // 'and' inversion + (_tmp_256_var = _tmp_256_rule(p)) // 'and' inversion ) { - _res = _tmp_255_var; + _res = _tmp_256_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -30571,7 +30611,7 @@ _loop0_91_rule(Parser *p) while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_256_rule(p)) // slice | starred_expression + (elem = _tmp_257_rule(p)) // slice | starred_expression ) { _res = elem; @@ -30636,7 +30676,7 @@ _gather_90_rule(Parser *p) void *elem; asdl_seq * seq; if ( - (elem = _tmp_256_rule(p)) // slice | starred_expression + (elem = _tmp_257_rule(p)) // slice | starred_expression && (seq = _loop0_91_rule(p)) // _loop0_91 ) @@ -32235,12 +32275,12 @@ _loop1_115_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_115[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(fstring | string)")); - void *_tmp_257_var; + void *_tmp_258_var; while ( - (_tmp_257_var = _tmp_257_rule(p)) // fstring | string + (_tmp_258_var = _tmp_258_rule(p)) // fstring | string ) { - _res = _tmp_257_var; + _res = _tmp_258_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -32545,12 +32585,12 @@ _loop0_120_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_120[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)")); - void *_tmp_258_var; + void *_tmp_259_var; while ( - (_tmp_258_var = _tmp_258_rule(p)) // 'if' disjunction + (_tmp_259_var = _tmp_259_rule(p)) // 'if' disjunction ) { - _res = _tmp_258_var; + _res = _tmp_259_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -32612,12 +32652,12 @@ _loop0_121_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_121[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)")); - void *_tmp_259_var; + void *_tmp_260_var; while ( - (_tmp_259_var = _tmp_259_rule(p)) // 'if' disjunction + (_tmp_260_var = _tmp_260_rule(p)) // 'if' disjunction ) { - _res = _tmp_259_var; + _res = _tmp_260_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -32651,9 +32691,54 @@ _loop0_121_rule(Parser *p) return _seq; } -// _tmp_122: assignment_expression | expression !':=' +// _tmp_122: bitwise_or ((',' bitwise_or))* ','? static void * _tmp_122_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + _Pypegen_stack_overflow(p); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // bitwise_or ((',' bitwise_or))* ','? + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_122[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "bitwise_or ((',' bitwise_or))* ','?")); + asdl_seq * _loop0_261_var; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + expr_ty bitwise_or_var; + if ( + (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or + && + (_loop0_261_var = _loop0_261_rule(p)) // ((',' bitwise_or))* + && + (_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','? 
+ ) + { + D(fprintf(stderr, "%*c+ _tmp_122[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_or ((',' bitwise_or))* ','?")); + _res = _PyPegen_dummy_name(p, bitwise_or_var, _loop0_261_var, _opt_var); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_122[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "bitwise_or ((',' bitwise_or))* ','?")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_123: assignment_expression | expression !':=' +static void * +_tmp_123_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -32669,18 +32754,18 @@ _tmp_122_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_122[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression")); + D(fprintf(stderr, "%*c> _tmp_123[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression")); expr_ty assignment_expression_var; if ( (assignment_expression_var = assignment_expression_rule(p)) // assignment_expression ) { - D(fprintf(stderr, "%*c+ _tmp_122[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression")); + D(fprintf(stderr, "%*c+ _tmp_123[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression")); _res = assignment_expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_122[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_123[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "assignment_expression")); } { // expression !':=' @@ -32688,7 +32773,7 @@ _tmp_122_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_122[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='")); + D(fprintf(stderr, "%*c> _tmp_123[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='")); expr_ty expression_var; if ( (expression_var = expression_rule(p)) // expression @@ -32696,12 +32781,12 @@ _tmp_122_rule(Parser *p) _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) // token=':=' ) { - D(fprintf(stderr, "%*c+ _tmp_122[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='")); + D(fprintf(stderr, "%*c+ _tmp_123[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='")); _res = expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_122[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_123[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression !':='")); } _res = NULL; @@ -32710,9 +32795,9 @@ _tmp_122_rule(Parser *p) return _res; } -// _loop0_124: ',' (starred_expression | (assignment_expression | expression !':=') !'=') +// _loop0_125: ',' (starred_expression | (assignment_expression | expression !':=') !'=') static asdl_seq * -_loop0_124_rule(Parser *p) +_loop0_125_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -32737,13 +32822,13 @@ _loop0_124_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_124[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (starred_expression | (assignment_expression | expression !':=') !'=')")); + D(fprintf(stderr, "%*c> _loop0_125[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (starred_expression | (assignment_expression | expression !':=') !'=')")); Token * _literal; void *elem; while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_260_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' + (elem = _tmp_262_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' ) { _res = elem; @@ -32769,7 +32854,7 @@ _loop0_124_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_124[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_125[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (starred_expression | (assignment_expression | expression !':=') !'=')")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -32786,10 +32871,10 @@ _loop0_124_rule(Parser *p) return _seq; } -// _gather_123: -// | (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_124 +// _gather_124: +// | (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_125 static asdl_seq * -_gather_123_rule(Parser *p) +_gather_124_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -32800,27 +32885,27 @@ _gather_123_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_124 + { // (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_125 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_123[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_124")); + D(fprintf(stderr, "%*c> _gather_124[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_125")); void *elem; asdl_seq * seq; if ( - (elem = _tmp_260_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' + (elem = _tmp_262_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' && - (seq = _loop0_124_rule(p)) // _loop0_124 + (seq = _loop0_125_rule(p)) // _loop0_125 ) { - D(fprintf(stderr, "%*c+ _gather_123[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_124")); + D(fprintf(stderr, "%*c+ _gather_124[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_125")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_123[%d-%d]: %s failed!\n", p->level, ' ', - 
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_124")); + D(fprintf(stderr, "%*c%s _gather_124[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_125")); } _res = NULL; done: @@ -32828,9 +32913,9 @@ _gather_123_rule(Parser *p) return _res; } -// _tmp_125: ',' kwargs +// _tmp_126: ',' kwargs static void * -_tmp_125_rule(Parser *p) +_tmp_126_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -32846,7 +32931,7 @@ _tmp_125_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_125[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwargs")); + D(fprintf(stderr, "%*c> _tmp_126[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwargs")); Token * _literal; asdl_seq* k; if ( @@ -32855,7 +32940,7 @@ _tmp_125_rule(Parser *p) (k = kwargs_rule(p)) // kwargs ) { - D(fprintf(stderr, "%*c+ _tmp_125[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' kwargs")); + D(fprintf(stderr, "%*c+ _tmp_126[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' kwargs")); _res = k; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -32865,7 +32950,7 @@ _tmp_125_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_125[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_126[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwargs")); } _res = NULL; @@ -32874,9 +32959,9 @@ _tmp_125_rule(Parser *p) return _res; } -// _loop0_127: ',' kwarg_or_starred +// _loop0_128: ',' kwarg_or_starred static asdl_seq * -_loop0_127_rule(Parser *p) +_loop0_128_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -32901,7 +32986,7 @@ _loop0_127_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_127[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_starred")); + D(fprintf(stderr, "%*c> _loop0_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_starred")); Token * _literal; KeywordOrStarred* elem; while ( @@ -32933,7 +33018,7 @@ _loop0_127_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_127[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_128[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' kwarg_or_starred")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -32950,9 +33035,9 @@ _loop0_127_rule(Parser *p) return _seq; } -// _gather_126: kwarg_or_starred _loop0_127 +// _gather_127: kwarg_or_starred _loop0_128 static asdl_seq * -_gather_126_rule(Parser *p) +_gather_127_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -32963,27 +33048,27 @@ _gather_126_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // kwarg_or_starred _loop0_127 + { // kwarg_or_starred _loop0_128 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_126[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_127")); + D(fprintf(stderr, "%*c> _gather_127[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_128")); KeywordOrStarred* elem; asdl_seq * seq; if ( (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred && - (seq = _loop0_127_rule(p)) // _loop0_127 + (seq = _loop0_128_rule(p)) // _loop0_128 ) { - D(fprintf(stderr, "%*c+ _gather_126[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_127")); + D(fprintf(stderr, "%*c+ _gather_127[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_128")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_126[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwarg_or_starred _loop0_127")); + D(fprintf(stderr, "%*c%s _gather_127[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwarg_or_starred _loop0_128")); } _res = NULL; done: @@ -32991,9 +33076,9 @@ _gather_126_rule(Parser *p) return _res; } -// _loop0_129: ',' kwarg_or_double_starred +// _loop0_130: ',' kwarg_or_double_starred static asdl_seq * -_loop0_129_rule(Parser *p) +_loop0_130_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -33018,7 +33103,7 @@ _loop0_129_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_129[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_double_starred")); + D(fprintf(stderr, "%*c> _loop0_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_double_starred")); Token * _literal; KeywordOrStarred* elem; while ( @@ -33050,7 +33135,7 @@ _loop0_129_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_129[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_130[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' kwarg_or_double_starred")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -33067,9 +33152,9 @@ _loop0_129_rule(Parser *p) return _seq; } -// _gather_128: kwarg_or_double_starred _loop0_129 +// _gather_129: kwarg_or_double_starred _loop0_130 static asdl_seq * -_gather_128_rule(Parser *p) +_gather_129_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -33080,27 +33165,27 @@ _gather_128_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // kwarg_or_double_starred _loop0_129 + { // kwarg_or_double_starred _loop0_130 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_129")); + D(fprintf(stderr, "%*c> _gather_129[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_130")); KeywordOrStarred* elem; asdl_seq * seq; if ( (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred && - (seq = _loop0_129_rule(p)) // _loop0_129 + (seq = _loop0_130_rule(p)) // _loop0_130 ) { - D(fprintf(stderr, "%*c+ _gather_128[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_129")); + D(fprintf(stderr, "%*c+ _gather_129[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_130")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_128[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwarg_or_double_starred _loop0_129")); + D(fprintf(stderr, "%*c%s _gather_129[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwarg_or_double_starred _loop0_130")); } _res = NULL; done: @@ -33108,9 +33193,9 @@ _gather_128_rule(Parser *p) return _res; } -// _loop0_131: ',' kwarg_or_starred +// _loop0_132: ',' kwarg_or_starred static asdl_seq * -_loop0_131_rule(Parser *p) +_loop0_132_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -33135,7 +33220,7 @@ _loop0_131_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_starred")); + D(fprintf(stderr, "%*c> _loop0_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_starred")); Token * _literal; KeywordOrStarred* elem; while ( @@ -33167,7 +33252,7 @@ _loop0_131_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_131[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_132[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' kwarg_or_starred")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -33184,9 +33269,9 @@ _loop0_131_rule(Parser *p) return _seq; } -// _gather_130: kwarg_or_starred _loop0_131 +// _gather_131: kwarg_or_starred _loop0_132 static asdl_seq * -_gather_130_rule(Parser *p) +_gather_131_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -33197,27 +33282,27 @@ _gather_130_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // kwarg_or_starred _loop0_131 + { // kwarg_or_starred _loop0_132 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_131")); + D(fprintf(stderr, "%*c> _gather_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_132")); KeywordOrStarred* elem; asdl_seq * seq; if ( (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred && - (seq = _loop0_131_rule(p)) // _loop0_131 + (seq = _loop0_132_rule(p)) // _loop0_132 ) { - D(fprintf(stderr, "%*c+ _gather_130[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_131")); + D(fprintf(stderr, "%*c+ _gather_131[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_132")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_130[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwarg_or_starred _loop0_131")); + D(fprintf(stderr, "%*c%s _gather_131[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwarg_or_starred _loop0_132")); } _res = NULL; done: @@ -33225,9 +33310,9 @@ _gather_130_rule(Parser *p) return _res; } -// _loop0_133: ',' kwarg_or_double_starred +// _loop0_134: ',' kwarg_or_double_starred static asdl_seq * -_loop0_133_rule(Parser *p) +_loop0_134_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -33252,7 +33337,7 @@ _loop0_133_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_double_starred")); + D(fprintf(stderr, "%*c> _loop0_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_double_starred")); Token * _literal; KeywordOrStarred* elem; while ( @@ -33284,7 +33369,7 @@ _loop0_133_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_133[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_134[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' kwarg_or_double_starred")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -33301,9 +33386,9 @@ _loop0_133_rule(Parser *p) return _seq; } -// _gather_132: kwarg_or_double_starred _loop0_133 +// _gather_133: kwarg_or_double_starred _loop0_134 static asdl_seq * -_gather_132_rule(Parser *p) +_gather_133_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -33314,27 +33399,27 @@ _gather_132_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // kwarg_or_double_starred _loop0_133 + { // kwarg_or_double_starred _loop0_134 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_133")); + D(fprintf(stderr, "%*c> _gather_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_134")); KeywordOrStarred* elem; asdl_seq * seq; if ( (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred && - (seq = _loop0_133_rule(p)) // _loop0_133 + (seq = _loop0_134_rule(p)) // _loop0_134 ) { - D(fprintf(stderr, "%*c+ _gather_132[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_133")); + D(fprintf(stderr, "%*c+ _gather_133[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_134")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_132[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwarg_or_double_starred _loop0_133")); + D(fprintf(stderr, "%*c%s _gather_133[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwarg_or_double_starred _loop0_134")); } _res = NULL; done: @@ -33342,9 +33427,9 @@ _gather_132_rule(Parser *p) return _res; } -// _loop0_134: (',' star_target) +// _loop0_135: (',' star_target) static asdl_seq * -_loop0_134_rule(Parser *p) +_loop0_135_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -33369,13 +33454,13 @@ _loop0_134_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)")); - void *_tmp_261_var; + D(fprintf(stderr, "%*c> _loop0_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)")); + void *_tmp_263_var; while ( - (_tmp_261_var = _tmp_261_rule(p)) // ',' star_target + (_tmp_263_var = _tmp_263_rule(p)) // ',' star_target ) { - _res = _tmp_261_var; + _res = _tmp_263_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -33392,7 +33477,7 @@ _loop0_134_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_134[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_135[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(',' star_target)")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -33409,9 +33494,9 @@ _loop0_134_rule(Parser *p) return _seq; } -// _loop0_136: ',' star_target +// _loop0_137: ',' star_target static asdl_seq * -_loop0_136_rule(Parser *p) +_loop0_137_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -33436,7 +33521,7 @@ _loop0_136_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c> _loop0_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); Token * _literal; expr_ty elem; while ( @@ -33468,7 +33553,7 @@ _loop0_136_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_136[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_137[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_target")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -33485,9 +33570,9 @@ _loop0_136_rule(Parser *p) return _seq; } -// _gather_135: star_target _loop0_136 +// _gather_136: star_target _loop0_137 static asdl_seq * -_gather_135_rule(Parser *p) +_gather_136_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -33498,27 +33583,27 @@ _gather_135_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // star_target _loop0_136 + { // star_target _loop0_137 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_target _loop0_136")); + D(fprintf(stderr, "%*c> _gather_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_target _loop0_137")); expr_ty elem; asdl_seq * seq; if ( (elem = star_target_rule(p)) // star_target && - (seq = _loop0_136_rule(p)) // _loop0_136 + (seq = _loop0_137_rule(p)) // _loop0_137 ) { - D(fprintf(stderr, "%*c+ _gather_135[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_target _loop0_136")); + D(fprintf(stderr, "%*c+ _gather_136[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_target _loop0_137")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_135[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_target _loop0_136")); + D(fprintf(stderr, "%*c%s _gather_136[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_target _loop0_137")); } _res = NULL; done: @@ -33526,9 +33611,9 @@ _gather_135_rule(Parser *p) return _res; } -// _loop1_137: (',' star_target) +// _loop1_138: (',' star_target) static asdl_seq * -_loop1_137_rule(Parser *p) +_loop1_138_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -33553,13 +33638,13 @@ _loop1_137_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)")); - void *_tmp_262_var; + D(fprintf(stderr, "%*c> _loop1_138[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)")); + void *_tmp_264_var; while ( - (_tmp_262_var = _tmp_262_rule(p)) // ',' star_target + (_tmp_264_var = _tmp_264_rule(p)) // ',' star_target ) { - _res = _tmp_262_var; + _res = _tmp_264_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -33576,7 +33661,7 @@ _loop1_137_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_137[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_138[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(',' star_target)")); } if (_n == 0 || p->error_indicator) { @@ -33598,9 +33683,9 @@ _loop1_137_rule(Parser *p) return _seq; } -// _tmp_138: !'*' star_target +// _tmp_139: !'*' star_target static void * -_tmp_138_rule(Parser *p) +_tmp_139_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -33616,7 +33701,7 @@ _tmp_138_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_138[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "!'*' star_target")); + D(fprintf(stderr, "%*c> _tmp_139[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "!'*' star_target")); expr_ty star_target_var; if ( _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 16) // token='*' @@ -33624,12 +33709,12 @@ _tmp_138_rule(Parser *p) (star_target_var = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_138[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!'*' star_target")); + D(fprintf(stderr, "%*c+ _tmp_139[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!'*' star_target")); _res = star_target_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_138[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_139[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "!'*' star_target")); } _res = NULL; @@ -33638,9 +33723,9 @@ _tmp_138_rule(Parser *p) return _res; } -// _loop0_140: ',' del_target +// _loop0_141: ',' del_target static asdl_seq * -_loop0_140_rule(Parser *p) +_loop0_141_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -33665,7 +33750,7 @@ _loop0_140_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' del_target")); + D(fprintf(stderr, "%*c> _loop0_141[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' del_target")); Token * _literal; expr_ty elem; while ( @@ -33697,7 +33782,7 @@ _loop0_140_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_140[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_141[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' del_target")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -33714,9 +33799,9 @@ _loop0_140_rule(Parser *p) return _seq; } -// _gather_139: del_target _loop0_140 +// _gather_140: del_target _loop0_141 static asdl_seq * -_gather_139_rule(Parser *p) +_gather_140_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -33727,27 +33812,27 @@ _gather_139_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // del_target _loop0_140 + { // del_target _loop0_141 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_139[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "del_target _loop0_140")); + D(fprintf(stderr, "%*c> _gather_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "del_target _loop0_141")); expr_ty elem; asdl_seq * seq; if ( (elem = del_target_rule(p)) // del_target && - (seq = _loop0_140_rule(p)) // _loop0_140 + (seq = _loop0_141_rule(p)) // _loop0_141 ) { - D(fprintf(stderr, "%*c+ _gather_139[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "del_target _loop0_140")); + D(fprintf(stderr, "%*c+ _gather_140[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "del_target _loop0_141")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_139[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "del_target _loop0_140")); + D(fprintf(stderr, "%*c%s _gather_140[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "del_target _loop0_141")); } _res = NULL; done: @@ -33755,9 +33840,9 @@ _gather_139_rule(Parser *p) return _res; } -// _loop0_142: ',' expression +// _loop0_143: ',' expression static asdl_seq * -_loop0_142_rule(Parser *p) +_loop0_143_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -33782,7 +33867,7 @@ _loop0_142_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_142[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c> _loop0_143[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); Token * _literal; expr_ty elem; while ( @@ -33814,7 +33899,7 @@ _loop0_142_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_142[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_143[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' expression")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -33831,9 +33916,9 @@ _loop0_142_rule(Parser *p) return _seq; } -// _gather_141: expression _loop0_142 +// _gather_142: expression _loop0_143 static asdl_seq * -_gather_141_rule(Parser *p) +_gather_142_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -33844,27 +33929,27 @@ _gather_141_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // expression _loop0_142 + { // expression _loop0_143 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_141[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_142")); + D(fprintf(stderr, "%*c> _gather_142[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_143")); expr_ty elem; asdl_seq * seq; if ( (elem = expression_rule(p)) // expression && - (seq = _loop0_142_rule(p)) // _loop0_142 + (seq = _loop0_143_rule(p)) // _loop0_143 ) { - D(fprintf(stderr, "%*c+ _gather_141[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_142")); + D(fprintf(stderr, "%*c+ _gather_142[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_143")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_141[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_142")); + D(fprintf(stderr, "%*c%s _gather_142[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_143")); } _res = NULL; done: @@ -33872,9 +33957,9 @@ _gather_141_rule(Parser *p) return _res; } -// _loop0_144: ',' expression +// _loop0_145: ',' expression static asdl_seq * -_loop0_144_rule(Parser *p) +_loop0_145_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -33899,7 +33984,7 @@ _loop0_144_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c> _loop0_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); Token * _literal; expr_ty elem; while ( @@ -33931,7 +34016,7 @@ _loop0_144_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_144[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_145[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' expression")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -33948,9 +34033,9 @@ _loop0_144_rule(Parser *p) return _seq; } -// _gather_143: expression _loop0_144 +// _gather_144: expression _loop0_145 static asdl_seq * -_gather_143_rule(Parser *p) +_gather_144_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -33961,27 +34046,27 @@ _gather_143_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // expression _loop0_144 + { // expression _loop0_145 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_143[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_144")); + D(fprintf(stderr, "%*c> _gather_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_145")); expr_ty elem; asdl_seq * seq; if ( (elem = expression_rule(p)) // expression && - (seq = _loop0_144_rule(p)) // _loop0_144 + (seq = _loop0_145_rule(p)) // _loop0_145 ) { - D(fprintf(stderr, "%*c+ _gather_143[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_144")); + D(fprintf(stderr, "%*c+ _gather_144[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_145")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_143[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_144")); + D(fprintf(stderr, "%*c%s _gather_144[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_145")); } _res = NULL; done: @@ -33989,9 +34074,9 @@ _gather_143_rule(Parser *p) return _res; } -// _loop0_146: ',' expression +// _loop0_147: ',' expression static asdl_seq * -_loop0_146_rule(Parser *p) +_loop0_147_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -34016,7 +34101,7 @@ _loop0_146_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c> _loop0_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); Token * _literal; expr_ty elem; while ( @@ -34048,7 +34133,7 @@ _loop0_146_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_146[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_147[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' expression")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -34065,9 +34150,9 @@ _loop0_146_rule(Parser *p) return _seq; } -// _gather_145: expression _loop0_146 +// _gather_146: expression _loop0_147 static asdl_seq * -_gather_145_rule(Parser *p) +_gather_146_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -34078,27 +34163,27 @@ _gather_145_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // expression _loop0_146 + { // expression _loop0_147 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_146")); + D(fprintf(stderr, "%*c> _gather_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_147")); expr_ty elem; asdl_seq * seq; if ( (elem = expression_rule(p)) // expression && - (seq = _loop0_146_rule(p)) // _loop0_146 + (seq = _loop0_147_rule(p)) // _loop0_147 ) { - D(fprintf(stderr, "%*c+ _gather_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_146")); + D(fprintf(stderr, "%*c+ _gather_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_147")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_145[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_146")); + D(fprintf(stderr, "%*c%s _gather_146[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_147")); } _res = NULL; done: @@ -34106,9 +34191,9 @@ _gather_145_rule(Parser *p) return _res; } -// _loop0_148: ',' expression +// _loop0_149: ',' expression static asdl_seq * -_loop0_148_rule(Parser *p) +_loop0_149_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -34133,7 +34218,7 @@ _loop0_148_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c> _loop0_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); Token * _literal; expr_ty elem; while ( @@ -34165,7 +34250,7 @@ _loop0_148_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_148[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_149[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' expression")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -34182,9 +34267,9 @@ _loop0_148_rule(Parser *p) return _seq; } -// _gather_147: expression _loop0_148 +// _gather_148: expression _loop0_149 static asdl_seq * -_gather_147_rule(Parser *p) +_gather_148_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -34195,27 +34280,27 @@ _gather_147_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // expression _loop0_148 + { // expression _loop0_149 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_148")); + D(fprintf(stderr, "%*c> _gather_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_149")); expr_ty elem; asdl_seq * seq; if ( (elem = expression_rule(p)) // expression && - (seq = _loop0_148_rule(p)) // _loop0_148 + (seq = _loop0_149_rule(p)) // _loop0_149 ) { - D(fprintf(stderr, "%*c+ _gather_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_148")); + D(fprintf(stderr, "%*c+ _gather_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_149")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_147[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_148")); + D(fprintf(stderr, "%*c%s _gather_148[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_149")); } _res = NULL; done: @@ -34223,9 +34308,9 @@ _gather_147_rule(Parser *p) return _res; } -// _tmp_149: NEWLINE INDENT +// _tmp_150: NEWLINE INDENT static void * -_tmp_149_rule(Parser *p) +_tmp_150_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -34241,7 +34326,7 @@ _tmp_149_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT")); + D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT")); Token * indent_var; Token * newline_var; if ( @@ -34250,12 +34335,12 @@ _tmp_149_rule(Parser *p) (indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT' ) { - D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT")); + D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT")); _res = _PyPegen_dummy_name(p, newline_var, indent_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NEWLINE INDENT")); } _res = NULL; @@ -34264,11 +34349,11 @@ _tmp_149_rule(Parser *p) return _res; } -// _tmp_150: +// _tmp_151: // | (','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs) // | kwargs static void * -_tmp_150_rule(Parser *p) +_tmp_151_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -34284,18 +34369,18 @@ _tmp_150_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs)")); - void *_tmp_263_var; + D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs)")); + void *_tmp_265_var; if ( - (_tmp_263_var = _tmp_263_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs + (_tmp_265_var = _tmp_265_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs ) { - D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs)")); - _res = _tmp_263_var; + D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs)")); + _res = _tmp_265_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs)")); } { // kwargs @@ -34303,18 +34388,18 @@ _tmp_150_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwargs")); + D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwargs")); asdl_seq* kwargs_var; if ( (kwargs_var = kwargs_rule(p)) // kwargs ) { - D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwargs")); + D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwargs")); _res = kwargs_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "kwargs")); } _res = NULL; @@ -34323,9 +34408,9 @@ _tmp_150_rule(Parser *p) return _res; } -// _tmp_151: args | expression for_if_clauses +// _tmp_152: args | expression for_if_clauses static void * -_tmp_151_rule(Parser *p) +_tmp_152_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -34341,18 +34426,18 @@ _tmp_151_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args")); + D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args")); expr_ty args_var; if ( (args_var = args_rule(p)) // args ) { - D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args")); + D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args")); _res = args_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "args")); } { // expression for_if_clauses @@ -34360,7 +34445,7 @@ _tmp_151_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses")); + D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses")); expr_ty expression_var; asdl_comprehension_seq* for_if_clauses_var; if ( @@ -34369,12 +34454,12 @@ _tmp_151_rule(Parser *p) (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses ) { - D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses")); + D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses")); _res = _PyPegen_dummy_name(p, expression_var, for_if_clauses_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression for_if_clauses")); } _res = NULL; @@ -34383,9 +34468,9 @@ _tmp_151_rule(Parser *p) return _res; } -// _tmp_152: args ',' +// _tmp_153: args ',' static void * -_tmp_152_rule(Parser *p) +_tmp_153_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -34401,7 +34486,7 @@ _tmp_152_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args ','")); + D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args ','")); Token * _literal; expr_ty args_var; if ( @@ -34410,12 +34495,12 @@ _tmp_152_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args ','")); + D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args ','")); _res = _PyPegen_dummy_name(p, args_var, _literal); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "args ','")); } _res = NULL; @@ -34424,9 +34509,9 @@ _tmp_152_rule(Parser *p) return _res; } -// _tmp_153: ',' | ')' +// _tmp_154: ',' | ')' static void * -_tmp_153_rule(Parser *p) +_tmp_154_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -34442,18 +34527,18 @@ _tmp_153_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); } { // ')' @@ -34461,18 +34546,18 @@ _tmp_153_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); } _res = NULL; @@ -34481,9 +34566,9 @@ _tmp_153_rule(Parser *p) return _res; } -// _tmp_154: 'True' | 'False' | 'None' +// _tmp_155: 'True' | 'False' | 'None' static void * -_tmp_154_rule(Parser *p) +_tmp_155_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -34499,18 +34584,18 @@ _tmp_154_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); + D(fprintf(stderr, "%*c> _tmp_155[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 610)) // token='True' + (_keyword = _PyPegen_expect_token(p, 613)) // token='True' ) { - D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); + D(fprintf(stderr, "%*c+ _tmp_155[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_155[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'True'")); } { // 'False' @@ -34518,18 +34603,18 @@ _tmp_154_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); + D(fprintf(stderr, "%*c> _tmp_155[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 612)) // token='False' + (_keyword = _PyPegen_expect_token(p, 615)) // token='False' ) { - D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); + D(fprintf(stderr, "%*c+ _tmp_155[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_155[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'False'")); } { // 'None' @@ -34537,18 +34622,18 @@ _tmp_154_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); + D(fprintf(stderr, "%*c> _tmp_155[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 611)) // token='None' + (_keyword = _PyPegen_expect_token(p, 614)) // token='None' ) { - D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); + D(fprintf(stderr, "%*c+ _tmp_155[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_155[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'None'")); } _res = NULL; @@ -34557,9 +34642,9 @@ _tmp_154_rule(Parser *p) return _res; } -// _tmp_155: NAME '=' +// _tmp_156: NAME '=' static void * -_tmp_155_rule(Parser *p) +_tmp_156_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -34575,7 +34660,7 @@ _tmp_155_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_155[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME '='")); + D(fprintf(stderr, "%*c> _tmp_156[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME '='")); Token * _literal; expr_ty name_var; if ( @@ -34584,12 +34669,12 @@ _tmp_155_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_155[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '='")); + D(fprintf(stderr, "%*c+ _tmp_156[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '='")); _res = _PyPegen_dummy_name(p, name_var, _literal); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_155[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_156[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NAME '='")); } _res = NULL; @@ -34598,9 +34683,9 @@ _tmp_155_rule(Parser *p) return _res; } -// _tmp_156: NAME STRING | SOFT_KEYWORD +// _tmp_157: NAME STRING | SOFT_KEYWORD static void * -_tmp_156_rule(Parser *p) +_tmp_157_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -34616,7 +34701,7 @@ _tmp_156_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_156[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME STRING")); + D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME STRING")); expr_ty name_var; expr_ty string_var; if ( @@ -34625,12 +34710,12 @@ _tmp_156_rule(Parser *p) (string_var = _PyPegen_string_token(p)) // STRING ) { - D(fprintf(stderr, "%*c+ _tmp_156[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME STRING")); + D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME STRING")); _res = _PyPegen_dummy_name(p, name_var, string_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_156[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME STRING")); } { // SOFT_KEYWORD @@ -34638,18 +34723,18 @@ _tmp_156_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_156[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD")); + D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD")); expr_ty soft_keyword_var; if ( (soft_keyword_var = _PyPegen_soft_keyword_token(p)) // SOFT_KEYWORD ) { - D(fprintf(stderr, "%*c+ _tmp_156[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD")); + D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD")); _res = soft_keyword_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_156[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "SOFT_KEYWORD")); } _res = NULL; @@ -34658,9 +34743,9 @@ _tmp_156_rule(Parser *p) return _res; } -// _tmp_157: 'else' | ':' +// _tmp_158: 'else' | ':' static void * -_tmp_157_rule(Parser *p) +_tmp_158_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -34676,18 +34761,18 @@ _tmp_157_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'else'")); + D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'else'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 661)) // token='else' + (_keyword = _PyPegen_expect_token(p, 664)) // token='else' ) { - D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else'")); + D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'else'")); } { // ':' @@ -34695,18 +34780,18 @@ _tmp_157_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); } _res = NULL; @@ -34715,9 +34800,9 @@ _tmp_157_rule(Parser *p) return _res; } -// _tmp_158: '=' | ':=' +// _tmp_159: '=' | ':=' static void * -_tmp_158_rule(Parser *p) +_tmp_159_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -34733,18 +34818,18 @@ _tmp_158_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='")); + D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='")); + D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_159[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'='")); } { // ':=' @@ -34752,18 +34837,18 @@ _tmp_158_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':='")); + D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 53)) // token=':=' ) { - D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':='")); + D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':='")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_159[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "':='")); } _res = NULL; @@ -34772,9 +34857,9 @@ _tmp_158_rule(Parser *p) return _res; } -// _tmp_159: list | tuple | genexp | 'True' | 'None' | 'False' +// _tmp_160: list | tuple | genexp | 'True' | 'None' | 'False' static void * -_tmp_159_rule(Parser *p) +_tmp_160_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -34790,18 +34875,18 @@ _tmp_159_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list")); + D(fprintf(stderr, "%*c> _tmp_160[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list")); expr_ty list_var; if ( (list_var = list_rule(p)) // list ) { - D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list")); + D(fprintf(stderr, "%*c+ _tmp_160[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list")); _res = list_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_159[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_160[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "list")); } { // tuple @@ -34809,18 +34894,18 @@ _tmp_159_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple")); + D(fprintf(stderr, "%*c> _tmp_160[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple")); expr_ty tuple_var; if ( (tuple_var = tuple_rule(p)) // tuple ) { - D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple")); + D(fprintf(stderr, "%*c+ _tmp_160[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple")); _res = tuple_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_159[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_160[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "tuple")); } { // genexp @@ -34828,18 +34913,18 @@ _tmp_159_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp")); + D(fprintf(stderr, "%*c> _tmp_160[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp")); expr_ty genexp_var; if ( (genexp_var = genexp_rule(p)) // genexp ) { - D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp")); + D(fprintf(stderr, "%*c+ _tmp_160[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp")); _res = genexp_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_159[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_160[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "genexp")); } { // 'True' @@ -34847,18 +34932,18 @@ _tmp_159_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); + D(fprintf(stderr, "%*c> _tmp_160[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 610)) // token='True' + (_keyword = _PyPegen_expect_token(p, 613)) // token='True' ) { - D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); + D(fprintf(stderr, "%*c+ _tmp_160[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_159[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_160[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'True'")); } { // 'None' @@ -34866,18 +34951,18 @@ _tmp_159_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); + D(fprintf(stderr, "%*c> _tmp_160[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 611)) // token='None' + (_keyword = _PyPegen_expect_token(p, 614)) // token='None' ) { - D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); + D(fprintf(stderr, "%*c+ _tmp_160[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_159[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_160[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'None'")); } { // 'False' @@ -34885,18 +34970,18 @@ _tmp_159_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); + D(fprintf(stderr, "%*c> _tmp_160[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 612)) // token='False' + (_keyword = _PyPegen_expect_token(p, 615)) // token='False' ) { - D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); + D(fprintf(stderr, "%*c+ _tmp_160[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_159[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_160[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'False'")); } _res = NULL; @@ -34905,9 +34990,9 @@ _tmp_159_rule(Parser *p) return _res; } -// _tmp_160: '=' | ':=' +// _tmp_161: '=' | ':=' static void * -_tmp_160_rule(Parser *p) +_tmp_161_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -34923,18 +35008,18 @@ _tmp_160_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_160[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='")); + D(fprintf(stderr, "%*c> _tmp_161[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_160[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='")); + D(fprintf(stderr, "%*c+ _tmp_161[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_160[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_161[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'='")); } { // ':=' @@ -34942,18 +35027,18 @@ _tmp_160_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_160[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':='")); + D(fprintf(stderr, "%*c> _tmp_161[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 53)) // token=':=' ) { - D(fprintf(stderr, "%*c+ _tmp_160[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':='")); + D(fprintf(stderr, "%*c+ _tmp_161[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':='")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_160[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_161[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':='")); } _res = NULL; @@ -34962,9 +35047,9 @@ _tmp_160_rule(Parser *p) return _res; } -// _loop0_161: star_named_expressions +// _loop0_162: star_named_expressions static asdl_seq * -_loop0_161_rule(Parser *p) +_loop0_162_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -34989,7 +35074,7 @@ _loop0_161_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_161[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expressions")); + D(fprintf(stderr, "%*c> _loop0_162[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expressions")); asdl_expr_seq* star_named_expressions_var; while ( (star_named_expressions_var = star_named_expressions_rule(p)) // star_named_expressions @@ -35012,7 +35097,7 @@ _loop0_161_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_161[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_162[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_named_expressions")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -35029,9 +35114,9 @@ _loop0_161_rule(Parser *p) return _seq; } -// _loop0_162: (star_targets '=') +// _loop0_163: (star_targets '=') static asdl_seq * -_loop0_162_rule(Parser *p) +_loop0_163_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -35056,13 +35141,13 @@ _loop0_162_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_162[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_264_var; + D(fprintf(stderr, "%*c> _loop0_163[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); + void *_tmp_266_var; while ( - (_tmp_264_var = _tmp_264_rule(p)) // star_targets '=' + (_tmp_266_var = _tmp_266_rule(p)) // star_targets '=' ) { - _res = _tmp_264_var; + _res = _tmp_266_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -35079,7 +35164,7 @@ _loop0_162_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_162[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_163[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(star_targets '=')")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -35096,9 +35181,9 @@ _loop0_162_rule(Parser *p) return _seq; } -// _loop0_163: (star_targets '=') +// _loop0_164: (star_targets '=') static asdl_seq * -_loop0_163_rule(Parser *p) +_loop0_164_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -35123,13 +35208,13 @@ _loop0_163_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_163[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_265_var; + D(fprintf(stderr, "%*c> _loop0_164[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); + void *_tmp_267_var; while ( - (_tmp_265_var = _tmp_265_rule(p)) // star_targets '=' + (_tmp_267_var = _tmp_267_rule(p)) // star_targets '=' ) { - _res = _tmp_265_var; + _res = _tmp_267_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -35146,7 +35231,7 @@ _loop0_163_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_163[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_164[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(star_targets '=')")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -35163,9 +35248,9 @@ _loop0_163_rule(Parser *p) return _seq; } -// _tmp_164: yield_expr | star_expressions +// _tmp_165: yield_expr | star_expressions static void * -_tmp_164_rule(Parser *p) +_tmp_165_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -35181,18 +35266,18 @@ _tmp_164_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_164[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c> _tmp_165[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - D(fprintf(stderr, "%*c+ _tmp_164[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c+ _tmp_165[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); _res = yield_expr_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_164[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_165[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); } { // star_expressions @@ -35200,18 +35285,18 @@ _tmp_164_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_164[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c> _tmp_165[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - D(fprintf(stderr, "%*c+ _tmp_164[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c+ _tmp_165[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); _res = star_expressions_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_164[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_165[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); } _res = NULL; @@ -35220,9 +35305,9 @@ _tmp_164_rule(Parser *p) return _res; } -// _tmp_165: '[' | '(' | '{' +// _tmp_166: '[' | '(' | '{' static void * -_tmp_165_rule(Parser *p) +_tmp_166_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -35238,18 +35323,18 @@ _tmp_165_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_165[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 9)) // token='[' ) { - D(fprintf(stderr, "%*c+ _tmp_165[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_165[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'['")); } { // '(' @@ -35257,18 +35342,18 @@ _tmp_165_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_165[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('")); + D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 7)) // token='(' ) { - D(fprintf(stderr, "%*c+ _tmp_165[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('")); + D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_165[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'('")); } { // '{' @@ -35276,18 +35361,18 @@ _tmp_165_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_165[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 25)) // token='{' ) { - D(fprintf(stderr, "%*c+ _tmp_165[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_165[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{'")); } _res = NULL; @@ -35296,9 +35381,9 @@ _tmp_165_rule(Parser *p) return _res; } -// _tmp_166: '[' | '{' +// _tmp_167: '[' | '{' static void * -_tmp_166_rule(Parser *p) +_tmp_167_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -35314,18 +35399,18 @@ _tmp_166_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c> _tmp_167[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 9)) // token='[' ) { - D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c+ _tmp_167[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_167[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'['")); } { // '{' @@ -35333,18 +35418,18 @@ _tmp_166_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c> _tmp_167[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 25)) // token='{' ) { - D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c+ _tmp_167[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_167[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'{'")); } _res = NULL; @@ -35353,9 +35438,9 @@ _tmp_166_rule(Parser *p) return _res; } -// _tmp_167: '[' | '{' +// _tmp_168: '[' | '{' static void * -_tmp_167_rule(Parser *p) +_tmp_168_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -35371,18 +35456,18 @@ _tmp_167_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_167[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c> _tmp_168[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 9)) // token='[' ) { - D(fprintf(stderr, "%*c+ _tmp_167[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c+ _tmp_168[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_167[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_168[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'['")); } { // '{' @@ -35390,18 +35475,18 @@ _tmp_167_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_167[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c> _tmp_168[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 25)) // token='{' ) { - D(fprintf(stderr, "%*c+ _tmp_167[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c+ _tmp_168[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_167[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_168[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{'")); } _res = NULL; @@ -35410,9 +35495,9 @@ _tmp_167_rule(Parser *p) return _res; } -// _tmp_168: slash_no_default | slash_with_default +// _tmp_169: slash_no_default | slash_with_default static void * -_tmp_168_rule(Parser *p) +_tmp_169_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -35428,18 +35513,18 @@ _tmp_168_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_168[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default")); + D(fprintf(stderr, "%*c> _tmp_169[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default")); asdl_arg_seq* slash_no_default_var; if ( (slash_no_default_var = slash_no_default_rule(p)) // slash_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_168[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default")); + D(fprintf(stderr, "%*c+ _tmp_169[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default")); _res = slash_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_168[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_169[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "slash_no_default")); } { // slash_with_default @@ -35447,18 +35532,18 @@ _tmp_168_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_168[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default")); + D(fprintf(stderr, "%*c> _tmp_169[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default")); SlashWithDefault* slash_with_default_var; if ( (slash_with_default_var = slash_with_default_rule(p)) // slash_with_default ) { - D(fprintf(stderr, "%*c+ _tmp_168[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default")); + D(fprintf(stderr, "%*c+ _tmp_169[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default")); _res = slash_with_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_168[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_169[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_with_default")); } _res = NULL; @@ -35467,9 +35552,9 @@ _tmp_168_rule(Parser *p) return _res; } -// _loop0_169: param_maybe_default +// _loop0_170: param_maybe_default static asdl_seq * -_loop0_169_rule(Parser *p) +_loop0_170_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -35494,7 +35579,7 @@ _loop0_169_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_169[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_170[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -35517,7 +35602,7 @@ _loop0_169_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_169[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_170[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -35534,9 +35619,9 @@ _loop0_169_rule(Parser *p) return _seq; } -// _loop0_170: param_no_default +// _loop0_171: param_no_default static asdl_seq * -_loop0_170_rule(Parser *p) +_loop0_171_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -35561,7 +35646,7 @@ _loop0_170_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_170[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop0_171[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -35584,7 +35669,7 @@ _loop0_170_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_170[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_171[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -35601,9 +35686,9 @@ _loop0_170_rule(Parser *p) return _seq; } -// _loop0_171: param_no_default +// _loop0_172: param_no_default static asdl_seq * -_loop0_171_rule(Parser *p) +_loop0_172_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -35628,7 +35713,7 @@ _loop0_171_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_171[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop0_172[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -35651,7 +35736,7 @@ _loop0_171_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_171[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_172[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -35668,9 +35753,9 @@ _loop0_171_rule(Parser *p) return _seq; } -// _loop1_172: param_no_default +// _loop1_173: param_no_default static asdl_seq * -_loop1_172_rule(Parser *p) +_loop1_173_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -35695,7 +35780,7 @@ _loop1_172_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_172[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop1_173[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -35718,7 +35803,7 @@ _loop1_172_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_172[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_173[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); } if (_n == 0 || p->error_indicator) { @@ -35740,9 +35825,9 @@ _loop1_172_rule(Parser *p) return _seq; } -// _tmp_173: slash_no_default | slash_with_default +// _tmp_174: slash_no_default | slash_with_default static void * -_tmp_173_rule(Parser *p) +_tmp_174_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -35758,18 +35843,18 @@ _tmp_173_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_173[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default")); + D(fprintf(stderr, "%*c> _tmp_174[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default")); asdl_arg_seq* slash_no_default_var; if ( (slash_no_default_var = slash_no_default_rule(p)) // slash_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_173[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default")); + D(fprintf(stderr, "%*c+ _tmp_174[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default")); _res = slash_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_173[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_174[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "slash_no_default")); } { // slash_with_default @@ -35777,18 +35862,18 @@ _tmp_173_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_173[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default")); + D(fprintf(stderr, "%*c> _tmp_174[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default")); SlashWithDefault* slash_with_default_var; if ( (slash_with_default_var = slash_with_default_rule(p)) // slash_with_default ) { - D(fprintf(stderr, "%*c+ _tmp_173[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default")); + D(fprintf(stderr, "%*c+ _tmp_174[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default")); _res = slash_with_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_173[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_174[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_with_default")); } _res = NULL; @@ -35797,9 +35882,9 @@ _tmp_173_rule(Parser *p) return _res; } -// _loop0_174: param_maybe_default +// _loop0_175: param_maybe_default static asdl_seq * -_loop0_174_rule(Parser *p) +_loop0_175_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -35824,7 +35909,7 @@ _loop0_174_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_174[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_175[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -35847,7 +35932,7 @@ _loop0_174_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_174[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_175[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -35864,9 +35949,9 @@ _loop0_174_rule(Parser *p) return _seq; } -// _tmp_175: ',' | param_no_default +// _tmp_176: ',' | param_no_default static void * -_tmp_175_rule(Parser *p) +_tmp_176_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -35882,18 +35967,18 @@ _tmp_175_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_175[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_175[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_175[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','")); } { // param_no_default @@ -35901,18 +35986,18 @@ _tmp_175_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_175[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; if ( (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_175[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default")); _res = param_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_175[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); } _res = NULL; @@ -35921,9 +36006,9 @@ _tmp_175_rule(Parser *p) return _res; } -// _loop0_176: param_maybe_default +// _loop0_177: param_maybe_default static asdl_seq * -_loop0_176_rule(Parser *p) +_loop0_177_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -35948,7 +36033,7 @@ _loop0_176_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_177[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -35971,7 +36056,7 @@ _loop0_176_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_176[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_177[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -35988,9 +36073,9 @@ _loop0_176_rule(Parser *p) return _seq; } -// _loop1_177: param_maybe_default +// _loop1_178: param_maybe_default static asdl_seq * -_loop1_177_rule(Parser *p) +_loop1_178_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -36015,7 +36100,7 @@ _loop1_177_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_177[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + D(fprintf(stderr, "%*c> _loop1_178[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -36038,7 +36123,7 @@ _loop1_177_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_177[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_178[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_maybe_default")); } if (_n == 0 || p->error_indicator) { @@ -36060,9 +36145,9 @@ _loop1_177_rule(Parser *p) return _seq; } -// _tmp_178: ')' | ',' +// _tmp_179: ')' | ',' static void * -_tmp_178_rule(Parser *p) +_tmp_179_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -36078,18 +36163,18 @@ _tmp_178_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_178[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_179[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_178[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_179[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_178[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_179[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); } { // ',' @@ -36097,18 +36182,18 @@ _tmp_178_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_178[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_179[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_178[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_179[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_178[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_179[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); } _res = NULL; @@ -36117,9 +36202,9 @@ _tmp_178_rule(Parser *p) return _res; } -// _tmp_179: ')' | ',' (')' | '**') +// _tmp_180: ')' | ',' (')' | '**') static void * -_tmp_179_rule(Parser *p) +_tmp_180_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -36135,18 +36220,18 @@ _tmp_179_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_179[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_180[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_179[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_180[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_179[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_180[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "')'")); } { // ',' (')' | '**') @@ -36154,21 +36239,21 @@ _tmp_179_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_179[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); + D(fprintf(stderr, "%*c> _tmp_180[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); Token * _literal; - void *_tmp_266_var; + void *_tmp_268_var; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_266_var = _tmp_266_rule(p)) // ')' | '**' + (_tmp_268_var = _tmp_268_rule(p)) // ')' | '**' ) { - D(fprintf(stderr, "%*c+ _tmp_179[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); - _res = _PyPegen_dummy_name(p, _literal, _tmp_266_var); + D(fprintf(stderr, "%*c+ _tmp_180[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); + _res = _PyPegen_dummy_name(p, _literal, _tmp_268_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_179[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_180[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (')' | '**')")); } _res = NULL; @@ -36177,9 +36262,9 @@ _tmp_179_rule(Parser *p) return _res; } -// _tmp_180: param_no_default | ',' +// _tmp_181: param_no_default | ',' static void * -_tmp_180_rule(Parser *p) +_tmp_181_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -36195,18 +36280,18 @@ _tmp_180_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_180[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _tmp_181[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; if ( (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_180[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c+ _tmp_181[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default")); _res = param_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_180[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_181[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); } { // ',' @@ -36214,18 +36299,18 @@ _tmp_180_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_180[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_181[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_180[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_181[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_180[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_181[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','")); } _res = NULL; @@ -36234,9 +36319,9 @@ _tmp_180_rule(Parser *p) return _res; } -// _loop0_181: param_maybe_default +// _loop0_182: param_maybe_default static asdl_seq * -_loop0_181_rule(Parser *p) +_loop0_182_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -36261,7 +36346,7 @@ _loop0_181_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_181[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_182[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -36284,7 +36369,7 @@ _loop0_181_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_181[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_182[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -36301,9 +36386,9 @@ _loop0_181_rule(Parser *p) return _seq; } -// _tmp_182: param_no_default | ',' +// _tmp_183: param_no_default | ',' static void * -_tmp_182_rule(Parser *p) +_tmp_183_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -36319,18 +36404,18 @@ _tmp_182_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_182[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _tmp_183[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; if ( (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_182[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c+ _tmp_183[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default")); _res = param_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_182[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_183[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); } { // ',' @@ -36338,18 +36423,18 @@ _tmp_182_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_182[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_183[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_182[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_183[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_182[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_183[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','")); } _res = NULL; @@ -36358,9 +36443,9 @@ _tmp_182_rule(Parser *p) return _res; } -// _tmp_183: '*' | '**' | '/' +// _tmp_184: '*' | '**' | '/' static void * -_tmp_183_rule(Parser *p) +_tmp_184_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -36376,18 +36461,18 @@ _tmp_183_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_183[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'")); + D(fprintf(stderr, "%*c> _tmp_184[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' ) { - D(fprintf(stderr, "%*c+ _tmp_183[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'")); + D(fprintf(stderr, "%*c+ _tmp_184[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_183[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_184[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*'")); } { // '**' @@ -36395,18 +36480,18 @@ _tmp_183_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_183[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c> _tmp_184[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - D(fprintf(stderr, "%*c+ _tmp_183[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c+ _tmp_184[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_183[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_184[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'")); } { // '/' @@ -36414,18 +36499,18 @@ _tmp_183_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_183[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'")); + D(fprintf(stderr, "%*c> _tmp_184[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 17)) // token='/' ) { - D(fprintf(stderr, "%*c+ _tmp_183[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'")); + D(fprintf(stderr, "%*c+ _tmp_184[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_183[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_184[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'/'")); } _res = NULL; @@ -36434,9 +36519,9 @@ _tmp_183_rule(Parser *p) return _res; } -// _loop1_184: param_with_default +// _loop1_185: param_with_default static asdl_seq * -_loop1_184_rule(Parser *p) +_loop1_185_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -36461,7 +36546,7 @@ _loop1_184_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_184[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + D(fprintf(stderr, "%*c> _loop1_185[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -36484,7 +36569,7 @@ _loop1_184_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_184[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_185[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default")); } if (_n == 0 || p->error_indicator) { @@ -36506,9 +36591,9 @@ _loop1_184_rule(Parser *p) return _seq; } -// _tmp_185: lambda_slash_no_default | lambda_slash_with_default +// _tmp_186: lambda_slash_no_default | lambda_slash_with_default static void * -_tmp_185_rule(Parser *p) +_tmp_186_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -36524,18 +36609,18 @@ _tmp_185_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_185[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); + D(fprintf(stderr, "%*c> _tmp_186[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); asdl_arg_seq* lambda_slash_no_default_var; if ( (lambda_slash_no_default_var = lambda_slash_no_default_rule(p)) // lambda_slash_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_185[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); + D(fprintf(stderr, "%*c+ _tmp_186[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); _res = lambda_slash_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_185[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_186[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_no_default")); } { // lambda_slash_with_default @@ -36543,18 +36628,18 @@ _tmp_185_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_185[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); + D(fprintf(stderr, "%*c> _tmp_186[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); SlashWithDefault* lambda_slash_with_default_var; if ( (lambda_slash_with_default_var = lambda_slash_with_default_rule(p)) // lambda_slash_with_default ) { - D(fprintf(stderr, "%*c+ _tmp_185[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); + D(fprintf(stderr, "%*c+ _tmp_186[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); _res = lambda_slash_with_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_185[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_186[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_slash_with_default")); } _res = NULL; @@ -36563,9 +36648,9 @@ _tmp_185_rule(Parser *p) return _res; } -// _loop0_186: lambda_param_maybe_default +// _loop0_187: lambda_param_maybe_default static asdl_seq * -_loop0_186_rule(Parser *p) +_loop0_187_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -36590,7 +36675,7 @@ _loop0_186_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_186[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_187[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default @@ -36613,7 +36698,7 @@ _loop0_186_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_186[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_187[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -36630,9 +36715,9 @@ _loop0_186_rule(Parser *p) return _seq; } -// _loop0_187: lambda_param_no_default +// _loop0_188: lambda_param_no_default static asdl_seq * -_loop0_187_rule(Parser *p) +_loop0_188_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -36657,7 +36742,7 @@ _loop0_187_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_187[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _loop0_188[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -36680,7 +36765,7 @@ _loop0_187_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_187[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_188[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -36697,9 +36782,9 @@ _loop0_187_rule(Parser *p) return _seq; } -// _loop0_188: lambda_param_no_default +// _loop0_189: lambda_param_no_default static asdl_seq * -_loop0_188_rule(Parser *p) +_loop0_189_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -36724,7 +36809,7 @@ _loop0_188_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_188[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _loop0_189[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -36747,7 +36832,7 @@ _loop0_188_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_188[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_189[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -36764,9 +36849,9 @@ _loop0_188_rule(Parser *p) return _seq; } -// _loop0_190: ',' lambda_param +// _loop0_191: ',' lambda_param static asdl_seq * -_loop0_190_rule(Parser *p) +_loop0_191_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -36791,7 +36876,7 @@ _loop0_190_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_190[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' lambda_param")); + D(fprintf(stderr, "%*c> _loop0_191[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' lambda_param")); Token * _literal; arg_ty elem; while ( @@ -36823,7 +36908,7 @@ _loop0_190_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_190[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_191[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' lambda_param")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -36840,9 +36925,9 @@ _loop0_190_rule(Parser *p) return _seq; } -// _gather_189: lambda_param _loop0_190 +// _gather_190: lambda_param _loop0_191 static asdl_seq * -_gather_189_rule(Parser *p) +_gather_190_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -36853,27 +36938,27 @@ _gather_189_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // lambda_param _loop0_190 + { // lambda_param _loop0_191 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_189[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_190")); + D(fprintf(stderr, "%*c> _gather_190[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_191")); arg_ty elem; asdl_seq * seq; if ( (elem = lambda_param_rule(p)) // lambda_param && - (seq = _loop0_190_rule(p)) // _loop0_190 + (seq = _loop0_191_rule(p)) // _loop0_191 ) { - D(fprintf(stderr, "%*c+ _gather_189[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_190")); + D(fprintf(stderr, "%*c+ _gather_190[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_191")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_189[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param _loop0_190")); + D(fprintf(stderr, "%*c%s _gather_190[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param _loop0_191")); } _res = NULL; done: @@ -36881,9 +36966,9 @@ _gather_189_rule(Parser *p) return _res; } -// _tmp_191: lambda_slash_no_default | lambda_slash_with_default +// _tmp_192: lambda_slash_no_default | lambda_slash_with_default static void * -_tmp_191_rule(Parser *p) +_tmp_192_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -36899,18 +36984,18 @@ _tmp_191_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_191[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); + D(fprintf(stderr, "%*c> _tmp_192[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); asdl_arg_seq* lambda_slash_no_default_var; if ( (lambda_slash_no_default_var = lambda_slash_no_default_rule(p)) // lambda_slash_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_191[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); + D(fprintf(stderr, "%*c+ _tmp_192[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default")); _res = lambda_slash_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_191[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_192[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_no_default")); } { // lambda_slash_with_default @@ -36918,18 +37003,18 @@ _tmp_191_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_191[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); + D(fprintf(stderr, "%*c> _tmp_192[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); SlashWithDefault* lambda_slash_with_default_var; if ( (lambda_slash_with_default_var = lambda_slash_with_default_rule(p)) // lambda_slash_with_default ) { - D(fprintf(stderr, "%*c+ _tmp_191[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); + D(fprintf(stderr, "%*c+ _tmp_192[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); _res = lambda_slash_with_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_191[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_192[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_with_default")); } _res = NULL; @@ -36938,9 +37023,9 @@ _tmp_191_rule(Parser *p) return _res; } -// _loop0_192: lambda_param_maybe_default +// _loop0_193: lambda_param_maybe_default static asdl_seq * -_loop0_192_rule(Parser *p) +_loop0_193_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -36965,7 +37050,7 @@ _loop0_192_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_192[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_193[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default @@ -36988,7 +37073,7 @@ _loop0_192_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_192[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_193[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -37005,9 +37090,9 @@ _loop0_192_rule(Parser *p) return _seq; } -// _tmp_193: ',' | lambda_param_no_default +// _tmp_194: ',' | lambda_param_no_default static void * -_tmp_193_rule(Parser *p) +_tmp_194_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -37023,18 +37108,18 @@ _tmp_193_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_193[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_194[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_193[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_194[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_193[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_194[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); } { // lambda_param_no_default @@ -37042,18 +37127,18 @@ _tmp_193_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_193[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _tmp_194[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; if ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_193[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c+ _tmp_194[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); _res = lambda_param_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_193[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_194[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); } _res = NULL; @@ -37062,9 +37147,9 @@ _tmp_193_rule(Parser *p) return _res; } -// _loop0_194: lambda_param_maybe_default +// _loop0_195: lambda_param_maybe_default static asdl_seq * -_loop0_194_rule(Parser *p) +_loop0_195_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -37089,7 +37174,7 @@ _loop0_194_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_194[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_195[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default @@ -37112,7 +37197,7 @@ _loop0_194_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_194[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_195[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -37129,9 +37214,9 @@ _loop0_194_rule(Parser *p) return _seq; } -// _loop1_195: lambda_param_maybe_default +// _loop1_196: lambda_param_maybe_default static asdl_seq * -_loop1_195_rule(Parser *p) +_loop1_196_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -37156,7 +37241,7 @@ _loop1_195_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_195[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); + D(fprintf(stderr, "%*c> _loop1_196[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default @@ -37179,7 +37264,7 @@ _loop1_195_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_195[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_196[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default")); } if (_n == 0 || p->error_indicator) { @@ -37201,9 +37286,9 @@ _loop1_195_rule(Parser *p) return _seq; } -// _loop1_196: lambda_param_with_default +// _loop1_197: lambda_param_with_default static asdl_seq * -_loop1_196_rule(Parser *p) +_loop1_197_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -37228,7 +37313,7 @@ _loop1_196_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_196[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); + D(fprintf(stderr, "%*c> _loop1_197[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default @@ -37251,7 +37336,7 @@ _loop1_196_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_196[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_197[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default")); } if (_n == 0 || p->error_indicator) { @@ -37273,9 +37358,9 @@ _loop1_196_rule(Parser *p) return _seq; } -// _tmp_197: ':' | ',' (':' | '**') +// _tmp_198: ':' | ',' (':' | '**') static void * -_tmp_197_rule(Parser *p) +_tmp_198_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -37291,18 +37376,18 @@ _tmp_197_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_197[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_198[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_197[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_198[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_197[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_198[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "':'")); } { // ',' (':' | '**') @@ -37310,21 +37395,21 @@ _tmp_197_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_197[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); + D(fprintf(stderr, "%*c> _tmp_198[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); Token * _literal; - void *_tmp_267_var; + void *_tmp_269_var; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_267_var = _tmp_267_rule(p)) // ':' | '**' + (_tmp_269_var = _tmp_269_rule(p)) // ':' | '**' ) { - D(fprintf(stderr, "%*c+ _tmp_197[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); - _res = _PyPegen_dummy_name(p, _literal, _tmp_267_var); + D(fprintf(stderr, "%*c+ _tmp_198[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); + _res = _PyPegen_dummy_name(p, _literal, _tmp_269_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_197[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_198[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (':' | '**')")); } _res = NULL; @@ -37333,9 +37418,9 @@ _tmp_197_rule(Parser *p) return _res; } -// _tmp_198: lambda_param_no_default | ',' +// _tmp_199: lambda_param_no_default | ',' static void * -_tmp_198_rule(Parser *p) +_tmp_199_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -37351,18 +37436,18 @@ _tmp_198_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_198[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _tmp_199[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; if ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_198[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c+ _tmp_199[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); _res = lambda_param_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_198[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_199[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); } { // ',' @@ -37370,18 +37455,18 @@ _tmp_198_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_198[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_199[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_198[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_199[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_198[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_199[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','")); } _res = NULL; @@ -37390,9 +37475,9 @@ _tmp_198_rule(Parser *p) return _res; } -// _loop0_199: lambda_param_maybe_default +// _loop0_200: lambda_param_maybe_default static asdl_seq * -_loop0_199_rule(Parser *p) +_loop0_200_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -37417,7 +37502,7 @@ _loop0_199_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_199[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_200[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default @@ -37440,7 +37525,7 @@ _loop0_199_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_199[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_200[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -37457,9 +37542,9 @@ _loop0_199_rule(Parser *p) return _seq; } -// _tmp_200: lambda_param_no_default | ',' +// _tmp_201: lambda_param_no_default | ',' static void * -_tmp_200_rule(Parser *p) +_tmp_201_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -37475,18 +37560,18 @@ _tmp_200_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_200[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _tmp_201[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; if ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - D(fprintf(stderr, "%*c+ _tmp_200[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c+ _tmp_201[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); _res = lambda_param_no_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_200[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_201[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); } { // ',' @@ -37494,18 +37579,18 @@ _tmp_200_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_200[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_201[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_200[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_201[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_200[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_201[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','")); } _res = NULL; @@ -37514,9 +37599,9 @@ _tmp_200_rule(Parser *p) return _res; } -// _tmp_201: '*' | '**' | '/' +// _tmp_202: '*' | '**' | '/' static void * -_tmp_201_rule(Parser *p) +_tmp_202_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -37532,18 +37617,18 @@ _tmp_201_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_201[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'")); + D(fprintf(stderr, "%*c> _tmp_202[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' ) { - D(fprintf(stderr, "%*c+ _tmp_201[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'")); + D(fprintf(stderr, "%*c+ _tmp_202[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_201[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_202[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*'")); } { // '**' @@ -37551,18 +37636,18 @@ _tmp_201_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_201[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c> _tmp_202[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - D(fprintf(stderr, "%*c+ _tmp_201[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c+ _tmp_202[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_201[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_202[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'")); } { // '/' @@ -37570,18 +37655,18 @@ _tmp_201_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_201[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'")); + D(fprintf(stderr, "%*c> _tmp_202[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 17)) // token='/' ) { - D(fprintf(stderr, "%*c+ _tmp_201[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'")); + D(fprintf(stderr, "%*c+ _tmp_202[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_201[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_202[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'/'")); } _res = NULL; @@ -37590,9 +37675,9 @@ _tmp_201_rule(Parser *p) return _res; } -// _tmp_202: ',' | ')' | ':' +// _tmp_203: ',' | ')' | ':' static void * -_tmp_202_rule(Parser *p) +_tmp_203_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -37608,18 +37693,18 @@ _tmp_202_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_202[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_203[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_202[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_203[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_202[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_203[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); } { // ')' @@ -37627,18 +37712,18 @@ _tmp_202_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_202[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_203[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_202[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_203[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_202[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_203[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); } { // ':' @@ -37646,18 +37731,18 @@ _tmp_202_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_202[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_203[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_202[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_203[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_202[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_203[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "':'")); } _res = NULL; @@ -37666,9 +37751,9 @@ _tmp_202_rule(Parser *p) return _res; } -// _loop0_204: ',' dotted_name +// _loop0_205: ',' dotted_name static asdl_seq * -_loop0_204_rule(Parser *p) +_loop0_205_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -37693,7 +37778,7 @@ _loop0_204_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_204[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' dotted_name")); + D(fprintf(stderr, "%*c> _loop0_205[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' dotted_name")); Token * _literal; expr_ty elem; while ( @@ -37725,7 +37810,7 @@ _loop0_204_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_204[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_205[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' dotted_name")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -37742,9 +37827,9 @@ _loop0_204_rule(Parser *p) return _seq; } -// _gather_203: dotted_name _loop0_204 +// _gather_204: dotted_name _loop0_205 static asdl_seq * -_gather_203_rule(Parser *p) +_gather_204_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -37755,27 +37840,27 @@ _gather_203_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // dotted_name _loop0_204 + { // dotted_name _loop0_205 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_203[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dotted_name _loop0_204")); + D(fprintf(stderr, "%*c> _gather_204[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dotted_name _loop0_205")); expr_ty elem; asdl_seq * seq; if ( (elem = dotted_name_rule(p)) // dotted_name && - (seq = _loop0_204_rule(p)) // _loop0_204 + (seq = _loop0_205_rule(p)) // _loop0_205 ) { - D(fprintf(stderr, "%*c+ _gather_203[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_name _loop0_204")); + D(fprintf(stderr, "%*c+ _gather_204[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_name _loop0_205")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_203[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dotted_name _loop0_204")); + D(fprintf(stderr, "%*c%s _gather_204[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "dotted_name _loop0_205")); } _res = NULL; done: @@ -37783,9 +37868,9 @@ _gather_203_rule(Parser *p) return _res; } -// _loop0_206: ',' (expression ['as' star_target]) +// _loop0_207: ',' (expression ['as' star_target]) static asdl_seq * -_loop0_206_rule(Parser *p) +_loop0_207_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -37810,13 +37895,13 @@ _loop0_206_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_206[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expression ['as' star_target])")); + D(fprintf(stderr, "%*c> _loop0_207[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expression ['as' star_target])")); Token * _literal; void *elem; while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_268_rule(p)) // expression ['as' star_target] + (elem = _tmp_270_rule(p)) // expression ['as' star_target] ) { _res = elem; @@ -37842,7 +37927,7 @@ _loop0_206_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_206[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_207[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (expression ['as' star_target])")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -37859,9 +37944,9 @@ _loop0_206_rule(Parser *p) return _seq; } -// _gather_205: (expression ['as' star_target]) _loop0_206 +// _gather_206: (expression ['as' star_target]) _loop0_207 static asdl_seq * -_gather_205_rule(Parser *p) +_gather_206_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -37872,27 +37957,27 @@ _gather_205_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // (expression ['as' star_target]) _loop0_206 + { // (expression ['as' star_target]) _loop0_207 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_205[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_206")); + D(fprintf(stderr, "%*c> _gather_206[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_207")); void *elem; asdl_seq * seq; if ( - (elem = _tmp_268_rule(p)) // expression ['as' star_target] + (elem = _tmp_270_rule(p)) // expression ['as' star_target] && - (seq = _loop0_206_rule(p)) // _loop0_206 + (seq = _loop0_207_rule(p)) // _loop0_207 ) { - D(fprintf(stderr, "%*c+ _gather_205[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_206")); + D(fprintf(stderr, "%*c+ _gather_206[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_207")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_205[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expression ['as' star_target]) _loop0_206")); + D(fprintf(stderr, "%*c%s _gather_206[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(expression ['as' star_target]) _loop0_207")); } _res = NULL; done: @@ -37900,9 +37985,9 @@ _gather_205_rule(Parser *p) return _res; } -// _loop0_208: ',' (expressions ['as' star_target]) +// _loop0_209: ',' (expressions ['as' star_target]) static asdl_seq * -_loop0_208_rule(Parser *p) +_loop0_209_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -37927,13 +38012,13 @@ _loop0_208_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_208[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expressions ['as' star_target])")); + D(fprintf(stderr, "%*c> _loop0_209[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expressions ['as' star_target])")); Token * _literal; void *elem; while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_269_rule(p)) // expressions ['as' star_target] + (elem = _tmp_271_rule(p)) // expressions ['as' star_target] ) { _res = elem; @@ -37959,7 +38044,7 @@ _loop0_208_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_208[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_209[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (expressions ['as' star_target])")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -37976,9 +38061,9 @@ _loop0_208_rule(Parser *p) return _seq; } -// _gather_207: (expressions ['as' star_target]) _loop0_208 +// _gather_208: (expressions ['as' star_target]) _loop0_209 static asdl_seq * -_gather_207_rule(Parser *p) +_gather_208_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -37989,27 +38074,27 @@ _gather_207_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // (expressions ['as' star_target]) _loop0_208 + { // (expressions ['as' star_target]) _loop0_209 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_207[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_208")); + D(fprintf(stderr, "%*c> _gather_208[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_209")); void *elem; asdl_seq * seq; if ( - (elem = _tmp_269_rule(p)) // expressions ['as' star_target] + (elem = _tmp_271_rule(p)) // expressions ['as' star_target] && - (seq = _loop0_208_rule(p)) // _loop0_208 + (seq = _loop0_209_rule(p)) // _loop0_209 ) { - D(fprintf(stderr, "%*c+ _gather_207[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_208")); + D(fprintf(stderr, "%*c+ _gather_208[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_209")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_207[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expressions ['as' star_target]) _loop0_208")); + D(fprintf(stderr, "%*c%s _gather_208[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(expressions ['as' star_target]) _loop0_209")); } _res = NULL; done: @@ -38017,9 +38102,9 @@ _gather_207_rule(Parser *p) return _res; } -// _loop0_210: ',' (expression ['as' star_target]) +// _loop0_211: ',' (expression ['as' star_target]) static asdl_seq * -_loop0_210_rule(Parser *p) +_loop0_211_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -38044,13 +38129,13 @@ _loop0_210_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_210[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expression ['as' star_target])")); + D(fprintf(stderr, "%*c> _loop0_211[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expression ['as' star_target])")); Token * _literal; void *elem; while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_270_rule(p)) // expression ['as' star_target] + (elem = _tmp_272_rule(p)) // expression ['as' star_target] ) { _res = elem; @@ -38076,7 +38161,7 @@ _loop0_210_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_210[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_211[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (expression ['as' star_target])")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -38093,9 +38178,9 @@ _loop0_210_rule(Parser *p) return _seq; } -// _gather_209: (expression ['as' star_target]) _loop0_210 +// _gather_210: (expression ['as' star_target]) _loop0_211 static asdl_seq * -_gather_209_rule(Parser *p) +_gather_210_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -38106,27 +38191,27 @@ _gather_209_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // (expression ['as' star_target]) _loop0_210 + { // (expression ['as' star_target]) _loop0_211 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_209[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_210")); + D(fprintf(stderr, "%*c> _gather_210[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_211")); void *elem; asdl_seq * seq; if ( - (elem = _tmp_270_rule(p)) // expression ['as' star_target] + (elem = _tmp_272_rule(p)) // expression ['as' star_target] && - (seq = _loop0_210_rule(p)) // _loop0_210 + (seq = _loop0_211_rule(p)) // _loop0_211 ) { - D(fprintf(stderr, "%*c+ _gather_209[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_210")); + D(fprintf(stderr, "%*c+ _gather_210[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_211")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_209[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expression ['as' star_target]) _loop0_210")); + D(fprintf(stderr, "%*c%s _gather_210[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(expression ['as' star_target]) _loop0_211")); } _res = NULL; done: @@ -38134,9 +38219,9 @@ _gather_209_rule(Parser *p) return _res; } -// _loop0_212: ',' (expressions ['as' star_target]) +// _loop0_213: ',' (expressions ['as' star_target]) static asdl_seq * -_loop0_212_rule(Parser *p) +_loop0_213_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -38161,13 +38246,13 @@ _loop0_212_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_212[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expressions ['as' star_target])")); + D(fprintf(stderr, "%*c> _loop0_213[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expressions ['as' star_target])")); Token * _literal; void *elem; while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_271_rule(p)) // expressions ['as' star_target] + (elem = _tmp_273_rule(p)) // expressions ['as' star_target] ) { _res = elem; @@ -38193,7 +38278,7 @@ _loop0_212_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_212[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_213[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (expressions ['as' star_target])")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -38210,9 +38295,9 @@ _loop0_212_rule(Parser *p) return _seq; } -// _gather_211: (expressions ['as' star_target]) _loop0_212 +// _gather_212: (expressions ['as' star_target]) _loop0_213 static asdl_seq * -_gather_211_rule(Parser *p) +_gather_212_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -38223,27 +38308,27 @@ _gather_211_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // (expressions ['as' star_target]) _loop0_212 + { // (expressions ['as' star_target]) _loop0_213 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_211[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_212")); + D(fprintf(stderr, "%*c> _gather_212[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_213")); void *elem; asdl_seq * seq; if ( - (elem = _tmp_271_rule(p)) // expressions ['as' star_target] + (elem = _tmp_273_rule(p)) // expressions ['as' star_target] && - (seq = _loop0_212_rule(p)) // _loop0_212 + (seq = _loop0_213_rule(p)) // _loop0_213 ) { - D(fprintf(stderr, "%*c+ _gather_211[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_212")); + D(fprintf(stderr, "%*c+ _gather_212[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_213")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_211[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expressions ['as' star_target]) _loop0_212")); + D(fprintf(stderr, "%*c%s _gather_212[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(expressions ['as' star_target]) _loop0_213")); } _res = NULL; done: @@ -38251,9 +38336,9 @@ _gather_211_rule(Parser *p) return _res; } -// _tmp_213: 'except' | 'finally' +// _tmp_214: 'except' | 'finally' static void * -_tmp_213_rule(Parser *p) +_tmp_214_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -38269,18 +38354,18 @@ _tmp_213_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_213[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except'")); + D(fprintf(stderr, "%*c> _tmp_214[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 653)) // token='except' + (_keyword = _PyPegen_expect_token(p, 656)) // token='except' ) { - D(fprintf(stderr, "%*c+ _tmp_213[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except'")); + D(fprintf(stderr, "%*c+ _tmp_214[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_213[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_214[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'except'")); } { // 'finally' @@ -38288,18 +38373,18 @@ _tmp_213_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_213[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'finally'")); + D(fprintf(stderr, "%*c> _tmp_214[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'finally'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 649)) // token='finally' + (_keyword = _PyPegen_expect_token(p, 652)) // token='finally' ) { - D(fprintf(stderr, "%*c+ _tmp_213[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'finally'")); + D(fprintf(stderr, "%*c+ _tmp_214[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'finally'")); _res = _keyword; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_213[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_214[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'finally'")); } _res = NULL; @@ -38308,9 +38393,9 @@ _tmp_213_rule(Parser *p) return _res; } -// _loop0_214: block +// _loop0_215: block static asdl_seq * -_loop0_214_rule(Parser *p) +_loop0_215_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -38335,7 +38420,7 @@ _loop0_214_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_214[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "block")); + D(fprintf(stderr, "%*c> _loop0_215[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "block")); asdl_stmt_seq* block_var; while ( (block_var = block_rule(p)) // block @@ -38358,7 +38443,7 @@ _loop0_214_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_214[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_215[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "block")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -38375,9 +38460,9 @@ _loop0_214_rule(Parser *p) return _seq; } -// _loop1_215: except_block +// _loop1_216: except_block static asdl_seq * -_loop1_215_rule(Parser *p) +_loop1_216_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -38402,7 +38487,7 @@ _loop1_215_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_215[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block")); + D(fprintf(stderr, "%*c> _loop1_216[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block")); excepthandler_ty except_block_var; while ( (except_block_var = except_block_rule(p)) // except_block @@ -38425,7 +38510,7 @@ _loop1_215_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_215[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_216[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "except_block")); } if (_n == 0 || p->error_indicator) { @@ -38447,9 +38532,9 @@ _loop1_215_rule(Parser *p) return _seq; } -// _tmp_216: 'as' NAME +// _tmp_217: 'as' NAME static void * -_tmp_216_rule(Parser *p) +_tmp_217_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -38465,21 +38550,21 @@ _tmp_216_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_216[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_217[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 656)) // token='as' + (_keyword = _PyPegen_expect_token(p, 659)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_216[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_217[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = _PyPegen_dummy_name(p, _keyword, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_216[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_217[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -38488,9 +38573,9 @@ _tmp_216_rule(Parser *p) return _res; } -// _loop0_217: block +// _loop0_218: block static asdl_seq * -_loop0_217_rule(Parser *p) +_loop0_218_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -38515,7 +38600,7 @@ _loop0_217_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_217[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "block")); + D(fprintf(stderr, "%*c> _loop0_218[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "block")); asdl_stmt_seq* block_var; while ( (block_var = block_rule(p)) // block @@ -38538,7 +38623,7 @@ _loop0_217_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_217[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_218[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "block")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -38555,9 +38640,9 @@ _loop0_217_rule(Parser *p) return _seq; } -// _loop1_218: except_star_block +// _loop1_219: except_star_block static asdl_seq * -_loop1_218_rule(Parser *p) +_loop1_219_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -38582,7 +38667,7 @@ _loop1_218_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop1_218[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_star_block")); + D(fprintf(stderr, "%*c> _loop1_219[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_star_block")); excepthandler_ty except_star_block_var; while ( (except_star_block_var = except_star_block_rule(p)) // except_star_block @@ -38605,7 +38690,7 @@ _loop1_218_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_218[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_219[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "except_star_block")); } if (_n == 0 || p->error_indicator) { @@ -38627,9 +38712,9 @@ _loop1_218_rule(Parser *p) return _seq; } -// _tmp_219: expression ['as' NAME] +// _tmp_220: expression ['as' NAME] static void * -_tmp_219_rule(Parser *p) +_tmp_220_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -38645,22 +38730,22 @@ _tmp_219_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_219[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' NAME]")); + D(fprintf(stderr, "%*c> _tmp_220[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' NAME]")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty expression_var; if ( (expression_var = expression_rule(p)) // expression && - (_opt_var = _tmp_272_rule(p), !p->error_indicator) // ['as' NAME] + (_opt_var = _tmp_274_rule(p), !p->error_indicator) // ['as' NAME] ) { - D(fprintf(stderr, "%*c+ _tmp_219[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' NAME]")); + D(fprintf(stderr, "%*c+ _tmp_220[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' NAME]")); _res = _PyPegen_dummy_name(p, expression_var, _opt_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_219[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_220[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression ['as' NAME]")); } _res = NULL; @@ -38669,9 +38754,9 @@ _tmp_219_rule(Parser *p) return _res; } -// _tmp_220: 'as' NAME +// _tmp_221: 'as' NAME static void * -_tmp_220_rule(Parser *p) +_tmp_221_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -38687,21 +38772,21 @@ _tmp_220_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_220[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_221[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 656)) // token='as' + (_keyword = _PyPegen_expect_token(p, 659)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_220[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_221[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = _PyPegen_dummy_name(p, _keyword, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_220[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_221[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -38710,9 +38795,9 @@ _tmp_220_rule(Parser *p) return _res; } -// _tmp_221: 'as' NAME +// _tmp_222: 'as' NAME static void * -_tmp_221_rule(Parser *p) +_tmp_222_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -38728,21 +38813,21 @@ _tmp_221_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_221[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_222[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 656)) // token='as' + (_keyword = _PyPegen_expect_token(p, 659)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_221[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_222[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = _PyPegen_dummy_name(p, _keyword, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_221[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_222[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -38751,9 +38836,9 @@ _tmp_221_rule(Parser *p) return _res; } -// _tmp_222: NEWLINE | ':' +// _tmp_223: NEWLINE | ':' static void * -_tmp_222_rule(Parser *p) +_tmp_223_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -38769,18 +38854,18 @@ _tmp_222_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_222[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE")); + D(fprintf(stderr, "%*c> _tmp_223[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE")); Token * newline_var; if ( (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - D(fprintf(stderr, "%*c+ _tmp_222[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE")); + D(fprintf(stderr, "%*c+ _tmp_223[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE")); _res = newline_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_222[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_223[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE")); } { // ':' @@ -38788,18 +38873,18 @@ _tmp_222_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_222[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_223[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_222[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_223[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_222[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_223[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); } _res = NULL; @@ -38808,9 +38893,9 @@ _tmp_222_rule(Parser *p) return _res; } -// _tmp_223: 'as' NAME +// _tmp_224: 'as' NAME static void * -_tmp_223_rule(Parser *p) +_tmp_224_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -38826,21 +38911,21 @@ _tmp_223_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_223[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_224[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 656)) // token='as' + (_keyword = _PyPegen_expect_token(p, 659)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_223[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_224[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = _PyPegen_dummy_name(p, _keyword, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_223[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_224[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -38849,9 +38934,9 @@ _tmp_223_rule(Parser *p) return _res; } -// _tmp_224: 'as' NAME +// _tmp_225: 'as' NAME static void * -_tmp_224_rule(Parser *p) +_tmp_225_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -38867,21 +38952,21 @@ _tmp_224_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_224[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_225[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 656)) // token='as' + (_keyword = _PyPegen_expect_token(p, 659)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_224[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_225[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = _PyPegen_dummy_name(p, _keyword, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_224[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_225[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -38890,9 +38975,9 @@ _tmp_224_rule(Parser *p) return _res; } -// _tmp_225: positional_patterns ',' +// _tmp_226: positional_patterns ',' static void * -_tmp_225_rule(Parser *p) +_tmp_226_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -38908,7 +38993,7 @@ _tmp_225_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_225[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "positional_patterns ','")); + D(fprintf(stderr, "%*c> _tmp_226[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "positional_patterns ','")); Token * _literal; asdl_pattern_seq* positional_patterns_var; if ( @@ -38917,12 +39002,12 @@ _tmp_225_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_225[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "positional_patterns ','")); + D(fprintf(stderr, "%*c+ _tmp_226[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "positional_patterns ','")); _res = _PyPegen_dummy_name(p, positional_patterns_var, _literal); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_225[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_226[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "positional_patterns ','")); } _res = NULL; @@ -38931,9 +39016,9 @@ _tmp_225_rule(Parser *p) return _res; } -// _tmp_226: '->' expression +// _tmp_227: '->' expression static void * -_tmp_226_rule(Parser *p) +_tmp_227_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -38949,7 +39034,7 @@ _tmp_226_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_226[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression")); + D(fprintf(stderr, "%*c> _tmp_227[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression")); Token * _literal; expr_ty expression_var; if ( @@ -38958,12 +39043,12 @@ _tmp_226_rule(Parser *p) (expression_var = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ _tmp_226[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression")); + D(fprintf(stderr, "%*c+ _tmp_227[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression")); _res = _PyPegen_dummy_name(p, _literal, expression_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_226[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_227[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'->' expression")); } _res = NULL; @@ -38972,9 +39057,9 @@ _tmp_226_rule(Parser *p) return _res; } -// _tmp_227: '(' arguments? ')' +// _tmp_228: '(' arguments? ')' static void * -_tmp_227_rule(Parser *p) +_tmp_228_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -38990,7 +39075,7 @@ _tmp_227_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_227[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); + D(fprintf(stderr, "%*c> _tmp_228[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); Token * _literal; Token * _literal_1; void *_opt_var; @@ -39003,12 +39088,12 @@ _tmp_227_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_227[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); + D(fprintf(stderr, "%*c+ _tmp_228[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); _res = _PyPegen_dummy_name(p, _literal, _opt_var, _literal_1); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_227[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_228[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' arguments? ')'")); } _res = NULL; @@ -39017,9 +39102,9 @@ _tmp_227_rule(Parser *p) return _res; } -// _tmp_228: '(' arguments? ')' +// _tmp_229: '(' arguments? ')' static void * -_tmp_228_rule(Parser *p) +_tmp_229_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -39035,7 +39120,7 @@ _tmp_228_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_228[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); + D(fprintf(stderr, "%*c> _tmp_229[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); Token * _literal; Token * _literal_1; void *_opt_var; @@ -39048,12 +39133,12 @@ _tmp_228_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_228[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); + D(fprintf(stderr, "%*c+ _tmp_229[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? 
')'")); _res = _PyPegen_dummy_name(p, _literal, _opt_var, _literal_1); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_228[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_229[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' arguments? ')'")); } _res = NULL; @@ -39062,9 +39147,9 @@ _tmp_228_rule(Parser *p) return _res; } -// _loop0_230: ',' double_starred_kvpair +// _loop0_231: ',' double_starred_kvpair static asdl_seq * -_loop0_230_rule(Parser *p) +_loop0_231_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -39089,7 +39174,7 @@ _loop0_230_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_230[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' double_starred_kvpair")); + D(fprintf(stderr, "%*c> _loop0_231[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' double_starred_kvpair")); Token * _literal; KeyValuePair* elem; while ( @@ -39121,7 +39206,7 @@ _loop0_230_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_230[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_231[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' double_starred_kvpair")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -39138,9 +39223,9 @@ _loop0_230_rule(Parser *p) return _seq; } -// _gather_229: double_starred_kvpair _loop0_230 +// _gather_230: double_starred_kvpair _loop0_231 static asdl_seq * -_gather_229_rule(Parser *p) +_gather_230_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -39151,27 +39236,27 @@ _gather_229_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // double_starred_kvpair _loop0_230 + { // double_starred_kvpair _loop0_231 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_229[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_230")); + D(fprintf(stderr, "%*c> _gather_230[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_231")); KeyValuePair* elem; asdl_seq * seq; if ( (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair && - (seq = _loop0_230_rule(p)) // _loop0_230 + (seq = _loop0_231_rule(p)) // _loop0_231 ) { - D(fprintf(stderr, "%*c+ _gather_229[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_230")); + D(fprintf(stderr, "%*c+ _gather_230[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_231")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_229[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "double_starred_kvpair _loop0_230")); + D(fprintf(stderr, "%*c%s _gather_230[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "double_starred_kvpair _loop0_231")); } _res = NULL; done: @@ -39179,9 +39264,9 @@ _gather_229_rule(Parser *p) return _res; } -// _tmp_231: '}' | ',' +// _tmp_232: '}' | ',' static void * -_tmp_231_rule(Parser *p) +_tmp_232_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -39197,18 +39282,18 @@ _tmp_231_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_231[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); + D(fprintf(stderr, "%*c> _tmp_232[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 26)) // token='}' ) { - D(fprintf(stderr, "%*c+ _tmp_231[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); + D(fprintf(stderr, "%*c+ _tmp_232[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_231[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_232[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'")); } { // ',' @@ -39216,18 +39301,18 @@ _tmp_231_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_231[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_232[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_231[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_232[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_231[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_232[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); } _res = NULL; @@ -39236,9 +39321,9 @@ _tmp_231_rule(Parser *p) return _res; } -// _tmp_232: '}' | ',' +// _tmp_233: '}' | ',' static void * -_tmp_232_rule(Parser *p) +_tmp_233_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -39254,18 +39339,18 @@ _tmp_232_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_232[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); + D(fprintf(stderr, "%*c> _tmp_233[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 26)) // token='}' ) { - D(fprintf(stderr, "%*c+ _tmp_232[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); + D(fprintf(stderr, "%*c+ _tmp_233[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_232[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_233[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'}'")); } { // ',' @@ -39273,18 +39358,18 @@ _tmp_232_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_232[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c> _tmp_233[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_232[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + D(fprintf(stderr, "%*c+ _tmp_233[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_232[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_233[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); } _res = NULL; @@ -39293,9 +39378,9 @@ _tmp_232_rule(Parser *p) return _res; } -// _tmp_233: yield_expr | star_expressions +// _tmp_234: yield_expr | star_expressions static void * -_tmp_233_rule(Parser *p) +_tmp_234_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -39311,18 +39396,18 @@ _tmp_233_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_233[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c> _tmp_234[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - D(fprintf(stderr, "%*c+ _tmp_233[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c+ _tmp_234[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); _res = yield_expr_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_233[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_234[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); } { // star_expressions @@ -39330,18 +39415,18 @@ _tmp_233_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_233[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c> _tmp_234[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - D(fprintf(stderr, "%*c+ _tmp_233[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c+ _tmp_234[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); _res = star_expressions_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_233[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_234[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_expressions")); } _res = NULL; @@ -39350,9 +39435,9 @@ _tmp_233_rule(Parser *p) return _res; } -// _tmp_234: yield_expr | star_expressions +// _tmp_235: yield_expr | star_expressions static void * -_tmp_234_rule(Parser *p) +_tmp_235_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -39368,18 +39453,18 @@ _tmp_234_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_234[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c> _tmp_235[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - D(fprintf(stderr, "%*c+ _tmp_234[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c+ _tmp_235[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); _res = yield_expr_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_234[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_235[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); } { // star_expressions @@ -39387,18 +39472,18 @@ _tmp_234_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_234[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c> _tmp_235[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - D(fprintf(stderr, "%*c+ _tmp_234[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c+ _tmp_235[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); _res = star_expressions_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_234[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_235[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); } _res = NULL; @@ -39407,9 +39492,9 @@ _tmp_234_rule(Parser *p) return _res; } -// _tmp_235: '=' | '!' | ':' | '}' +// _tmp_236: '=' | '!' | ':' | '}' static void * -_tmp_235_rule(Parser *p) +_tmp_236_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -39425,18 +39510,18 @@ _tmp_235_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_235[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='")); + D(fprintf(stderr, "%*c> _tmp_236[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_235[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='")); + D(fprintf(stderr, "%*c+ _tmp_236[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_235[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_236[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'='")); } { // '!' @@ -39444,18 +39529,18 @@ _tmp_235_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_235[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!'")); + D(fprintf(stderr, "%*c> _tmp_236[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 54)) // token='!' 
) { - D(fprintf(stderr, "%*c+ _tmp_235[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!'")); + D(fprintf(stderr, "%*c+ _tmp_236[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_235[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_236[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!'")); } { // ':' @@ -39463,18 +39548,18 @@ _tmp_235_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_235[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_236[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_235[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_236[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_235[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_236[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); } { // '}' @@ -39482,18 +39567,18 @@ _tmp_235_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_235[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); + D(fprintf(stderr, "%*c> _tmp_236[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 26)) // token='}' ) { - D(fprintf(stderr, "%*c+ _tmp_235[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); + D(fprintf(stderr, "%*c+ _tmp_236[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_235[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_236[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'")); } _res = NULL; @@ -39502,9 +39587,9 @@ _tmp_235_rule(Parser *p) return _res; } -// _tmp_236: yield_expr | star_expressions +// _tmp_237: yield_expr | star_expressions static void * -_tmp_236_rule(Parser *p) +_tmp_237_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -39520,18 +39605,18 @@ _tmp_236_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_236[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c> _tmp_237[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - D(fprintf(stderr, "%*c+ _tmp_236[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c+ _tmp_237[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); _res = yield_expr_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_236[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_237[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "yield_expr")); } { // star_expressions @@ -39539,18 +39624,18 @@ _tmp_236_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_236[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c> _tmp_237[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - D(fprintf(stderr, "%*c+ _tmp_236[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c+ _tmp_237[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); _res = star_expressions_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_236[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_237[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); } _res = NULL; @@ -39559,9 +39644,9 @@ _tmp_236_rule(Parser *p) return _res; } -// _tmp_237: '!' | ':' | '}' +// _tmp_238: '!' | ':' | '}' static void * -_tmp_237_rule(Parser *p) +_tmp_238_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -39577,18 +39662,18 @@ _tmp_237_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_237[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!'")); + D(fprintf(stderr, "%*c> _tmp_238[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 54)) // token='!' ) { - D(fprintf(stderr, "%*c+ _tmp_237[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!'")); + D(fprintf(stderr, "%*c+ _tmp_238[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_237[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_238[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!'")); } { // ':' @@ -39596,18 +39681,18 @@ _tmp_237_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_237[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_238[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_237[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_238[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_237[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_238[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "':'")); } { // '}' @@ -39615,18 +39700,18 @@ _tmp_237_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_237[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); + D(fprintf(stderr, "%*c> _tmp_238[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 26)) // token='}' ) { - D(fprintf(stderr, "%*c+ _tmp_237[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); + D(fprintf(stderr, "%*c+ _tmp_238[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_237[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_238[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'")); } _res = NULL; @@ -39635,9 +39720,9 @@ _tmp_237_rule(Parser *p) return _res; } -// _tmp_238: yield_expr | star_expressions +// _tmp_239: yield_expr | star_expressions static void * -_tmp_238_rule(Parser *p) +_tmp_239_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -39653,18 +39738,18 @@ _tmp_238_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_238[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c> _tmp_239[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - D(fprintf(stderr, "%*c+ _tmp_238[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c+ _tmp_239[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); _res = yield_expr_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_238[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_239[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); } { // star_expressions @@ -39672,18 +39757,18 @@ _tmp_238_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_238[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c> _tmp_239[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - D(fprintf(stderr, "%*c+ _tmp_238[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c+ _tmp_239[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); _res = star_expressions_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_238[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_239[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_expressions")); } _res = NULL; @@ -39692,9 +39777,9 @@ _tmp_238_rule(Parser *p) return _res; } -// _tmp_239: yield_expr | star_expressions +// _tmp_240: yield_expr | star_expressions static void * -_tmp_239_rule(Parser *p) +_tmp_240_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -39710,18 +39795,18 @@ _tmp_239_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_239[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c> _tmp_240[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - D(fprintf(stderr, "%*c+ _tmp_239[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c+ _tmp_240[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); _res = yield_expr_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_239[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_240[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); } { // star_expressions @@ -39729,18 +39814,18 @@ _tmp_239_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_239[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c> _tmp_240[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - D(fprintf(stderr, "%*c+ _tmp_239[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c+ _tmp_240[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); _res = star_expressions_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_239[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_240[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); } _res = NULL; @@ -39749,9 +39834,9 @@ _tmp_239_rule(Parser *p) return _res; } -// _tmp_240: '!' NAME +// _tmp_241: '!' NAME static void * -_tmp_240_rule(Parser *p) +_tmp_241_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -39767,7 +39852,7 @@ _tmp_240_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_240[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!' NAME")); + D(fprintf(stderr, "%*c> _tmp_241[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!' NAME")); Token * _literal; expr_ty name_var; if ( @@ -39776,12 +39861,12 @@ _tmp_240_rule(Parser *p) (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_240[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!' NAME")); + D(fprintf(stderr, "%*c+ _tmp_241[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!' NAME")); _res = _PyPegen_dummy_name(p, _literal, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_240[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_241[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!' 
NAME")); } _res = NULL; @@ -39790,9 +39875,9 @@ _tmp_240_rule(Parser *p) return _res; } -// _tmp_241: ':' | '}' +// _tmp_242: ':' | '}' static void * -_tmp_241_rule(Parser *p) +_tmp_242_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -39808,18 +39893,18 @@ _tmp_241_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_241[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_242[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_241[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_242[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_241[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_242[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); } { // '}' @@ -39827,18 +39912,18 @@ _tmp_241_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_241[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); + D(fprintf(stderr, "%*c> _tmp_242[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 26)) // token='}' ) { - D(fprintf(stderr, "%*c+ _tmp_241[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); + D(fprintf(stderr, "%*c+ _tmp_242[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_241[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_242[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'")); } _res = NULL; @@ -39847,9 +39932,9 @@ _tmp_241_rule(Parser *p) return _res; } -// _tmp_242: yield_expr | star_expressions +// _tmp_243: yield_expr | star_expressions static void * -_tmp_242_rule(Parser *p) +_tmp_243_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -39865,18 +39950,18 @@ _tmp_242_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_242[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c> _tmp_243[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - D(fprintf(stderr, "%*c+ _tmp_242[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c+ _tmp_243[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); _res = yield_expr_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_242[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_243[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "yield_expr")); } { // star_expressions @@ -39884,18 +39969,18 @@ _tmp_242_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_242[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c> _tmp_243[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - D(fprintf(stderr, "%*c+ _tmp_242[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c+ _tmp_243[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); _res = star_expressions_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_242[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_243[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); } _res = NULL; @@ -39904,9 +39989,9 @@ _tmp_242_rule(Parser *p) return _res; } -// _tmp_243: '!' NAME +// _tmp_244: '!' NAME static void * -_tmp_243_rule(Parser *p) +_tmp_244_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -39922,7 +40007,7 @@ _tmp_243_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_243[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!' NAME")); + D(fprintf(stderr, "%*c> _tmp_244[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!' NAME")); Token * _literal; expr_ty name_var; if ( @@ -39931,12 +40016,12 @@ _tmp_243_rule(Parser *p) (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_243[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!' NAME")); + D(fprintf(stderr, "%*c+ _tmp_244[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!' NAME")); _res = _PyPegen_dummy_name(p, _literal, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_243[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_244[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!' NAME")); } _res = NULL; @@ -39945,9 +40030,9 @@ _tmp_243_rule(Parser *p) return _res; } -// _loop0_244: fstring_format_spec +// _loop0_245: fstring_format_spec static asdl_seq * -_loop0_244_rule(Parser *p) +_loop0_245_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -39972,7 +40057,7 @@ _loop0_244_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_244[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring_format_spec")); + D(fprintf(stderr, "%*c> _loop0_245[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring_format_spec")); expr_ty fstring_format_spec_var; while ( (fstring_format_spec_var = fstring_format_spec_rule(p)) // fstring_format_spec @@ -39995,7 +40080,7 @@ _loop0_244_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_244[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_245[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "fstring_format_spec")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -40012,9 +40097,9 @@ _loop0_244_rule(Parser *p) return _seq; } -// _tmp_245: yield_expr | star_expressions +// _tmp_246: yield_expr | star_expressions static void * -_tmp_245_rule(Parser *p) +_tmp_246_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40030,18 +40115,18 @@ _tmp_245_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_245[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c> _tmp_246[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - D(fprintf(stderr, "%*c+ _tmp_245[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c+ _tmp_246[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); _res = yield_expr_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_245[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_246[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); } { // star_expressions @@ -40049,18 +40134,18 @@ _tmp_245_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_245[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c> _tmp_246[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - D(fprintf(stderr, "%*c+ _tmp_245[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c+ _tmp_246[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); _res = star_expressions_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_245[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_246[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); } _res = NULL; @@ -40069,9 +40154,9 @@ _tmp_245_rule(Parser *p) return _res; } -// _tmp_246: '!' NAME +// _tmp_247: '!' NAME static void * -_tmp_246_rule(Parser *p) +_tmp_247_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40087,7 +40172,7 @@ _tmp_246_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_246[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!' NAME")); + D(fprintf(stderr, "%*c> _tmp_247[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!' NAME")); Token * _literal; expr_ty name_var; if ( @@ -40096,12 +40181,12 @@ _tmp_246_rule(Parser *p) (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_246[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!' NAME")); + D(fprintf(stderr, "%*c+ _tmp_247[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!' NAME")); _res = _PyPegen_dummy_name(p, _literal, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_246[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_247[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!' 
NAME")); } _res = NULL; @@ -40110,9 +40195,9 @@ _tmp_246_rule(Parser *p) return _res; } -// _tmp_247: ':' | '}' +// _tmp_248: ':' | '}' static void * -_tmp_247_rule(Parser *p) +_tmp_248_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40128,18 +40213,18 @@ _tmp_247_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_247[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_248[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_247[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_248[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_247[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_248[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); } { // '}' @@ -40147,18 +40232,18 @@ _tmp_247_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_247[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); + D(fprintf(stderr, "%*c> _tmp_248[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 26)) // token='}' ) { - D(fprintf(stderr, "%*c+ _tmp_247[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); + D(fprintf(stderr, "%*c+ _tmp_248[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_247[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_248[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'")); } _res = NULL; @@ -40167,9 +40252,9 @@ _tmp_247_rule(Parser *p) return _res; } -// _tmp_248: star_targets '=' +// _tmp_249: star_targets '=' static void * -_tmp_248_rule(Parser *p) +_tmp_249_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40185,7 +40270,7 @@ _tmp_248_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_248[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c> _tmp_249[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); Token * _literal; expr_ty z; if ( @@ -40194,7 +40279,7 @@ _tmp_248_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_248[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c+ _tmp_249[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -40204,7 +40289,7 @@ _tmp_248_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_248[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_249[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); } _res = NULL; @@ -40213,9 +40298,9 @@ _tmp_248_rule(Parser *p) return _res; } -// _tmp_249: '.' | '...' +// _tmp_250: '.' | '...' 
static void * -_tmp_249_rule(Parser *p) +_tmp_250_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40231,18 +40316,18 @@ _tmp_249_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_249[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c> _tmp_250[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - D(fprintf(stderr, "%*c+ _tmp_249[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c+ _tmp_250[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_249[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_250[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'")); } { // '...' @@ -40250,18 +40335,18 @@ _tmp_249_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_249[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c> _tmp_250[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 52)) // token='...' ) { - D(fprintf(stderr, "%*c+ _tmp_249[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c+ _tmp_250[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_249[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_250[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'...'")); } _res = NULL; @@ -40270,9 +40355,9 @@ _tmp_249_rule(Parser *p) return _res; } -// _tmp_250: '.' | '...' +// _tmp_251: '.' | '...' static void * -_tmp_250_rule(Parser *p) +_tmp_251_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40288,18 +40373,18 @@ _tmp_250_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_250[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c> _tmp_251[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - D(fprintf(stderr, "%*c+ _tmp_250[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c+ _tmp_251[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_250[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_251[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'")); } { // '...' @@ -40307,18 +40392,18 @@ _tmp_250_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_250[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c> _tmp_251[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 52)) // token='...' 
) { - D(fprintf(stderr, "%*c+ _tmp_250[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c+ _tmp_251[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_250[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_251[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'...'")); } _res = NULL; @@ -40327,9 +40412,9 @@ _tmp_250_rule(Parser *p) return _res; } -// _tmp_251: '@' named_expression NEWLINE +// _tmp_252: '@' named_expression NEWLINE static void * -_tmp_251_rule(Parser *p) +_tmp_252_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40345,7 +40430,7 @@ _tmp_251_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_251[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); + D(fprintf(stderr, "%*c> _tmp_252[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); Token * _literal; expr_ty f; Token * newline_var; @@ -40357,7 +40442,7 @@ _tmp_251_rule(Parser *p) (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - D(fprintf(stderr, "%*c+ _tmp_251[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); + D(fprintf(stderr, "%*c+ _tmp_252[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); _res = f; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -40367,7 +40452,7 @@ _tmp_251_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_251[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_252[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'@' named_expression NEWLINE")); } _res = NULL; @@ -40376,9 +40461,9 @@ _tmp_251_rule(Parser *p) return _res; } -// _tmp_252: ',' expression +// _tmp_253: ',' expression static void * -_tmp_252_rule(Parser *p) +_tmp_253_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40394,7 +40479,7 @@ _tmp_252_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_252[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c> _tmp_253[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); Token * _literal; expr_ty c; if ( @@ -40403,7 +40488,7 @@ _tmp_252_rule(Parser *p) (c = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ _tmp_252[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c+ _tmp_253[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -40413,7 +40498,7 @@ _tmp_252_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_252[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_253[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' expression")); } _res = NULL; @@ -40422,9 +40507,9 @@ _tmp_252_rule(Parser *p) return _res; } -// _tmp_253: ',' star_expression +// _tmp_254: ',' star_expression static void * -_tmp_253_rule(Parser *p) +_tmp_254_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40440,7 +40525,7 @@ _tmp_253_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_253[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression")); + D(fprintf(stderr, "%*c> _tmp_254[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression")); Token * _literal; expr_ty c; if ( @@ -40449,7 +40534,7 @@ _tmp_253_rule(Parser *p) (c = star_expression_rule(p)) // star_expression ) { - D(fprintf(stderr, "%*c+ _tmp_253[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression")); + D(fprintf(stderr, "%*c+ _tmp_254[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -40459,7 +40544,7 @@ _tmp_253_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_253[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_254[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_expression")); } _res = NULL; @@ -40468,9 +40553,9 @@ _tmp_253_rule(Parser *p) return _res; } -// _tmp_254: 'or' conjunction +// _tmp_255: 'or' conjunction static void * -_tmp_254_rule(Parser *p) +_tmp_255_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40486,7 +40571,7 @@ _tmp_254_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_254[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); + D(fprintf(stderr, "%*c> _tmp_255[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); Token * _keyword; expr_ty c; if ( @@ -40495,7 +40580,7 @@ _tmp_254_rule(Parser *p) (c = conjunction_rule(p)) // conjunction ) { - D(fprintf(stderr, "%*c+ _tmp_254[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); + D(fprintf(stderr, "%*c+ _tmp_255[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -40505,7 +40590,7 @@ _tmp_254_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_254[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_255[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'or' conjunction")); } _res = NULL; @@ -40514,9 +40599,9 @@ _tmp_254_rule(Parser *p) return _res; } -// _tmp_255: 'and' inversion +// _tmp_256: 'and' inversion static void * -_tmp_255_rule(Parser *p) +_tmp_256_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40532,7 +40617,7 @@ _tmp_255_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_255[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion")); + D(fprintf(stderr, "%*c> _tmp_256[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion")); Token * _keyword; expr_ty c; if ( @@ -40541,7 +40626,7 @@ _tmp_255_rule(Parser *p) (c = inversion_rule(p)) // inversion ) { - D(fprintf(stderr, "%*c+ _tmp_255[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion")); + D(fprintf(stderr, "%*c+ _tmp_256[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -40551,7 +40636,7 @@ _tmp_255_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_255[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_256[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'and' inversion")); } _res = NULL; @@ -40560,9 +40645,9 @@ _tmp_255_rule(Parser *p) return _res; } -// _tmp_256: slice | starred_expression +// _tmp_257: slice | starred_expression static void * -_tmp_256_rule(Parser *p) +_tmp_257_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40578,18 +40663,18 @@ _tmp_256_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_256[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slice")); + D(fprintf(stderr, "%*c> _tmp_257[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slice")); expr_ty slice_var; if ( (slice_var = slice_rule(p)) // slice ) { - D(fprintf(stderr, "%*c+ _tmp_256[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slice")); + D(fprintf(stderr, "%*c+ _tmp_257[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slice")); _res = slice_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_256[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_257[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slice")); } { // starred_expression @@ -40597,18 +40682,18 @@ _tmp_256_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_256[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); + D(fprintf(stderr, "%*c> _tmp_257[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); expr_ty starred_expression_var; if ( (starred_expression_var = starred_expression_rule(p)) // starred_expression ) { - D(fprintf(stderr, "%*c+ _tmp_256[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); + D(fprintf(stderr, "%*c+ _tmp_257[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); _res = starred_expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_256[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_257[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "starred_expression")); } _res = NULL; @@ -40617,9 +40702,9 @@ _tmp_256_rule(Parser *p) return _res; } -// _tmp_257: fstring | string +// _tmp_258: fstring | string static void * -_tmp_257_rule(Parser *p) +_tmp_258_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40635,18 +40720,18 @@ _tmp_257_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_257[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring")); + D(fprintf(stderr, "%*c> _tmp_258[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring")); expr_ty fstring_var; if ( (fstring_var = fstring_rule(p)) // fstring ) { - D(fprintf(stderr, "%*c+ _tmp_257[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "fstring")); + D(fprintf(stderr, "%*c+ _tmp_258[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "fstring")); _res = fstring_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_257[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_258[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "fstring")); } { // string @@ -40654,18 +40739,18 @@ _tmp_257_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_257[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "string")); + D(fprintf(stderr, "%*c> _tmp_258[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "string")); expr_ty string_var; if ( (string_var = string_rule(p)) // string ) { - D(fprintf(stderr, "%*c+ _tmp_257[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "string")); + D(fprintf(stderr, "%*c+ _tmp_258[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "string")); _res = string_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_257[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_258[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "string")); } _res = NULL; @@ -40674,9 +40759,9 @@ _tmp_257_rule(Parser *p) return _res; } -// _tmp_258: 'if' disjunction +// _tmp_259: 'if' disjunction static void * -_tmp_258_rule(Parser *p) +_tmp_259_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40692,16 +40777,16 @@ _tmp_258_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_258[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c> _tmp_259[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 658)) // token='if' + (_keyword = _PyPegen_expect_token(p, 661)) // token='if' && (z = disjunction_rule(p)) // disjunction ) { - D(fprintf(stderr, "%*c+ _tmp_258[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c+ _tmp_259[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -40711,7 +40796,7 @@ _tmp_258_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_258[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_259[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'if' disjunction")); } _res = NULL; @@ -40720,9 +40805,9 @@ _tmp_258_rule(Parser *p) return _res; } -// _tmp_259: 'if' disjunction +// _tmp_260: 'if' disjunction static void * -_tmp_259_rule(Parser *p) +_tmp_260_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40738,16 +40823,16 @@ _tmp_259_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_259[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c> _tmp_260[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 658)) // token='if' + (_keyword = _PyPegen_expect_token(p, 661)) // token='if' && (z = disjunction_rule(p)) // disjunction ) { - D(fprintf(stderr, "%*c+ _tmp_259[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c+ _tmp_260[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -40757,7 +40842,7 @@ _tmp_259_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_259[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_260[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'if' disjunction")); } _res = NULL; @@ -40766,9 +40851,76 @@ _tmp_259_rule(Parser *p) return _res; } -// _tmp_260: starred_expression | (assignment_expression | expression !':=') !'=' +// _loop0_261: (',' bitwise_or) +static asdl_seq * +_loop0_261_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + _Pypegen_stack_overflow(p); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + Py_ssize_t _children_capacity = 1; + Py_ssize_t _n = 0; + { // (',' bitwise_or) + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_261[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' bitwise_or)")); + void *_tmp_275_var; + while ( + (_tmp_275_var = _tmp_275_rule(p)) // ',' bitwise_or + ) + { + _res = _tmp_275_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_261[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(',' bitwise_or)")); + } + asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + p->level--; + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]); + PyMem_Free(_children); + p->level--; + return _seq; +} + +// _tmp_262: starred_expression | (assignment_expression | expression !':=') !'=' static void * -_tmp_260_rule(Parser *p) +_tmp_262_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40784,18 +40936,18 @@ _tmp_260_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_260[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); + D(fprintf(stderr, "%*c> _tmp_262[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); expr_ty starred_expression_var; if ( (starred_expression_var = starred_expression_rule(p)) // starred_expression ) { - D(fprintf(stderr, "%*c+ _tmp_260[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); + D(fprintf(stderr, "%*c+ _tmp_262[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); _res = starred_expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_260[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_262[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "starred_expression")); } { // (assignment_expression | expression !':=') !'=' @@ -40803,20 +40955,20 @@ _tmp_260_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_260[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); - void *_tmp_273_var; + D(fprintf(stderr, "%*c> _tmp_262[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); + void *_tmp_276_var; if ( - (_tmp_273_var = _tmp_273_rule(p)) // assignment_expression | expression !':=' + (_tmp_276_var = _tmp_276_rule(p)) // assignment_expression | expression !':=' && _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 22) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_260[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); - _res = _tmp_273_var; + D(fprintf(stderr, "%*c+ _tmp_262[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); + _res = _tmp_276_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_260[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_262[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(assignment_expression | expression !':=') !'='")); } _res = NULL; @@ -40825,9 +40977,9 @@ _tmp_260_rule(Parser *p) return _res; } -// _tmp_261: ',' star_target +// _tmp_263: ',' star_target static void * -_tmp_261_rule(Parser *p) +_tmp_263_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40843,7 +40995,7 @@ _tmp_261_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_261[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c> _tmp_263[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); Token * _literal; expr_ty c; if ( @@ -40852,7 +41004,7 @@ _tmp_261_rule(Parser *p) (c = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_261[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c+ _tmp_263[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -40862,7 +41014,7 @@ _tmp_261_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_261[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_263[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_target")); } _res = NULL; @@ -40871,9 +41023,9 @@ _tmp_261_rule(Parser *p) return _res; } -// _tmp_262: ',' star_target +// _tmp_264: ',' star_target static void * -_tmp_262_rule(Parser *p) +_tmp_264_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40889,7 +41041,7 @@ _tmp_262_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_262[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c> _tmp_264[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); Token * _literal; expr_ty c; if ( @@ -40898,7 +41050,7 @@ _tmp_262_rule(Parser *p) (c = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_262[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c+ _tmp_264[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -40908,7 +41060,7 @@ _tmp_262_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_262[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_264[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' star_target")); } _res = NULL; @@ -40917,10 +41069,10 @@ _tmp_262_rule(Parser *p) return _res; } -// _tmp_263: +// _tmp_265: // | ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs static void * -_tmp_263_rule(Parser *p) +_tmp_265_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40936,24 +41088,24 @@ _tmp_263_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_263[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs")); - asdl_seq * _gather_274_var; + D(fprintf(stderr, "%*c> _tmp_265[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs")); + asdl_seq * _gather_277_var; Token * _literal; asdl_seq* kwargs_var; if ( - (_gather_274_var = _gather_274_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ + (_gather_277_var = _gather_277_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ && (_literal = _PyPegen_expect_token(p, 12)) // token=',' && (kwargs_var = kwargs_rule(p)) // kwargs ) { - D(fprintf(stderr, "%*c+ _tmp_263[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs")); - _res = _PyPegen_dummy_name(p, _gather_274_var, _literal, kwargs_var); + D(fprintf(stderr, "%*c+ _tmp_265[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs")); + _res = _PyPegen_dummy_name(p, _gather_277_var, _literal, kwargs_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_263[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_265[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs")); } _res = NULL; @@ -40962,9 +41114,9 @@ _tmp_263_rule(Parser *p) return _res; } -// _tmp_264: star_targets '=' +// _tmp_266: star_targets '=' static void * -_tmp_264_rule(Parser *p) +_tmp_266_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -40980,7 +41132,7 @@ _tmp_264_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_264[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c> _tmp_266[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); Token * _literal; expr_ty star_targets_var; if ( @@ -40989,12 +41141,12 @@ _tmp_264_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_264[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c+ _tmp_266[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); _res = _PyPegen_dummy_name(p, star_targets_var, _literal); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_264[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_266[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_targets '='")); } _res = NULL; @@ -41003,9 +41155,9 @@ _tmp_264_rule(Parser *p) return _res; } -// _tmp_265: star_targets '=' +// _tmp_267: star_targets '=' static void * -_tmp_265_rule(Parser *p) +_tmp_267_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41021,7 +41173,7 @@ _tmp_265_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_265[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c> _tmp_267[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); Token * _literal; expr_ty star_targets_var; if ( @@ -41030,12 +41182,12 @@ _tmp_265_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_265[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c+ _tmp_267[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); _res = _PyPegen_dummy_name(p, star_targets_var, _literal); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_265[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_267[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); } _res = NULL; @@ -41044,9 +41196,9 @@ _tmp_265_rule(Parser *p) return _res; } -// _tmp_266: ')' | '**' +// _tmp_268: ')' | '**' static void * -_tmp_266_rule(Parser *p) +_tmp_268_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41062,18 +41214,18 @@ _tmp_266_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_266[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_268[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_266[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_268[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_266[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_268[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); } { // '**' @@ -41081,18 +41233,18 @@ _tmp_266_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_266[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c> _tmp_268[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - D(fprintf(stderr, "%*c+ _tmp_266[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c+ _tmp_268[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_266[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_268[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'**'")); } _res = NULL; @@ -41101,9 +41253,9 @@ _tmp_266_rule(Parser *p) return _res; } -// _tmp_267: ':' | '**' +// _tmp_269: ':' | '**' static void * -_tmp_267_rule(Parser *p) +_tmp_269_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41119,18 +41271,18 @@ _tmp_267_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_267[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_269[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_267[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_269[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_267[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_269[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); } { // '**' @@ -41138,18 +41290,18 @@ _tmp_267_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_267[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c> _tmp_269[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - D(fprintf(stderr, "%*c+ _tmp_267[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c+ _tmp_269[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_267[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_269[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'")); } _res = NULL; @@ -41158,9 +41310,9 @@ _tmp_267_rule(Parser *p) return _res; } -// _tmp_268: expression ['as' star_target] +// _tmp_270: expression ['as' star_target] static void * -_tmp_268_rule(Parser *p) +_tmp_270_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41176,22 +41328,22 @@ _tmp_268_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_268[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); + D(fprintf(stderr, "%*c> _tmp_270[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty expression_var; if ( (expression_var = expression_rule(p)) // expression && - (_opt_var = _tmp_276_rule(p), !p->error_indicator) // ['as' star_target] + (_opt_var = _tmp_279_rule(p), !p->error_indicator) // ['as' star_target] ) { - D(fprintf(stderr, "%*c+ _tmp_268[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); + D(fprintf(stderr, "%*c+ _tmp_270[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); _res = _PyPegen_dummy_name(p, expression_var, _opt_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_268[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_270[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression ['as' star_target]")); } _res = NULL; @@ -41200,9 +41352,9 @@ _tmp_268_rule(Parser *p) return _res; } -// _tmp_269: expressions ['as' star_target] +// _tmp_271: expressions ['as' star_target] static void * -_tmp_269_rule(Parser *p) +_tmp_271_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41218,22 +41370,22 @@ _tmp_269_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_269[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); + D(fprintf(stderr, "%*c> _tmp_271[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty expressions_var; if ( (expressions_var = expressions_rule(p)) // expressions && - (_opt_var = _tmp_277_rule(p), !p->error_indicator) // ['as' star_target] + (_opt_var = _tmp_280_rule(p), !p->error_indicator) // ['as' star_target] ) { - D(fprintf(stderr, "%*c+ _tmp_269[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); + D(fprintf(stderr, "%*c+ _tmp_271[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); _res = _PyPegen_dummy_name(p, expressions_var, _opt_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_269[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_271[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expressions ['as' star_target]")); } _res = NULL; @@ -41242,9 +41394,9 @@ _tmp_269_rule(Parser *p) return _res; } -// _tmp_270: expression ['as' star_target] +// _tmp_272: expression ['as' star_target] static void * -_tmp_270_rule(Parser *p) +_tmp_272_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41260,22 +41412,22 @@ _tmp_270_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_270[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); + D(fprintf(stderr, "%*c> _tmp_272[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty expression_var; if ( (expression_var = expression_rule(p)) // expression && - (_opt_var = _tmp_278_rule(p), !p->error_indicator) // ['as' star_target] + (_opt_var = _tmp_281_rule(p), !p->error_indicator) // ['as' star_target] ) { - D(fprintf(stderr, "%*c+ _tmp_270[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); + D(fprintf(stderr, "%*c+ _tmp_272[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]")); _res = _PyPegen_dummy_name(p, expression_var, _opt_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_270[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_272[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression ['as' star_target]")); } _res = NULL; @@ -41284,9 +41436,9 @@ _tmp_270_rule(Parser *p) return _res; } -// _tmp_271: expressions ['as' star_target] +// _tmp_273: expressions ['as' star_target] static void * -_tmp_271_rule(Parser *p) +_tmp_273_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41302,22 +41454,22 @@ _tmp_271_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_271[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); + D(fprintf(stderr, "%*c> _tmp_273[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty expressions_var; if ( (expressions_var = expressions_rule(p)) // expressions && - (_opt_var = _tmp_279_rule(p), !p->error_indicator) // ['as' star_target] + (_opt_var = _tmp_282_rule(p), !p->error_indicator) // ['as' star_target] ) { - D(fprintf(stderr, "%*c+ _tmp_271[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); + D(fprintf(stderr, "%*c+ _tmp_273[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]")); _res = _PyPegen_dummy_name(p, expressions_var, _opt_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_271[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_273[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expressions ['as' star_target]")); } _res = NULL; @@ -41326,9 +41478,9 @@ _tmp_271_rule(Parser *p) return _res; } -// _tmp_272: 'as' NAME +// _tmp_274: 'as' NAME static void * -_tmp_272_rule(Parser *p) +_tmp_274_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41344,21 +41496,21 @@ _tmp_272_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_272[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_274[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 656)) // token='as' + (_keyword = _PyPegen_expect_token(p, 659)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_272[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_274[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = _PyPegen_dummy_name(p, _keyword, name_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_272[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_274[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -41367,9 +41519,50 @@ _tmp_272_rule(Parser *p) return _res; } -// _tmp_273: assignment_expression | expression !':=' +// _tmp_275: ',' bitwise_or static void * -_tmp_273_rule(Parser *p) +_tmp_275_rule(Parser *p) +{ + if (p->level++ == MAXSTACK) { + _Pypegen_stack_overflow(p); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // ',' bitwise_or + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_275[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' bitwise_or")); + Token * _literal; + expr_ty bitwise_or_var; + if ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or + ) + { + D(fprintf(stderr, "%*c+ _tmp_275[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' bitwise_or")); + _res = _PyPegen_dummy_name(p, _literal, bitwise_or_var); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_275[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' bitwise_or")); + } + _res = NULL; + done: + p->level--; + return _res; +} + +// _tmp_276: assignment_expression | expression !':=' +static void * +_tmp_276_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41385,18 +41578,18 @@ _tmp_273_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_273[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression")); + D(fprintf(stderr, "%*c> _tmp_276[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression")); expr_ty assignment_expression_var; if ( (assignment_expression_var = assignment_expression_rule(p)) // assignment_expression ) { - D(fprintf(stderr, "%*c+ _tmp_273[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression")); + D(fprintf(stderr, "%*c+ _tmp_276[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression")); _res = assignment_expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_273[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_276[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "assignment_expression")); } { // expression !':=' @@ -41404,7 +41597,7 @@ _tmp_273_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_273[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='")); + D(fprintf(stderr, "%*c> _tmp_276[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='")); expr_ty expression_var; if ( (expression_var = expression_rule(p)) // expression @@ -41412,12 +41605,12 @@ _tmp_273_rule(Parser *p) _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) // token=':=' ) { - D(fprintf(stderr, "%*c+ _tmp_273[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='")); + D(fprintf(stderr, "%*c+ _tmp_276[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='")); _res = expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_273[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_276[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression !':='")); } _res = NULL; @@ -41426,9 +41619,9 @@ _tmp_273_rule(Parser *p) return _res; } -// _loop0_275: ',' (starred_expression | (assignment_expression | expression !':=') !'=') +// _loop0_278: ',' (starred_expression | (assignment_expression | expression !':=') !'=') static asdl_seq * -_loop0_275_rule(Parser *p) +_loop0_278_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41453,13 +41646,13 @@ _loop0_275_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _loop0_275[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (starred_expression | (assignment_expression | expression !':=') !'=')")); + D(fprintf(stderr, "%*c> _loop0_278[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (starred_expression | (assignment_expression | expression !':=') !'=')")); Token * _literal; void *elem; while ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _tmp_280_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' + (elem = _tmp_283_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' ) { _res = elem; @@ -41485,7 +41678,7 @@ _loop0_275_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_275[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_278[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (starred_expression | (assignment_expression | expression !':=') !'=')")); } asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena); @@ -41502,10 +41695,10 @@ _loop0_275_rule(Parser *p) return _seq; } -// _gather_274: -// | (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_275 +// _gather_277: +// | (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_278 static asdl_seq * -_gather_274_rule(Parser *p) +_gather_277_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41516,27 +41709,27 @@ _gather_274_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_275 + { // (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_278 if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _gather_274[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_275")); + D(fprintf(stderr, "%*c> _gather_277[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_278")); void *elem; asdl_seq * seq; if ( - (elem = _tmp_280_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' + (elem = _tmp_283_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'=' && - (seq = _loop0_275_rule(p)) // _loop0_275 + (seq = _loop0_278_rule(p)) // _loop0_278 ) { - D(fprintf(stderr, "%*c+ _gather_274[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_275")); + D(fprintf(stderr, "%*c+ _gather_277[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_278")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_274[%d-%d]: %s failed!\n", p->level, ' ', - 
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_275")); + D(fprintf(stderr, "%*c%s _gather_277[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_278")); } _res = NULL; done: @@ -41544,9 +41737,9 @@ _gather_274_rule(Parser *p) return _res; } -// _tmp_276: 'as' star_target +// _tmp_279: 'as' star_target static void * -_tmp_276_rule(Parser *p) +_tmp_279_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41562,21 +41755,21 @@ _tmp_276_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_276[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c> _tmp_279[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 656)) // token='as' + (_keyword = _PyPegen_expect_token(p, 659)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_276[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c+ _tmp_279[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); _res = _PyPegen_dummy_name(p, _keyword, star_target_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_276[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_279[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' star_target")); } _res = NULL; @@ -41585,9 +41778,9 @@ _tmp_276_rule(Parser *p) return _res; } -// _tmp_277: 'as' star_target +// _tmp_280: 'as' star_target static void * -_tmp_277_rule(Parser *p) +_tmp_280_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41603,21 +41796,21 @@ _tmp_277_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_277[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c> _tmp_280[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 656)) // token='as' + (_keyword = _PyPegen_expect_token(p, 659)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_277[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c+ _tmp_280[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); _res = _PyPegen_dummy_name(p, _keyword, star_target_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_277[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_280[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'as' star_target")); } _res = NULL; @@ -41626,9 +41819,9 @@ _tmp_277_rule(Parser *p) return _res; } -// _tmp_278: 'as' star_target +// _tmp_281: 'as' star_target static void * -_tmp_278_rule(Parser *p) +_tmp_281_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41644,21 +41837,21 @@ _tmp_278_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_278[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c> _tmp_281[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 656)) // token='as' + (_keyword = _PyPegen_expect_token(p, 659)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_278[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c+ _tmp_281[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); _res = _PyPegen_dummy_name(p, _keyword, star_target_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_278[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_281[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' star_target")); } _res = NULL; @@ -41667,9 +41860,9 @@ _tmp_278_rule(Parser *p) return _res; } -// _tmp_279: 'as' star_target +// _tmp_282: 'as' star_target static void * -_tmp_279_rule(Parser *p) +_tmp_282_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41685,21 +41878,21 @@ _tmp_279_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_279[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c> _tmp_282[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target")); Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 656)) // token='as' + (_keyword = _PyPegen_expect_token(p, 659)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_279[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); + D(fprintf(stderr, "%*c+ _tmp_282[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target")); _res = _PyPegen_dummy_name(p, _keyword, star_target_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_279[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_282[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'as' star_target")); } _res = NULL; @@ -41708,9 +41901,9 @@ _tmp_279_rule(Parser *p) return _res; } -// _tmp_280: starred_expression | (assignment_expression | expression !':=') !'=' +// _tmp_283: starred_expression | (assignment_expression | expression !':=') !'=' static void * -_tmp_280_rule(Parser *p) +_tmp_283_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41726,18 +41919,18 @@ _tmp_280_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_280[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); + D(fprintf(stderr, "%*c> _tmp_283[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); expr_ty starred_expression_var; if ( (starred_expression_var = starred_expression_rule(p)) // starred_expression ) { - D(fprintf(stderr, "%*c+ _tmp_280[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); + D(fprintf(stderr, "%*c+ _tmp_283[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); _res = starred_expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_280[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_283[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "starred_expression")); } { // (assignment_expression | expression !':=') !'=' @@ -41745,20 +41938,20 @@ _tmp_280_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_280[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); - void *_tmp_281_var; + D(fprintf(stderr, "%*c> _tmp_283[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); + void *_tmp_284_var; if ( - (_tmp_281_var = _tmp_281_rule(p)) // assignment_expression | expression !':=' + (_tmp_284_var = _tmp_284_rule(p)) // assignment_expression | expression !':=' && _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 22) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_280[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); - _res = _tmp_281_var; + D(fprintf(stderr, "%*c+ _tmp_283[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='")); + _res = _tmp_284_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_280[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_283[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(assignment_expression | expression !':=') !'='")); } _res = NULL; @@ -41767,9 +41960,9 @@ _tmp_280_rule(Parser *p) return _res; } -// _tmp_281: assignment_expression | expression !':=' +// _tmp_284: assignment_expression | expression !':=' static void * -_tmp_281_rule(Parser *p) +_tmp_284_rule(Parser *p) { if (p->level++ == MAXSTACK) { _Pypegen_stack_overflow(p); @@ -41785,18 +41978,18 @@ _tmp_281_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_281[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression")); + D(fprintf(stderr, "%*c> _tmp_284[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression")); expr_ty assignment_expression_var; if ( (assignment_expression_var = assignment_expression_rule(p)) // assignment_expression ) { - D(fprintf(stderr, "%*c+ _tmp_281[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression")); + D(fprintf(stderr, "%*c+ _tmp_284[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression")); _res = assignment_expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_281[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_284[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "assignment_expression")); } { // expression !':=' @@ -41804,7 +41997,7 @@ _tmp_281_rule(Parser *p) p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_281[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='")); + D(fprintf(stderr, "%*c> _tmp_284[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='")); expr_ty expression_var; if ( (expression_var = expression_rule(p)) // expression @@ -41812,12 +42005,12 @@ _tmp_281_rule(Parser *p) _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) // token=':=' ) { - D(fprintf(stderr, "%*c+ _tmp_281[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='")); + D(fprintf(stderr, "%*c+ _tmp_284[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='")); _res = expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_281[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_284[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression !':='")); } _res = NULL; diff --git a/Python/Python-ast.c b/Python/Python-ast.c index 699e1c157c591c..d77e986ba067a3 100644 --- a/Python/Python-ast.c +++ b/Python/Python-ast.c @@ -13149,15 +13149,14 @@ PyObject* PyAST_mod2obj(mod_ty t) int starting_recursion_depth; /* Be careful here to prevent overflow. */ - int COMPILER_STACK_FRAME_SCALE = 2; PyThreadState *tstate = _PyThreadState_GET(); if (!tstate) { return NULL; } struct validator vstate; - vstate.recursion_limit = Py_C_RECURSION_LIMIT * COMPILER_STACK_FRAME_SCALE; + vstate.recursion_limit = Py_C_RECURSION_LIMIT; int recursion_depth = Py_C_RECURSION_LIMIT - tstate->c_recursion_remaining; - starting_recursion_depth = recursion_depth * COMPILER_STACK_FRAME_SCALE; + starting_recursion_depth = recursion_depth; vstate.recursion_depth = starting_recursion_depth; PyObject *result = ast2obj_mod(state, &vstate, t); diff --git a/Python/ast.c b/Python/ast.c index 5f46d4149c2ed0..71b09d889f17c1 100644 --- a/Python/ast.c +++ b/Python/ast.c @@ -1037,10 +1037,6 @@ validate_type_params(struct validator *state, asdl_type_param_seq *tps) return 1; } - -/* See comments in symtable.c. 
*/ -#define COMPILER_STACK_FRAME_SCALE 2 - int _PyAST_Validate(mod_ty mod) { @@ -1057,9 +1053,9 @@ _PyAST_Validate(mod_ty mod) } /* Be careful here to prevent overflow. */ int recursion_depth = Py_C_RECURSION_LIMIT - tstate->c_recursion_remaining; - starting_recursion_depth = recursion_depth * COMPILER_STACK_FRAME_SCALE; + starting_recursion_depth = recursion_depth; state.recursion_depth = starting_recursion_depth; - state.recursion_limit = Py_C_RECURSION_LIMIT * COMPILER_STACK_FRAME_SCALE; + state.recursion_limit = Py_C_RECURSION_LIMIT; switch (mod->kind) { case Module_kind: diff --git a/Python/ast_opt.c b/Python/ast_opt.c index 04d7ae6eaafbc0..41e906c66e8eec 100644 --- a/Python/ast_opt.c +++ b/Python/ast_opt.c @@ -1100,9 +1100,6 @@ astfold_type_param(type_param_ty node_, PyArena *ctx_, _PyASTOptimizeState *stat #undef CALL_OPT #undef CALL_SEQ -/* See comments in symtable.c. */ -#define COMPILER_STACK_FRAME_SCALE 2 - int _PyAST_Optimize(mod_ty mod, PyArena *arena, int optimize, int ff_features) { @@ -1120,9 +1117,9 @@ _PyAST_Optimize(mod_ty mod, PyArena *arena, int optimize, int ff_features) } /* Be careful here to prevent overflow. */ int recursion_depth = Py_C_RECURSION_LIMIT - tstate->c_recursion_remaining; - starting_recursion_depth = recursion_depth * COMPILER_STACK_FRAME_SCALE; + starting_recursion_depth = recursion_depth; state.recursion_depth = starting_recursion_depth; - state.recursion_limit = Py_C_RECURSION_LIMIT * COMPILER_STACK_FRAME_SCALE; + state.recursion_limit = Py_C_RECURSION_LIMIT; int ret = astfold_mod(mod, arena, &state); assert(ret || PyErr_Occurred()); diff --git a/Python/bytecodes.c b/Python/bytecodes.c index e1a6a256fbdf96..c48f0a17c60fb1 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -68,7 +68,7 @@ static size_t jump; static uint16_t invert, counter, index, hint; #define unused 0 // Used in a macro def, can't be static static uint32_t type_version; -static _PyUOpExecutorObject *current_executor; +static _PyExecutorObject *current_executor; static PyObject * dummy_func( @@ -208,7 +208,7 @@ dummy_func( Py_INCREF(value); } - inst(LOAD_FAST, (-- value)) { + pure inst(LOAD_FAST, (-- value)) { value = GETLOCAL(oparg); assert(value != NULL); Py_INCREF(value); @@ -229,7 +229,7 @@ dummy_func( Py_INCREF(value2); } - inst(LOAD_CONST, (-- value)) { + pure inst(LOAD_CONST, (-- value)) { value = GETITEM(FRAME_CO_CONSTS, oparg); Py_INCREF(value); } @@ -257,11 +257,11 @@ dummy_func( SETLOCAL(oparg2, value2); } - inst(POP_TOP, (value --)) { + pure inst(POP_TOP, (value --)) { DECREF_INPUTS(); } - inst(PUSH_NULL, (-- res)) { + pure inst(PUSH_NULL, (-- res)) { res = NULL; } @@ -281,7 +281,7 @@ dummy_func( DECREF_INPUTS(); } - inst(END_SEND, (receiver, value -- value)) { + pure inst(END_SEND, (receiver, value -- value)) { Py_DECREF(receiver); } @@ -303,7 +303,7 @@ dummy_func( ERROR_IF(res == NULL, error); } - inst(UNARY_NOT, (value -- res)) { + pure inst(UNARY_NOT, (value -- res)) { assert(PyBool_Check(value)); res = Py_IsFalse(value) ? Py_True : Py_False; } @@ -411,12 +411,12 @@ dummy_func( // BINARY_OP_INPLACE_ADD_UNICODE, // See comments at that opcode. 
}; - op(_GUARD_BOTH_INT, (left, right -- left, right)) { + op(_GUARD_BOTH_INT, (left, right -- left: &PYLONG_TYPE, right: &PYLONG_TYPE)) { DEOPT_IF(!PyLong_CheckExact(left)); DEOPT_IF(!PyLong_CheckExact(right)); } - op(_BINARY_OP_MULTIPLY_INT, (left, right -- res)) { + pure op(_BINARY_OP_MULTIPLY_INT, (left, right -- res: &PYLONG_TYPE)) { STAT_INC(BINARY_OP, hit); res = _PyLong_Multiply((PyLongObject *)left, (PyLongObject *)right); _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); @@ -424,7 +424,7 @@ dummy_func( ERROR_IF(res == NULL, error); } - op(_BINARY_OP_ADD_INT, (left, right -- res)) { + pure op(_BINARY_OP_ADD_INT, (left, right -- res: &PYLONG_TYPE)) { STAT_INC(BINARY_OP, hit); res = _PyLong_Add((PyLongObject *)left, (PyLongObject *)right); _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); @@ -432,7 +432,7 @@ dummy_func( ERROR_IF(res == NULL, error); } - op(_BINARY_OP_SUBTRACT_INT, (left, right -- res)) { + pure op(_BINARY_OP_SUBTRACT_INT, (left, right -- res: &PYLONG_TYPE)) { STAT_INC(BINARY_OP, hit); res = _PyLong_Subtract((PyLongObject *)left, (PyLongObject *)right); _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); @@ -447,12 +447,12 @@ dummy_func( macro(BINARY_OP_SUBTRACT_INT) = _GUARD_BOTH_INT + unused/1 + _BINARY_OP_SUBTRACT_INT; - op(_GUARD_BOTH_FLOAT, (left, right -- left, right)) { + op(_GUARD_BOTH_FLOAT, (left, right -- left: &PYFLOAT_TYPE, right: &PYFLOAT_TYPE)) { DEOPT_IF(!PyFloat_CheckExact(left)); DEOPT_IF(!PyFloat_CheckExact(right)); } - op(_BINARY_OP_MULTIPLY_FLOAT, (left, right -- res)) { + pure op(_BINARY_OP_MULTIPLY_FLOAT, (left, right -- res: &PYFLOAT_TYPE)) { STAT_INC(BINARY_OP, hit); double dres = ((PyFloatObject *)left)->ob_fval * @@ -460,7 +460,7 @@ dummy_func( DECREF_INPUTS_AND_REUSE_FLOAT(left, right, dres, res); } - op(_BINARY_OP_ADD_FLOAT, (left, right -- res)) { + pure op(_BINARY_OP_ADD_FLOAT, (left, right -- res: &PYFLOAT_TYPE)) { STAT_INC(BINARY_OP, hit); double dres = ((PyFloatObject *)left)->ob_fval + @@ -468,7 +468,7 @@ dummy_func( DECREF_INPUTS_AND_REUSE_FLOAT(left, right, dres, res); } - op(_BINARY_OP_SUBTRACT_FLOAT, (left, right -- res)) { + pure op(_BINARY_OP_SUBTRACT_FLOAT, (left, right -- res: &PYFLOAT_TYPE)) { STAT_INC(BINARY_OP, hit); double dres = ((PyFloatObject *)left)->ob_fval - @@ -483,12 +483,12 @@ dummy_func( macro(BINARY_OP_SUBTRACT_FLOAT) = _GUARD_BOTH_FLOAT + unused/1 + _BINARY_OP_SUBTRACT_FLOAT; - op(_GUARD_BOTH_UNICODE, (left, right -- left, right)) { + op(_GUARD_BOTH_UNICODE, (left, right -- left: &PYUNICODE_TYPE, right: &PYUNICODE_TYPE)) { DEOPT_IF(!PyUnicode_CheckExact(left)); DEOPT_IF(!PyUnicode_CheckExact(right)); } - op(_BINARY_OP_ADD_UNICODE, (left, right -- res)) { + pure op(_BINARY_OP_ADD_UNICODE, (left, right -- res: &PYUNICODE_TYPE)) { STAT_INC(BINARY_OP, hit); res = PyUnicode_Concat(left, right); _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc); @@ -805,7 +805,8 @@ dummy_func( #if TIER_ONE assert(frame != &entry_frame); #endif - STORE_SP(); + SYNC_SP(); + _PyFrame_SetStackPointer(frame, stack_pointer); assert(EMPTY()); _Py_LeaveRecursiveCallPy(tstate); // GH-99729: We need to unlink the frame *before* clearing it: @@ -1900,7 +1901,7 @@ dummy_func( LOAD_ATTR, }; - op(_GUARD_TYPE_VERSION, (type_version/2, owner -- owner)) { + op(_GUARD_TYPE_VERSION, (type_version/2, owner -- owner: &(GUARD_TYPE_VERSION_TYPE + type_version))) { PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version); @@ -2081,7 +2082,7 @@ dummy_func( 
DISPATCH_INLINED(new_frame); } - op(_GUARD_DORV_VALUES, (owner -- owner)) { + op(_GUARD_DORV_VALUES, (owner -- owner: &GUARD_DORV_VALUES_TYPE)) { assert(Py_TYPE(owner)->tp_flags & Py_TPFLAGS_MANAGED_DICT); PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); DEOPT_IF(!_PyDictOrValues_IsValues(dorv)); @@ -2325,12 +2326,18 @@ dummy_func( // Double-check that the opcode isn't instrumented or something: if (ucounter > threshold && this_instr->op.code == JUMP_BACKWARD) { OPT_STAT_INC(attempts); - int optimized = _PyOptimizer_BackEdge(frame, this_instr, next_instr, stack_pointer); + _Py_CODEUNIT *start = this_instr; + /* Back up over EXTENDED_ARGs so optimizer sees the whole instruction */ + while (oparg > 255) { + oparg >>= 8; + start--; + } + int optimized = _PyOptimizer_Optimize(frame, start, stack_pointer); ERROR_IF(optimized < 0, error); if (optimized) { // Rewind and enter the executor: - assert(this_instr->op.code == ENTER_EXECUTOR); - next_instr = this_instr; + assert(start->op.code == ENTER_EXECUTOR); + next_instr = start; this_instr[1].cache &= ((1 << OPTIMIZER_BITS_IN_COUNTER) - 1); } else { @@ -2363,25 +2370,19 @@ dummy_func( CHECK_EVAL_BREAKER(); PyCodeObject *code = _PyFrame_GetCode(frame); - _PyExecutorObject *executor = (_PyExecutorObject *)code->co_executors->executors[oparg&255]; + _PyExecutorObject *executor = code->co_executors->executors[oparg & 255]; if (executor->vm_data.valid) { Py_INCREF(executor); - if (executor->execute == _PyUOpExecute) { - current_executor = (_PyUOpExecutorObject *)executor; - GOTO_TIER_TWO(); - } - next_instr = executor->execute(executor, frame, stack_pointer); - frame = tstate->current_frame; - if (next_instr == NULL) { - goto resume_with_error; - } - stack_pointer = _PyFrame_GetStackPointer(frame); + current_executor = executor; + GOTO_TIER_TWO(); } else { - code->co_executors->executors[oparg & 255] = NULL; + /* ENTER_EXECUTOR will be the first code unit of the instruction */ + assert(oparg < 256); + code->co_executors->executors[oparg] = NULL; opcode = this_instr->op.code = executor->vm_data.opcode; this_instr->op.arg = executor->vm_data.oparg; - oparg = (oparg & (~255)) | executor->vm_data.oparg; + oparg = executor->vm_data.oparg; Py_DECREF(executor); next_instr = this_instr; DISPATCH_GOTO(); @@ -2721,7 +2722,7 @@ dummy_func( DEOPT_IF(r->len <= 0); } - op(_ITER_NEXT_RANGE, (iter -- iter, next)) { + op(_ITER_NEXT_RANGE, (iter -- iter, next: &PYLONG_TYPE)) { _PyRangeIterObject *r = (_PyRangeIterObject *)iter; assert(Py_TYPE(r) == &PyRangeIter_Type); assert(r->len > 0); @@ -2879,13 +2880,13 @@ dummy_func( exc_info->exc_value = Py_NewRef(new_exc); } - op(_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT, (owner -- owner)) { + op(_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT, (owner -- owner: &GUARD_DORV_VALUES_INST_ATTR_FROM_DICT_TYPE)) { assert(Py_TYPE(owner)->tp_flags & Py_TPFLAGS_MANAGED_DICT); PyDictOrValues *dorv = _PyObject_DictOrValuesPointer(owner); DEOPT_IF(!_PyDictOrValues_IsValues(*dorv) && !_PyObject_MakeInstanceAttributesFromDict(owner, dorv)); } - op(_GUARD_KEYS_VERSION, (keys_version/2, owner -- owner)) { + op(_GUARD_KEYS_VERSION, (keys_version/2, owner -- owner: &(GUARD_KEYS_VERSION_TYPE + keys_version))) { PyTypeObject *owner_cls = Py_TYPE(owner); PyHeapTypeObject *owner_heap_type = (PyHeapTypeObject *)owner_cls; DEOPT_IF(owner_heap_type->ht_cached_keys->dk_version != keys_version); @@ -3100,7 +3101,7 @@ dummy_func( macro(CALL) = _SPECIALIZE_CALL + unused/2 + _CALL; - op(_CHECK_CALL_BOUND_METHOD_EXACT_ARGS, (callable, null, unused[oparg] -- 
callable, null, unused[oparg])) { + op(_CHECK_CALL_BOUND_METHOD_EXACT_ARGS, (callable, null, unused[oparg] -- callable: &PYMETHOD_TYPE, null: &NULL_TYPE, unused[oparg])) { DEOPT_IF(null != NULL); DEOPT_IF(Py_TYPE(callable) != &PyMethod_Type); } @@ -3118,7 +3119,7 @@ dummy_func( DEOPT_IF(tstate->interp->eval_frame); } - op(_CHECK_FUNCTION_EXACT_ARGS, (func_version/2, callable, self_or_null, unused[oparg] -- callable, self_or_null, unused[oparg])) { + op(_CHECK_FUNCTION_EXACT_ARGS, (func_version/2, callable, self_or_null, unused[oparg] -- callable: &(PYFUNCTION_TYPE_VERSION_TYPE + func_version), self_or_null, unused[oparg])) { DEOPT_IF(!PyFunction_Check(callable)); PyFunctionObject *func = (PyFunctionObject *)callable; DEOPT_IF(func->func_version != func_version); @@ -3133,7 +3134,7 @@ dummy_func( DEOPT_IF(tstate->py_recursion_remaining <= 1); } - op(_INIT_CALL_PY_EXACT_ARGS, (callable, self_or_null, args[oparg] -- new_frame: _PyInterpreterFrame*)) { + pure op(_INIT_CALL_PY_EXACT_ARGS, (callable, self_or_null, args[oparg] -- new_frame: _PyInterpreterFrame*)) { int argcount = oparg; if (self_or_null != NULL) { args--; @@ -3154,7 +3155,8 @@ dummy_func( // Write it out explicitly because it's subtly different. // Eventually this should be the only occurrence of this code. assert(tstate->interp->eval_frame == NULL); - STORE_SP(); + SYNC_SP(); + _PyFrame_SetStackPointer(frame, stack_pointer); new_frame->previous = frame; CALL_STAT_INC(inlined_py_calls); frame = tstate->current_frame = new_frame; @@ -3877,7 +3879,7 @@ dummy_func( ERROR_IF(res == NULL, error); } - inst(COPY, (bottom, unused[oparg-1] -- bottom, unused[oparg-1], top)) { + pure inst(COPY, (bottom, unused[oparg-1] -- bottom, unused[oparg-1], top)) { assert(oparg > 0); top = Py_NewRef(bottom); } @@ -3906,7 +3908,7 @@ dummy_func( macro(BINARY_OP) = _SPECIALIZE_BINARY_OP + _BINARY_OP; - inst(SWAP, (bottom, unused[oparg-2], top -- + pure inst(SWAP, (bottom, unused[oparg-2], top -- top, unused[oparg-2], bottom)) { assert(oparg >= 2); } @@ -4013,20 +4015,27 @@ dummy_func( ///////// Tier-2 only opcodes ///////// op (_GUARD_IS_TRUE_POP, (flag -- )) { - DEOPT_IF(Py_IsFalse(flag)); + SYNC_SP(); + DEOPT_IF(!Py_IsTrue(flag)); assert(Py_IsTrue(flag)); } op (_GUARD_IS_FALSE_POP, (flag -- )) { - DEOPT_IF(Py_IsTrue(flag)); + SYNC_SP(); + DEOPT_IF(!Py_IsFalse(flag)); assert(Py_IsFalse(flag)); } op (_GUARD_IS_NONE_POP, (val -- )) { - DEOPT_IF(!Py_IsNone(val)); + SYNC_SP(); + if (!Py_IsNone(val)) { + Py_DECREF(val); + DEOPT_IF(1); + } } op (_GUARD_IS_NOT_NONE_POP, (val -- )) { + SYNC_SP(); DEOPT_IF(Py_IsNone(val)); Py_DECREF(val); } @@ -4056,14 +4065,19 @@ dummy_func( DEOPT_IF(1); } - op(_INSERT, (unused[oparg], top -- top, unused[oparg])) { - // Inserts TOS at position specified by oparg; - memmove(&stack_pointer[-1 - oparg], &stack_pointer[-oparg], oparg * sizeof(stack_pointer[0])); - } - op(_CHECK_VALIDITY, (--)) { TIER_TWO_ONLY - DEOPT_IF(!current_executor->base.vm_data.valid); + DEOPT_IF(!current_executor->vm_data.valid); + } + + op(_LOAD_CONST_INLINE_BORROW, (ptr/4 -- value)) { + value = ptr; + } + + /* Internal -- for testing executors */ + op(_INTERNAL_INCREMENT_OPT_COUNTER, (opt --)) { + _PyCounterOptimizerObject *exe = (_PyCounterOptimizerObject *)opt; + exe->count++; } diff --git a/Python/ceval.c b/Python/ceval.c index 1fea9747488102..49388cd20377c0 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -25,7 +25,6 @@ #include "pycore_tuple.h" // _PyTuple_ITEMS() #include "pycore_typeobject.h" // _PySuper_Lookup() #include "pycore_uop_ids.h" // 
Uops -#include "pycore_uops.h" // _PyUOpExecutorObject #include "pycore_pyerrors.h" #include "pycore_dict.h" @@ -739,7 +738,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int } /* State shared between Tier 1 and Tier 2 interpreter */ - _PyUOpExecutorObject *current_executor = NULL; + _PyExecutorObject *current_executor = NULL; /* Local "register" variables. * These are cached values from the frame and code object. */ @@ -2111,6 +2110,9 @@ do_monitor_exc(PyThreadState *tstate, _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, int event) { assert(event < _PY_MONITORING_UNGROUPED_EVENTS); + if (_PyFrame_GetCode(frame)->co_flags & CO_NO_MONITORING_EVENTS) { + return 0; + } PyObject *exc = PyErr_GetRaisedException(); assert(exc != NULL); int err = _Py_call_instrumentation_arg(tstate, event, frame, instr, exc); diff --git a/Python/ceval_macros.h b/Python/ceval_macros.h index a3606b17b71c62..c2550f53ad6eaa 100644 --- a/Python/ceval_macros.h +++ b/Python/ceval_macros.h @@ -382,9 +382,6 @@ static inline void _Py_LeaveRecursiveCallPy(PyThreadState *tstate) { /* There's no STORE_IP(), it's inlined by the code generator. */ -#define STORE_SP() \ -_PyFrame_SetStackPointer(frame, stack_pointer) - #define LOAD_SP() \ stack_pointer = _PyFrame_GetStackPointer(frame); diff --git a/Python/clinic/marshal.c.h b/Python/clinic/marshal.c.h index e6b0f1999a41c5..c19a3ed5050ed3 100644 --- a/Python/clinic/marshal.c.h +++ b/Python/clinic/marshal.c.h @@ -2,10 +2,14 @@ preserve [clinic start generated code]*/ -#include "pycore_modsupport.h" // _PyArg_CheckPositional() +#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) +# include "pycore_gc.h" // PyGC_Head +# include "pycore_runtime.h" // _Py_ID() +#endif +#include "pycore_modsupport.h" // _PyArg_UnpackKeywords() PyDoc_STRVAR(marshal_dump__doc__, -"dump($module, value, file, version=version, /)\n" +"dump($module, value, file, version=version, /, *, allow_code=True)\n" "--\n" "\n" "Write the value on the open file.\n" @@ -16,53 +20,95 @@ PyDoc_STRVAR(marshal_dump__doc__, " Must be a writeable binary file.\n" " version\n" " Indicates the data format that dump should use.\n" +" allow_code\n" +" Allow to write code objects.\n" "\n" "If the value has (or contains an object that has) an unsupported type, a\n" "ValueError exception is raised - but garbage data will also be written\n" "to the file. 
The object will not be properly read back by load()."); #define MARSHAL_DUMP_METHODDEF \ - {"dump", _PyCFunction_CAST(marshal_dump), METH_FASTCALL, marshal_dump__doc__}, + {"dump", _PyCFunction_CAST(marshal_dump), METH_FASTCALL|METH_KEYWORDS, marshal_dump__doc__}, static PyObject * marshal_dump_impl(PyObject *module, PyObject *value, PyObject *file, - int version); + int version, int allow_code); static PyObject * -marshal_dump(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +marshal_dump(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(allow_code), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "", "", "allow_code", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "dump", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[4]; + Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 2; PyObject *value; PyObject *file; int version = Py_MARSHAL_VERSION; + int allow_code = 1; - if (!_PyArg_CheckPositional("dump", nargs, 2, 3)) { + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 3, 0, argsbuf); + if (!args) { goto exit; } value = args[0]; file = args[1]; if (nargs < 3) { - goto skip_optional; + goto skip_optional_posonly; } + noptargs--; version = PyLong_AsInt(args[2]); if (version == -1 && PyErr_Occurred()) { goto exit; } -skip_optional: - return_value = marshal_dump_impl(module, value, file, version); +skip_optional_posonly: + if (!noptargs) { + goto skip_optional_kwonly; + } + allow_code = PyObject_IsTrue(args[3]); + if (allow_code < 0) { + goto exit; + } +skip_optional_kwonly: + return_value = marshal_dump_impl(module, value, file, version, allow_code); exit: return return_value; } PyDoc_STRVAR(marshal_load__doc__, -"load($module, file, /)\n" +"load($module, file, /, *, allow_code=True)\n" "--\n" "\n" "Read one value from the open file and return it.\n" "\n" " file\n" " Must be readable binary file.\n" +" allow_code\n" +" Allow to load code objects.\n" "\n" "If no valid value is read (e.g. 
because the data has a different Python\n" "version\'s incompatible marshal format), raise EOFError, ValueError or\n" @@ -72,10 +118,66 @@ PyDoc_STRVAR(marshal_load__doc__, "dump(), load() will substitute None for the unmarshallable type."); #define MARSHAL_LOAD_METHODDEF \ - {"load", (PyCFunction)marshal_load, METH_O, marshal_load__doc__}, + {"load", _PyCFunction_CAST(marshal_load), METH_FASTCALL|METH_KEYWORDS, marshal_load__doc__}, + +static PyObject * +marshal_load_impl(PyObject *module, PyObject *file, int allow_code); + +static PyObject * +marshal_load(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(allow_code), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "allow_code", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "load", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; + PyObject *file; + int allow_code = 1; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + file = args[0]; + if (!noptargs) { + goto skip_optional_kwonly; + } + allow_code = PyObject_IsTrue(args[1]); + if (allow_code < 0) { + goto exit; + } +skip_optional_kwonly: + return_value = marshal_load_impl(module, file, allow_code); + +exit: + return return_value; +} PyDoc_STRVAR(marshal_dumps__doc__, -"dumps($module, value, version=version, /)\n" +"dumps($module, value, version=version, /, *, allow_code=True)\n" "--\n" "\n" "Return the bytes object that would be written to a file by dump(value, file).\n" @@ -84,66 +186,150 @@ PyDoc_STRVAR(marshal_dumps__doc__, " Must be a supported type.\n" " version\n" " Indicates the data format that dumps should use.\n" +" allow_code\n" +" Allow to write code objects.\n" "\n" "Raise a ValueError exception if value has (or contains an object that has) an\n" "unsupported type."); #define MARSHAL_DUMPS_METHODDEF \ - {"dumps", _PyCFunction_CAST(marshal_dumps), METH_FASTCALL, marshal_dumps__doc__}, + {"dumps", _PyCFunction_CAST(marshal_dumps), METH_FASTCALL|METH_KEYWORDS, marshal_dumps__doc__}, static PyObject * -marshal_dumps_impl(PyObject *module, PyObject *value, int version); +marshal_dumps_impl(PyObject *module, PyObject *value, int version, + int allow_code); static PyObject * -marshal_dumps(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +marshal_dumps(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(allow_code), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = 
{"", "", "allow_code", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "dumps", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[3]; + Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; PyObject *value; int version = Py_MARSHAL_VERSION; + int allow_code = 1; - if (!_PyArg_CheckPositional("dumps", nargs, 1, 2)) { + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 2, 0, argsbuf); + if (!args) { goto exit; } value = args[0]; if (nargs < 2) { - goto skip_optional; + goto skip_optional_posonly; } + noptargs--; version = PyLong_AsInt(args[1]); if (version == -1 && PyErr_Occurred()) { goto exit; } -skip_optional: - return_value = marshal_dumps_impl(module, value, version); +skip_optional_posonly: + if (!noptargs) { + goto skip_optional_kwonly; + } + allow_code = PyObject_IsTrue(args[2]); + if (allow_code < 0) { + goto exit; + } +skip_optional_kwonly: + return_value = marshal_dumps_impl(module, value, version, allow_code); exit: return return_value; } PyDoc_STRVAR(marshal_loads__doc__, -"loads($module, bytes, /)\n" +"loads($module, bytes, /, *, allow_code=True)\n" "--\n" "\n" "Convert the bytes-like object to a value.\n" "\n" +" allow_code\n" +" Allow to load code objects.\n" +"\n" "If no valid value is found, raise EOFError, ValueError or TypeError. Extra\n" "bytes in the input are ignored."); #define MARSHAL_LOADS_METHODDEF \ - {"loads", (PyCFunction)marshal_loads, METH_O, marshal_loads__doc__}, + {"loads", _PyCFunction_CAST(marshal_loads), METH_FASTCALL|METH_KEYWORDS, marshal_loads__doc__}, static PyObject * -marshal_loads_impl(PyObject *module, Py_buffer *bytes); +marshal_loads_impl(PyObject *module, Py_buffer *bytes, int allow_code); static PyObject * -marshal_loads(PyObject *module, PyObject *arg) +marshal_loads(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(allow_code), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "allow_code", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "loads", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 1; Py_buffer bytes = {NULL, NULL}; + int allow_code = 1; - if (PyObject_GetBuffer(arg, &bytes, PyBUF_SIMPLE) != 0) { + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + if (PyObject_GetBuffer(args[0], &bytes, PyBUF_SIMPLE) != 0) { + goto exit; + } + if (!noptargs) { + goto skip_optional_kwonly; + } + allow_code = PyObject_IsTrue(args[1]); + if (allow_code < 0) { goto exit; } - return_value = marshal_loads_impl(module, &bytes); +skip_optional_kwonly: + return_value = marshal_loads_impl(module, &bytes, allow_code); exit: /* Cleanup for bytes */ @@ -153,4 +339,4 @@ marshal_loads(PyObject *module, PyObject *arg) return return_value; } -/*[clinic end generated code: output=92d2d47aac9128ee input=a9049054013a1b77]*/ +/*[clinic end generated code: output=1575b9a3ae48ad3d input=a9049054013a1b77]*/ diff --git a/Python/compile.c b/Python/compile.c index 65ac05ad58d4dd..2a6291ccb51b0c 100644 --- a/Python/compile.c +++ b/Python/compile.c @@ -2897,7 +2897,7 @@ compiler_jump_if(struct compiler *c, location loc, compiler_jump_if(c, loc, e->v.IfExp.test, next2, 0)); RETURN_IF_ERROR( compiler_jump_if(c, loc, e->v.IfExp.body, next, cond)); - ADDOP_JUMP(c, NO_LOCATION, JUMP, end); + ADDOP_JUMP(c, NO_LOCATION, JUMP_NO_INTERRUPT, end); USE_LABEL(c, next2); RETURN_IF_ERROR( @@ -2926,12 +2926,12 @@ compiler_jump_if(struct compiler *c, location loc, ADDOP(c, LOC(e), TO_BOOL); ADDOP_JUMP(c, LOC(e), cond ? POP_JUMP_IF_TRUE : POP_JUMP_IF_FALSE, next); NEW_JUMP_TARGET_LABEL(c, end); - ADDOP_JUMP(c, NO_LOCATION, JUMP, end); + ADDOP_JUMP(c, NO_LOCATION, JUMP_NO_INTERRUPT, end); USE_LABEL(c, cleanup); ADDOP(c, LOC(e), POP_TOP); if (!cond) { - ADDOP_JUMP(c, NO_LOCATION, JUMP, next); + ADDOP_JUMP(c, NO_LOCATION, JUMP_NO_INTERRUPT, next); } USE_LABEL(c, end); @@ -2963,7 +2963,7 @@ compiler_ifexp(struct compiler *c, expr_ty e) compiler_jump_if(c, LOC(e), e->v.IfExp.test, next, 0)); VISIT(c, expr, e->v.IfExp.body); - ADDOP_JUMP(c, NO_LOCATION, JUMP, end); + ADDOP_JUMP(c, NO_LOCATION, JUMP_NO_INTERRUPT, end); USE_LABEL(c, next); VISIT(c, expr, e->v.IfExp.orelse); @@ -3041,7 +3041,7 @@ compiler_if(struct compiler *c, stmt_ty s) VISIT_SEQ(c, stmt, s->v.If.body); if (asdl_seq_LEN(s->v.If.orelse)) { - ADDOP_JUMP(c, NO_LOCATION, JUMP, end); + ADDOP_JUMP(c, NO_LOCATION, JUMP_NO_INTERRUPT, end); USE_LABEL(c, next); VISIT_SEQ(c, stmt, s->v.If.orelse); @@ -3294,7 +3294,7 @@ compiler_try_finally(struct compiler *c, stmt_ty s) compiler_pop_fblock(c, FINALLY_TRY, body); VISIT_SEQ(c, stmt, s->v.Try.finalbody); - ADDOP_JUMP(c, NO_LOCATION, JUMP, exit); + ADDOP_JUMP(c, NO_LOCATION, JUMP_NO_INTERRUPT, exit); /* `finally` block */ USE_LABEL(c, end); @@ -3344,7 +3344,7 @@ compiler_try_star_finally(struct compiler *c, stmt_ty s) compiler_pop_fblock(c, FINALLY_TRY, body); VISIT_SEQ(c, stmt, s->v.TryStar.finalbody); - ADDOP_JUMP(c, NO_LOCATION, JUMP, exit); + ADDOP_JUMP(c, NO_LOCATION, JUMP_NO_INTERRUPT, exit); /* `finally` block */ USE_LABEL(c, end); @@ -3419,7 +3419,7 @@ compiler_try_except(struct compiler *c, stmt_ty s) if (s->v.Try.orelse && asdl_seq_LEN(s->v.Try.orelse)) { VISIT_SEQ(c, stmt, s->v.Try.orelse); } - ADDOP_JUMP(c, NO_LOCATION, JUMP, end); + ADDOP_JUMP(c, NO_LOCATION, JUMP_NO_INTERRUPT, end); n = asdl_seq_LEN(s->v.Try.handlers); USE_LABEL(c, except); @@ -3483,7 +3483,7 @@ compiler_try_except(struct compiler *c, stmt_ty s) compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store)); RETURN_IF_ERROR( compiler_nameop(c, 
NO_LOCATION, handler->v.ExceptHandler.name, Del)); - ADDOP_JUMP(c, NO_LOCATION, JUMP, end); + ADDOP_JUMP(c, NO_LOCATION, JUMP_NO_INTERRUPT, end); /* except: */ USE_LABEL(c, cleanup_end); @@ -3511,7 +3511,7 @@ compiler_try_except(struct compiler *c, stmt_ty s) compiler_pop_fblock(c, HANDLER_CLEANUP, cleanup_body); ADDOP(c, NO_LOCATION, POP_BLOCK); ADDOP(c, NO_LOCATION, POP_EXCEPT); - ADDOP_JUMP(c, NO_LOCATION, JUMP, end); + ADDOP_JUMP(c, NO_LOCATION, JUMP_NO_INTERRUPT, end); } USE_LABEL(c, except); @@ -3599,7 +3599,7 @@ compiler_try_star_except(struct compiler *c, stmt_ty s) VISIT_SEQ(c, stmt, s->v.TryStar.body); compiler_pop_fblock(c, TRY_EXCEPT, body); ADDOP(c, NO_LOCATION, POP_BLOCK); - ADDOP_JUMP(c, NO_LOCATION, JUMP, orelse); + ADDOP_JUMP(c, NO_LOCATION, JUMP_NO_INTERRUPT, orelse); Py_ssize_t n = asdl_seq_LEN(s->v.TryStar.handlers); USE_LABEL(c, except); @@ -3681,7 +3681,7 @@ compiler_try_star_except(struct compiler *c, stmt_ty s) RETURN_IF_ERROR( compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del)); } - ADDOP_JUMP(c, NO_LOCATION, JUMP, except); + ADDOP_JUMP(c, NO_LOCATION, JUMP_NO_INTERRUPT, except); /* except: */ USE_LABEL(c, cleanup_end); @@ -3698,11 +3698,11 @@ compiler_try_star_except(struct compiler *c, stmt_ty s) /* add exception raised to the res list */ ADDOP_I(c, NO_LOCATION, LIST_APPEND, 3); // exc ADDOP(c, NO_LOCATION, POP_TOP); // lasti - ADDOP_JUMP(c, NO_LOCATION, JUMP, except_with_error); + ADDOP_JUMP(c, NO_LOCATION, JUMP_NO_INTERRUPT, except_with_error); USE_LABEL(c, except); ADDOP(c, NO_LOCATION, NOP); // to hold a propagated location info - ADDOP_JUMP(c, NO_LOCATION, JUMP, except_with_error); + ADDOP_JUMP(c, NO_LOCATION, JUMP_NO_INTERRUPT, except_with_error); USE_LABEL(c, no_match); ADDOP(c, loc, POP_TOP); // match (None) @@ -3712,7 +3712,7 @@ compiler_try_star_except(struct compiler *c, stmt_ty s) if (i == n - 1) { /* Add exc to the list (if not None it's the unhandled part of the EG) */ ADDOP_I(c, NO_LOCATION, LIST_APPEND, 1); - ADDOP_JUMP(c, NO_LOCATION, JUMP, reraise_star); + ADDOP_JUMP(c, NO_LOCATION, JUMP_NO_INTERRUPT, reraise_star); } } /* artificial */ @@ -3728,7 +3728,7 @@ compiler_try_star_except(struct compiler *c, stmt_ty s) ADDOP(c, NO_LOCATION, POP_TOP); ADDOP(c, NO_LOCATION, POP_BLOCK); ADDOP(c, NO_LOCATION, POP_EXCEPT); - ADDOP_JUMP(c, NO_LOCATION, JUMP, end); + ADDOP_JUMP(c, NO_LOCATION, JUMP_NO_INTERRUPT, end); USE_LABEL(c, reraise); ADDOP(c, NO_LOCATION, POP_BLOCK); @@ -4626,7 +4626,7 @@ compiler_compare(struct compiler *c, expr_ty e) VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Compare.comparators, n)); ADDOP_COMPARE(c, loc, asdl_seq_GET(e->v.Compare.ops, n)); NEW_JUMP_TARGET_LABEL(c, end); - ADDOP_JUMP(c, NO_LOCATION, JUMP, end); + ADDOP_JUMP(c, NO_LOCATION, JUMP_NO_INTERRUPT, end); USE_LABEL(c, cleanup); ADDOP_I(c, loc, SWAP, 2); @@ -5668,7 +5668,7 @@ pop_inlined_comprehension_state(struct compiler *c, location loc, } if (state.pushed_locals) { ADDOP(c, NO_LOCATION, POP_BLOCK); - ADDOP_JUMP(c, NO_LOCATION, JUMP, state.end); + ADDOP_JUMP(c, NO_LOCATION, JUMP_NO_INTERRUPT, state.end); // cleanup from an exception inside the comprehension USE_LABEL(c, state.cleanup); @@ -5916,7 +5916,7 @@ compiler_with_except_finish(struct compiler *c, jump_target_label cleanup) { ADDOP(c, NO_LOCATION, POP_TOP); ADDOP(c, NO_LOCATION, POP_TOP); NEW_JUMP_TARGET_LABEL(c, exit); - ADDOP_JUMP(c, NO_LOCATION, JUMP, exit); + ADDOP_JUMP(c, NO_LOCATION, JUMP_NO_INTERRUPT, exit); USE_LABEL(c, cleanup); POP_EXCEPT_AND_RERAISE(c, NO_LOCATION); @@ -7410,7 +7410,7 @@ 
compiler_match_inner(struct compiler *c, stmt_ty s, pattern_context *pc) ADDOP(c, LOC(m->pattern), POP_TOP); } VISIT_SEQ(c, stmt, m->body); - ADDOP_JUMP(c, NO_LOCATION, JUMP, end); + ADDOP_JUMP(c, NO_LOCATION, JUMP_NO_INTERRUPT, end); // If the pattern fails to match, we want the line number of the // cleanup to be associated with the failed pattern, not the last line // of the body diff --git a/Python/context.c b/Python/context.c index c94c014219d0e4..1e90811c374ec6 100644 --- a/Python/context.c +++ b/Python/context.c @@ -64,12 +64,12 @@ static int contextvar_del(PyContextVar *var); -#if PyContext_MAXFREELIST > 0 +#ifdef WITH_FREELISTS static struct _Py_context_state * get_context_state(void) { - PyInterpreterState *interp = _PyInterpreterState_GET(); - return &interp->context; + _PyFreeListState *state = _PyFreeListState_GET(); + return &state->context_state; } #endif @@ -340,13 +340,9 @@ static inline PyContext * _context_alloc(void) { PyContext *ctx; -#if PyContext_MAXFREELIST > 0 +#ifdef WITH_FREELISTS struct _Py_context_state *state = get_context_state(); -#ifdef Py_DEBUG - // _context_alloc() must not be called after _PyContext_Fini() - assert(state->numfree != -1); -#endif - if (state->numfree) { + if (state->numfree > 0) { state->numfree--; ctx = state->freelist; state->freelist = (PyContext *)ctx->ctx_weakreflist; @@ -471,13 +467,9 @@ context_tp_dealloc(PyContext *self) } (void)context_tp_clear(self); -#if PyContext_MAXFREELIST > 0 +#ifdef WITH_FREELISTS struct _Py_context_state *state = get_context_state(); -#ifdef Py_DEBUG - // _context_alloc() must not be called after _PyContext_Fini() - assert(state->numfree != -1); -#endif - if (state->numfree < PyContext_MAXFREELIST) { + if (state->numfree >= 0 && state->numfree < PyContext_MAXFREELIST) { state->numfree++; self->ctx_weakreflist = (PyObject *)state->freelist; state->freelist = self; @@ -1275,28 +1267,27 @@ get_token_missing(void) void -_PyContext_ClearFreeList(PyInterpreterState *interp) +_PyContext_ClearFreeList(_PyFreeListState *freelist_state, int is_finalization) { -#if PyContext_MAXFREELIST > 0 - struct _Py_context_state *state = &interp->context; - for (; state->numfree; state->numfree--) { +#ifdef WITH_FREELISTS + struct _Py_context_state *state = &freelist_state->context_state; + for (; state->numfree > 0; state->numfree--) { PyContext *ctx = state->freelist; state->freelist = (PyContext *)ctx->ctx_weakreflist; ctx->ctx_weakreflist = NULL; PyObject_GC_Del(ctx); } + if (is_finalization) { + state->numfree = -1; + } #endif } void -_PyContext_Fini(PyInterpreterState *interp) +_PyContext_Fini(_PyFreeListState *state) { - _PyContext_ClearFreeList(interp); -#if defined(Py_DEBUG) && PyContext_MAXFREELIST > 0 - struct _Py_context_state *state = &interp->context; - state->numfree = -1; -#endif + _PyContext_ClearFreeList(state, 1); } diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h index 14fb3a05a9f674..2b4399b25bae2b 100644 --- a/Python/executor_cases.c.h +++ b/Python/executor_cases.c.h @@ -3318,35 +3318,38 @@ case _GUARD_IS_TRUE_POP: { PyObject *flag; flag = stack_pointer[-1]; - if (Py_IsFalse(flag)) goto deoptimize; - assert(Py_IsTrue(flag)); stack_pointer += -1; + if (!Py_IsTrue(flag)) goto deoptimize; + assert(Py_IsTrue(flag)); break; } case _GUARD_IS_FALSE_POP: { PyObject *flag; flag = stack_pointer[-1]; - if (Py_IsTrue(flag)) goto deoptimize; - assert(Py_IsFalse(flag)); stack_pointer += -1; + if (!Py_IsFalse(flag)) goto deoptimize; + assert(Py_IsFalse(flag)); break; } case _GUARD_IS_NONE_POP: { 
PyObject *val; val = stack_pointer[-1]; - if (!Py_IsNone(val)) goto deoptimize; stack_pointer += -1; + if (!Py_IsNone(val)) { + Py_DECREF(val); + if (1) goto deoptimize; + } break; } case _GUARD_IS_NOT_NONE_POP: { PyObject *val; val = stack_pointer[-1]; + stack_pointer += -1; if (Py_IsNone(val)) goto deoptimize; Py_DECREF(val); - stack_pointer += -1; break; } @@ -3381,19 +3384,27 @@ break; } - case _INSERT: { - PyObject *top; - oparg = CURRENT_OPARG(); - top = stack_pointer[-1]; - // Inserts TOS at position specified by oparg; - memmove(&stack_pointer[-1 - oparg], &stack_pointer[-oparg], oparg * sizeof(stack_pointer[0])); - stack_pointer[-1 - oparg] = top; + case _CHECK_VALIDITY: { + TIER_TWO_ONLY + if (!current_executor->vm_data.valid) goto deoptimize; break; } - case _CHECK_VALIDITY: { - TIER_TWO_ONLY - if (!current_executor->base.vm_data.valid) goto deoptimize; + case _LOAD_CONST_INLINE_BORROW: { + PyObject *value; + PyObject *ptr = (PyObject *)CURRENT_OPERAND(); + value = ptr; + stack_pointer[0] = value; + stack_pointer += 1; + break; + } + + case _INTERNAL_INCREMENT_OPT_COUNTER: { + PyObject *opt; + opt = stack_pointer[-1]; + _PyCounterOptimizerObject *exe = (_PyCounterOptimizerObject *)opt; + exe->count++; + stack_pointer += -1; break; } diff --git a/Python/flowgraph.c b/Python/flowgraph.c index 5bb11980b8ca37..e84030c87b1b4b 100644 --- a/Python/flowgraph.c +++ b/Python/flowgraph.c @@ -29,6 +29,7 @@ typedef struct _PyCfgInstruction { int i_opcode; int i_oparg; _PyCompilerSrcLocation i_loc; + unsigned i_loc_propagated : 1; /* location was set by propagate_line_numbers */ struct _PyCfgBasicblock *i_target; /* target block (if jump instruction) */ struct _PyCfgBasicblock *i_except; /* target block when exception is raised */ } cfg_instr; @@ -316,6 +317,16 @@ basicblock_exits_scope(const basicblock *b) { return last && IS_SCOPE_EXIT_OPCODE(last->i_opcode); } +static inline int +basicblock_has_eval_break(const basicblock *b) { + for (int i = 0; i < b->b_iused; i++) { + if (OPCODE_HAS_EVAL_BREAK(b->b_instr[i].i_opcode)) { + return true; + } + } + return false; +} + static bool cfg_builder_current_block_is_terminated(cfg_builder *g) { @@ -481,9 +492,12 @@ no_redundant_jumps(cfg_builder *g) { if (IS_UNCONDITIONAL_JUMP_OPCODE(last->i_opcode)) { basicblock *next = next_nonempty_block(b->b_next); basicblock *jump_target = next_nonempty_block(last->i_target); - assert(jump_target != next); if (jump_target == next) { - return false; + assert(next); + if (last->i_loc.lineno == next->b_instr[0].i_loc.lineno) { + assert(0); + return false; + } } } } @@ -491,6 +505,21 @@ no_redundant_jumps(cfg_builder *g) { return true; } +static bool +all_exits_have_lineno(basicblock *entryblock) { + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + for (int i = 0; i < b->b_iused; i++) { + cfg_instr *instr = &b->b_instr[i]; + if (instr->i_opcode == RETURN_VALUE) { + if (instr->i_loc.lineno < 0) { + assert(0); + return false; + } + } + } + } + return true; +} #endif /***** CFG preprocessing (jump targets and exceptions) *****/ @@ -927,7 +956,10 @@ label_exception_targets(basicblock *entryblock) { /***** CFG optimizations *****/ static int -mark_reachable(basicblock *entryblock) { +remove_unreachable(basicblock *entryblock) { + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + b->b_predecessors = 0; + } basicblock **stack = make_cfg_traversal_stack(entryblock); if (stack == NULL) { return ERROR; @@ -959,6 +991,14 @@ mark_reachable(basicblock *entryblock) { } } PyMem_Free(stack); + + /* Delete 
unreachable instructions */ + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + if (b->b_predecessors == 0) { + b->b_iused = 0; + b->b_except_handler = 0; + } + } return SUCCESS; } @@ -1097,6 +1137,7 @@ remove_redundant_jumps(cfg_builder *g) { } } } + return SUCCESS; } @@ -1135,11 +1176,17 @@ jump_thread(cfg_instr *inst, cfg_instr *target, int opcode) assert(is_jump(target)); // bpo-45773: If inst->i_target == target->i_target, then nothing actually // changes (and we fall into an infinite loop): - if ((inst->i_loc.lineno == target->i_loc.lineno || target->i_loc.lineno == -1) && + if (inst->i_loc.lineno == -1) assert(inst->i_loc_propagated); + if (target->i_loc.lineno == -1) assert(target->i_loc_propagated); + if ((inst->i_loc.lineno == target->i_loc.lineno || + inst->i_loc_propagated || target->i_loc_propagated) && inst->i_target != target->i_target) { inst->i_target = target->i_target; inst->i_opcode = opcode; + if (inst->i_loc_propagated && !target->i_loc_propagated) { + inst->i_loc = target->i_loc; + } return true; } return false; @@ -1602,9 +1649,14 @@ optimize_basic_block(PyObject *const_cache, basicblock *bb, PyObject *consts) } break; case JUMP: + case JUMP_NO_INTERRUPT: switch (target->i_opcode) { case JUMP: i -= jump_thread(inst, target, JUMP); + continue; + case JUMP_NO_INTERRUPT: + i -= jump_thread(inst, target, opcode); + continue; } break; case FOR_ITER: @@ -1691,6 +1743,7 @@ optimize_basic_block(PyObject *const_cache, basicblock *bb, PyObject *consts) return ERROR; } +static int resolve_line_numbers(cfg_builder *g, int firstlineno); /* Perform optimizations on a control flow graph. The consts object should still be in list form to allow new constants @@ -1700,41 +1753,31 @@ optimize_basic_block(PyObject *const_cache, basicblock *bb, PyObject *consts) NOPs. Later those NOPs are removed. 
*/ static int -optimize_cfg(cfg_builder *g, PyObject *consts, PyObject *const_cache) +optimize_cfg(cfg_builder *g, PyObject *consts, PyObject *const_cache, int firstlineno) { assert(PyDict_CheckExact(const_cache)); RETURN_IF_ERROR(check_cfg(g)); for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { RETURN_IF_ERROR(inline_small_exit_blocks(b)); } + RETURN_IF_ERROR(remove_unreachable(g->g_entryblock)); + RETURN_IF_ERROR(resolve_line_numbers(g, firstlineno)); for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { RETURN_IF_ERROR(optimize_basic_block(const_cache, b, consts)); - assert(b->b_predecessors == 0); } RETURN_IF_ERROR(remove_redundant_nops_and_pairs(g->g_entryblock)); for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { RETURN_IF_ERROR(inline_small_exit_blocks(b)); } - RETURN_IF_ERROR(mark_reachable(g->g_entryblock)); + RETURN_IF_ERROR(remove_unreachable(g->g_entryblock)); - /* Delete unreachable instructions */ - for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { - if (b->b_predecessors == 0) { - b->b_iused = 0; - b->b_except_handler = 0; - } - } - for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { - remove_redundant_nops(b); - } - RETURN_IF_ERROR(remove_redundant_jumps(g)); - - for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { - remove_redundant_nops(b); + for (int n = 0; n < 2; n++) { + for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { + remove_redundant_nops(b); + } + RETURN_IF_ERROR(remove_redundant_jumps(g)); } - RETURN_IF_ERROR(remove_redundant_jumps(g)); - assert(no_redundant_jumps(g)); return SUCCESS; } @@ -2151,7 +2194,13 @@ push_cold_blocks_to_end(cfg_builder *g) { if (!IS_LABEL(b->b_next->b_label)) { b->b_next->b_label.id = next_lbl++; } - basicblock_addop(explicit_jump, JUMP, b->b_next->b_label.id, NO_LOCATION); + cfg_instr *prev_instr = basicblock_last_instr(b); + // b cannot be empty because at the end of an exception handler + // there is always a POP_EXCEPT + RERAISE/RETURN + assert(prev_instr); + + basicblock_addop(explicit_jump, JUMP_NO_INTERRUPT, b->b_next->b_label.id, + prev_instr->i_loc); explicit_jump->b_cold = 1; explicit_jump->b_next = b->b_next; b->b_next = explicit_jump; @@ -2233,16 +2282,18 @@ convert_pseudo_ops(basicblock *entryblock) } static inline bool -is_exit_without_lineno(basicblock *b) { - if (!basicblock_exits_scope(b)) { - return false; - } - for (int i = 0; i < b->b_iused; i++) { - if (b->b_instr[i].i_loc.lineno >= 0) { - return false; +is_exit_or_eval_check_without_lineno(basicblock *b) { + if (basicblock_exits_scope(b) || basicblock_has_eval_break(b)) { + for (int i = 0; i < b->b_iused; i++) { + if (b->b_instr[i].i_loc.lineno >= 0) { + return false; + } } + return true; + } + else { + return false; } - return true; } @@ -2270,7 +2321,7 @@ duplicate_exits_without_lineno(cfg_builder *g) } if (is_jump(last)) { basicblock *target = next_nonempty_block(last->i_target); - if (is_exit_without_lineno(target) && target->b_predecessors > 1) { + if (is_exit_or_eval_check_without_lineno(target) && target->b_predecessors > 1) { basicblock *new_target = copy_basicblock(g, target); if (new_target == NULL) { return ERROR; @@ -2290,7 +2341,7 @@ duplicate_exits_without_lineno(cfg_builder *g) * fall through, and thus can only have a single predecessor */ for (basicblock *b = entryblock; b != NULL; b = b->b_next) { if (BB_HAS_FALLTHROUGH(b) && b->b_next && b->b_iused > 0) { - if (is_exit_without_lineno(b->b_next)) { + if 
(is_exit_or_eval_check_without_lineno(b->b_next)) { cfg_instr *last = basicblock_last_instr(b); assert(last != NULL); b->b_next->b_instr[0].i_loc = last->i_loc; @@ -2320,6 +2371,7 @@ propagate_line_numbers(basicblock *entryblock) { for (int i = 0; i < b->b_iused; i++) { if (b->b_instr[i].i_loc.lineno < 0) { b->b_instr[i].i_loc = prev_location; + b->b_instr[i].i_loc_propagated = 1; } else { prev_location = b->b_instr[i].i_loc; @@ -2329,6 +2381,7 @@ propagate_line_numbers(basicblock *entryblock) { if (b->b_next->b_iused > 0) { if (b->b_next->b_instr[0].i_loc.lineno < 0) { b->b_next->b_instr[0].i_loc = prev_location; + b->b_next->b_instr[0].i_loc_propagated = 1; } } } @@ -2337,46 +2390,18 @@ propagate_line_numbers(basicblock *entryblock) { if (target->b_predecessors == 1) { if (target->b_instr[0].i_loc.lineno < 0) { target->b_instr[0].i_loc = prev_location; + target->b_instr[0].i_loc_propagated = 1; } } } } } -/* Make sure that all returns have a line number, even if early passes - * have failed to propagate a correct line number. - * The resulting line number may not be correct according to PEP 626, - * but should be "good enough", and no worse than in older versions. */ -static void -guarantee_lineno_for_exits(basicblock *entryblock, int firstlineno) { - int lineno = firstlineno; - assert(lineno > 0); - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - cfg_instr *last = basicblock_last_instr(b); - if (last == NULL) { - continue; - } - if (last->i_loc.lineno < 0) { - if (last->i_opcode == RETURN_VALUE) { - for (int i = 0; i < b->b_iused; i++) { - assert(b->b_instr[i].i_loc.lineno < 0); - - b->b_instr[i].i_loc.lineno = lineno; - } - } - } - else { - lineno = last->i_loc.lineno; - } - } -} - static int resolve_line_numbers(cfg_builder *g, int firstlineno) { RETURN_IF_ERROR(duplicate_exits_without_lineno(g)); propagate_line_numbers(g->g_entryblock); - guarantee_lineno_for_exits(g->g_entryblock, firstlineno); return SUCCESS; } @@ -2392,7 +2417,7 @@ _PyCfg_OptimizeCodeUnit(cfg_builder *g, PyObject *consts, PyObject *const_cache, RETURN_IF_ERROR(label_exception_targets(g->g_entryblock)); /** Optimization **/ - RETURN_IF_ERROR(optimize_cfg(g, consts, const_cache)); + RETURN_IF_ERROR(optimize_cfg(g, consts, const_cache, firstlineno)); RETURN_IF_ERROR(remove_unused_consts(g->g_entryblock, consts)); RETURN_IF_ERROR( add_checks_for_loads_of_uninitialized_variables( @@ -2400,6 +2425,7 @@ _PyCfg_OptimizeCodeUnit(cfg_builder *g, PyObject *consts, PyObject *const_cache, insert_superinstructions(g); RETURN_IF_ERROR(push_cold_blocks_to_end(g)); + assert(all_exits_have_lineno(g->g_entryblock)); RETURN_IF_ERROR(resolve_line_numbers(g, firstlineno)); return SUCCESS; } diff --git a/Python/gc.c b/Python/gc.c new file mode 100644 index 00000000000000..9f9a755f6ac95e --- /dev/null +++ b/Python/gc.c @@ -0,0 +1,1943 @@ +// This implements the reference cycle garbage collector. +// The Python module inteface to the collector is in gcmodule.c. 
+// See https://devguide.python.org/internals/garbage-collector/ + +#include "Python.h" +#include "pycore_ceval.h" // _Py_set_eval_breaker_bit() +#include "pycore_context.h" +#include "pycore_dict.h" // _PyDict_MaybeUntrack() +#include "pycore_initconfig.h" +#include "pycore_interp.h" // PyInterpreterState.gc +#include "pycore_object.h" +#include "pycore_pyerrors.h" +#include "pycore_pystate.h" // _PyThreadState_GET() +#include "pycore_weakref.h" // _PyWeakref_ClearRef() +#include "pydtrace.h" + +typedef struct _gc_runtime_state GCState; + +#ifdef Py_DEBUG +# define GC_DEBUG +#endif + +#define GC_NEXT _PyGCHead_NEXT +#define GC_PREV _PyGCHead_PREV + +// update_refs() set this bit for all objects in current generation. +// subtract_refs() and move_unreachable() uses this to distinguish +// visited object is in GCing or not. +// +// move_unreachable() removes this flag from reachable objects. +// Only unreachable objects have this flag. +// +// No objects in interpreter have this flag after GC ends. +#define PREV_MASK_COLLECTING _PyGC_PREV_MASK_COLLECTING + +// Lowest bit of _gc_next is used for UNREACHABLE flag. +// +// This flag represents the object is in unreachable list in move_unreachable() +// +// Although this flag is used only in move_unreachable(), move_unreachable() +// doesn't clear this flag to skip unnecessary iteration. +// move_legacy_finalizers() removes this flag instead. +// Between them, unreachable list is not normal list and we can not use +// most gc_list_* functions for it. +#define NEXT_MASK_UNREACHABLE (1) + +#define AS_GC(op) _Py_AS_GC(op) +#define FROM_GC(gc) _Py_FROM_GC(gc) + +// Automatically choose the generation that needs collecting. +#define GENERATION_AUTO (-1) + +static inline int +gc_is_collecting(PyGC_Head *g) +{ + return (g->_gc_prev & PREV_MASK_COLLECTING) != 0; +} + +static inline void +gc_clear_collecting(PyGC_Head *g) +{ + g->_gc_prev &= ~PREV_MASK_COLLECTING; +} + +static inline Py_ssize_t +gc_get_refs(PyGC_Head *g) +{ + return (Py_ssize_t)(g->_gc_prev >> _PyGC_PREV_SHIFT); +} + +static inline void +gc_set_refs(PyGC_Head *g, Py_ssize_t refs) +{ + g->_gc_prev = (g->_gc_prev & ~_PyGC_PREV_MASK) + | ((uintptr_t)(refs) << _PyGC_PREV_SHIFT); +} + +static inline void +gc_reset_refs(PyGC_Head *g, Py_ssize_t refs) +{ + g->_gc_prev = (g->_gc_prev & _PyGC_PREV_MASK_FINALIZED) + | PREV_MASK_COLLECTING + | ((uintptr_t)(refs) << _PyGC_PREV_SHIFT); +} + +static inline void +gc_decref(PyGC_Head *g) +{ + _PyObject_ASSERT_WITH_MSG(FROM_GC(g), + gc_get_refs(g) > 0, + "refcount is too small"); + g->_gc_prev -= 1 << _PyGC_PREV_SHIFT; +} + + +#define GEN_HEAD(gcstate, n) (&(gcstate)->generations[n].head) + + +static GCState * +get_gc_state(void) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + return &interp->gc; +} + + +void +_PyGC_InitState(GCState *gcstate) +{ +#define INIT_HEAD(GEN) \ + do { \ + GEN.head._gc_next = (uintptr_t)&GEN.head; \ + GEN.head._gc_prev = (uintptr_t)&GEN.head; \ + } while (0) + + for (int i = 0; i < NUM_GENERATIONS; i++) { + assert(gcstate->generations[i].count == 0); + INIT_HEAD(gcstate->generations[i]); + }; + gcstate->generation0 = GEN_HEAD(gcstate, 0); + INIT_HEAD(gcstate->permanent_generation); + +#undef INIT_HEAD +} + + +PyStatus +_PyGC_Init(PyInterpreterState *interp) +{ + GCState *gcstate = &interp->gc; + + gcstate->garbage = PyList_New(0); + if (gcstate->garbage == NULL) { + return _PyStatus_NO_MEMORY(); + } + + gcstate->callbacks = PyList_New(0); + if (gcstate->callbacks == NULL) { + return _PyStatus_NO_MEMORY(); + } + 
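+ /* These two lists back the Python-level gc.garbage and gc.callbacks.
+ * gc_collect_main() asserts that gcstate->garbage is non-NULL, so this
+ * initialization must run before the first collection. */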
+ return _PyStatus_OK(); +} + + +/* +_gc_prev values +--------------- + +Between collections, _gc_prev is used for doubly linked list. + +Lowest two bits of _gc_prev are used for flags. +PREV_MASK_COLLECTING is used only while collecting and cleared before GC ends +or _PyObject_GC_UNTRACK() is called. + +During a collection, _gc_prev is temporary used for gc_refs, and the gc list +is singly linked until _gc_prev is restored. + +gc_refs + At the start of a collection, update_refs() copies the true refcount + to gc_refs, for each object in the generation being collected. + subtract_refs() then adjusts gc_refs so that it equals the number of + times an object is referenced directly from outside the generation + being collected. + +PREV_MASK_COLLECTING + Objects in generation being collected are marked PREV_MASK_COLLECTING in + update_refs(). + + +_gc_next values +--------------- + +_gc_next takes these values: + +0 + The object is not tracked + +!= 0 + Pointer to the next object in the GC list. + Additionally, lowest bit is used temporary for + NEXT_MASK_UNREACHABLE flag described below. + +NEXT_MASK_UNREACHABLE + move_unreachable() then moves objects not reachable (whether directly or + indirectly) from outside the generation into an "unreachable" set and + set this flag. + + Objects that are found to be reachable have gc_refs set to 1. + When this flag is set for the reachable object, the object must be in + "unreachable" set. + The flag is unset and the object is moved back to "reachable" set. + + move_legacy_finalizers() will remove this flag from "unreachable" set. +*/ + +/*** list functions ***/ + +static inline void +gc_list_init(PyGC_Head *list) +{ + // List header must not have flags. + // We can assign pointer by simple cast. + list->_gc_prev = (uintptr_t)list; + list->_gc_next = (uintptr_t)list; +} + +static inline int +gc_list_is_empty(PyGC_Head *list) +{ + return (list->_gc_next == (uintptr_t)list); +} + +/* Append `node` to `list`. */ +static inline void +gc_list_append(PyGC_Head *node, PyGC_Head *list) +{ + PyGC_Head *last = (PyGC_Head *)list->_gc_prev; + + // last <-> node + _PyGCHead_SET_PREV(node, last); + _PyGCHead_SET_NEXT(last, node); + + // node <-> list + _PyGCHead_SET_NEXT(node, list); + list->_gc_prev = (uintptr_t)node; +} + +/* Remove `node` from the gc list it's currently in. */ +static inline void +gc_list_remove(PyGC_Head *node) +{ + PyGC_Head *prev = GC_PREV(node); + PyGC_Head *next = GC_NEXT(node); + + _PyGCHead_SET_NEXT(prev, next); + _PyGCHead_SET_PREV(next, prev); + + node->_gc_next = 0; /* object is not currently tracked */ +} + +/* Move `node` from the gc list it's currently in (which is not explicitly + * named here) to the end of `list`. This is semantically the same as + * gc_list_remove(node) followed by gc_list_append(node, list). + */ +static void +gc_list_move(PyGC_Head *node, PyGC_Head *list) +{ + /* Unlink from current list. */ + PyGC_Head *from_prev = GC_PREV(node); + PyGC_Head *from_next = GC_NEXT(node); + _PyGCHead_SET_NEXT(from_prev, from_next); + _PyGCHead_SET_PREV(from_next, from_prev); + + /* Relink at end of new list. */ + // list must not have flags. So we can skip macros. 
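+ // Splice `node` in between the current tail and the list header,
+ // i.e. append it at the end of `list`.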
+ PyGC_Head *to_prev = (PyGC_Head*)list->_gc_prev; + _PyGCHead_SET_PREV(node, to_prev); + _PyGCHead_SET_NEXT(to_prev, node); + list->_gc_prev = (uintptr_t)node; + _PyGCHead_SET_NEXT(node, list); +} + +/* append list `from` onto list `to`; `from` becomes an empty list */ +static void +gc_list_merge(PyGC_Head *from, PyGC_Head *to) +{ + assert(from != to); + if (!gc_list_is_empty(from)) { + PyGC_Head *to_tail = GC_PREV(to); + PyGC_Head *from_head = GC_NEXT(from); + PyGC_Head *from_tail = GC_PREV(from); + assert(from_head != from); + assert(from_tail != from); + + _PyGCHead_SET_NEXT(to_tail, from_head); + _PyGCHead_SET_PREV(from_head, to_tail); + + _PyGCHead_SET_NEXT(from_tail, to); + _PyGCHead_SET_PREV(to, from_tail); + } + gc_list_init(from); +} + +static Py_ssize_t +gc_list_size(PyGC_Head *list) +{ + PyGC_Head *gc; + Py_ssize_t n = 0; + for (gc = GC_NEXT(list); gc != list; gc = GC_NEXT(gc)) { + n++; + } + return n; +} + +/* Walk the list and mark all objects as non-collecting */ +static inline void +gc_list_clear_collecting(PyGC_Head *collectable) +{ + PyGC_Head *gc; + for (gc = GC_NEXT(collectable); gc != collectable; gc = GC_NEXT(gc)) { + gc_clear_collecting(gc); + } +} + +/* Append objects in a GC list to a Python list. + * Return 0 if all OK, < 0 if error (out of memory for list) + */ +static int +append_objects(PyObject *py_list, PyGC_Head *gc_list) +{ + PyGC_Head *gc; + for (gc = GC_NEXT(gc_list); gc != gc_list; gc = GC_NEXT(gc)) { + PyObject *op = FROM_GC(gc); + if (op != py_list) { + if (PyList_Append(py_list, op)) { + return -1; /* exception */ + } + } + } + return 0; +} + +// Constants for validate_list's flags argument. +enum flagstates {collecting_clear_unreachable_clear, + collecting_clear_unreachable_set, + collecting_set_unreachable_clear, + collecting_set_unreachable_set}; + +#ifdef GC_DEBUG +// validate_list checks list consistency. And it works as document +// describing when flags are expected to be set / unset. +// `head` must be a doubly-linked gc list, although it's fine (expected!) if +// the prev and next pointers are "polluted" with flags. +// What's checked: +// - The `head` pointers are not polluted. +// - The objects' PREV_MASK_COLLECTING and NEXT_MASK_UNREACHABLE flags are all +// `set or clear, as specified by the 'flags' argument. +// - The prev and next pointers are mutually consistent. +static void +validate_list(PyGC_Head *head, enum flagstates flags) +{ + assert((head->_gc_prev & PREV_MASK_COLLECTING) == 0); + assert((head->_gc_next & NEXT_MASK_UNREACHABLE) == 0); + uintptr_t prev_value = 0, next_value = 0; + switch (flags) { + case collecting_clear_unreachable_clear: + break; + case collecting_set_unreachable_clear: + prev_value = PREV_MASK_COLLECTING; + break; + case collecting_clear_unreachable_set: + next_value = NEXT_MASK_UNREACHABLE; + break; + case collecting_set_unreachable_set: + prev_value = PREV_MASK_COLLECTING; + next_value = NEXT_MASK_UNREACHABLE; + break; + default: + assert(! 
"bad internal flags argument"); + } + PyGC_Head *prev = head; + PyGC_Head *gc = GC_NEXT(head); + while (gc != head) { + PyGC_Head *trueprev = GC_PREV(gc); + PyGC_Head *truenext = (PyGC_Head *)(gc->_gc_next & ~NEXT_MASK_UNREACHABLE); + assert(truenext != NULL); + assert(trueprev == prev); + assert((gc->_gc_prev & PREV_MASK_COLLECTING) == prev_value); + assert((gc->_gc_next & NEXT_MASK_UNREACHABLE) == next_value); + prev = gc; + gc = truenext; + } + assert(prev == GC_PREV(head)); +} +#else +#define validate_list(x, y) do{}while(0) +#endif + +/*** end of list stuff ***/ + + +/* Set all gc_refs = ob_refcnt. After this, gc_refs is > 0 and + * PREV_MASK_COLLECTING bit is set for all objects in containers. + */ +static void +update_refs(PyGC_Head *containers) +{ + PyGC_Head *next; + PyGC_Head *gc = GC_NEXT(containers); + + while (gc != containers) { + next = GC_NEXT(gc); + /* Move any object that might have become immortal to the + * permanent generation as the reference count is not accurately + * reflecting the actual number of live references to this object + */ + if (_Py_IsImmortal(FROM_GC(gc))) { + gc_list_move(gc, &get_gc_state()->permanent_generation.head); + gc = next; + continue; + } + gc_reset_refs(gc, Py_REFCNT(FROM_GC(gc))); + /* Python's cyclic gc should never see an incoming refcount + * of 0: if something decref'ed to 0, it should have been + * deallocated immediately at that time. + * Possible cause (if the assert triggers): a tp_dealloc + * routine left a gc-aware object tracked during its teardown + * phase, and did something-- or allowed something to happen -- + * that called back into Python. gc can trigger then, and may + * see the still-tracked dying object. Before this assert + * was added, such mistakes went on to allow gc to try to + * delete the object again. In a debug build, that caused + * a mysterious segfault, when _Py_ForgetReference tried + * to remove the object from the doubly-linked list of all + * objects a second time. In a release build, an actual + * double deallocation occurred, which leads to corruption + * of the allocator's internal bookkeeping pointers. That's + * so serious that maybe this should be a release-build + * check instead of an assert? + */ + _PyObject_ASSERT(FROM_GC(gc), gc_get_refs(gc) != 0); + gc = next; + } +} + +/* A traversal callback for subtract_refs. */ +static int +visit_decref(PyObject *op, void *parent) +{ + OBJECT_STAT_INC(object_visits); + _PyObject_ASSERT(_PyObject_CAST(parent), !_PyObject_IsFreed(op)); + + if (_PyObject_IS_GC(op)) { + PyGC_Head *gc = AS_GC(op); + /* We're only interested in gc_refs for objects in the + * generation being collected, which can be recognized + * because only they have positive gc_refs. + */ + if (gc_is_collecting(gc)) { + gc_decref(gc); + } + } + return 0; +} + +/* Subtract internal references from gc_refs. After this, gc_refs is >= 0 + * for all objects in containers, and is GC_REACHABLE for all tracked gc + * objects not in containers. The ones with gc_refs > 0 are directly + * reachable from outside containers, and so can't be collected. + */ +static void +subtract_refs(PyGC_Head *containers) +{ + traverseproc traverse; + PyGC_Head *gc = GC_NEXT(containers); + for (; gc != containers; gc = GC_NEXT(gc)) { + PyObject *op = FROM_GC(gc); + traverse = Py_TYPE(op)->tp_traverse; + (void) traverse(op, + visit_decref, + op); + } +} + +/* A traversal callback for move_unreachable. 
*/ +static int +visit_reachable(PyObject *op, void *arg) +{ + PyGC_Head *reachable = arg; + OBJECT_STAT_INC(object_visits); + if (!_PyObject_IS_GC(op)) { + return 0; + } + + PyGC_Head *gc = AS_GC(op); + const Py_ssize_t gc_refs = gc_get_refs(gc); + + // Ignore objects in other generation. + // This also skips objects "to the left" of the current position in + // move_unreachable's scan of the 'young' list - they've already been + // traversed, and no longer have the PREV_MASK_COLLECTING flag. + if (! gc_is_collecting(gc)) { + return 0; + } + // It would be a logic error elsewhere if the collecting flag were set on + // an untracked object. + assert(gc->_gc_next != 0); + + if (gc->_gc_next & NEXT_MASK_UNREACHABLE) { + /* This had gc_refs = 0 when move_unreachable got + * to it, but turns out it's reachable after all. + * Move it back to move_unreachable's 'young' list, + * and move_unreachable will eventually get to it + * again. + */ + // Manually unlink gc from unreachable list because the list functions + // don't work right in the presence of NEXT_MASK_UNREACHABLE flags. + PyGC_Head *prev = GC_PREV(gc); + PyGC_Head *next = (PyGC_Head*)(gc->_gc_next & ~NEXT_MASK_UNREACHABLE); + _PyObject_ASSERT(FROM_GC(prev), + prev->_gc_next & NEXT_MASK_UNREACHABLE); + _PyObject_ASSERT(FROM_GC(next), + next->_gc_next & NEXT_MASK_UNREACHABLE); + prev->_gc_next = gc->_gc_next; // copy NEXT_MASK_UNREACHABLE + _PyGCHead_SET_PREV(next, prev); + + gc_list_append(gc, reachable); + gc_set_refs(gc, 1); + } + else if (gc_refs == 0) { + /* This is in move_unreachable's 'young' list, but + * the traversal hasn't yet gotten to it. All + * we need to do is tell move_unreachable that it's + * reachable. + */ + gc_set_refs(gc, 1); + } + /* Else there's nothing to do. + * If gc_refs > 0, it must be in move_unreachable's 'young' + * list, and move_unreachable will eventually get to it. + */ + else { + _PyObject_ASSERT_WITH_MSG(op, gc_refs > 0, "refcount is too small"); + } + return 0; +} + +/* Move the unreachable objects from young to unreachable. After this, + * all objects in young don't have PREV_MASK_COLLECTING flag and + * unreachable have the flag. + * All objects in young after this are directly or indirectly reachable + * from outside the original young; and all objects in unreachable are + * not. + * + * This function restores _gc_prev pointer. young and unreachable are + * doubly linked list after this function. + * But _gc_next in unreachable list has NEXT_MASK_UNREACHABLE flag. + * So we can not gc_list_* functions for unreachable until we remove the flag. + */ +static void +move_unreachable(PyGC_Head *young, PyGC_Head *unreachable) +{ + // previous elem in the young list, used for restore gc_prev. + PyGC_Head *prev = young; + PyGC_Head *gc = GC_NEXT(young); + + /* Invariants: all objects "to the left" of us in young are reachable + * (directly or indirectly) from outside the young list as it was at entry. + * + * All other objects from the original young "to the left" of us are in + * unreachable now, and have NEXT_MASK_UNREACHABLE. All objects to the + * left of us in 'young' now have been scanned, and no objects here + * or to the right have been scanned yet. + */ + + while (gc != young) { + if (gc_get_refs(gc)) { + /* gc is definitely reachable from outside the + * original 'young'. Mark it as such, and traverse + * its pointers to find any other objects that may + * be directly reachable from it. 
Note that the + * call to tp_traverse may append objects to young, + * so we have to wait until it returns to determine + * the next object to visit. + */ + PyObject *op = FROM_GC(gc); + traverseproc traverse = Py_TYPE(op)->tp_traverse; + _PyObject_ASSERT_WITH_MSG(op, gc_get_refs(gc) > 0, + "refcount is too small"); + // NOTE: visit_reachable may change gc->_gc_next when + // young->_gc_prev == gc. Don't do gc = GC_NEXT(gc) before! + (void) traverse(op, + visit_reachable, + (void *)young); + // relink gc_prev to prev element. + _PyGCHead_SET_PREV(gc, prev); + // gc is not COLLECTING state after here. + gc_clear_collecting(gc); + prev = gc; + } + else { + /* This *may* be unreachable. To make progress, + * assume it is. gc isn't directly reachable from + * any object we've already traversed, but may be + * reachable from an object we haven't gotten to yet. + * visit_reachable will eventually move gc back into + * young if that's so, and we'll see it again. + */ + // Move gc to unreachable. + // No need to gc->next->prev = prev because it is single linked. + prev->_gc_next = gc->_gc_next; + + // We can't use gc_list_append() here because we use + // NEXT_MASK_UNREACHABLE here. + PyGC_Head *last = GC_PREV(unreachable); + // NOTE: Since all objects in unreachable set has + // NEXT_MASK_UNREACHABLE flag, we set it unconditionally. + // But this may pollute the unreachable list head's 'next' pointer + // too. That's semantically senseless but expedient here - the + // damage is repaired when this function ends. + last->_gc_next = (NEXT_MASK_UNREACHABLE | (uintptr_t)gc); + _PyGCHead_SET_PREV(gc, last); + gc->_gc_next = (NEXT_MASK_UNREACHABLE | (uintptr_t)unreachable); + unreachable->_gc_prev = (uintptr_t)gc; + } + gc = (PyGC_Head*)prev->_gc_next; + } + // young->_gc_prev must be last element remained in the list. + young->_gc_prev = (uintptr_t)prev; + // don't let the pollution of the list head's next pointer leak + unreachable->_gc_next &= ~NEXT_MASK_UNREACHABLE; +} + +static void +untrack_tuples(PyGC_Head *head) +{ + PyGC_Head *next, *gc = GC_NEXT(head); + while (gc != head) { + PyObject *op = FROM_GC(gc); + next = GC_NEXT(gc); + if (PyTuple_CheckExact(op)) { + _PyTuple_MaybeUntrack(op); + } + gc = next; + } +} + +/* Try to untrack all currently tracked dictionaries */ +static void +untrack_dicts(PyGC_Head *head) +{ + PyGC_Head *next, *gc = GC_NEXT(head); + while (gc != head) { + PyObject *op = FROM_GC(gc); + next = GC_NEXT(gc); + if (PyDict_CheckExact(op)) { + _PyDict_MaybeUntrack(op); + } + gc = next; + } +} + +/* Return true if object has a pre-PEP 442 finalization method. */ +static int +has_legacy_finalizer(PyObject *op) +{ + return Py_TYPE(op)->tp_del != NULL; +} + +/* Move the objects in unreachable with tp_del slots into `finalizers`. + * + * This function also removes NEXT_MASK_UNREACHABLE flag + * from _gc_next in unreachable. + */ +static void +move_legacy_finalizers(PyGC_Head *unreachable, PyGC_Head *finalizers) +{ + PyGC_Head *gc, *next; + assert((unreachable->_gc_next & NEXT_MASK_UNREACHABLE) == 0); + + /* March over unreachable. Move objects with finalizers into + * `finalizers`. 
+ */ + for (gc = GC_NEXT(unreachable); gc != unreachable; gc = next) { + PyObject *op = FROM_GC(gc); + + _PyObject_ASSERT(op, gc->_gc_next & NEXT_MASK_UNREACHABLE); + gc->_gc_next &= ~NEXT_MASK_UNREACHABLE; + next = (PyGC_Head*)gc->_gc_next; + + if (has_legacy_finalizer(op)) { + gc_clear_collecting(gc); + gc_list_move(gc, finalizers); + } + } +} + +static inline void +clear_unreachable_mask(PyGC_Head *unreachable) +{ + /* Check that the list head does not have the unreachable bit set */ + assert(((uintptr_t)unreachable & NEXT_MASK_UNREACHABLE) == 0); + + PyGC_Head *gc, *next; + assert((unreachable->_gc_next & NEXT_MASK_UNREACHABLE) == 0); + for (gc = GC_NEXT(unreachable); gc != unreachable; gc = next) { + _PyObject_ASSERT((PyObject*)FROM_GC(gc), gc->_gc_next & NEXT_MASK_UNREACHABLE); + gc->_gc_next &= ~NEXT_MASK_UNREACHABLE; + next = (PyGC_Head*)gc->_gc_next; + } + validate_list(unreachable, collecting_set_unreachable_clear); +} + +/* A traversal callback for move_legacy_finalizer_reachable. */ +static int +visit_move(PyObject *op, void *arg) +{ + PyGC_Head *tolist = arg; + OBJECT_STAT_INC(object_visits); + if (_PyObject_IS_GC(op)) { + PyGC_Head *gc = AS_GC(op); + if (gc_is_collecting(gc)) { + gc_list_move(gc, tolist); + gc_clear_collecting(gc); + } + } + return 0; +} + +/* Move objects that are reachable from finalizers, from the unreachable set + * into finalizers set. + */ +static void +move_legacy_finalizer_reachable(PyGC_Head *finalizers) +{ + traverseproc traverse; + PyGC_Head *gc = GC_NEXT(finalizers); + for (; gc != finalizers; gc = GC_NEXT(gc)) { + /* Note that the finalizers list may grow during this. */ + traverse = Py_TYPE(FROM_GC(gc))->tp_traverse; + (void) traverse(FROM_GC(gc), + visit_move, + (void *)finalizers); + } +} + +/* Clear all weakrefs to unreachable objects, and if such a weakref has a + * callback, invoke it if necessary. Note that it's possible for such + * weakrefs to be outside the unreachable set -- indeed, those are precisely + * the weakrefs whose callbacks must be invoked. See gc_weakref.txt for + * overview & some details. Some weakrefs with callbacks may be reclaimed + * directly by this routine; the number reclaimed is the return value. Other + * weakrefs with callbacks may be moved into the `old` generation. Objects + * moved into `old` have gc_refs set to GC_REACHABLE; the objects remaining in + * unreachable are left at GC_TENTATIVELY_UNREACHABLE. When this returns, + * no object in `unreachable` is weakly referenced anymore. + */ +static int +handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old) +{ + PyGC_Head *gc; + PyObject *op; /* generally FROM_GC(gc) */ + PyWeakReference *wr; /* generally a cast of op */ + PyGC_Head wrcb_to_call; /* weakrefs with callbacks to call */ + PyGC_Head *next; + int num_freed = 0; + + gc_list_init(&wrcb_to_call); + + /* Clear all weakrefs to the objects in unreachable. If such a weakref + * also has a callback, move it into `wrcb_to_call` if the callback + * needs to be invoked. Note that we cannot invoke any callbacks until + * all weakrefs to unreachable objects are cleared, lest the callback + * resurrect an unreachable object via a still-active weakref. We + * make another pass over wrcb_to_call, invoking callbacks, after this + * pass completes. + */ + for (gc = GC_NEXT(unreachable); gc != unreachable; gc = next) { + PyWeakReference **wrlist; + + op = FROM_GC(gc); + next = GC_NEXT(gc); + + if (PyWeakref_Check(op)) { + /* A weakref inside the unreachable set must be cleared. 
If we + * allow its callback to execute inside delete_garbage(), it + * could expose objects that have tp_clear already called on + * them. Or, it could resurrect unreachable objects. One way + * this can happen is if some container objects do not implement + * tp_traverse. Then, wr_object can be outside the unreachable + * set but can be deallocated as a result of breaking the + * reference cycle. If we don't clear the weakref, the callback + * will run and potentially cause a crash. See bpo-38006 for + * one example. + */ + _PyWeakref_ClearRef((PyWeakReference *)op); + } + + if (! _PyType_SUPPORTS_WEAKREFS(Py_TYPE(op))) { + continue; + } + + /* It supports weakrefs. Does it have any? + * + * This is never triggered for static types so we can avoid the + * (slightly) more costly _PyObject_GET_WEAKREFS_LISTPTR(). + */ + wrlist = _PyObject_GET_WEAKREFS_LISTPTR_FROM_OFFSET(op); + + /* `op` may have some weakrefs. March over the list, clear + * all the weakrefs, and move the weakrefs with callbacks + * that must be called into wrcb_to_call. + */ + for (wr = *wrlist; wr != NULL; wr = *wrlist) { + PyGC_Head *wrasgc; /* AS_GC(wr) */ + + /* _PyWeakref_ClearRef clears the weakref but leaves + * the callback pointer intact. Obscure: it also + * changes *wrlist. + */ + _PyObject_ASSERT((PyObject *)wr, wr->wr_object == op); + _PyWeakref_ClearRef(wr); + _PyObject_ASSERT((PyObject *)wr, wr->wr_object == Py_None); + if (wr->wr_callback == NULL) { + /* no callback */ + continue; + } + + /* Headache time. `op` is going away, and is weakly referenced by + * `wr`, which has a callback. Should the callback be invoked? If wr + * is also trash, no: + * + * 1. There's no need to call it. The object and the weakref are + * both going away, so it's legitimate to pretend the weakref is + * going away first. The user has to ensure a weakref outlives its + * referent if they want a guarantee that the wr callback will get + * invoked. + * + * 2. It may be catastrophic to call it. If the callback is also in + * cyclic trash (CT), then although the CT is unreachable from + * outside the current generation, CT may be reachable from the + * callback. Then the callback could resurrect insane objects. + * + * Since the callback is never needed and may be unsafe in this case, + * wr is simply left in the unreachable set. Note that because we + * already called _PyWeakref_ClearRef(wr), its callback will never + * trigger. + * + * OTOH, if wr isn't part of CT, we should invoke the callback: the + * weakref outlived the trash. Note that since wr isn't CT in this + * case, its callback can't be CT either -- wr acted as an external + * root to this generation, and therefore its callback did too. So + * nothing in CT is reachable from the callback either, so it's hard + * to imagine how calling it later could create a problem for us. wr + * is moved to wrcb_to_call in this case. + */ + if (gc_is_collecting(AS_GC((PyObject *)wr))) { + /* it should already have been cleared above */ + assert(wr->wr_object == Py_None); + continue; + } + + /* Create a new reference so that wr can't go away + * before we can process it again. + */ + Py_INCREF(wr); + + /* Move wr to wrcb_to_call, for the next pass. */ + wrasgc = AS_GC((PyObject *)wr); + assert(wrasgc != next); /* wrasgc is reachable, but + next isn't, so they can't + be the same */ + gc_list_move(wrasgc, &wrcb_to_call); + } + } + + /* Invoke the callbacks we decided to honor. It's safe to invoke them + * because they can't reference unreachable objects. + */ + while (! 
gc_list_is_empty(&wrcb_to_call)) { + PyObject *temp; + PyObject *callback; + + gc = (PyGC_Head*)wrcb_to_call._gc_next; + op = FROM_GC(gc); + _PyObject_ASSERT(op, PyWeakref_Check(op)); + wr = (PyWeakReference *)op; + callback = wr->wr_callback; + _PyObject_ASSERT(op, callback != NULL); + + /* copy-paste of weakrefobject.c's handle_callback() */ + temp = PyObject_CallOneArg(callback, (PyObject *)wr); + if (temp == NULL) { + PyErr_WriteUnraisable(callback); + } + else { + Py_DECREF(temp); + } + + /* Give up the reference we created in the first pass. When + * op's refcount hits 0 (which it may or may not do right now), + * op's tp_dealloc will decref op->wr_callback too. Note + * that the refcount probably will hit 0 now, and because this + * weakref was reachable to begin with, gc didn't already + * add it to its count of freed objects. Example: a reachable + * weak value dict maps some key to this reachable weakref. + * The callback removes this key->weakref mapping from the + * dict, leaving no other references to the weakref (excepting + * ours). + */ + Py_DECREF(op); + if (wrcb_to_call._gc_next == (uintptr_t)gc) { + /* object is still alive -- move it */ + gc_list_move(gc, old); + } + else { + ++num_freed; + } + } + + return num_freed; +} + +static void +debug_cycle(const char *msg, PyObject *op) +{ + PySys_FormatStderr("gc: %s <%s %p>\n", + msg, Py_TYPE(op)->tp_name, op); +} + +/* Handle uncollectable garbage (cycles with tp_del slots, and stuff reachable + * only from such cycles). + * If _PyGC_DEBUG_SAVEALL, all objects in finalizers are appended to the module + * garbage list (a Python list), else only the objects in finalizers with + * __del__ methods are appended to garbage. All objects in finalizers are + * merged into the old list regardless. + */ +static void +handle_legacy_finalizers(PyThreadState *tstate, + GCState *gcstate, + PyGC_Head *finalizers, PyGC_Head *old) +{ + assert(!_PyErr_Occurred(tstate)); + assert(gcstate->garbage != NULL); + + PyGC_Head *gc = GC_NEXT(finalizers); + for (; gc != finalizers; gc = GC_NEXT(gc)) { + PyObject *op = FROM_GC(gc); + + if ((gcstate->debug & _PyGC_DEBUG_SAVEALL) || has_legacy_finalizer(op)) { + if (PyList_Append(gcstate->garbage, op) < 0) { + _PyErr_Clear(tstate); + break; + } + } + } + + gc_list_merge(finalizers, old); +} + +/* Run first-time finalizers (if any) on all the objects in collectable. + * Note that this may remove some (or even all) of the objects from the + * list, due to refcounts falling to 0. + */ +static void +finalize_garbage(PyThreadState *tstate, PyGC_Head *collectable) +{ + destructor finalize; + PyGC_Head seen; + + /* While we're going through the loop, `finalize(op)` may cause op, or + * other objects, to be reclaimed via refcounts falling to zero. So + * there's little we can rely on about the structure of the input + * `collectable` list across iterations. For safety, we always take the + * first object in that list and move it to a temporary `seen` list. + * If objects vanish from the `collectable` and `seen` lists we don't + * care. + */ + gc_list_init(&seen); + + while (!gc_list_is_empty(collectable)) { + PyGC_Head *gc = GC_NEXT(collectable); + PyObject *op = FROM_GC(gc); + gc_list_move(gc, &seen); + if (!_PyGCHead_FINALIZED(gc) && + (finalize = Py_TYPE(op)->tp_finalize) != NULL) + { + _PyGCHead_SET_FINALIZED(gc); + Py_INCREF(op); + finalize(op); + assert(!_PyErr_Occurred(tstate)); + Py_DECREF(op); + } + } + gc_list_merge(&seen, collectable); +} + +/* Break reference cycles by clearing the containers involved. 
This is + * tricky business as the lists can be changing and we don't know which + * objects may be freed. It is possible I screwed something up here. + */ +static void +delete_garbage(PyThreadState *tstate, GCState *gcstate, + PyGC_Head *collectable, PyGC_Head *old) +{ + assert(!_PyErr_Occurred(tstate)); + + while (!gc_list_is_empty(collectable)) { + PyGC_Head *gc = GC_NEXT(collectable); + PyObject *op = FROM_GC(gc); + + _PyObject_ASSERT_WITH_MSG(op, Py_REFCNT(op) > 0, + "refcount is too small"); + + if (gcstate->debug & _PyGC_DEBUG_SAVEALL) { + assert(gcstate->garbage != NULL); + if (PyList_Append(gcstate->garbage, op) < 0) { + _PyErr_Clear(tstate); + } + } + else { + inquiry clear; + if ((clear = Py_TYPE(op)->tp_clear) != NULL) { + Py_INCREF(op); + (void) clear(op); + if (_PyErr_Occurred(tstate)) { + PyErr_FormatUnraisable("Exception ignored in tp_clear of %s", + Py_TYPE(op)->tp_name); + } + Py_DECREF(op); + } + } + if (GC_NEXT(collectable) == gc) { + /* object is still alive, move it, it may die later */ + gc_clear_collecting(gc); + gc_list_move(gc, old); + } + } +} + + +// Show stats for objects in each generations +static void +show_stats_each_generations(GCState *gcstate) +{ + char buf[100]; + size_t pos = 0; + + for (int i = 0; i < NUM_GENERATIONS && pos < sizeof(buf); i++) { + pos += PyOS_snprintf(buf+pos, sizeof(buf)-pos, + " %zd", + gc_list_size(GEN_HEAD(gcstate, i))); + } + + PySys_FormatStderr( + "gc: objects in each generation:%s\n" + "gc: objects in permanent generation: %zd\n", + buf, gc_list_size(&gcstate->permanent_generation.head)); +} + +/* Deduce which objects among "base" are unreachable from outside the list + and move them to 'unreachable'. The process consist in the following steps: + +1. Copy all reference counts to a different field (gc_prev is used to hold + this copy to save memory). +2. Traverse all objects in "base" and visit all referred objects using + "tp_traverse" and for every visited object, subtract 1 to the reference + count (the one that we copied in the previous step). After this step, all + objects that can be reached directly from outside must have strictly positive + reference count, while all unreachable objects must have a count of exactly 0. +3. Identify all unreachable objects (the ones with 0 reference count) and move + them to the "unreachable" list. This step also needs to move back to "base" all + objects that were initially marked as unreachable but are referred transitively + by the reachable objects (the ones with strictly positive reference count). + +Contracts: + + * The "base" has to be a valid list with no mask set. + + * The "unreachable" list must be uninitialized (this function calls + gc_list_init over 'unreachable'). + +IMPORTANT: This function leaves 'unreachable' with the NEXT_MASK_UNREACHABLE +flag set but it does not clear it to skip unnecessary iteration. Before the +flag is cleared (for example, by using 'clear_unreachable_mask' function or +by a call to 'move_legacy_finalizers'), the 'unreachable' list is not a normal +list and we can not use most gc_list_* functions for it. */ +static inline void +deduce_unreachable(PyGC_Head *base, PyGC_Head *unreachable) { + validate_list(base, collecting_clear_unreachable_clear); + /* Using ob_refcnt and gc_refs, calculate which objects in the + * container set are reachable from outside the set (i.e., have a + * refcount greater than 0 when all the references within the + * set are taken into account). 
+ */ + update_refs(base); // gc_prev is used for gc_refs + subtract_refs(base); + + /* Leave everything reachable from outside base in base, and move + * everything else (in base) to unreachable. + * + * NOTE: This used to move the reachable objects into a reachable + * set instead. But most things usually turn out to be reachable, + * so it's more efficient to move the unreachable things. It "sounds slick" + * to move the unreachable objects, until you think about it - the reason it + * pays isn't actually obvious. + * + * Suppose we create objects A, B, C in that order. They appear in the young + * generation in the same order. If B points to A, and C to B, and C is + * reachable from outside, then the adjusted refcounts will be 0, 0, and 1 + * respectively. + * + * When move_unreachable finds A, A is moved to the unreachable list. The + * same for B when it's first encountered. Then C is traversed, B is moved + * _back_ to the reachable list. B is eventually traversed, and then A is + * moved back to the reachable list. + * + * So instead of not moving at all, the reachable objects B and A are moved + * twice each. Why is this a win? A straightforward algorithm to move the + * reachable objects instead would move A, B, and C once each. + * + * The key is that this dance leaves the objects in order C, B, A - it's + * reversed from the original order. On all _subsequent_ scans, none of + * them will move. Since most objects aren't in cycles, this can save an + * unbounded number of moves across an unbounded number of later collections. + * It can cost more only the first time the chain is scanned. + * + * Drawback: move_unreachable is also used to find out what's still trash + * after finalizers may resurrect objects. In _that_ case most unreachable + * objects will remain unreachable, so it would be more efficient to move + * the reachable objects instead. But this is a one-time cost, probably not + * worth complicating the code to speed just a little. + */ + gc_list_init(unreachable); + move_unreachable(base, unreachable); // gc_prev is pointer again + validate_list(base, collecting_clear_unreachable_clear); + validate_list(unreachable, collecting_set_unreachable_set); +} + +/* Handle objects that may have resurrected after a call to 'finalize_garbage', moving + them to 'old_generation' and placing the rest on 'still_unreachable'. + + Contracts: + * After this function 'unreachable' must not be used anymore and 'still_unreachable' + will contain the objects that did not resurrect. + + * The "still_unreachable" list must be uninitialized (this function calls + gc_list_init over 'still_unreachable'). + +IMPORTANT: After a call to this function, the 'still_unreachable' set will have the +PREV_MARK_COLLECTING set, but the objects in this set are going to be removed so +we can skip the expense of clearing the flag to avoid extra iteration. */ +static inline void +handle_resurrected_objects(PyGC_Head *unreachable, PyGC_Head* still_unreachable, + PyGC_Head *old_generation) +{ + // Remove the PREV_MASK_COLLECTING from unreachable + // to prepare it for a new call to 'deduce_unreachable' + gc_list_clear_collecting(unreachable); + + // After the call to deduce_unreachable, the 'still_unreachable' set will + // have the PREV_MARK_COLLECTING set, but the objects are going to be + // removed so we can skip the expense of clearing the flag. 
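+ // Anything a finalizer resurrected is now reachable from outside again,
+ // so the second deduce_unreachable() pass leaves it in 'resurrected'
+ // (merged into the old generation below); only objects that are still
+ // dead end up in 'still_unreachable' for the caller to clear.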
+ PyGC_Head* resurrected = unreachable; + deduce_unreachable(resurrected, still_unreachable); + clear_unreachable_mask(still_unreachable); + + // Move the resurrected objects to the old generation for future collection. + gc_list_merge(resurrected, old_generation); +} + + +/* Invoke progress callbacks to notify clients that garbage collection + * is starting or stopping + */ +static void +invoke_gc_callback(PyThreadState *tstate, const char *phase, + int generation, Py_ssize_t collected, + Py_ssize_t uncollectable) +{ + assert(!_PyErr_Occurred(tstate)); + + /* we may get called very early */ + GCState *gcstate = &tstate->interp->gc; + if (gcstate->callbacks == NULL) { + return; + } + + /* The local variable cannot be rebound, check it for sanity */ + assert(PyList_CheckExact(gcstate->callbacks)); + PyObject *info = NULL; + if (PyList_GET_SIZE(gcstate->callbacks) != 0) { + info = Py_BuildValue("{sisnsn}", + "generation", generation, + "collected", collected, + "uncollectable", uncollectable); + if (info == NULL) { + PyErr_FormatUnraisable("Exception ignored on invoking gc callbacks"); + return; + } + } + + PyObject *phase_obj = PyUnicode_FromString(phase); + if (phase_obj == NULL) { + Py_XDECREF(info); + PyErr_FormatUnraisable("Exception ignored on invoking gc callbacks"); + return; + } + + PyObject *stack[] = {phase_obj, info}; + for (Py_ssize_t i=0; i<PyList_GET_SIZE(gcstate->callbacks); i++) { + PyObject *r, *cb = PyList_GET_ITEM(gcstate->callbacks, i); + Py_INCREF(cb); /* make sure cb doesn't go away */ + r = PyObject_Vectorcall(cb, stack, 2, NULL); + if (r == NULL) { + PyErr_WriteUnraisable(cb); + } + else { + Py_DECREF(r); + } + Py_DECREF(cb); + } + Py_DECREF(phase_obj); + Py_XDECREF(info); + assert(!_PyErr_Occurred(tstate)); +} + + +/* Find the oldest generation (highest numbered) where the count + * exceeds the threshold. Objects in that generation and + * generations younger than it will be collected. */ +static int +gc_select_generation(GCState *gcstate) +{ + for (int i = NUM_GENERATIONS-1; i >= 0; i--) { + if (gcstate->generations[i].count > gcstate->generations[i].threshold) { + /* Avoid quadratic performance degradation in number + of tracked objects (see also issue #4074): + + To limit the cost of garbage collection, there are two strategies; + - make each collection faster, e.g. by scanning fewer objects + - do less collections + This heuristic is about the latter strategy. + + In addition to the various configurable thresholds, we only trigger a + full collection if the ratio + + long_lived_pending / long_lived_total + + is above a given value (hardwired to 25%). + + The reason is that, while "non-full" collections (i.e., collections of + the young and middle generations) will always examine roughly the same + number of objects -- determined by the aforementioned thresholds --, + the cost of a full collection is proportional to the total number of + long-lived objects, which is virtually unbounded. + + Indeed, it has been remarked that doing a full collection every <constant number> + of object creations entails a dramatic performance + degradation in workloads which consist in creating and storing lots of + long-lived objects (e.g. building a large list of GC-tracked objects would + show quadratic performance, instead of linear as expected: see issue #4074).
+ + Using the above ratio, instead, yields amortized linear performance in + the total number of objects (the effect of which can be summarized + thusly: "each full garbage collection is more and more costly as the + number of objects grows, but we do fewer and fewer of them"). + + This heuristic was suggested by Martin von Löwis on python-dev in + June 2008. His original analysis and proposal can be found at: + http://mail.python.org/pipermail/python-dev/2008-June/080579.html + */ + if (i == NUM_GENERATIONS - 1 + && gcstate->long_lived_pending < gcstate->long_lived_total / 4) + { + continue; + } + return i; + } + } + return -1; +} + + +/* This is the main function. Read this to understand how the + * collection process works. */ +static Py_ssize_t +gc_collect_main(PyThreadState *tstate, int generation, _PyGC_Reason reason) +{ + int i; + Py_ssize_t m = 0; /* # objects collected */ + Py_ssize_t n = 0; /* # unreachable objects that couldn't be collected */ + PyGC_Head *young; /* the generation we are examining */ + PyGC_Head *old; /* next older generation */ + PyGC_Head unreachable; /* non-problematic unreachable trash */ + PyGC_Head finalizers; /* objects with, & reachable from, __del__ */ + PyGC_Head *gc; + _PyTime_t t1 = 0; /* initialize to prevent a compiler warning */ + GCState *gcstate = &tstate->interp->gc; + + // gc_collect_main() must not be called before _PyGC_Init + // or after _PyGC_Fini() + assert(gcstate->garbage != NULL); + assert(!_PyErr_Occurred(tstate)); + + int expected = 0; + if (!_Py_atomic_compare_exchange_int(&gcstate->collecting, &expected, 1)) { + // Don't start a garbage collection if one is already in progress. + return 0; + } + + if (generation == GENERATION_AUTO) { + // Select the oldest generation that needs collecting. We will collect + // objects from that generation and all generations younger than it. + generation = gc_select_generation(gcstate); + if (generation < 0) { + // No generation needs to be collected. + _Py_atomic_store_int(&gcstate->collecting, 0); + return 0; + } + } + + assert(generation >= 0 && generation < NUM_GENERATIONS); + +#ifdef Py_STATS + if (_Py_stats) { + _Py_stats->object_stats.object_visits = 0; + } +#endif + GC_STAT_ADD(generation, collections, 1); + + if (reason != _Py_GC_REASON_SHUTDOWN) { + invoke_gc_callback(tstate, "start", generation, 0, 0); + } + + if (gcstate->debug & _PyGC_DEBUG_STATS) { + PySys_WriteStderr("gc: collecting generation %d...\n", generation); + show_stats_each_generations(gcstate); + t1 = _PyTime_GetPerfCounter(); + } + + if (PyDTrace_GC_START_ENABLED()) { + PyDTrace_GC_START(generation); + } + + /* update collection and allocation counters */ + if (generation+1 < NUM_GENERATIONS) { + gcstate->generations[generation+1].count += 1; + } + for (i = 0; i <= generation; i++) { + gcstate->generations[i].count = 0; + } + + /* merge younger generations with one we are currently collecting */ + for (i = 0; i < generation; i++) { + gc_list_merge(GEN_HEAD(gcstate, i), GEN_HEAD(gcstate, generation)); + } + + /* handy references */ + young = GEN_HEAD(gcstate, generation); + if (generation < NUM_GENERATIONS-1) { + old = GEN_HEAD(gcstate, generation+1); + } + else { + old = young; + } + validate_list(old, collecting_clear_unreachable_clear); + + deduce_unreachable(young, &unreachable); + + untrack_tuples(young); + /* Move reachable objects to next generation. 
*/ + if (young != old) { + if (generation == NUM_GENERATIONS - 2) { + gcstate->long_lived_pending += gc_list_size(young); + } + gc_list_merge(young, old); + } + else { + /* We only un-track dicts in full collections, to avoid quadratic + dict build-up. See issue #14775. */ + untrack_dicts(young); + gcstate->long_lived_pending = 0; + gcstate->long_lived_total = gc_list_size(young); + } + + /* All objects in unreachable are trash, but objects reachable from + * legacy finalizers (e.g. tp_del) can't safely be deleted. + */ + gc_list_init(&finalizers); + // NEXT_MASK_UNREACHABLE is cleared here. + // After move_legacy_finalizers(), unreachable is normal list. + move_legacy_finalizers(&unreachable, &finalizers); + /* finalizers contains the unreachable objects with a legacy finalizer; + * unreachable objects reachable *from* those are also uncollectable, + * and we move those into the finalizers list too. + */ + move_legacy_finalizer_reachable(&finalizers); + + validate_list(&finalizers, collecting_clear_unreachable_clear); + validate_list(&unreachable, collecting_set_unreachable_clear); + + /* Print debugging information. */ + if (gcstate->debug & _PyGC_DEBUG_COLLECTABLE) { + for (gc = GC_NEXT(&unreachable); gc != &unreachable; gc = GC_NEXT(gc)) { + debug_cycle("collectable", FROM_GC(gc)); + } + } + + /* Clear weakrefs and invoke callbacks as necessary. */ + m += handle_weakrefs(&unreachable, old); + + validate_list(old, collecting_clear_unreachable_clear); + validate_list(&unreachable, collecting_set_unreachable_clear); + + /* Call tp_finalize on objects which have one. */ + finalize_garbage(tstate, &unreachable); + + /* Handle any objects that may have resurrected after the call + * to 'finalize_garbage' and continue the collection with the + * objects that are still unreachable */ + PyGC_Head final_unreachable; + handle_resurrected_objects(&unreachable, &final_unreachable, old); + + /* Call tp_clear on objects in the final_unreachable set. This will cause + * the reference cycles to be broken. It may also cause some objects + * in finalizers to be freed. + */ + m += gc_list_size(&final_unreachable); + delete_garbage(tstate, gcstate, &final_unreachable, old); + + /* Collect statistics on uncollectable objects found and print + * debugging information. */ + for (gc = GC_NEXT(&finalizers); gc != &finalizers; gc = GC_NEXT(gc)) { + n++; + if (gcstate->debug & _PyGC_DEBUG_UNCOLLECTABLE) + debug_cycle("uncollectable", FROM_GC(gc)); + } + if (gcstate->debug & _PyGC_DEBUG_STATS) { + double d = _PyTime_AsSecondsDouble(_PyTime_GetPerfCounter() - t1); + PySys_WriteStderr( + "gc: done, %zd unreachable, %zd uncollectable, %.4fs elapsed\n", + n+m, n, d); + } + + /* Append instances in the uncollectable set to a Python + * reachable list of garbage. The programmer has to deal with + * this if they insist on creating this type of structure. 
+ */ + handle_legacy_finalizers(tstate, gcstate, &finalizers, old); + validate_list(old, collecting_clear_unreachable_clear); + + /* Clear free list only during the collection of the highest + * generation */ + if (generation == NUM_GENERATIONS-1) { + _PyGC_ClearAllFreeLists(tstate->interp); + } + + if (_PyErr_Occurred(tstate)) { + if (reason == _Py_GC_REASON_SHUTDOWN) { + _PyErr_Clear(tstate); + } + else { + PyErr_FormatUnraisable("Exception ignored in garbage collection"); + } + } + + /* Update stats */ + struct gc_generation_stats *stats = &gcstate->generation_stats[generation]; + stats->collections++; + stats->collected += m; + stats->uncollectable += n; + + GC_STAT_ADD(generation, objects_collected, m); +#ifdef Py_STATS + if (_Py_stats) { + GC_STAT_ADD(generation, object_visits, + _Py_stats->object_stats.object_visits); + _Py_stats->object_stats.object_visits = 0; + } +#endif + + if (PyDTrace_GC_DONE_ENABLED()) { + PyDTrace_GC_DONE(n + m); + } + + if (reason != _Py_GC_REASON_SHUTDOWN) { + invoke_gc_callback(tstate, "stop", generation, m, n); + } + + assert(!_PyErr_Occurred(tstate)); + _Py_atomic_store_int(&gcstate->collecting, 0); + return n + m; +} + +static int +referrersvisit(PyObject* obj, void *arg) +{ + PyObject *objs = arg; + Py_ssize_t i; + for (i = 0; i < PyTuple_GET_SIZE(objs); i++) { + if (PyTuple_GET_ITEM(objs, i) == obj) { + return 1; + } + } + return 0; +} + +static int +gc_referrers_for(PyObject *objs, PyGC_Head *list, PyObject *resultlist) +{ + PyGC_Head *gc; + PyObject *obj; + traverseproc traverse; + for (gc = GC_NEXT(list); gc != list; gc = GC_NEXT(gc)) { + obj = FROM_GC(gc); + traverse = Py_TYPE(obj)->tp_traverse; + if (obj == objs || obj == resultlist) { + continue; + } + if (traverse(obj, referrersvisit, objs)) { + if (PyList_Append(resultlist, obj) < 0) { + return 0; /* error */ + } + } + } + return 1; /* no error */ +} + +PyObject * +_PyGC_GetReferrers(PyInterpreterState *interp, PyObject *objs) +{ + PyObject *result = PyList_New(0); + if (!result) { + return NULL; + } + + GCState *gcstate = &interp->gc; + for (int i = 0; i < NUM_GENERATIONS; i++) { + if (!(gc_referrers_for(objs, GEN_HEAD(gcstate, i), result))) { + Py_DECREF(result); + return NULL; + } + } + return result; +} + +PyObject * +_PyGC_GetObjects(PyInterpreterState *interp, Py_ssize_t generation) +{ + assert(generation >= -1 && generation < NUM_GENERATIONS); + GCState *gcstate = &interp->gc; + + PyObject *result = PyList_New(0); + if (result == NULL) { + return NULL; + } + + if (generation == -1) { + /* If generation is -1, get all objects from all generations */ + for (int i = 0; i < NUM_GENERATIONS; i++) { + if (append_objects(result, GEN_HEAD(gcstate, i))) { + goto error; + } + } + } + else { + if (append_objects(result, GEN_HEAD(gcstate, generation))) { + goto error; + } + } + + return result; +error: + Py_DECREF(result); + return NULL; +} + +void +_PyGC_Freeze(PyInterpreterState *interp) +{ + GCState *gcstate = &interp->gc; + for (int i = 0; i < NUM_GENERATIONS; ++i) { + gc_list_merge(GEN_HEAD(gcstate, i), &gcstate->permanent_generation.head); + gcstate->generations[i].count = 0; + } +} + +void +_PyGC_Unfreeze(PyInterpreterState *interp) +{ + GCState *gcstate = &interp->gc; + gc_list_merge(&gcstate->permanent_generation.head, + GEN_HEAD(gcstate, NUM_GENERATIONS-1)); +} + +Py_ssize_t +_PyGC_GetFreezeCount(PyInterpreterState *interp) +{ + GCState *gcstate = &interp->gc; + return gc_list_size(&gcstate->permanent_generation.head); +} + +/* C API for controlling the state of the garbage collector */ 
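The functions that follow are the public, stable entry points for steering the collector from C. A minimal usage sketch (not from the patch; the helper name run_without_cycle_gc and the work callback are hypothetical, and the caller is assumed to hold the GIL):

/* Run one latency-sensitive callback with cycle collection paused,
 * then restore the previous state and reclaim whatever piled up. */
static void
run_without_cycle_gc(void (*work)(void))
{
    int was_enabled = PyGC_Disable();   /* returns the previous enabled state */

    work();    /* reference counting still runs; only cycle detection is paused */

    if (was_enabled) {
        PyGC_Enable();
        (void)PyGC_Collect();           /* manual full collection */
    }
}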
+int +PyGC_Enable(void) +{ + GCState *gcstate = get_gc_state(); + int old_state = gcstate->enabled; + gcstate->enabled = 1; + return old_state; +} + +int +PyGC_Disable(void) +{ + GCState *gcstate = get_gc_state(); + int old_state = gcstate->enabled; + gcstate->enabled = 0; + return old_state; +} + +int +PyGC_IsEnabled(void) +{ + GCState *gcstate = get_gc_state(); + return gcstate->enabled; +} + +/* Public API to invoke gc.collect() from C */ +Py_ssize_t +PyGC_Collect(void) +{ + PyThreadState *tstate = _PyThreadState_GET(); + GCState *gcstate = &tstate->interp->gc; + + if (!gcstate->enabled) { + return 0; + } + + Py_ssize_t n; + PyObject *exc = _PyErr_GetRaisedException(tstate); + n = gc_collect_main(tstate, NUM_GENERATIONS - 1, _Py_GC_REASON_MANUAL); + _PyErr_SetRaisedException(tstate, exc); + + return n; +} + +Py_ssize_t +_PyGC_Collect(PyThreadState *tstate, int generation, _PyGC_Reason reason) +{ + return gc_collect_main(tstate, generation, reason); +} + +Py_ssize_t +_PyGC_CollectNoFail(PyThreadState *tstate) +{ + /* Ideally, this function is only called on interpreter shutdown, + and therefore not recursively. Unfortunately, when there are daemon + threads, a daemon thread can start a cyclic garbage collection + during interpreter shutdown (and then never finish it). + See http://bugs.python.org/issue8713#msg195178 for an example. + */ + return gc_collect_main(tstate, NUM_GENERATIONS - 1, _Py_GC_REASON_SHUTDOWN); +} + +void +_PyGC_DumpShutdownStats(PyInterpreterState *interp) +{ + GCState *gcstate = &interp->gc; + if (!(gcstate->debug & _PyGC_DEBUG_SAVEALL) + && gcstate->garbage != NULL && PyList_GET_SIZE(gcstate->garbage) > 0) { + const char *message; + if (gcstate->debug & _PyGC_DEBUG_UNCOLLECTABLE) { + message = "gc: %zd uncollectable objects at shutdown"; + } + else { + message = "gc: %zd uncollectable objects at shutdown; " \ + "use gc.set_debug(gc.DEBUG_UNCOLLECTABLE) to list them"; + } + /* PyErr_WarnFormat does too many things and we are at shutdown, + the warnings module's dependencies (e.g. linecache) may be gone + already. */ + if (PyErr_WarnExplicitFormat(PyExc_ResourceWarning, "gc", 0, + "gc", NULL, message, + PyList_GET_SIZE(gcstate->garbage))) + { + PyErr_WriteUnraisable(NULL); + } + if (gcstate->debug & _PyGC_DEBUG_UNCOLLECTABLE) { + PyObject *repr = NULL, *bytes = NULL; + repr = PyObject_Repr(gcstate->garbage); + if (!repr || !(bytes = PyUnicode_EncodeFSDefault(repr))) { + PyErr_WriteUnraisable(gcstate->garbage); + } + else { + PySys_WriteStderr( + " %s\n", + PyBytes_AS_STRING(bytes) + ); + } + Py_XDECREF(repr); + Py_XDECREF(bytes); + } + } +} + + +void +_PyGC_Fini(PyInterpreterState *interp) +{ + GCState *gcstate = &interp->gc; + Py_CLEAR(gcstate->garbage); + Py_CLEAR(gcstate->callbacks); + + /* We expect that none of this interpreters objects are shared + with other interpreters. + See https://github.com/python/cpython/issues/90228. 
*/ +} + +/* for debugging */ +void +_PyGC_Dump(PyGC_Head *g) +{ + _PyObject_Dump(FROM_GC(g)); +} + + +#ifdef Py_DEBUG +static int +visit_validate(PyObject *op, void *parent_raw) +{ + PyObject *parent = _PyObject_CAST(parent_raw); + if (_PyObject_IsFreed(op)) { + _PyObject_ASSERT_FAILED_MSG(parent, + "PyObject_GC_Track() object is not valid"); + } + return 0; +} +#endif + + +/* extension modules might be compiled with GC support so these + functions must always be available */ + +void +PyObject_GC_Track(void *op_raw) +{ + PyObject *op = _PyObject_CAST(op_raw); + if (_PyObject_GC_IS_TRACKED(op)) { + _PyObject_ASSERT_FAILED_MSG(op, + "object already tracked " + "by the garbage collector"); + } + _PyObject_GC_TRACK(op); + +#ifdef Py_DEBUG + /* Check that the object is valid: validate objects traversed + by tp_traverse() */ + traverseproc traverse = Py_TYPE(op)->tp_traverse; + (void)traverse(op, visit_validate, op); +#endif +} + +void +PyObject_GC_UnTrack(void *op_raw) +{ + PyObject *op = _PyObject_CAST(op_raw); + /* Obscure: the Py_TRASHCAN mechanism requires that we be able to + * call PyObject_GC_UnTrack twice on an object. + */ + if (_PyObject_GC_IS_TRACKED(op)) { + _PyObject_GC_UNTRACK(op); + } +} + +int +PyObject_IS_GC(PyObject *obj) +{ + return _PyObject_IS_GC(obj); +} + +void +_Py_ScheduleGC(PyInterpreterState *interp) +{ + _Py_set_eval_breaker_bit(interp, _PY_GC_SCHEDULED_BIT, 1); +} + +void +_PyObject_GC_Link(PyObject *op) +{ + PyGC_Head *g = AS_GC(op); + assert(((uintptr_t)g & (sizeof(uintptr_t)-1)) == 0); // g must be correctly aligned + + PyThreadState *tstate = _PyThreadState_GET(); + GCState *gcstate = &tstate->interp->gc; + g->_gc_next = 0; + g->_gc_prev = 0; + gcstate->generations[0].count++; /* number of allocated GC objects */ + if (gcstate->generations[0].count > gcstate->generations[0].threshold && + gcstate->enabled && + gcstate->generations[0].threshold && + !_Py_atomic_load_int_relaxed(&gcstate->collecting) && + !_PyErr_Occurred(tstate)) + { + _Py_ScheduleGC(tstate->interp); + } +} + +void +_Py_RunGC(PyThreadState *tstate) +{ + gc_collect_main(tstate, GENERATION_AUTO, _Py_GC_REASON_HEAP); +} + +static PyObject * +gc_alloc(size_t basicsize, size_t presize) +{ + PyThreadState *tstate = _PyThreadState_GET(); + if (basicsize > PY_SSIZE_T_MAX - presize) { + return _PyErr_NoMemory(tstate); + } + size_t size = presize + basicsize; + char *mem = PyObject_Malloc(size); + if (mem == NULL) { + return _PyErr_NoMemory(tstate); + } + ((PyObject **)mem)[0] = NULL; + ((PyObject **)mem)[1] = NULL; + PyObject *op = (PyObject *)(mem + presize); + _PyObject_GC_Link(op); + return op; +} + +PyObject * +_PyObject_GC_New(PyTypeObject *tp) +{ + size_t presize = _PyType_PreHeaderSize(tp); + PyObject *op = gc_alloc(_PyObject_SIZE(tp), presize); + if (op == NULL) { + return NULL; + } + _PyObject_Init(op, tp); + return op; +} + +PyVarObject * +_PyObject_GC_NewVar(PyTypeObject *tp, Py_ssize_t nitems) +{ + PyVarObject *op; + + if (nitems < 0) { + PyErr_BadInternalCall(); + return NULL; + } + size_t presize = _PyType_PreHeaderSize(tp); + size_t size = _PyObject_VAR_SIZE(tp, nitems); + op = (PyVarObject *)gc_alloc(size, presize); + if (op == NULL) { + return NULL; + } + _PyObject_InitVar(op, tp, nitems); + return op; +} + +PyObject * +PyUnstable_Object_GC_NewWithExtraData(PyTypeObject *tp, size_t extra_size) +{ + size_t presize = _PyType_PreHeaderSize(tp); + PyObject *op = gc_alloc(_PyObject_SIZE(tp) + extra_size, presize); + if (op == NULL) { + return NULL; + } + memset(op, 0, _PyObject_SIZE(tp) + 
extra_size); + _PyObject_Init(op, tp); + return op; +} + +PyVarObject * +_PyObject_GC_Resize(PyVarObject *op, Py_ssize_t nitems) +{ + const size_t basicsize = _PyObject_VAR_SIZE(Py_TYPE(op), nitems); + const size_t presize = _PyType_PreHeaderSize(((PyObject *)op)->ob_type); + _PyObject_ASSERT((PyObject *)op, !_PyObject_GC_IS_TRACKED(op)); + if (basicsize > (size_t)PY_SSIZE_T_MAX - presize) { + return (PyVarObject *)PyErr_NoMemory(); + } + char *mem = (char *)op - presize; + mem = (char *)PyObject_Realloc(mem, presize + basicsize); + if (mem == NULL) { + return (PyVarObject *)PyErr_NoMemory(); + } + op = (PyVarObject *) (mem + presize); + Py_SET_SIZE(op, nitems); + return op; +} + +void +PyObject_GC_Del(void *op) +{ + size_t presize = _PyType_PreHeaderSize(((PyObject *)op)->ob_type); + PyGC_Head *g = AS_GC(op); + if (_PyObject_GC_IS_TRACKED(op)) { + gc_list_remove(g); +#ifdef Py_DEBUG + PyObject *exc = PyErr_GetRaisedException(); + if (PyErr_WarnExplicitFormat(PyExc_ResourceWarning, "gc", 0, + "gc", NULL, "Object of type %s is not untracked before destruction", + ((PyObject*)op)->ob_type->tp_name)) { + PyErr_WriteUnraisable(NULL); + } + PyErr_SetRaisedException(exc); +#endif + } + GCState *gcstate = get_gc_state(); + if (gcstate->generations[0].count > 0) { + gcstate->generations[0].count--; + } + PyObject_Free(((char *)op)-presize); +} + +int +PyObject_GC_IsTracked(PyObject* obj) +{ + if (_PyObject_IS_GC(obj) && _PyObject_GC_IS_TRACKED(obj)) { + return 1; + } + return 0; +} + +int +PyObject_GC_IsFinalized(PyObject *obj) +{ + if (_PyObject_IS_GC(obj) && _PyGC_FINALIZED(obj)) { + return 1; + } + return 0; +} + +void +PyUnstable_GC_VisitObjects(gcvisitobjects_t callback, void *arg) +{ + size_t i; + GCState *gcstate = get_gc_state(); + int origenstate = gcstate->enabled; + gcstate->enabled = 0; + for (i = 0; i < NUM_GENERATIONS; i++) { + PyGC_Head *gc_list, *gc; + gc_list = GEN_HEAD(gcstate, i); + for (gc = GC_NEXT(gc_list); gc != gc_list; gc = GC_NEXT(gc)) { + PyObject *op = FROM_GC(gc); + Py_INCREF(op); + int res = callback(op, arg); + Py_DECREF(op); + if (!res) { + goto done; + } + } + } +done: + gcstate->enabled = origenstate; +} diff --git a/Python/gc_free_threading.c b/Python/gc_free_threading.c new file mode 100644 index 00000000000000..207a43b68d21f5 --- /dev/null +++ b/Python/gc_free_threading.c @@ -0,0 +1,28 @@ +#include "Python.h" +#include "pycore_pystate.h" // _PyFreeListState_GET() +#include "pycore_tstate.h" // _PyThreadStateImpl + +#ifdef Py_GIL_DISABLED + +/* Clear all free lists + * All free lists are cleared during the collection of the highest generation. + * Allocated items in the free list may keep a pymalloc arena occupied. + * Clearing the free lists may give back memory to the OS earlier. + * Free-threading version: Since freelists are managed per thread, + * GC should clear all freelists by traversing all threads. 
+ */ +void +_PyGC_ClearAllFreeLists(PyInterpreterState *interp) +{ + _PyDict_ClearFreeList(interp); + + HEAD_LOCK(&_PyRuntime); + _PyThreadStateImpl *tstate = (_PyThreadStateImpl *)interp->threads.head; + while (tstate != NULL) { + _Py_ClearFreeLists(&tstate->freelist_state, 0); + tstate = (_PyThreadStateImpl *)tstate->base.next; + } + HEAD_UNLOCK(&_PyRuntime); +} + +#endif diff --git a/Python/gc_gil.c b/Python/gc_gil.c new file mode 100644 index 00000000000000..04c1c184250c60 --- /dev/null +++ b/Python/gc_gil.c @@ -0,0 +1,19 @@ +#include "Python.h" +#include "pycore_pystate.h" // _Py_ClearFreeLists() + +#ifndef Py_GIL_DISABLED + +/* Clear all free lists + * All free lists are cleared during the collection of the highest generation. + * Allocated items in the free list may keep a pymalloc arena occupied. + * Clearing the free lists may give back memory to the OS earlier. + */ +void +_PyGC_ClearAllFreeLists(PyInterpreterState *interp) +{ + _PyDict_ClearFreeList(interp); + + _Py_ClearFreeLists(&interp->freelist_state, 0); +} + +#endif diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index 8226d827cde514..68468728d44bf8 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -2377,25 +2377,19 @@ TIER_ONE_ONLY CHECK_EVAL_BREAKER(); PyCodeObject *code = _PyFrame_GetCode(frame); - _PyExecutorObject *executor = (_PyExecutorObject *)code->co_executors->executors[oparg&255]; + _PyExecutorObject *executor = code->co_executors->executors[oparg & 255]; if (executor->vm_data.valid) { Py_INCREF(executor); - if (executor->execute == _PyUOpExecute) { - current_executor = (_PyUOpExecutorObject *)executor; - GOTO_TIER_TWO(); - } - next_instr = executor->execute(executor, frame, stack_pointer); - frame = tstate->current_frame; - if (next_instr == NULL) { - goto resume_with_error; - } - stack_pointer = _PyFrame_GetStackPointer(frame); + current_executor = executor; + GOTO_TIER_TWO(); } else { - code->co_executors->executors[oparg & 255] = NULL; + /* ENTER_EXECUTOR will be the first code unit of the instruction */ + assert(oparg < 256); + code->co_executors->executors[oparg] = NULL; opcode = this_instr->op.code = executor->vm_data.opcode; this_instr->op.arg = executor->vm_data.oparg; - oparg = (oparg & (~255)) | executor->vm_data.oparg; + oparg = executor->vm_data.oparg; Py_DECREF(executor); next_instr = this_instr; DISPATCH_GOTO(); @@ -3297,12 +3291,18 @@ // Double-check that the opcode isn't instrumented or something: if (ucounter > threshold && this_instr->op.code == JUMP_BACKWARD) { OPT_STAT_INC(attempts); - int optimized = _PyOptimizer_BackEdge(frame, this_instr, next_instr, stack_pointer); + _Py_CODEUNIT *start = this_instr; + /* Back up over EXTENDED_ARGs so optimizer sees the whole instruction */ + while (oparg > 255) { + oparg >>= 8; + start--; + } + int optimized = _PyOptimizer_Optimize(frame, start, stack_pointer); if (optimized < 0) goto error; if (optimized) { // Rewind and enter the executor: - assert(this_instr->op.code == ENTER_EXECUTOR); - next_instr = this_instr; + assert(start->op.code == ENTER_EXECUTOR); + next_instr = start; this_instr[1].cache &= ((1 << OPTIMIZER_BITS_IN_COUNTER) - 1); } else { diff --git a/Python/getcopyright.c b/Python/getcopyright.c index c1f1aad9b845b1..066c2ed66acddf 100644 --- a/Python/getcopyright.c +++ b/Python/getcopyright.c @@ -4,7 +4,7 @@ static const char cprt[] = "\ -Copyright (c) 2001-2023 Python Software Foundation.\n\ +Copyright (c) 2001-2024 Python Software Foundation.\n\ All Rights Reserved.\n\ \n\ Copyright (c) 
2000 BeOpen.com.\n\ diff --git a/Python/initconfig.c b/Python/initconfig.c index 06e317907b8ec9..a6d8c176156617 100644 --- a/Python/initconfig.c +++ b/Python/initconfig.c @@ -295,6 +295,7 @@ static const char usage_envvars[] = " running a local build).\n" "PYTHON_COLORS : If this variable is set to 1, the interpreter will" " colorize various kinds of output. Setting it to 0 deactivates this behavior.\n" +"PYTHON_HISTORY : the location of a .python_history file.\n" "These variables have equivalent command-line parameters (see --help for details):\n" "PYTHONDEBUG : enable parser debug mode (-d)\n" "PYTHONDONTWRITEBYTECODE : don't write .pyc files (-B)\n" diff --git a/Python/instrumentation.c b/Python/instrumentation.c index 35b0e7a8f35c56..533aece210202b 100644 --- a/Python/instrumentation.c +++ b/Python/instrumentation.c @@ -1576,13 +1576,11 @@ _Py_Instrument(PyCodeObject *code, PyInterpreterState *interp) } _Py_Executors_InvalidateDependency(interp, code); int code_len = (int)Py_SIZE(code); - /* code->_co_firsttraceable >= code_len indicates - * that no instrumentation can be inserted. - * Exit early to avoid creating instrumentation + /* Exit early to avoid creating instrumentation * data for potential statically allocated code * objects. * See https://github.com/python/cpython/issues/108390 */ - if (code->_co_firsttraceable >= code_len) { + if (code->co_flags & CO_NO_MONITORING_EVENTS) { return 0; } if (update_instrumentation_data(code, interp)) { diff --git a/Python/marshal.c b/Python/marshal.c index 8940582c7f5328..daec7415b3fc7e 100644 --- a/Python/marshal.c +++ b/Python/marshal.c @@ -78,6 +78,7 @@ module marshal #define WFERR_UNMARSHALLABLE 1 #define WFERR_NESTEDTOODEEP 2 #define WFERR_NOMEMORY 3 +#define WFERR_CODE_NOT_ALLOWED 4 typedef struct { FILE *fp; @@ -89,6 +90,7 @@ typedef struct { char *buf; _Py_hashtable_t *hashtable; int version; + int allow_code; } WFILE; #define w_byte(c, p) do { \ @@ -225,6 +227,9 @@ w_short_pstring(const void *s, Py_ssize_t n, WFILE *p) w_byte((t) | flag, (p)); \ } while(0) +static PyObject * +_PyMarshal_WriteObjectToString(PyObject *x, int version, int allow_code); + static void w_PyLong(const PyLongObject *ob, char flag, WFILE *p) { @@ -520,7 +525,8 @@ w_complex_object(PyObject *v, char flag, WFILE *p) } Py_ssize_t i = 0; while (_PySet_NextEntry(v, &pos, &value, &hash)) { - PyObject *dump = PyMarshal_WriteObjectToString(value, p->version); + PyObject *dump = _PyMarshal_WriteObjectToString(value, + p->version, p->allow_code); if (dump == NULL) { p->error = WFERR_UNMARSHALLABLE; Py_DECREF(pairs); @@ -549,6 +555,10 @@ w_complex_object(PyObject *v, char flag, WFILE *p) Py_DECREF(pairs); } else if (PyCode_Check(v)) { + if (!p->allow_code) { + p->error = WFERR_CODE_NOT_ALLOWED; + return; + } PyCodeObject *co = (PyCodeObject *)v; PyObject *co_code = _PyCode_GetCode(co); if (co_code == NULL) { @@ -657,6 +667,7 @@ PyMarshal_WriteObjectToFile(PyObject *x, FILE *fp, int version) wf.end = wf.ptr + sizeof(buf); wf.error = WFERR_OK; wf.version = version; + wf.allow_code = 1; if (w_init_refs(&wf, version)) { return; /* caller must check PyErr_Occurred() */ } @@ -674,6 +685,7 @@ typedef struct { char *buf; Py_ssize_t buf_size; PyObject *refs; /* a list */ + int allow_code; } RFILE; static const char * @@ -1364,6 +1376,11 @@ r_object(RFILE *p) PyObject* linetable = NULL; PyObject *exceptiontable = NULL; + if (!p->allow_code) { + PyErr_SetString(PyExc_ValueError, + "unmarshalling code objects is disallowed"); + break; + } idx = r_ref_reserve(flag, p); if (idx < 0) 
break; @@ -1609,6 +1626,7 @@ PyMarshal_ReadObjectFromFile(FILE *fp) { RFILE rf; PyObject *result; + rf.allow_code = 1; rf.fp = fp; rf.readable = NULL; rf.depth = 0; @@ -1629,6 +1647,7 @@ PyMarshal_ReadObjectFromString(const char *str, Py_ssize_t len) { RFILE rf; PyObject *result; + rf.allow_code = 1; rf.fp = NULL; rf.readable = NULL; rf.ptr = str; @@ -1645,8 +1664,8 @@ PyMarshal_ReadObjectFromString(const char *str, Py_ssize_t len) return result; } -PyObject * -PyMarshal_WriteObjectToString(PyObject *x, int version) +static PyObject * +_PyMarshal_WriteObjectToString(PyObject *x, int version, int allow_code) { WFILE wf; @@ -1661,6 +1680,7 @@ PyMarshal_WriteObjectToString(PyObject *x, int version) wf.end = wf.ptr + PyBytes_GET_SIZE(wf.str); wf.error = WFERR_OK; wf.version = version; + wf.allow_code = allow_code; if (w_init_refs(&wf, version)) { Py_DECREF(wf.str); return NULL; @@ -1674,17 +1694,35 @@ PyMarshal_WriteObjectToString(PyObject *x, int version) } if (wf.error != WFERR_OK) { Py_XDECREF(wf.str); - if (wf.error == WFERR_NOMEMORY) + switch (wf.error) { + case WFERR_NOMEMORY: PyErr_NoMemory(); - else + break; + case WFERR_NESTEDTOODEEP: PyErr_SetString(PyExc_ValueError, - (wf.error==WFERR_UNMARSHALLABLE)?"unmarshallable object" - :"object too deeply nested to marshal"); + "object too deeply nested to marshal"); + break; + case WFERR_CODE_NOT_ALLOWED: + PyErr_SetString(PyExc_ValueError, + "marshalling code objects is disallowed"); + break; + default: + case WFERR_UNMARSHALLABLE: + PyErr_SetString(PyExc_ValueError, + "unmarshallable object"); + break; + } return NULL; } return wf.str; } +PyObject * +PyMarshal_WriteObjectToString(PyObject *x, int version) +{ + return _PyMarshal_WriteObjectToString(x, version, 1); +} + /* And an interface for Python programs... */ /*[clinic input] marshal.dump @@ -1696,6 +1734,9 @@ marshal.dump version: int(c_default="Py_MARSHAL_VERSION") = version Indicates the data format that dump should use. / + * + allow_code: bool = True + Allow to write code objects. Write the value on the open file. @@ -1706,14 +1747,14 @@ to the file. The object will not be properly read back by load(). static PyObject * marshal_dump_impl(PyObject *module, PyObject *value, PyObject *file, - int version) -/*[clinic end generated code: output=aaee62c7028a7cb2 input=6c7a3c23c6fef556]*/ + int version, int allow_code) +/*[clinic end generated code: output=429e5fd61c2196b9 input=041f7f6669b0aafb]*/ { /* XXX Quick hack -- need to do this differently */ PyObject *s; PyObject *res; - s = PyMarshal_WriteObjectToString(value, version); + s = _PyMarshal_WriteObjectToString(value, version, allow_code); if (s == NULL) return NULL; res = PyObject_CallMethodOneArg(file, &_Py_ID(write), s); @@ -1727,6 +1768,9 @@ marshal.load file: object Must be readable binary file. / + * + allow_code: bool = True + Allow to load code objects. Read one value from the open file and return it. @@ -1739,8 +1783,8 @@ dump(), load() will substitute None for the unmarshallable type. 
[clinic start generated code]*/ static PyObject * -marshal_load(PyObject *module, PyObject *file) -/*[clinic end generated code: output=f8e5c33233566344 input=c85c2b594cd8124a]*/ +marshal_load_impl(PyObject *module, PyObject *file, int allow_code) +/*[clinic end generated code: output=0c1aaf3546ae3ed3 input=2dca7b570653b82f]*/ { PyObject *data, *result; RFILE rf; @@ -1762,6 +1806,7 @@ marshal_load(PyObject *module, PyObject *file) result = NULL; } else { + rf.allow_code = allow_code; rf.depth = 0; rf.fp = NULL; rf.readable = file; @@ -1787,6 +1832,9 @@ marshal.dumps version: int(c_default="Py_MARSHAL_VERSION") = version Indicates the data format that dumps should use. / + * + allow_code: bool = True + Allow to write code objects. Return the bytes object that would be written to a file by dump(value, file). @@ -1795,10 +1843,11 @@ unsupported type. [clinic start generated code]*/ static PyObject * -marshal_dumps_impl(PyObject *module, PyObject *value, int version) -/*[clinic end generated code: output=9c200f98d7256cad input=a2139ea8608e9b27]*/ +marshal_dumps_impl(PyObject *module, PyObject *value, int version, + int allow_code) +/*[clinic end generated code: output=115f90da518d1d49 input=167eaecceb63f0a8]*/ { - return PyMarshal_WriteObjectToString(value, version); + return _PyMarshal_WriteObjectToString(value, version, allow_code); } /*[clinic input] @@ -1806,6 +1855,9 @@ marshal.loads bytes: Py_buffer / + * + allow_code: bool = True + Allow to load code objects. Convert the bytes-like object to a value. @@ -1814,13 +1866,14 @@ bytes in the input are ignored. [clinic start generated code]*/ static PyObject * -marshal_loads_impl(PyObject *module, Py_buffer *bytes) -/*[clinic end generated code: output=9fc65985c93d1bb1 input=6f426518459c8495]*/ +marshal_loads_impl(PyObject *module, Py_buffer *bytes, int allow_code) +/*[clinic end generated code: output=62c0c538d3edc31f input=14de68965b45aaa7]*/ { RFILE rf; char *s = bytes->buf; Py_ssize_t n = bytes->len; PyObject* result; + rf.allow_code = allow_code; rf.fp = NULL; rf.readable = NULL; rf.ptr = s; diff --git a/Python/optimizer.c b/Python/optimizer.c index f27af14d967cd3..1551a5ef61f892 100644 --- a/Python/optimizer.c +++ b/Python/optimizer.c @@ -2,12 +2,11 @@ #include "opcode.h" #include "pycore_interp.h" #include "pycore_bitutils.h" // _Py_popcount32() -#include "pycore_opcode_metadata.h" // _PyOpcode_OpName() +#include "pycore_opcode_metadata.h" // _PyOpcode_OpName[] #include "pycore_opcode_utils.h" // MAX_REAL_OPCODE #include "pycore_optimizer.h" // _Py_uop_analyze_and_optimize() #include "pycore_pystate.h" // _PyInterpreterState_GET() #include "pycore_uop_ids.h" -#include "pycore_uops.h" #include "cpython/optimizer.h" #include #include @@ -17,6 +16,8 @@ #include "pycore_uop_metadata.h" // Uop tables #undef NEED_OPCODE_METADATA +#define UOP_MAX_TRACE_LENGTH 512 + #define MAX_EXECUTORS_SIZE 256 @@ -162,24 +163,22 @@ PyUnstable_SetOptimizer(_PyOptimizerObject *optimizer) } int -_PyOptimizer_BackEdge(_PyInterpreterFrame *frame, _Py_CODEUNIT *src, _Py_CODEUNIT *dest, PyObject **stack_pointer) +_PyOptimizer_Optimize(_PyInterpreterFrame *frame, _Py_CODEUNIT *start, PyObject **stack_pointer) { - assert(src->op.code == JUMP_BACKWARD); PyCodeObject *code = (PyCodeObject *)frame->f_executable; assert(PyCode_Check(code)); PyInterpreterState *interp = _PyInterpreterState_GET(); - if (!has_space_for_executor(code, src)) { + if (!has_space_for_executor(code, start)) { return 0; } _PyOptimizerObject *opt = interp->optimizer; _PyExecutorObject *executor = 
NULL; - /* Start optimizing at the destination to guarantee forward progress */ - int err = opt->optimize(opt, code, dest, &executor, (int)(stack_pointer - _PyFrame_Stackbase(frame))); + int err = opt->optimize(opt, code, start, &executor, (int)(stack_pointer - _PyFrame_Stackbase(frame))); if (err <= 0) { assert(executor == NULL); return err; } - int index = get_index_for_executor(code, src); + int index = get_index_for_executor(code, start); if (index < 0) { /* Out of memory. Don't raise and assume that the * error will show up elsewhere. @@ -190,7 +189,7 @@ _PyOptimizer_BackEdge(_PyInterpreterFrame *frame, _Py_CODEUNIT *src, _Py_CODEUNI Py_DECREF(executor); return 0; } - insert_executor(code, src, index, executor); + insert_executor(code, start, index, executor); Py_DECREF(executor); return 1; } @@ -212,27 +211,6 @@ PyUnstable_GetExecutor(PyCodeObject *code, int offset) return NULL; } -/** Test support **/ - - -typedef struct { - _PyOptimizerObject base; - int64_t count; -} _PyCounterOptimizerObject; - -typedef struct { - _PyExecutorObject executor; - _PyCounterOptimizerObject *optimizer; - _Py_CODEUNIT *next_instr; -} _PyCounterExecutorObject; - -static void -counter_dealloc(_PyCounterExecutorObject *self) { - _Py_ExecutorClear((_PyExecutorObject *)self); - Py_DECREF(self->optimizer); - PyObject_Free(self); -} - static PyObject * is_valid(PyObject *self, PyObject *Py_UNUSED(ignored)) { @@ -244,89 +222,11 @@ static PyMethodDef executor_methods[] = { { NULL, NULL }, }; -PyTypeObject _PyCounterExecutor_Type = { - PyVarObject_HEAD_INIT(&PyType_Type, 0) - .tp_name = "counting_executor", - .tp_basicsize = sizeof(_PyCounterExecutorObject), - .tp_itemsize = 0, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION, - .tp_dealloc = (destructor)counter_dealloc, - .tp_methods = executor_methods, -}; - -static _Py_CODEUNIT * -counter_execute(_PyExecutorObject *self, _PyInterpreterFrame *frame, PyObject **stack_pointer) -{ - ((_PyCounterExecutorObject *)self)->optimizer->count++; - _PyFrame_SetStackPointer(frame, stack_pointer); - Py_DECREF(self); - return ((_PyCounterExecutorObject *)self)->next_instr; -} - -static int -counter_optimize( - _PyOptimizerObject* self, - PyCodeObject *code, - _Py_CODEUNIT *instr, - _PyExecutorObject **exec_ptr, - int Py_UNUSED(curr_stackentries) -) -{ - _PyCounterExecutorObject *executor = (_PyCounterExecutorObject *)_PyObject_New(&_PyCounterExecutor_Type); - if (executor == NULL) { - return -1; - } - executor->executor.execute = counter_execute; - Py_INCREF(self); - executor->optimizer = (_PyCounterOptimizerObject *)self; - executor->next_instr = instr; - *exec_ptr = (_PyExecutorObject *)executor; - _PyBloomFilter empty; - _Py_BloomFilter_Init(&empty); - _Py_ExecutorInit((_PyExecutorObject *)executor, &empty); - return 1; -} - -static PyObject * -counter_get_counter(PyObject *self, PyObject *args) -{ - return PyLong_FromLongLong(((_PyCounterOptimizerObject *)self)->count); -} - -static PyMethodDef counter_optimizer_methods[] = { - { "get_count", counter_get_counter, METH_NOARGS, NULL }, - { NULL, NULL }, -}; - -PyTypeObject _PyCounterOptimizer_Type = { - PyVarObject_HEAD_INIT(&PyType_Type, 0) - .tp_name = "Counter optimizer", - .tp_basicsize = sizeof(_PyCounterOptimizerObject), - .tp_itemsize = 0, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION, - .tp_methods = counter_optimizer_methods, - .tp_dealloc = (destructor)PyObject_Del, -}; - -PyObject * -PyUnstable_Optimizer_NewCounter(void) -{ - _PyCounterOptimizerObject *opt = 
(_PyCounterOptimizerObject *)_PyObject_New(&_PyCounterOptimizer_Type); - if (opt == NULL) { - return NULL; - } - opt->base.optimize = counter_optimize; - opt->base.resume_threshold = INT16_MAX; - opt->base.backedge_threshold = 0; - opt->count = 0; - return (PyObject *)opt; -} - ///////////////////// Experimental UOp Optimizer ///////////////////// static void -uop_dealloc(_PyUOpExecutorObject *self) { - _Py_ExecutorClear((_PyExecutorObject *)self); +uop_dealloc(_PyExecutorObject *self) { + _Py_ExecutorClear(self); PyObject_Free(self); } @@ -337,13 +237,13 @@ _PyUOpName(int index) } static Py_ssize_t -uop_len(_PyUOpExecutorObject *self) +uop_len(_PyExecutorObject *self) { return Py_SIZE(self); } static PyObject * -uop_item(_PyUOpExecutorObject *self, Py_ssize_t index) +uop_item(_PyExecutorObject *self, Py_ssize_t index) { Py_ssize_t len = uop_len(self); if (index < 0 || index >= len) { @@ -381,7 +281,7 @@ PySequenceMethods uop_as_sequence = { PyTypeObject _PyUOpExecutor_Type = { PyVarObject_HEAD_INIT(&PyType_Type, 0) .tp_name = "uop_executor", - .tp_basicsize = sizeof(_PyUOpExecutorObject) - sizeof(_PyUOpInstruction), + .tp_basicsize = offsetof(_PyExecutorObject, trace), .tp_itemsize = sizeof(_PyUOpInstruction), .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION, .tp_dealloc = (destructor)uop_dealloc, @@ -415,38 +315,6 @@ BRANCH_TO_GUARD[4][2] = { #define CONFIDENCE_RANGE 1000 #define CONFIDENCE_CUTOFF 333 -/* Returns 1 on success, - * 0 if it failed to produce a worthwhile trace, - * and -1 on an error. - */ -static int -translate_bytecode_to_trace( - PyCodeObject *code, - _Py_CODEUNIT *instr, - _PyUOpInstruction *trace, - int buffer_size, - _PyBloomFilter *dependencies) -{ - PyCodeObject *initial_code = code; - _Py_BloomFilter_Add(dependencies, initial_code); - _Py_CODEUNIT *initial_instr = instr; - int trace_length = 0; - int max_length = buffer_size; - struct { - PyCodeObject *code; - _Py_CODEUNIT *instr; - } trace_stack[TRACE_STACK_SIZE]; - int trace_stack_depth = 0; - int confidence = CONFIDENCE_RANGE; // Adjusted by branch instructions - -#ifdef Py_DEBUG - char *python_lltrace = Py_GETENV("PYTHON_LLTRACE"); - int lltrace = 0; - if (python_lltrace != NULL && *python_lltrace >= '0') { - lltrace = *python_lltrace - '0'; // TODO: Parse an int and all that - } -#endif - #ifdef Py_DEBUG #define DPRINTF(level, ...) \ if (lltrace >= (level)) { printf(__VA_ARGS__); } @@ -502,6 +370,39 @@ translate_bytecode_to_trace( code = trace_stack[trace_stack_depth].code; \ instr = trace_stack[trace_stack_depth].instr; +/* Returns 1 on success, + * 0 if it failed to produce a worthwhile trace, + * and -1 on an error. 
+ */ +static int +translate_bytecode_to_trace( + PyCodeObject *code, + _Py_CODEUNIT *instr, + _PyUOpInstruction *trace, + int buffer_size, + _PyBloomFilter *dependencies) +{ + bool progress_needed = true; + PyCodeObject *initial_code = code; + _Py_BloomFilter_Add(dependencies, initial_code); + _Py_CODEUNIT *initial_instr = instr; + int trace_length = 0; + int max_length = buffer_size; + struct { + PyCodeObject *code; + _Py_CODEUNIT *instr; + } trace_stack[TRACE_STACK_SIZE]; + int trace_stack_depth = 0; + int confidence = CONFIDENCE_RANGE; // Adjusted by branch instructions + +#ifdef Py_DEBUG + char *python_lltrace = Py_GETENV("PYTHON_LLTRACE"); + int lltrace = 0; + if (python_lltrace != NULL && *python_lltrace >= '0') { + lltrace = *python_lltrace - '0'; // TODO: Parse an int and all that + } +#endif + DPRINTF(4, "Optimizing %s (%s:%d) at byte offset %d\n", PyUnicode_AsUTF8(code->co_qualname), @@ -509,6 +410,7 @@ translate_bytecode_to_trace( code->co_firstlineno, 2 * INSTR_IP(initial_instr, code)); uint32_t target = 0; + top: // Jump here after _PUSH_FRAME or likely branches for (;;) { target = INSTR_IP(instr, code); @@ -520,6 +422,14 @@ translate_bytecode_to_trace( uint32_t oparg = instr->op.arg; uint32_t extended = 0; + if (opcode == ENTER_EXECUTOR) { + assert(oparg < 256); + _PyExecutorObject *executor = code->co_executors->executors[oparg]; + opcode = executor->vm_data.opcode; + DPRINTF(2, " * ENTER_EXECUTOR -> %s\n", _PyOpcode_OpName[opcode]); + oparg = executor->vm_data.oparg; + } + if (opcode == EXTENDED_ARG) { instr++; extended = 1; @@ -530,13 +440,23 @@ translate_bytecode_to_trace( goto done; } } - - if (opcode == ENTER_EXECUTOR) { - _PyExecutorObject *executor = - (_PyExecutorObject *)code->co_executors->executors[oparg&255]; - opcode = executor->vm_data.opcode; - DPRINTF(2, " * ENTER_EXECUTOR -> %s\n", _PyOpcode_OpName[opcode]); - oparg = (oparg & 0xffffff00) | executor->vm_data.oparg; + assert(opcode != ENTER_EXECUTOR && opcode != EXTENDED_ARG); + + /* Special case the first instruction, + * so that we can guarantee forward progress */ + if (progress_needed) { + progress_needed = false; + if (opcode == JUMP_BACKWARD || opcode == JUMP_BACKWARD_NO_INTERRUPT) { + instr += 1 + _PyOpcode_Caches[opcode] - (int32_t)oparg; + initial_instr = instr; + continue; + } + else { + if (OPCODE_HAS_DEOPT(opcode)) { + opcode = _PyOpcode_Deopt[opcode]; + } + assert(!OPCODE_HAS_DEOPT(opcode)); + } } switch (opcode) { @@ -561,25 +481,28 @@ translate_bytecode_to_trace( goto done; } uint32_t uopcode = BRANCH_TO_GUARD[opcode - POP_JUMP_IF_FALSE][jump_likely]; - _Py_CODEUNIT *next_instr = instr + 1 + _PyOpcode_Caches[_PyOpcode_Deopt[opcode]]; DPRINTF(2, "%s(%d): counter=%x, bitcount=%d, likely=%d, confidence=%d, uopcode=%s\n", - _PyUOpName(opcode), oparg, + _PyOpcode_OpName[opcode], oparg, counter, bitcount, jump_likely, confidence, _PyUOpName(uopcode)); - ADD_TO_TRACE(uopcode, max_length, 0, target); + _Py_CODEUNIT *next_instr = instr + 1 + _PyOpcode_Caches[_PyOpcode_Deopt[opcode]]; + _Py_CODEUNIT *target_instr = next_instr + oparg; if (jump_likely) { - _Py_CODEUNIT *target_instr = next_instr + oparg; DPRINTF(2, "Jump likely (%x = %d bits), continue at byte offset %d\n", instr[1].cache, bitcount, 2 * INSTR_IP(target_instr, code)); instr = target_instr; + ADD_TO_TRACE(uopcode, max_length, 0, INSTR_IP(next_instr, code)); goto top; } + ADD_TO_TRACE(uopcode, max_length, 0, INSTR_IP(target_instr, code)); break; } case JUMP_BACKWARD: case JUMP_BACKWARD_NO_INTERRUPT: { - if (instr + 2 - oparg == initial_instr 
&& code == initial_code) { + _Py_CODEUNIT *target = instr + 1 + _PyOpcode_Caches[opcode] - (int)oparg; + if (target == initial_instr) { + /* We have looped round to the start */ RESERVE(1); ADD_TO_TRACE(_JUMP_TO_TOP, 0, 0, 0); } @@ -722,7 +645,7 @@ translate_bytecode_to_trace( } break; } - DPRINTF(2, "Unsupported opcode %s\n", _PyUOpName(opcode)); + DPRINTF(2, "Unsupported opcode %s\n", _PyOpcode_OpName[opcode]); OPT_UNSUPPORTED_OPCODE(opcode); goto done; // Break out of loop } // End default @@ -740,19 +663,7 @@ translate_bytecode_to_trace( } assert(code == initial_code); // Skip short traces like _SET_IP, LOAD_FAST, _SET_IP, _EXIT_TRACE - if (trace_length > 4) { - ADD_TO_TRACE(_EXIT_TRACE, 0, 0, target); - DPRINTF(1, - "Created a trace for %s (%s:%d) at byte offset %d -- length %d\n", - PyUnicode_AsUTF8(code->co_qualname), - PyUnicode_AsUTF8(code->co_filename), - code->co_firstlineno, - 2 * INSTR_IP(initial_instr, code), - trace_length); - OPT_HIST(trace_length + buffer_size - max_length, trace_length_hist); - return 1; - } - else { + if (progress_needed || trace_length < 5) { OPT_STAT_INC(trace_too_short); DPRINTF(4, "No trace for %s (%s:%d) at byte offset %d\n", @@ -760,15 +671,25 @@ translate_bytecode_to_trace( PyUnicode_AsUTF8(code->co_filename), code->co_firstlineno, 2 * INSTR_IP(initial_instr, code)); + return 0; } - return 0; + ADD_TO_TRACE(_EXIT_TRACE, 0, 0, target); + DPRINTF(1, + "Created a trace for %s (%s:%d) at byte offset %d -- length %d\n", + PyUnicode_AsUTF8(code->co_qualname), + PyUnicode_AsUTF8(code->co_filename), + code->co_firstlineno, + 2 * INSTR_IP(initial_instr, code), + trace_length); + OPT_HIST(trace_length + buffer_size - max_length, trace_length_hist); + return 1; +} #undef RESERVE #undef RESERVE_RAW #undef INSTR_IP #undef ADD_TO_TRACE #undef DPRINTF -} #define UNSET_BIT(array, bit) (array[(bit)>>5] &= ~(1<<((bit)&31))) #define SET_BIT(array, bit) (array[(bit)>>5] |= (1<<((bit)&31))) @@ -784,7 +705,7 @@ compute_used(_PyUOpInstruction *buffer, uint32_t *used) { int count = 0; SET_BIT(used, 0); - for (int i = 0; i < _Py_UOP_MAX_TRACE_LENGTH; i++) { + for (int i = 0; i < UOP_MAX_TRACE_LENGTH; i++) { if (!BIT_IS_SET(used, i)) { continue; } @@ -816,15 +737,15 @@ compute_used(_PyUOpInstruction *buffer, uint32_t *used) static _PyExecutorObject * make_executor_from_uops(_PyUOpInstruction *buffer, _PyBloomFilter *dependencies) { - uint32_t used[(_Py_UOP_MAX_TRACE_LENGTH + 31)/32] = { 0 }; + uint32_t used[(UOP_MAX_TRACE_LENGTH + 31)/32] = { 0 }; int length = compute_used(buffer, used); - _PyUOpExecutorObject *executor = PyObject_NewVar(_PyUOpExecutorObject, &_PyUOpExecutor_Type, length); + _PyExecutorObject *executor = PyObject_NewVar(_PyExecutorObject, &_PyUOpExecutor_Type, length); if (executor == NULL) { return NULL; } int dest = length - 1; /* Scan backwards, so that we see the destinations of jumps before the jumps themselves. 
*/ - for (int i = _Py_UOP_MAX_TRACE_LENGTH-1; i >= 0; i--) { + for (int i = UOP_MAX_TRACE_LENGTH-1; i >= 0; i--) { if (!BIT_IS_SET(used, i)) { continue; } @@ -843,8 +764,7 @@ make_executor_from_uops(_PyUOpInstruction *buffer, _PyBloomFilter *dependencies) dest--; } assert(dest == -1); - executor->base.execute = _PyUOpExecute; - _Py_ExecutorInit((_PyExecutorObject *)executor, dependencies); + _Py_ExecutorInit(executor, dependencies); #ifdef Py_DEBUG char *python_lltrace = Py_GETENV("PYTHON_LLTRACE"); int lltrace = 0; @@ -863,7 +783,7 @@ make_executor_from_uops(_PyUOpInstruction *buffer, _PyBloomFilter *dependencies) } } #endif - return (_PyExecutorObject *)executor; + return executor; } static int @@ -876,8 +796,8 @@ uop_optimize( { _PyBloomFilter dependencies; _Py_BloomFilter_Init(&dependencies); - _PyUOpInstruction buffer[_Py_UOP_MAX_TRACE_LENGTH]; - int err = translate_bytecode_to_trace(code, instr, buffer, _Py_UOP_MAX_TRACE_LENGTH, &dependencies); + _PyUOpInstruction buffer[UOP_MAX_TRACE_LENGTH]; + int err = translate_bytecode_to_trace(code, instr, buffer, UOP_MAX_TRACE_LENGTH, &dependencies); if (err <= 0) { // Error or nothing translated return err; @@ -885,7 +805,7 @@ uop_optimize( OPT_STAT_INC(traces_created); char *uop_optimize = Py_GETENV("PYTHONUOPSOPTIMIZE"); if (uop_optimize == NULL || *uop_optimize > '0') { - err = _Py_uop_analyze_and_optimize(code, buffer, _Py_UOP_MAX_TRACE_LENGTH, curr_stackentries); + err = _Py_uop_analyze_and_optimize(code, buffer, UOP_MAX_TRACE_LENGTH, curr_stackentries); if (err < 0) { return -1; } @@ -899,15 +819,6 @@ uop_optimize( return 1; } -/* Dummy execute() function for UOp Executor. - * The actual implementation is inlined in ceval.c, - * in _PyEval_EvalFrameDefault(). */ -_Py_CODEUNIT * -_PyUOpExecute(_PyExecutorObject *executor, _PyInterpreterFrame *frame, PyObject **stack_pointer) -{ - Py_FatalError("Tier 2 is now inlined into Tier 1"); -} - static void uop_opt_dealloc(PyObject *self) { PyObject_Free(self); @@ -937,6 +848,94 @@ PyUnstable_Optimizer_NewUOpOptimizer(void) return (PyObject *)opt; } +static void +counter_dealloc(_PyExecutorObject *self) { + PyObject *opt = (PyObject *)self->trace[0].operand; + Py_DECREF(opt); + uop_dealloc(self); +} + +PyTypeObject _PyCounterExecutor_Type = { + PyVarObject_HEAD_INIT(&PyType_Type, 0) + .tp_name = "counting_executor", + .tp_basicsize = offsetof(_PyExecutorObject, trace), + .tp_itemsize = sizeof(_PyUOpInstruction), + .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION, + .tp_dealloc = (destructor)counter_dealloc, + .tp_methods = executor_methods, +}; + +static int +counter_optimize( + _PyOptimizerObject* self, + PyCodeObject *code, + _Py_CODEUNIT *instr, + _PyExecutorObject **exec_ptr, + int Py_UNUSED(curr_stackentries) +) +{ + int oparg = instr->op.arg; + while (instr->op.code == EXTENDED_ARG) { + instr++; + oparg = (oparg << 8) | instr->op.arg; + } + if (instr->op.code != JUMP_BACKWARD) { + /* Counter optimizer can only handle backward edges */ + return 0; + } + _Py_CODEUNIT *target = instr + 1 + _PyOpcode_Caches[JUMP_BACKWARD] - oparg; + _PyUOpInstruction buffer[3] = { + { .opcode = _LOAD_CONST_INLINE_BORROW, .operand = (uintptr_t)self }, + { .opcode = _INTERNAL_INCREMENT_OPT_COUNTER }, + { .opcode = _EXIT_TRACE, .target = (uint32_t)(target - _PyCode_CODE(code)) } + }; + _PyBloomFilter empty; + _Py_BloomFilter_Init(&empty); + _PyExecutorObject *executor = make_executor_from_uops(buffer, &empty); + if (executor == NULL) { + return -1; + } + Py_INCREF(self); + Py_SET_TYPE(executor, 
&_PyCounterExecutor_Type); + *exec_ptr = executor; + return 1; +} + +static PyObject * +counter_get_counter(PyObject *self, PyObject *args) +{ + return PyLong_FromLongLong(((_PyCounterOptimizerObject *)self)->count); +} + +static PyMethodDef counter_optimizer_methods[] = { + { "get_count", counter_get_counter, METH_NOARGS, NULL }, + { NULL, NULL }, +}; + +PyTypeObject _PyCounterOptimizer_Type = { + PyVarObject_HEAD_INIT(&PyType_Type, 0) + .tp_name = "Counter optimizer", + .tp_basicsize = sizeof(_PyCounterOptimizerObject), + .tp_itemsize = 0, + .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION, + .tp_methods = counter_optimizer_methods, + .tp_dealloc = (destructor)PyObject_Del, +}; + +PyObject * +PyUnstable_Optimizer_NewCounter(void) +{ + _PyCounterOptimizerObject *opt = (_PyCounterOptimizerObject *)_PyObject_New(&_PyCounterOptimizer_Type); + if (opt == NULL) { + return NULL; + } + opt->base.optimize = counter_optimize; + opt->base.resume_threshold = INT16_MAX; + opt->base.backedge_threshold = 0; + opt->count = 0; + return (PyObject *)opt; +} + /***************************************** * Executor management diff --git a/Python/optimizer_analysis.c b/Python/optimizer_analysis.c index 4eb2d9711f5e56..7db51f0d90a453 100644 --- a/Python/optimizer_analysis.c +++ b/Python/optimizer_analysis.c @@ -5,7 +5,6 @@ #include "pycore_opcode_utils.h" #include "pycore_pystate.h" // _PyInterpreterState_GET() #include "pycore_uop_metadata.h" -#include "pycore_uops.h" #include "pycore_long.h" #include "cpython/optimizer.h" #include diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 1d8af26e4a1cb7..0d5eec06e9b458 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -1735,8 +1735,6 @@ finalize_interp_types(PyInterpreterState *interp) _PySys_FiniTypes(interp); _PyXI_FiniTypes(interp); _PyExc_Fini(interp); - _PyAsyncGen_Fini(interp); - _PyContext_Fini(interp); _PyFloat_FiniType(interp); _PyLong_FiniTypes(interp); _PyThread_FiniType(interp); @@ -1752,13 +1750,16 @@ finalize_interp_types(PyInterpreterState *interp) _PyUnicode_ClearInterned(interp); _PyDict_Fini(interp); - _PyList_Fini(interp); - _PyTuple_Fini(interp); + _PyUnicode_Fini(interp); - _PySlice_Fini(interp); + _PyFreeListState *state = _PyFreeListState_GET(); + _PyTuple_Fini(state); + _PyList_Fini(state); + _PyFloat_Fini(state); + _PySlice_Fini(state); + _PyContext_Fini(state); + _PyAsyncGen_Fini(state); - _PyUnicode_Fini(interp); - _PyFloat_Fini(interp); #ifdef Py_DEBUG _PyStaticObjects_CheckRefcnt(interp); #endif diff --git a/Python/pystate.c b/Python/pystate.c index 84e2d6ea172f2b..999976283da675 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -1455,6 +1455,16 @@ clear_datastack(PyThreadState *tstate) } } +void +_Py_ClearFreeLists(_PyFreeListState *state, int is_finalization) +{ + _PyFloat_ClearFreeList(state, is_finalization); + _PyTuple_ClearFreeList(state, is_finalization); + _PyList_ClearFreeList(state, is_finalization); + _PyContext_ClearFreeList(state, is_finalization); + _PyAsyncGen_ClearFreeLists(state, is_finalization); +} + void PyThreadState_Clear(PyThreadState *tstate) { @@ -1537,6 +1547,12 @@ PyThreadState_Clear(PyThreadState *tstate) // don't call _PyInterpreterState_SetNotRunningMain() yet. tstate->on_delete(tstate->on_delete_data); } +#ifdef Py_GIL_DISABLED + // Each thread should clear own freelists in free-threading builds. 
+ _PyFreeListState *freelist_state = &((_PyThreadStateImpl*)tstate)->freelist_state; + _Py_ClearFreeLists(freelist_state, 1); + _PySlice_ClearCache(freelist_state); +#endif _PyThreadState_ClearMimallocHeaps(tstate); @@ -2533,9 +2549,14 @@ tstate_mimalloc_bind(PyThreadState *tstate) mi_tld_t *tld = &mts->tld; _mi_tld_init(tld, &mts->heaps[_Py_MIMALLOC_HEAP_MEM]); + // Exiting threads push any remaining in-use segments to the abandoned + // pool to be re-claimed later by other threads. We use per-interpreter + // pools to keep Python objects from different interpreters separate. + tld->segments.abandoned = &tstate->interp->mimalloc.abandoned_pool; + // Initialize each heap - for (Py_ssize_t i = 0; i < _Py_MIMALLOC_HEAP_COUNT; i++) { - _mi_heap_init_ex(&mts->heaps[i], tld, _mi_arena_id_none()); + for (uint8_t i = 0; i < _Py_MIMALLOC_HEAP_COUNT; i++) { + _mi_heap_init_ex(&mts->heaps[i], tld, _mi_arena_id_none(), false, i); } // By default, object allocations use _Py_MIMALLOC_HEAP_OBJECT. diff --git a/Python/specialize.c b/Python/specialize.c index 369b962a545f4e..13e0440dd9dd0d 100644 --- a/Python/specialize.c +++ b/Python/specialize.c @@ -586,6 +586,7 @@ _PyCode_Quicken(PyCodeObject *code) static int function_kind(PyCodeObject *code); static bool function_check_args(PyObject *o, int expected_argcount, int opcode); static uint32_t function_get_version(PyObject *o, int opcode); +static uint32_t type_get_version(PyTypeObject *t, int opcode); static int specialize_module_load_attr( @@ -874,6 +875,9 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) PyObject *descr = NULL; DescriptorClassification kind = analyze_descriptor(type, name, &descr, 0); assert(descr != NULL || kind == ABSENT || kind == GETSET_OVERRIDDEN); + if (type_get_version(type, LOAD_ATTR) == 0) { + goto fail; + } switch(kind) { case OVERRIDING: SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_OVERRIDING_DESCRIPTOR); @@ -1057,6 +1061,9 @@ _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) } PyObject *descr; DescriptorClassification kind = analyze_descriptor(type, name, &descr, 1); + if (type_get_version(type, STORE_ATTR) == 0) { + goto fail; + } switch(kind) { case OVERRIDING: SPECIALIZATION_FAIL(STORE_ATTR, SPEC_FAIL_ATTR_OVERRIDING_DESCRIPTOR); @@ -1183,6 +1190,9 @@ specialize_class_load_attr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *descr = NULL; DescriptorClassification kind = 0; kind = analyze_descriptor((PyTypeObject *)owner, name, &descr, 0); + if (type_get_version((PyTypeObject *)owner, LOAD_ATTR) == 0) { + return -1; + } switch (kind) { case METHOD: case NON_DESCRIPTOR: @@ -1455,6 +1465,18 @@ function_get_version(PyObject *o, int opcode) return version; } +/* Returning 0 indicates a failure. 
*/ +static uint32_t +type_get_version(PyTypeObject *t, int opcode) +{ + uint32_t version = t->tp_version_tag; + if (version == 0) { + SPECIALIZATION_FAIL(opcode, SPEC_FAIL_OUT_OF_VERSIONS); + return 0; + } + return version; +} + void _Py_Specialize_BinarySubscr( PyObject *container, PyObject *sub, _Py_CODEUNIT *instr) @@ -1726,6 +1748,9 @@ specialize_class_call(PyObject *callable, _Py_CODEUNIT *instr, int nargs) } if (tp->tp_new == PyBaseObject_Type.tp_new) { PyFunctionObject *init = get_init_for_simple_managed_python_class(tp); + if (type_get_version(tp, CALL) == 0) { + return -1; + } if (init != NULL) { if (((PyCodeObject *)init->func_code)->co_argcount != nargs+1) { SPECIALIZATION_FAIL(CALL, SPEC_FAIL_WRONG_NUMBER_ARGUMENTS); @@ -2466,7 +2491,10 @@ _Py_Specialize_ToBool(PyObject *value, _Py_CODEUNIT *instr) SPECIALIZATION_FAIL(TO_BOOL, SPEC_FAIL_OUT_OF_VERSIONS); goto failure; } - uint32_t version = Py_TYPE(value)->tp_version_tag; + uint32_t version = type_get_version(Py_TYPE(value), TO_BOOL); + if (version == 0) { + goto failure; + } instr->op.code = TO_BOOL_ALWAYS_TRUE; write_u32(cache->version, version); assert(version); @@ -2534,7 +2562,7 @@ const struct _PyCode_DEF(8) _Py_InitCleanup = { .co_consts = (PyObject *)&_Py_SINGLETON(tuple_empty), .co_names = (PyObject *)&_Py_SINGLETON(tuple_empty), .co_exceptiontable = (PyObject *)&_Py_SINGLETON(bytes_empty), - .co_flags = CO_OPTIMIZED, + .co_flags = CO_OPTIMIZED | CO_NO_MONITORING_EVENTS, .co_localsplusnames = (PyObject *)&_Py_SINGLETON(tuple_empty), .co_localspluskinds = (PyObject *)&_Py_SINGLETON(bytes_empty), .co_filename = &_Py_ID(__init__), diff --git a/Python/stdlib_module_names.h b/Python/stdlib_module_names.h index 701bfc35cc8182..2445a5c838a7d7 100644 --- a/Python/stdlib_module_names.h +++ b/Python/stdlib_module_names.h @@ -76,6 +76,7 @@ static const char* _Py_stdlib_module_names[] = { "_string", "_strptime", "_struct", +"_suggestions", "_symtable", "_sysconfig", "_thread", diff --git a/Python/suggestions.c b/Python/suggestions.c index 1ad359b18923f3..a09b3ce6d9dab2 100644 --- a/Python/suggestions.c +++ b/Python/suggestions.c @@ -4,8 +4,6 @@ #include "pycore_pyerrors.h" // export _Py_UTF8_Edit_Cost() #include "pycore_runtime.h" // _Py_ID() -#include "stdlib_module_names.h" // _Py_stdlib_module_names - #define MAX_CANDIDATE_ITEMS 750 #define MAX_STRING_SIZE 40 @@ -178,225 +176,6 @@ _Py_CalculateSuggestions(PyObject *dir, return Py_XNewRef(suggestion); } -static PyObject * -get_suggestions_for_attribute_error(PyAttributeErrorObject *exc) -{ - PyObject *name = exc->name; // borrowed reference - PyObject *obj = exc->obj; // borrowed reference - - // Abort if we don't have an attribute name or we have an invalid one - if (name == NULL || obj == NULL || !PyUnicode_CheckExact(name)) { - return NULL; - } - - PyObject *dir = PyObject_Dir(obj); - if (dir == NULL) { - return NULL; - } - - PyObject *suggestions = _Py_CalculateSuggestions(dir, name); - Py_DECREF(dir); - return suggestions; -} - -static PyObject * -offer_suggestions_for_attribute_error(PyAttributeErrorObject *exc) -{ - PyObject* suggestion = get_suggestions_for_attribute_error(exc); - if (suggestion == NULL) { - return NULL; - } - // Add a trailer ". Did you mean: (...)?" - PyObject* result = PyUnicode_FromFormat(". 
Did you mean: %R?", suggestion); - Py_DECREF(suggestion); - return result; -} - -static PyObject * -get_suggestions_for_name_error(PyObject* name, PyFrameObject* frame) -{ - PyCodeObject *code = PyFrame_GetCode(frame); - assert(code != NULL && code->co_localsplusnames != NULL); - - PyObject *varnames = _PyCode_GetVarnames(code); - Py_DECREF(code); - if (varnames == NULL) { - return NULL; - } - PyObject *dir = PySequence_List(varnames); - Py_DECREF(varnames); - if (dir == NULL) { - return NULL; - } - - // Are we inside a method and the instance has an attribute called 'name'? - int res = PySequence_Contains(dir, &_Py_ID(self)); - if (res < 0) { - goto error; - } - if (res > 0) { - PyObject* locals = PyFrame_GetLocals(frame); - if (!locals) { - goto error; - } - PyObject* self = PyDict_GetItemWithError(locals, &_Py_ID(self)); /* borrowed */ - if (!self) { - Py_DECREF(locals); - goto error; - } - - res = PyObject_HasAttrWithError(self, name); - Py_DECREF(locals); - if (res < 0) { - goto error; - } - if (res) { - Py_DECREF(dir); - return PyUnicode_FromFormat("self.%U", name); - } - } - - PyObject *suggestions = _Py_CalculateSuggestions(dir, name); - Py_DECREF(dir); - if (suggestions != NULL || PyErr_Occurred()) { - return suggestions; - } - - dir = PySequence_List(frame->f_frame->f_globals); - if (dir == NULL) { - return NULL; - } - suggestions = _Py_CalculateSuggestions(dir, name); - Py_DECREF(dir); - if (suggestions != NULL || PyErr_Occurred()) { - return suggestions; - } - - dir = PySequence_List(frame->f_frame->f_builtins); - if (dir == NULL) { - return NULL; - } - suggestions = _Py_CalculateSuggestions(dir, name); - Py_DECREF(dir); - - return suggestions; - -error: - Py_DECREF(dir); - return NULL; -} - -static bool -is_name_stdlib_module(PyObject* name) -{ - const char* the_name = PyUnicode_AsUTF8(name); - Py_ssize_t len = Py_ARRAY_LENGTH(_Py_stdlib_module_names); - for (Py_ssize_t i = 0; i < len; i++) { - if (strcmp(the_name, _Py_stdlib_module_names[i]) == 0) { - return 1; - } - } - return 0; -} - -static PyObject * -offer_suggestions_for_name_error(PyNameErrorObject *exc) -{ - PyObject *name = exc->name; // borrowed reference - PyTracebackObject *traceback = (PyTracebackObject *) exc->traceback; // borrowed reference - // Abort if we don't have a variable name or we have an invalid one - // or if we don't have a traceback to work with - if (name == NULL || !PyUnicode_CheckExact(name) || - traceback == NULL || !Py_IS_TYPE(traceback, &PyTraceBack_Type) - ) { - return NULL; - } - - // Move to the traceback of the exception - while (1) { - PyTracebackObject *next = traceback->tb_next; - if (next == NULL || !Py_IS_TYPE(next, &PyTraceBack_Type)) { - break; - } - else { - traceback = next; - } - } - - PyFrameObject *frame = traceback->tb_frame; - assert(frame != NULL); - - PyObject* suggestion = get_suggestions_for_name_error(name, frame); - if (suggestion == NULL && PyErr_Occurred()) { - return NULL; - } - - // Add a trailer ". Did you mean: (...)?" - PyObject* result = NULL; - if (!is_name_stdlib_module(name)) { - if (suggestion == NULL) { - return NULL; - } - result = PyUnicode_FromFormat(". Did you mean: %R?", suggestion); - } else if (suggestion == NULL) { - result = PyUnicode_FromFormat(". Did you forget to import %R?", name); - } else { - result = PyUnicode_FromFormat(". Did you mean: %R? 
Or did you forget to import %R?", suggestion, name); - } - Py_XDECREF(suggestion); - return result; -} - -static PyObject * -offer_suggestions_for_import_error(PyImportErrorObject *exc) -{ - PyObject *mod_name = exc->name; // borrowed reference - PyObject *name = exc->name_from; // borrowed reference - if (name == NULL || mod_name == NULL || name == Py_None || - !PyUnicode_CheckExact(name) || !PyUnicode_CheckExact(mod_name)) { - return NULL; - } - - PyObject* mod = PyImport_GetModule(mod_name); - if (mod == NULL) { - return NULL; - } - - PyObject *dir = PyObject_Dir(mod); - Py_DECREF(mod); - if (dir == NULL) { - return NULL; - } - - PyObject *suggestion = _Py_CalculateSuggestions(dir, name); - Py_DECREF(dir); - if (!suggestion) { - return NULL; - } - - PyObject* result = PyUnicode_FromFormat(". Did you mean: %R?", suggestion); - Py_DECREF(suggestion); - return result; -} - -// Offer suggestions for a given exception. Returns a python string object containing the -// suggestions. This function returns NULL if no suggestion was found or if an exception happened, -// users must call PyErr_Occurred() to disambiguate. -PyObject * -_Py_Offer_Suggestions(PyObject *exception) -{ - PyObject *result = NULL; - assert(!PyErr_Occurred()); - if (Py_IS_TYPE(exception, (PyTypeObject*)PyExc_AttributeError)) { - result = offer_suggestions_for_attribute_error((PyAttributeErrorObject *) exception); - } else if (Py_IS_TYPE(exception, (PyTypeObject*)PyExc_NameError)) { - result = offer_suggestions_for_name_error((PyNameErrorObject *) exception); - } else if (Py_IS_TYPE(exception, (PyTypeObject*)PyExc_ImportError)) { - result = offer_suggestions_for_import_error((PyImportErrorObject *) exception); - } - return result; -} - Py_ssize_t _Py_UTF8_Edit_Cost(PyObject *a, PyObject *b, Py_ssize_t max_cost) { diff --git a/Python/symtable.c b/Python/symtable.c index 52d5932896b263..743029956e32fa 100644 --- a/Python/symtable.c +++ b/Python/symtable.c @@ -386,11 +386,6 @@ symtable_new(void) return NULL; } -/* Using a scaling factor means this should automatically adjust when - the recursion limit is adjusted for small or large C stack allocations. -*/ -#define COMPILER_STACK_FRAME_SCALE 2 - struct symtable * _PySymtable_Build(mod_ty mod, PyObject *filename, PyFutureFeatures *future) { @@ -417,9 +412,9 @@ _PySymtable_Build(mod_ty mod, PyObject *filename, PyFutureFeatures *future) } /* Be careful here to prevent overflow. 
*/ int recursion_depth = Py_C_RECURSION_LIMIT - tstate->c_recursion_remaining; - starting_recursion_depth = recursion_depth * COMPILER_STACK_FRAME_SCALE; + starting_recursion_depth = recursion_depth; st->recursion_depth = starting_recursion_depth; - st->recursion_limit = Py_C_RECURSION_LIMIT * COMPILER_STACK_FRAME_SCALE; + st->recursion_limit = Py_C_RECURSION_LIMIT; /* Make the initial symbol information gathering pass */ if (!symtable_enter_block(st, &_Py_ID(top), ModuleBlock, (void *)mod, 0, 0, 0, 0)) { @@ -977,6 +972,12 @@ update_symbols(PyObject *symbols, PyObject *scopes, } Py_DECREF(name); } + + /* Check if loop ended because of exception in PyIter_Next */ + if (PyErr_Occurred()) { + goto error; + } + Py_DECREF(itr); Py_DECREF(v_free); return 1; diff --git a/Python/traceback.c b/Python/traceback.c index abd429ac6c1f71..7a188e56c939c0 100644 --- a/Python/traceback.c +++ b/Python/traceback.c @@ -965,7 +965,11 @@ dump_traceback(int fd, PyThreadState *tstate, int write_header) unsigned int depth = 0; while (1) { if (MAX_FRAME_DEPTH <= depth) { - PUTS(fd, " ...\n"); + if (MAX_FRAME_DEPTH < depth) { + PUTS(fd, "plus "); + _Py_DumpDecimal(fd, depth); + PUTS(fd, " frames\n"); + } break; } dump_frame(fd, frame); diff --git a/README.rst b/README.rst index 4c8602c97ac8ef..fbfae16a7dbb0b 100644 --- a/README.rst +++ b/README.rst @@ -1,4 +1,4 @@ -This is Python version 3.13.0 alpha 2 +This is Python version 3.13.0 alpha 3 ===================================== .. image:: https://github.com/python/cpython/workflows/Tests/badge.svg @@ -14,7 +14,7 @@ This is Python version 3.13.0 alpha 2 :target: https://discuss.python.org/ -Copyright © 2001-2023 Python Software Foundation. All rights reserved. +Copyright © 2001-2024 Python Software Foundation. All rights reserved. See the end of this file for further copyright and license information. @@ -206,8 +206,8 @@ directories installed using ``make altinstall`` contain the major and minor version and can thus live side-by-side. ``make install`` also creates ``${prefix}/bin/python3`` which refers to ``${prefix}/bin/python3.X``. If you intend to install multiple versions using the same prefix you must decide which -version (if any) is your "primary" version. Install that version using ``make -install``. Install all other versions using ``make altinstall``. +version (if any) is your "primary" version. Install that version using +``make install``. Install all other versions using ``make altinstall``. For example, if you want to install Python 2.7, 3.6, and 3.13 with 3.13 being the primary version, you would execute ``make install`` in your 3.13 build directory @@ -224,7 +224,7 @@ Copyright and License Information --------------------------------- -Copyright © 2001-2023 Python Software Foundation. All rights reserved. +Copyright © 2001-2024 Python Software Foundation. All rights reserved. Copyright © 2000 BeOpen.com. All rights reserved. 
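The `marshal` changes earlier in this diff add a keyword-only `allow_code` parameter (defaulting to `True`) to `marshal.dump()`, `marshal.dumps()`, `marshal.load()` and `marshal.loads()`, and make both serializing and deserializing code objects raise `ValueError` when it is disabled. A minimal usage sketch, assuming an interpreter built with these changes:

```
import marshal

# Plain data round-trips regardless of allow_code.
blob = marshal.dumps({"answer": 42})
print(marshal.loads(blob, allow_code=False))   # {'answer': 42}

code = compile("1 + 1", "<demo>", "eval")
code_blob = marshal.dumps(code)                # code objects are allowed by default

try:
    marshal.loads(code_blob, allow_code=False)
except ValueError as exc:
    print(exc)                                 # unmarshalling code objects is disallowed

try:
    marshal.dumps(code, allow_code=False)
except ValueError as exc:
    print(exc)                                 # marshalling code objects is disallowed
```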
diff --git a/Tools/build/generate_sbom.py b/Tools/build/generate_sbom.py index 93d0d8a3762df3..317d48fee3a9d4 100644 --- a/Tools/build/generate_sbom.py +++ b/Tools/build/generate_sbom.py @@ -82,6 +82,14 @@ def spdx_id(value: str) -> str: return re.sub(r"[^a-zA-Z0-9.\-]+", "-", value) +def error_if(value: bool, error_message: str) -> None: + """Prints an error if a comparison fails along with a link to the devguide""" + if value: + print(error_message) + print("See 'https://devguide.python.org/developer-workflow/sbom' for more information.") + sys.exit(1) + + def filter_gitignored_paths(paths: list[str]) -> list[str]: """ Filter out paths excluded by the gitignore file. @@ -98,6 +106,7 @@ def filter_gitignored_paths(paths: list[str]) -> list[str]: # Non-matching files show up as '::' git_check_ignore_proc = subprocess.run( ["git", "check-ignore", "--verbose", "--non-matching", *paths], + cwd=CPYTHON_ROOT_DIR, check=False, stdout=subprocess.PIPE, ) @@ -206,22 +215,47 @@ def main() -> None: discover_pip_sbom_package(sbom_data) # Ensure all packages in this tool are represented also in the SBOM file. - assert {package["name"] for package in sbom_data["packages"]} == set(PACKAGE_TO_FILES) + error_if( + {package["name"] for package in sbom_data["packages"]} != set(PACKAGE_TO_FILES), + "Packages defined in SBOM tool don't match those defined in SBOM file.", + ) # Make a bunch of assertions about the SBOM data to ensure it's consistent. for package in sbom_data["packages"]: - # Properties and ID must be properly formed. - assert set(package.keys()) == REQUIRED_PROPERTIES_PACKAGE - assert package["SPDXID"] == spdx_id(f"SPDXRef-PACKAGE-{package['name']}") + error_if( + "name" not in package, + "Package is missing the 'name' field" + ) + error_if( + set(package.keys()) != REQUIRED_PROPERTIES_PACKAGE, + f"Package '{package['name']}' is missing required fields", + ) + error_if( + package["SPDXID"] != spdx_id(f"SPDXRef-PACKAGE-{package['name']}"), + f"Package '{package['name']}' has a malformed SPDXID", + ) # Version must be in the download and external references. version = package["versionInfo"] - assert version in package["downloadLocation"] - assert all(version in ref["referenceLocator"] for ref in package["externalRefs"]) + error_if( + version not in package["downloadLocation"], + f"Version '{version}' for package '{package['name']} not in 'downloadLocation' field", + ) + error_if( + any(version not in ref["referenceLocator"] for ref in package["externalRefs"]), + ( + f"Version '{version}' for package '{package['name']} not in " + f"all 'externalRefs[].referenceLocator' fields" + ), + ) # License must be on the approved list for SPDX. - assert package["licenseConcluded"] in ALLOWED_LICENSE_EXPRESSIONS, package["licenseConcluded"] + license_concluded = package["licenseConcluded"] + error_if( + license_concluded not in ALLOWED_LICENSE_EXPRESSIONS, + f"License identifier '{license_concluded}' not in SBOM tool allowlist" + ) # Regenerate file information from current data. sbom_files = [] @@ -232,11 +266,13 @@ def main() -> None: package_spdx_id = spdx_id(f"SPDXRef-PACKAGE-{name}") exclude = files.exclude or () for include in sorted(files.include): - # Find all the paths and then filter them through .gitignore. paths = glob.glob(include, root_dir=CPYTHON_ROOT_DIR, recursive=True) paths = filter_gitignored_paths(paths) - assert paths, include # Make sure that every value returns something! 
+ error_if( + len(paths) == 0, + f"No valid paths found at path '{include}' for package '{name}", + ) for path in paths: # Skip directories and excluded files diff --git a/Tools/c-analyzer/cpython/_parser.py b/Tools/c-analyzer/cpython/_parser.py index 239ed4e0266a75..444063d2148934 100644 --- a/Tools/c-analyzer/cpython/_parser.py +++ b/Tools/c-analyzer/cpython/_parser.py @@ -70,7 +70,6 @@ def clean_lines(text): Python/thread_pthread_stubs.h # only huge constants (safe but parsing is slow) -Modules/_ssl_data.h Modules/_ssl_data_31.h Modules/_ssl_data_300.h Modules/_ssl_data_111.h @@ -333,7 +332,7 @@ def clean_lines(text): _abs('Python/stdlib_module_names.h'): (5_000, 500), # These large files are currently ignored (see above). - _abs('Modules/_ssl_data.h'): (80_000, 10_000), + _abs('Modules/_ssl_data_31.h'): (80_000, 10_000), _abs('Modules/_ssl_data_300.h'): (80_000, 10_000), _abs('Modules/_ssl_data_111.h'): (80_000, 10_000), _abs('Modules/cjkcodecs/mappings_*.h'): (160_000, 2_000), diff --git a/Tools/cases_generator/analyzer.py b/Tools/cases_generator/analyzer.py index 82ef8888bfcee5..b80fa66e2a159a 100644 --- a/Tools/cases_generator/analyzer.py +++ b/Tools/cases_generator/analyzer.py @@ -1,4 +1,4 @@ -from dataclasses import dataclass +from dataclasses import dataclass, field import lexer import parser from typing import Optional @@ -22,6 +22,10 @@ class Properties: uses_locals: bool has_free: bool + pure: bool + passthrough: bool + guard: bool + def dump(self, indent: str) -> None: print(indent, end="") text = ", ".join([f"{key}: {value}" for (key, value) in self.__dict__.items()]) @@ -45,6 +49,9 @@ def from_list(properties: list["Properties"]) -> "Properties": uses_co_names=any(p.uses_co_names for p in properties), uses_locals=any(p.uses_locals for p in properties), has_free=any(p.has_free for p in properties), + pure=all(p.pure for p in properties), + passthrough=all(p.passthrough for p in properties), + guard=all(p.guard for p in properties), ) @@ -64,6 +71,9 @@ def from_list(properties: list["Properties"]) -> "Properties": uses_co_names=False, uses_locals=False, has_free=False, + pure=False, + passthrough=False, + guard=False, ) @@ -88,6 +98,9 @@ class StackItem: condition: str | None size: str peek: bool = False + type_prop: None | tuple[str, None | str] = field( + default_factory=lambda: None, init=True, compare=False, hash=False + ) def __str__(self) -> str: cond = f" if ({self.condition})" if self.condition else "" @@ -259,7 +272,9 @@ def override_error( def convert_stack_item(item: parser.StackEffect) -> StackItem: - return StackItem(item.name, item.type, item.cond, (item.size or "1")) + return StackItem( + item.name, item.type, item.cond, (item.size or "1"), type_prop=item.type_prop + ) def analyze_stack(op: parser.InstDef) -> StackEffect: @@ -377,7 +392,6 @@ def makes_escaping_api_call(instr: parser.InstDef) -> bool: return False - EXITS = { "DISPATCH", "GO_TO_INSTRUCTION", @@ -417,29 +431,49 @@ def always_exits(op: parser.InstDef) -> bool: return False +def stack_effect_only_peeks(instr: parser.InstDef) -> bool: + stack_inputs = [s for s in instr.inputs if not isinstance(s, parser.CacheEffect)] + if len(stack_inputs) != len(instr.outputs): + return False + if len(stack_inputs) == 0: + return False + if any(s.cond for s in stack_inputs) or any(s.cond for s in instr.outputs): + return False + return all( + (s.name == other.name and s.type == other.type and s.size == other.size) + for s, other in zip(stack_inputs, instr.outputs) + ) + + def compute_properties(op: parser.InstDef) 
-> Properties: has_free = ( variable_used(op, "PyCell_New") or variable_used(op, "PyCell_GET") or variable_used(op, "PyCell_SET") ) + infallible = is_infallible(op) + deopts = variable_used(op, "DEOPT_IF") + passthrough = stack_effect_only_peeks(op) and infallible return Properties( escapes=makes_escaping_api_call(op), - infallible=is_infallible(op), - deopts=variable_used(op, "DEOPT_IF"), + infallible=infallible, + deopts=deopts, oparg=variable_used(op, "oparg"), jumps=variable_used(op, "JUMPBY"), eval_breaker=variable_used(op, "CHECK_EVAL_BREAKER"), ends_with_eval_breaker=eval_breaker_at_end(op), needs_this=variable_used(op, "this_instr"), always_exits=always_exits(op), - stores_sp=variable_used(op, "STORE_SP"), + stores_sp=variable_used(op, "SYNC_SP"), tier_one_only=variable_used(op, "TIER_ONE_ONLY"), uses_co_consts=variable_used(op, "FRAME_CO_CONSTS"), uses_co_names=variable_used(op, "FRAME_CO_NAMES"), uses_locals=(variable_used(op, "GETLOCAL") or variable_used(op, "SETLOCAL")) and not has_free, has_free=has_free, + pure="pure" in op.annotations, + passthrough=passthrough, + guard=passthrough and deopts, ) @@ -686,9 +720,7 @@ def analyze_forest(forest: list[parser.AstNode]) -> Analysis: inst = instructions["BINARY_OP_INPLACE_ADD_UNICODE"] inst.family = families["BINARY_OP"] families["BINARY_OP"].members.append(inst) - opmap, first_arg, min_instrumented = assign_opcodes( - instructions, families, pseudos - ) + opmap, first_arg, min_instrumented = assign_opcodes(instructions, families, pseudos) return Analysis( instructions, uops, families, pseudos, opmap, first_arg, min_instrumented ) diff --git a/Tools/cases_generator/generators_common.py b/Tools/cases_generator/generators_common.py index 5a42a05c5c2ef2..2fc2ab115321cf 100644 --- a/Tools/cases_generator/generators_common.py +++ b/Tools/cases_generator/generators_common.py @@ -26,7 +26,9 @@ def root_relative_path(filename: str) -> str: return filename -def write_header(generator: str, sources: list[str], outfile: TextIO, comment: str = "//") -> None: +def write_header( + generator: str, sources: list[str], outfile: TextIO, comment: str = "//" +) -> None: outfile.write( f"""{comment} This file is generated by {root_relative_path(generator)} {comment} from: @@ -122,7 +124,7 @@ def replace_decrefs( out.emit(f"Py_DECREF({var.name});\n") -def replace_store_sp( +def replace_sync_sp( out: CWriter, tkn: Token, tkn_iter: Iterator[Token], @@ -133,9 +135,7 @@ def replace_store_sp( next(tkn_iter) next(tkn_iter) next(tkn_iter) - out.emit_at("", tkn) stack.flush(out) - out.emit("_PyFrame_SetStackPointer(frame, stack_pointer);\n") def replace_check_eval_breaker( @@ -158,7 +158,7 @@ def replace_check_eval_breaker( "ERROR_IF": replace_error, "DECREF_INPUTS": replace_decrefs, "CHECK_EVAL_BREAKER": replace_check_eval_breaker, - "STORE_SP": replace_store_sp, + "SYNC_SP": replace_sync_sp, } ReplacementFunctionType = Callable[ @@ -209,6 +209,10 @@ def cflags(p: Properties) -> str: flags.append("HAS_ERROR_FLAG") if p.escapes: flags.append("HAS_ESCAPES_FLAG") + if p.pure: + flags.append("HAS_PURE_FLAG") + if p.passthrough: + flags.append("HAS_PASSTHROUGH_FLAG") if flags: return " | ".join(flags) else: diff --git a/Tools/cases_generator/interpreter_definition.md b/Tools/cases_generator/interpreter_definition.md index 5c4238756748a7..e87aff43762b11 100644 --- a/Tools/cases_generator/interpreter_definition.md +++ b/Tools/cases_generator/interpreter_definition.md @@ -6,7 +6,7 @@ The CPython interpreter is defined in C, meaning that the semantics of the bytecode 
instructions, the dispatching mechanism, error handling, and tracing and instrumentation are all intermixed. -This document proposes defining a custom C-like DSL for defining the +This document proposes defining a custom C-like DSL for defining the instruction semantics and tools for generating the code deriving from the instruction definitions. @@ -15,6 +15,7 @@ These tools would be used to: * Generate the tier 2 interpreter * Generate documentation for instructions * Generate metadata about instructions, such as stack use (done). +* Generate the tier 2 optimizer's abstract interpreter. Having a single definition file ensures that there is a single source of truth for bytecode semantics. @@ -45,7 +46,7 @@ passes from the semantic definition, reducing errors. As we improve the performance of CPython, we need to optimize larger regions of code, use more complex optimizations and, ultimately, translate to machine -code. +code. All of these steps introduce the possibility of more bugs, and require more code to be written. One way to mitigate this is through the use of code generators. @@ -61,7 +62,7 @@ blocks as the instructions for the tier 1 (PEP 659) interpreter. Rewriting all the instructions is tedious and error-prone, and changing the instructions is a maintenance headache as both versions need to be kept in sync. -By using a code generator and using a common source for the instructions, or +By using a code generator and using a common source for the instructions, or parts of instructions, we can reduce the potential for errors considerably. @@ -74,7 +75,7 @@ We update it as the need arises. Each op definition has a kind, a name, a stack and instruction stream effect, and a piece of C code describing its semantics:: - + ``` file: (definition | family | pseudo)+ @@ -85,7 +86,7 @@ and a piece of C code describing its semantics:: "op" "(" NAME "," stack_effect ")" "{" C-code "}" | "macro" "(" NAME ")" "=" uop ("+" uop)* ";" - + stack_effect: "(" [inputs] "--" [outputs] ")" @@ -108,7 +109,10 @@ and a piece of C code describing its semantics:: NAME [":" type] [ "if" "(" C-expression ")" ] type: - NAME ["*"] + NAME ["*"] | type_prop + + type_prop: + "&" "(" NAME ["+" NAME] ")" stream: NAME "/" size @@ -138,7 +142,27 @@ The following definitions may occur: The optional `type` in an `object` is the C type. It defaults to `PyObject *`. The objects before the "--" are the objects on top of the stack at the start of the instruction. Those after the "--" are the objects on top of the stack at the -end of the instruction. +end of the instruction. When prefixed by a `&`, the `type` production rule follows the +`type_prop` production rule. This indicates the type of the value is of that specific type +after the operation. In this case, the type may also contain 64-bit refinement information +that is fetched from a previously defined operand in the instruction header, such as +a type version tag. This follows the format `type + refinement`. The list of possible types +and their refinements are below. 
They obey the following predicates: + + +* `PYLONG_TYPE`: `Py_TYPE(val) == &PyLong_Type` +* `PYFLOAT_TYPE`: `Py_TYPE(val) == &PyFloat_Type` +* `PYUNICODE_TYPE`: `Py_TYPE(val) == &PyUnicode_Type` +* `NULL_TYPE`: `val == NULL` +* `GUARD_TYPE_VERSION_TYPE`: `type->tp_version_tag == auxiliary` +* `GUARD_DORV_VALUES_TYPE`: `_PyDictOrValues_IsValues(obj)` +* `GUARD_DORV_VALUES_INST_ATTR_FROM_DICT_TYPE`: + `_PyDictOrValues_IsValues(obj) || _PyObject_MakeInstanceAttributesFromDict(obj, dorv)` +* `GUARD_KEYS_VERSION_TYPE`: `owner_heap_type->ht_cached_keys->dk_version == auxiliary` +* `PYMETHOD_TYPE`: `Py_TYPE(val) == &PyMethod_Type` +* `PYFUNCTION_TYPE_VERSION_TYPE`: + `PyFunction_Check(callable) && func->func_version == auxiliary && code->co_argcount == oparg + (self_or_null != NULL)` + An `inst` without `stack_effect` is a transitional form to allow the original C code definitions to be copied. It lacks information to generate anything other than the @@ -158,6 +182,15 @@ By convention cache effects (`stream`) must precede the input effects. The name `oparg` is pre-defined as a 32 bit value fetched from the instruction stream. +### Special instruction annotations + +Instruction headers may be prefixed by one or more annotations. The non-exhaustive +list of annotations and their meanings is as follows: + +* `override`. For external use by other interpreter definitions to override the current + instruction definition. +* `pure`. This instruction has no side effects. + ### Special functions/macros The C code may include special functions that are understood by the tools as @@ -168,6 +201,7 @@ Those functions include: * `DEOPT_IF(cond, instruction)`. Deoptimize if `cond` is met. * `ERROR_IF(cond, label)`. Jump to error handler at `label` if `cond` is true. * `DECREF_INPUTS()`. Generate `Py_DECREF()` calls for the input stack effects. +* `SYNC_SP()`. Synchronizes the physical stack pointer with the stack effects.
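The `pure` annotation documented above ties into the new `pure`, `passthrough` and `guard` properties added to the analyzer earlier in this patch. The following is a simplified, self-contained Python sketch of how those three flags relate; the names `SimpleStackItem`, `only_peeks` and `derive_flags` are illustrative stand-ins, but the rules themselves (a passthrough op must consume and produce exactly the same unconditional stack items and be infallible, and a guard is a passthrough op that can deoptimize via `DEOPT_IF`) mirror `stack_effect_only_peeks()` and `compute_properties()` in `Tools/cases_generator/analyzer.py`:

```
from dataclasses import dataclass


@dataclass
class SimpleStackItem:
    # Illustrative stand-in for the cases_generator StackItem: just the
    # fields that the passthrough check compares.
    name: str
    type: str = ""
    cond: str = ""
    size: str = "1"


def only_peeks(inputs: list[SimpleStackItem], outputs: list[SimpleStackItem]) -> bool:
    """True if the op leaves the stack exactly as it found it (no conditionals)."""
    if not inputs or len(inputs) != len(outputs):
        return False
    if any(s.cond for s in inputs) or any(s.cond for s in outputs):
        return False
    return all(
        i.name == o.name and i.type == o.type and i.size == o.size
        for i, o in zip(inputs, outputs)
    )


def derive_flags(inputs, outputs, annotations, infallible, uses_deopt_if):
    """Mirror the pure/passthrough/guard derivation from compute_properties()."""
    pure = "pure" in annotations
    passthrough = only_peeks(inputs, outputs) and infallible
    guard = passthrough and uses_deopt_if
    return {"pure": pure, "passthrough": passthrough, "guard": guard}


# A type-check guard such as _GUARD_BOTH_INT peeks at its operands, can only
# deoptimize (never raise), and leaves the stack untouched:
operands = [SimpleStackItem("left"), SimpleStackItem("right")]
print(derive_flags(operands, operands, set(), infallible=True, uses_deopt_if=True))
# -> {'pure': False, 'passthrough': True, 'guard': True}
```

Under these rules a guard uop carries `HAS_PASSTHROUGH_FLAG` in the generated metadata, but it only gets `HAS_PURE_FLAG` if its definition is also annotated `pure` in the DSL.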
Note that the use of `DECREF_INPUTS()` is optional -- manual calls to `Py_DECREF()` or other approaches are also acceptable @@ -412,7 +446,7 @@ rather than popping and pushing, such that `LOAD_ATTR_SLOT` would look something stack_pointer += 1; } s1 = res; - } + } next_instr += (1 + 1 + 2 + 1 + 4); stack_pointer[-1] = s1; DISPATCH(); diff --git a/Tools/cases_generator/lexer.py b/Tools/cases_generator/lexer.py index c3c2954a42083f..4f8d01c5492f51 100644 --- a/Tools/cases_generator/lexer.py +++ b/Tools/cases_generator/lexer.py @@ -216,7 +216,13 @@ def choice(*opts: str) -> str: keywords = {name.lower(): name for name in kwds} ANNOTATION = "ANNOTATION" -annotations = {"specializing", "guard", "override", "register", "replaced"} +annotations = { + "specializing", + "override", + "register", + "replaced", + "pure", +} __all__ = [] __all__.extend(kwds) @@ -324,7 +330,9 @@ def tokenize(src: str, line: int = 1, filename: str = "") -> Iterator[Token]: else: begin = line, start - linestart if kind != "\n": - yield Token(filename, kind, text, begin, (line, start - linestart + len(text))) + yield Token( + filename, kind, text, begin, (line, start - linestart + len(text)) + ) def to_text(tkns: list[Token], dedent: int = 0) -> str: diff --git a/Tools/cases_generator/opcode_metadata_generator.py b/Tools/cases_generator/opcode_metadata_generator.py index 9b7df9a54c7b3b..1826a0b645c3b8 100644 --- a/Tools/cases_generator/opcode_metadata_generator.py +++ b/Tools/cases_generator/opcode_metadata_generator.py @@ -50,6 +50,8 @@ "DEOPT", "ERROR", "ESCAPES", + "PURE", + "PASSTHROUGH", ] diff --git a/Tools/cases_generator/parsing.py b/Tools/cases_generator/parsing.py index 60c185dcef58e9..307919cb37ce1e 100644 --- a/Tools/cases_generator/parsing.py +++ b/Tools/cases_generator/parsing.py @@ -75,6 +75,11 @@ class StackEffect(Node): size: str = "" # Optional `[size]` # Note: size cannot be combined with type or cond + # Optional `(type, refinement)` + type_prop: None | tuple[str, None | str] = field( + default_factory=lambda: None, init=True, compare=False, hash=False + ) + def __repr__(self) -> str: items = [self.name, self.type, self.cond, self.size] while items and items[-1] == "": @@ -138,11 +143,13 @@ class Family(Node): @dataclass class Pseudo(Node): name: str - flags: list[str] # instr flags to set on the pseudo instruction - targets: list[str] # opcodes this can be replaced by + flags: list[str] # instr flags to set on the pseudo instruction + targets: list[str] # opcodes this can be replaced by + AstNode = InstDef | Macro | Pseudo | Family + class Parser(PLexer): @contextual def definition(self) -> AstNode | None: @@ -253,14 +260,25 @@ def cache_effect(self) -> CacheEffect | None: @contextual def stack_effect(self) -> StackEffect | None: - # IDENTIFIER [':' IDENTIFIER [TIMES]] ['if' '(' expression ')'] + # IDENTIFIER [':' [IDENTIFIER [TIMES]] ['&' '(' IDENTIFIER ['+' IDENTIFIER] ')']] ['if' '(' expression ')'] # | IDENTIFIER '[' expression ']' if tkn := self.expect(lx.IDENTIFIER): type_text = "" + type_prop = None if self.expect(lx.COLON): - type_text = self.require(lx.IDENTIFIER).text.strip() - if self.expect(lx.TIMES): - type_text += " *" + if i := self.expect(lx.IDENTIFIER): + type_text = i.text.strip() + if self.expect(lx.TIMES): + type_text += " *" + if self.expect(lx.AND): + consumed_bracket = self.expect(lx.LPAREN) is not None + type_prop_text = self.require(lx.IDENTIFIER).text.strip() + refinement = None + if self.expect(lx.PLUS): + refinement = self.require(lx.IDENTIFIER).text.strip() + type_prop = 
(type_prop_text, refinement) + if consumed_bracket: + self.require(lx.RPAREN) cond_text = "" if self.expect(lx.IF): self.require(lx.LPAREN) @@ -277,7 +295,7 @@ def stack_effect(self) -> StackEffect | None: self.require(lx.RBRACKET) type_text = "PyObject **" size_text = size.text.strip() - return StackEffect(tkn.text, type_text, cond_text, size_text) + return StackEffect(tkn.text, type_text, cond_text, size_text, type_prop) return None @contextual @@ -364,7 +382,9 @@ def family_def(self) -> Family | None: if self.expect(lx.COMMA): if not (size := self.expect(lx.IDENTIFIER)): if not (size := self.expect(lx.NUMBER)): - raise self.make_syntax_error("Expected identifier or number") + raise self.make_syntax_error( + "Expected identifier or number" + ) if self.expect(lx.RPAREN): if self.expect(lx.EQUALS): if not self.expect(lx.LBRACE): diff --git a/Tools/cases_generator/stack.py b/Tools/cases_generator/stack.py index d351037a663ca2..f62ece43c1be7f 100644 --- a/Tools/cases_generator/stack.py +++ b/Tools/cases_generator/stack.py @@ -3,6 +3,8 @@ from dataclasses import dataclass from cwriter import CWriter +UNUSED = {"unused"} + def maybe_parenthesize(sym: str) -> str: """Add parentheses around a string if it contains an operator @@ -29,6 +31,7 @@ def var_size(var: StackItem) -> str: else: return var.size + @dataclass class StackOffset: "The stack offset of the virtual base of the stack from the physical stack pointer" @@ -47,10 +50,7 @@ def push(self, item: StackItem) -> None: self.pushed.append(var_size(item)) def __sub__(self, other: "StackOffset") -> "StackOffset": - return StackOffset( - self.popped + other.pushed, - self.pushed + other.popped - ) + return StackOffset(self.popped + other.pushed, self.pushed + other.popped) def __neg__(self) -> "StackOffset": return StackOffset(self.pushed, self.popped) @@ -134,18 +134,18 @@ def pop(self, var: StackItem) -> str: ) if popped.name == var.name: return "" - elif popped.name == "unused": + elif popped.name in UNUSED: self.defined.add(var.name) return ( f"{var.name} = {indirect}stack_pointer[{self.top_offset.to_c()}];\n" ) - elif var.name == "unused": + elif var.name in UNUSED: return "" else: self.defined.add(var.name) return f"{var.name} = {popped.name};\n" self.base_offset.pop(var) - if var.name == "unused": + if var.name in UNUSED: return "" else: self.defined.add(var.name) @@ -159,7 +159,7 @@ def pop(self, var: StackItem) -> str: def push(self, var: StackItem) -> str: self.variables.append(var) - if var.is_array() and var.name not in self.defined and var.name != "unused": + if var.is_array() and var.name not in self.defined and var.name not in UNUSED: c_offset = self.top_offset.to_c() self.top_offset.push(var) self.defined.add(var.name) @@ -169,10 +169,11 @@ def push(self, var: StackItem) -> str: return "" def flush(self, out: CWriter) -> None: + out.start_line() for var in self.variables: if not var.peek: cast = "(PyObject *)" if var.type else "" - if var.name != "unused" and not var.is_array(): + if var.name not in UNUSED and not var.is_array(): if var.condition: out.emit(f"if ({var.condition}) ") out.emit( @@ -189,6 +190,7 @@ def flush(self, out: CWriter) -> None: self.base_offset.clear() self.top_offset.clear() self.peek_offset.clear() + out.start_line() def as_comment(self) -> str: return f"/* Variables: {[v.name for v in self.variables]}. Base offset: {self.base_offset.to_c()}. 
Top offset: {self.top_offset.to_c()} */" diff --git a/Tools/clinic/clinic.py b/Tools/clinic/clinic.py index f6f95580f1a177..c247bd075321cd 100755 --- a/Tools/clinic/clinic.py +++ b/Tools/clinic/clinic.py @@ -16,7 +16,6 @@ import dataclasses as dc import enum import functools -import hashlib import inspect import io import itertools @@ -24,7 +23,6 @@ import pprint import re import shlex -import string import sys import textwrap @@ -271,24 +269,6 @@ def __init__(self) -> None: self.unlock: list[str] = [] -class FormatCounterFormatter(string.Formatter): - """ - This counts how many instances of each formatter - "replacement string" appear in the format string. - - e.g. after evaluating "string {a}, {b}, {c}, {a}" - the counts dict would now look like - {'a': 2, 'b': 1, 'c': 1} - """ - def __init__(self) -> None: - self.counts = collections.Counter[str]() - - def get_value( - self, key: str, args: object, kwargs: object # type: ignore[override] - ) -> Literal['']: - self.counts[key] += 1 - return '' - class Language(metaclass=abc.ABCMeta): start_line = "" @@ -342,7 +322,7 @@ def assert_only_one( fields = ['dsl_name'] fields.extend(additional_fields) line: str = getattr(self, attr) - fcf = FormatCounterFormatter() + fcf = libclinic.FormatCounterFormatter() fcf.format(line) def local_fail(should_be_there_but_isnt: bool) -> None: if should_be_there_but_isnt: @@ -1792,21 +1772,6 @@ def render_function( return clinic.get_destination('block').dump() -def create_regex( - before: str, - after: str, - word: bool = True, - whole_line: bool = True -) -> re.Pattern[str]: - """Create an re object for matching marker lines.""" - group_re = r"\w+" if word else ".+" - pattern = r'{}({}){}' - if whole_line: - pattern = '^' + pattern + '$' - pattern = pattern.format(re.escape(before), group_re, re.escape(after)) - return re.compile(pattern) - - @dc.dataclass(slots=True, repr=False) class Block: r""" @@ -1905,8 +1870,9 @@ def __init__( self.language = language before, _, after = language.start_line.partition('{dsl_name}') assert _ == '{dsl_name}' - self.find_start_re = create_regex(before, after, whole_line=False) - self.start_re = create_regex(before, after) + self.find_start_re = libclinic.create_regex(before, after, + whole_line=False) + self.start_re = libclinic.create_regex(before, after) self.verify = verify self.last_checksum_re: re.Pattern[str] | None = None self.last_dsl_name: str | None = None @@ -1995,7 +1961,7 @@ def is_stop_line(line: str) -> bool: else: before, _, after = self.language.checksum_line.format(dsl_name=dsl_name, arguments='{arguments}').partition('{arguments}') assert _ == '{arguments}' - checksum_re = create_regex(before, after, word=False) + checksum_re = libclinic.create_regex(before, after, word=False) self.last_dsl_name = dsl_name self.last_checksum_re = checksum_re assert checksum_re is not None @@ -2029,7 +1995,7 @@ def is_stop_line(line: str) -> bool: else: checksum = d['checksum'] - computed = compute_checksum(output, len(checksum)) + computed = libclinic.compute_checksum(output, len(checksum)) if checksum != computed: fail("Checksum mismatch! " f"Expected {checksum!r}, computed {computed!r}. 
" @@ -2142,8 +2108,8 @@ def print_block( write(output) arguments = "output={output} input={input}".format( - output=compute_checksum(output, 16), - input=compute_checksum(input, 16) + output=libclinic.compute_checksum(output, 16), + input=libclinic.compute_checksum(input, 16) ) write(self.language.checksum_line.format(dsl_name=dsl_name, arguments=arguments)) write("\n") @@ -2245,27 +2211,6 @@ def dump(self) -> str: extensions['py'] = PythonLanguage -def write_file(filename: str, new_contents: str) -> None: - try: - with open(filename, encoding="utf-8") as fp: - old_contents = fp.read() - - if old_contents == new_contents: - # no change: avoid modifying the file modification time - return - except FileNotFoundError: - pass - # Atomic write using a temporary file and os.replace() - filename_new = f"{filename}.new" - with open(filename_new, "w", encoding="utf-8") as fp: - fp.write(new_contents) - try: - os.replace(filename_new, filename) - except: - os.unlink(filename_new) - raise - - ClassDict = dict[str, "Class"] DestinationDict = dict[str, Destination] ModuleDict = dict[str, "Module"] @@ -2505,7 +2450,8 @@ def parse(self, input: str) -> str: core_includes=True, limited_capi=self.limited_capi, header_includes=self.includes) - write_file(destination.filename, printer_2.f.getvalue()) + libclinic.write_file(destination.filename, + printer_2.f.getvalue()) continue return printer.f.getvalue() @@ -2578,18 +2524,7 @@ def parse_file( limited_capi=limited_capi) cooked = clinic.parse(raw) - write_file(output, cooked) - - -def compute_checksum( - input: str | None, - length: int | None = None -) -> str: - input = input or '' - s = hashlib.sha1(input.encode('utf-8')).hexdigest() - if length: - s = s[:length] - return s + libclinic.write_file(output, cooked) class PythonParser: diff --git a/Tools/clinic/libclinic/__init__.py b/Tools/clinic/libclinic/__init__.py index d4e7a0c5cf7b76..1b300b55acc21e 100644 --- a/Tools/clinic/libclinic/__init__.py +++ b/Tools/clinic/libclinic/__init__.py @@ -15,6 +15,12 @@ wrap_declarations, wrapped_c_string_literal, ) +from .utils import ( + FormatCounterFormatter, + compute_checksum, + create_regex, + write_file, +) __all__ = [ @@ -32,6 +38,12 @@ "suffix_all_lines", "wrap_declarations", "wrapped_c_string_literal", + + # Utility functions + "FormatCounterFormatter", + "compute_checksum", + "create_regex", + "write_file", ] diff --git a/Tools/clinic/libclinic/utils.py b/Tools/clinic/libclinic/utils.py new file mode 100644 index 00000000000000..d2d09387a73d1e --- /dev/null +++ b/Tools/clinic/libclinic/utils.py @@ -0,0 +1,68 @@ +import collections +import hashlib +import os +import re +import string +from typing import Literal + + +def write_file(filename: str, new_contents: str) -> None: + """Write new content to file, iff the content changed.""" + try: + with open(filename, encoding="utf-8") as fp: + old_contents = fp.read() + + if old_contents == new_contents: + # no change: avoid modifying the file modification time + return + except FileNotFoundError: + pass + # Atomic write using a temporary file and os.replace() + filename_new = f"{filename}.new" + with open(filename_new, "w", encoding="utf-8") as fp: + fp.write(new_contents) + try: + os.replace(filename_new, filename) + except: + os.unlink(filename_new) + raise + + +def compute_checksum(input_: str, length: int | None = None) -> str: + checksum = hashlib.sha1(input_.encode("utf-8")).hexdigest() + if length: + checksum = checksum[:length] + return checksum + + +def create_regex( + before: str, after: str, word: bool = 
True, whole_line: bool = True +) -> re.Pattern[str]: + """Create a regex object for matching marker lines.""" + group_re = r"\w+" if word else ".+" + before = re.escape(before) + after = re.escape(after) + pattern = rf"{before}({group_re}){after}" + if whole_line: + pattern = rf"^{pattern}$" + return re.compile(pattern) + + +class FormatCounterFormatter(string.Formatter): + """ + This counts how many instances of each formatter + "replacement string" appear in the format string. + + e.g. after evaluating "string {a}, {b}, {c}, {a}" + the counts dict would now look like + {'a': 2, 'b': 1, 'c': 1} + """ + + def __init__(self) -> None: + self.counts = collections.Counter[str]() + + def get_value( + self, key: str, args: object, kwargs: object # type: ignore[override] + ) -> Literal[""]: + self.counts[key] += 1 + return "" diff --git a/Tools/importbench/importbench.py b/Tools/importbench/importbench.py index 0c4b3bc73517c5..eb101fe616c587 100644 --- a/Tools/importbench/importbench.py +++ b/Tools/importbench/importbench.py @@ -165,8 +165,8 @@ def using_bytecode_benchmark(seconds, repeat): def main(import_, options): if options.source_file: - with options.source_file: - prev_results = json.load(options.source_file) + with open(options.source_file, 'r', encoding='utf-8') as source_file: + prev_results = json.load(source_file) else: prev_results = {} __builtins__.__import__ = import_ @@ -218,8 +218,8 @@ def main(import_, options): new_result/old_result) print(benchmark_name, ':', result) if options.dest_file: - with options.dest_file: - json.dump(new_results, options.dest_file, indent=2) + with open(options.dest_file, 'w', encoding='utf-8') as dest_file: + json.dump(new_results, dest_file, indent=2) if __name__ == '__main__': @@ -229,11 +229,9 @@ def main(import_, options): parser.add_argument('-b', '--builtin', dest='builtin', action='store_true', default=False, help="use the built-in __import__") parser.add_argument('-r', '--read', dest='source_file', - type=argparse.FileType('r'), help='file to read benchmark data from to compare ' 'against') parser.add_argument('-w', '--write', dest='dest_file', - type=argparse.FileType('w'), help='file to write benchmark data to') parser.add_argument('--benchmark', dest='benchmark', help='specific benchmark to run') diff --git a/Tools/msi/build.bat b/Tools/msi/build.bat index b9aab887c4939b..2fe8a475e7e3a3 100644 --- a/Tools/msi/build.bat +++ b/Tools/msi/build.bat @@ -22,6 +22,9 @@ if "%~1" EQU "--no-test-marker" (set BUILDTEST=) && shift && goto CheckOpts if "%~1" EQU "--test-marker" (set BUILDTEST=--test-marker) && shift && goto CheckOpts if "%~1" EQU "--pack" (set BUILDPACK=1) && shift && goto CheckOpts if "%~1" EQU "-r" (set REBUILD=-r) && shift && goto CheckOpts +rem %IncludeFreethreaded% is recognised by the MSI build, but not the regular build. +rem We use it to build twice and then build the installer with its extra option +if /I "%~1" EQU "--disable-gil" (set IncludeFreethreaded=true) && shift && goto CheckOpts if not defined BUILDX86 if not defined BUILDX64 if not defined BUILDARM64 (set BUILDX86=1) && (set BUILDX64=1) @@ -44,6 +47,20 @@ if errorlevel 1 exit /B %ERRORLEVEL% if defined BUILDARM64 call "%PCBUILD%build.bat" -p ARM64 -e %REBUILD% %BUILDTEST% if errorlevel 1 exit /B %ERRORLEVEL% +if /I "%IncludeFreethreaded%"=="true" ( + rem Cannot "exit /B" inside an if block because %ERRORLEVEL% will be wrong. 
+ rem We just skip everything after the first "errorlevel 1" and then exit after + if defined BUILDX86 call "%PCBUILD%build.bat" -p Win32 -d -e %REBUILD% %BUILDTEST% --disable-gil + if not errorlevel 1 if defined BUILDX86 call "%PCBUILD%build.bat" -p Win32 -e %REBUILD% %BUILDTEST% --disable-gil + + if not errorlevel 1 if defined BUILDX64 call "%PCBUILD%build.bat" -p x64 -d -e %REBUILD% %BUILDTEST% --disable-gil + if not errorlevel 1 if defined BUILDX64 call "%PCBUILD%build.bat" -p x64 -e %REBUILD% %BUILDTEST% --disable-gil + + if not errorlevel 1 if defined BUILDARM64 call "%PCBUILD%build.bat" -p ARM64 -d -e %REBUILD% %BUILDTEST% --disable-gil + if not errorlevel 1 if defined BUILDARM64 call "%PCBUILD%build.bat" -p ARM64 -e %REBUILD% %BUILDTEST% --disable-gil +) +if errorlevel 1 exit /B %ERRORLEVEL% + if defined BUILDDOC call "%PCBUILD%..\Doc\make.bat" html if errorlevel 1 exit /B %ERRORLEVEL% diff --git a/Tools/msi/bundle/Default.thm b/Tools/msi/bundle/Default.thm index d1b0f5bd9c1d4a..471d37acc33b3d 100644 --- a/Tools/msi/bundle/Default.thm +++ b/Tools/msi/bundle/Default.thm @@ -83,6 +83,7 @@ #(loc.PrecompileLabel) #(loc.Include_symbolsLabel) #(loc.Include_debugLabel) + #(loc.Include_freethreadedLabel) #(loc.CustomLocationLabel) diff --git a/Tools/msi/bundle/Default.wxl b/Tools/msi/bundle/Default.wxl index 6f8befba3a2523..1540f050159a54 100644 --- a/Tools/msi/bundle/Default.wxl +++ b/Tools/msi/bundle/Default.wxl @@ -91,6 +91,7 @@ Select Customize to review current options. &Precompile standard library Download debugging &symbols Download debu&g binaries (requires VS 2017 or later) + Download &free-threaded binaries (experimental) [ActionLikeInstallation] Progress [ActionLikeInstalling]: diff --git a/Tools/msi/bundle/bundle.targets b/Tools/msi/bundle/bundle.targets index 9c7410fe514d19..cb3effb4434843 100644 --- a/Tools/msi/bundle/bundle.targets +++ b/Tools/msi/bundle/bundle.targets @@ -72,6 +72,7 @@ + diff --git a/Tools/msi/bundle/bundle.wxs b/Tools/msi/bundle/bundle.wxs index 8b12baae31105e..9b4f072152d5c0 100644 --- a/Tools/msi/bundle/bundle.wxs +++ b/Tools/msi/bundle/bundle.wxs @@ -82,6 +82,13 @@ + + + + + + + @@ -104,6 +111,9 @@ + + + diff --git a/Tools/msi/bundle/packagegroups/freethreaded.wxs b/Tools/msi/bundle/packagegroups/freethreaded.wxs new file mode 100644 index 00000000000000..121ca34ab66157 --- /dev/null +++ b/Tools/msi/bundle/packagegroups/freethreaded.wxs @@ -0,0 +1,62 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/Tools/msi/freethreaded/freethreaded.wixproj b/Tools/msi/freethreaded/freethreaded.wixproj new file mode 100644 index 00000000000000..0b4bd055d77977 --- /dev/null +++ b/Tools/msi/freethreaded/freethreaded.wixproj @@ -0,0 +1,20 @@ + + + + {1B4502D5-B627-4F50-ABEA-4CC5A8E88265} + 2.0 + freethreaded + Package + + + + + + + + + + + + + \ No newline at end of file diff --git a/Tools/msi/freethreaded/freethreaded.wxs b/Tools/msi/freethreaded/freethreaded.wxs new file mode 100644 index 00000000000000..063aa28bf09fce --- /dev/null +++ b/Tools/msi/freethreaded/freethreaded.wxs @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + diff --git a/Tools/msi/freethreaded/freethreaded_d.wixproj b/Tools/msi/freethreaded/freethreaded_d.wixproj new file mode 100644 index 00000000000000..e1563d4f907126 --- /dev/null +++ b/Tools/msi/freethreaded/freethreaded_d.wixproj @@ -0,0 +1,20 @@ + + + + {D3677DCF-098A-4398-9FA5-8E74AC37E0DF} + 2.0 + freethreaded_d + Package + + + + + + + + + + + + + \ No newline at end of file diff 
--git a/Tools/msi/freethreaded/freethreaded_d.wxs b/Tools/msi/freethreaded/freethreaded_d.wxs new file mode 100644 index 00000000000000..cddf22a6c803d3 --- /dev/null +++ b/Tools/msi/freethreaded/freethreaded_d.wxs @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + diff --git a/Tools/msi/freethreaded/freethreaded_en-US.wxl_template b/Tools/msi/freethreaded/freethreaded_en-US.wxl_template new file mode 100644 index 00000000000000..b9747eb256d24b --- /dev/null +++ b/Tools/msi/freethreaded/freethreaded_en-US.wxl_template @@ -0,0 +1,8 @@ + + + Freethreaded Interpreter + freethreaded + Python {{ShortVersion}} ({{Bitness}}, freethreaded) + Launches the !(loc.ProductName) freethreaded interpreter. + https://www.python.org/ + diff --git a/Tools/msi/freethreaded/freethreaded_files.wxs b/Tools/msi/freethreaded/freethreaded_files.wxs new file mode 100644 index 00000000000000..adaf63c69d5ade --- /dev/null +++ b/Tools/msi/freethreaded/freethreaded_files.wxs @@ -0,0 +1,175 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Tools/msi/freethreaded/freethreaded_pdb.wixproj b/Tools/msi/freethreaded/freethreaded_pdb.wixproj new file mode 100644 index 00000000000000..789a4f55ae5191 --- /dev/null +++ b/Tools/msi/freethreaded/freethreaded_pdb.wixproj @@ -0,0 +1,20 @@ + + + + {E98E7539-64E7-4DCE-AACD-01E3ADE40EFD} + 2.0 + freethreaded_pdb + Package + + + + + + + + + + + + + \ No newline at end of file diff --git a/Tools/msi/freethreaded/freethreaded_pdb.wxs b/Tools/msi/freethreaded/freethreaded_pdb.wxs new file mode 100644 index 00000000000000..302ac416fe9275 --- /dev/null +++ b/Tools/msi/freethreaded/freethreaded_pdb.wxs @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + diff --git a/Tools/msi/lib/lib_files.wxs b/Tools/msi/lib/lib_files.wxs index a82cad596d47a6..b8e16b5fe238a0 100644 --- a/Tools/msi/lib/lib_files.wxs +++ b/Tools/msi/lib/lib_files.wxs @@ -26,10 +26,10 @@ - + - + @@ -63,10 +63,10 @@ - + - + @@ -100,10 +100,10 @@ - + - + diff --git a/Tools/msi/msi.props b/Tools/msi/msi.props index cfb3ca9e76e24c..372c4823bce07f 100644 --- a/Tools/msi/msi.props +++ b/Tools/msi/msi.props @@ -24,14 +24,14 @@ This URI is used to generate the various GUIDs used by the installer. Installers built with the same URI will upgrade each other or block when attempting to downgrade. - + By default, this is the local computer name, which will produce installers that do not interfere with other installers. Products that intend to bundle Python should rebuild these modules with their own URI to avoid conflicting with the official releases. - + The official releases use "https://www.python.org/$(ArchName)" - + This is not the same as the DownloadUrl property used in the bundle projects. 
--> @@ -39,7 +39,7 @@ $(ReleaseUri)/ - + @@ -63,13 +63,17 @@ $(MajorVersionNumber).$(MinorVersionNumber).$(Field3Value).0 - + + + false + + $([System.Math]::Floor($([System.DateTime]::Now.Subtract($([System.DateTime]::new(2001, 1, 1))).TotalDays))) $(MajorVersionNumber).$(MinorVersionNumber).$(MicroVersionNumber)dev$(RevisionNumber) $(MajorVersionNumber).$(MinorVersionNumber).$(RevisionNumber).0 - + 32-bit 64-bit @@ -91,9 +95,12 @@ PyDebugExt=$(PyDebugExt); PyArchExt=$(PyArchExt); PyTestExt=$(PyTestExt); + PydTag=$(PydTag); + FreethreadedPydTag=$(FreethreadedPydTag); OptionalFeatureName=$(OutputName); ssltag=$(OpenSSLDLLSuffix); Suffix32=$(PyArchExt); + IncludeFreethreaded=$(IncludeFreethreaded); $(DefineConstants);CRTRedist=$(CRTRedist); @@ -139,7 +146,7 @@ - + <_Uuid Include="CoreUpgradeCode"> upgradecode @@ -162,6 +169,12 @@ <_Uuid Include="PythonRegComponentGuid"> registry/$(OutputName) + <_Uuid Include="FreethreadedPythonExeComponentGuid" Condition="$(IncludeFreethreaded)"> + freethreaded/python.exe + + <_Uuid Include="FreethreadedPythonwExeComponentGuid" Condition="$(IncludeFreethreaded)"> + freethreaded/pythonw.exe + @(_Uuid->'("%(Identity)", "$(MajorVersionNumber).$(MinorVersionNumber)/%(Uri)")',',') <_GenerateCommand>import uuid; print('\n'.join('{}={}'.format(i, uuid.uuid5(uuid.UUID('c8d9733e-a70c-43ff-ab0c-e26456f11083'), '$(ReleaseUri.Replace(`{arch}`, `$(ArchName)`))' + j)) for i,j in [$(_Uuids.Replace(`"`,`'`))])) - + - + - + $(DefineConstants);@(_UuidValue,';'); diff --git a/Tools/peg_generator/pegen/keywordgen.py b/Tools/peg_generator/pegen/keywordgen.py index 82d717b72976e5..52611eae044e58 100644 --- a/Tools/peg_generator/pegen/keywordgen.py +++ b/Tools/peg_generator/pegen/keywordgen.py @@ -41,25 +41,24 @@ def main() -> None: description="Generate the Lib/keywords.py file from the grammar." 
) parser.add_argument( - "grammar", type=str, help="The file with the grammar definition in PEG format" + "grammar", help="The file with the grammar definition in PEG format" ) parser.add_argument( - "tokens_file", type=argparse.FileType("r"), help="The file with the token definitions" + "tokens_file", help="The file with the token definitions" ) parser.add_argument( "keyword_file", - type=argparse.FileType("w"), help="The path to write the keyword definitions", ) args = parser.parse_args() grammar, _, _ = build_parser(args.grammar) - with args.tokens_file as tok_file: + with open(args.tokens_file) as tok_file: all_tokens, exact_tok, non_exact_tok = generate_token_definitions(tok_file) gen = CParserGenerator(grammar, all_tokens, exact_tok, non_exact_tok, file=None) gen.collect_rules() - with args.keyword_file as thefile: + with open(args.keyword_file, 'w') as thefile: all_keywords = sorted(list(gen.keywords.keys())) all_soft_keywords = sorted(gen.soft_keywords) diff --git a/Tools/scripts/summarize_stats.py b/Tools/scripts/summarize_stats.py index 80a1280c025aca..df8a7fddfb8866 100644 --- a/Tools/scripts/summarize_stats.py +++ b/Tools/scripts/summarize_stats.py @@ -1154,12 +1154,13 @@ def to_markdown(x): print("Stats gathered on:", date.today(), file=out) -def output_stats(inputs: list[Path], json_output=TextIO | None): +def output_stats(inputs: list[Path], json_output=str | None): match len(inputs): case 1: data = load_raw_data(Path(inputs[0])) if json_output is not None: - save_raw_data(data, json_output) # type: ignore + with open(json_output, 'w', encoding='utf-8') as f: + save_raw_data(data, f) # type: ignore stats = Stats(data) output_markdown(sys.stdout, LAYOUT, stats) case 2: @@ -1195,7 +1196,6 @@ def main(): parser.add_argument( "--json-output", nargs="?", - type=argparse.FileType("w"), help="Output complete raw results to the given JSON file.", ) diff --git a/Tools/ssl/make_ssl_data.py b/Tools/ssl/make_ssl_data.py index ab1134ed8c4f77..98608716576792 100755 --- a/Tools/ssl/make_ssl_data.py +++ b/Tools/ssl/make_ssl_data.py @@ -23,7 +23,7 @@ ) parser.add_argument("srcdir", help="OpenSSL source directory") parser.add_argument( - "output", nargs="?", type=argparse.FileType("w"), default=sys.stdout + "output", nargs="?", default=None ) @@ -126,8 +126,13 @@ def main(): lines.append("") lines.extend(gen_error_codes(args)) - for line in lines: - args.output.write(line + "\n") + if args.output is None: + for line in lines: + print(line) + else: + with open(args.output, 'w') as output: + for line in lines: + print(line, file=output) if __name__ == "__main__": diff --git a/Tools/wasm/wasi.py b/Tools/wasm/wasi.py index 34c0e9375e24c8..e71b0b302a5561 100644 --- a/Tools/wasm/wasi.py +++ b/Tools/wasm/wasi.py @@ -17,11 +17,15 @@ CHECKOUT = pathlib.Path(__file__).parent.parent.parent + CROSS_BUILD_DIR = CHECKOUT / "cross-build" BUILD_DIR = CROSS_BUILD_DIR / "build" HOST_TRIPLE = "wasm32-wasi" HOST_DIR = CROSS_BUILD_DIR / HOST_TRIPLE +LOCAL_SETUP = CHECKOUT / "Modules" / "Setup.local" +LOCAL_SETUP_MARKER = "# Generated by Tools/wasm/wasi.py\n".encode("utf-8") + def updated_env(updates={}): """Create a new dict representing the environment to use. 
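A recurring cleanup in the Tools scripts above (`importbench.py`, `keywordgen.py`, `summarize_stats.py`, `make_ssl_data.py`) is dropping `argparse.FileType` in favour of plain path arguments that the script opens itself, so files are only created or opened when they are actually used and are reliably closed by a `with` block. Below is a minimal before/after sketch of the pattern; the `--write`/`dest_file` option and the `results.json` path are illustrative values borrowed from `importbench.py`:

```
import argparse
import json

# Before: argparse.FileType("w") opens (and truncates) the output file at
# argument-parsing time, even if the script later fails or never writes.
# parser.add_argument("-w", "--write", dest="dest_file",
#                     type=argparse.FileType("w"))
# ...
# json.dump(results, options.dest_file, indent=2)

# After: keep the argument as a plain path and open it explicitly when writing.
parser = argparse.ArgumentParser()
parser.add_argument("-w", "--write", dest="dest_file",
                    help="file to write benchmark data to")
options = parser.parse_args(["--write", "results.json"])

results = {"example_benchmark": [1.0, 2.0]}
if options.dest_file:
    with open(options.dest_file, "w", encoding="utf-8") as dest_file:
        json.dump(results, dest_file, indent=2)
```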
@@ -119,12 +123,11 @@ def build_python_path(): @subdir(BUILD_DIR, clean_ok=True) def configure_build_python(context, working_dir): """Configure the build/host Python.""" - local_setup = CHECKOUT / "Modules" / "Setup.local" - if local_setup.exists(): - print(f"👍 {local_setup} exists ...") + if LOCAL_SETUP.exists(): + print(f"👍 {LOCAL_SETUP} exists ...") else: - print(f"📝 Touching {local_setup} ...") - local_setup.touch() + print(f"📝 Touching {LOCAL_SETUP} ...") + LOCAL_SETUP.write_bytes(LOCAL_SETUP_MARKER) configure = [os.path.relpath(CHECKOUT / 'configure', working_dir)] if context.args: @@ -233,9 +236,10 @@ def configure_wasi_python(context, working_dir): env=updated_env(env_additions | wasi_sdk_env(context)), quiet=context.quiet) + python_wasm = working_dir / "python.wasm" exec_script = working_dir / "python.sh" with exec_script.open("w", encoding="utf-8") as file: - file.write(f'#!/bin/sh\nexec {host_runner} "$@"\n') + file.write(f'#!/bin/sh\nexec {host_runner} {python_wasm} "$@"\n') exec_script.chmod(0o755) print(f"🏃‍♀️ Created {exec_script} ... ") sys.stdout.flush() @@ -259,6 +263,17 @@ def build_all(context): for step in steps: step(context) +def clean_contents(context): + """Delete all files created by this script.""" + if CROSS_BUILD_DIR.exists(): + print(f"🧹 Deleting {CROSS_BUILD_DIR} ...") + shutil.rmtree(CROSS_BUILD_DIR) + + if LOCAL_SETUP.exists(): + with LOCAL_SETUP.open("rb") as file: + if file.read(len(LOCAL_SETUP_MARKER)) == LOCAL_SETUP_MARKER: + print(f"🧹 Deleting generated {LOCAL_SETUP} ...") + def main(): default_host_runner = (f"{shutil.which('wasmtime')} run " @@ -272,9 +287,7 @@ def main(): # Map the checkout to / to load the stdlib from /Lib. "--dir {HOST_DIR}::{GUEST_DIR} " # Set PYTHONPATH to the sysconfig data. - "--env {ENV_VAR_NAME}={ENV_VAR_VALUE} " - # Path to the WASM binary. 
- "{PYTHON_WASM}") + "--env {ENV_VAR_NAME}={ENV_VAR_VALUE}") parser = argparse.ArgumentParser() subcommands = parser.add_subparsers(dest="subcommand") @@ -291,11 +304,13 @@ def main(): "Python)") make_host = subcommands.add_parser("make-host", help="Run `make` for the host/WASI") + clean = subcommands.add_parser("clean", help="Delete files and directories " + "created by this script") for subcommand in build, configure_build, make_build, configure_host, make_host: subcommand.add_argument("--quiet", action="store_true", default=False, dest="quiet", help="Redirect output from subprocesses to a log file") - for subcommand in build, configure_build, configure_host: + for subcommand in configure_build, configure_host: subcommand.add_argument("--clean", action="store_true", default=False, dest="clean", help="Delete any relevant directories before building") @@ -310,8 +325,8 @@ def main(): "$WASI_SDK_PATH or /opt/wasi-sdk") subcommand.add_argument("--host-runner", action="store", default=default_host_runner, dest="host_runner", - help="Command template for running the WebAssembly " - "code (default meant for wasmtime 14 or newer: " + help="Command template for running the WASI host " + "(default designed for wasmtime 14 or newer: " f"`{default_host_runner}`)") context = parser.parse_args() @@ -320,7 +335,8 @@ def main(): "make-build-python": make_build_python, "configure-host": configure_wasi_python, "make-host": make_wasi_python, - "build": build_all} + "build": build_all, + "clean": clean_contents} dispatch[context.subcommand](context) diff --git a/configure b/configure index 3cc9aecafad13e..b1153df4d7ec52 100755 --- a/configure +++ b/configure @@ -16538,7 +16538,7 @@ ipv6type=unknown ipv6lib=none ipv6trylibc=no -if test "$ipv6" = "yes"; then +if test "$ipv6" = yes -a "$cross_compiling" = no; then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking ipv6 stack type" >&5 printf %s "checking ipv6 stack type... " >&6; } for i in inria kame linux-glibc linux-inet6 solaris toshiba v6d zeta; diff --git a/configure.ac b/configure.ac index 6a80a5d29a04ef..9587e6d63499aa 100644 --- a/configure.ac +++ b/configure.ac @@ -4367,7 +4367,7 @@ ipv6type=unknown ipv6lib=none ipv6trylibc=no -if test "$ipv6" = "yes"; then +if test "$ipv6" = yes -a "$cross_compiling" = no; then AC_MSG_CHECKING([ipv6 stack type]) for i in inria kame linux-glibc linux-inet6 solaris toshiba v6d zeta; do
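The `Tools/wasm/wasi.py` changes at the end of this patch make the new `clean` subcommand safe to run: `configure-build-python` now writes `Modules/Setup.local` with a marker comment rather than merely touching it, and `clean_contents()` only treats the file as generated if it still begins with that marker. Below is a standalone sketch of that marker-guarded cleanup; `ensure_local_setup()` and `clean_local_setup()` are illustrative names, and the final `unlink()` is an assumption, since the corresponding removal line is truncated in the hunk above:

```
import pathlib

LOCAL_SETUP = pathlib.Path("Modules") / "Setup.local"
LOCAL_SETUP_MARKER = "# Generated by Tools/wasm/wasi.py\n".encode("utf-8")


def ensure_local_setup() -> None:
    """Create Setup.local with the marker unless the user already has one."""
    if LOCAL_SETUP.exists():
        print(f"{LOCAL_SETUP} exists ...")
    else:
        LOCAL_SETUP.parent.mkdir(parents=True, exist_ok=True)
        LOCAL_SETUP.write_bytes(LOCAL_SETUP_MARKER)


def clean_local_setup() -> None:
    """Delete Setup.local only if this script generated it."""
    if not LOCAL_SETUP.exists():
        return
    with LOCAL_SETUP.open("rb") as file:
        generated = file.read(len(LOCAL_SETUP_MARKER)) == LOCAL_SETUP_MARKER
    if generated:
        print(f"Deleting generated {LOCAL_SETUP} ...")
        LOCAL_SETUP.unlink()  # assumption: the real script removes it here
```

The point of the marker is that a hand-written `Setup.local` never starts with the generated comment, so a user's own configuration survives `wasi.py clean` untouched.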