diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile
index ce8967337b02f9..efbdcd402cdf67 100644
--- a/.devcontainer/Dockerfile
+++ b/.devcontainer/Dockerfile
@@ -9,8 +9,8 @@ ENV WASMTIME_HOME=/opt/wasmtime
ENV WASMTIME_VERSION=7.0.0
ENV WASMTIME_CPU_ARCH=x86_64
-RUN dnf -y --nodocs install git clang xz python3-blurb dnf-plugins-core && \
- dnf -y --nodocs builddep python3 && \
+RUN dnf -y --nodocs --setopt=install_weak_deps=False install /usr/bin/{blurb,clang,curl,git,ln,tar,xz} 'dnf-command(builddep)' && \
+ dnf -y --nodocs --setopt=install_weak_deps=False builddep python3 && \
dnf -y clean all
RUN mkdir ${WASI_SDK_PATH} && \
diff --git a/.gitattributes b/.gitattributes
index cb1cf8bcc7c877..4ed95069442f3d 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -34,6 +34,7 @@ Lib/test/xmltestdata/* noeol
# Shell scripts should have LF even on Windows because of Cygwin
Lib/venv/scripts/common/activate text eol=lf
+Lib/venv/scripts/posix/* text eol=lf
# CRLF files
[attr]dos text eol=crlf
diff --git a/.github/workflows/verify-ensurepip-wheels.yml b/.github/workflows/verify-ensurepip-wheels.yml
index d4a2cb6846c1cb..17d841f1f1c54a 100644
--- a/.github/workflows/verify-ensurepip-wheels.yml
+++ b/.github/workflows/verify-ensurepip-wheels.yml
@@ -1,4 +1,4 @@
-name: Verify bundled pip and setuptools
+name: Verify bundled wheels
on:
workflow_dispatch:
@@ -29,5 +29,5 @@ jobs:
- uses: actions/setup-python@v4
with:
python-version: '3'
- - name: Compare checksums of bundled pip and setuptools to ones published on PyPI
+ - name: Compare checksums of bundled wheels to the ones published on PyPI
run: ./Tools/build/verify_ensurepip_wheels.py
diff --git a/Doc/c-api/type.rst b/Doc/c-api/type.rst
index 7b5d1fac40ed87..69b15296993301 100644
--- a/Doc/c-api/type.rst
+++ b/Doc/c-api/type.rst
@@ -232,6 +232,15 @@ Type Objects
.. versionadded:: 3.11
+.. c:function:: int PyUnstable_Type_AssignVersionTag(PyTypeObject *type)
+
+ Attempt to assign a version tag to the given type.
+
+ Returns 1 if the type already had a valid version tag or a new one was
+ assigned, or 0 if a new tag could not be assigned.
+
+ .. versionadded:: 3.12
+
Creating Heap-Allocated Types
.............................
diff --git a/Doc/conf.py b/Doc/conf.py
index e99b801d0ae87a..4c120bee64dde4 100644
--- a/Doc/conf.py
+++ b/Doc/conf.py
@@ -254,8 +254,31 @@
# Options for the link checker
# ----------------------------
-# Ignore certain URLs.
-linkcheck_ignore = [r'https://bugs.python.org/(issue)?\d+']
+linkcheck_allowed_redirects = {
+ # bpo-NNNN -> BPO -> GH Issues
+ r'https://bugs.python.org/issue\?@action=redirect&bpo=\d+': 'https://github.com/python/cpython/issues/\d+',
+ # GH-NNNN used to refer to pull requests
+ r'https://github.com/python/cpython/issues/\d+': 'https://github.com/python/cpython/pull/\d+',
+ # :source:`something` linking files in the repository
+ r'https://github.com/python/cpython/tree/.*': 'https://github.com/python/cpython/blob/.*'
+}
+
+linkcheck_anchors_ignore = [
+ # ignore anchors that start with a '/', e.g. Wikipedia media files:
+ # https://en.wikipedia.org/wiki/Walrus#/media/File:Pacific_Walrus_-_Bull_(8247646168).jpg
+ r'\/.*',
+]
+
+linkcheck_ignore = [
+ # The crawler gets "Anchor not found"
+ r'https://developer.apple.com/documentation/.+?#.*',
+ r'https://devguide.python.org.+?/#.*',
+ r'https://github.com.+?#.*',
+ # Robot crawlers not allowed: "403 Client Error: Forbidden"
+ r'https://support.enthought.com/hc/.*',
+ # SSLError CertificateError, even though it is valid
+ r'https://unix.org/version2/whatsnew/lp64_wp.html',
+]
# Options for extensions
diff --git a/Doc/distributing/index.rst b/Doc/distributing/index.rst
index 21389adedf9c15..d237f8f082d87b 100644
--- a/Doc/distributing/index.rst
+++ b/Doc/distributing/index.rst
@@ -129,14 +129,10 @@ involved in creating and publishing a project:
* `Uploading the project to the Python Package Index`_
* `The .pypirc file`_
-.. _Project structure: \
- https://packaging.python.org/tutorials/packaging-projects/#packaging-python-projects
-.. _Building and packaging the project: \
- https://packaging.python.org/tutorials/packaging-projects/#creating-the-package-files
-.. _Uploading the project to the Python Package Index: \
- https://packaging.python.org/tutorials/packaging-projects/#uploading-the-distribution-archives
-.. _The .pypirc file: \
- https://packaging.python.org/specifications/pypirc/
+.. _Project structure: https://packaging.python.org/tutorials/packaging-projects/#packaging-python-projects
+.. _Building and packaging the project: https://packaging.python.org/tutorials/packaging-projects/#creating-the-package-files
+.. _Uploading the project to the Python Package Index: https://packaging.python.org/tutorials/packaging-projects/#uploading-the-distribution-archives
+.. _The .pypirc file: https://packaging.python.org/specifications/pypirc/
How do I...?
diff --git a/Doc/extending/newtypes.rst b/Doc/extending/newtypes.rst
index 80a1387db200c2..56b40acdb69fed 100644
--- a/Doc/extending/newtypes.rst
+++ b/Doc/extending/newtypes.rst
@@ -337,7 +337,7 @@ Here is an example::
}
PyErr_Format(PyExc_AttributeError,
- "'%.50s' object has no attribute '%.400s'",
+ "'%.100s' object has no attribute '%.400s'",
tp->tp_name, name);
return NULL;
}
diff --git a/Doc/faq/library.rst b/Doc/faq/library.rst
index a9cde456575020..597caaa778e1c8 100644
--- a/Doc/faq/library.rst
+++ b/Doc/faq/library.rst
@@ -780,7 +780,7 @@ socket to :meth:`select.select` to check if it's writable.
The :mod:`asyncio` module provides a general purpose single-threaded and
concurrent asynchronous library, which can be used for writing non-blocking
network code.
- The third-party `Twisted `_ library is
+ The third-party `Twisted `_ library is
a popular and feature-rich alternative.
diff --git a/Doc/howto/descriptor.rst b/Doc/howto/descriptor.rst
index 74710d9b3fc2ed..3688c47f0d6ec9 100644
--- a/Doc/howto/descriptor.rst
+++ b/Doc/howto/descriptor.rst
@@ -1273,11 +1273,14 @@ Using the non-data descriptor protocol, a pure Python version of
.. testcode::
+ import functools
+
class StaticMethod:
"Emulate PyStaticMethod_Type() in Objects/funcobject.c"
def __init__(self, f):
self.f = f
+ functools.update_wrapper(self, f)
def __get__(self, obj, objtype=None):
return self.f
@@ -1285,13 +1288,19 @@ Using the non-data descriptor protocol, a pure Python version of
def __call__(self, *args, **kwds):
return self.f(*args, **kwds)
+The :func:`functools.update_wrapper` call adds a ``__wrapped__`` attribute
+that refers to the underlying function. Also it carries forward
+the attributes necessary to make the wrapper look like the wrapped
+function: ``__name__``, ``__qualname__``, ``__doc__``, and ``__annotations__``.
+
.. testcode::
:hide:
class E_sim:
@StaticMethod
- def f(x):
- return x * 10
+ def f(x: int) -> str:
+ "Simple function example"
+ return "!" * x
wrapped_ord = StaticMethod(ord)
@@ -1299,11 +1308,51 @@ Using the non-data descriptor protocol, a pure Python version of
:hide:
>>> E_sim.f(3)
- 30
+ '!!!'
>>> E_sim().f(3)
- 30
+ '!!!'
+
+ >>> sm = vars(E_sim)['f']
+ >>> type(sm).__name__
+ 'StaticMethod'
+ >>> f = E_sim.f
+ >>> type(f).__name__
+ 'function'
+ >>> sm.__name__
+ 'f'
+ >>> f.__name__
+ 'f'
+ >>> sm.__qualname__
+ 'E_sim.f'
+ >>> f.__qualname__
+ 'E_sim.f'
+ >>> sm.__doc__
+ 'Simple function example'
+ >>> f.__doc__
+ 'Simple function example'
+ >>> sm.__annotations__
+ {'x': <class 'int'>, 'return': <class 'str'>}
+ >>> f.__annotations__
+ {'x': <class 'int'>, 'return': <class 'str'>}
+ >>> sm.__module__ == f.__module__
+ True
+ >>> sm(3)
+ '!!!'
+ >>> f(3)
+ '!!!'
+
>>> wrapped_ord('A')
65
+ >>> wrapped_ord.__module__ == ord.__module__
+ True
+ >>> wrapped_ord.__wrapped__ == ord
+ True
+ >>> wrapped_ord.__name__ == ord.__name__
+ True
+ >>> wrapped_ord.__qualname__ == ord.__qualname__
+ True
+ >>> wrapped_ord.__doc__ == ord.__doc__
+ True
Class methods
@@ -1359,11 +1408,14 @@ Using the non-data descriptor protocol, a pure Python version of
.. testcode::
+ import functools
+
class ClassMethod:
"Emulate PyClassMethod_Type() in Objects/funcobject.c"
def __init__(self, f):
self.f = f
+ functools.update_wrapper(self, f)
def __get__(self, obj, cls=None):
if cls is None:
@@ -1380,8 +1432,9 @@ Using the non-data descriptor protocol, a pure Python version of
# Verify the emulation works
class T:
@ClassMethod
- def cm(cls, x, y):
- return (cls, x, y)
+ def cm(cls, x: int, y: str) -> tuple[str, int, str]:
+ "Class method that returns a tuple"
+ return (cls.__name__, x, y)
@ClassMethod
@property
@@ -1393,17 +1446,40 @@ Using the non-data descriptor protocol, a pure Python version of
:hide:
>>> T.cm(11, 22)
- (<class 'T'>, 11, 22)
+ ('T', 11, 22)
# Also call it from an instance
>>> t = T()
>>> t.cm(11, 22)
- (<class 'T'>, 11, 22)
+ ('T', 11, 22)
# Check the alternate path for chained descriptors
>>> T.__doc__
"A doc for 'T'"
+ # Verify that T uses our emulation
+ >>> type(vars(T)['cm']).__name__
+ 'ClassMethod'
+
+ # Verify that update_wrapper() correctly copied attributes
+ >>> T.cm.__name__
+ 'cm'
+ >>> T.cm.__qualname__
+ 'T.cm'
+ >>> T.cm.__doc__
+ 'Class method that returns a tuple'
+ >>> T.cm.__annotations__
+ {'x': <class 'int'>, 'y': <class 'str'>, 'return': tuple[str, int, str]}
+
+ # Verify that __wrapped__ was added and works correctly
+ >>> f = vars(T)['cm'].__wrapped__
+ >>> type(f).__name__
+ 'function'
+ >>> f.__name__
+ 'cm'
+ >>> f(T, 11, 22)
+ ('T', 11, 22)
+
The code path for ``hasattr(type(self.f), '__get__')`` was added in
Python 3.9 and makes it possible for :func:`classmethod` to support
@@ -1423,6 +1499,12 @@ chained together. In Python 3.11, this functionality was deprecated.
>>> G.__doc__
"A doc for 'G'"
+The :func:`functools.update_wrapper` call in ``ClassMethod`` adds a
+``__wrapped__`` attribute that refers to the underlying function. Also
+it carries forward the attributes necessary to make the wrapper look
+like the wrapped function: ``__name__``, ``__qualname__``, ``__doc__``,
+and ``__annotations__``.
+
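As a quick illustration of what the added ``functools.update_wrapper`` call buys, here is a minimal sketch; the ``Wrapper`` class and ``greet`` function below are illustrative, not part of the howto::

    import functools

    class Wrapper:
        "Minimal non-data descriptor wrapper (illustrative only)"
        def __init__(self, f):
            self.f = f
            # Copies __name__, __qualname__, __doc__, __annotations__, __module__
            # from f and sets __wrapped__ on the wrapper.
            functools.update_wrapper(self, f)
        def __get__(self, obj, objtype=None):
            return self.f

    def greet(name: str) -> str:
        "Say hello"
        return f"Hello, {name}!"

    w = Wrapper(greet)
    assert w.__name__ == 'greet'
    assert w.__doc__ == 'Say hello'
    assert w.__wrapped__ is greet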
Member objects and __slots__
----------------------------
diff --git a/Doc/howto/enum.rst b/Doc/howto/enum.rst
index 56391a026cf889..68b75c529e92c7 100644
--- a/Doc/howto/enum.rst
+++ b/Doc/howto/enum.rst
@@ -36,8 +36,10 @@ inherits from :class:`Enum` itself.
.. note:: Case of Enum Members
- Because Enums are used to represent constants we recommend using
- UPPER_CASE names for members, and will be using that style in our examples.
+ Because Enums are used to represent constants, and to help avoid issues
+ with name clashes between mixin-class methods/attributes and enum names,
+ we strongly recommend using UPPER_CASE names for members, and will be using
+ that style in our examples.
Depending on the nature of the enum a member's value may or may not be
important, but either way that value can be used to get the corresponding
@@ -490,6 +492,10 @@ the :meth:`~Enum.__repr__` omits the inherited class' name. For example::
Use the :func:`!dataclass` argument ``repr=False``
to use the standard :func:`repr`.
+.. versionchanged:: 3.12
+ Only the dataclass fields are shown in the value area, not the dataclass'
+ name.
+
Pickling
--------
@@ -992,7 +998,9 @@ but remain normal attributes.
Enum members are instances of their enum class, and are normally accessed as
``EnumClass.member``. In certain situations, such as writing custom enum
behavior, being able to access one member directly from another is useful,
-and is supported.
+and is supported; however, in order to avoid name clashes between member names
+and attributes/methods from mixed-in classes, upper-case names are strongly
+recommended.
.. versionchanged:: 3.5
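A brief sketch of member-to-member access; the ``Color`` enum here is illustrative rather than taken from the howto::

    from enum import Enum

    class Color(Enum):
        RED = 1
        GREEN = 2

        @property
        def complement(self):
            # One member accessing another member directly.
            return Color.GREEN if self is Color.RED else Color.RED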
diff --git a/Doc/howto/functional.rst b/Doc/howto/functional.rst
index 38a651b0f964a6..5cf12cc52bde4e 100644
--- a/Doc/howto/functional.rst
+++ b/Doc/howto/functional.rst
@@ -1208,8 +1208,8 @@ General
-------
**Structure and Interpretation of Computer Programs**, by Harold Abelson and
-Gerald Jay Sussman with Julie Sussman. Full text at
-https://mitpress.mit.edu/sicp/. In this classic textbook of computer science,
+Gerald Jay Sussman with Julie Sussman. The book can be found at
+https://mitpress.mit.edu/sicp. In this classic textbook of computer science,
chapters 2 and 3 discuss the use of sequences and streams to organize the data
flow inside a program. The book uses Scheme for its examples, but many of the
design approaches described in these chapters are applicable to functional-style
diff --git a/Doc/howto/urllib2.rst b/Doc/howto/urllib2.rst
index 69af3c3a85c5d6..61ba6bd7224fcc 100644
--- a/Doc/howto/urllib2.rst
+++ b/Doc/howto/urllib2.rst
@@ -86,7 +86,7 @@ response::
import urllib.request
- req = urllib.request.Request('http://www.voidspace.org.uk')
+ req = urllib.request.Request('http://python.org/')
with urllib.request.urlopen(req) as response:
the_page = response.read()
@@ -458,7 +458,7 @@ To illustrate creating and installing a handler we will use the
``HTTPBasicAuthHandler``. For a more detailed discussion of this subject --
including an explanation of how Basic Authentication works - see the `Basic
Authentication Tutorial
-`_.
+`__.
When authentication is required, the server sends a header (as well as the 401
error code) requesting authentication. This specifies the authentication scheme
diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst
index ee68ac58d3de75..dd59181a30702e 100644
--- a/Doc/library/argparse.rst
+++ b/Doc/library/argparse.rst
@@ -67,7 +67,7 @@ default_ Default value used when an argument is not provided
dest_ Specify the attribute name used in the result namespace
help_ Help message for an argument
metavar_ Alternate display name for the argument as shown in help
-nargs_ Number of times the argument can be used :class:`int`, ``'?'``, ``'*'``, ``'+'``, or ``argparse.REMAINDER``
+nargs_ Number of times the argument can be used :class:`int`, ``'?'``, ``'*'``, or ``'+'``
required_ Indicate whether an argument is required or optional ``True`` or ``False``
type_ Automatically convert an argument to the given type :class:`int`, :class:`float`, ``argparse.FileType('w')``, or callable function
====================== =========================================================== ==========================================================================================================================
@@ -2218,7 +2218,7 @@ support this parsing style.
These parsers do not support all the argparse features, and will raise
exceptions if unsupported features are used. In particular, subparsers,
-``argparse.REMAINDER``, and mutually exclusive groups that include both
+and mutually exclusive groups that include both
optionals and positionals are not supported.
The following example shows the difference between
diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst
index 41d09e1e79705c..ba0f909c405a34 100644
--- a/Doc/library/asyncio-task.rst
+++ b/Doc/library/asyncio-task.rst
@@ -256,8 +256,9 @@ Creating Tasks
.. note::
- :meth:`asyncio.TaskGroup.create_task` is a newer alternative
- that allows for convenient waiting for a group of related tasks.
+ :meth:`asyncio.TaskGroup.create_task` is a new alternative
+ leveraging structural concurrency; it allows waiting for
+ a group of related tasks with strong safety guarantees.
.. important::
@@ -340,7 +341,7 @@ Example::
async with asyncio.TaskGroup() as tg:
task1 = tg.create_task(some_coro(...))
task2 = tg.create_task(another_coro(...))
- print("Both tasks have completed now.")
+ print(f"Both tasks have completed now: {task1.result()}, {task2.result()}")
The ``async with`` statement will wait for all tasks in the group to finish.
While waiting, new tasks may still be added to the group
@@ -459,8 +460,12 @@ Running Tasks Concurrently
Tasks/Futures to be cancelled.
.. note::
- A more modern way to create and run tasks concurrently and
- wait for their completion is :class:`asyncio.TaskGroup`.
+ A newer alternative for creating and running tasks concurrently and
+ waiting for their completion is :class:`asyncio.TaskGroup`. *TaskGroup*
+ provides stronger safety guarantees than *gather* for scheduling a nesting
+ of subtasks: if a task (or a subtask, a task scheduled by a task)
+ raises an exception, *TaskGroup* will cancel the remaining scheduled
+ tasks, while *gather* will not.
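A minimal sketch of the safety difference described above; the coroutine names are illustrative, and the example assumes Python 3.11+ for :class:`asyncio.TaskGroup` and ``except*``::

    import asyncio

    async def boom():
        raise RuntimeError("one subtask fails")

    async def slow():
        await asyncio.sleep(10)

    async def main():
        try:
            async with asyncio.TaskGroup() as tg:
                tg.create_task(boom())
                tg.create_task(slow())   # cancelled when boom() raises
        except* RuntimeError:
            print("sibling tasks were cancelled")

    asyncio.run(main())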
.. _asyncio_example_gather:
@@ -829,6 +834,9 @@ Waiting Primitives
Deprecation warning is emitted if not all awaitable objects in the *aws*
iterable are Future-like objects and there is no running event loop.
+ .. versionchanged:: 3.12
+ Added support for generators yielding tasks.
+
Running in Threads
==================
diff --git a/Doc/library/copyreg.rst b/Doc/library/copyreg.rst
index 866b180f4bc3b8..2107215c0c1967 100644
--- a/Doc/library/copyreg.rst
+++ b/Doc/library/copyreg.rst
@@ -28,8 +28,8 @@ Such constructors may be factory functions or class instances.
.. function:: pickle(type, function, constructor_ob=None)
Declares that *function* should be used as a "reduction" function for objects
- of type *type*. *function* should return either a string or a tuple
- containing two or three elements. See the :attr:`~pickle.Pickler.dispatch_table`
+ of type *type*. *function* must return either a string or a tuple
+ containing two to five elements. See the :attr:`~pickle.Pickler.dispatch_table`
for more details on the interface of *function*.
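For instance, a reduction function returning a two-element tuple might look like the following sketch; ``Point`` and ``reduce_point`` are illustrative names::

    import copyreg
    import pickle

    class Point:
        def __init__(self, x, y):
            self.x, self.y = x, y

    def reduce_point(p):
        # A reduction function returning a (callable, args) two-tuple.
        return (Point, (p.x, p.y))

    copyreg.pickle(Point, reduce_point)
    q = pickle.loads(pickle.dumps(Point(1, 2)))   # rebuilt via reduce_point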
The *constructor_ob* parameter is a legacy feature and is now ignored, but if
diff --git a/Doc/library/dataclasses.rst b/Doc/library/dataclasses.rst
index 5f4dc25bfd7877..a04e5f744fa350 100644
--- a/Doc/library/dataclasses.rst
+++ b/Doc/library/dataclasses.rst
@@ -714,7 +714,7 @@ Using dataclasses, *if* this code was valid::
@dataclass
class D:
- x: List = []
+ x: list = [] # This code raises ValueError
def add(self, element):
self.x += element
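The supported way to give each instance its own list is a ``default_factory``; a minimal sketch, separate from the surrounding example::

    from dataclasses import dataclass, field

    @dataclass
    class D:
        x: list = field(default_factory=list)   # a fresh list per instance

        def add(self, element):
            self.x.append(element)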
diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst
index 761f5f04b9b288..7889dd7d1c3ef0 100644
--- a/Doc/library/datetime.rst
+++ b/Doc/library/datetime.rst
@@ -1043,7 +1043,7 @@ Other constructors, all class methods:
Return a :class:`.datetime` corresponding to *date_string*, parsed according to
*format*.
- This is equivalent to::
+ If *format* does not contain microseconds or timezone information, this is equivalent to::
datetime(*(time.strptime(date_string, format)[0:6]))
@@ -2510,10 +2510,7 @@ Notes:
Because the format depends on the current locale, care should be taken when
making assumptions about the output value. Field orderings will vary (for
example, "month/day/year" versus "day/month/year"), and the output may
- contain Unicode characters encoded using the locale's default encoding (for
- example, if the current locale is ``ja_JP``, the default encoding could be
- any one of ``eucJP``, ``SJIS``, or ``utf-8``; use :meth:`locale.getlocale`
- to determine the current locale's encoding).
+ contain non-ASCII characters.
(2)
The :meth:`strptime` method can parse years in the full [1, 9999] range, but
diff --git a/Doc/library/decimal.rst b/Doc/library/decimal.rst
index 6187098a752053..0b4a4973cb4da0 100644
--- a/Doc/library/decimal.rst
+++ b/Doc/library/decimal.rst
@@ -926,7 +926,7 @@ Each thread has its own current context which is accessed or changed using the
You can also use the :keyword:`with` statement and the :func:`localcontext`
function to temporarily change the active context.
-.. function:: localcontext(ctx=None, \*\*kwargs)
+.. function:: localcontext(ctx=None, **kwargs)
Return a context manager that will set the current context for the active thread
to a copy of *ctx* on entry to the with-statement and restore the previous context
diff --git a/Doc/library/dis.rst b/Doc/library/dis.rst
index c8897b34f0ddfd..3894837127877c 100644
--- a/Doc/library/dis.rst
+++ b/Doc/library/dis.rst
@@ -1048,8 +1048,8 @@ iterations of the loop.
The low bit of ``namei`` signals to attempt a method load, as with
:opcode:`LOAD_ATTR`.
- The second-low bit of ``namei``, if set, means that this was a zero-argument
- call to :func:`super`.
+ The second-low bit of ``namei``, if set, means that this was a two-argument
+ call to :func:`super` (unset means zero-argument).
.. versionadded:: 3.12
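To observe the opcode in practice, one can disassemble methods using the two forms of :func:`super`; this is a sketch that assumes Python 3.12, and the exact disassembly output is omitted::

    import dis

    class A:
        def f(self):
            return super().f()            # zero-argument super()

    class B:
        def f(self):
            return super(B, self).f()     # two-argument super()

    dis.dis(A.f)   # look for LOAD_SUPER_ATTR in the output
    dis.dis(B.f)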
diff --git a/Doc/library/enum.rst b/Doc/library/enum.rst
index 07acf9da33e275..582e06261afd72 100644
--- a/Doc/library/enum.rst
+++ b/Doc/library/enum.rst
@@ -119,7 +119,8 @@ Module Contents
:func:`~enum.property`
Allows :class:`Enum` members to have attributes without conflicting with
- member names.
+ member names. The ``value`` and ``name`` attributes are implemented this
+ way.
:func:`unique`
@@ -169,7 +170,7 @@ Data Types
final *enum*, as well as creating the enum members, properly handling
duplicates, providing iteration over the enum class, etc.
- .. method:: EnumType.__call__(cls, value, names=None, *, module=None, qualname=None, type=None, start=1, boundary=None)
+ .. method:: EnumType.__call__(cls, value, names=None, \*, module=None, qualname=None, type=None, start=1, boundary=None)
This method is called in two different ways:
@@ -317,7 +318,7 @@ Data Types
>>> PowersOfThree.SECOND.value
9
- .. method:: Enum.__init_subclass__(cls, **kwds)
+ .. method:: Enum.__init_subclass__(cls, \**kwds)
A *classmethod* that is used to further configure subsequent subclasses.
By default, does nothing.
diff --git a/Doc/library/functools.rst b/Doc/library/functools.rst
index d467e50bc7a424..29cbc87bf66d12 100644
--- a/Doc/library/functools.rst
+++ b/Doc/library/functools.rst
@@ -49,8 +49,13 @@ The :mod:`functools` module defines the following functions:
>>> factorial(12) # makes two new recursive calls, the other 10 are cached
479001600
- The cache is threadsafe so the wrapped function can be used in multiple
- threads.
+ The cache is threadsafe so that the wrapped function can be used in
+ multiple threads. This means that the underlying data structure will
+ remain coherent during concurrent updates.
+
+ It is possible for the wrapped function to be called more than once if
+ another thread makes an additional call before the initial call has been
+ completed and cached.
.. versionadded:: 3.9
@@ -118,6 +123,7 @@ The :mod:`functools` module defines the following functions:
def stdev(self):
return statistics.stdev(self._data)
+ .. versionadded:: 3.8
.. versionchanged:: 3.12
Prior to Python 3.12, ``cached_property`` included an undocumented lock to
@@ -126,8 +132,6 @@ The :mod:`functools` module defines the following functions:
per-instance, which could result in unacceptably high lock contention. In
Python 3.12+ this locking is removed.
- .. versionadded:: 3.8
-
.. function:: cmp_to_key(func)
@@ -159,8 +163,13 @@ The :mod:`functools` module defines the following functions:
*maxsize* most recent calls. It can save time when an expensive or I/O bound
function is periodically called with the same arguments.
- The cache is threadsafe so the wrapped function can be used in multiple
- threads.
+ The cache is threadsafe so that the wrapped function can be used in
+ multiple threads. This means that the underlying data structure will
+ remain coherent during concurrent updates.
+
+ It is possible for the wrapped function to be called more than once if
+ another thread makes an additional call before the initial call has been
+ completed and cached.
Since a dictionary is used to cache results, the positional and keyword
arguments to the function must be :term:`hashable`.
@@ -233,7 +242,7 @@ The :mod:`functools` module defines the following functions:
@lru_cache(maxsize=32)
def get_pep(num):
'Retrieve text of a Python Enhancement Proposal'
- resource = 'https://peps.python.org/pep-%04d/' % num
+ resource = f'https://peps.python.org/pep-{num:04d}'
try:
with urllib.request.urlopen(resource) as s:
return s.read()
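A small sketch of the threading caveat described above: with several threads racing on the same uncached argument, the wrapped function may run more than once. The ``compute`` function and thread count are illustrative::

    import threading
    from functools import lru_cache

    calls = 0

    @lru_cache(maxsize=None)
    def compute(x):
        global calls
        calls += 1            # may run more than once for the same x
        return x * x

    threads = [threading.Thread(target=compute, args=(7,)) for _ in range(8)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    print(calls)              # usually 1, but values greater than 1 are possible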
diff --git a/Doc/library/importlib.metadata.rst b/Doc/library/importlib.metadata.rst
index 6e084101995e25..b306d5f55a714f 100644
--- a/Doc/library/importlib.metadata.rst
+++ b/Doc/library/importlib.metadata.rst
@@ -308,6 +308,10 @@ Python module or `Import Package
>>> packages_distributions()
{'importlib_metadata': ['importlib-metadata'], 'yaml': ['PyYAML'], 'jaraco': ['jaraco.classes', 'jaraco.functools'], ...}
+Some editable installs, `do not supply top-level names
+`_, and thus this
+function is not reliable with such installs.
+
.. versionadded:: 3.10
.. _distributions:
diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst
index 84e309f1bc8326..8454296b815b41 100644
--- a/Doc/library/multiprocessing.rst
+++ b/Doc/library/multiprocessing.rst
@@ -452,9 +452,7 @@ process which created it.
importable by the children. This is covered in :ref:`multiprocessing-programming`
however it is worth pointing out here. This means that some examples, such
as the :class:`multiprocessing.pool.Pool` examples will not work in the
- interactive interpreter. For example:
-
- .. code-block:: text
+ interactive interpreter. For example::
>>> from multiprocessing import Pool
>>> p = Pool(5)
diff --git a/Doc/library/optparse.rst b/Doc/library/optparse.rst
index 3e29fed0175e04..468c3efbe01148 100644
--- a/Doc/library/optparse.rst
+++ b/Doc/library/optparse.rst
@@ -2027,7 +2027,7 @@ Features of note:
values.ensure_value(attr, value)
If the ``attr`` attribute of ``values`` doesn't exist or is ``None``, then
- ensure_value() first sets it to ``value``, and then returns 'value. This is
+ ensure_value() first sets it to ``value``, and then returns ``value``. This is
very handy for actions like ``"extend"``, ``"append"``, and ``"count"``, all
of which accumulate data in a variable and expect that variable to be of a
certain type (a list for the first two, an integer for the latter). Using
diff --git a/Doc/library/os.rst b/Doc/library/os.rst
index 7bb501c5946817..50e951c631fa88 100644
--- a/Doc/library/os.rst
+++ b/Doc/library/os.rst
@@ -3919,7 +3919,8 @@ to be ignored.
the :envvar:`PATH` variable. The other variants, :func:`execl`, :func:`execle`,
:func:`execv`, and :func:`execve`, will not use the :envvar:`PATH` variable to
locate the executable; *path* must contain an appropriate absolute or relative
- path.
+ path. Relative paths must include at least one slash, even on Windows, as
+ plain names will not be resolved.
For :func:`execle`, :func:`execlpe`, :func:`execve`, and :func:`execvpe` (note
that these all end in "e"), the *env* parameter must be a mapping which is
diff --git a/Doc/library/pkgutil.rst b/Doc/library/pkgutil.rst
index 788a02dcb8922f..64e617b82b48bc 100644
--- a/Doc/library/pkgutil.rst
+++ b/Doc/library/pkgutil.rst
@@ -25,9 +25,9 @@ support.
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
- This will add to the package's ``__path__`` all subdirectories of directories
- on :data:`sys.path` named after the package. This is useful if one wants to
- distribute different parts of a single logical package as multiple
+ For each directory on :data:`sys.path` that has a subdirectory that matches the
+ package name, add the subdirectory to the package's :attr:`__path__`. This is useful
+ if one wants to distribute different parts of a single logical package as multiple
directories.
It also looks for :file:`\*.pkg` files beginning where ``*`` matches the
@@ -82,7 +82,7 @@ support.
This is a backwards compatibility wrapper around
:func:`importlib.util.find_spec` that converts most failures to
:exc:`ImportError` and only returns the loader rather than the full
- :class:`ModuleSpec`.
+ :class:`importlib.machinery.ModuleSpec`.
.. versionchanged:: 3.3
Updated to be based directly on :mod:`importlib` rather than relying
diff --git a/Doc/library/readline.rst b/Doc/library/readline.rst
index 4d485d25b54020..8fb0eca8df74d8 100644
--- a/Doc/library/readline.rst
+++ b/Doc/library/readline.rst
@@ -19,7 +19,7 @@ function.
Readline keybindings may be configured via an initialization file, typically
``.inputrc`` in your home directory. See `Readline Init File
-`_
+`_
in the GNU Readline manual for information about the format and
allowable constructs of that file, and the capabilities of the
Readline library in general.
diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst
index 373cc7d6072031..7f408be2336824 100644
--- a/Doc/library/shutil.rst
+++ b/Doc/library/shutil.rst
@@ -662,7 +662,7 @@ provided. They rely on the :mod:`zipfile` and :mod:`tarfile` modules.
Remove the archive format *name* from the list of supported formats.
-.. function:: unpack_archive(filename[, extract_dir[, format]])
+.. function:: unpack_archive(filename[, extract_dir[, format[, filter]]])
Unpack an archive. *filename* is the full path of the archive.
@@ -676,6 +676,14 @@ provided. They rely on the :mod:`zipfile` and :mod:`tarfile` modules.
registered for that extension. In case none is found,
a :exc:`ValueError` is raised.
+ The keyword-only *filter* argument is passed to the underlying unpacking
+ function. For zip files, *filter* is not accepted.
+ For tar files, it is recommended to set it to ``'data'``,
+ unless using features specific to tar and UNIX-like filesystems.
+ (See :ref:`tarfile-extraction-filter` for details.)
+ The ``'data'`` filter will become the default for tar files
+ in Python 3.14.
+
.. audit-event:: shutil.unpack_archive filename,extract_dir,format shutil.unpack_archive
.. warning::
@@ -688,6 +696,9 @@ provided. They rely on the :mod:`zipfile` and :mod:`tarfile` modules.
.. versionchanged:: 3.7
Accepts a :term:`path-like object` for *filename* and *extract_dir*.
+ .. versionchanged:: 3.12
+ Added the *filter* argument.
+
.. function:: register_unpack_format(name, extensions, function[, extra_args[, description]])
Registers an unpack format. *name* is the name of the format and
@@ -695,11 +706,14 @@ provided. They rely on the :mod:`zipfile` and :mod:`tarfile` modules.
``.zip`` for Zip files.
*function* is the callable that will be used to unpack archives. The
- callable will receive the path of the archive, followed by the directory
- the archive must be extracted to.
-
- When provided, *extra_args* is a sequence of ``(name, value)`` tuples that
- will be passed as keywords arguments to the callable.
+ callable will receive:
+
+ - the path of the archive, as a positional argument;
+ - the directory the archive must be extracted to, as a positional argument;
+ - possibly a *filter* keyword argument, if it was given to
+ :func:`unpack_archive`;
+ - additional keyword arguments, specified by *extra_args* as a sequence
+ of ``(name, value)`` tuples.
*description* can be provided to describe the format, and will be returned
by the :func:`get_unpack_formats` function.
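A short sketch of the *filter* argument in use; the archive and directory names are placeholders, and *filter* assumes Python 3.12+::

    import shutil

    # Tar archive: limit tar-specific features during extraction.
    shutil.unpack_archive('archive.tar.gz', 'dest', filter='data')

    # Zip archive: *filter* is not accepted, so omit it.
    shutil.unpack_archive('bundle.zip', 'dest')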
diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst
index bcfc6e5cfce611..2360472b31f175 100644
--- a/Doc/library/stdtypes.rst
+++ b/Doc/library/stdtypes.rst
@@ -1605,8 +1605,8 @@ expression support in the :mod:`re` module).
converts it to ``"ss"``.
The casefolding algorithm is
- `described in section 3.13 of the Unicode Standard
- `__.
+ `described in section 3.13 'Default Case Folding' of the Unicode Standard
+ `__.
.. versionadded:: 3.3
@@ -1768,8 +1768,9 @@ expression support in the :mod:`re` module).
one character, ``False`` otherwise. Alphabetic characters are those characters defined
in the Unicode character database as "Letter", i.e., those with general category
property being one of "Lm", "Lt", "Lu", "Ll", or "Lo". Note that this is different
- from the `Alphabetic property defined in the Unicode Standard
- `_.
+ from the `Alphabetic property defined in the section 4.10 'Letters, Alphabetic, and
+ Ideographic' of the Unicode Standard
+ `_.
.. method:: str.isascii()
@@ -1904,8 +1905,8 @@ expression support in the :mod:`re` module).
lowercase.
The lowercasing algorithm used is
- `described in section 3.13 of the Unicode Standard
- `__.
+ `described in section 3.13 'Default Case Folding' of the Unicode Standard
+ `__.
.. method:: str.lstrip([chars])
@@ -2250,8 +2251,8 @@ expression support in the :mod:`re` module).
titlecase).
The uppercasing algorithm used is
- `described in section 3.13 of the Unicode Standard
- `__.
+ `described in section 3.13 'Default Case Folding' of the Unicode Standard
+ `__.
.. method:: str.zfill(width)
@@ -3714,12 +3715,15 @@ copying.
types such as :class:`bytes` and :class:`bytearray`, an element is a single
byte, but other types such as :class:`array.array` may have bigger elements.
- ``len(view)`` is equal to the length of :class:`~memoryview.tolist`.
- If ``view.ndim = 0``, the length is 1. If ``view.ndim = 1``, the length
- is equal to the number of elements in the view. For higher dimensions,
- the length is equal to the length of the nested list representation of
- the view. The :class:`~memoryview.itemsize` attribute will give you the
- number of bytes in a single element.
+ ``len(view)`` is equal to the length of :class:`~memoryview.tolist`, which
+ is the nested list representation of the view. If ``view.ndim = 1``,
+ this is equal to the number of elements in the view.
+
+ .. versionchanged:: 3.12
+ If ``view.ndim == 0``, ``len(view)`` now raises :exc:`TypeError` instead of returning 1.
+
+ The :class:`~memoryview.itemsize` attribute will give you the number of
+ bytes in a single element.
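A brief sketch of ``len(view)`` and ``itemsize`` for a one-dimensional view; the :class:`array.array` values are illustrative::

    import array

    a = array.array('i', [1, 2, 3, 4])
    view = memoryview(a)
    len(view)         # 4 -- the number of elements in this 1-D view
    view.itemsize     # bytes per element (typically 4 for 'i')
    view.ndim         # 1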
A :class:`memoryview` supports slicing and indexing to expose its data.
One-dimensional slicing will result in a subview::
diff --git a/Doc/library/string.rst b/Doc/library/string.rst
index f55074cc582718..26b3f5000634f5 100644
--- a/Doc/library/string.rst
+++ b/Doc/library/string.rst
@@ -254,10 +254,10 @@ Some simple format string examples::
"Units destroyed: {players[0]}" # First element of keyword argument 'players'.
The *conversion* field causes a type coercion before formatting. Normally, the
-job of formatting a value is done by the :meth:`__format__` method of the value
+job of formatting a value is done by the :meth:`~object.__format__` method of the value
itself. However, in some cases it is desirable to force a type to be formatted
as a string, overriding its own definition of formatting. By converting the
-value to a string before calling :meth:`__format__`, the normal formatting logic
+value to a string before calling :meth:`~object.__format__`, the normal formatting logic
is bypassed.
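A small sketch of the difference; the :class:`~datetime.datetime` value is illustrative::

    from datetime import datetime

    d = datetime(2023, 5, 1)
    "{:%Y-%m}".format(d)    # '2023-05' -- handled by datetime.__format__
    "{!s}".format(d)        # '2023-05-01 00:00:00' -- str() first, bypassing it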
Three conversion flags are currently supported: ``'!s'`` which calls :func:`str`
diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst
index 2b5a82e0107fb6..53dfbf827260c9 100644
--- a/Doc/library/subprocess.rst
+++ b/Doc/library/subprocess.rst
@@ -919,9 +919,12 @@ Reassigning them to new values is unsupported:
.. attribute:: Popen.returncode
- The child return code, set by :meth:`poll` and :meth:`wait` (and indirectly
- by :meth:`communicate`). A ``None`` value indicates that the process
- hasn't terminated yet.
+ The child return code. Initially ``None``, :attr:`returncode` is set by
+ a call to the :meth:`poll`, :meth:`wait`, or :meth:`communicate` methods
+ if they detect that the process has terminated.
+
+ A ``None`` value indicates that the process hadn't yet terminated at the
+ time of the last method call.
A negative value ``-N`` indicates that the child was terminated by signal
``N`` (POSIX only).
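A minimal polling sketch; the ``sleep`` command is a POSIX placeholder::

    import subprocess
    import time

    p = subprocess.Popen(['sleep', '1'])
    print(p.returncode)        # None -- not yet detected as terminated
    while p.poll() is None:    # poll() sets returncode once the child exits
        time.sleep(0.1)
    print(p.returncode)        # 0 on normal exit, -N if killed by signal N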
diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst
index e37d57edce515f..7324f3113e0a08 100644
--- a/Doc/library/sys.rst
+++ b/Doc/library/sys.rst
@@ -670,6 +670,13 @@ always available.
.. versionadded:: 3.4
+.. function:: getunicodeinternedsize()
+
+ Return the number of unicode objects that have been interned.
+
+ .. versionadded:: 3.12
+
+
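A hedged usage sketch -- on versions older than 3.12 the function is absent, so guard with :func:`hasattr`::

    import sys

    if hasattr(sys, "getunicodeinternedsize"):   # new in Python 3.12
        print(sys.getunicodeinternedsize())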
.. function:: getandroidapilevel()
Return the build time API version of Android as an integer.
diff --git a/Doc/library/tarfile.rst b/Doc/library/tarfile.rst
index 741d40da152101..891af1bcf7edff 100644
--- a/Doc/library/tarfile.rst
+++ b/Doc/library/tarfile.rst
@@ -36,6 +36,13 @@ Some facts and figures:
.. versionchanged:: 3.3
Added support for :mod:`lzma` compression.
+.. versionchanged:: 3.12
+ Archives are extracted using a :ref:`filter <tarfile-extraction-filter>`,
+ which makes it possible to either limit surprising/dangerous features,
+ or to acknowledge that they are expected and the archive is fully trusted.
+ By default, archives are fully trusted, but this default is deprecated
+ and slated to change in Python 3.14.
+
.. function:: open(name=None, mode='r', fileobj=None, bufsize=10240, **kwargs)
@@ -209,6 +216,38 @@ The :mod:`tarfile` module defines the following exceptions:
Is raised by :meth:`TarInfo.frombuf` if the buffer it gets is invalid.
+.. exception:: FilterError
+
+ Base class for members :ref:`refused <tarfile-extraction-refuse>` by
+ filters.
+
+ .. attribute:: tarinfo
+
+ Information about the member that the filter refused to extract,
+ as :ref:`TarInfo <tarinfo-objects>`.
+
+.. exception:: AbsolutePathError
+
+ Raised to refuse extracting a member with an absolute path.
+
+.. exception:: OutsideDestinationError
+
+ Raised to refuse extracting a member outside the destination directory.
+
+.. exception:: SpecialFileError
+
+ Raised to refuse extracting a special file (e.g. a device or pipe).
+
+.. exception:: AbsoluteLinkError
+
+ Raised to refuse extracting a symbolic link with an absolute path.
+
+.. exception:: LinkOutsideDestinationError
+
+ Raised to refuse extracting a symbolic link pointing outside the destination
+ directory.
+
+
The following constants are available at the module level:
.. data:: ENCODING
@@ -319,11 +358,8 @@ be finalized; only the internally used file object will be closed. See the
*debug* can be set from ``0`` (no debug messages) up to ``3`` (all debug
messages). The messages are written to ``sys.stderr``.
- If *errorlevel* is ``0``, all errors are ignored when using :meth:`TarFile.extract`.
- Nevertheless, they appear as error messages in the debug output, when debugging
- is enabled. If ``1``, all *fatal* errors are raised as :exc:`OSError`
- exceptions. If ``2``, all *non-fatal* errors are raised as :exc:`TarError`
- exceptions as well.
+ *errorlevel* controls how extraction errors are handled;
+ see :attr:`the corresponding attribute <TarFile.errorlevel>`.
The *encoding* and *errors* arguments define the character encoding to be
used for reading or writing the archive and how conversion errors are going
@@ -390,7 +426,7 @@ be finalized; only the internally used file object will be closed. See the
available.
-.. method:: TarFile.extractall(path=".", members=None, *, numeric_owner=False)
+.. method:: TarFile.extractall(path=".", members=None, *, numeric_owner=False, filter=None)
Extract all members from the archive to the current working directory or
directory *path*. If optional *members* is given, it must be a subset of the
@@ -404,6 +440,12 @@ be finalized; only the internally used file object will be closed. See the
are used to set the owner/group for the extracted files. Otherwise, the named
values from the tarfile are used.
+ The *filter* argument specifies how ``members`` are modified or rejected
+ before extraction.
+ See :ref:`tarfile-extraction-filter` for details.
+ It is recommended to set this explicitly depending on which *tar* features
+ you need to support.
+
.. warning::
Never extract archives from untrusted sources without prior inspection.
@@ -411,14 +453,20 @@ be finalized; only the internally used file object will be closed. See the
that have absolute filenames starting with ``"/"`` or filenames with two
dots ``".."``.
+ Set ``filter='data'`` to prevent the most dangerous security issues,
+ and read the :ref:`tarfile-extraction-filter` section for details.
+
.. versionchanged:: 3.5
Added the *numeric_owner* parameter.
.. versionchanged:: 3.6
The *path* parameter accepts a :term:`path-like object`.
+ .. versionchanged:: 3.12
+ Added the *filter* parameter.
-.. method:: TarFile.extract(member, path="", set_attrs=True, *, numeric_owner=False)
+
+.. method:: TarFile.extract(member, path="", set_attrs=True, *, numeric_owner=False, filter=None)
Extract a member from the archive to the current working directory, using its
full name. Its file information is extracted as accurately as possible. *member*
@@ -426,9 +474,8 @@ be finalized; only the internally used file object will be closed. See the
directory using *path*. *path* may be a :term:`path-like object`.
File attributes (owner, mtime, mode) are set unless *set_attrs* is false.
- If *numeric_owner* is :const:`True`, the uid and gid numbers from the tarfile
- are used to set the owner/group for the extracted files. Otherwise, the named
- values from the tarfile are used.
+ The *numeric_owner* and *filter* arguments are the same as
+ for :meth:`extractall`.
.. note::
@@ -439,6 +486,9 @@ be finalized; only the internally used file object will be closed. See the
See the warning for :meth:`extractall`.
+ Set ``filter='data'`` to prevent the most dangerous security issues,
+ and read the :ref:`tarfile-extraction-filter` section for details.
+
.. versionchanged:: 3.2
Added the *set_attrs* parameter.
@@ -448,6 +498,9 @@ be finalized; only the internally used file object will be closed. See the
.. versionchanged:: 3.6
The *path* parameter accepts a :term:`path-like object`.
+ .. versionchanged:: 3.12
+ Added the *filter* parameter.
+
.. method:: TarFile.extractfile(member)
@@ -460,6 +513,55 @@ be finalized; only the internally used file object will be closed. See the
.. versionchanged:: 3.3
Return an :class:`io.BufferedReader` object.
+.. attribute:: TarFile.errorlevel
+ :type: int
+
+ If *errorlevel* is ``0``, errors are ignored when using :meth:`TarFile.extract`
+ and :meth:`TarFile.extractall`.
+ Nevertheless, they appear as error messages in the debug output when
+ *debug* is greater than 0.
+ If ``1`` (the default), all *fatal* errors are raised as :exc:`OSError` or
+ :exc:`FilterError` exceptions. If ``2``, all *non-fatal* errors are raised
+ as :exc:`TarError` exceptions as well.
+
+ Some exceptions, e.g. ones caused by wrong argument types or data
+ corruption, are always raised.
+
+ Custom :ref:`extraction filters <tarfile-extraction-filter>`
+ should raise :exc:`FilterError` for *fatal* errors
+ and :exc:`ExtractError` for *non-fatal* ones.
+
+ Note that when an exception is raised, the archive may be partially
+ extracted. It is the user’s responsibility to clean up.
+
+.. attribute:: TarFile.extraction_filter
+
+ .. versionadded:: 3.12
+
+ The :ref:`extraction filter <tarfile-extraction-filter>` used
+ as a default for the *filter* argument of :meth:`~TarFile.extract`
+ and :meth:`~TarFile.extractall`.
+
+ The attribute may be ``None`` or a callable.
+ String names are not allowed for this attribute, unlike the *filter*
+ argument to :meth:`~TarFile.extract`.
+
+ If ``extraction_filter`` is ``None`` (the default),
+ calling an extraction method without a *filter* argument will raise a
+ ``DeprecationWarning``,
+ and fall back to the :func:`fully_trusted <fully_trusted_filter>` filter,
+ whose dangerous behavior matches previous versions of Python.
+
+ In Python 3.14+, leaving ``extraction_filter=None`` will cause
+ extraction methods to use the :func:`data <data_filter>` filter by default.
+
+ The attribute may be set on instances or overridden in subclasses.
+ It is also possible to set it on the ``TarFile`` class itself to set a
+ global default, although, since it affects all uses of *tarfile*,
+ it is best practice to only do so in top-level applications or
+ :mod:`site configuration <site>`.
+ To set a global default this way, a filter function needs to be wrapped in
+ :func:`staticmethod()` to prevent injection of a ``self`` argument.
.. method:: TarFile.add(name, arcname=None, recursive=True, *, filter=None)
@@ -535,8 +637,23 @@ permissions, owner etc.), it provides some useful methods to determine its type.
It does *not* contain the file's data itself.
:class:`TarInfo` objects are returned by :class:`TarFile`'s methods
-:meth:`getmember`, :meth:`getmembers` and :meth:`gettarinfo`.
+:meth:`~TarFile.getmember`, :meth:`~TarFile.getmembers` and
+:meth:`~TarFile.gettarinfo`.
+Modifying the objects returned by :meth:`~!TarFile.getmember` or
+:meth:`~!TarFile.getmembers` will affect all subsequent
+operations on the archive.
+For cases where this is unwanted, you can use :mod:`copy.copy() <copy>` or
+call the :meth:`~TarInfo.replace` method to create a modified copy in one step.
+
+Several attributes can be set to ``None`` to indicate that a piece of metadata
+is unused or unknown.
+Different :class:`TarInfo` methods handle ``None`` differently:
+
+- The :meth:`~TarFile.extract` or :meth:`~TarFile.extractall` methods will
+ ignore the corresponding metadata, leaving it set to a default.
+- :meth:`~TarFile.addfile` will fail.
+- :meth:`~TarFile.list` will print a placeholder string.
.. class:: TarInfo(name="")
@@ -569,24 +686,39 @@ A ``TarInfo`` object has the following public data attributes:
.. attribute:: TarInfo.name
+ :type: str
Name of the archive member.
.. attribute:: TarInfo.size
+ :type: int
Size in bytes.
.. attribute:: TarInfo.mtime
+ :type: int | float
- Time of last modification.
+ Time of last modification in seconds since the :ref:`epoch <epoch>`,
+ as in :attr:`os.stat_result.st_mtime`.
+
+ .. versionchanged:: 3.12
+ Can be set to ``None`` for :meth:`~TarFile.extract` and
+ :meth:`~TarFile.extractall`, causing extraction to skip applying this
+ attribute.
.. attribute:: TarInfo.mode
+ :type: int
- Permission bits.
+ Permission bits, as for :func:`os.chmod`.
+ .. versionchanged:: 3.12
+
+ Can be set to ``None`` for :meth:`~TarFile.extract` and
+ :meth:`~TarFile.extractall`, causing extraction to skip applying this
+ attribute.
.. attribute:: TarInfo.type
@@ -598,35 +730,76 @@ A ``TarInfo`` object has the following public data attributes:
.. attribute:: TarInfo.linkname
+ :type: str
Name of the target file name, which is only present in :class:`TarInfo` objects
of type :const:`LNKTYPE` and :const:`SYMTYPE`.
.. attribute:: TarInfo.uid
+ :type: int
User ID of the user who originally stored this member.
+ .. versionchanged:: 3.12
+
+ Can be set to ``None`` for :meth:`~TarFile.extract` and
+ :meth:`~TarFile.extractall`, causing extraction to skip applying this
+ attribute.
.. attribute:: TarInfo.gid
+ :type: int
Group ID of the user who originally stored this member.
+ .. versionchanged:: 3.12
+
+ Can be set to ``None`` for :meth:`~TarFile.extract` and
+ :meth:`~TarFile.extractall`, causing extraction to skip applying this
+ attribute.
.. attribute:: TarInfo.uname
+ :type: str
User name.
+ .. versionchanged:: 3.12
+
+ Can be set to ``None`` for :meth:`~TarFile.extract` and
+ :meth:`~TarFile.extractall`, causing extraction to skip applying this
+ attribute.
.. attribute:: TarInfo.gname
+ :type: str
Group name.
+ .. versionchanged:: 3.12
+
+ Can be set to ``None`` for :meth:`~TarFile.extract` and
+ :meth:`~TarFile.extractall`, causing extraction to skip applying this
+ attribute.
.. attribute:: TarInfo.pax_headers
+ :type: dict
A dictionary containing key-value pairs of an associated pax extended header.
+.. method:: TarInfo.replace(name=..., mtime=..., mode=..., linkname=...,
+ uid=..., gid=..., uname=..., gname=...,
+ deep=True)
+
+ .. versionadded:: 3.12
+
+ Return a *new* copy of the :class:`!TarInfo` object with the given attributes
+ changed. For example, to return a ``TarInfo`` with the group name set to
+ ``'staff'``, use::
+
+ new_tarinfo = old_tarinfo.replace(gname='staff')
+
+ By default, a deep copy is made.
+ If *deep* is false, the copy is shallow, i.e. ``pax_headers``
+ and any custom attributes are shared with the original ``TarInfo`` object.
A :class:`TarInfo` object also provides some convenient query methods:
@@ -676,9 +849,258 @@ A :class:`TarInfo` object also provides some convenient query methods:
Return :const:`True` if it is one of character device, block device or FIFO.
+.. _tarfile-extraction-filter:
+
+Extraction filters
+------------------
+
+.. versionadded:: 3.12
+
+The *tar* format is designed to capture all details of a UNIX-like filesystem,
+which makes it very powerful.
+Unfortunately, the features make it easy to create tar files that have
+unintended -- and possibly malicious -- effects when extracted.
+For example, extracting a tar file can overwrite arbitrary files in various
+ways (e.g. by using absolute paths, ``..`` path components, or symlinks that
+affect later members).
+
+In most cases, the full functionality is not needed.
+Therefore, *tarfile* supports extraction filters: a mechanism to limit
+functionality, and thus mitigate some of the security issues.
+
+.. seealso::
+
+ :pep:`706`
+ Contains further motivation and rationale behind the design.
+
+The *filter* argument to :meth:`TarFile.extract` or :meth:`~TarFile.extractall`
+can be:
+
+* the string ``'fully_trusted'``: Honor all metadata as specified in the
+ archive.
+ Should be used if the user trusts the archive completely, or implements
+ their own complex verification.
+
+* the string ``'tar'``: Honor most *tar*-specific features (i.e. features of
+ UNIX-like filesystems), but block features that are very likely to be
+ surprising or malicious. See :func:`tar_filter` for details.
+
+* the string ``'data'``: Ignore or block most features specific to UNIX-like
+ filesystems. Intended for extracting cross-platform data archives.
+ See :func:`data_filter` for details.
+
+* ``None`` (default): Use :attr:`TarFile.extraction_filter`.
+
+ If that is also ``None`` (the default), raise a ``DeprecationWarning``,
+ and fall back to the ``'fully_trusted'`` filter, whose dangerous behavior
+ matches previous versions of Python.
+
+ In Python 3.14, the ``'data'`` filter will become the default instead.
+ It's possible to switch earlier; see :attr:`TarFile.extraction_filter`.
+
+* A callable which will be called for each extracted member with a
+ :ref:`TarInfo <tarinfo-objects>` describing the member and the destination
+ path to where the archive is extracted (i.e. the same path is used for all
+ members)::
+
+ filter(member: TarInfo, path: str, /) -> TarInfo | None
+
+ The callable is called just before each member is extracted, so it can
+ take the current state of the disk into account.
+ It can:
+
+ - return a :class:`TarInfo` object which will be used instead of the metadata
+ in the archive, or
+ - return ``None``, in which case the member will be skipped, or
+ - raise an exception to abort the operation or skip the member,
+ depending on :attr:`~TarFile.errorlevel`.
+ Note that when extraction is aborted, :meth:`~TarFile.extractall` may leave
+ the archive partially extracted. It does not attempt to clean up.
+
+Default named filters
+~~~~~~~~~~~~~~~~~~~~~
+
+The pre-defined, named filters are available as functions, so they can be
+reused in custom filters:
+
+.. function:: fully_trusted_filter(member, path, /)
+
+ Return *member* unchanged.
+
+ This implements the ``'fully_trusted'`` filter.
+
+.. function:: tar_filter(member, path, /)
+
+ Implements the ``'tar'`` filter.
+
+ - Strip leading slashes (``/`` and :attr:`os.sep`) from filenames.
+ - :ref:`Refuse <tarfile-extraction-refuse>` to extract files with absolute
+ paths (in case the name is absolute
+ even after stripping slashes, e.g. ``C:/foo`` on Windows).
+ This raises :class:`~tarfile.AbsolutePathError`.
+ - :ref:`Refuse <tarfile-extraction-refuse>` to extract files whose absolute
+ path (after following symlinks) would end up outside the destination.
+ This raises :class:`~tarfile.OutsideDestinationError`.
+ - Clear high mode bits (setuid, setgid, sticky) and group/other write bits
+ (:attr:`~stat.S_IWGRP`|:attr:`~stat.S_IWOTH`).
+
+ Return the modified ``TarInfo`` member.
+
+.. function:: data_filter(member, path, /)
+
+ Implements the ``'data'`` filter.
+ In addition to what ``tar_filter`` does:
+
+ - :ref:`Refuse <tarfile-extraction-refuse>` to extract links (hard or soft)
+ that link to absolute paths, or ones that link outside the destination.
+
+ This raises :class:`~tarfile.AbsoluteLinkError` or
+ :class:`~tarfile.LinkOutsideDestinationError`.
+
+ Note that such files are refused even on platforms that do not support
+ symbolic links.
+
+ - :ref:`Refuse <tarfile-extraction-refuse>` to extract device files
+ (including pipes).
+ This raises :class:`~tarfile.SpecialFileError`.
+
+ - For regular files, including hard links:
+
+ - Set the owner read and write permissions
+ (:attr:`~stat.S_IRUSR`|:attr:`~stat.S_IWUSR`).
+ - Remove the group & other executable permission
+ (:attr:`~stat.S_IXGRP`|:attr:`~stat.S_IXOTH`)
+ if the owner doesn’t have it (:attr:`~stat.S_IXUSR`).
+
+ - For other files (directories), set ``mode`` to ``None``, so
+ that extraction methods skip applying permission bits.
+ - Set user and group info (``uid``, ``gid``, ``uname``, ``gname``)
+ to ``None``, so that extraction methods skip setting it.
+
+ Return the modified ``TarInfo`` member.
+
+
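For example, a custom filter might delegate to :func:`data_filter` and then apply an extra, application-specific rule; the size threshold and archive name below are illustrative, and the sketch assumes Python 3.12+::

    import tarfile

    def limited_data_filter(member, path):
        # Reuse the named 'data' filter, then add a per-file size limit.
        member = tarfile.data_filter(member, path)
        if member.size > 10_000_000:    # arbitrary limit for illustration
            return None                 # skip this member
        return member

    with tarfile.open('archive.tar') as tar:
        tar.extractall(path='dest', filter=limited_data_filter)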
+.. _tarfile-extraction-refuse:
+
+Filter errors
+~~~~~~~~~~~~~
+
+When a filter refuses to extract a file, it will raise an appropriate exception,
+a subclass of :class:`~tarfile.FilterError`.
+This will abort the extraction if :attr:`TarFile.errorlevel` is 1 or more.
+With ``errorlevel=0`` the error will be logged and the member will be skipped,
+but extraction will continue.
+
+
+Hints for further verification
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Even with ``filter='data'``, *tarfile* is not suited for extracting untrusted
+files without prior inspection.
+Among other issues, the pre-defined filters do not prevent denial-of-service
+attacks. Users should do additional checks.
+
+Here is an incomplete list of things to consider:
+
+* Extract to a :func:`new temporary directory <tempfile.mkdtemp>`
+ to prevent e.g. exploiting pre-existing links, and to make it easier to
+ clean up after a failed extraction.
+* When working with untrusted data, use external (e.g. OS-level) limits on
+ disk, memory and CPU usage.
+* Check filenames against an allow-list of characters
+ (to filter out control characters, confusables, foreign path separators,
+ etc.).
+* Check that filenames have expected extensions (discouraging files that
+ execute when you “click on them”, or extension-less files like Windows special device names).
+* Limit the number of extracted files, total size of extracted data,
+ filename length (including symlink length), and size of individual files.
+* Check for files that would be shadowed on case-insensitive filesystems.
+
+Also note that:
+
+* Tar files may contain multiple versions of the same file.
+ Later ones are expected to overwrite any earlier ones.
+ This feature is crucial to allow updating tape archives, but can be abused
+ maliciously.
+* *tarfile* does not protect against issues with “live” data,
+ e.g. an attacker tinkering with the destination (or source) directory while
+ extraction (or archiving) is in progress.
+
+
+Supporting older Python versions
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Extraction filters were added to Python 3.12, but may be backported to older
+versions as security updates.
+To check whether the feature is available, use e.g.
+``hasattr(tarfile, 'data_filter')`` rather than checking the Python version.
+
+The following examples show how to support Python versions with and without
+the feature.
+Note that setting ``extraction_filter`` will affect any subsequent operations.
+
+* Fully trusted archive::
+
+ my_tarfile.extraction_filter = (lambda member, path: member)
+ my_tarfile.extractall()
+
+* Use the ``'data'`` filter if available, but revert to Python 3.11 behavior
+ (``'fully_trusted'``) if this feature is not available::
+
+ my_tarfile.extraction_filter = getattr(tarfile, 'data_filter',
+ (lambda member, path: member))
+ my_tarfile.extractall()
+
+* Use the ``'data'`` filter; *fail* if it is not available::
+
+ my_tarfile.extractall(filter=tarfile.data_filter)
+
+ or::
+
+ my_tarfile.extraction_filter = tarfile.data_filter
+ my_tarfile.extractall()
+
+* Use the ``'data'`` filter; *warn* if it is not available::
+
+ if hasattr(tarfile, 'data_filter'):
+ my_tarfile.extractall(filter='data')
+ else:
+ # remove this when no longer needed
+ warn_the_user('Extracting may be unsafe; consider updating Python')
+ my_tarfile.extractall()
+
+
+Stateful extraction filter example
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+While *tarfile*'s extraction methods take a simple *filter* callable,
+custom filters may be more complex objects with an internal state.
+It may be useful to write these as context managers, to be used like this::
+
+ with StatefulFilter() as filter_func:
+ tar.extractall(path, filter=filter_func)
+
+Such a filter can be written as, for example::
+
+ class StatefulFilter:
+ def __init__(self):
+ self.file_count = 0
+
+ def __enter__(self):
+ return self
+
+ def __call__(self, member, path):
+ self.file_count += 1
+ return member
+
+ def __exit__(self, *exc_info):
+ print(f'{self.file_count} files extracted')
+
+
.. _tarfile-commandline:
.. program:: tarfile
+
Command-Line Interface
----------------------
@@ -748,6 +1170,13 @@ Command-line options
Verbose output.
+.. cmdoption:: --filter
+
+ Specifies the *filter* for ``--extract``.
+ See :ref:`tarfile-extraction-filter` for details.
+ Only string names are accepted (that is, ``fully_trusted``, ``tar``,
+ and ``data``).
+
.. _tar-examples:
Examples
@@ -757,7 +1186,7 @@ How to extract an entire tar archive to the current working directory::
import tarfile
tar = tarfile.open("sample.tar.gz")
- tar.extractall()
+ tar.extractall(filter='data')
tar.close()
How to extract a subset of a tar archive with :meth:`TarFile.extractall` using
diff --git a/Doc/library/token-list.inc b/Doc/library/token-list.inc
index 2739d5bfc1dfa2..3b345099bf54b5 100644
--- a/Doc/library/token-list.inc
+++ b/Doc/library/token-list.inc
@@ -201,6 +201,10 @@
Token value for ``":="``.
+.. data:: EXCLAMATION
+
+ Token value for ``"!"``.
+
.. data:: OP
.. data:: AWAIT
@@ -213,6 +217,12 @@
.. data:: SOFT_KEYWORD
+.. data:: FSTRING_START
+
+.. data:: FSTRING_MIDDLE
+
+.. data:: FSTRING_END
+
.. data:: ERRORTOKEN
.. data:: N_TOKENS
diff --git a/Doc/library/types.rst b/Doc/library/types.rst
index 27b9846325914d..54887f4c51983a 100644
--- a/Doc/library/types.rst
+++ b/Doc/library/types.rst
@@ -82,6 +82,46 @@ Dynamic Type Creation
.. versionadded:: 3.7
+.. function:: get_original_bases(cls, /)
+
+ Return the tuple of objects originally given as the bases of *cls* before
+ the :meth:`~object.__mro_entries__` method has been called on any bases
+ (following the mechanisms laid out in :pep:`560`). This is useful for
+   introspecting :ref:`Generics <user-defined-generics>`.
+
+ For classes that have an ``__orig_bases__`` attribute, this
+ function returns the value of ``cls.__orig_bases__``.
+ For classes without the ``__orig_bases__`` attribute, ``cls.__bases__`` is
+ returned.
+
+ Examples::
+
+ from typing import TypeVar, Generic, NamedTuple, TypedDict
+
+ T = TypeVar("T")
+ class Foo(Generic[T]): ...
+ class Bar(Foo[int], float): ...
+ class Baz(list[str]): ...
+ Eggs = NamedTuple("Eggs", [("a", int), ("b", str)])
+ Spam = TypedDict("Spam", {"a": int, "b": str})
+
+ assert Bar.__bases__ == (Foo, float)
+ assert get_original_bases(Bar) == (Foo[int], float)
+
+ assert Baz.__bases__ == (list,)
+ assert get_original_bases(Baz) == (list[str],)
+
+ assert Eggs.__bases__ == (tuple,)
+ assert get_original_bases(Eggs) == (NamedTuple,)
+
+ assert Spam.__bases__ == (dict,)
+ assert get_original_bases(Spam) == (TypedDict,)
+
+ assert int.__bases__ == (object,)
+ assert get_original_bases(int) == (object,)
+
+ .. versionadded:: 3.12
+
.. seealso::
:pep:`560` - Core support for typing module and generic types
diff --git a/Doc/library/venv.rst b/Doc/library/venv.rst
index 240ab139838db9..52bf99e5bb0f67 100644
--- a/Doc/library/venv.rst
+++ b/Doc/library/venv.rst
@@ -284,11 +284,14 @@ creation according to their needs, the :class:`EnvBuilder` class.
.. method:: upgrade_dependencies(context)
- Upgrades the core venv dependency packages (currently ``pip`` and
- ``setuptools``) in the environment. This is done by shelling out to the
+ Upgrades the core venv dependency packages (currently ``pip``)
+ in the environment. This is done by shelling out to the
``pip`` executable in the environment.
.. versionadded:: 3.9
+ .. versionchanged:: 3.12
+
+ ``setuptools`` is no longer a core venv dependency.
.. method:: post_setup(context)
diff --git a/Doc/library/zipfile.rst b/Doc/library/zipfile.rst
index e2a085d6e98e67..6f4826cb065c64 100644
--- a/Doc/library/zipfile.rst
+++ b/Doc/library/zipfile.rst
@@ -7,7 +7,7 @@
.. moduleauthor:: James C. Ahlstrom
.. sectionauthor:: James C. Ahlstrom
-**Source code:** :source:`Lib/zipfile.py`
+**Source code:** :source:`Lib/zipfile/`
--------------
diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst
index 9f91ade35e50dc..55431f1951e50d 100644
--- a/Doc/reference/datamodel.rst
+++ b/Doc/reference/datamodel.rst
@@ -2102,6 +2102,10 @@ Resolving MRO entries
:func:`types.resolve_bases`
Dynamically resolve bases that are not instances of :class:`type`.
+ :func:`types.get_original_bases`
+ Retrieve a class's "original bases" prior to modifications by
+ :meth:`~object.__mro_entries__`.
+
:pep:`560`
Core support for typing module and generic types.
diff --git a/Doc/using/venv-create.inc b/Doc/using/venv-create.inc
index 43ee6b7807d57e..2fc90126482268 100644
--- a/Doc/using/venv-create.inc
+++ b/Doc/using/venv-create.inc
@@ -61,12 +61,16 @@ The command, if run with ``-h``, will show the available options::
environment (pip is bootstrapped by default)
--prompt PROMPT Provides an alternative prompt prefix for this
environment.
- --upgrade-deps Upgrade core dependencies: pip setuptools to the
+ --upgrade-deps Upgrade core dependencies (pip) to the
latest version in PyPI
Once an environment has been created, you may wish to activate it, e.g. by
sourcing an activate script in its bin directory.
+.. versionchanged:: 3.12
+
+ ``setuptools`` is no longer a core venv dependency.
+
.. versionchanged:: 3.9
Add ``--upgrade-deps`` option to upgrade pip + setuptools to the latest on PyPI
@@ -104,4 +108,3 @@ invoked to bootstrap ``pip`` into the virtual environment.
Multiple paths can be given to ``venv``, in which case an identical virtual
environment will be created, according to the given options, at each provided
path.
-
diff --git a/Doc/using/windows.rst b/Doc/using/windows.rst
index 1c4e41c0e0e239..380950eb507ffb 100644
--- a/Doc/using/windows.rst
+++ b/Doc/using/windows.rst
@@ -470,7 +470,7 @@ user's system, including environment variables, system registry settings, and
installed packages. The standard library is included as pre-compiled and
optimized ``.pyc`` files in a ZIP, and ``python3.dll``, ``python37.dll``,
``python.exe`` and ``pythonw.exe`` are all provided. Tcl/tk (including all
-dependants, such as Idle), pip and the Python documentation are not included.
+dependents, such as Idle), pip and the Python documentation are not included.
.. note::
diff --git a/Doc/whatsnew/2.6.rst b/Doc/whatsnew/2.6.rst
index 34f2656f765c7d..4ee2aacb108a36 100644
--- a/Doc/whatsnew/2.6.rst
+++ b/Doc/whatsnew/2.6.rst
@@ -172,7 +172,7 @@ this edition of "What's New in Python" links to the bug/patch
item for each change.
Hosting of the Python bug tracker is kindly provided by
-`Upfront Systems `__
+`Upfront Systems `__
of Stellenbosch, South Africa. Martin von Löwis put a
lot of effort into importing existing bugs and patches from
SourceForge; his scripts for this import operation are at
diff --git a/Doc/whatsnew/2.7.rst b/Doc/whatsnew/2.7.rst
index 810a2cd2537c34..36afcb163f1afc 100644
--- a/Doc/whatsnew/2.7.rst
+++ b/Doc/whatsnew/2.7.rst
@@ -2104,7 +2104,7 @@ Changes to Python's build process and to the C API include:
* The latest release of the GNU Debugger, GDB 7, can be `scripted
using Python
- `__.
+ `__.
When you begin debugging an executable program P, GDB will look for
a file named ``P-gdb.py`` and automatically read it. Dave Malcolm
contributed a :file:`python-gdb.py` that adds a number of
diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst
index 4165b16ba76441..373e31b37cd9dc 100644
--- a/Doc/whatsnew/3.12.rst
+++ b/Doc/whatsnew/3.12.rst
@@ -137,6 +137,13 @@ New Features
(Design by Pablo Galindo. Contributed by Pablo Galindo and Christian Heimes
with contributions from Gregory P. Smith [Google] and Mark Shannon
in :gh:`96123`.)
+* The extraction methods in :mod:`tarfile`, and :func:`shutil.unpack_archive`,
+  have a new *filter* argument that allows limiting tar features that may be
+ surprising or dangerous, such as creating files outside the destination
+ directory.
+ See :ref:`tarfile-extraction-filter` for details.
+ In Python 3.14, the default will switch to ``'data'``.
+ (Contributed by Petr Viktorin in :pep:`706`.)
Other Language Changes
@@ -250,7 +257,8 @@ asyncio
:mod:`asyncio` does not support legacy generator-based coroutines.
(Contributed by Kumar Aditya in :gh:`102748`.)
-* :func:`asyncio.wait` now accepts generators yielding tasks.
+* :func:`asyncio.wait` and :func:`asyncio.as_completed` now accept generators
+ yielding tasks.
(Contributed by Kumar Aditya in :gh:`78530`.)
csv
@@ -258,7 +266,7 @@ csv
* Add :data:`~csv.QUOTE_NOTNULL` and :data:`~csv.QUOTE_STRINGS` flags to
provide finer grained control of ``None`` and empty strings by
- :class:`~csv.reader` and :class:`~csv.writer` objects.
+ :class:`~csv.writer` objects.
inspect
-------
@@ -308,6 +316,13 @@ fractions
* Objects of type :class:`fractions.Fraction` now support float-style
formatting. (Contributed by Mark Dickinson in :gh:`100161`.)
+itertools
+---------
+
+* Added :class:`itertools.batched()` for collecting into even-sized
+ tuples where the last batch may be shorter than the rest.
+ (Contributed by Raymond Hettinger in :gh:`98363`.)
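+
+  A quick illustration (a minimal example, assuming Python 3.12)::
+
+      >>> from itertools import batched
+      >>> list(batched('ABCDEFG', 3))
+      [('A', 'B', 'C'), ('D', 'E', 'F'), ('G',)]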
+
math
----
@@ -399,6 +414,13 @@ threading
profiling functions in all running threads in addition to the calling one.
(Contributed by Pablo Galindo in :gh:`93503`.)
+types
+-----
+
+* Add :func:`types.get_original_bases` to allow for further introspection of
+ :ref:`user-defined-generics` when subclassed. (Contributed by
+ James Hilton-Balfe and Alex Waygood in :gh:`101827`.)
+
unicodedata
-----------
@@ -615,6 +637,10 @@ Deprecated
* The *onerror* argument of :func:`shutil.rmtree` is deprecated and will be removed
in Python 3.14. Use *onexc* instead. (Contributed by Irit Katriel in :gh:`102828`.)
+* Extracting tar archives without specifying *filter* is deprecated until
+  Python 3.14, when the ``'data'`` filter will become the default.
+ See :ref:`tarfile-extraction-filter` for details.
+
Pending Removal in Python 3.13
------------------------------
@@ -731,6 +757,24 @@ Removed
project can be installed: it still provides ``distutils``.
(Contributed by Victor Stinner in :gh:`92584`.)
+* Remove the bundled setuptools wheel from :mod:`ensurepip`,
+ and stop installing setuptools in environments created by :mod:`venv`.
+
+ ``pip (>= 22.1)`` does not require setuptools to be installed in the
+ environment. ``setuptools``-based (and ``distutils``-based) packages
+ can still be used with ``pip install``, since pip will provide
+ ``setuptools`` in the build environment it uses for building a
+ package.
+
+ ``easy_install``, ``pkg_resources``, ``setuptools`` and ``distutils``
+ are no longer provided by default in environments created with
+ ``venv`` or bootstrapped with ``ensurepip``, since they are part of
+ the ``setuptools`` package. For projects relying on these at runtime,
+ the ``setuptools`` project should be declared as a dependency and
+ installed separately (typically, using pip).
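+
+  For example, a project using ``pkg_resources`` at runtime could declare the
+  dependency with a ``pyproject.toml`` entry along these lines (a minimal
+  sketch; the project name and version are illustrative)::
+
+      [project]
+      name = "example-project"
+      version = "0.1"
+      dependencies = ["setuptools"]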
+
+ (Contributed by Pradyun Gedam in :gh:`95299`.)
+
* Removed many old deprecated :mod:`unittest` features:
- A number of :class:`~unittest.TestCase` method aliases:
@@ -959,6 +1003,10 @@ Changes in the Python API
exception instance, rather than to a ``(typ, exc, tb)`` tuple.
(Contributed by Irit Katriel in :gh:`103176`.)
+* When extracting tar files using :mod:`tarfile` or
+ :func:`shutil.unpack_archive`, pass the *filter* argument to limit features
+ that may be surprising or dangerous.
+ See :ref:`tarfile-extraction-filter` for details.
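+
+  A minimal sketch (``archive.tar.gz`` and ``dest/`` are illustrative names)::
+
+      import shutil
+      shutil.unpack_archive('archive.tar.gz', 'dest/', filter='data')
+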
Build Changes
=============
@@ -1103,6 +1151,24 @@ New Features
to replace the legacy-api :c:func:`!PyErr_Display`. (Contributed by
Irit Katriel in :gh:`102755`).
+* :pep:`683`: Introduced Immortal Objects to Python which allows objects
+ to bypass reference counts and introduced changes to the C-API:
+
+ - ``_Py_IMMORTAL_REFCNT``: The reference count that defines an object
+ as immortal.
+ - ``_Py_IsImmortal`` Checks if an object has the immortal reference count.
+ - ``PyObject_HEAD_INIT`` This will now initialize reference count to
+ ``_Py_IMMORTAL_REFCNT`` when used with ``Py_BUILD_CORE``.
+ - ``SSTATE_INTERNED_IMMORTAL`` An identifier for interned unicode objects
+ that are immortal.
+ - ``SSTATE_INTERNED_IMMORTAL_STATIC`` An identifier for interned unicode
+     objects that are immortal and static.
+ - ``sys.getunicodeinternedsize`` This returns the total number of unicode
+ objects that have been interned. This is now needed for refleak.py to
+     correctly track reference counts and allocated blocks.
+
+ (Contributed by Eddie Elizondo in :gh:`84436`.)
+
Porting to Python 3.12
----------------------
@@ -1267,8 +1333,7 @@ Removed
* :c:func:`!PyUnicode_GetSize`
* :c:func:`!PyUnicode_GET_DATA_SIZE`
-* Remove the ``PyUnicode_InternImmortal()`` function and the
- ``SSTATE_INTERNED_IMMORTAL`` macro.
+* Remove the ``PyUnicode_InternImmortal()`` function macro.
(Contributed by Victor Stinner in :gh:`85858`.)
* Remove ``Jython`` compatibility hacks from several stdlib modules and tests.
diff --git a/Grammar/Tokens b/Grammar/Tokens
index 1f3e3b09913653..096876fdd130f8 100644
--- a/Grammar/Tokens
+++ b/Grammar/Tokens
@@ -53,6 +53,7 @@ ATEQUAL '@='
RARROW '->'
ELLIPSIS '...'
COLONEQUAL ':='
+EXCLAMATION '!'
OP
AWAIT
@@ -60,6 +61,9 @@ ASYNC
TYPE_IGNORE
TYPE_COMMENT
SOFT_KEYWORD
+FSTRING_START
+FSTRING_MIDDLE
+FSTRING_END
ERRORTOKEN
# These aren't used by the C tokenizer but are needed for tokenize.py
diff --git a/Grammar/python.gram b/Grammar/python.gram
index 2498251293e80e..3a356c65a75195 100644
--- a/Grammar/python.gram
+++ b/Grammar/python.gram
@@ -194,7 +194,7 @@ yield_stmt[stmt_ty]: y=yield_expr { _PyAST_Expr(y, EXTRA) }
assert_stmt[stmt_ty]: 'assert' a=expression b=[',' z=expression { z }] { _PyAST_Assert(a, b, EXTRA) }
-import_stmt[stmt_ty]:
+import_stmt[stmt_ty]:
| invalid_import
| import_name
| import_from
@@ -415,8 +415,8 @@ try_stmt[stmt_ty]:
| invalid_try_stmt
| 'try' &&':' b=block f=finally_block { _PyAST_Try(b, NULL, NULL, f, EXTRA) }
| 'try' &&':' b=block ex[asdl_excepthandler_seq*]=except_block+ el=[else_block] f=[finally_block] { _PyAST_Try(b, ex, el, f, EXTRA) }
- | 'try' &&':' b=block ex[asdl_excepthandler_seq*]=except_star_block+ el=[else_block] f=[finally_block] {
- CHECK_VERSION(stmt_ty, 11, "Exception groups are",
+ | 'try' &&':' b=block ex[asdl_excepthandler_seq*]=except_star_block+ el=[else_block] f=[finally_block] {
+ CHECK_VERSION(stmt_ty, 11, "Exception groups are",
_PyAST_TryStar(b, ex, el, f, EXTRA)) }
@@ -807,7 +807,7 @@ atom[expr_ty]:
| 'True' { _PyAST_Constant(Py_True, NULL, EXTRA) }
| 'False' { _PyAST_Constant(Py_False, NULL, EXTRA) }
| 'None' { _PyAST_Constant(Py_None, NULL, EXTRA) }
- | &STRING strings
+ | &(STRING|FSTRING_START) strings
| NUMBER
| &'(' (tuple | group | genexp)
| &'[' (list | listcomp)
@@ -877,7 +877,26 @@ lambda_param[arg_ty]: a=NAME { _PyAST_arg(a->v.Name.id, NULL, NULL, EXTRA) }
# LITERALS
# ========
-strings[expr_ty] (memo): a=STRING+ { _PyPegen_concatenate_strings(p, a) }
+fstring_middle[expr_ty]:
+ | fstring_replacement_field
+ | t=FSTRING_MIDDLE { _PyPegen_constant_from_token(p, t) }
+fstring_replacement_field[expr_ty]:
+ | '{' a=(yield_expr | star_expressions) debug_expr="="? conversion=[fstring_conversion] format=[fstring_full_format_spec] '}' {
+ _PyPegen_formatted_value(p, a, debug_expr, conversion, format, EXTRA)
+ }
+ | invalid_replacement_field
+fstring_conversion[expr_ty]:
+ | conv_token="!" conv=NAME { _PyPegen_check_fstring_conversion(p, conv_token, conv) }
+fstring_full_format_spec[expr_ty]:
+ | ':' spec=fstring_format_spec* { spec ? _PyAST_JoinedStr((asdl_expr_seq*)spec, EXTRA) : NULL }
+fstring_format_spec[expr_ty]:
+ | t=FSTRING_MIDDLE { _PyPegen_constant_from_token(p, t) }
+ | fstring_replacement_field
+fstring[expr_ty]:
+ | a=FSTRING_START b=fstring_middle* c=FSTRING_END { _PyPegen_joined_str(p, a, (asdl_expr_seq*)b, c) }
+
+string[expr_ty]: s[Token*]=STRING { _PyPegen_constant_from_string(p, s) }
+strings[expr_ty] (memo): a[asdl_expr_seq*]=(fstring|string)+ { _PyPegen_concatenate_strings(p, a, EXTRA) }
list[expr_ty]:
| '[' a=[star_named_expressions] ']' { _PyAST_List(a, Load, EXTRA) }
@@ -1118,6 +1137,8 @@ invalid_expression:
_PyPegen_check_legacy_stmt(p, a) ? NULL : p->tokens[p->mark-1]->level == 0 ? NULL :
RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "invalid syntax. Perhaps you forgot a comma?") }
| a=disjunction 'if' b=disjunction !('else'|':') { RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "expected 'else' after 'if' expression") }
+ | a='lambda' [lambda_params] b=':' &(FSTRING_MIDDLE | fstring_replacement_field) {
+ RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "f-string: lambda expressions are not allowed without parentheses") }
invalid_named_expression(memo):
| a=expression ':=' expression {
@@ -1241,7 +1262,7 @@ invalid_group:
invalid_import:
| a='import' dotted_name 'from' dotted_name {
RAISE_SYNTAX_ERROR_STARTING_FROM(a, "Did you mean to use 'from ... import ...' instead?") }
-
+
invalid_import_from_targets:
| import_from_as_names ',' NEWLINE {
RAISE_SYNTAX_ERROR("trailing comma not allowed without surrounding parentheses") }
@@ -1335,3 +1356,24 @@ invalid_kvpair:
| expression a=':' &('}'|',') {RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "expression expected after dictionary key and ':'") }
invalid_starred_expression:
| a='*' expression '=' b=expression { RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "cannot assign to iterable argument unpacking") }
+invalid_replacement_field:
+ | '{' a='=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "f-string: valid expression required before '='") }
+ | '{' a='!' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "f-string: valid expression required before '!'") }
+ | '{' a=':' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "f-string: valid expression required before ':'") }
+ | '{' a='}' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "f-string: valid expression required before '}'") }
+ | '{' !(yield_expr | star_expressions) { RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: expecting a valid expression after '{'")}
+ | '{' (yield_expr | star_expressions) !('=' | '!' | ':' | '}') {
+ PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: expecting '=', or '!', or ':', or '}'") }
+ | '{' (yield_expr | star_expressions) '=' !('!' | ':' | '}') {
+ PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: expecting '!', or ':', or '}'") }
+ | '{' (yield_expr | star_expressions) '='? invalid_conversion_character
+ | '{' (yield_expr | star_expressions) '='? ['!' NAME] !(':' | '}') {
+ PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: expecting ':' or '}'") }
+ | '{' (yield_expr | star_expressions) '='? ['!' NAME] ':' fstring_format_spec* !'}' {
+ PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: expecting '}', or format specs") }
+ | '{' (yield_expr | star_expressions) '='? ['!' NAME] !'}' {
+ PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: expecting '}'") }
+
+invalid_conversion_character:
+ | '!' &(':' | '}') { RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: missing conversion character") }
+ | '!' !NAME { RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: invalid conversion character") }
diff --git a/Include/boolobject.h b/Include/boolobject.h
index ca21fbfad8e827..976fa35201d035 100644
--- a/Include/boolobject.h
+++ b/Include/boolobject.h
@@ -11,8 +11,7 @@ PyAPI_DATA(PyTypeObject) PyBool_Type;
#define PyBool_Check(x) Py_IS_TYPE((x), &PyBool_Type)
-/* Py_False and Py_True are the only two bools in existence.
-Don't forget to apply Py_INCREF() when returning either!!! */
+/* Py_False and Py_True are the only two bools in existence. */
/* Don't use these directly */
PyAPI_DATA(PyLongObject) _Py_FalseStruct;
@@ -31,8 +30,8 @@ PyAPI_FUNC(int) Py_IsFalse(PyObject *x);
#define Py_IsFalse(x) Py_Is((x), Py_False)
/* Macros for returning Py_True or Py_False, respectively */
-#define Py_RETURN_TRUE return Py_NewRef(Py_True)
-#define Py_RETURN_FALSE return Py_NewRef(Py_False)
+#define Py_RETURN_TRUE return Py_True
+#define Py_RETURN_FALSE return Py_False
/* Function to return a bool from a C long */
PyAPI_FUNC(PyObject *) PyBool_FromLong(long);
diff --git a/Include/cpython/object.h b/Include/cpython/object.h
index 98cc51cd7fee49..ce4d13cd9c28fe 100644
--- a/Include/cpython/object.h
+++ b/Include/cpython/object.h
@@ -564,3 +564,10 @@ PyAPI_FUNC(int) PyType_AddWatcher(PyType_WatchCallback callback);
PyAPI_FUNC(int) PyType_ClearWatcher(int watcher_id);
PyAPI_FUNC(int) PyType_Watch(int watcher_id, PyObject *type);
PyAPI_FUNC(int) PyType_Unwatch(int watcher_id, PyObject *type);
+
+/* Attempt to assign a version tag to the given type.
+ *
+ * Returns 1 if the type already had a valid version tag or a new one was
+ * assigned, or 0 if a new tag could not be assigned.
+ */
+PyAPI_FUNC(int) PyUnstable_Type_AssignVersionTag(PyTypeObject *type);
diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h
index 75a74ffa2f9dff..3394726dfffd72 100644
--- a/Include/cpython/unicodeobject.h
+++ b/Include/cpython/unicodeobject.h
@@ -98,9 +98,16 @@ typedef struct {
Py_ssize_t length; /* Number of code points in the string */
Py_hash_t hash; /* Hash value; -1 if not set */
struct {
- /* If interned is set, the two references from the
- dictionary to this object are *not* counted in ob_refcnt. */
- unsigned int interned:1;
+ /* If interned is non-zero, the two references from the
+ dictionary to this object are *not* counted in ob_refcnt.
+ The possible values here are:
+ 0: Not Interned
+ 1: Interned
+ 2: Interned and Immortal
+ 3: Interned, Immortal, and Static
+ This categorization allows the runtime to determine the right
+ cleanup mechanism at runtime shutdown. */
+ unsigned int interned:2;
/* Character size:
- PyUnicode_1BYTE_KIND (1):
@@ -135,7 +142,7 @@ typedef struct {
unsigned int ascii:1;
/* Padding to ensure that PyUnicode_DATA() is always aligned to
4 bytes (see issue #19537 on m68k). */
- unsigned int :26;
+ unsigned int :25;
} state;
} PyASCIIObject;
@@ -183,6 +190,8 @@ PyAPI_FUNC(int) _PyUnicode_CheckConsistency(
/* Interning state. */
#define SSTATE_NOT_INTERNED 0
#define SSTATE_INTERNED_MORTAL 1
+#define SSTATE_INTERNED_IMMORTAL 2
+#define SSTATE_INTERNED_IMMORTAL_STATIC 3
/* Use only if you know it's a string */
static inline unsigned int PyUnicode_CHECK_INTERNED(PyObject *op) {
diff --git a/Include/internal/pycore_global_objects_fini_generated.h b/Include/internal/pycore_global_objects_fini_generated.h
index 14dfd9ea5823ed..fdfa80bd7d424a 100644
--- a/Include/internal/pycore_global_objects_fini_generated.h
+++ b/Include/internal/pycore_global_objects_fini_generated.h
@@ -8,15 +8,13 @@ extern "C" {
# error "this header requires Py_BUILD_CORE define"
#endif
-#include "pycore_object.h" // _PyObject_IMMORTAL_REFCNT
-
#ifdef Py_DEBUG
static inline void
_PyStaticObject_CheckRefcnt(PyObject *obj) {
- if (Py_REFCNT(obj) < _PyObject_IMMORTAL_REFCNT) {
+ if (Py_REFCNT(obj) < _Py_IMMORTAL_REFCNT) {
_PyObject_ASSERT_FAILED_MSG(obj,
"immortal object has less refcnt than expected "
- "_PyObject_IMMORTAL_REFCNT");
+ "_Py_IMMORTAL_REFCNT");
}
}
#endif
diff --git a/Include/internal/pycore_long.h b/Include/internal/pycore_long.h
index 137a0465d5ec60..fe86581e81f6b5 100644
--- a/Include/internal/pycore_long.h
+++ b/Include/internal/pycore_long.h
@@ -245,7 +245,7 @@ _PyLong_FlipSign(PyLongObject *op) {
#define _PyLong_DIGIT_INIT(val) \
{ \
- .ob_base = _PyObject_IMMORTAL_INIT(&PyLong_Type), \
+ .ob_base = _PyObject_HEAD_INIT(&PyLong_Type) \
.long_value = { \
.lv_tag = TAG_FROM_SIGN_AND_SIZE( \
(val) == 0 ? 0 : ((val) < 0 ? -1 : 1), \
diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h
index b3d496ed6fc240..2ca047846e0935 100644
--- a/Include/internal/pycore_object.h
+++ b/Include/internal/pycore_object.h
@@ -14,21 +14,25 @@ extern "C" {
#include "pycore_pystate.h" // _PyInterpreterState_GET()
#include "pycore_runtime.h" // _PyRuntime
-/* This value provides *effective* immortality, meaning the object should never
- be deallocated (until runtime finalization). See PEP 683 for more details about
- immortality, as well as a proposed mechanism for proper immortality. */
-#define _PyObject_IMMORTAL_REFCNT 999999999
-
-#define _PyObject_IMMORTAL_INIT(type) \
- { \
- .ob_refcnt = _PyObject_IMMORTAL_REFCNT, \
- .ob_type = (type), \
- }
-#define _PyVarObject_IMMORTAL_INIT(type, size) \
- { \
- .ob_base = _PyObject_IMMORTAL_INIT(type), \
- .ob_size = size, \
- }
+/* We need to maintain an internal copy of Py{Var}Object_HEAD_INIT to avoid
+   designated initializer conflicts in C++20. If we use the definition in
+ object.h, we will be mixing designated and non-designated initializers in
+   pycore objects, which is forbidden in C++20. However, if we then use
+   designated initializers in object.h, then Extensions without designated
+   initializers break.
+ Furthermore, we can't use designated initializers in Extensions since these
+ are not supported pre-C++20. Thus, keeping an internal copy here is the most
+ backwards compatible solution */
+#define _PyObject_HEAD_INIT(type) \
+ { \
+ _PyObject_EXTRA_INIT \
+ .ob_refcnt = _Py_IMMORTAL_REFCNT, \
+ .ob_type = (type) \
+ },
+#define _PyVarObject_HEAD_INIT(type, size) \
+ { \
+ .ob_base = _PyObject_HEAD_INIT(type) \
+ .ob_size = size \
+ },
PyAPI_FUNC(void) _Py_NO_RETURN _Py_FatalRefcountErrorFunc(
const char *func,
@@ -61,9 +65,20 @@ static inline void _Py_RefcntAdd(PyObject* op, Py_ssize_t n)
}
#define _Py_RefcntAdd(op, n) _Py_RefcntAdd(_PyObject_CAST(op), n)
+static inline void _Py_SetImmortal(PyObject *op)
+{
+ if (op) {
+ op->ob_refcnt = _Py_IMMORTAL_REFCNT;
+ }
+}
+#define _Py_SetImmortal(op) _Py_SetImmortal(_PyObject_CAST(op))
+
static inline void
_Py_DECREF_SPECIALIZED(PyObject *op, const destructor destruct)
{
+ if (_Py_IsImmortal(op)) {
+ return;
+ }
_Py_DECREF_STAT_INC();
#ifdef Py_REF_DEBUG
_Py_DEC_REFTOTAL(_PyInterpreterState_GET());
@@ -82,6 +97,9 @@ _Py_DECREF_SPECIALIZED(PyObject *op, const destructor destruct)
static inline void
_Py_DECREF_NO_DEALLOC(PyObject *op)
{
+ if (_Py_IsImmortal(op)) {
+ return;
+ }
_Py_DECREF_STAT_INC();
#ifdef Py_REF_DEBUG
_Py_DEC_REFTOTAL(_PyInterpreterState_GET());
diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h
index 6e5f2289cb6b95..c40f9e7393a16f 100644
--- a/Include/internal/pycore_pystate.h
+++ b/Include/internal/pycore_pystate.h
@@ -64,17 +64,14 @@ _Py_ThreadCanHandlePendingCalls(void)
/* Variable and macro for in-line access to current thread
and interpreter state */
-static inline PyThreadState*
-_PyRuntimeState_GetThreadState(_PyRuntimeState *runtime)
-{
- return (PyThreadState*)_Py_atomic_load_relaxed(&runtime->tstate_current);
-}
+#if defined(HAVE_THREAD_LOCAL) && !defined(Py_BUILD_CORE_MODULE)
+extern _Py_thread_local PyThreadState *_Py_tss_tstate;
+#endif
+PyAPI_DATA(PyThreadState *) _PyThreadState_GetCurrent(void);
/* Get the current Python thread state.
- Efficient macro reading directly the 'tstate_current' atomic
- variable. The macro is unsafe: it does not check for error and it can
- return NULL.
+ This function is unsafe: it does not check for error and it can return NULL.
The caller must hold the GIL.
@@ -82,9 +79,20 @@ _PyRuntimeState_GetThreadState(_PyRuntimeState *runtime)
static inline PyThreadState*
_PyThreadState_GET(void)
{
- return _PyRuntimeState_GetThreadState(&_PyRuntime);
+#if defined(HAVE_THREAD_LOCAL) && !defined(Py_BUILD_CORE_MODULE)
+ return _Py_tss_tstate;
+#else
+ return _PyThreadState_GetCurrent();
+#endif
+}
+
+static inline PyThreadState*
+_PyRuntimeState_GetThreadState(_PyRuntimeState *Py_UNUSED(runtime))
+{
+ return _PyThreadState_GET();
}
+
static inline void
_Py_EnsureFuncTstateNotNULL(const char *func, PyThreadState *tstate)
{
diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h
index 3ebe49926edda6..2a3fd8ab2813ea 100644
--- a/Include/internal/pycore_runtime.h
+++ b/Include/internal/pycore_runtime.h
@@ -119,9 +119,6 @@ typedef struct pyruntimestate {
unsigned long main_thread;
- /* Assuming the current thread holds the GIL, this is the
- PyThreadState for the current thread. */
- _Py_atomic_address tstate_current;
/* Used for the thread state bound to the current thread. */
Py_tss_t autoTSSkey;
diff --git a/Include/internal/pycore_runtime_init.h b/Include/internal/pycore_runtime_init.h
index 5b09a45e41cd84..d8425b3199a89a 100644
--- a/Include/internal/pycore_runtime_init.h
+++ b/Include/internal/pycore_runtime_init.h
@@ -76,13 +76,13 @@ extern PyTypeObject _PyExc_MemoryError;
.latin1 = _Py_str_latin1_INIT, \
}, \
.tuple_empty = { \
- .ob_base = _PyVarObject_IMMORTAL_INIT(&PyTuple_Type, 0) \
+ .ob_base = _PyVarObject_HEAD_INIT(&PyTuple_Type, 0) \
}, \
.hamt_bitmap_node_empty = { \
- .ob_base = _PyVarObject_IMMORTAL_INIT(&_PyHamt_BitmapNode_Type, 0) \
+ .ob_base = _PyVarObject_HEAD_INIT(&_PyHamt_BitmapNode_Type, 0) \
}, \
.context_token_missing = { \
- .ob_base = _PyObject_IMMORTAL_INIT(&_PyContextTokenMissing_Type), \
+ .ob_base = _PyObject_HEAD_INIT(&_PyContextTokenMissing_Type) \
}, \
}, \
}, \
@@ -116,11 +116,11 @@ extern PyTypeObject _PyExc_MemoryError;
.singletons = { \
._not_used = 1, \
.hamt_empty = { \
- .ob_base = _PyObject_IMMORTAL_INIT(&_PyHamt_Type), \
+ .ob_base = _PyObject_HEAD_INIT(&_PyHamt_Type) \
.h_root = (PyHamtNode*)&_Py_SINGLETON(hamt_bitmap_node_empty), \
}, \
.last_resort_memory_error = { \
- _PyObject_IMMORTAL_INIT(&_PyExc_MemoryError), \
+ _PyObject_HEAD_INIT(&_PyExc_MemoryError) \
}, \
}, \
}, \
@@ -138,7 +138,7 @@ extern PyTypeObject _PyExc_MemoryError;
#define _PyBytes_SIMPLE_INIT(CH, LEN) \
{ \
- _PyVarObject_IMMORTAL_INIT(&PyBytes_Type, (LEN)), \
+ _PyVarObject_HEAD_INIT(&PyBytes_Type, (LEN)) \
.ob_shash = -1, \
.ob_sval = { (CH) }, \
}
@@ -149,7 +149,7 @@ extern PyTypeObject _PyExc_MemoryError;
#define _PyUnicode_ASCII_BASE_INIT(LITERAL, ASCII) \
{ \
- .ob_base = _PyObject_IMMORTAL_INIT(&PyUnicode_Type), \
+ .ob_base = _PyObject_HEAD_INIT(&PyUnicode_Type) \
.length = sizeof(LITERAL) - 1, \
.hash = -1, \
.state = { \
diff --git a/Include/internal/pycore_token.h b/Include/internal/pycore_token.h
index 95459ab9f7d004..b9df8766736adf 100644
--- a/Include/internal/pycore_token.h
+++ b/Include/internal/pycore_token.h
@@ -67,14 +67,18 @@ extern "C" {
#define RARROW 51
#define ELLIPSIS 52
#define COLONEQUAL 53
-#define OP 54
-#define AWAIT 55
-#define ASYNC 56
-#define TYPE_IGNORE 57
-#define TYPE_COMMENT 58
-#define SOFT_KEYWORD 59
-#define ERRORTOKEN 60
-#define N_TOKENS 64
+#define EXCLAMATION 54
+#define OP 55
+#define AWAIT 56
+#define ASYNC 57
+#define TYPE_IGNORE 58
+#define TYPE_COMMENT 59
+#define SOFT_KEYWORD 60
+#define FSTRING_START 61
+#define FSTRING_MIDDLE 62
+#define FSTRING_END 63
+#define ERRORTOKEN 64
+#define N_TOKENS 68
#define NT_OFFSET 256
/* Special definitions for cooperation with parser */
@@ -86,6 +90,8 @@ extern "C" {
(x) == NEWLINE || \
(x) == INDENT || \
(x) == DEDENT)
+#define ISSTRINGLIT(x) ((x) == STRING || \
+ (x) == FSTRING_MIDDLE)
// Symbols exported for test_peg_generator
diff --git a/Include/internal/pycore_unicodeobject.h b/Include/internal/pycore_unicodeobject.h
index ff97b9a623d210..1bb0f366e78163 100644
--- a/Include/internal/pycore_unicodeobject.h
+++ b/Include/internal/pycore_unicodeobject.h
@@ -12,6 +12,7 @@ extern "C" {
#include "pycore_ucnhash.h" // _PyUnicode_Name_CAPI
void _PyUnicode_ExactDealloc(PyObject *op);
+Py_ssize_t _PyUnicode_InternedSize(void);
/* runtime lifecycle */
diff --git a/Include/object.h b/Include/object.h
index 2943a6066818cd..66c3df0d7f780a 100644
--- a/Include/object.h
+++ b/Include/object.h
@@ -78,12 +78,76 @@ whose size is determined when the object is allocated.
/* PyObject_HEAD defines the initial segment of every PyObject. */
#define PyObject_HEAD PyObject ob_base;
-#define PyObject_HEAD_INIT(type) \
- { _PyObject_EXTRA_INIT \
- 1, (type) },
+/*
+Immortalization:
+
+The following indicates the immortalization strategy depending on the amount
+of available bits in the reference count field. All strategies are backwards
+compatible but the specific reference count value or immortalization check
+might change depending on the specializations for the underlying system.
+
+Proper deallocation of immortal instances requires distinguishing between
+statically allocated immortal instances vs those promoted by the runtime to be
+immortal. The latter should be the only instances that require
+cleanup during runtime finalization.
+*/
+
+#if SIZEOF_VOID_P > 4
+/*
+In 64+ bit systems, an object will be marked as immortal by setting all of the
+lower 32 bits of the reference count field, which is equal to: 0xFFFFFFFF
+
+Using the lower 32 bits makes the value backwards compatible by allowing
+C-Extensions without the updated checks in Py_INCREF and Py_DECREF to safely
+increase and decrease the object's reference count. The object would lose its
+immortality, but the execution would still be correct.
+
+Reference count increases will use saturated arithmetic, taking advantage of
+having all the lower 32 bits set, which prevents the reference count from going
+beyond the refcount limit. Immortality checks for reference count decreases will
+be done by checking the bit sign flag in the lower 32 bits.
+*/
+#define _Py_IMMORTAL_REFCNT UINT_MAX
+
+#else
+/*
+In 32 bit systems, an object will be marked as immortal by setting all of the
+lower 30 bits of the reference count field, which is equal to: 0x3FFFFFFF
-#define PyVarObject_HEAD_INIT(type, size) \
- { PyObject_HEAD_INIT(type) (size) },
+Using the lower 30 bits makes the value backwards compatible by allowing
+C-Extensions without the updated checks in Py_INCREF and Py_DECREF to safely
+increase and decrease the object's reference count. The object would lose its
+immortality, but the execution would still be correct.
+
+Reference count increases and decreases will first go through an immortality
+check by comparing the reference count field to the immortality reference count.
+*/
+#define _Py_IMMORTAL_REFCNT (UINT_MAX >> 2)
+#endif
+
+// Make all internal uses of PyObject_HEAD_INIT immortal while preserving the
+// C-API expectation that the refcnt will be set to 1.
+#ifdef Py_BUILD_CORE
+#define PyObject_HEAD_INIT(type) \
+ { \
+ _PyObject_EXTRA_INIT \
+ { _Py_IMMORTAL_REFCNT }, \
+ (type) \
+ },
+#else
+#define PyObject_HEAD_INIT(type) \
+ { \
+ _PyObject_EXTRA_INIT \
+ { 1 }, \
+ (type) \
+ },
+#endif /* Py_BUILD_CORE */
+
+#define PyVarObject_HEAD_INIT(type, size) \
+ { \
+ PyObject_HEAD_INIT(type) \
+ (size) \
+ },
/* PyObject_VAR_HEAD defines the initial segment of all variable-size
* container objects. These end with a declaration of an array with 1
@@ -101,7 +165,12 @@ whose size is determined when the object is allocated.
*/
struct _object {
_PyObject_HEAD_EXTRA
- Py_ssize_t ob_refcnt;
+ union {
+ Py_ssize_t ob_refcnt;
+#if SIZEOF_VOID_P > 4
+ PY_UINT32_T ob_refcnt_split[2];
+#endif
+ };
PyTypeObject *ob_type;
};
@@ -152,6 +221,15 @@ static inline Py_ssize_t Py_SIZE(PyObject *ob) {
# define Py_SIZE(ob) Py_SIZE(_PyObject_CAST(ob))
#endif
+static inline Py_ALWAYS_INLINE int _Py_IsImmortal(PyObject *op)
+{
+#if SIZEOF_VOID_P > 4
+ return _Py_CAST(PY_INT32_T, op->ob_refcnt) < 0;
+#else
+ return op->ob_refcnt == _Py_IMMORTAL_REFCNT;
+#endif
+}
+#define _Py_IsImmortal(op) _Py_IsImmortal(_PyObject_CAST(op))
static inline int Py_IS_TYPE(PyObject *ob, PyTypeObject *type) {
return Py_TYPE(ob) == type;
@@ -162,6 +240,13 @@ static inline int Py_IS_TYPE(PyObject *ob, PyTypeObject *type) {
static inline void Py_SET_REFCNT(PyObject *ob, Py_ssize_t refcnt) {
+ // This immortal check is for code that is unaware of immortal objects.
+ // The runtime tracks these objects and we should avoid as much
+ // as possible having extensions inadvertently change the refcnt
+ // of an immortalized object.
+ if (_Py_IsImmortal(ob)) {
+ return;
+ }
ob->ob_refcnt = refcnt;
}
#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 < 0x030b0000
@@ -524,19 +609,33 @@ PyAPI_FUNC(void) Py_DecRef(PyObject *);
PyAPI_FUNC(void) _Py_IncRef(PyObject *);
PyAPI_FUNC(void) _Py_DecRef(PyObject *);
-static inline void Py_INCREF(PyObject *op)
+static inline Py_ALWAYS_INLINE void Py_INCREF(PyObject *op)
{
#if defined(Py_REF_DEBUG) && defined(Py_LIMITED_API) && Py_LIMITED_API+0 >= 0x030A0000
// Stable ABI for Python 3.10 built in debug mode.
_Py_IncRef(op);
#else
- _Py_INCREF_STAT_INC();
// Non-limited C API and limited C API for Python 3.9 and older access
// directly PyObject.ob_refcnt.
+#if SIZEOF_VOID_P > 4
+ // Portable saturated add, branching on the carry flag and set low bits
+ PY_UINT32_T cur_refcnt = op->ob_refcnt_split[PY_BIG_ENDIAN];
+ PY_UINT32_T new_refcnt = cur_refcnt + 1;
+ if (new_refcnt == 0) {
+ return;
+ }
+ op->ob_refcnt_split[PY_BIG_ENDIAN] = new_refcnt;
+#else
+ // Explicitly check immortality against the immortal value
+ if (_Py_IsImmortal(op)) {
+ return;
+ }
+ op->ob_refcnt++;
+#endif
+ _Py_INCREF_STAT_INC();
#ifdef Py_REF_DEBUG
_Py_INC_REFTOTAL();
-#endif // Py_REF_DEBUG
- op->ob_refcnt++;
+#endif
#endif
}
#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 < 0x030b0000
@@ -553,6 +652,9 @@ static inline void Py_DECREF(PyObject *op) {
#elif defined(Py_REF_DEBUG)
static inline void Py_DECREF(const char *filename, int lineno, PyObject *op)
{
+ if (_Py_IsImmortal(op)) {
+ return;
+ }
_Py_DECREF_STAT_INC();
_Py_DEC_REFTOTAL();
if (--op->ob_refcnt != 0) {
@@ -567,11 +669,14 @@ static inline void Py_DECREF(const char *filename, int lineno, PyObject *op)
#define Py_DECREF(op) Py_DECREF(__FILE__, __LINE__, _PyObject_CAST(op))
#else
-static inline void Py_DECREF(PyObject *op)
+static inline Py_ALWAYS_INLINE void Py_DECREF(PyObject *op)
{
- _Py_DECREF_STAT_INC();
// Non-limited C API and limited C API for Python 3.9 and older access
// directly PyObject.ob_refcnt.
+ if (_Py_IsImmortal(op)) {
+ return;
+ }
+ _Py_DECREF_STAT_INC();
if (--op->ob_refcnt == 0) {
_Py_Dealloc(op);
}
@@ -721,7 +826,7 @@ PyAPI_FUNC(int) Py_IsNone(PyObject *x);
#define Py_IsNone(x) Py_Is((x), Py_None)
/* Macro for returning Py_None from a function */
-#define Py_RETURN_NONE return Py_NewRef(Py_None)
+#define Py_RETURN_NONE return Py_None
/*
Py_NotImplemented is a singleton used to signal that an operation is
@@ -731,7 +836,7 @@ PyAPI_DATA(PyObject) _Py_NotImplementedStruct; /* Don't use this directly */
#define Py_NotImplemented (&_Py_NotImplementedStruct)
/* Macro for returning Py_NotImplemented from a function */
-#define Py_RETURN_NOTIMPLEMENTED return Py_NewRef(Py_NotImplemented)
+#define Py_RETURN_NOTIMPLEMENTED return Py_NotImplemented
/* Rich comparison opcodes */
#define Py_LT 0
diff --git a/Include/pyport.h b/Include/pyport.h
index eef0fe1bfd71d8..bd0ba6d0681b21 100644
--- a/Include/pyport.h
+++ b/Include/pyport.h
@@ -184,7 +184,6 @@ typedef Py_ssize_t Py_ssize_clean_t;
# define Py_LOCAL_INLINE(type) static inline type
#endif
-// bpo-28126: Py_MEMCPY is kept for backwards compatibility,
#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 < 0x030b0000
# define Py_MEMCPY memcpy
#endif
@@ -663,6 +662,27 @@ extern char * _getpty(int *, int, mode_t, int);
# define WITH_THREAD
#endif
+#ifdef WITH_THREAD
+# ifdef Py_BUILD_CORE
+# ifdef HAVE_THREAD_LOCAL
+# error "HAVE_THREAD_LOCAL is already defined"
+# endif
+# define HAVE_THREAD_LOCAL 1
+# ifdef thread_local
+# define _Py_thread_local thread_local
+# elif __STDC_VERSION__ >= 201112L && !defined(__STDC_NO_THREADS__)
+# define _Py_thread_local _Thread_local
+# elif defined(_MSC_VER) /* AKA NT_THREADS */
+# define _Py_thread_local __declspec(thread)
+# elif defined(__GNUC__) /* includes clang */
+# define _Py_thread_local __thread
+# else
+ // fall back to the PyThread_tss_*() API, or ignore.
+# undef HAVE_THREAD_LOCAL
+# endif
+# endif
+#endif
+
/* Check that ALT_SOABI is consistent with Py_TRACE_REFS:
./configure --with-trace-refs should must be used to define Py_TRACE_REFS */
#if defined(ALT_SOABI) && defined(Py_TRACE_REFS)
diff --git a/Lib/ast.py b/Lib/ast.py
index 2cbc80a9835aa5..d9733a79d3a78f 100644
--- a/Lib/ast.py
+++ b/Lib/ast.py
@@ -25,6 +25,7 @@
:license: Python License.
"""
import sys
+import re
from _ast import *
from contextlib import contextmanager, nullcontext
from enum import IntEnum, auto, _simple_enum
@@ -305,28 +306,17 @@ def get_docstring(node, clean=True):
return text
-def _splitlines_no_ff(source):
+_line_pattern = re.compile(r"(.*?(?:\r\n|\n|\r|$))")
+def _splitlines_no_ff(source, maxlines=None):
"""Split a string into lines ignoring form feed and other chars.
This mimics how the Python parser splits source code.
"""
- idx = 0
lines = []
- next_line = ''
- while idx < len(source):
- c = source[idx]
- next_line += c
- idx += 1
- # Keep \r\n together
- if c == '\r' and idx < len(source) and source[idx] == '\n':
- next_line += '\n'
- idx += 1
- if c in '\r\n':
- lines.append(next_line)
- next_line = ''
-
- if next_line:
- lines.append(next_line)
+ for lineno, match in enumerate(_line_pattern.finditer(source), 1):
+ if maxlines is not None and lineno > maxlines:
+ break
+ lines.append(match[0])
return lines
@@ -360,7 +350,7 @@ def get_source_segment(source, node, *, padded=False):
except AttributeError:
return None
- lines = _splitlines_no_ff(source)
+ lines = _splitlines_no_ff(source, maxlines=end_lineno+1)
if end_lineno == lineno:
return lines[lineno].encode()[col_offset:end_col_offset].decode()
diff --git a/Lib/bdb.py b/Lib/bdb.py
index 7f9b09514ffd00..0f3eec653baaad 100644
--- a/Lib/bdb.py
+++ b/Lib/bdb.py
@@ -574,6 +574,8 @@ def format_stack_entry(self, frame_lineno, lprefix=': '):
line = linecache.getline(filename, lineno, frame.f_globals)
if line:
s += lprefix + line.strip()
+ else:
+ s += f'{lprefix}Warning: lineno is None'
return s
# The following methods can be called by clients to use
diff --git a/Lib/dataclasses.py b/Lib/dataclasses.py
index 4026c8b77975b7..a73cdc22a5f4b3 100644
--- a/Lib/dataclasses.py
+++ b/Lib/dataclasses.py
@@ -1128,8 +1128,13 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen,
if not getattr(cls, '__doc__'):
# Create a class doc-string.
- cls.__doc__ = (cls.__name__ +
- str(inspect.signature(cls)).replace(' -> None', ''))
+ try:
+ # In some cases fetching a signature is not possible.
+ # But, we surely should not fail in this case.
+ text_sig = str(inspect.signature(cls)).replace(' -> None', '')
+ except (TypeError, ValueError):
+ text_sig = ''
+ cls.__doc__ = (cls.__name__ + text_sig)
if match_args:
# I could probably compute this once
diff --git a/Lib/datetime.py b/Lib/datetime.py
index 637144637485bc..09a2d2d5381c34 100644
--- a/Lib/datetime.py
+++ b/Lib/datetime.py
@@ -1965,6 +1965,11 @@ def replace(self, year=None, month=None, day=None, hour=None,
def _local_timezone(self):
if self.tzinfo is None:
ts = self._mktime()
+ # Detect gap
+ ts2 = self.replace(fold=1-self.fold)._mktime()
+ if ts2 != ts: # This happens in a gap or a fold
+ if (ts2 > ts) == self.fold:
+ ts = ts2
else:
ts = (self - _EPOCH) // timedelta(seconds=1)
localtm = _time.localtime(ts)
diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py
index e637e6df06612d..0d6bd812475eea 100644
--- a/Lib/email/_header_value_parser.py
+++ b/Lib/email/_header_value_parser.py
@@ -1987,7 +1987,7 @@ def get_address_list(value):
try:
token, value = get_address(value)
address_list.append(token)
- except errors.HeaderParseError as err:
+ except errors.HeaderParseError:
leader = None
if value[0] in CFWS_LEADER:
leader, value = get_cfws(value)
@@ -2096,7 +2096,7 @@ def get_msg_id(value):
except errors.HeaderParseError:
try:
token, value = get_no_fold_literal(value)
- except errors.HeaderParseError as e:
+ except errors.HeaderParseError:
try:
token, value = get_domain(value)
msg_id.defects.append(errors.ObsoleteHeaderDefect(
@@ -2443,7 +2443,6 @@ def get_parameter(value):
raise errors.HeaderParseError("Parameter not followed by '='")
param.append(ValueTerminal('=', 'parameter-separator'))
value = value[1:]
- leader = None
if value and value[0] in CFWS_LEADER:
token, value = get_cfws(value)
param.append(token)
@@ -2568,7 +2567,7 @@ def parse_mime_parameters(value):
try:
token, value = get_parameter(value)
mime_parameters.append(token)
- except errors.HeaderParseError as err:
+ except errors.HeaderParseError:
leader = None
if value[0] in CFWS_LEADER:
leader, value = get_cfws(value)
@@ -2626,7 +2625,6 @@ def parse_content_type_header(value):
don't do that.
"""
ctype = ContentType()
- recover = False
if not value:
ctype.defects.append(errors.HeaderMissingRequiredValue(
"Missing content type specification"))
diff --git a/Lib/email/charset.py b/Lib/email/charset.py
index 9af269442fb8af..043801107b60e5 100644
--- a/Lib/email/charset.py
+++ b/Lib/email/charset.py
@@ -341,7 +341,6 @@ def header_encode_lines(self, string, maxlengths):
if not lines and not current_line:
lines.append(None)
else:
- separator = (' ' if lines else '')
joined_line = EMPTYSTRING.join(current_line)
header_bytes = _encode(joined_line, codec)
lines.append(encoder(header_bytes))
diff --git a/Lib/email/feedparser.py b/Lib/email/feedparser.py
index 6bc4e0c4e59895..885097c7dda067 100644
--- a/Lib/email/feedparser.py
+++ b/Lib/email/feedparser.py
@@ -264,7 +264,7 @@ def _parsegen(self):
yield NeedMoreData
continue
break
- msg = self._pop_message()
+ self._pop_message()
# We need to pop the EOF matcher in order to tell if we're at
# the end of the current file, not the end of the last block
# of message headers.
diff --git a/Lib/email/message.py b/Lib/email/message.py
index b540c33984a753..411118c74dabb4 100644
--- a/Lib/email/message.py
+++ b/Lib/email/message.py
@@ -14,7 +14,7 @@
# Intrapackage imports
from email import utils
from email import errors
-from email._policybase import Policy, compat32
+from email._policybase import compat32
from email import charset as _charset
from email._encoded_words import decode_b
Charset = _charset.Charset
diff --git a/Lib/email/mime/text.py b/Lib/email/mime/text.py
index dfe53c426b2ac4..7672b789138600 100644
--- a/Lib/email/mime/text.py
+++ b/Lib/email/mime/text.py
@@ -6,7 +6,6 @@
__all__ = ['MIMEText']
-from email.charset import Charset
from email.mime.nonmultipart import MIMENonMultipart
@@ -36,6 +35,6 @@ def __init__(self, _text, _subtype='plain', _charset=None, *, policy=None):
_charset = 'utf-8'
MIMENonMultipart.__init__(self, 'text', _subtype, policy=policy,
- **{'charset': str(_charset)})
+ charset=str(_charset))
self.set_payload(_text, _charset)
diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py
index 00e77749e25e77..4278422dfacc9f 100644
--- a/Lib/ensurepip/__init__.py
+++ b/Lib/ensurepip/__init__.py
@@ -9,11 +9,9 @@
__all__ = ["version", "bootstrap"]
-_PACKAGE_NAMES = ('setuptools', 'pip')
-_SETUPTOOLS_VERSION = "65.5.0"
-_PIP_VERSION = "23.0.1"
+_PACKAGE_NAMES = ('pip',)
+_PIP_VERSION = "23.1.1"
_PROJECTS = [
- ("setuptools", _SETUPTOOLS_VERSION, "py3"),
("pip", _PIP_VERSION, "py3"),
]
@@ -153,17 +151,17 @@ def _bootstrap(*, root=None, upgrade=False, user=False,
_disable_pip_configuration_settings()
- # By default, installing pip and setuptools installs all of the
+ # By default, installing pip installs all of the
# following scripts (X.Y == running Python version):
#
- # pip, pipX, pipX.Y, easy_install, easy_install-X.Y
+ # pip, pipX, pipX.Y
#
# pip 1.5+ allows ensurepip to request that some of those be left out
if altinstall:
- # omit pip, pipX and easy_install
+ # omit pip, pipX
os.environ["ENSUREPIP_OPTIONS"] = "altinstall"
elif not default_pip:
- # omit pip and easy_install
+ # omit pip
os.environ["ENSUREPIP_OPTIONS"] = "install"
with tempfile.TemporaryDirectory() as tmpdir:
@@ -271,14 +269,14 @@ def _main(argv=None):
action="store_true",
default=False,
help=("Make an alternate install, installing only the X.Y versioned "
- "scripts (Default: pipX, pipX.Y, easy_install-X.Y)."),
+ "scripts (Default: pipX, pipX.Y)."),
)
parser.add_argument(
"--default-pip",
action="store_true",
default=False,
help=("Make a default pip install, installing the unqualified pip "
- "and easy_install in addition to the versioned scripts."),
+ "in addition to the versioned scripts."),
)
args = parser.parse_args(argv)
diff --git a/Lib/ensurepip/_bundled/pip-23.0.1-py3-none-any.whl b/Lib/ensurepip/_bundled/pip-23.1.1-py3-none-any.whl
similarity index 76%
rename from Lib/ensurepip/_bundled/pip-23.0.1-py3-none-any.whl
rename to Lib/ensurepip/_bundled/pip-23.1.1-py3-none-any.whl
index a855dc40e8630d..dee4c0304b2c36 100644
Binary files a/Lib/ensurepip/_bundled/pip-23.0.1-py3-none-any.whl and b/Lib/ensurepip/_bundled/pip-23.1.1-py3-none-any.whl differ
diff --git a/Lib/ensurepip/_bundled/setuptools-65.5.0-py3-none-any.whl b/Lib/ensurepip/_bundled/setuptools-65.5.0-py3-none-any.whl
deleted file mode 100644
index 123a13e2c6b254..00000000000000
Binary files a/Lib/ensurepip/_bundled/setuptools-65.5.0-py3-none-any.whl and /dev/null differ
diff --git a/Lib/enum.py b/Lib/enum.py
index e9f224a303d3e5..6e497f7ef6a7de 100644
--- a/Lib/enum.py
+++ b/Lib/enum.py
@@ -190,6 +190,8 @@ class property(DynamicClassAttribute):
"""
member = None
+ _attr_type = None
+ _cls_type = None
def __get__(self, instance, ownerclass=None):
if instance is None:
@@ -199,33 +201,36 @@ def __get__(self, instance, ownerclass=None):
raise AttributeError(
'%r has no attribute %r' % (ownerclass, self.name)
)
- else:
- if self.fget is None:
- # look for a member by this name.
- try:
- return ownerclass._member_map_[self.name]
- except KeyError:
- raise AttributeError(
- '%r has no attribute %r' % (ownerclass, self.name)
- ) from None
- else:
- return self.fget(instance)
+ if self.fget is not None:
+ # use previous enum.property
+ return self.fget(instance)
+ elif self._attr_type == 'attr':
+            # look up previous attribute
+ return getattr(self._cls_type, self.name)
+ elif self._attr_type == 'desc':
+ # use previous descriptor
+ return getattr(instance._value_, self.name)
+ # look for a member by this name.
+ try:
+ return ownerclass._member_map_[self.name]
+ except KeyError:
+ raise AttributeError(
+ '%r has no attribute %r' % (ownerclass, self.name)
+ ) from None
def __set__(self, instance, value):
- if self.fset is None:
- raise AttributeError(
- " cannot set attribute %r" % (self.clsname, self.name)
- )
- else:
+ if self.fset is not None:
return self.fset(instance, value)
+ raise AttributeError(
+ " cannot set attribute %r" % (self.clsname, self.name)
+ )
def __delete__(self, instance):
- if self.fdel is None:
- raise AttributeError(
- " cannot delete attribute %r" % (self.clsname, self.name)
- )
- else:
+ if self.fdel is not None:
return self.fdel(instance)
+ raise AttributeError(
+ " cannot delete attribute %r" % (self.clsname, self.name)
+ )
def __set_name__(self, ownerclass, name):
self.name = name
@@ -313,27 +318,38 @@ def __set_name__(self, enum_class, member_name):
enum_class._member_names_.append(member_name)
# if necessary, get redirect in place and then add it to _member_map_
found_descriptor = None
+ descriptor_type = None
+ class_type = None
for base in enum_class.__mro__[1:]:
- descriptor = base.__dict__.get(member_name)
- if descriptor is not None:
- if isinstance(descriptor, (property, DynamicClassAttribute)):
- found_descriptor = descriptor
+ attr = base.__dict__.get(member_name)
+ if attr is not None:
+ if isinstance(attr, (property, DynamicClassAttribute)):
+ found_descriptor = attr
+ class_type = base
+ descriptor_type = 'enum'
break
- elif (
- hasattr(descriptor, 'fget') and
- hasattr(descriptor, 'fset') and
- hasattr(descriptor, 'fdel')
- ):
- found_descriptor = descriptor
+ elif _is_descriptor(attr):
+ found_descriptor = attr
+ descriptor_type = descriptor_type or 'desc'
+ class_type = class_type or base
continue
+ else:
+ descriptor_type = 'attr'
+ class_type = base
if found_descriptor:
redirect = property()
redirect.member = enum_member
redirect.__set_name__(enum_class, member_name)
- # earlier descriptor found; copy fget, fset, fdel to this one.
- redirect.fget = found_descriptor.fget
- redirect.fset = found_descriptor.fset
- redirect.fdel = found_descriptor.fdel
+ if descriptor_type in ('enum','desc'):
+ # earlier descriptor found; copy fget, fset, fdel to this one.
+ redirect.fget = getattr(found_descriptor, 'fget', None)
+ redirect._get = getattr(found_descriptor, '__get__', None)
+ redirect.fset = getattr(found_descriptor, 'fset', None)
+ redirect._set = getattr(found_descriptor, '__set__', None)
+ redirect.fdel = getattr(found_descriptor, 'fdel', None)
+ redirect._del = getattr(found_descriptor, '__delete__', None)
+ redirect._attr_type = descriptor_type
+ redirect._cls_type = class_type
setattr(enum_class, member_name, redirect)
else:
setattr(enum_class, member_name, enum_member)
diff --git a/Lib/idlelib/calltip_w.py b/Lib/idlelib/calltip_w.py
index 1e0404aa49f562..278546064adde2 100644
--- a/Lib/idlelib/calltip_w.py
+++ b/Lib/idlelib/calltip_w.py
@@ -25,7 +25,7 @@ def __init__(self, text_widget):
text_widget: a Text widget with code for which call-tips are desired
"""
# Note: The Text widget will be accessible as self.anchor_widget
- super(CalltipWindow, self).__init__(text_widget)
+ super().__init__(text_widget)
self.label = self.text = None
self.parenline = self.parencol = self.lastline = None
@@ -54,7 +54,7 @@ def position_window(self):
return
self.lastline = curline
self.anchor_widget.see("insert")
- super(CalltipWindow, self).position_window()
+ super().position_window()
def showtip(self, text, parenleft, parenright):
"""Show the call-tip, bind events which will close it and reposition it.
@@ -73,7 +73,7 @@ def showtip(self, text, parenleft, parenright):
self.parenline, self.parencol = map(
int, self.anchor_widget.index(parenleft).split("."))
- super(CalltipWindow, self).showtip()
+ super().showtip()
self._bind_events()
@@ -143,7 +143,7 @@ def hidetip(self):
# ValueError may be raised by MultiCall
pass
- super(CalltipWindow, self).hidetip()
+ super().hidetip()
def _bind_events(self):
"""Bind event handlers."""
diff --git a/Lib/idlelib/debugger.py b/Lib/idlelib/debugger.py
index ccd03e46e16147..452c62b42655b3 100644
--- a/Lib/idlelib/debugger.py
+++ b/Lib/idlelib/debugger.py
@@ -49,9 +49,9 @@ def __frame2message(self, frame):
filename = code.co_filename
lineno = frame.f_lineno
basename = os.path.basename(filename)
- message = "%s:%s" % (basename, lineno)
+ message = f"{basename}:{lineno}"
if code.co_name != "?":
- message = "%s: %s()" % (message, code.co_name)
+ message = f"{message}: {code.co_name}()"
return message
@@ -213,7 +213,8 @@ def interaction(self, message, frame, info=None):
m1 = "%s" % str(type)
if value is not None:
try:
- m1 = "%s: %s" % (m1, str(value))
+ # TODO redo entire section, tries not needed.
+ m1 = f"{m1}: {value}"
except:
pass
bg = "yellow"
diff --git a/Lib/idlelib/debugobj.py b/Lib/idlelib/debugobj.py
index 5a4c9978842035..71d01c7070df54 100644
--- a/Lib/idlelib/debugobj.py
+++ b/Lib/idlelib/debugobj.py
@@ -87,7 +87,7 @@ def GetSubList(self):
continue
def setfunction(value, key=key, object=self.object):
object[key] = value
- item = make_objecttreeitem("%r:" % (key,), value, setfunction)
+ item = make_objecttreeitem(f"{key!r}:", value, setfunction)
sublist.append(item)
return sublist
diff --git a/Lib/idlelib/editor.py b/Lib/idlelib/editor.py
index 08d6aa2efde22a..505815502600b1 100644
--- a/Lib/idlelib/editor.py
+++ b/Lib/idlelib/editor.py
@@ -38,12 +38,13 @@
def _sphinx_version():
"Format sys.version_info to produce the Sphinx version string used to install the chm docs"
major, minor, micro, level, serial = sys.version_info
- release = '%s%s' % (major, minor)
- release += '%s' % (micro,)
+ # TODO remove unneeded function since .chm no longer installed
+ release = f'{major}{minor}'
+ release += f'{micro}'
if level == 'candidate':
- release += 'rc%s' % (serial,)
+ release += f'rc{serial}'
elif level != 'final':
- release += '%s%s' % (level[0], serial)
+ release += f'{level[0]}{serial}'
return release
@@ -950,7 +951,7 @@ def update_recent_files_list(self, new_file=None):
rf_list = []
file_path = self.recent_files_path
if file_path and os.path.exists(file_path):
- with open(file_path, 'r',
+ with open(file_path,
encoding='utf_8', errors='replace') as rf_list_file:
rf_list = rf_list_file.readlines()
if new_file:
@@ -1458,7 +1459,7 @@ def newline_and_indent_event(self, event):
else:
self.reindent_to(y.compute_backslash_indent())
else:
- assert 0, "bogus continuation type %r" % (c,)
+ assert 0, f"bogus continuation type {c!r}"
return "break"
# This line starts a brand new statement; indent relative to
diff --git a/Lib/idlelib/filelist.py b/Lib/idlelib/filelist.py
index 254f5caf6b81b0..f87781d2570fe0 100644
--- a/Lib/idlelib/filelist.py
+++ b/Lib/idlelib/filelist.py
@@ -22,7 +22,7 @@ def open(self, filename, action=None):
# This can happen when bad filename is passed on command line:
messagebox.showerror(
"File Error",
- "%r is a directory." % (filename,),
+ f"{filename!r} is a directory.",
master=self.root)
return None
key = os.path.normcase(filename)
@@ -90,7 +90,7 @@ def filename_changed_edit(self, edit):
self.inversedict[conflict] = None
messagebox.showerror(
"Name Conflict",
- "You now have multiple edit windows open for %r" % (filename,),
+ f"You now have multiple edit windows open for {filename!r}",
master=self.root)
self.dict[newkey] = edit
self.inversedict[edit] = newkey
diff --git a/Lib/idlelib/idle_test/test_config.py b/Lib/idlelib/idle_test/test_config.py
index 697fda527968de..08ed76fe288294 100644
--- a/Lib/idlelib/idle_test/test_config.py
+++ b/Lib/idlelib/idle_test/test_config.py
@@ -191,7 +191,7 @@ def setUpClass(cls):
idle_dir = os.path.abspath(sys.path[0])
for ctype in conf.config_types:
config_path = os.path.join(idle_dir, '../config-%s.def' % ctype)
- with open(config_path, 'r') as f:
+ with open(config_path) as f:
cls.config_string[ctype] = f.read()
cls.orig_warn = config._warn
diff --git a/Lib/idlelib/idle_test/test_outwin.py b/Lib/idlelib/idle_test/test_outwin.py
index e347bfca7f191a..d6e85ad674417c 100644
--- a/Lib/idlelib/idle_test/test_outwin.py
+++ b/Lib/idlelib/idle_test/test_outwin.py
@@ -159,7 +159,7 @@ def test_file_line_helper(self, mock_open):
for line, expected_output in test_lines:
self.assertEqual(flh(line), expected_output)
if expected_output:
- mock_open.assert_called_with(expected_output[0], 'r')
+ mock_open.assert_called_with(expected_output[0])
if __name__ == '__main__':
diff --git a/Lib/idlelib/idle_test/test_sidebar.py b/Lib/idlelib/idle_test/test_sidebar.py
index 049531e66a414e..5506fd2b0e22a5 100644
--- a/Lib/idlelib/idle_test/test_sidebar.py
+++ b/Lib/idlelib/idle_test/test_sidebar.py
@@ -328,7 +328,7 @@ def test_scroll(self):
self.assertEqual(self.linenumber.sidebar_text.index('@0,0'), '11.0')
# Generate a mouse-wheel event and make sure it scrolled up or down.
- # The meaning of the "delta" is OS-dependant, so this just checks for
+ # The meaning of the "delta" is OS-dependent, so this just checks for
# any change.
self.linenumber.sidebar_text.event_generate('<MouseWheel>',
x=0, y=0,
@@ -691,7 +691,7 @@ def test_mousewheel(self):
self.assertIsNotNone(text.dlineinfo(text.index(f'{last_lineno}.0')))
# Scroll up using the event.
- # The meaning delta is platform-dependant.
+ # The meaning of delta is platform-dependent.
delta = -1 if sys.platform == 'darwin' else 120
sidebar.canvas.event_generate('<MouseWheel>', x=0, y=0, delta=delta)
yield
diff --git a/Lib/idlelib/multicall.py b/Lib/idlelib/multicall.py
index dc02001292fc14..0200f445cc9340 100644
--- a/Lib/idlelib/multicall.py
+++ b/Lib/idlelib/multicall.py
@@ -52,9 +52,9 @@
_modifier_masks = (MC_CONTROL, MC_ALT, MC_SHIFT, MC_META)
# a dictionary to map a modifier name into its number
-_modifier_names = dict([(name, number)
+_modifier_names = {name: number
for number in range(len(_modifiers))
- for name in _modifiers[number]])
+ for name in _modifiers[number]}
# In 3.4, if no shell window is ever open, the underlying Tk widget is
# destroyed before .__del__ methods here are called. The following
@@ -134,7 +134,7 @@ def nbits(n):
return nb
statelist = []
for state in states:
- substates = list(set(state & x for x in states))
+ substates = list({state & x for x in states})
substates.sort(key=nbits, reverse=True)
statelist.append(substates)
return statelist
@@ -258,9 +258,9 @@ def __del__(self):
_binder_classes = (_ComplexBinder,) * 4 + (_SimpleBinder,) * (len(_types)-4)
# A dictionary to map a type name into its number
-_type_names = dict([(name, number)
+_type_names = {name: number
for number in range(len(_types))
- for name in _types[number]])
+ for name in _types[number]}
_keysym_re = re.compile(r"^\w+$")
_button_re = re.compile(r"^[1-5]$")
diff --git a/Lib/idlelib/outwin.py b/Lib/idlelib/outwin.py
index 5ab08bbaf4bc95..ac67c904ab9797 100644
--- a/Lib/idlelib/outwin.py
+++ b/Lib/idlelib/outwin.py
@@ -42,7 +42,7 @@ def file_line_helper(line):
if match:
filename, lineno = match.group(1, 2)
try:
- f = open(filename, "r")
+ f = open(filename)
f.close()
break
except OSError:
diff --git a/Lib/idlelib/pyshell.py b/Lib/idlelib/pyshell.py
index e68233a5a4131e..bdde156166171b 100755
--- a/Lib/idlelib/pyshell.py
+++ b/Lib/idlelib/pyshell.py
@@ -249,7 +249,7 @@ def store_file_breaks(self):
breaks = self.breakpoints
filename = self.io.filename
try:
- with open(self.breakpointPath, "r") as fp:
+ with open(self.breakpointPath) as fp:
lines = fp.readlines()
except OSError:
lines = []
@@ -279,7 +279,7 @@ def restore_file_breaks(self):
if filename is None:
return
if os.path.isfile(self.breakpointPath):
- with open(self.breakpointPath, "r") as fp:
+ with open(self.breakpointPath) as fp:
lines = fp.readlines()
for line in lines:
if line.startswith(filename + '='):
@@ -441,7 +441,7 @@ def build_subprocess_arglist(self):
# run from the IDLE source directory.
del_exitf = idleConf.GetOption('main', 'General', 'delete-exitfunc',
default=False, type='bool')
- command = "__import__('idlelib.run').run.main(%r)" % (del_exitf,)
+ command = f"__import__('idlelib.run').run.main({del_exitf!r})"
return [sys.executable] + w + ["-c", command, str(self.port)]
def start_subprocess(self):
@@ -574,9 +574,9 @@ def transfer_path(self, with_cwd=False):
self.runcommand("""if 1:
import sys as _sys
- _sys.path = %r
+ _sys.path = {!r}
del _sys
- \n""" % (path,))
+ \n""".format(path))
active_seq = None
@@ -703,14 +703,14 @@ def stuffsource(self, source):
def prepend_syspath(self, filename):
"Prepend sys.path with file's directory if not already included"
self.runcommand("""if 1:
- _filename = %r
+ _filename = {!r}
import sys as _sys
from os.path import dirname as _dirname
_dir = _dirname(_filename)
if not _dir in _sys.path:
_sys.path.insert(0, _dir)
del _filename, _sys, _dirname, _dir
- \n""" % (filename,))
+ \n""".format(filename))
def showsyntaxerror(self, filename=None):
"""Override Interactive Interpreter method: Use Colorizing
@@ -1536,7 +1536,7 @@ def main():
try:
opts, args = getopt.getopt(sys.argv[1:], "c:deihnr:st:")
except getopt.error as msg:
- print("Error: %s\n%s" % (msg, usage_msg), file=sys.stderr)
+ print(f"Error: {msg}\n{usage_msg}", file=sys.stderr)
sys.exit(2)
for o, a in opts:
if o == '-c':
@@ -1668,9 +1668,9 @@ def main():
if cmd or script:
shell.interp.runcommand("""if 1:
import sys as _sys
- _sys.argv = %r
+ _sys.argv = {!r}
del _sys
- \n""" % (sys.argv,))
+ \n""".format(sys.argv))
if cmd:
shell.interp.execsource(cmd)
elif script:
diff --git a/Lib/idlelib/redirector.py b/Lib/idlelib/redirector.py
index 9ab34c5acfb22c..4928340e98df68 100644
--- a/Lib/idlelib/redirector.py
+++ b/Lib/idlelib/redirector.py
@@ -47,9 +47,8 @@ def __init__(self, widget):
tk.createcommand(w, self.dispatch)
def __repr__(self):
- return "%s(%s<%s>)" % (self.__class__.__name__,
- self.widget.__class__.__name__,
- self.widget._w)
+ w = self.widget
+ return f"{self.__class__.__name__,}({w.__class__.__name__}<{w._w}>)"
def close(self):
"Unregister operations and revert redirection created by .__init__."
@@ -143,8 +142,7 @@ def __init__(self, redir, operation):
self.orig_and_operation = (redir.orig, operation)
def __repr__(self):
- return "%s(%r, %r)" % (self.__class__.__name__,
- self.redir, self.operation)
+ return f"{self.__class__.__name__,}({self.redir!r}, {self.operation!r})"
def __call__(self, *args):
return self.tk_call(self.orig_and_operation + args)
diff --git a/Lib/idlelib/rpc.py b/Lib/idlelib/rpc.py
index 62eec84c9c8d09..b08b80c9004551 100644
--- a/Lib/idlelib/rpc.py
+++ b/Lib/idlelib/rpc.py
@@ -174,7 +174,7 @@ def localcall(self, seq, request):
except TypeError:
return ("ERROR", "Bad request format")
if oid not in self.objtable:
- return ("ERROR", "Unknown object id: %r" % (oid,))
+ return ("ERROR", f"Unknown object id: {oid!r}")
obj = self.objtable[oid]
if methodname == "__methods__":
methods = {}
@@ -185,7 +185,7 @@ def localcall(self, seq, request):
_getattributes(obj, attributes)
return ("OK", attributes)
if not hasattr(obj, methodname):
- return ("ERROR", "Unsupported method name: %r" % (methodname,))
+ return ("ERROR", f"Unsupported method name: {methodname!r}")
method = getattr(obj, methodname)
try:
if how == 'CALL':
diff --git a/Lib/idlelib/run.py b/Lib/idlelib/run.py
index 577c49eb67b20d..84792a82b0022c 100644
--- a/Lib/idlelib/run.py
+++ b/Lib/idlelib/run.py
@@ -52,13 +52,13 @@ def idle_formatwarning(message, category, filename, lineno, line=None):
"""Format warnings the IDLE way."""
s = "\nWarning (from warnings module):\n"
- s += ' File \"%s\", line %s\n' % (filename, lineno)
+ s += f' File \"{filename}\", line {lineno}\n'
if line is None:
line = linecache.getline(filename, lineno)
line = line.strip()
if line:
s += " %s\n" % line
- s += "%s: %s\n" % (category.__name__, message)
+ s += f"{category.__name__}: {message}\n"
return s
def idle_showwarning_subproc(
@@ -239,6 +239,7 @@ def print_exception():
efile = sys.stderr
typ, val, tb = excinfo = sys.exc_info()
sys.last_type, sys.last_value, sys.last_traceback = excinfo
+ sys.last_exc = val
seen = set()
def print_exc(typ, exc, tb):
diff --git a/Lib/idlelib/textview.py b/Lib/idlelib/textview.py
index a66c1a4309a617..23f0f4cb5027ec 100644
--- a/Lib/idlelib/textview.py
+++ b/Lib/idlelib/textview.py
@@ -169,7 +169,7 @@ def view_file(parent, title, filename, encoding, modal=True, wrap='word',
with contents of the file.
"""
try:
- with open(filename, 'r', encoding=encoding) as file:
+ with open(filename, encoding=encoding) as file:
contents = file.read()
except OSError:
showerror(title='File Load Error',
diff --git a/Lib/idlelib/tooltip.py b/Lib/idlelib/tooltip.py
index d714318dae8ef1..3983690dd41177 100644
--- a/Lib/idlelib/tooltip.py
+++ b/Lib/idlelib/tooltip.py
@@ -92,7 +92,7 @@ def __init__(self, anchor_widget, hover_delay=1000):
e.g. after hovering over the anchor widget with the mouse for enough
time.
"""
- super(OnHoverTooltipBase, self).__init__(anchor_widget)
+ super().__init__(anchor_widget)
self.hover_delay = hover_delay
self._after_id = None
@@ -107,7 +107,7 @@ def __del__(self):
self.anchor_widget.unbind("", self._id3) # pragma: no cover
except TclError:
pass
- super(OnHoverTooltipBase, self).__del__()
+ super().__del__()
def _show_event(self, event=None):
"""event handler to display the tooltip"""
@@ -139,7 +139,7 @@ def hidetip(self):
self.unschedule()
except TclError: # pragma: no cover
pass
- super(OnHoverTooltipBase, self).hidetip()
+ super().hidetip()
class Hovertip(OnHoverTooltipBase):
@@ -154,7 +154,7 @@ def __init__(self, anchor_widget, text, hover_delay=1000):
e.g. after hovering over the anchor widget with the mouse for enough
time.
"""
- super(Hovertip, self).__init__(anchor_widget, hover_delay=hover_delay)
+ super().__init__(anchor_widget, hover_delay=hover_delay)
self.text = text
def showcontents(self):
diff --git a/Lib/idlelib/tree.py b/Lib/idlelib/tree.py
index 5947268f5c35ae..5f30f0f6092bfa 100644
--- a/Lib/idlelib/tree.py
+++ b/Lib/idlelib/tree.py
@@ -32,7 +32,7 @@
if os.path.isdir(_icondir):
ICONDIR = _icondir
elif not os.path.isdir(ICONDIR):
- raise RuntimeError("can't find icon directory (%r)" % (ICONDIR,))
+ raise RuntimeError(f"can't find icon directory ({ICONDIR!r})")
def listicons(icondir=ICONDIR):
"""Utility to display the available icons."""
diff --git a/Lib/idlelib/undo.py b/Lib/idlelib/undo.py
index 85ecffecb4cbcb..5f10c0f05c1acb 100644
--- a/Lib/idlelib/undo.py
+++ b/Lib/idlelib/undo.py
@@ -309,7 +309,7 @@ def __repr__(self):
s = self.__class__.__name__
strs = []
for cmd in self.cmds:
- strs.append(" %r" % (cmd,))
+ strs.append(f" {cmd!r}")
return s + "(\n" + ",\n".join(strs) + "\n)"
def __len__(self):
diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py
index 74a78bc9f6dc08..cb227373ca2fd4 100644
--- a/Lib/importlib/_bootstrap_external.py
+++ b/Lib/importlib/_bootstrap_external.py
@@ -440,7 +440,8 @@ def _write_atomic(path, data, mode=0o666):
# Python 3.12a7 3524 (Shrink the BINARY_SUBSCR caches)
# Python 3.12b1 3525 (Shrink the CALL caches)
# Python 3.12b1 3526 (Add instrumentation support)
-# Python 3.12b1 3527 (Optimize super() calls)
+# Python 3.12b1 3527 (Add LOAD_SUPER_ATTR)
+# Python 3.12b1 3528 (Add LOAD_SUPER_ATTR_METHOD specialization)
# Python 3.13 will start with 3550
@@ -457,7 +458,7 @@ def _write_atomic(path, data, mode=0o666):
# Whenever MAGIC_NUMBER is changed, the ranges in the magic_values array
# in PC/launcher.c must also be updated.
-MAGIC_NUMBER = (3527).to_bytes(2, 'little') + b'\r\n'
+MAGIC_NUMBER = (3528).to_bytes(2, 'little') + b'\r\n'
_RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little') # For import.c
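As a quick, illustrative check of the magic-number bump above (a minimal sketch; it only holds on a build that already contains this change):

    from importlib.util import MAGIC_NUMBER

    # The first two bytes encode the bytecode version, little-endian.
    assert MAGIC_NUMBER[2:] == b'\r\n'
    print(int.from_bytes(MAGIC_NUMBER[:2], 'little'))  # 3528 after this change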
diff --git a/Lib/importlib/metadata/__init__.py b/Lib/importlib/metadata/__init__.py
index 40ab1a1aaac328..82e0ce1b281c54 100644
--- a/Lib/importlib/metadata/__init__.py
+++ b/Lib/importlib/metadata/__init__.py
@@ -12,7 +12,9 @@
import functools
import itertools
import posixpath
+import contextlib
import collections
+import inspect
from . import _adapters, _meta
from ._collections import FreezableDefaultDict, Pair
@@ -24,7 +26,7 @@
from importlib import import_module
from importlib.abc import MetaPathFinder
from itertools import starmap
-from typing import List, Mapping, Optional
+from typing import List, Mapping, Optional, cast
__all__ = [
@@ -341,11 +343,30 @@ def __repr__(self):
return f'<FileHash mode: {self.mode} value: {self.value}>'
-class Distribution:
+class DeprecatedNonAbstract:
+ def __new__(cls, *args, **kwargs):
+ all_names = {
+ name for subclass in inspect.getmro(cls) for name in vars(subclass)
+ }
+ abstract = {
+ name
+ for name in all_names
+ if getattr(getattr(cls, name), '__isabstractmethod__', False)
+ }
+ if abstract:
+ warnings.warn(
+ f"Unimplemented abstract methods {abstract}",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return super().__new__(cls)
+
+
+class Distribution(DeprecatedNonAbstract):
"""A Python distribution package."""
@abc.abstractmethod
- def read_text(self, filename):
+ def read_text(self, filename) -> Optional[str]:
"""Attempt to load metadata file given by the name.
:param filename: The name of the file in the distribution info.
@@ -419,7 +440,7 @@ def metadata(self) -> _meta.PackageMetadata:
The returned object will have keys that name the various bits of
metadata. See PEP 566 for details.
"""
- text = (
+ opt_text = (
self.read_text('METADATA')
or self.read_text('PKG-INFO')
# This last clause is here to support old egg-info files. Its
@@ -427,6 +448,7 @@ def metadata(self) -> _meta.PackageMetadata:
# (which points to the egg-info file) attribute unchanged.
or self.read_text('')
)
+ text = cast(str, opt_text)
return _adapters.Message(email.message_from_string(text))
@property
@@ -455,8 +477,8 @@ def files(self):
:return: List of PackagePath for this distribution or None
Result is `None` if the metadata file that enumerates files
- (i.e. RECORD for dist-info or SOURCES.txt for egg-info) is
- missing.
+ (i.e. RECORD for dist-info, or installed-files.txt or
+ SOURCES.txt for egg-info) is missing.
Result may be empty if the metadata exists but is empty.
"""
@@ -469,9 +491,19 @@ def make_file(name, hash=None, size_str=None):
@pass_none
def make_files(lines):
- return list(starmap(make_file, csv.reader(lines)))
+ return starmap(make_file, csv.reader(lines))
- return make_files(self._read_files_distinfo() or self._read_files_egginfo())
+ @pass_none
+ def skip_missing_files(package_paths):
+ return list(filter(lambda path: path.locate().exists(), package_paths))
+
+ return skip_missing_files(
+ make_files(
+ self._read_files_distinfo()
+ or self._read_files_egginfo_installed()
+ or self._read_files_egginfo_sources()
+ )
+ )
def _read_files_distinfo(self):
"""
@@ -480,10 +512,45 @@ def _read_files_distinfo(self):
text = self.read_text('RECORD')
return text and text.splitlines()
- def _read_files_egginfo(self):
+ def _read_files_egginfo_installed(self):
+ """
+ Read installed-files.txt and return lines in a similar
+ CSV-parsable format as RECORD: each file must be placed
+ relative to the site-packages directory and must also be
+ quoted (since file names can contain literal commas).
+
+ This file is written when the package is installed by pip,
+ but it might not be written for other installation methods.
+ Assume the file is accurate if it exists.
"""
- SOURCES.txt might contain literal commas, so wrap each line
- in quotes.
+ text = self.read_text('installed-files.txt')
+ # Prepend the .egg-info/ subdir to the lines in this file.
+ # But this subdir is only available from PathDistribution's
+ # self._path.
+ subdir = getattr(self, '_path', None)
+ if not text or not subdir:
+ return
+
+ paths = (
+ (subdir / name)
+ .resolve()
+ .relative_to(self.locate_file('').resolve())
+ .as_posix()
+ for name in text.splitlines()
+ )
+ return map('"{}"'.format, paths)
+
+ def _read_files_egginfo_sources(self):
+ """
+ Read SOURCES.txt and return lines in a similar CSV-parsable
+ format as RECORD: each file name must be quoted (since it
+ might contain literal commas).
+
+ Note that SOURCES.txt is not a reliable source for what
+ files are installed by a package. This file is generated
+ for a source archive, and the files that are present
+ there (e.g. setup.py) may not correctly reflect the files
+ that are present after the package has been installed.
"""
text = self.read_text('SOURCES.txt')
return text and map('"{}"'.format, text.splitlines())
@@ -886,8 +953,13 @@ def _top_level_declared(dist):
def _top_level_inferred(dist):
- return {
- f.parts[0] if len(f.parts) > 1 else f.with_suffix('').name
+ opt_names = {
+ f.parts[0] if len(f.parts) > 1 else inspect.getmodulename(f)
for f in always_iterable(dist.files)
- if f.suffix == ".py"
}
+
+ @pass_none
+ def importable_name(name):
+ return '.' not in name
+
+ return filter(importable_name, opt_names)
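A minimal sketch of what the new DeprecatedNonAbstract base changes for users (the subclass name is hypothetical; behaviour assumes this patched importlib.metadata): a Distribution subclass that leaves the abstract methods unimplemented can still be instantiated, but now emits a DeprecationWarning instead of silently working.

    import warnings
    from importlib.metadata import Distribution

    class IncompleteDist(Distribution):
        # read_text() and locate_file() deliberately left unimplemented
        pass

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        IncompleteDist()  # still allowed for now
    print(any(w.category is DeprecationWarning for w in caught))  # True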
diff --git a/Lib/importlib/metadata/_adapters.py b/Lib/importlib/metadata/_adapters.py
index aa460d3eda50fb..6aed69a30857e4 100644
--- a/Lib/importlib/metadata/_adapters.py
+++ b/Lib/importlib/metadata/_adapters.py
@@ -1,3 +1,5 @@
+import functools
+import warnings
import re
import textwrap
import email.message
@@ -5,6 +7,15 @@
from ._text import FoldedCase
+# Do not remove prior to 2024-01-01 or Python 3.14
+_warn = functools.partial(
+ warnings.warn,
+ "Implicit None on return values is deprecated and will raise KeyErrors.",
+ DeprecationWarning,
+ stacklevel=2,
+)
+
+
class Message(email.message.Message):
multiple_use_keys = set(
map(
@@ -39,6 +50,16 @@ def __init__(self, *args, **kwargs):
def __iter__(self):
return super().__iter__()
+ def __getitem__(self, item):
+ """
+ Warn users that a ``KeyError`` can be expected when a
+ missing key is supplied. Ref python/importlib_metadata#371.
+ """
+ res = super().__getitem__(item)
+ if res is None:
+ _warn()
+ return res
+
def _repair_headers(self):
def redent(value):
"Correct for RFC822 indentation"
diff --git a/Lib/importlib/metadata/_meta.py b/Lib/importlib/metadata/_meta.py
index d5c0576194ece2..c9a7ef906a8a8c 100644
--- a/Lib/importlib/metadata/_meta.py
+++ b/Lib/importlib/metadata/_meta.py
@@ -1,4 +1,5 @@
-from typing import Any, Dict, Iterator, List, Protocol, TypeVar, Union
+from typing import Protocol
+from typing import Any, Dict, Iterator, List, Optional, TypeVar, Union, overload
_T = TypeVar("_T")
@@ -17,7 +18,21 @@ def __getitem__(self, key: str) -> str:
def __iter__(self) -> Iterator[str]:
... # pragma: no cover
- def get_all(self, name: str, failobj: _T = ...) -> Union[List[Any], _T]:
+ @overload
+ def get(self, name: str, failobj: None = None) -> Optional[str]:
+ ... # pragma: no cover
+
+ @overload
+ def get(self, name: str, failobj: _T) -> Union[str, _T]:
+ ... # pragma: no cover
+
+ # overload per python/importlib_metadata#435
+ @overload
+ def get_all(self, name: str, failobj: None = None) -> Optional[List[Any]]:
+ ... # pragma: no cover
+
+ @overload
+ def get_all(self, name: str, failobj: _T) -> Union[List[Any], _T]:
"""
Return all values associated with a possibly multi-valued key.
"""
@@ -29,18 +44,19 @@ def json(self) -> Dict[str, Union[str, List[str]]]:
"""
-class SimplePath(Protocol):
+class SimplePath(Protocol[_T]):
"""
A minimal subset of pathlib.Path required by PathDistribution.
"""
- def joinpath(self) -> 'SimplePath':
+ def joinpath(self) -> _T:
... # pragma: no cover
- def __truediv__(self) -> 'SimplePath':
+ def __truediv__(self, other: Union[str, _T]) -> _T:
... # pragma: no cover
- def parent(self) -> 'SimplePath':
+ @property
+ def parent(self) -> _T:
... # pragma: no cover
def read_text(self) -> str:
diff --git a/Lib/inspect.py b/Lib/inspect.py
index 4242b40c2a08df..6d1d7b766cb3bb 100644
--- a/Lib/inspect.py
+++ b/Lib/inspect.py
@@ -3006,7 +3006,7 @@ def __init__(self, parameters=None, *, return_annotation=_empty,
if __validate_parameters__:
params = OrderedDict()
top_kind = _POSITIONAL_ONLY
- kind_defaults = False
+ seen_default = False
for param in parameters:
kind = param.kind
@@ -3021,21 +3021,19 @@ def __init__(self, parameters=None, *, return_annotation=_empty,
kind.description)
raise ValueError(msg)
elif kind > top_kind:
- kind_defaults = False
top_kind = kind
if kind in (_POSITIONAL_ONLY, _POSITIONAL_OR_KEYWORD):
if param.default is _empty:
- if kind_defaults:
+ if seen_default:
# No default for this parameter, but the
- # previous parameter of the same kind had
- # a default
+ # previous parameter had a default
msg = 'non-default argument follows default ' \
'argument'
raise ValueError(msg)
else:
# There is a default for this parameter.
- kind_defaults = True
+ seen_default = True
if name in params:
msg = 'duplicate parameter name: {!r}'.format(name)
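A short sketch of what replacing kind_defaults with seen_default means for Signature validation: a default on a positional-only parameter followed by a positional-or-keyword parameter without one is now rejected, mirroring the fact that `def f(a=1, /, b): ...` is a syntax error.

    from inspect import Parameter, Signature

    params = [
        Parameter("a", Parameter.POSITIONAL_ONLY, default=1),
        Parameter("b", Parameter.POSITIONAL_OR_KEYWORD),  # no default
    ]
    try:
        Signature(params)
    except ValueError as exc:
        print(exc)  # non-default argument follows default argument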
diff --git a/Lib/pkgutil.py b/Lib/pkgutil.py
index bdebfd2fc8ac32..56731de64af494 100644
--- a/Lib/pkgutil.py
+++ b/Lib/pkgutil.py
@@ -511,10 +511,10 @@ def extend_path(path, name):
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
- This will add to the package's __path__ all subdirectories of
- directories on sys.path named after the package. This is useful
- if one wants to distribute different parts of a single logical
- package as multiple directories.
+ For each directory on sys.path that has a subdirectory that
+ matches the package name, add the subdirectory to the package's
+ __path__. This is useful if one wants to distribute different
+ parts of a single logical package as multiple directories.
It also looks for *.pkg files beginning where * matches the name
argument. This feature is similar to *.pth files (see site.py),
diff --git a/Lib/shutil.py b/Lib/shutil.py
index 95b6c5299cab29..c75ea4da02ebb4 100644
--- a/Lib/shutil.py
+++ b/Lib/shutil.py
@@ -1245,7 +1245,7 @@ def _unpack_zipfile(filename, extract_dir):
finally:
zip.close()
-def _unpack_tarfile(filename, extract_dir):
+def _unpack_tarfile(filename, extract_dir, *, filter=None):
"""Unpack tar/tar.gz/tar.bz2/tar.xz `filename` to `extract_dir`
"""
import tarfile # late import for breaking circular dependency
@@ -1255,7 +1255,7 @@ def _unpack_tarfile(filename, extract_dir):
raise ReadError(
"%s is not a compressed or uncompressed tar file" % filename)
try:
- tarobj.extractall(extract_dir)
+ tarobj.extractall(extract_dir, filter=filter)
finally:
tarobj.close()
@@ -1288,7 +1288,7 @@ def _find_unpack_format(filename):
return name
return None
-def unpack_archive(filename, extract_dir=None, format=None):
+def unpack_archive(filename, extract_dir=None, format=None, *, filter=None):
"""Unpack an archive.
`filename` is the name of the archive.
@@ -1302,6 +1302,9 @@ def unpack_archive(filename, extract_dir=None, format=None):
was registered for that extension.
In case none is found, a ValueError is raised.
+
+ If `filter` is given, it is passed to the underlying
+ extraction function.
"""
sys.audit("shutil.unpack_archive", filename, extract_dir, format)
@@ -1311,6 +1314,10 @@ def unpack_archive(filename, extract_dir=None, format=None):
extract_dir = os.fspath(extract_dir)
filename = os.fspath(filename)
+ if filter is None:
+ filter_kwargs = {}
+ else:
+ filter_kwargs = {'filter': filter}
if format is not None:
try:
format_info = _UNPACK_FORMATS[format]
@@ -1318,7 +1325,7 @@ def unpack_archive(filename, extract_dir=None, format=None):
raise ValueError("Unknown unpack format '{0}'".format(format)) from None
func = format_info[1]
- func(filename, extract_dir, **dict(format_info[2]))
+ func(filename, extract_dir, **dict(format_info[2]), **filter_kwargs)
else:
# we need to look at the registered unpackers supported extensions
format = _find_unpack_format(filename)
@@ -1326,7 +1333,7 @@ def unpack_archive(filename, extract_dir=None, format=None):
raise ReadError("Unknown archive format '{0}'".format(filename))
func = _UNPACK_FORMATS[format][1]
- kwargs = dict(_UNPACK_FORMATS[format][2])
+ kwargs = dict(_UNPACK_FORMATS[format][2]) | filter_kwargs
func(filename, extract_dir, **kwargs)
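Illustrative use of the forwarded filter argument (the archive path is hypothetical): for tar-based formats the value is handed straight to TarFile.extractall(), so the named tarfile filters apply; unpackers that do not accept a filter (such as the zip unpacker above) will raise TypeError if one is passed, and filter=None keeps the old behaviour everywhere.

    import shutil

    # 'data' strips leading '/', refuses members or links that escape the
    # destination, refuses special files, and sanitizes mode/ownership.
    shutil.unpack_archive("example-1.0.tar.gz", "build/example", filter="data")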
diff --git a/Lib/tarfile.py b/Lib/tarfile.py
index b733195c9c5636..7781a430839ea5 100755
--- a/Lib/tarfile.py
+++ b/Lib/tarfile.py
@@ -46,6 +46,7 @@
import struct
import copy
import re
+import warnings
try:
import pwd
@@ -65,7 +66,11 @@
__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError",
"CompressionError", "StreamError", "ExtractError", "HeaderError",
"ENCODING", "USTAR_FORMAT", "GNU_FORMAT", "PAX_FORMAT",
- "DEFAULT_FORMAT", "open"]
+ "DEFAULT_FORMAT", "open","fully_trusted_filter", "data_filter",
+ "tar_filter", "FilterError", "AbsoluteLinkError",
+ "OutsideDestinationError", "SpecialFileError", "AbsolutePathError",
+ "LinkOutsideDestinationError"]
+
#---------------------------------------------------------
# tar constants
@@ -154,6 +159,8 @@
def stn(s, length, encoding, errors):
"""Convert a string to a null-terminated bytes object.
"""
+ if s is None:
+ raise ValueError("metadata cannot contain None")
s = s.encode(encoding, errors)
return s[:length] + (length - len(s)) * NUL
@@ -707,9 +714,127 @@ def __init__(self, tarfile, tarinfo):
super().__init__(fileobj)
#class ExFileObject
+
+#-----------------------------
+# extraction filters (PEP 706)
+#-----------------------------
+
+class FilterError(TarError):
+ pass
+
+class AbsolutePathError(FilterError):
+ def __init__(self, tarinfo):
+ self.tarinfo = tarinfo
+ super().__init__(f'member {tarinfo.name!r} has an absolute path')
+
+class OutsideDestinationError(FilterError):
+ def __init__(self, tarinfo, path):
+ self.tarinfo = tarinfo
+ self._path = path
+ super().__init__(f'{tarinfo.name!r} would be extracted to {path!r}, '
+ + 'which is outside the destination')
+
+class SpecialFileError(FilterError):
+ def __init__(self, tarinfo):
+ self.tarinfo = tarinfo
+ super().__init__(f'{tarinfo.name!r} is a special file')
+
+class AbsoluteLinkError(FilterError):
+ def __init__(self, tarinfo):
+ self.tarinfo = tarinfo
+ super().__init__(f'{tarinfo.name!r} is a symlink to an absolute path')
+
+class LinkOutsideDestinationError(FilterError):
+ def __init__(self, tarinfo, path):
+ self.tarinfo = tarinfo
+ self._path = path
+ super().__init__(f'{tarinfo.name!r} would link to {path!r}, '
+ + 'which is outside the destination')
+
+def _get_filtered_attrs(member, dest_path, for_data=True):
+ new_attrs = {}
+ name = member.name
+ dest_path = os.path.realpath(dest_path)
+ # Strip leading / (tar's directory separator) from filenames.
+ # Include os.sep (target OS directory separator) as well.
+ if name.startswith(('/', os.sep)):
+ name = new_attrs['name'] = member.path.lstrip('/' + os.sep)
+ if os.path.isabs(name):
+ # Path is absolute even after stripping.
+ # For example, 'C:/foo' on Windows.
+ raise AbsolutePathError(member)
+ # Ensure we stay in the destination
+ target_path = os.path.realpath(os.path.join(dest_path, name))
+ if os.path.commonpath([target_path, dest_path]) != dest_path:
+ raise OutsideDestinationError(member, target_path)
+ # Limit permissions (no high bits, and go-w)
+ mode = member.mode
+ if mode is not None:
+ # Strip high bits & group/other write bits
+ mode = mode & 0o755
+ if for_data:
+ # For data, handle permissions & file types
+ if member.isreg() or member.islnk():
+ if not mode & 0o100:
+ # Clear executable bits if not executable by user
+ mode &= ~0o111
+ # Ensure owner can read & write
+ mode |= 0o600
+ elif member.isdir() or member.issym():
+ # Ignore mode for directories & symlinks
+ mode = None
+ else:
+ # Reject special files
+ raise SpecialFileError(member)
+ if mode != member.mode:
+ new_attrs['mode'] = mode
+ if for_data:
+ # Ignore ownership for 'data'
+ if member.uid is not None:
+ new_attrs['uid'] = None
+ if member.gid is not None:
+ new_attrs['gid'] = None
+ if member.uname is not None:
+ new_attrs['uname'] = None
+ if member.gname is not None:
+ new_attrs['gname'] = None
+ # Check link destination for 'data'
+ if member.islnk() or member.issym():
+ if os.path.isabs(member.linkname):
+ raise AbsoluteLinkError(member)
+ target_path = os.path.realpath(os.path.join(dest_path, member.linkname))
+ if os.path.commonpath([target_path, dest_path]) != dest_path:
+ raise LinkOutsideDestinationError(member, target_path)
+ return new_attrs
+
+def fully_trusted_filter(member, dest_path):
+ return member
+
+def tar_filter(member, dest_path):
+ new_attrs = _get_filtered_attrs(member, dest_path, False)
+ if new_attrs:
+ return member.replace(**new_attrs, deep=False)
+ return member
+
+def data_filter(member, dest_path):
+ new_attrs = _get_filtered_attrs(member, dest_path, True)
+ if new_attrs:
+ return member.replace(**new_attrs, deep=False)
+ return member
+
+_NAMED_FILTERS = {
+ "fully_trusted": fully_trusted_filter,
+ "tar": tar_filter,
+ "data": data_filter,
+}
+
#------------------
# Exported Classes
#------------------
+
+# Sentinel for replace() defaults, meaning "don't change the attribute"
+_KEEP = object()
+
class TarInfo(object):
"""Informational class which holds the details about an
archive member given by a tar header block.
@@ -790,12 +915,44 @@ def linkpath(self, linkname):
def __repr__(self):
return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))
+ def replace(self, *,
+ name=_KEEP, mtime=_KEEP, mode=_KEEP, linkname=_KEEP,
+ uid=_KEEP, gid=_KEEP, uname=_KEEP, gname=_KEEP,
+ deep=True, _KEEP=_KEEP):
+ """Return a deep copy of self with the given attributes replaced.
+ """
+ if deep:
+ result = copy.deepcopy(self)
+ else:
+ result = copy.copy(self)
+ if name is not _KEEP:
+ result.name = name
+ if mtime is not _KEEP:
+ result.mtime = mtime
+ if mode is not _KEEP:
+ result.mode = mode
+ if linkname is not _KEEP:
+ result.linkname = linkname
+ if uid is not _KEEP:
+ result.uid = uid
+ if gid is not _KEEP:
+ result.gid = gid
+ if uname is not _KEEP:
+ result.uname = uname
+ if gname is not _KEEP:
+ result.gname = gname
+ return result
+
def get_info(self):
"""Return the TarInfo's attributes as a dictionary.
"""
+ if self.mode is None:
+ mode = None
+ else:
+ mode = self.mode & 0o7777
info = {
"name": self.name,
- "mode": self.mode & 0o7777,
+ "mode": mode,
"uid": self.uid,
"gid": self.gid,
"size": self.size,
@@ -818,6 +975,9 @@ def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescap
"""Return a tar header as a string of 512 byte blocks.
"""
info = self.get_info()
+ for name, value in info.items():
+ if value is None:
+ raise ValueError("%s may not be None" % name)
if format == USTAR_FORMAT:
return self.create_ustar_header(info, encoding, errors)
@@ -948,6 +1108,12 @@ def _create_header(info, format, encoding, errors):
devmajor = stn("", 8, encoding, errors)
devminor = stn("", 8, encoding, errors)
+ # None values in metadata should cause ValueError.
+ # itn()/stn() do this for all fields except type.
+ filetype = info.get("type", REGTYPE)
+ if filetype is None:
+ raise ValueError("TarInfo.type must not be None")
+
parts = [
stn(info.get("name", ""), 100, encoding, errors),
itn(info.get("mode", 0) & 0o7777, 8, format),
@@ -956,7 +1122,7 @@ def _create_header(info, format, encoding, errors):
itn(info.get("size", 0), 12, format),
itn(info.get("mtime", 0), 12, format),
b" ", # checksum field
- info.get("type", REGTYPE),
+ filetype,
stn(info.get("linkname", ""), 100, encoding, errors),
info.get("magic", POSIX_MAGIC),
stn(info.get("uname", ""), 32, encoding, errors),
@@ -1462,6 +1628,8 @@ class TarFile(object):
fileobject = ExFileObject # The file-object for extractfile().
+ extraction_filter = None # The default filter for extraction.
+
def __init__(self, name=None, mode="r", fileobj=None, format=None,
tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
errors="surrogateescape", pax_headers=None, debug=None,
@@ -1936,7 +2104,10 @@ def list(self, verbose=True, *, members=None):
members = self
for tarinfo in members:
if verbose:
- _safe_print(stat.filemode(tarinfo.mode))
+ if tarinfo.mode is None:
+ _safe_print("??????????")
+ else:
+ _safe_print(stat.filemode(tarinfo.mode))
_safe_print("%s/%s" % (tarinfo.uname or tarinfo.uid,
tarinfo.gname or tarinfo.gid))
if tarinfo.ischr() or tarinfo.isblk():
@@ -1944,8 +2115,11 @@ def list(self, verbose=True, *, members=None):
("%d,%d" % (tarinfo.devmajor, tarinfo.devminor)))
else:
_safe_print("%10d" % tarinfo.size)
- _safe_print("%d-%02d-%02d %02d:%02d:%02d" \
- % time.localtime(tarinfo.mtime)[:6])
+ if tarinfo.mtime is None:
+ _safe_print("????-??-?? ??:??:??")
+ else:
+ _safe_print("%d-%02d-%02d %02d:%02d:%02d" \
+ % time.localtime(tarinfo.mtime)[:6])
_safe_print(tarinfo.name + ("/" if tarinfo.isdir() else ""))
@@ -2032,32 +2206,63 @@ def addfile(self, tarinfo, fileobj=None):
self.members.append(tarinfo)
- def extractall(self, path=".", members=None, *, numeric_owner=False):
+ def _get_filter_function(self, filter):
+ if filter is None:
+ filter = self.extraction_filter
+ if filter is None:
+ warnings.warn(
+ 'Python 3.14 will, by default, filter extracted tar '
+ + 'archives and reject files or modify their metadata. '
+ + 'Use the filter argument to control this behavior.',
+ DeprecationWarning)
+ return fully_trusted_filter
+ if isinstance(filter, str):
+ raise TypeError(
+ 'String names are not supported for '
+ + 'TarFile.extraction_filter. Use a function such as '
+ + 'tarfile.data_filter directly.')
+ return filter
+ if callable(filter):
+ return filter
+ try:
+ return _NAMED_FILTERS[filter]
+ except KeyError:
+ raise ValueError(f"filter {filter!r} not found") from None
+
+ def extractall(self, path=".", members=None, *, numeric_owner=False,
+ filter=None):
"""Extract all members from the archive to the current working
directory and set owner, modification time and permissions on
directories afterwards. `path' specifies a different directory
to extract to. `members' is optional and must be a subset of the
list returned by getmembers(). If `numeric_owner` is True, only
the numbers for user/group names are used and not the names.
+
+ The `filter` function will be called on each member just
+ before extraction.
+ It can return a changed TarInfo or None to skip the member.
+ String names of common filters are accepted.
"""
directories = []
+ filter_function = self._get_filter_function(filter)
if members is None:
members = self
- for tarinfo in members:
+ for member in members:
+ tarinfo = self._get_extract_tarinfo(member, filter_function, path)
+ if tarinfo is None:
+ continue
if tarinfo.isdir():
- # Extract directories with a safe mode.
+ # For directories, delay setting attributes until later,
+ # since permissions can interfere with extraction and
+ # extracting contents can reset mtime.
directories.append(tarinfo)
- tarinfo = copy.copy(tarinfo)
- tarinfo.mode = 0o700
- # Do not set_attrs directories, as we will do that further down
- self.extract(tarinfo, path, set_attrs=not tarinfo.isdir(),
- numeric_owner=numeric_owner)
+ self._extract_one(tarinfo, path, set_attrs=not tarinfo.isdir(),
+ numeric_owner=numeric_owner)
# Reverse sort directories.
- directories.sort(key=lambda a: a.name)
- directories.reverse()
+ directories.sort(key=lambda a: a.name, reverse=True)
# Set correct owner, mtime and filemode on directories.
for tarinfo in directories:
@@ -2067,12 +2272,10 @@ def extractall(self, path=".", members=None, *, numeric_owner=False):
self.utime(tarinfo, dirpath)
self.chmod(tarinfo, dirpath)
except ExtractError as e:
- if self.errorlevel > 1:
- raise
- else:
- self._dbg(1, "tarfile: %s" % e)
+ self._handle_nonfatal_error(e)
- def extract(self, member, path="", set_attrs=True, *, numeric_owner=False):
+ def extract(self, member, path="", set_attrs=True, *, numeric_owner=False,
+ filter=None):
"""Extract a member from the archive to the current working directory,
using its full name. Its file information is extracted as accurately
as possible. `member' may be a filename or a TarInfo object. You can
@@ -2080,35 +2283,70 @@ def extract(self, member, path="", set_attrs=True, *, numeric_owner=False):
mtime, mode) are set unless `set_attrs' is False. If `numeric_owner`
is True, only the numbers for user/group names are used and not
the names.
+
+ The `filter` function will be called before extraction.
+ It can return a changed TarInfo or None to skip the member.
+ String names of common filters are accepted.
"""
- self._check("r")
+ filter_function = self._get_filter_function(filter)
+ tarinfo = self._get_extract_tarinfo(member, filter_function, path)
+ if tarinfo is not None:
+ self._extract_one(tarinfo, path, set_attrs, numeric_owner)
+ def _get_extract_tarinfo(self, member, filter_function, path):
+ """Get filtered TarInfo (or None) from member, which might be a str"""
if isinstance(member, str):
tarinfo = self.getmember(member)
else:
tarinfo = member
+ unfiltered = tarinfo
+ try:
+ tarinfo = filter_function(tarinfo, path)
+ except (OSError, FilterError) as e:
+ self._handle_fatal_error(e)
+ except ExtractError as e:
+ self._handle_nonfatal_error(e)
+ if tarinfo is None:
+ self._dbg(2, "tarfile: Excluded %r" % unfiltered.name)
+ return None
# Prepare the link target for makelink().
if tarinfo.islnk():
+ tarinfo = copy.copy(tarinfo)
tarinfo._link_target = os.path.join(path, tarinfo.linkname)
+ return tarinfo
+
+ def _extract_one(self, tarinfo, path, set_attrs, numeric_owner):
+ """Extract from filtered tarinfo to disk"""
+ self._check("r")
try:
self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
set_attrs=set_attrs,
numeric_owner=numeric_owner)
except OSError as e:
- if self.errorlevel > 0:
- raise
- else:
- if e.filename is None:
- self._dbg(1, "tarfile: %s" % e.strerror)
- else:
- self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
+ self._handle_fatal_error(e)
except ExtractError as e:
- if self.errorlevel > 1:
- raise
+ self._handle_nonfatal_error(e)
+
+ def _handle_nonfatal_error(self, e):
+ """Handle non-fatal error (ExtractError) according to errorlevel"""
+ if self.errorlevel > 1:
+ raise
+ else:
+ self._dbg(1, "tarfile: %s" % e)
+
+ def _handle_fatal_error(self, e):
+ """Handle "fatal" error according to self.errorlevel"""
+ if self.errorlevel > 0:
+ raise
+ elif isinstance(e, OSError):
+ if e.filename is None:
+ self._dbg(1, "tarfile: %s" % e.strerror)
else:
- self._dbg(1, "tarfile: %s" % e)
+ self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
+ else:
+ self._dbg(1, "tarfile: %s %s" % (type(e).__name__, e))
def extractfile(self, member):
"""Extract a member from the archive as a file object. `member' may be
@@ -2195,9 +2433,13 @@ def makedir(self, tarinfo, targetpath):
"""Make a directory called targetpath.
"""
try:
- # Use a safe mode for the directory, the real mode is set
- # later in _extract_member().
- os.mkdir(targetpath, 0o700)
+ if tarinfo.mode is None:
+ # Use the system's default mode
+ os.mkdir(targetpath)
+ else:
+ # Use a safe mode for the directory, the real mode is set
+ # later in _extract_member().
+ os.mkdir(targetpath, 0o700)
except FileExistsError:
pass
@@ -2240,6 +2482,9 @@ def makedev(self, tarinfo, targetpath):
raise ExtractError("special devices not supported by system")
mode = tarinfo.mode
+ if mode is None:
+ # Use mknod's default
+ mode = 0o600
if tarinfo.isblk():
mode |= stat.S_IFBLK
else:
@@ -2261,7 +2506,6 @@ def makelink(self, tarinfo, targetpath):
os.unlink(targetpath)
os.symlink(tarinfo.linkname, targetpath)
else:
- # See extract().
if os.path.exists(tarinfo._link_target):
os.link(tarinfo._link_target, targetpath)
else:
@@ -2286,15 +2530,19 @@ def chown(self, tarinfo, targetpath, numeric_owner):
u = tarinfo.uid
if not numeric_owner:
try:
- if grp:
+ if grp and tarinfo.gname:
g = grp.getgrnam(tarinfo.gname)[2]
except KeyError:
pass
try:
- if pwd:
+ if pwd and tarinfo.uname:
u = pwd.getpwnam(tarinfo.uname)[2]
except KeyError:
pass
+ if g is None:
+ g = -1
+ if u is None:
+ u = -1
try:
if tarinfo.issym() and hasattr(os, "lchown"):
os.lchown(targetpath, u, g)
@@ -2306,6 +2554,8 @@ def chown(self, tarinfo, targetpath, numeric_owner):
def chmod(self, tarinfo, targetpath):
"""Set file permissions of targetpath according to tarinfo.
"""
+ if tarinfo.mode is None:
+ return
try:
os.chmod(targetpath, tarinfo.mode)
except OSError as e:
@@ -2314,10 +2564,13 @@ def chmod(self, tarinfo, targetpath):
def utime(self, tarinfo, targetpath):
"""Set modification time of targetpath according to tarinfo.
"""
+ mtime = tarinfo.mtime
+ if mtime is None:
+ return
if not hasattr(os, 'utime'):
return
try:
- os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime))
+ os.utime(targetpath, (mtime, mtime))
except OSError as e:
raise ExtractError("could not change modification time") from e
@@ -2395,13 +2648,26 @@ def _getmember(self, name, tarinfo=None, normalize=False):
members = self.getmembers()
# Limit the member search list up to tarinfo.
+ skipping = False
if tarinfo is not None:
- members = members[:members.index(tarinfo)]
+ try:
+ index = members.index(tarinfo)
+ except ValueError:
+ # The given starting point might be a (modified) copy.
+ # We'll later skip members until we find an equivalent.
+ skipping = True
+ else:
+ # Happy fast path
+ members = members[:index]
if normalize:
name = os.path.normpath(name)
for member in reversed(members):
+ if skipping:
+ if tarinfo.offset == member.offset:
+ skipping = False
+ continue
if normalize:
member_name = os.path.normpath(member.name)
else:
@@ -2410,6 +2676,10 @@ def _getmember(self, name, tarinfo=None, normalize=False):
if name == member_name:
return member
+ if skipping:
+ # Starting point was not found
+ raise ValueError(tarinfo)
+
def _load(self):
"""Read through the entire archive file and look for readable
members.
@@ -2500,6 +2770,7 @@ def __exit__(self, type, value, traceback):
#--------------------
# exported functions
#--------------------
+
def is_tarfile(name):
"""Return True if name points to a tar archive that we
are able to handle, else return False.
@@ -2528,6 +2799,10 @@ def main():
parser = argparse.ArgumentParser(description=description)
parser.add_argument('-v', '--verbose', action='store_true', default=False,
help='Verbose output')
+ parser.add_argument('--filter', metavar='<filtername>',
+ choices=_NAMED_FILTERS,
+ help='Filter for extraction')
+
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('-l', '--list', metavar='<tarfile>',
help='Show listing of a tarfile')
@@ -2539,8 +2814,12 @@ def main():
help='Create tarfile from sources')
group.add_argument('-t', '--test', metavar='<tarfile>',
help='Test if a tarfile is valid')
+
args = parser.parse_args()
+ if args.filter and args.extract is None:
+ parser.exit(1, '--filter is only valid for extraction\n')
+
if args.test is not None:
src = args.test
if is_tarfile(src):
@@ -2571,7 +2850,7 @@ def main():
if is_tarfile(src):
with TarFile.open(src, 'r:*') as tf:
- tf.extractall(path=curdir)
+ tf.extractall(path=curdir, filter=args.filter)
if args.verbose:
if curdir == '.':
msg = '{!r} file is extracted.'.format(src)
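A compact sketch of the extraction-filter API added above (archive and directory names are hypothetical): filters can be chosen by name per call, installed as a default on the TarFile instance, or written as plain callables that return a replaced TarInfo or None to skip a member.

    import tarfile

    # Named filter for a single call.
    with tarfile.open("release.tar.gz") as tf:
        tf.extractall("dest", filter="data")

    # Default for every extraction from this object (must be a callable,
    # not a string, per _get_filter_function above).
    with tarfile.open("release.tar.gz") as tf:
        tf.extraction_filter = tarfile.data_filter
        tf.extractall("dest")

    # Custom filter: keep only members under 'pkg/' and drop ownership.
    def only_pkg(member, dest_path):
        if not member.name.startswith("pkg/"):
            return None  # skip this member
        return member.replace(uid=None, gid=None, uname=None, gname=None)

    with tarfile.open("release.tar.gz") as tf:
        tf.extractall("dest", filter=only_pkg)

    # The command-line interface accepts the same filter names:
    #   python -m tarfile -e release.tar.gz --filter data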
diff --git a/Lib/test/_test_embed_structseq.py b/Lib/test/_test_embed_structseq.py
index 868f9f83e8be77..834daa4df55fec 100644
--- a/Lib/test/_test_embed_structseq.py
+++ b/Lib/test/_test_embed_structseq.py
@@ -1,27 +1,31 @@
import sys
import types
-import unittest
+# Note: This test file can't import `unittest` since the runtime can't
+# currently guarantee that it will not leak memory. Doing so will mark
+# the test as passing but with reference leaks. This can safely import
+# the `unittest` library once there's a strict guarantee of no leaks
+# during runtime shutdown.
# bpo-46417: Test that structseq types used by the sys module are still
# valid when Py_Finalize()/Py_Initialize() are called multiple times.
-class TestStructSeq(unittest.TestCase):
+class TestStructSeq:
# test PyTypeObject members
- def check_structseq(self, obj_type):
+ def _check_structseq(self, obj_type):
# ob_refcnt
- self.assertGreaterEqual(sys.getrefcount(obj_type), 1)
+ assert sys.getrefcount(obj_type) > 1
# tp_base
- self.assertTrue(issubclass(obj_type, tuple))
+ assert issubclass(obj_type, tuple)
# tp_bases
- self.assertEqual(obj_type.__bases__, (tuple,))
+ assert obj_type.__bases__ == (tuple,)
# tp_dict
- self.assertIsInstance(obj_type.__dict__, types.MappingProxyType)
+ assert isinstance(obj_type.__dict__, types.MappingProxyType)
# tp_mro
- self.assertEqual(obj_type.__mro__, (obj_type, tuple, object))
+ assert obj_type.__mro__ == (obj_type, tuple, object)
# tp_name
- self.assertIsInstance(type.__name__, str)
+ assert isinstance(type.__name__, str)
# tp_subclasses
- self.assertEqual(obj_type.__subclasses__(), [])
+ assert obj_type.__subclasses__() == []
def test_sys_attrs(self):
for attr_name in (
@@ -32,23 +36,23 @@ def test_sys_attrs(self):
'thread_info', # ThreadInfoType
'version_info', # VersionInfoType
):
- with self.subTest(attr=attr_name):
- attr = getattr(sys, attr_name)
- self.check_structseq(type(attr))
+ attr = getattr(sys, attr_name)
+ self._check_structseq(type(attr))
def test_sys_funcs(self):
func_names = ['get_asyncgen_hooks'] # AsyncGenHooksType
if hasattr(sys, 'getwindowsversion'):
func_names.append('getwindowsversion') # WindowsVersionType
for func_name in func_names:
- with self.subTest(func=func_name):
- func = getattr(sys, func_name)
- obj = func()
- self.check_structseq(type(obj))
+ func = getattr(sys, func_name)
+ obj = func()
+ self._check_structseq(type(obj))
try:
- unittest.main()
+ tests = TestStructSeq()
+ tests.test_sys_attrs()
+ tests.test_sys_funcs()
except SystemExit as exc:
if exc.args[0] != 0:
raise
diff --git a/Lib/test/datetimetester.py b/Lib/test/datetimetester.py
index 570f803918c1ef..477f16f1841f62 100644
--- a/Lib/test/datetimetester.py
+++ b/Lib/test/datetimetester.py
@@ -6212,6 +6212,10 @@ def test_system_transitions(self):
ts1 = dt.replace(fold=1).timestamp()
self.assertEqual(ts0, s0 + ss / 2)
self.assertEqual(ts1, s0 - ss / 2)
+ # gh-83861
+ utc0 = dt.astimezone(timezone.utc)
+ utc1 = dt.replace(fold=1).astimezone(timezone.utc)
+ self.assertEqual(utc0, utc1 + timedelta(0, ss))
finally:
if TZ is None:
del os.environ['TZ']
diff --git a/Lib/test/libregrtest/refleak.py b/Lib/test/libregrtest/refleak.py
index 4298fa806e1065..2de8c6cfbc61a1 100644
--- a/Lib/test/libregrtest/refleak.py
+++ b/Lib/test/libregrtest/refleak.py
@@ -73,9 +73,10 @@ def get_pooled_int(value):
fd_deltas = [0] * repcount
getallocatedblocks = sys.getallocatedblocks
gettotalrefcount = sys.gettotalrefcount
+ getunicodeinternedsize = sys.getunicodeinternedsize
fd_count = os_helper.fd_count
# initialize variables to make pyflakes quiet
- rc_before = alloc_before = fd_before = 0
+ rc_before = alloc_before = fd_before = interned_before = 0
if not ns.quiet:
print("beginning", repcount, "repetitions", file=sys.stderr)
@@ -91,9 +92,13 @@ def get_pooled_int(value):
dash_R_cleanup(fs, ps, pic, zdc, abcs)
support.gc_collect()
- # Read memory statistics immediately after the garbage collection
- alloc_after = getallocatedblocks()
- rc_after = gettotalrefcount()
+ # Read memory statistics immediately after the garbage collection.
+ # Also, readjust the reference counts and alloc blocks by ignoring
+ # any strings that might have been interned during test_func. These
+ # strings will be deallocated at runtime shutdown
+ interned_after = getunicodeinternedsize()
+ alloc_after = getallocatedblocks() - interned_after
+ rc_after = gettotalrefcount() - interned_after * 2
fd_after = fd_count()
if not ns.quiet:
@@ -106,6 +111,7 @@ def get_pooled_int(value):
alloc_before = alloc_after
rc_before = rc_after
fd_before = fd_after
+ interned_before = interned_after
if not ns.quiet:
print(file=sys.stderr)
diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py
index 6c932e1305e1dd..8eef7baec70118 100644
--- a/Lib/test/test_ast.py
+++ b/Lib/test/test_ast.py
@@ -774,11 +774,6 @@ def test_parenthesized_with_feature_version(self):
ast.parse('with (CtxManager() as example): ...', feature_version=(3, 8))
ast.parse('with CtxManager() as example: ...', feature_version=(3, 8))
- def test_debug_f_string_feature_version(self):
- ast.parse('f"{x=}"', feature_version=(3, 8))
- with self.assertRaises(SyntaxError):
- ast.parse('f"{x=}"', feature_version=(3, 7))
-
def test_assignment_expression_feature_version(self):
ast.parse('(x := 0)', feature_version=(3, 8))
with self.assertRaises(SyntaxError):
@@ -2298,6 +2293,17 @@ class C:
cdef = ast.parse(s).body[0]
self.assertEqual(ast.get_source_segment(s, cdef.body[0], padded=True), s_method)
+ def test_source_segment_newlines(self):
+ s = 'def f():\n pass\ndef g():\r pass\r\ndef h():\r\n pass\r\n'
+ f, g, h = ast.parse(s).body
+ self._check_content(s, f, 'def f():\n pass')
+ self._check_content(s, g, 'def g():\r pass')
+ self._check_content(s, h, 'def h():\r\n pass')
+
+ s = 'def f():\n a = 1\r b = 2\r\n c = 3\n'
+ f = ast.parse(s).body[0]
+ self._check_content(s, f, s.rstrip())
+
def test_source_segment_missing_info(self):
s = 'v = 1\r\nw = 1\nx = 1\n\ry = 1\r\n'
v, w, x, y = ast.parse(s).body
diff --git a/Lib/test/test_asyncio/test_unix_events.py b/Lib/test/test_asyncio/test_unix_events.py
index 96999470a7c69a..cdf3eaac68af15 100644
--- a/Lib/test/test_asyncio/test_unix_events.py
+++ b/Lib/test/test_asyncio/test_unix_events.py
@@ -1712,11 +1712,11 @@ class PolicyTests(unittest.TestCase):
def create_policy(self):
return asyncio.DefaultEventLoopPolicy()
- def test_get_default_child_watcher(self):
+ @mock.patch('asyncio.unix_events.can_use_pidfd')
+ def test_get_default_child_watcher(self, m_can_use_pidfd):
+ m_can_use_pidfd.return_value = False
policy = self.create_policy()
self.assertIsNone(policy._watcher)
- unix_events.can_use_pidfd = mock.Mock()
- unix_events.can_use_pidfd.return_value = False
with self.assertWarns(DeprecationWarning):
watcher = policy.get_child_watcher()
self.assertIsInstance(watcher, asyncio.ThreadedChildWatcher)
@@ -1725,10 +1725,9 @@ def test_get_default_child_watcher(self):
with self.assertWarns(DeprecationWarning):
self.assertIs(watcher, policy.get_child_watcher())
+ m_can_use_pidfd.return_value = True
policy = self.create_policy()
self.assertIsNone(policy._watcher)
- unix_events.can_use_pidfd = mock.Mock()
- unix_events.can_use_pidfd.return_value = True
with self.assertWarns(DeprecationWarning):
watcher = policy.get_child_watcher()
self.assertIsInstance(watcher, asyncio.PidfdChildWatcher)
diff --git a/Lib/test/test_bdb.py b/Lib/test/test_bdb.py
index fc4b8094316332..568c88e326c087 100644
--- a/Lib/test/test_bdb.py
+++ b/Lib/test/test_bdb.py
@@ -1207,7 +1207,8 @@ def main():
class TestRegressions(unittest.TestCase):
def test_format_stack_entry_no_lineno(self):
# See gh-101517
- Bdb().format_stack_entry((sys._getframe(), None))
+ self.assertIn('Warning: lineno is None',
+ Bdb().format_stack_entry((sys._getframe(), None)))
if __name__ == "__main__":
diff --git a/Lib/test/test_buffer.py b/Lib/test/test_buffer.py
index 8ac3b7e7eb29d1..098d2d999643cb 100644
--- a/Lib/test/test_buffer.py
+++ b/Lib/test/test_buffer.py
@@ -965,8 +965,10 @@ def check_memoryview(m, expected_readonly=readonly):
self.assertEqual(m.strides, tuple(strides))
self.assertEqual(m.suboffsets, tuple(suboffsets))
- n = 1 if ndim == 0 else len(lst)
- self.assertEqual(len(m), n)
+ if ndim == 0:
+ self.assertRaises(TypeError, len, m)
+ else:
+ self.assertEqual(len(m), len(lst))
rep = result.tolist() if fmt else result.tobytes()
self.assertEqual(rep, lst)
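For the zero-dimensional case exercised above, a small sketch (a ctypes scalar exports a 0-d buffer; behaviour assumes a build with this change): len() on such a memoryview now raises TypeError instead of returning 1.

    import ctypes

    m = memoryview(ctypes.c_int(42))
    print(m.ndim, m.shape)  # 0 ()
    try:
        len(m)
    except TypeError as exc:
        print("len() rejected:", exc)
    print(len(m.tobytes()))  # the data itself is still accessible (typically 4 bytes)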
diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py
index e7a79bc13b7f3d..04dd8ff3070c99 100644
--- a/Lib/test/test_builtin.py
+++ b/Lib/test/test_builtin.py
@@ -28,7 +28,7 @@
from types import AsyncGeneratorType, FunctionType, CellType
from operator import neg
from test import support
-from test.support import (swap_attr, maybe_get_event_loop_policy)
+from test.support import (cpython_only, swap_attr, maybe_get_event_loop_policy)
from test.support.os_helper import (EnvironmentVarGuard, TESTFN, unlink)
from test.support.script_helper import assert_python_ok
from test.support.warnings_helper import check_warnings
@@ -2370,6 +2370,28 @@ def __del__(self):
self.assertEqual(["before", "after"], out.decode().splitlines())
+@cpython_only
+class ImmortalTests(unittest.TestCase):
+ def test_immortal(self):
+ none_refcount = sys.getrefcount(None)
+ true_refcount = sys.getrefcount(True)
+ false_refcount = sys.getrefcount(False)
+ smallint_refcount = sys.getrefcount(100)
+
+ # Assert that all of these immortal instances have large ref counts.
+ self.assertGreater(none_refcount, 2 ** 15)
+ self.assertGreater(true_refcount, 2 ** 15)
+ self.assertGreater(false_refcount, 2 ** 15)
+ self.assertGreater(smallint_refcount, 2 ** 15)
+
+ # Confirm that the refcount doesn't change even with a new ref to them.
+ l = [None, True, False, 100]
+ self.assertEqual(sys.getrefcount(None), none_refcount)
+ self.assertEqual(sys.getrefcount(True), true_refcount)
+ self.assertEqual(sys.getrefcount(False), false_refcount)
+ self.assertEqual(sys.getrefcount(100), smallint_refcount)
+
+
class TestType(unittest.TestCase):
def test_new_type(self):
A = type('A', (), {})
diff --git a/Lib/test/test_cmd_line_script.py b/Lib/test/test_cmd_line_script.py
index f10d72ea5547ee..d98e23855e0c19 100644
--- a/Lib/test/test_cmd_line_script.py
+++ b/Lib/test/test_cmd_line_script.py
@@ -636,9 +636,9 @@ def test_syntaxerror_multi_line_fstring(self):
self.assertEqual(
stderr.splitlines()[-3:],
[
- b' foo"""',
- b' ^',
- b'SyntaxError: f-string: empty expression not allowed',
+ b' foo = f"""{}',
+ b' ^',
+ b'SyntaxError: f-string: valid expression required before \'}\'',
],
)
diff --git a/Lib/test/test_ctypes/test_pep3118.py b/Lib/test/test_ctypes/test_pep3118.py
index c8a70e3e335693..038161745df905 100644
--- a/Lib/test/test_ctypes/test_pep3118.py
+++ b/Lib/test/test_ctypes/test_pep3118.py
@@ -28,7 +28,7 @@ def test_native_types(self):
if shape:
self.assertEqual(len(v), shape[0])
else:
- self.assertEqual(len(v) * sizeof(itemtp), sizeof(ob))
+ self.assertRaises(TypeError, len, v)
self.assertEqual(v.itemsize, sizeof(itemtp))
self.assertEqual(v.shape, shape)
# XXX Issue #12851: PyCData_NewGetBuffer() must provide strides
@@ -39,11 +39,10 @@ def test_native_types(self):
# they are always read/write
self.assertFalse(v.readonly)
- if v.shape:
- n = 1
- for dim in v.shape:
- n = n * dim
- self.assertEqual(n * v.itemsize, len(v.tobytes()))
+ n = 1
+ for dim in v.shape:
+ n = n * dim
+ self.assertEqual(n * v.itemsize, len(v.tobytes()))
except:
# so that we can see the failing type
print(tp)
@@ -58,7 +57,7 @@ def test_endian_types(self):
if shape:
self.assertEqual(len(v), shape[0])
else:
- self.assertEqual(len(v) * sizeof(itemtp), sizeof(ob))
+ self.assertRaises(TypeError, len, v)
self.assertEqual(v.itemsize, sizeof(itemtp))
self.assertEqual(v.shape, shape)
# XXX Issue #12851
@@ -67,11 +66,10 @@ def test_endian_types(self):
# they are always read/write
self.assertFalse(v.readonly)
- if v.shape:
- n = 1
- for dim in v.shape:
- n = n * dim
- self.assertEqual(n, len(v))
+ n = 1
+ for dim in v.shape:
+ n = n * dim
+ self.assertEqual(n * v.itemsize, len(v.tobytes()))
except:
# so that we can see the failing type
print(tp)
@@ -243,7 +241,7 @@ class LEPoint(LittleEndianStructure):
#
endian_types = [
(BEPoint, "T{>l:x:>l:y:}".replace('l', s_long), (), BEPoint),
- (LEPoint, "T{<l:x:<l:y:}".replace('l', s_long), (), LEPoint),
 (POINTER(BEPoint), "&T{>l:x:>l:y:}".replace('l', s_long), (), POINTER(BEPoint)),
 (POINTER(LEPoint), "&T{<l:x:<l:y:}".replace('l', s_long), (), POINTER(LEPoint)),
diff --git a/Lib/test/test_dataclasses.py b/Lib/test/test_dataclasses.py
--- a/Lib/test/test_dataclasses.py
+++ b/Lib/test/test_dataclasses.py
 self.assertDocStrEqual(C.__doc__, "C(x:collections.deque=<factory>)")
+ def test_docstring_with_no_signature(self):
+ # See https://github.com/python/cpython/issues/103449
+ class Meta(type):
+ __call__ = dict
+ class Base(metaclass=Meta):
+ pass
+
+ @dataclass
+ class C(Base):
+ pass
+
+ self.assertDocStrEqual(C.__doc__, "C")
+
class TestInit(unittest.TestCase):
def test_base_has_init(self):
diff --git a/Lib/test/test_ensurepip.py b/Lib/test/test_ensurepip.py
index bfca0cd7fbe483..69ab2a4feaa938 100644
--- a/Lib/test/test_ensurepip.py
+++ b/Lib/test/test_ensurepip.py
@@ -20,7 +20,6 @@ def test_version(self):
# Test version()
with tempfile.TemporaryDirectory() as tmpdir:
self.touch(tmpdir, "pip-1.2.3b1-py2.py3-none-any.whl")
- self.touch(tmpdir, "setuptools-49.1.3-py3-none-any.whl")
with (unittest.mock.patch.object(ensurepip, '_PACKAGES', None),
unittest.mock.patch.object(ensurepip, '_WHEEL_PKG_DIR', tmpdir)):
self.assertEqual(ensurepip.version(), '1.2.3b1')
@@ -36,15 +35,12 @@ def test_get_packages_no_dir(self):
# use bundled wheel packages
self.assertIsNotNone(packages['pip'].wheel_name)
- self.assertIsNotNone(packages['setuptools'].wheel_name)
def test_get_packages_with_dir(self):
# Test _get_packages() with a wheel package directory
- setuptools_filename = "setuptools-49.1.3-py3-none-any.whl"
pip_filename = "pip-20.2.2-py2.py3-none-any.whl"
with tempfile.TemporaryDirectory() as tmpdir:
- self.touch(tmpdir, setuptools_filename)
self.touch(tmpdir, pip_filename)
# not used, make sure that it's ignored
self.touch(tmpdir, "wheel-0.34.2-py2.py3-none-any.whl")
@@ -53,15 +49,12 @@ def test_get_packages_with_dir(self):
unittest.mock.patch.object(ensurepip, '_WHEEL_PKG_DIR', tmpdir)):
packages = ensurepip._get_packages()
- self.assertEqual(packages['setuptools'].version, '49.1.3')
- self.assertEqual(packages['setuptools'].wheel_path,
- os.path.join(tmpdir, setuptools_filename))
self.assertEqual(packages['pip'].version, '20.2.2')
self.assertEqual(packages['pip'].wheel_path,
os.path.join(tmpdir, pip_filename))
# wheel package is ignored
- self.assertEqual(sorted(packages), ['pip', 'setuptools'])
+ self.assertEqual(sorted(packages), ['pip'])
class EnsurepipMixin:
@@ -92,13 +85,13 @@ def test_basic_bootstrapping(self):
self.run_pip.assert_called_once_with(
[
"install", "--no-cache-dir", "--no-index", "--find-links",
- unittest.mock.ANY, "setuptools", "pip",
+ unittest.mock.ANY, "pip",
],
unittest.mock.ANY,
)
additional_paths = self.run_pip.call_args[0][1]
- self.assertEqual(len(additional_paths), 2)
+ self.assertEqual(len(additional_paths), 1)
def test_bootstrapping_with_root(self):
ensurepip.bootstrap(root="/foo/bar/")
@@ -107,7 +100,7 @@ def test_bootstrapping_with_root(self):
[
"install", "--no-cache-dir", "--no-index", "--find-links",
unittest.mock.ANY, "--root", "/foo/bar/",
- "setuptools", "pip",
+ "pip",
],
unittest.mock.ANY,
)
@@ -118,7 +111,7 @@ def test_bootstrapping_with_user(self):
self.run_pip.assert_called_once_with(
[
"install", "--no-cache-dir", "--no-index", "--find-links",
- unittest.mock.ANY, "--user", "setuptools", "pip",
+ unittest.mock.ANY, "--user", "pip",
],
unittest.mock.ANY,
)
@@ -129,7 +122,7 @@ def test_bootstrapping_with_upgrade(self):
self.run_pip.assert_called_once_with(
[
"install", "--no-cache-dir", "--no-index", "--find-links",
- unittest.mock.ANY, "--upgrade", "setuptools", "pip",
+ unittest.mock.ANY, "--upgrade", "pip",
],
unittest.mock.ANY,
)
@@ -140,7 +133,7 @@ def test_bootstrapping_with_verbosity_1(self):
self.run_pip.assert_called_once_with(
[
"install", "--no-cache-dir", "--no-index", "--find-links",
- unittest.mock.ANY, "-v", "setuptools", "pip",
+ unittest.mock.ANY, "-v", "pip",
],
unittest.mock.ANY,
)
@@ -151,7 +144,7 @@ def test_bootstrapping_with_verbosity_2(self):
self.run_pip.assert_called_once_with(
[
"install", "--no-cache-dir", "--no-index", "--find-links",
- unittest.mock.ANY, "-vv", "setuptools", "pip",
+ unittest.mock.ANY, "-vv", "pip",
],
unittest.mock.ANY,
)
@@ -162,7 +155,7 @@ def test_bootstrapping_with_verbosity_3(self):
self.run_pip.assert_called_once_with(
[
"install", "--no-cache-dir", "--no-index", "--find-links",
- unittest.mock.ANY, "-vvv", "setuptools", "pip",
+ unittest.mock.ANY, "-vvv", "pip",
],
unittest.mock.ANY,
)
@@ -239,7 +232,6 @@ def test_uninstall(self):
self.run_pip.assert_called_once_with(
[
"uninstall", "-y", "--disable-pip-version-check", "pip",
- "setuptools",
]
)
@@ -250,7 +242,6 @@ def test_uninstall_with_verbosity_1(self):
self.run_pip.assert_called_once_with(
[
"uninstall", "-y", "--disable-pip-version-check", "-v", "pip",
- "setuptools",
]
)
@@ -261,7 +252,6 @@ def test_uninstall_with_verbosity_2(self):
self.run_pip.assert_called_once_with(
[
"uninstall", "-y", "--disable-pip-version-check", "-vv", "pip",
- "setuptools",
]
)
@@ -272,7 +262,7 @@ def test_uninstall_with_verbosity_3(self):
self.run_pip.assert_called_once_with(
[
"uninstall", "-y", "--disable-pip-version-check", "-vvv",
- "pip", "setuptools",
+ "pip"
]
)
@@ -312,13 +302,13 @@ def test_basic_bootstrapping(self):
self.run_pip.assert_called_once_with(
[
"install", "--no-cache-dir", "--no-index", "--find-links",
- unittest.mock.ANY, "setuptools", "pip",
+ unittest.mock.ANY, "pip",
],
unittest.mock.ANY,
)
additional_paths = self.run_pip.call_args[0][1]
- self.assertEqual(len(additional_paths), 2)
+ self.assertEqual(len(additional_paths), 1)
self.assertEqual(exit_code, 0)
def test_bootstrapping_error_code(self):
@@ -344,7 +334,6 @@ def test_basic_uninstall(self):
self.run_pip.assert_called_once_with(
[
"uninstall", "-y", "--disable-pip-version-check", "pip",
- "setuptools",
]
)
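
These test_ensurepip changes reflect that setuptools is no longer bundled alongside pip, so bootstrapping installs a single wheel. A rough sketch of the resulting behaviour (assuming an installation where this change applies):

    import ensurepip

    ensurepip.version()                  # version string of the bundled pip wheel
    sorted(ensurepip._get_packages())    # ['pip'] -- setuptools no longer listed

    # `python -m ensurepip --upgrade` now effectively runs:
    #   pip install --no-cache-dir --no-index --find-links <wheel dir> --upgrade pip
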
diff --git a/Lib/test/test_enum.py b/Lib/test/test_enum.py
index e9dfcf8586a823..fb7a016c9007f8 100644
--- a/Lib/test/test_enum.py
+++ b/Lib/test/test_enum.py
@@ -819,10 +819,27 @@ class TestPlainFlag(_EnumTests, _PlainOutputTests, _FlagTests, unittest.TestCase
class TestIntEnum(_EnumTests, _MinimalOutputTests, unittest.TestCase):
enum_type = IntEnum
+ #
+ def test_shadowed_attr(self):
+ class Number(IntEnum):
+ divisor = 1
+ numerator = 2
+ #
+ self.assertEqual(Number.divisor.numerator, 1)
+ self.assertIs(Number.numerator.divisor, Number.divisor)
class TestStrEnum(_EnumTests, _MinimalOutputTests, unittest.TestCase):
enum_type = StrEnum
+ #
+ def test_shadowed_attr(self):
+ class Book(StrEnum):
+ author = 'author'
+ title = 'title'
+ #
+ self.assertEqual(Book.author.title(), 'Author')
+ self.assertEqual(Book.title.title(), 'Title')
+ self.assertIs(Book.title.author, Book.author)
class TestIntFlag(_EnumTests, _MinimalOutputTests, _FlagTests, unittest.TestCase):
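
The new test_shadowed_attr cases above document how member names that collide with attributes of the member's data type resolve: the data type's own attribute wins on an instance, while names the data type lacks still find the enum member. For example (mirroring the tests):

    from enum import IntEnum, StrEnum

    class Number(IntEnum):
        divisor = 1
        numerator = 2

    Number.divisor.numerator    # 1 -- int's 'numerator' property, not the member
    Number.numerator.divisor    # Number.divisor -- int has no 'divisor', so the member is found

    class Book(StrEnum):
        author = 'author'
        title = 'title'

    Book.author.title()         # 'Author' -- str.title(), not the 'title' member
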
diff --git a/Lib/test/test_eof.py b/Lib/test/test_eof.py
index abcbf046e2cc22..be4fd73bfdc36b 100644
--- a/Lib/test/test_eof.py
+++ b/Lib/test/test_eof.py
@@ -4,6 +4,7 @@
from test import support
from test.support import os_helper
from test.support import script_helper
+from test.support import warnings_helper
import unittest
class EOFTestCase(unittest.TestCase):
@@ -36,10 +37,11 @@ def test_EOFS_with_file(self):
rc, out, err = script_helper.assert_python_failure(file_name)
self.assertIn(b'unterminated triple-quoted string literal (detected at line 3)', err)
+ @warnings_helper.ignore_warnings(category=SyntaxWarning)
def test_eof_with_line_continuation(self):
expect = "unexpected EOF while parsing (, line 1)"
try:
- compile('"\\xhh" \\', '', 'exec', dont_inherit=True)
+ compile('"\\Xhh" \\', '', 'exec')
except SyntaxError as msg:
self.assertEqual(str(msg), expect)
else:
diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py
index 684e888f08c778..4ef7decfbc263e 100644
--- a/Lib/test/test_exceptions.py
+++ b/Lib/test/test_exceptions.py
@@ -155,6 +155,7 @@ def ckmsg(src, msg):
ckmsg(s, "'continue' not properly in loop")
ckmsg("continue\n", "'continue' not properly in loop")
+ ckmsg("f'{6 0}'", "invalid syntax. Perhaps you forgot a comma?")
def testSyntaxErrorMissingParens(self):
def ckmsg(src, msg, exception=SyntaxError):
@@ -227,7 +228,7 @@ def testSyntaxErrorOffset(self):
check('Python = "\u1e54\xfd\u0163\u0125\xf2\xf1" +', 1, 20)
check(b'# -*- coding: cp1251 -*-\nPython = "\xcf\xb3\xf2\xee\xed" +',
2, 19, encoding='cp1251')
- check(b'Python = "\xcf\xb3\xf2\xee\xed" +', 1, 18)
+ check(b'Python = "\xcf\xb3\xf2\xee\xed" +', 1, 10)
check('x = "a', 1, 5)
check('lambda x: x = 2', 1, 1)
check('f{a + b + c}', 1, 2)
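
The added ckmsg case relies on replacement fields in f-strings now going through the regular parser, so its more specific error hints surface. A small sketch:

    try:
        compile("f'{6 0}'", "<example>", "exec")
    except SyntaxError as exc:
        print(exc.msg)   # invalid syntax. Perhaps you forgot a comma?
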
diff --git a/Lib/test/test_fstring.py b/Lib/test/test_fstring.py
index b3f6ef41d77b8f..9d5e16628f04b6 100644
--- a/Lib/test/test_fstring.py
+++ b/Lib/test/test_fstring.py
@@ -13,6 +13,7 @@
import types
import decimal
import unittest
+from test import support
from test.support.os_helper import temp_cwd
from test.support.script_helper import assert_python_failure
@@ -329,13 +330,13 @@ def test_ast_line_numbers_multiline_fstring(self):
self.assertEqual(t.body[1].lineno, 3)
self.assertEqual(t.body[1].value.lineno, 3)
self.assertEqual(t.body[1].value.values[0].lineno, 3)
- self.assertEqual(t.body[1].value.values[1].lineno, 3)
- self.assertEqual(t.body[1].value.values[2].lineno, 3)
+ self.assertEqual(t.body[1].value.values[1].lineno, 4)
+ self.assertEqual(t.body[1].value.values[2].lineno, 6)
self.assertEqual(t.body[1].col_offset, 0)
self.assertEqual(t.body[1].value.col_offset, 0)
- self.assertEqual(t.body[1].value.values[0].col_offset, 0)
- self.assertEqual(t.body[1].value.values[1].col_offset, 0)
- self.assertEqual(t.body[1].value.values[2].col_offset, 0)
+ self.assertEqual(t.body[1].value.values[0].col_offset, 4)
+ self.assertEqual(t.body[1].value.values[1].col_offset, 2)
+ self.assertEqual(t.body[1].value.values[2].col_offset, 11)
# NOTE: the following lineno information and col_offset is correct for
# expressions within FormattedValues.
binop = t.body[1].value.values[1].value
@@ -366,13 +367,13 @@ def test_ast_line_numbers_multiline_fstring(self):
self.assertEqual(t.body[0].lineno, 2)
self.assertEqual(t.body[0].value.lineno, 2)
self.assertEqual(t.body[0].value.values[0].lineno, 2)
- self.assertEqual(t.body[0].value.values[1].lineno, 2)
- self.assertEqual(t.body[0].value.values[2].lineno, 2)
+ self.assertEqual(t.body[0].value.values[1].lineno, 3)
+ self.assertEqual(t.body[0].value.values[2].lineno, 3)
self.assertEqual(t.body[0].col_offset, 0)
self.assertEqual(t.body[0].value.col_offset, 4)
- self.assertEqual(t.body[0].value.values[0].col_offset, 4)
- self.assertEqual(t.body[0].value.values[1].col_offset, 4)
- self.assertEqual(t.body[0].value.values[2].col_offset, 4)
+ self.assertEqual(t.body[0].value.values[0].col_offset, 8)
+ self.assertEqual(t.body[0].value.values[1].col_offset, 10)
+ self.assertEqual(t.body[0].value.values[2].col_offset, 17)
# Check {blech}
self.assertEqual(t.body[0].value.values[1].value.lineno, 3)
self.assertEqual(t.body[0].value.values[1].value.end_lineno, 3)
@@ -387,6 +388,20 @@ def test_ast_line_numbers_with_parentheses(self):
t = ast.parse(expr)
self.assertEqual(type(t), ast.Module)
self.assertEqual(len(t.body), 1)
+ # check the joinedstr location
+ joinedstr = t.body[0].value
+ self.assertEqual(type(joinedstr), ast.JoinedStr)
+ self.assertEqual(joinedstr.lineno, 3)
+ self.assertEqual(joinedstr.end_lineno, 3)
+ self.assertEqual(joinedstr.col_offset, 4)
+ self.assertEqual(joinedstr.end_col_offset, 17)
+ # check the formatted value location
+ fv = t.body[0].value.values[1]
+ self.assertEqual(type(fv), ast.FormattedValue)
+ self.assertEqual(fv.lineno, 3)
+ self.assertEqual(fv.end_lineno, 3)
+ self.assertEqual(fv.col_offset, 7)
+ self.assertEqual(fv.end_col_offset, 16)
# check the test(t) location
call = t.body[0].value.values[1].value
self.assertEqual(type(call), ast.Call)
@@ -397,6 +412,50 @@ def test_ast_line_numbers_with_parentheses(self):
expr = """
x = (
+ u'wat',
+ u"wat",
+ b'wat',
+ b"wat",
+ f'wat',
+ f"wat",
+)
+
+y = (
+ u'''wat''',
+ u\"\"\"wat\"\"\",
+ b'''wat''',
+ b\"\"\"wat\"\"\",
+ f'''wat''',
+ f\"\"\"wat\"\"\",
+)
+ """
+ t = ast.parse(expr)
+ self.assertEqual(type(t), ast.Module)
+ self.assertEqual(len(t.body), 2)
+ x, y = t.body
+
+ # Check the single quoted string offsets first.
+ offsets = [
+ (elt.col_offset, elt.end_col_offset)
+ for elt in x.value.elts
+ ]
+ self.assertTrue(all(
+ offset == (4, 10)
+ for offset in offsets
+ ))
+
+ # Check the triple quoted string offsets.
+ offsets = [
+ (elt.col_offset, elt.end_col_offset)
+ for elt in y.value.elts
+ ]
+ self.assertTrue(all(
+ offset == (4, 14)
+ for offset in offsets
+ ))
+
+ expr = """
+x = (
'PERL_MM_OPT', (
f'wat'
f'some_string={f(x)} '
@@ -415,9 +474,9 @@ def test_ast_line_numbers_with_parentheses(self):
# check the first wat
self.assertEqual(type(wat1), ast.Constant)
self.assertEqual(wat1.lineno, 4)
- self.assertEqual(wat1.end_lineno, 6)
- self.assertEqual(wat1.col_offset, 12)
- self.assertEqual(wat1.end_col_offset, 18)
+ self.assertEqual(wat1.end_lineno, 5)
+ self.assertEqual(wat1.col_offset, 14)
+ self.assertEqual(wat1.end_col_offset, 26)
# check the call
call = middle.value
self.assertEqual(type(call), ast.Call)
@@ -427,10 +486,14 @@ def test_ast_line_numbers_with_parentheses(self):
self.assertEqual(call.end_col_offset, 31)
# check the second wat
self.assertEqual(type(wat2), ast.Constant)
- self.assertEqual(wat2.lineno, 4)
+ self.assertEqual(wat2.lineno, 5)
self.assertEqual(wat2.end_lineno, 6)
- self.assertEqual(wat2.col_offset, 12)
- self.assertEqual(wat2.end_col_offset, 18)
+ self.assertEqual(wat2.col_offset, 32)
+ # wat ends at the offset 17, but the whole f-string
+ # ends at the offset 18 (since the quote is part of the
+ # f-string but not the wat string)
+ self.assertEqual(wat2.end_col_offset, 17)
+ self.assertEqual(fstring.end_col_offset, 18)
def test_docstring(self):
def f():
@@ -467,36 +530,42 @@ def test_literal(self):
self.assertEqual(f' ', ' ')
def test_unterminated_string(self):
- self.assertAllRaise(SyntaxError, 'f-string: unterminated string',
+ self.assertAllRaise(SyntaxError, 'unterminated string',
[r"""f'{"x'""",
r"""f'{"x}'""",
r"""f'{("x'""",
r"""f'{("x}'""",
])
+ @unittest.skipIf(support.is_wasi, "exhausts limited stack on WASI")
def test_mismatched_parens(self):
- self.assertAllRaise(SyntaxError, r"f-string: closing parenthesis '\}' "
+ self.assertAllRaise(SyntaxError, r"closing parenthesis '\}' "
r"does not match opening parenthesis '\('",
["f'{((}'",
])
- self.assertAllRaise(SyntaxError, r"f-string: closing parenthesis '\)' "
+ self.assertAllRaise(SyntaxError, r"closing parenthesis '\)' "
r"does not match opening parenthesis '\['",
["f'{a[4)}'",
])
- self.assertAllRaise(SyntaxError, r"f-string: closing parenthesis '\]' "
+ self.assertAllRaise(SyntaxError, r"closing parenthesis '\]' "
r"does not match opening parenthesis '\('",
["f'{a(4]}'",
])
- self.assertAllRaise(SyntaxError, r"f-string: closing parenthesis '\}' "
+ self.assertAllRaise(SyntaxError, r"closing parenthesis '\}' "
r"does not match opening parenthesis '\['",
["f'{a[4}'",
])
- self.assertAllRaise(SyntaxError, r"f-string: closing parenthesis '\}' "
+ self.assertAllRaise(SyntaxError, r"closing parenthesis '\}' "
r"does not match opening parenthesis '\('",
["f'{a(4}'",
])
self.assertRaises(SyntaxError, eval, "f'{" + "("*500 + "}'")
+ def test_fstring_nested_too_deeply(self):
+ self.assertAllRaise(SyntaxError,
+ "f-string: expressions nested too deeply",
+ ['f"{1+2:{1+2:{1+1:{1}}}}"'])
+
def test_double_braces(self):
self.assertEqual(f'{{', '{')
self.assertEqual(f'a{{', 'a{')
@@ -559,8 +628,14 @@ def test_compile_time_concat(self):
self.assertEqual(f'' '' f'', '')
self.assertEqual(f'' '' f'' '', '')
- self.assertAllRaise(SyntaxError, "f-string: expecting '}'",
- ["f'{3' f'}'", # can't concat to get a valid f-string
+ # This is not really [f'{'] + [f'}'] since we treat the inside
+ # of braces as a purely new context, so it is actually f'{ and
+ # then eval(' f') (a valid expression) and then }' which would
+ # constitute a valid f-string.
+ self.assertEqual(f'{' f'}', ' f')
+
+ self.assertAllRaise(SyntaxError, "expecting '}'",
+ ['''f'{3' f"}"''', # can't concat to get a valid f-string
])
def test_comments(self):
@@ -618,25 +693,19 @@ def test_format_specifier_expressions(self):
self.assertEqual(f'{-10:-{"#"}1{0}x}', ' -0xa')
self.assertEqual(f'{-10:{"-"}#{1}0{"x"}}', ' -0xa')
self.assertEqual(f'{10:#{3 != {4:5} and width}x}', ' 0xa')
+ self.assertEqual(f'result: {value:{width:{0}}.{precision:1}}', 'result: 12.35')
- self.assertAllRaise(SyntaxError,
- """f-string: invalid conversion character 'r{"': """
- """expected 's', 'r', or 'a'""",
+ self.assertAllRaise(SyntaxError, "f-string: expecting ':' or '}'",
["""f'{"s"!r{":10"}}'""",
-
# This looks like a nested format spec.
])
- self.assertAllRaise(SyntaxError, "f-string: invalid syntax",
+ self.assertAllRaise(SyntaxError,
+ "f-string: expecting a valid expression after '{'",
[# Invalid syntax inside a nested spec.
"f'{4:{/5}}'",
])
- self.assertAllRaise(SyntaxError, "f-string: expressions nested too deeply",
- [# Can't nest format specifiers.
- "f'result: {value:{width:{0}}.{precision:1}}'",
- ])
-
self.assertAllRaise(SyntaxError, 'f-string: invalid conversion character',
[# No expansion inside conversion or for
# the : or ! itself.
@@ -655,7 +724,8 @@ def __format__(self, spec):
self.assertEqual(f'{x} {x}', '1 2')
def test_missing_expression(self):
- self.assertAllRaise(SyntaxError, 'f-string: empty expression not allowed',
+ self.assertAllRaise(SyntaxError,
+ "f-string: valid expression required before '}'",
["f'{}'",
"f'{ }'"
"f' {} '",
@@ -667,8 +737,8 @@ def test_missing_expression(self):
"f'''{\t\f\r\n}'''",
])
- # Different error messages are raised when a specifier ('!', ':' or '=') is used after an empty expression
- self.assertAllRaise(SyntaxError, "f-string: expression required before '!'",
+ self.assertAllRaise(SyntaxError,
+ "f-string: valid expression required before '!'",
["f'{!r}'",
"f'{ !r}'",
"f'{!}'",
@@ -689,7 +759,8 @@ def test_missing_expression(self):
"f'{ !xr:a}'",
])
- self.assertAllRaise(SyntaxError, "f-string: expression required before ':'",
+ self.assertAllRaise(SyntaxError,
+ "f-string: valid expression required before ':'",
["f'{:}'",
"f'{ :!}'",
"f'{:2}'",
@@ -697,7 +768,8 @@ def test_missing_expression(self):
"f'{:'",
])
- self.assertAllRaise(SyntaxError, "f-string: expression required before '='",
+ self.assertAllRaise(SyntaxError,
+ "f-string: valid expression required before '='",
["f'{=}'",
"f'{ =}'",
"f'{ =:}'",
@@ -715,24 +787,18 @@ def test_missing_expression(self):
def test_parens_in_expressions(self):
self.assertEqual(f'{3,}', '(3,)')
- # Add these because when an expression is evaluated, parens
- # are added around it. But we shouldn't go from an invalid
- # expression to a valid one. The added parens are just
- # supposed to allow whitespace (including newlines).
- self.assertAllRaise(SyntaxError, 'f-string: invalid syntax',
+ self.assertAllRaise(SyntaxError,
+ "f-string: expecting a valid expression after '{'",
["f'{,}'",
- "f'{,}'", # this is (,), which is an error
])
self.assertAllRaise(SyntaxError, r"f-string: unmatched '\)'",
["f'{3)+(4}'",
])
- self.assertAllRaise(SyntaxError, 'unterminated string literal',
- ["f'{\n}'",
- ])
def test_newlines_before_syntax_error(self):
- self.assertAllRaise(SyntaxError, "invalid syntax",
+ self.assertAllRaise(SyntaxError,
+ "f-string: expecting a valid expression after '{'",
["f'{.}'", "\nf'{.}'", "\n\nf'{.}'"])
def test_backslashes_in_string_part(self):
@@ -776,7 +842,7 @@ def test_backslashes_in_string_part(self):
self.assertEqual(f'2\x203', '2 3')
self.assertEqual(f'\x203', ' 3')
- with self.assertWarns(SyntaxWarning): # invalid escape sequence
+ with self.assertWarns(DeprecationWarning): # invalid escape sequence
value = eval(r"f'\{6*7}'")
self.assertEqual(value, '\\42')
self.assertEqual(f'\\{6*7}', '\\42')
@@ -809,18 +875,40 @@ def test_misformed_unicode_character_name(self):
r"'\N{GREEK CAPITAL LETTER DELTA'",
])
- def test_no_backslashes_in_expression_part(self):
- self.assertAllRaise(SyntaxError, 'f-string expression part cannot include a backslash',
- [r"f'{\'a\'}'",
- r"f'{\t3}'",
- r"f'{\}'",
- r"rf'{\'a\'}'",
- r"rf'{\t3}'",
- r"rf'{\}'",
- r"""rf'{"\N{LEFT CURLY BRACKET}"}'""",
- r"f'{\n}'",
+ def test_backslashes_in_expression_part(self):
+ self.assertEqual(f"{(
+ 1 +
+ 2
+ )}", "3")
+
+ self.assertEqual("\N{LEFT CURLY BRACKET}", '{')
+ self.assertEqual(f'{"\N{LEFT CURLY BRACKET}"}', '{')
+ self.assertEqual(rf'{"\N{LEFT CURLY BRACKET}"}', '{')
+
+ self.assertAllRaise(SyntaxError,
+ "f-string: valid expression required before '}'",
+ ["f'{\n}'",
])
+ def test_invalid_backslashes_inside_fstring_context(self):
+        # All of these variations are invalid Python syntax,
+        # so they are also invalid inside f-strings.
+ cases = [
+ formatting.format(expr=expr)
+ for formatting in [
+ "{expr}",
+ "f'{{{expr}}}'",
+ "rf'{{{expr}}}'",
+ ]
+ for expr in [
+ r"\'a\'",
+ r"\t3",
+ r"\\"[0],
+ ]
+ ]
+ self.assertAllRaise(SyntaxError, 'unexpected character after line continuation',
+ cases)
+
def test_no_escapes_for_braces(self):
"""
Only literal curly braces begin an expression.
@@ -843,11 +931,67 @@ def test_lambda(self):
self.assertEqual(f'{(lambda y:x*y)("8"):10}', "88888 ")
# lambda doesn't work without parens, because the colon
- # makes the parser think it's a format_spec
- self.assertAllRaise(SyntaxError, 'f-string: invalid syntax',
+ # makes the parser think it's a format_spec
+ # emit warning if we can match a format_spec
+ self.assertAllRaise(SyntaxError,
+ "f-string: lambda expressions are not allowed "
+ "without parentheses",
["f'{lambda x:x}'",
+ "f'{lambda :x}'",
+ "f'{lambda *arg, :x}'",
+ "f'{1, lambda:x}'",
+ "f'{lambda x:}'",
+ "f'{lambda :}'",
])
+ # but don't emit the paren warning in general cases
+ with self.assertRaisesRegex(SyntaxError, "f-string: expecting a valid expression after '{'"):
+ eval("f'{+ lambda:None}'")
+
+ def test_valid_prefixes(self):
+ self.assertEqual(F'{1}', "1")
+ self.assertEqual(FR'{2}', "2")
+ self.assertEqual(fR'{3}', "3")
+
+ def test_roundtrip_raw_quotes(self):
+ self.assertEqual(fr"\'", "\\'")
+ self.assertEqual(fr'\"', '\\"')
+ self.assertEqual(fr'\"\'', '\\"\\\'')
+ self.assertEqual(fr'\'\"', '\\\'\\"')
+ self.assertEqual(fr'\"\'\"', '\\"\\\'\\"')
+ self.assertEqual(fr'\'\"\'', '\\\'\\"\\\'')
+ self.assertEqual(fr'\"\'\"\'', '\\"\\\'\\"\\\'')
+
+ def test_fstring_backslash_before_double_bracket(self):
+ self.assertEqual(f'\{{\}}', '\\{\\}')
+ self.assertEqual(f'\{{', '\\{')
+ self.assertEqual(f'\{{{1+1}', '\\{2')
+ self.assertEqual(f'\}}{1+1}', '\\}2')
+ self.assertEqual(f'{1+1}\}}', '2\\}')
+ self.assertEqual(fr'\{{\}}', '\\{\\}')
+ self.assertEqual(fr'\{{', '\\{')
+ self.assertEqual(fr'\{{{1+1}', '\\{2')
+ self.assertEqual(fr'\}}{1+1}', '\\}2')
+ self.assertEqual(fr'{1+1}\}}', '2\\}')
+
+ def test_fstring_backslash_prefix_raw(self):
+ self.assertEqual(f'\\', '\\')
+ self.assertEqual(f'\\\\', '\\\\')
+ self.assertEqual(fr'\\', r'\\')
+ self.assertEqual(fr'\\\\', r'\\\\')
+ self.assertEqual(rf'\\', r'\\')
+ self.assertEqual(rf'\\\\', r'\\\\')
+ self.assertEqual(Rf'\\', R'\\')
+ self.assertEqual(Rf'\\\\', R'\\\\')
+ self.assertEqual(fR'\\', R'\\')
+ self.assertEqual(fR'\\\\', R'\\\\')
+ self.assertEqual(FR'\\', R'\\')
+ self.assertEqual(FR'\\\\', R'\\\\')
+
+ def test_fstring_format_spec_greedy_matching(self):
+ self.assertEqual(f"{1:}}}", "1}")
+ self.assertEqual(f"{1:>3{5}}}}", " 1}")
+
def test_yield(self):
# Not terribly useful, but make sure the yield turns
# a function into a generator
@@ -1037,6 +1181,11 @@ def test_conversions(self):
self.assertEqual(f'{"a"!r}', "'a'")
self.assertEqual(f'{"a"!a}', "'a'")
+        # Conversions can have trailing whitespace after them, since the
+        # whitespace is not significant.
+ self.assertEqual(f"{3!s }", "3")
+ self.assertEqual(f'{3.14!s :10.10}', '3.14 ')
+
# Not a conversion.
self.assertEqual(f'{"a!r"}', "a!r")
@@ -1049,16 +1198,27 @@ def test_conversions(self):
"f'{3!g'",
])
- self.assertAllRaise(SyntaxError, 'f-string: missed conversion character',
+ self.assertAllRaise(SyntaxError, 'f-string: missing conversion character',
["f'{3!}'",
"f'{3!:'",
"f'{3!:}'",
])
- for conv in 'g', 'A', '3', 'G', '!', ' s', 's ', ' s ', 'ä', 'ɐ', 'ª':
+ for conv_identifier in 'g', 'A', 'G', 'ä', 'ɐ':
self.assertAllRaise(SyntaxError,
"f-string: invalid conversion character %r: "
- "expected 's', 'r', or 'a'" % conv,
+ "expected 's', 'r', or 'a'" % conv_identifier,
+ ["f'{3!" + conv_identifier + "}'"])
+
+ for conv_non_identifier in '3', '!':
+ self.assertAllRaise(SyntaxError,
+ "f-string: invalid conversion character",
+ ["f'{3!" + conv_non_identifier + "}'"])
+
+ for conv in ' s', ' s ':
+ self.assertAllRaise(SyntaxError,
+ "f-string: conversion type must come right after the"
+ " exclamanation mark",
["f'{3!" + conv + "}'"])
self.assertAllRaise(SyntaxError,
@@ -1097,8 +1257,7 @@ def test_mismatched_braces(self):
])
self.assertAllRaise(SyntaxError, "f-string: expecting '}'",
- ["f'{3:{{>10}'",
- "f'{3'",
+ ["f'{3'",
"f'{3!'",
"f'{3:'",
"f'{3!s'",
@@ -1111,11 +1270,14 @@ def test_mismatched_braces(self):
"f'{{{'",
"f'{{}}{'",
"f'{'",
- "f'x{<'", # See bpo-46762.
- "f'x{>'",
"f'{i='", # See gh-93418.
])
+ self.assertAllRaise(SyntaxError,
+ "f-string: expecting a valid expression after '{'",
+ ["f'{3:{{>10}'",
+ ])
+
# But these are just normal strings.
self.assertEqual(f'{"{"}', '{')
self.assertEqual(f'{"}"}', '}')
@@ -1314,6 +1476,7 @@ def __repr__(self):
self.assertEqual(f'X{x =}Y', 'Xx ='+repr(x)+'Y')
self.assertEqual(f'X{x= }Y', 'Xx= '+repr(x)+'Y')
self.assertEqual(f'X{x = }Y', 'Xx = '+repr(x)+'Y')
+ self.assertEqual(f"sadsd {1 + 1 = :{1 + 1:1d}f}", "sadsd 1 + 1 = 2.000000")
# These next lines contains tabs. Backslash escapes don't
# work in f-strings.
@@ -1335,7 +1498,8 @@ def test_walrus(self):
self.assertEqual(x, 10)
def test_invalid_syntax_error_message(self):
- with self.assertRaisesRegex(SyntaxError, "f-string: invalid syntax"):
+ with self.assertRaisesRegex(SyntaxError,
+ "f-string: expecting '=', or '!', or ':', or '}'"):
compile("f'{a $ b}'", "?", "exec")
def test_with_two_commas_in_format_specifier(self):
@@ -1359,13 +1523,17 @@ def test_with_an_underscore_and_a_comma_in_format_specifier(self):
f'{1:_,}'
def test_syntax_error_for_starred_expressions(self):
- error_msg = re.escape("cannot use starred expression here")
- with self.assertRaisesRegex(SyntaxError, error_msg):
+ with self.assertRaisesRegex(SyntaxError, "can't use starred expression here"):
compile("f'{*a}'", "?", "exec")
- error_msg = re.escape("cannot use double starred expression here")
- with self.assertRaisesRegex(SyntaxError, error_msg):
+ with self.assertRaisesRegex(SyntaxError,
+ "f-string: expecting a valid expression after '{'"):
compile("f'{**a}'", "?", "exec")
+ def test_not_closing_quotes(self):
+ self.assertAllRaise(SyntaxError, "unterminated f-string literal", ['f"', "f'"])
+ self.assertAllRaise(SyntaxError, "unterminated triple-quoted f-string literal",
+ ['f"""', "f'''"])
+
if __name__ == '__main__':
unittest.main()
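
Taken together, the test_fstring changes above track the new f-string parsing in 3.12 (PEP 701): expressions may span multiple lines, contain backslash escapes, and carry insignificant whitespace after a conversion, while error messages now come from the main parser. A short sketch of forms the updated tests accept, mirroring the assertions above:

    print(f"{(
        1 +
        2
    )}")                                   # multi-line expression inside the braces -> '3'
    print(f'{"\N{LEFT CURLY BRACKET}"}')   # backslash escape in the expression part -> '{'
    print(f"{3!s }")                       # trailing whitespace after the conversion -> '3'
    print(f"{1:}}}")                       # spec ends at the matching brace -> '1}'
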
diff --git a/Lib/test/test_imp.py b/Lib/test/test_imp.py
index 03e3adba221e57..80abc720c3251a 100644
--- a/Lib/test/test_imp.py
+++ b/Lib/test/test_imp.py
@@ -1,5 +1,4 @@
import gc
-import json
import importlib
import importlib.util
import os
@@ -11,28 +10,15 @@
from test.support import os_helper
from test.support import script_helper
from test.support import warnings_helper
-import textwrap
-import types
import unittest
import warnings
imp = warnings_helper.import_deprecated('imp')
import _imp
-import _testinternalcapi
-try:
- import _xxsubinterpreters as _interpreters
-except ModuleNotFoundError:
- _interpreters = None
OS_PATH_NAME = os.path.__name__
-def requires_subinterpreters(meth):
- """Decorator to skip a test if subinterpreters are not supported."""
- return unittest.skipIf(_interpreters is None,
- 'subinterpreters required')(meth)
-
-
def requires_load_dynamic(meth):
"""Decorator to skip a test if not running under CPython or lacking
imp.load_dynamic()."""
@@ -41,169 +27,6 @@ def requires_load_dynamic(meth):
'imp.load_dynamic() required')(meth)
-class ModuleSnapshot(types.SimpleNamespace):
- """A representation of a module for testing.
-
- Fields:
-
- * id - the module's object ID
- * module - the actual module or an adequate substitute
- * __file__
- * __spec__
- * name
- * origin
- * ns - a copy (dict) of the module's __dict__ (or None)
- * ns_id - the object ID of the module's __dict__
- * cached - the sys.modules[mod.__spec__.name] entry (or None)
- * cached_id - the object ID of the sys.modules entry (or None)
-
- In cases where the value is not available (e.g. due to serialization),
- the value will be None.
- """
- _fields = tuple('id module ns ns_id cached cached_id'.split())
-
- @classmethod
- def from_module(cls, mod):
- name = mod.__spec__.name
- cached = sys.modules.get(name)
- return cls(
- id=id(mod),
- module=mod,
- ns=types.SimpleNamespace(**mod.__dict__),
- ns_id=id(mod.__dict__),
- cached=cached,
- cached_id=id(cached),
- )
-
- SCRIPT = textwrap.dedent('''
- {imports}
-
- name = {name!r}
-
- {prescript}
-
- mod = {name}
-
- {body}
-
- {postscript}
- ''')
- IMPORTS = textwrap.dedent('''
- import sys
- ''').strip()
- SCRIPT_BODY = textwrap.dedent('''
- # Capture the snapshot data.
- cached = sys.modules.get(name)
- snapshot = dict(
- id=id(mod),
- module=dict(
- __file__=mod.__file__,
- __spec__=dict(
- name=mod.__spec__.name,
- origin=mod.__spec__.origin,
- ),
- ),
- ns=None,
- ns_id=id(mod.__dict__),
- cached=None,
- cached_id=id(cached) if cached else None,
- )
- ''').strip()
- CLEANUP_SCRIPT = textwrap.dedent('''
- # Clean up the module.
- sys.modules.pop(name, None)
- ''').strip()
-
- @classmethod
- def build_script(cls, name, *,
- prescript=None,
- import_first=False,
- postscript=None,
- postcleanup=False,
- ):
- if postcleanup is True:
- postcleanup = cls.CLEANUP_SCRIPT
- elif isinstance(postcleanup, str):
- postcleanup = textwrap.dedent(postcleanup).strip()
- postcleanup = cls.CLEANUP_SCRIPT + os.linesep + postcleanup
- else:
- postcleanup = ''
- prescript = textwrap.dedent(prescript).strip() if prescript else ''
- postscript = textwrap.dedent(postscript).strip() if postscript else ''
-
- if postcleanup:
- if postscript:
- postscript = postscript + os.linesep * 2 + postcleanup
- else:
- postscript = postcleanup
-
- if import_first:
- prescript += textwrap.dedent(f'''
-
- # Now import the module.
- assert name not in sys.modules
- import {name}''')
-
- return cls.SCRIPT.format(
- imports=cls.IMPORTS.strip(),
- name=name,
- prescript=prescript.strip(),
- body=cls.SCRIPT_BODY.strip(),
- postscript=postscript,
- )
-
- @classmethod
- def parse(cls, text):
- raw = json.loads(text)
- mod = raw['module']
- mod['__spec__'] = types.SimpleNamespace(**mod['__spec__'])
- raw['module'] = types.SimpleNamespace(**mod)
- return cls(**raw)
-
- @classmethod
- def from_subinterp(cls, name, interpid=None, *, pipe=None, **script_kwds):
- if pipe is not None:
- return cls._from_subinterp(name, interpid, pipe, script_kwds)
- pipe = os.pipe()
- try:
- return cls._from_subinterp(name, interpid, pipe, script_kwds)
- finally:
- r, w = pipe
- os.close(r)
- os.close(w)
-
- @classmethod
- def _from_subinterp(cls, name, interpid, pipe, script_kwargs):
- r, w = pipe
-
- # Build the script.
- postscript = textwrap.dedent(f'''
- # Send the result over the pipe.
- import json
- import os
- os.write({w}, json.dumps(snapshot).encode())
-
- ''')
- _postscript = script_kwargs.get('postscript')
- if _postscript:
- _postscript = textwrap.dedent(_postscript).lstrip()
- postscript += _postscript
- script_kwargs['postscript'] = postscript.strip()
- script = cls.build_script(name, **script_kwargs)
-
- # Run the script.
- if interpid is None:
- ret = support.run_in_subinterp(script)
- if ret != 0:
- raise AssertionError(f'{ret} != 0')
- else:
- _interpreters.run_string(interpid, script)
-
- # Parse the results.
- text = os.read(r, 1000)
- return cls.parse(text.decode())
-
-
class LockTests(unittest.TestCase):
"""Very basic test of import lock functions."""
@@ -620,669 +443,6 @@ def check_get_builtins():
check_get_builtins()
-class TestSinglePhaseSnapshot(ModuleSnapshot):
-
- @classmethod
- def from_module(cls, mod):
- self = super().from_module(mod)
- self.summed = mod.sum(1, 2)
- self.lookedup = mod.look_up_self()
- self.lookedup_id = id(self.lookedup)
- self.state_initialized = mod.state_initialized()
- if hasattr(mod, 'initialized_count'):
- self.init_count = mod.initialized_count()
- return self
-
- SCRIPT_BODY = ModuleSnapshot.SCRIPT_BODY + textwrap.dedent(f'''
- snapshot['module'].update(dict(
- int_const=mod.int_const,
- str_const=mod.str_const,
- _module_initialized=mod._module_initialized,
- ))
- snapshot.update(dict(
- summed=mod.sum(1, 2),
- lookedup_id=id(mod.look_up_self()),
- state_initialized=mod.state_initialized(),
- init_count=mod.initialized_count(),
- has_spam=hasattr(mod, 'spam'),
- spam=getattr(mod, 'spam', None),
- ))
- ''').rstrip()
-
- @classmethod
- def parse(cls, text):
- self = super().parse(text)
- if not self.has_spam:
- del self.spam
- del self.has_spam
- return self
-
-
-@requires_load_dynamic
-class SinglephaseInitTests(unittest.TestCase):
-
- NAME = '_testsinglephase'
-
- @classmethod
- def setUpClass(cls):
- if '-R' in sys.argv or '--huntrleaks' in sys.argv:
- # https://github.com/python/cpython/issues/102251
- raise unittest.SkipTest('unresolved refleaks (see gh-102251)')
- fileobj, filename, _ = imp.find_module(cls.NAME)
- fileobj.close()
- cls.FILE = filename
-
- # Start fresh.
- cls.clean_up()
-
- def tearDown(self):
- # Clean up the module.
- self.clean_up()
-
- @classmethod
- def clean_up(cls):
- name = cls.NAME
- filename = cls.FILE
- if name in sys.modules:
- if hasattr(sys.modules[name], '_clear_globals'):
- assert sys.modules[name].__file__ == filename
- sys.modules[name]._clear_globals()
- del sys.modules[name]
- # Clear all internally cached data for the extension.
- _testinternalcapi.clear_extension(name, filename)
-
- #########################
- # helpers
-
- def add_module_cleanup(self, name):
- def clean_up():
- # Clear all internally cached data for the extension.
- _testinternalcapi.clear_extension(name, self.FILE)
- self.addCleanup(clean_up)
-
- def load(self, name):
- try:
- already_loaded = self.already_loaded
- except AttributeError:
- already_loaded = self.already_loaded = {}
- assert name not in already_loaded
- mod = imp.load_dynamic(name, self.FILE)
- self.assertNotIn(mod, already_loaded.values())
- already_loaded[name] = mod
- return types.SimpleNamespace(
- name=name,
- module=mod,
- snapshot=TestSinglePhaseSnapshot.from_module(mod),
- )
-
- def re_load(self, name, mod):
- assert sys.modules[name] is mod
- assert mod.__dict__ == mod.__dict__
- reloaded = imp.load_dynamic(name, self.FILE)
- return types.SimpleNamespace(
- name=name,
- module=reloaded,
- snapshot=TestSinglePhaseSnapshot.from_module(reloaded),
- )
-
- # subinterpreters
-
- def add_subinterpreter(self):
- interpid = _interpreters.create(isolated=False)
- _interpreters.run_string(interpid, textwrap.dedent('''
- import sys
- import _testinternalcapi
- '''))
- def clean_up():
- _interpreters.run_string(interpid, textwrap.dedent(f'''
- name = {self.NAME!r}
- if name in sys.modules:
- sys.modules[name]._clear_globals()
- _testinternalcapi.clear_extension(name, {self.FILE!r})
- '''))
- _interpreters.destroy(interpid)
- self.addCleanup(clean_up)
- return interpid
-
- def import_in_subinterp(self, interpid=None, *,
- postscript=None,
- postcleanup=False,
- ):
- name = self.NAME
-
- if postcleanup:
- import_ = 'import _testinternalcapi' if interpid is None else ''
- postcleanup = f'''
- {import_}
- mod._clear_globals()
- _testinternalcapi.clear_extension(name, {self.FILE!r})
- '''
-
- try:
- pipe = self._pipe
- except AttributeError:
- r, w = pipe = self._pipe = os.pipe()
- self.addCleanup(os.close, r)
- self.addCleanup(os.close, w)
-
- snapshot = TestSinglePhaseSnapshot.from_subinterp(
- name,
- interpid,
- pipe=pipe,
- import_first=True,
- postscript=postscript,
- postcleanup=postcleanup,
- )
-
- return types.SimpleNamespace(
- name=name,
- module=None,
- snapshot=snapshot,
- )
-
- # checks
-
- def check_common(self, loaded):
- isolated = False
-
- mod = loaded.module
- if not mod:
- # It came from a subinterpreter.
- isolated = True
- mod = loaded.snapshot.module
- # mod.__name__ might not match, but the spec will.
- self.assertEqual(mod.__spec__.name, loaded.name)
- self.assertEqual(mod.__file__, self.FILE)
- self.assertEqual(mod.__spec__.origin, self.FILE)
- if not isolated:
- self.assertTrue(issubclass(mod.error, Exception))
- self.assertEqual(mod.int_const, 1969)
- self.assertEqual(mod.str_const, 'something different')
- self.assertIsInstance(mod._module_initialized, float)
- self.assertGreater(mod._module_initialized, 0)
-
- snap = loaded.snapshot
- self.assertEqual(snap.summed, 3)
- if snap.state_initialized is not None:
- self.assertIsInstance(snap.state_initialized, float)
- self.assertGreater(snap.state_initialized, 0)
- if isolated:
- # The "looked up" module is interpreter-specific
- # (interp->imports.modules_by_index was set for the module).
- self.assertEqual(snap.lookedup_id, snap.id)
- self.assertEqual(snap.cached_id, snap.id)
- with self.assertRaises(AttributeError):
- snap.spam
- else:
- self.assertIs(snap.lookedup, mod)
- self.assertIs(snap.cached, mod)
-
- def check_direct(self, loaded):
- # The module has its own PyModuleDef, with a matching name.
- self.assertEqual(loaded.module.__name__, loaded.name)
- self.assertIs(loaded.snapshot.lookedup, loaded.module)
-
- def check_indirect(self, loaded, orig):
- # The module re-uses another's PyModuleDef, with a different name.
- assert orig is not loaded.module
- assert orig.__name__ != loaded.name
- self.assertNotEqual(loaded.module.__name__, loaded.name)
- self.assertIs(loaded.snapshot.lookedup, loaded.module)
-
- def check_basic(self, loaded, expected_init_count):
- # m_size == -1
- # The module loads fresh the first time and copies m_copy after.
- snap = loaded.snapshot
- self.assertIsNot(snap.state_initialized, None)
- self.assertIsInstance(snap.init_count, int)
- self.assertGreater(snap.init_count, 0)
- self.assertEqual(snap.init_count, expected_init_count)
-
- def check_with_reinit(self, loaded):
- # m_size >= 0
- # The module loads fresh every time.
- pass
-
- def check_fresh(self, loaded):
- """
- The module had not been loaded before (at least since fully reset).
- """
- snap = loaded.snapshot
- # The module's init func was run.
- # A copy of the module's __dict__ was stored in def->m_base.m_copy.
- # The previous m_copy was deleted first.
- # _PyRuntime.imports.extensions was set.
- self.assertEqual(snap.init_count, 1)
- # The global state was initialized.
- # The module attrs were initialized from that state.
- self.assertEqual(snap.module._module_initialized,
- snap.state_initialized)
-
- def check_semi_fresh(self, loaded, base, prev):
- """
- The module had been loaded before and then reset
- (but the module global state wasn't).
- """
- snap = loaded.snapshot
- # The module's init func was run again.
- # A copy of the module's __dict__ was stored in def->m_base.m_copy.
- # The previous m_copy was deleted first.
- # The module globals did not get reset.
- self.assertNotEqual(snap.id, base.snapshot.id)
- self.assertNotEqual(snap.id, prev.snapshot.id)
- self.assertEqual(snap.init_count, prev.snapshot.init_count + 1)
- # The global state was updated.
- # The module attrs were initialized from that state.
- self.assertEqual(snap.module._module_initialized,
- snap.state_initialized)
- self.assertNotEqual(snap.state_initialized,
- base.snapshot.state_initialized)
- self.assertNotEqual(snap.state_initialized,
- prev.snapshot.state_initialized)
-
- def check_copied(self, loaded, base):
- """
- The module had been loaded before and never reset.
- """
- snap = loaded.snapshot
- # The module's init func was not run again.
- # The interpreter copied m_copy, as set by the other interpreter,
- # with objects owned by the other interpreter.
- # The module globals did not get reset.
- self.assertNotEqual(snap.id, base.snapshot.id)
- self.assertEqual(snap.init_count, base.snapshot.init_count)
- # The global state was not updated since the init func did not run.
- # The module attrs were not directly initialized from that state.
- # The state and module attrs still match the previous loading.
- self.assertEqual(snap.module._module_initialized,
- snap.state_initialized)
- self.assertEqual(snap.state_initialized,
- base.snapshot.state_initialized)
-
- #########################
- # the tests
-
- def test_cleared_globals(self):
- loaded = self.load(self.NAME)
- _testsinglephase = loaded.module
- init_before = _testsinglephase.state_initialized()
-
- _testsinglephase._clear_globals()
- init_after = _testsinglephase.state_initialized()
- init_count = _testsinglephase.initialized_count()
-
- self.assertGreater(init_before, 0)
- self.assertEqual(init_after, 0)
- self.assertEqual(init_count, -1)
-
- def test_variants(self):
- # Exercise the most meaningful variants described in Python/import.c.
- self.maxDiff = None
-
- # Check the "basic" module.
-
- name = self.NAME
- expected_init_count = 1
- with self.subTest(name):
- loaded = self.load(name)
-
- self.check_common(loaded)
- self.check_direct(loaded)
- self.check_basic(loaded, expected_init_count)
- basic = loaded.module
-
- # Check its indirect variants.
-
- name = f'{self.NAME}_basic_wrapper'
- self.add_module_cleanup(name)
- expected_init_count += 1
- with self.subTest(name):
- loaded = self.load(name)
-
- self.check_common(loaded)
- self.check_indirect(loaded, basic)
- self.check_basic(loaded, expected_init_count)
-
- # Currently PyState_AddModule() always replaces the cached module.
- self.assertIs(basic.look_up_self(), loaded.module)
- self.assertEqual(basic.initialized_count(), expected_init_count)
-
- # The cached module shouldn't change after this point.
- basic_lookedup = loaded.module
-
- # Check its direct variant.
-
- name = f'{self.NAME}_basic_copy'
- self.add_module_cleanup(name)
- expected_init_count += 1
- with self.subTest(name):
- loaded = self.load(name)
-
- self.check_common(loaded)
- self.check_direct(loaded)
- self.check_basic(loaded, expected_init_count)
-
- # This should change the cached module for _testsinglephase.
- self.assertIs(basic.look_up_self(), basic_lookedup)
- self.assertEqual(basic.initialized_count(), expected_init_count)
-
- # Check the non-basic variant that has no state.
-
- name = f'{self.NAME}_with_reinit'
- self.add_module_cleanup(name)
- with self.subTest(name):
- loaded = self.load(name)
-
- self.check_common(loaded)
- self.assertIs(loaded.snapshot.state_initialized, None)
- self.check_direct(loaded)
- self.check_with_reinit(loaded)
-
- # This should change the cached module for _testsinglephase.
- self.assertIs(basic.look_up_self(), basic_lookedup)
- self.assertEqual(basic.initialized_count(), expected_init_count)
-
- # Check the basic variant that has state.
-
- name = f'{self.NAME}_with_state'
- self.add_module_cleanup(name)
- with self.subTest(name):
- loaded = self.load(name)
-
- self.check_common(loaded)
- self.assertIsNot(loaded.snapshot.state_initialized, None)
- self.check_direct(loaded)
- self.check_with_reinit(loaded)
-
- # This should change the cached module for _testsinglephase.
- self.assertIs(basic.look_up_self(), basic_lookedup)
- self.assertEqual(basic.initialized_count(), expected_init_count)
-
- def test_basic_reloaded(self):
- # m_copy is copied into the existing module object.
- # Global state is not changed.
- self.maxDiff = None
-
- for name in [
- self.NAME, # the "basic" module
- f'{self.NAME}_basic_wrapper', # the indirect variant
- f'{self.NAME}_basic_copy', # the direct variant
- ]:
- self.add_module_cleanup(name)
- with self.subTest(name):
- loaded = self.load(name)
- reloaded = self.re_load(name, loaded.module)
-
- self.check_common(loaded)
- self.check_common(reloaded)
-
- # Make sure the original __dict__ did not get replaced.
- self.assertEqual(id(loaded.module.__dict__),
- loaded.snapshot.ns_id)
- self.assertEqual(loaded.snapshot.ns.__dict__,
- loaded.module.__dict__)
-
- self.assertEqual(reloaded.module.__spec__.name, reloaded.name)
- self.assertEqual(reloaded.module.__name__,
- reloaded.snapshot.ns.__name__)
-
- self.assertIs(reloaded.module, loaded.module)
- self.assertIs(reloaded.module.__dict__, loaded.module.__dict__)
- # It only happens to be the same but that's good enough here.
- # We really just want to verify that the re-loaded attrs
- # didn't change.
- self.assertIs(reloaded.snapshot.lookedup,
- loaded.snapshot.lookedup)
- self.assertEqual(reloaded.snapshot.state_initialized,
- loaded.snapshot.state_initialized)
- self.assertEqual(reloaded.snapshot.init_count,
- loaded.snapshot.init_count)
-
- self.assertIs(reloaded.snapshot.cached, reloaded.module)
-
- def test_with_reinit_reloaded(self):
- # The module's m_init func is run again.
- self.maxDiff = None
-
- # Keep a reference around.
- basic = self.load(self.NAME)
-
- for name in [
- f'{self.NAME}_with_reinit', # m_size == 0
- f'{self.NAME}_with_state', # m_size > 0
- ]:
- self.add_module_cleanup(name)
- with self.subTest(name):
- loaded = self.load(name)
- reloaded = self.re_load(name, loaded.module)
-
- self.check_common(loaded)
- self.check_common(reloaded)
-
- # Make sure the original __dict__ did not get replaced.
- self.assertEqual(id(loaded.module.__dict__),
- loaded.snapshot.ns_id)
- self.assertEqual(loaded.snapshot.ns.__dict__,
- loaded.module.__dict__)
-
- self.assertEqual(reloaded.module.__spec__.name, reloaded.name)
- self.assertEqual(reloaded.module.__name__,
- reloaded.snapshot.ns.__name__)
-
- self.assertIsNot(reloaded.module, loaded.module)
- self.assertNotEqual(reloaded.module.__dict__,
- loaded.module.__dict__)
- self.assertIs(reloaded.snapshot.lookedup, reloaded.module)
- if loaded.snapshot.state_initialized is None:
- self.assertIs(reloaded.snapshot.state_initialized, None)
- else:
- self.assertGreater(reloaded.snapshot.state_initialized,
- loaded.snapshot.state_initialized)
-
- self.assertIs(reloaded.snapshot.cached, reloaded.module)
-
- # Currently, for every single-phrase init module loaded
- # in multiple interpreters, those interpreters share a
- # PyModuleDef for that object, which can be a problem.
- # Also, we test with a single-phase module that has global state,
- # which is shared by all interpreters.
-
- @requires_subinterpreters
- def test_basic_multiple_interpreters_main_no_reset(self):
- # without resetting; already loaded in main interpreter
-
- # At this point:
- # * alive in 0 interpreters
- # * module def may or may not be loaded already
- # * module def not in _PyRuntime.imports.extensions
- # * mod init func has not run yet (since reset, at least)
- # * m_copy not set (hasn't been loaded yet or already cleared)
- # * module's global state has not been initialized yet
- # (or already cleared)
-
- main_loaded = self.load(self.NAME)
- _testsinglephase = main_loaded.module
- # Attrs set after loading are not in m_copy.
- _testsinglephase.spam = 'spam, spam, spam, spam, eggs, and spam'
-
- self.check_common(main_loaded)
- self.check_fresh(main_loaded)
-
- interpid1 = self.add_subinterpreter()
- interpid2 = self.add_subinterpreter()
-
- # At this point:
- # * alive in 1 interpreter (main)
- # * module def in _PyRuntime.imports.extensions
- # * mod init func ran for the first time (since reset, at least)
- # * m_copy was copied from the main interpreter (was NULL)
- # * module's global state was initialized
-
- # Use an interpreter that gets destroyed right away.
- loaded = self.import_in_subinterp()
- self.check_common(loaded)
- self.check_copied(loaded, main_loaded)
-
- # At this point:
- # * alive in 1 interpreter (main)
- # * module def still in _PyRuntime.imports.extensions
- # * mod init func ran again
- # * m_copy is NULL (claered when the interpreter was destroyed)
- # (was from main interpreter)
- # * module's global state was updated, not reset
-
- # Use a subinterpreter that sticks around.
- loaded = self.import_in_subinterp(interpid1)
- self.check_common(loaded)
- self.check_copied(loaded, main_loaded)
-
- # At this point:
- # * alive in 2 interpreters (main, interp1)
- # * module def still in _PyRuntime.imports.extensions
- # * mod init func ran again
- # * m_copy was copied from interp1
- # * module's global state was updated, not reset
-
- # Use a subinterpreter while the previous one is still alive.
- loaded = self.import_in_subinterp(interpid2)
- self.check_common(loaded)
- self.check_copied(loaded, main_loaded)
-
- # At this point:
- # * alive in 3 interpreters (main, interp1, interp2)
- # * module def still in _PyRuntime.imports.extensions
- # * mod init func ran again
- # * m_copy was copied from interp2 (was from interp1)
- # * module's global state was updated, not reset
-
- @requires_subinterpreters
- def test_basic_multiple_interpreters_deleted_no_reset(self):
- # without resetting; already loaded in a deleted interpreter
-
- # At this point:
- # * alive in 0 interpreters
- # * module def may or may not be loaded already
- # * module def not in _PyRuntime.imports.extensions
- # * mod init func has not run yet (since reset, at least)
- # * m_copy not set (hasn't been loaded yet or already cleared)
- # * module's global state has not been initialized yet
- # (or already cleared)
-
- interpid1 = self.add_subinterpreter()
- interpid2 = self.add_subinterpreter()
-
- # First, load in the main interpreter but then completely clear it.
- loaded_main = self.load(self.NAME)
- loaded_main.module._clear_globals()
- _testinternalcapi.clear_extension(self.NAME, self.FILE)
-
- # At this point:
- # * alive in 0 interpreters
- # * module def loaded already
- # * module def was in _PyRuntime.imports.extensions, but cleared
- # * mod init func ran for the first time (since reset, at least)
- # * m_copy was set, but cleared (was NULL)
- # * module's global state was initialized but cleared
-
- # Start with an interpreter that gets destroyed right away.
- base = self.import_in_subinterp(postscript='''
- # Attrs set after loading are not in m_copy.
- mod.spam = 'spam, spam, mash, spam, eggs, and spam'
- ''')
- self.check_common(base)
- self.check_fresh(base)
-
- # At this point:
- # * alive in 0 interpreters
- # * module def in _PyRuntime.imports.extensions
- # * mod init func ran again
- # * m_copy is NULL (claered when the interpreter was destroyed)
- # * module's global state was initialized, not reset
-
- # Use a subinterpreter that sticks around.
- loaded_interp1 = self.import_in_subinterp(interpid1)
- self.check_common(loaded_interp1)
- self.check_semi_fresh(loaded_interp1, loaded_main, base)
-
- # At this point:
- # * alive in 1 interpreter (interp1)
- # * module def still in _PyRuntime.imports.extensions
- # * mod init func ran again
- # * m_copy was copied from interp1 (was NULL)
- # * module's global state was updated, not reset
-
- # Use a subinterpreter while the previous one is still alive.
- loaded_interp2 = self.import_in_subinterp(interpid2)
- self.check_common(loaded_interp2)
- self.check_copied(loaded_interp2, loaded_interp1)
-
- # At this point:
- # * alive in 2 interpreters (interp1, interp2)
- # * module def still in _PyRuntime.imports.extensions
- # * mod init func ran again
- # * m_copy was copied from interp2 (was from interp1)
- # * module's global state was updated, not reset
-
- @requires_subinterpreters
- @requires_load_dynamic
- def test_basic_multiple_interpreters_reset_each(self):
- # resetting between each interpreter
-
- # At this point:
- # * alive in 0 interpreters
- # * module def may or may not be loaded already
- # * module def not in _PyRuntime.imports.extensions
- # * mod init func has not run yet (since reset, at least)
- # * m_copy not set (hasn't been loaded yet or already cleared)
- # * module's global state has not been initialized yet
- # (or already cleared)
-
- interpid1 = self.add_subinterpreter()
- interpid2 = self.add_subinterpreter()
-
- # Use an interpreter that gets destroyed right away.
- loaded = self.import_in_subinterp(
- postscript='''
- # Attrs set after loading are not in m_copy.
- mod.spam = 'spam, spam, mash, spam, eggs, and spam'
- ''',
- postcleanup=True,
- )
- self.check_common(loaded)
- self.check_fresh(loaded)
-
- # At this point:
- # * alive in 0 interpreters
- # * module def in _PyRuntime.imports.extensions
- # * mod init func ran for the first time (since reset, at least)
- # * m_copy is NULL (claered when the interpreter was destroyed)
- # * module's global state was initialized, not reset
-
- # Use a subinterpreter that sticks around.
- loaded = self.import_in_subinterp(interpid1, postcleanup=True)
- self.check_common(loaded)
- self.check_fresh(loaded)
-
- # At this point:
- # * alive in 1 interpreter (interp1)
- # * module def still in _PyRuntime.imports.extensions
- # * mod init func ran again
- # * m_copy was copied from interp1 (was NULL)
- # * module's global state was initialized, not reset
-
- # Use a subinterpreter while the previous one is still alive.
- loaded = self.import_in_subinterp(interpid2, postcleanup=True)
- self.check_common(loaded)
- self.check_fresh(loaded)
-
- # At this point:
- # * alive in 2 interpreters (interp2, interp2)
- # * module def still in _PyRuntime.imports.extensions
- # * mod init func ran again
- # * m_copy was copied from interp2 (was from interp1)
- # * module's global state was initialized, not reset
-
-
class ReloadTests(unittest.TestCase):
"""Very basic tests to make sure that imp.reload() operates just like
diff --git a/Lib/test/test_import/__init__.py b/Lib/test/test_import/__init__.py
index 3ef07203c46c7e..66ae554f984fa2 100644
--- a/Lib/test/test_import/__init__.py
+++ b/Lib/test/test_import/__init__.py
@@ -2,6 +2,7 @@
import contextlib
import errno
import glob
+import json
import importlib.util
from importlib._bootstrap_external import _get_sourcefile
from importlib.machinery import (
@@ -18,13 +19,15 @@
import textwrap
import threading
import time
+import types
import unittest
from unittest import mock
+import _testinternalcapi
from test.support import os_helper
from test.support import (
STDLIB_DIR, swap_attr, swap_item, cpython_only, is_emscripten,
- is_wasi, run_in_subinterp_with_config)
+ is_wasi, run_in_subinterp, run_in_subinterp_with_config)
from test.support.import_helper import (
forget, make_legacy_pyc, unlink, unload, DirsOnSysPath, CleanImport)
from test.support.os_helper import (
@@ -41,6 +44,10 @@
import _testmultiphase
except ImportError:
_testmultiphase = None
+try:
+ import _xxsubinterpreters as _interpreters
+except ModuleNotFoundError:
+ _interpreters = None
skip_if_dont_write_bytecode = unittest.skipIf(
@@ -120,6 +127,182 @@ def _ready_to_import(name=None, source=""):
del sys.modules[name]
+def requires_subinterpreters(meth):
+ """Decorator to skip a test if subinterpreters are not supported."""
+ return unittest.skipIf(_interpreters is None,
+ 'subinterpreters required')(meth)
+
+
+def requires_singlephase_init(meth):
+ """Decorator to skip if single-phase init modules are not supported."""
+ meth = cpython_only(meth)
+ return unittest.skipIf(_testsinglephase is None,
+ 'test requires _testsinglephase module')(meth)
+
+
+class ModuleSnapshot(types.SimpleNamespace):
+ """A representation of a module for testing.
+
+ Fields:
+
+ * id - the module's object ID
+ * module - the actual module or an adequate substitute
+ * __file__
+ * __spec__
+ * name
+ * origin
+ * ns - a copy (dict) of the module's __dict__ (or None)
+ * ns_id - the object ID of the module's __dict__
+ * cached - the sys.modules[mod.__spec__.name] entry (or None)
+ * cached_id - the object ID of the sys.modules entry (or None)
+
+ In cases where the value is not available (e.g. due to serialization),
+ the value will be None.
+ """
+ _fields = tuple('id module ns ns_id cached cached_id'.split())
+
+ @classmethod
+ def from_module(cls, mod):
+ name = mod.__spec__.name
+ cached = sys.modules.get(name)
+ return cls(
+ id=id(mod),
+ module=mod,
+ ns=types.SimpleNamespace(**mod.__dict__),
+ ns_id=id(mod.__dict__),
+ cached=cached,
+ cached_id=id(cached),
+ )
+
+ SCRIPT = textwrap.dedent('''
+ {imports}
+
+ name = {name!r}
+
+ {prescript}
+
+ mod = {name}
+
+ {body}
+
+ {postscript}
+ ''')
+ IMPORTS = textwrap.dedent('''
+ import sys
+ ''').strip()
+ SCRIPT_BODY = textwrap.dedent('''
+ # Capture the snapshot data.
+ cached = sys.modules.get(name)
+ snapshot = dict(
+ id=id(mod),
+ module=dict(
+ __file__=mod.__file__,
+ __spec__=dict(
+ name=mod.__spec__.name,
+ origin=mod.__spec__.origin,
+ ),
+ ),
+ ns=None,
+ ns_id=id(mod.__dict__),
+ cached=None,
+ cached_id=id(cached) if cached else None,
+ )
+ ''').strip()
+ CLEANUP_SCRIPT = textwrap.dedent('''
+ # Clean up the module.
+ sys.modules.pop(name, None)
+ ''').strip()
+
+ @classmethod
+ def build_script(cls, name, *,
+ prescript=None,
+ import_first=False,
+ postscript=None,
+ postcleanup=False,
+ ):
+ if postcleanup is True:
+ postcleanup = cls.CLEANUP_SCRIPT
+ elif isinstance(postcleanup, str):
+ postcleanup = textwrap.dedent(postcleanup).strip()
+ postcleanup = cls.CLEANUP_SCRIPT + os.linesep + postcleanup
+ else:
+ postcleanup = ''
+ prescript = textwrap.dedent(prescript).strip() if prescript else ''
+ postscript = textwrap.dedent(postscript).strip() if postscript else ''
+
+ if postcleanup:
+ if postscript:
+ postscript = postscript + os.linesep * 2 + postcleanup
+ else:
+ postscript = postcleanup
+
+ if import_first:
+ prescript += textwrap.dedent(f'''
+
+ # Now import the module.
+ assert name not in sys.modules
+ import {name}''')
+
+ return cls.SCRIPT.format(
+ imports=cls.IMPORTS.strip(),
+ name=name,
+ prescript=prescript.strip(),
+ body=cls.SCRIPT_BODY.strip(),
+ postscript=postscript,
+ )
+
+ @classmethod
+ def parse(cls, text):
+ raw = json.loads(text)
+ mod = raw['module']
+ mod['__spec__'] = types.SimpleNamespace(**mod['__spec__'])
+ raw['module'] = types.SimpleNamespace(**mod)
+ return cls(**raw)
+
+ @classmethod
+ def from_subinterp(cls, name, interpid=None, *, pipe=None, **script_kwds):
+ if pipe is not None:
+ return cls._from_subinterp(name, interpid, pipe, script_kwds)
+ pipe = os.pipe()
+ try:
+ return cls._from_subinterp(name, interpid, pipe, script_kwds)
+ finally:
+ r, w = pipe
+ os.close(r)
+ os.close(w)
+
+ @classmethod
+ def _from_subinterp(cls, name, interpid, pipe, script_kwargs):
+ r, w = pipe
+
+ # Build the script.
+ postscript = textwrap.dedent(f'''
+ # Send the result over the pipe.
+ import json
+ import os
+ os.write({w}, json.dumps(snapshot).encode())
+
+ ''')
+ _postscript = script_kwargs.get('postscript')
+ if _postscript:
+ _postscript = textwrap.dedent(_postscript).lstrip()
+ postscript += _postscript
+ script_kwargs['postscript'] = postscript.strip()
+ script = cls.build_script(name, **script_kwargs)
+
+ # Run the script.
+ if interpid is None:
+ ret = run_in_subinterp(script)
+ if ret != 0:
+ raise AssertionError(f'{ret} != 0')
+ else:
+ _interpreters.run_string(interpid, script)
+
+ # Parse the results.
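+        # (The snapshot JSON is assumed to fit in a single 1000-byte read.)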
+ text = os.read(r, 1000)
+ return cls.parse(text.decode())
+
+
class ImportTests(unittest.TestCase):
def setUp(self):
@@ -1604,7 +1787,7 @@ def test_frozen_compat(self):
with self.subTest(f'{module}: strict, not fresh'):
self.check_compatible_here(module, strict=True)
- @unittest.skipIf(_testsinglephase is None, "test requires _testsinglephase module")
+ @requires_singlephase_init
def test_single_init_extension_compat(self):
module = '_testsinglephase'
require_extension(module)
@@ -1636,7 +1819,7 @@ def test_python_compat(self):
with self.subTest(f'{module}: strict, fresh'):
self.check_compatible_fresh(module, strict=True)
- @unittest.skipIf(_testsinglephase is None, "test requires _testsinglephase module")
+ @requires_singlephase_init
def test_singlephase_check_with_setting_and_override(self):
module = '_testsinglephase'
require_extension(module)
@@ -1672,6 +1855,685 @@ def check_incompatible(setting, override):
check_compatible(False, -1)
+class TestSinglePhaseSnapshot(ModuleSnapshot):
+
+ @classmethod
+ def from_module(cls, mod):
+ self = super().from_module(mod)
+ self.summed = mod.sum(1, 2)
+ self.lookedup = mod.look_up_self()
+ self.lookedup_id = id(self.lookedup)
+ self.state_initialized = mod.state_initialized()
+ if hasattr(mod, 'initialized_count'):
+ self.init_count = mod.initialized_count()
+ return self
+
+ SCRIPT_BODY = ModuleSnapshot.SCRIPT_BODY + textwrap.dedent(f'''
+ snapshot['module'].update(dict(
+ int_const=mod.int_const,
+ str_const=mod.str_const,
+ _module_initialized=mod._module_initialized,
+ ))
+ snapshot.update(dict(
+ summed=mod.sum(1, 2),
+ lookedup_id=id(mod.look_up_self()),
+ state_initialized=mod.state_initialized(),
+ init_count=mod.initialized_count(),
+ has_spam=hasattr(mod, 'spam'),
+ spam=getattr(mod, 'spam', None),
+ ))
+ ''').rstrip()
+
+ @classmethod
+ def parse(cls, text):
+ self = super().parse(text)
+ if not self.has_spam:
+ del self.spam
+ del self.has_spam
+ return self
+
+
+@requires_singlephase_init
+class SinglephaseInitTests(unittest.TestCase):
+
+ NAME = '_testsinglephase'
+
+ @classmethod
+ def setUpClass(cls):
+ if '-R' in sys.argv or '--huntrleaks' in sys.argv:
+ # https://github.com/python/cpython/issues/102251
+ raise unittest.SkipTest('unresolved refleaks (see gh-102251)')
+
+ spec = importlib.util.find_spec(cls.NAME)
+ from importlib.machinery import ExtensionFileLoader
+ cls.FILE = spec.origin
+ cls.LOADER = type(spec.loader)
+ assert cls.LOADER is ExtensionFileLoader
+
+ # Start fresh.
+ cls.clean_up()
+
+ def tearDown(self):
+ # Clean up the module.
+ self.clean_up()
+
+ @classmethod
+ def clean_up(cls):
+ name = cls.NAME
+ filename = cls.FILE
+ if name in sys.modules:
+ if hasattr(sys.modules[name], '_clear_globals'):
+ assert sys.modules[name].__file__ == filename
+ sys.modules[name]._clear_globals()
+ del sys.modules[name]
+ # Clear all internally cached data for the extension.
+ _testinternalcapi.clear_extension(name, filename)
+
+ #########################
+ # helpers
+
+ def add_module_cleanup(self, name):
+ def clean_up():
+ # Clear all internally cached data for the extension.
+ _testinternalcapi.clear_extension(name, self.FILE)
+ self.addCleanup(clean_up)
+
+ def _load_dynamic(self, name, path):
+ """
+ Load an extension module.
+ """
+ # This is essentially copied from the old imp module.
+ from importlib._bootstrap import _load
+ loader = self.LOADER(name, path)
+
+ # Issue bpo-24748: Skip the sys.modules check in _load_module_shim;
+ # always load new extension.
+ spec = importlib.util.spec_from_file_location(name, path,
+ loader=loader)
+ return _load(spec)
+
+ def load(self, name):
+ try:
+ already_loaded = self.already_loaded
+ except AttributeError:
+ already_loaded = self.already_loaded = {}
+ assert name not in already_loaded
+ mod = self._load_dynamic(name, self.FILE)
+ self.assertNotIn(mod, already_loaded.values())
+ already_loaded[name] = mod
+ return types.SimpleNamespace(
+ name=name,
+ module=mod,
+ snapshot=TestSinglePhaseSnapshot.from_module(mod),
+ )
+
+ def re_load(self, name, mod):
+ assert sys.modules[name] is mod
+ assert mod.__dict__ == mod.__dict__
+ reloaded = self._load_dynamic(name, self.FILE)
+ return types.SimpleNamespace(
+ name=name,
+ module=reloaded,
+ snapshot=TestSinglePhaseSnapshot.from_module(reloaded),
+ )
+
+ # subinterpreters
+
+ def add_subinterpreter(self):
+ interpid = _interpreters.create(isolated=False)
+ _interpreters.run_string(interpid, textwrap.dedent('''
+ import sys
+ import _testinternalcapi
+ '''))
+ def clean_up():
+ _interpreters.run_string(interpid, textwrap.dedent(f'''
+ name = {self.NAME!r}
+ if name in sys.modules:
+ sys.modules[name]._clear_globals()
+ _testinternalcapi.clear_extension(name, {self.FILE!r})
+ '''))
+ _interpreters.destroy(interpid)
+ self.addCleanup(clean_up)
+ return interpid
+
+ def import_in_subinterp(self, interpid=None, *,
+ postscript=None,
+ postcleanup=False,
+ ):
+ name = self.NAME
+
+ if postcleanup:
+ import_ = 'import _testinternalcapi' if interpid is None else ''
+ postcleanup = f'''
+ {import_}
+ mod._clear_globals()
+ _testinternalcapi.clear_extension(name, {self.FILE!r})
+ '''
+
+ try:
+ pipe = self._pipe
+ except AttributeError:
+ r, w = pipe = self._pipe = os.pipe()
+ self.addCleanup(os.close, r)
+ self.addCleanup(os.close, w)
+
+ snapshot = TestSinglePhaseSnapshot.from_subinterp(
+ name,
+ interpid,
+ pipe=pipe,
+ import_first=True,
+ postscript=postscript,
+ postcleanup=postcleanup,
+ )
+
+ return types.SimpleNamespace(
+ name=name,
+ module=None,
+ snapshot=snapshot,
+ )
+
+ # checks
+
+ def check_common(self, loaded):
+ isolated = False
+
+ mod = loaded.module
+ if not mod:
+ # It came from a subinterpreter.
+ isolated = True
+ mod = loaded.snapshot.module
+ # mod.__name__ might not match, but the spec will.
+ self.assertEqual(mod.__spec__.name, loaded.name)
+ self.assertEqual(mod.__file__, self.FILE)
+ self.assertEqual(mod.__spec__.origin, self.FILE)
+ if not isolated:
+ self.assertTrue(issubclass(mod.error, Exception))
+ self.assertEqual(mod.int_const, 1969)
+ self.assertEqual(mod.str_const, 'something different')
+ self.assertIsInstance(mod._module_initialized, float)
+ self.assertGreater(mod._module_initialized, 0)
+
+ snap = loaded.snapshot
+ self.assertEqual(snap.summed, 3)
+ if snap.state_initialized is not None:
+ self.assertIsInstance(snap.state_initialized, float)
+ self.assertGreater(snap.state_initialized, 0)
+ if isolated:
+ # The "looked up" module is interpreter-specific
+ # (interp->imports.modules_by_index was set for the module).
+ self.assertEqual(snap.lookedup_id, snap.id)
+ self.assertEqual(snap.cached_id, snap.id)
+ with self.assertRaises(AttributeError):
+ snap.spam
+ else:
+ self.assertIs(snap.lookedup, mod)
+ self.assertIs(snap.cached, mod)
+
+ def check_direct(self, loaded):
+ # The module has its own PyModuleDef, with a matching name.
+ self.assertEqual(loaded.module.__name__, loaded.name)
+ self.assertIs(loaded.snapshot.lookedup, loaded.module)
+
+ def check_indirect(self, loaded, orig):
+ # The module re-uses another's PyModuleDef, with a different name.
+ assert orig is not loaded.module
+ assert orig.__name__ != loaded.name
+ self.assertNotEqual(loaded.module.__name__, loaded.name)
+ self.assertIs(loaded.snapshot.lookedup, loaded.module)
+
+ def check_basic(self, loaded, expected_init_count):
+ # m_size == -1
+ # The module loads fresh the first time and copies m_copy after.
+ snap = loaded.snapshot
+ self.assertIsNot(snap.state_initialized, None)
+ self.assertIsInstance(snap.init_count, int)
+ self.assertGreater(snap.init_count, 0)
+ self.assertEqual(snap.init_count, expected_init_count)
+
+ def check_with_reinit(self, loaded):
+ # m_size >= 0
+ # The module loads fresh every time.
+ pass
+
+ def check_fresh(self, loaded):
+ """
+ The module had not been loaded before (at least since fully reset).
+ """
+ snap = loaded.snapshot
+ # The module's init func was run.
+ # A copy of the module's __dict__ was stored in def->m_base.m_copy.
+ # The previous m_copy was deleted first.
+ # _PyRuntime.imports.extensions was set.
+ self.assertEqual(snap.init_count, 1)
+ # The global state was initialized.
+ # The module attrs were initialized from that state.
+ self.assertEqual(snap.module._module_initialized,
+ snap.state_initialized)
+
+ def check_semi_fresh(self, loaded, base, prev):
+ """
+ The module had been loaded before and then reset
+ (but the module global state wasn't).
+ """
+ snap = loaded.snapshot
+ # The module's init func was run again.
+ # A copy of the module's __dict__ was stored in def->m_base.m_copy.
+ # The previous m_copy was deleted first.
+ # The module globals did not get reset.
+ self.assertNotEqual(snap.id, base.snapshot.id)
+ self.assertNotEqual(snap.id, prev.snapshot.id)
+ self.assertEqual(snap.init_count, prev.snapshot.init_count + 1)
+ # The global state was updated.
+ # The module attrs were initialized from that state.
+ self.assertEqual(snap.module._module_initialized,
+ snap.state_initialized)
+ self.assertNotEqual(snap.state_initialized,
+ base.snapshot.state_initialized)
+ self.assertNotEqual(snap.state_initialized,
+ prev.snapshot.state_initialized)
+
+ def check_copied(self, loaded, base):
+ """
+ The module had been loaded before and never reset.
+ """
+ snap = loaded.snapshot
+ # The module's init func was not run again.
+ # The interpreter copied m_copy, as set by the other interpreter,
+ # with objects owned by the other interpreter.
+ # The module globals did not get reset.
+ self.assertNotEqual(snap.id, base.snapshot.id)
+ self.assertEqual(snap.init_count, base.snapshot.init_count)
+ # The global state was not updated since the init func did not run.
+ # The module attrs were not directly initialized from that state.
+ # The state and module attrs still match the previous loading.
+ self.assertEqual(snap.module._module_initialized,
+ snap.state_initialized)
+ self.assertEqual(snap.state_initialized,
+ base.snapshot.state_initialized)
+
+ #########################
+ # the tests
+
+ def test_cleared_globals(self):
+ loaded = self.load(self.NAME)
+ _testsinglephase = loaded.module
+ init_before = _testsinglephase.state_initialized()
+
+ _testsinglephase._clear_globals()
+ init_after = _testsinglephase.state_initialized()
+ init_count = _testsinglephase.initialized_count()
+
+ self.assertGreater(init_before, 0)
+ self.assertEqual(init_after, 0)
+ self.assertEqual(init_count, -1)
+
+ def test_variants(self):
+ # Exercise the most meaningful variants described in Python/import.c.
+ self.maxDiff = None
+
+ # Check the "basic" module.
+
+ name = self.NAME
+ expected_init_count = 1
+ with self.subTest(name):
+ loaded = self.load(name)
+
+ self.check_common(loaded)
+ self.check_direct(loaded)
+ self.check_basic(loaded, expected_init_count)
+ basic = loaded.module
+
+ # Check its indirect variants.
+
+ name = f'{self.NAME}_basic_wrapper'
+ self.add_module_cleanup(name)
+ expected_init_count += 1
+ with self.subTest(name):
+ loaded = self.load(name)
+
+ self.check_common(loaded)
+ self.check_indirect(loaded, basic)
+ self.check_basic(loaded, expected_init_count)
+
+ # Currently PyState_AddModule() always replaces the cached module.
+ self.assertIs(basic.look_up_self(), loaded.module)
+ self.assertEqual(basic.initialized_count(), expected_init_count)
+
+ # The cached module shouldn't change after this point.
+ basic_lookedup = loaded.module
+
+ # Check its direct variant.
+
+ name = f'{self.NAME}_basic_copy'
+ self.add_module_cleanup(name)
+ expected_init_count += 1
+ with self.subTest(name):
+ loaded = self.load(name)
+
+ self.check_common(loaded)
+ self.check_direct(loaded)
+ self.check_basic(loaded, expected_init_count)
+
+            # This should not change the cached module for _testsinglephase.
+ self.assertIs(basic.look_up_self(), basic_lookedup)
+ self.assertEqual(basic.initialized_count(), expected_init_count)
+
+ # Check the non-basic variant that has no state.
+
+ name = f'{self.NAME}_with_reinit'
+ self.add_module_cleanup(name)
+ with self.subTest(name):
+ loaded = self.load(name)
+
+ self.check_common(loaded)
+ self.assertIs(loaded.snapshot.state_initialized, None)
+ self.check_direct(loaded)
+ self.check_with_reinit(loaded)
+
+            # This should not change the cached module for _testsinglephase.
+ self.assertIs(basic.look_up_self(), basic_lookedup)
+ self.assertEqual(basic.initialized_count(), expected_init_count)
+
+ # Check the basic variant that has state.
+
+ name = f'{self.NAME}_with_state'
+ self.add_module_cleanup(name)
+ with self.subTest(name):
+ loaded = self.load(name)
+
+ self.check_common(loaded)
+ self.assertIsNot(loaded.snapshot.state_initialized, None)
+ self.check_direct(loaded)
+ self.check_with_reinit(loaded)
+
+            # This should not change the cached module for _testsinglephase.
+ self.assertIs(basic.look_up_self(), basic_lookedup)
+ self.assertEqual(basic.initialized_count(), expected_init_count)
+
+ def test_basic_reloaded(self):
+ # m_copy is copied into the existing module object.
+ # Global state is not changed.
+ self.maxDiff = None
+
+ for name in [
+ self.NAME, # the "basic" module
+ f'{self.NAME}_basic_wrapper', # the indirect variant
+ f'{self.NAME}_basic_copy', # the direct variant
+ ]:
+ self.add_module_cleanup(name)
+ with self.subTest(name):
+ loaded = self.load(name)
+ reloaded = self.re_load(name, loaded.module)
+
+ self.check_common(loaded)
+ self.check_common(reloaded)
+
+ # Make sure the original __dict__ did not get replaced.
+ self.assertEqual(id(loaded.module.__dict__),
+ loaded.snapshot.ns_id)
+ self.assertEqual(loaded.snapshot.ns.__dict__,
+ loaded.module.__dict__)
+
+ self.assertEqual(reloaded.module.__spec__.name, reloaded.name)
+ self.assertEqual(reloaded.module.__name__,
+ reloaded.snapshot.ns.__name__)
+
+ self.assertIs(reloaded.module, loaded.module)
+ self.assertIs(reloaded.module.__dict__, loaded.module.__dict__)
+ # It only happens to be the same but that's good enough here.
+ # We really just want to verify that the re-loaded attrs
+ # didn't change.
+ self.assertIs(reloaded.snapshot.lookedup,
+ loaded.snapshot.lookedup)
+ self.assertEqual(reloaded.snapshot.state_initialized,
+ loaded.snapshot.state_initialized)
+ self.assertEqual(reloaded.snapshot.init_count,
+ loaded.snapshot.init_count)
+
+ self.assertIs(reloaded.snapshot.cached, reloaded.module)
+
+ def test_with_reinit_reloaded(self):
+ # The module's m_init func is run again.
+ self.maxDiff = None
+
+ # Keep a reference around.
+ basic = self.load(self.NAME)
+
+ for name in [
+ f'{self.NAME}_with_reinit', # m_size == 0
+ f'{self.NAME}_with_state', # m_size > 0
+ ]:
+ self.add_module_cleanup(name)
+ with self.subTest(name):
+ loaded = self.load(name)
+ reloaded = self.re_load(name, loaded.module)
+
+ self.check_common(loaded)
+ self.check_common(reloaded)
+
+ # Make sure the original __dict__ did not get replaced.
+ self.assertEqual(id(loaded.module.__dict__),
+ loaded.snapshot.ns_id)
+ self.assertEqual(loaded.snapshot.ns.__dict__,
+ loaded.module.__dict__)
+
+ self.assertEqual(reloaded.module.__spec__.name, reloaded.name)
+ self.assertEqual(reloaded.module.__name__,
+ reloaded.snapshot.ns.__name__)
+
+ self.assertIsNot(reloaded.module, loaded.module)
+ self.assertNotEqual(reloaded.module.__dict__,
+ loaded.module.__dict__)
+ self.assertIs(reloaded.snapshot.lookedup, reloaded.module)
+ if loaded.snapshot.state_initialized is None:
+ self.assertIs(reloaded.snapshot.state_initialized, None)
+ else:
+ self.assertGreater(reloaded.snapshot.state_initialized,
+ loaded.snapshot.state_initialized)
+
+ self.assertIs(reloaded.snapshot.cached, reloaded.module)
+
+    # Currently, for every single-phase init module loaded
+ # in multiple interpreters, those interpreters share a
+ # PyModuleDef for that object, which can be a problem.
+ # Also, we test with a single-phase module that has global state,
+ # which is shared by all interpreters.
+
+ @requires_subinterpreters
+ def test_basic_multiple_interpreters_main_no_reset(self):
+ # without resetting; already loaded in main interpreter
+
+ # At this point:
+ # * alive in 0 interpreters
+ # * module def may or may not be loaded already
+ # * module def not in _PyRuntime.imports.extensions
+ # * mod init func has not run yet (since reset, at least)
+ # * m_copy not set (hasn't been loaded yet or already cleared)
+ # * module's global state has not been initialized yet
+ # (or already cleared)
+
+ main_loaded = self.load(self.NAME)
+ _testsinglephase = main_loaded.module
+ # Attrs set after loading are not in m_copy.
+ _testsinglephase.spam = 'spam, spam, spam, spam, eggs, and spam'
+
+ self.check_common(main_loaded)
+ self.check_fresh(main_loaded)
+
+ interpid1 = self.add_subinterpreter()
+ interpid2 = self.add_subinterpreter()
+
+ # At this point:
+ # * alive in 1 interpreter (main)
+ # * module def in _PyRuntime.imports.extensions
+ # * mod init func ran for the first time (since reset, at least)
+ # * m_copy was copied from the main interpreter (was NULL)
+ # * module's global state was initialized
+
+ # Use an interpreter that gets destroyed right away.
+ loaded = self.import_in_subinterp()
+ self.check_common(loaded)
+ self.check_copied(loaded, main_loaded)
+
+ # At this point:
+ # * alive in 1 interpreter (main)
+ # * module def still in _PyRuntime.imports.extensions
+ # * mod init func ran again
+        # * m_copy is NULL (cleared when the interpreter was destroyed)
+ # (was from main interpreter)
+ # * module's global state was updated, not reset
+
+ # Use a subinterpreter that sticks around.
+ loaded = self.import_in_subinterp(interpid1)
+ self.check_common(loaded)
+ self.check_copied(loaded, main_loaded)
+
+ # At this point:
+ # * alive in 2 interpreters (main, interp1)
+ # * module def still in _PyRuntime.imports.extensions
+ # * mod init func ran again
+ # * m_copy was copied from interp1
+ # * module's global state was updated, not reset
+
+ # Use a subinterpreter while the previous one is still alive.
+ loaded = self.import_in_subinterp(interpid2)
+ self.check_common(loaded)
+ self.check_copied(loaded, main_loaded)
+
+ # At this point:
+ # * alive in 3 interpreters (main, interp1, interp2)
+ # * module def still in _PyRuntime.imports.extensions
+ # * mod init func ran again
+ # * m_copy was copied from interp2 (was from interp1)
+ # * module's global state was updated, not reset
+
+ @requires_subinterpreters
+ def test_basic_multiple_interpreters_deleted_no_reset(self):
+ # without resetting; already loaded in a deleted interpreter
+
+ # At this point:
+ # * alive in 0 interpreters
+ # * module def may or may not be loaded already
+ # * module def not in _PyRuntime.imports.extensions
+ # * mod init func has not run yet (since reset, at least)
+ # * m_copy not set (hasn't been loaded yet or already cleared)
+ # * module's global state has not been initialized yet
+ # (or already cleared)
+
+ interpid1 = self.add_subinterpreter()
+ interpid2 = self.add_subinterpreter()
+
+ # First, load in the main interpreter but then completely clear it.
+ loaded_main = self.load(self.NAME)
+ loaded_main.module._clear_globals()
+ _testinternalcapi.clear_extension(self.NAME, self.FILE)
+
+ # At this point:
+ # * alive in 0 interpreters
+ # * module def loaded already
+ # * module def was in _PyRuntime.imports.extensions, but cleared
+ # * mod init func ran for the first time (since reset, at least)
+ # * m_copy was set, but cleared (was NULL)
+ # * module's global state was initialized but cleared
+
+ # Start with an interpreter that gets destroyed right away.
+ base = self.import_in_subinterp(postscript='''
+ # Attrs set after loading are not in m_copy.
+ mod.spam = 'spam, spam, mash, spam, eggs, and spam'
+ ''')
+ self.check_common(base)
+ self.check_fresh(base)
+
+ # At this point:
+ # * alive in 0 interpreters
+ # * module def in _PyRuntime.imports.extensions
+ # * mod init func ran again
+        # * m_copy is NULL (cleared when the interpreter was destroyed)
+ # * module's global state was initialized, not reset
+
+ # Use a subinterpreter that sticks around.
+ loaded_interp1 = self.import_in_subinterp(interpid1)
+ self.check_common(loaded_interp1)
+ self.check_semi_fresh(loaded_interp1, loaded_main, base)
+
+ # At this point:
+ # * alive in 1 interpreter (interp1)
+ # * module def still in _PyRuntime.imports.extensions
+ # * mod init func ran again
+ # * m_copy was copied from interp1 (was NULL)
+ # * module's global state was updated, not reset
+
+ # Use a subinterpreter while the previous one is still alive.
+ loaded_interp2 = self.import_in_subinterp(interpid2)
+ self.check_common(loaded_interp2)
+ self.check_copied(loaded_interp2, loaded_interp1)
+
+ # At this point:
+ # * alive in 2 interpreters (interp1, interp2)
+ # * module def still in _PyRuntime.imports.extensions
+ # * mod init func ran again
+ # * m_copy was copied from interp2 (was from interp1)
+ # * module's global state was updated, not reset
+
+ @requires_subinterpreters
+ def test_basic_multiple_interpreters_reset_each(self):
+ # resetting between each interpreter
+
+ # At this point:
+ # * alive in 0 interpreters
+ # * module def may or may not be loaded already
+ # * module def not in _PyRuntime.imports.extensions
+ # * mod init func has not run yet (since reset, at least)
+ # * m_copy not set (hasn't been loaded yet or already cleared)
+ # * module's global state has not been initialized yet
+ # (or already cleared)
+
+ interpid1 = self.add_subinterpreter()
+ interpid2 = self.add_subinterpreter()
+
+ # Use an interpreter that gets destroyed right away.
+ loaded = self.import_in_subinterp(
+ postscript='''
+ # Attrs set after loading are not in m_copy.
+ mod.spam = 'spam, spam, mash, spam, eggs, and spam'
+ ''',
+ postcleanup=True,
+ )
+ self.check_common(loaded)
+ self.check_fresh(loaded)
+
+ # At this point:
+ # * alive in 0 interpreters
+ # * module def in _PyRuntime.imports.extensions
+ # * mod init func ran for the first time (since reset, at least)
+        # * m_copy is NULL (cleared when the interpreter was destroyed)
+ # * module's global state was initialized, not reset
+
+ # Use a subinterpreter that sticks around.
+ loaded = self.import_in_subinterp(interpid1, postcleanup=True)
+ self.check_common(loaded)
+ self.check_fresh(loaded)
+
+ # At this point:
+ # * alive in 1 interpreter (interp1)
+ # * module def still in _PyRuntime.imports.extensions
+ # * mod init func ran again
+ # * m_copy was copied from interp1 (was NULL)
+ # * module's global state was initialized, not reset
+
+ # Use a subinterpreter while the previous one is still alive.
+ loaded = self.import_in_subinterp(interpid2, postcleanup=True)
+ self.check_common(loaded)
+ self.check_fresh(loaded)
+
+ # At this point:
+        # * alive in 2 interpreters (interp1, interp2)
+ # * module def still in _PyRuntime.imports.extensions
+ # * mod init func ran again
+ # * m_copy was copied from interp2 (was from interp1)
+ # * module's global state was initialized, not reset
+
+
if __name__ == '__main__':
# Test needs to be a package, so we can do relative imports.
unittest.main()
diff --git a/Lib/test/test_importlib/_context.py b/Lib/test/test_importlib/_context.py
new file mode 100644
index 00000000000000..8a53eb55d1503b
--- /dev/null
+++ b/Lib/test/test_importlib/_context.py
@@ -0,0 +1,13 @@
+import contextlib
+
+
+# from jaraco.context 4.3
+class suppress(contextlib.suppress, contextlib.ContextDecorator):
+ """
+ A version of contextlib.suppress with decorator support.
+
+ >>> @suppress(KeyError)
+ ... def key_error():
+ ... {}['']
+ >>> key_error()
+ """
diff --git a/Lib/test/test_importlib/_path.py b/Lib/test/test_importlib/_path.py
new file mode 100644
index 00000000000000..71a704389b986e
--- /dev/null
+++ b/Lib/test/test_importlib/_path.py
@@ -0,0 +1,109 @@
+# from jaraco.path 3.5
+
+import functools
+import pathlib
+from typing import Dict, Union
+
+try:
+ from typing import Protocol, runtime_checkable
+except ImportError: # pragma: no cover
+ # Python 3.7
+ from typing_extensions import Protocol, runtime_checkable # type: ignore
+
+
+FilesSpec = Dict[str, Union[str, bytes, 'FilesSpec']] # type: ignore
+
+
+@runtime_checkable
+class TreeMaker(Protocol):
+ def __truediv__(self, *args, **kwargs):
+ ... # pragma: no cover
+
+ def mkdir(self, **kwargs):
+ ... # pragma: no cover
+
+ def write_text(self, content, **kwargs):
+ ... # pragma: no cover
+
+ def write_bytes(self, content):
+ ... # pragma: no cover
+
+
+def _ensure_tree_maker(obj: Union[str, TreeMaker]) -> TreeMaker:
+ return obj if isinstance(obj, TreeMaker) else pathlib.Path(obj) # type: ignore
+
+
+def build(
+ spec: FilesSpec,
+ prefix: Union[str, TreeMaker] = pathlib.Path(), # type: ignore
+):
+ """
+ Build a set of files/directories, as described by the spec.
+
+ Each key represents a pathname, and the value represents
+ the content. Content may be a nested directory.
+
+ >>> spec = {
+ ... 'README.txt': "A README file",
+ ... "foo": {
+ ... "__init__.py": "",
+ ... "bar": {
+ ... "__init__.py": "",
+ ... },
+ ... "baz.py": "# Some code",
+ ... }
+ ... }
+ >>> target = getfixture('tmp_path')
+ >>> build(spec, target)
+ >>> target.joinpath('foo/baz.py').read_text(encoding='utf-8')
+ '# Some code'
+ """
+ for name, contents in spec.items():
+ create(contents, _ensure_tree_maker(prefix) / name)
+
+
+@functools.singledispatch
+def create(content: Union[str, bytes, FilesSpec], path):
+ path.mkdir(exist_ok=True)
+ build(content, prefix=path) # type: ignore
+
+
+@create.register
+def _(content: bytes, path):
+ path.write_bytes(content)
+
+
+@create.register
+def _(content: str, path):
+ path.write_text(content, encoding='utf-8')
+
+
+@create.register
+def _(content: str, path):
+ path.write_text(content, encoding='utf-8')
+
+
+class Recording:
+ """
+ A TreeMaker object that records everything that would be written.
+
+ >>> r = Recording()
+ >>> build({'foo': {'foo1.txt': 'yes'}, 'bar.txt': 'abc'}, r)
+ >>> r.record
+ ['foo/foo1.txt', 'bar.txt']
+ """
+
+ def __init__(self, loc=pathlib.PurePosixPath(), record=None):
+ self.loc = loc
+ self.record = record if record is not None else []
+
+ def __truediv__(self, other):
+ return Recording(self.loc / other, self.record)
+
+ def write_text(self, content, **kwargs):
+ self.record.append(str(self.loc))
+
+ write_bytes = write_text
+
+ def mkdir(self, **kwargs):
+ return
diff --git a/Lib/test/test_importlib/fixtures.py b/Lib/test/test_importlib/fixtures.py
index e7be77b3957c67..a364a977bce781 100644
--- a/Lib/test/test_importlib/fixtures.py
+++ b/Lib/test/test_importlib/fixtures.py
@@ -10,7 +10,10 @@
from test.support.os_helper import FS_NONASCII
from test.support import requires_zlib
-from typing import Dict, Union
+
+from . import _path
+from ._path import FilesSpec
+
try:
from importlib import resources # type: ignore
@@ -83,13 +86,8 @@ def setUp(self):
self.fixtures.enter_context(self.add_sys_path(self.site_dir))
-# Except for python/mypy#731, prefer to define
-# FilesDef = Dict[str, Union['FilesDef', str]]
-FilesDef = Dict[str, Union[Dict[str, Union[Dict[str, str], str]], str]]
-
-
class DistInfoPkg(OnSysPath, SiteDir):
- files: FilesDef = {
+ files: FilesSpec = {
"distinfo_pkg-1.0.0.dist-info": {
"METADATA": """
Name: distinfo-pkg
@@ -131,7 +129,7 @@ def make_uppercase(self):
class DistInfoPkgWithDot(OnSysPath, SiteDir):
- files: FilesDef = {
+ files: FilesSpec = {
"pkg_dot-1.0.0.dist-info": {
"METADATA": """
Name: pkg.dot
@@ -146,7 +144,7 @@ def setUp(self):
class DistInfoPkgWithDotLegacy(OnSysPath, SiteDir):
- files: FilesDef = {
+ files: FilesSpec = {
"pkg.dot-1.0.0.dist-info": {
"METADATA": """
Name: pkg.dot
@@ -173,7 +171,7 @@ def setUp(self):
class EggInfoPkg(OnSysPath, SiteDir):
- files: FilesDef = {
+ files: FilesSpec = {
"egginfo_pkg.egg-info": {
"PKG-INFO": """
Name: egginfo-pkg
@@ -212,8 +210,99 @@ def setUp(self):
build_files(EggInfoPkg.files, prefix=self.site_dir)
+class EggInfoPkgPipInstalledNoToplevel(OnSysPath, SiteDir):
+ files: FilesSpec = {
+ "egg_with_module_pkg.egg-info": {
+ "PKG-INFO": "Name: egg_with_module-pkg",
+ # SOURCES.txt is made from the source archive, and contains files
+ # (setup.py) that are not present after installation.
+ "SOURCES.txt": """
+ egg_with_module.py
+ setup.py
+ egg_with_module_pkg.egg-info/PKG-INFO
+ egg_with_module_pkg.egg-info/SOURCES.txt
+ egg_with_module_pkg.egg-info/top_level.txt
+ """,
+ # installed-files.txt is written by pip, and is a strictly more
+ # accurate source than SOURCES.txt as to the installed contents of
+ # the package.
+ "installed-files.txt": """
+ ../egg_with_module.py
+ PKG-INFO
+ SOURCES.txt
+ top_level.txt
+ """,
+ # missing top_level.txt (to trigger fallback to installed-files.txt)
+ },
+ "egg_with_module.py": """
+ def main():
+ print("hello world")
+ """,
+ }
+
+ def setUp(self):
+ super().setUp()
+ build_files(EggInfoPkgPipInstalledNoToplevel.files, prefix=self.site_dir)
+
+
+class EggInfoPkgPipInstalledNoModules(OnSysPath, SiteDir):
+ files: FilesSpec = {
+ "egg_with_no_modules_pkg.egg-info": {
+ "PKG-INFO": "Name: egg_with_no_modules-pkg",
+ # SOURCES.txt is made from the source archive, and contains files
+ # (setup.py) that are not present after installation.
+ "SOURCES.txt": """
+ setup.py
+ egg_with_no_modules_pkg.egg-info/PKG-INFO
+ egg_with_no_modules_pkg.egg-info/SOURCES.txt
+ egg_with_no_modules_pkg.egg-info/top_level.txt
+ """,
+ # installed-files.txt is written by pip, and is a strictly more
+ # accurate source than SOURCES.txt as to the installed contents of
+ # the package.
+ "installed-files.txt": """
+ PKG-INFO
+ SOURCES.txt
+ top_level.txt
+ """,
+ # top_level.txt correctly reflects that no modules are installed
+ "top_level.txt": b"\n",
+ },
+ }
+
+ def setUp(self):
+ super().setUp()
+ build_files(EggInfoPkgPipInstalledNoModules.files, prefix=self.site_dir)
+
+
+class EggInfoPkgSourcesFallback(OnSysPath, SiteDir):
+ files: FilesSpec = {
+ "sources_fallback_pkg.egg-info": {
+ "PKG-INFO": "Name: sources_fallback-pkg",
+ # SOURCES.txt is made from the source archive, and contains files
+ # (setup.py) that are not present after installation.
+ "SOURCES.txt": """
+ sources_fallback.py
+ setup.py
+ sources_fallback_pkg.egg-info/PKG-INFO
+ sources_fallback_pkg.egg-info/SOURCES.txt
+ """,
+ # missing installed-files.txt (i.e. not installed by pip) and
+ # missing top_level.txt (to trigger fallback to SOURCES.txt)
+ },
+ "sources_fallback.py": """
+ def main():
+ print("hello world")
+ """,
+ }
+
+ def setUp(self):
+ super().setUp()
+ build_files(EggInfoPkgSourcesFallback.files, prefix=self.site_dir)
+
+
class EggInfoFile(OnSysPath, SiteDir):
- files: FilesDef = {
+ files: FilesSpec = {
"egginfo_file.egg-info": """
Metadata-Version: 1.0
Name: egginfo_file
@@ -233,38 +322,22 @@ def setUp(self):
build_files(EggInfoFile.files, prefix=self.site_dir)
-def build_files(file_defs, prefix=pathlib.Path()):
- """Build a set of files/directories, as described by the
+# dedent all text strings before writing
+orig = _path.create.registry[str]
+_path.create.register(str, lambda content, path: orig(DALS(content), path))
- file_defs dictionary. Each key/value pair in the dictionary is
- interpreted as a filename/contents pair. If the contents value is a
- dictionary, a directory is created, and the dictionary interpreted
- as the files within it, recursively.
- For example:
+build_files = _path.build
- {"README.txt": "A README file",
- "foo": {
- "__init__.py": "",
- "bar": {
- "__init__.py": "",
- },
- "baz.py": "# Some code",
- }
- }
- """
- for name, contents in file_defs.items():
- full_name = prefix / name
- if isinstance(contents, dict):
- full_name.mkdir()
- build_files(contents, prefix=full_name)
- else:
- if isinstance(contents, bytes):
- with full_name.open('wb') as f:
- f.write(contents)
- else:
- with full_name.open('w', encoding='utf-8') as f:
- f.write(DALS(contents))
+
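+# Helpers for synthesizing a minimal RECORD file: each row is "path,hash,size";
+# the hash and size fields are left empty here.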
+def build_record(file_defs):
+ return ''.join(f'{name},,\n' for name in record_names(file_defs))
+
+
+def record_names(file_defs):
+ recording = _path.Recording()
+ _path.build(file_defs, recording)
+ return recording.record
class FileBuilder:
diff --git a/Lib/test/test_importlib/test_main.py b/Lib/test/test_importlib/test_main.py
index 30b68b6ae7d86e..46cd2b696d4cc8 100644
--- a/Lib/test/test_importlib/test_main.py
+++ b/Lib/test/test_importlib/test_main.py
@@ -1,7 +1,10 @@
import re
import pickle
import unittest
+import warnings
import importlib.metadata
+import contextlib
+import itertools
try:
import pyfakefs.fake_filesystem_unittest as ffs
@@ -9,6 +12,7 @@
from .stubs import fake_filesystem_unittest as ffs
from . import fixtures
+from ._context import suppress
from importlib.metadata import (
Distribution,
EntryPoint,
@@ -22,6 +26,13 @@
)
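+# Record DeprecationWarnings (instead of letting them propagate) and yield the
+# list of caught warnings for inspection.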
+@contextlib.contextmanager
+def suppress_known_deprecation():
+ with warnings.catch_warnings(record=True) as ctx:
+ warnings.simplefilter('default', category=DeprecationWarning)
+ yield ctx
+
+
class BasicTests(fixtures.DistInfoPkg, unittest.TestCase):
version_pattern = r'\d+\.\d+(\.\d)?'
@@ -37,7 +48,7 @@ def test_for_name_does_not_exist(self):
def test_package_not_found_mentions_metadata(self):
"""
When a package is not found, that could indicate that the
- packgae is not installed or that it is installed without
+ package is not installed or that it is installed without
metadata. Ensure the exception mentions metadata to help
guide users toward the cause. See #124.
"""
@@ -46,8 +57,12 @@ def test_package_not_found_mentions_metadata(self):
assert "metadata" in str(ctx.exception)
- def test_new_style_classes(self):
- self.assertIsInstance(Distribution, type)
+ # expected to fail until ABC is enforced
+ @suppress(AssertionError)
+ @suppress_known_deprecation()
+ def test_abc_enforced(self):
+ with self.assertRaises(TypeError):
+ type('DistributionSubclass', (Distribution,), {})()
@fixtures.parameterize(
dict(name=None),
@@ -172,11 +187,21 @@ def test_metadata_loads_egg_info(self):
assert meta['Description'] == 'pôrˈtend'
-class DiscoveryTests(fixtures.EggInfoPkg, fixtures.DistInfoPkg, unittest.TestCase):
+class DiscoveryTests(
+ fixtures.EggInfoPkg,
+ fixtures.EggInfoPkgPipInstalledNoToplevel,
+ fixtures.EggInfoPkgPipInstalledNoModules,
+ fixtures.EggInfoPkgSourcesFallback,
+ fixtures.DistInfoPkg,
+ unittest.TestCase,
+):
def test_package_discovery(self):
dists = list(distributions())
assert all(isinstance(dist, Distribution) for dist in dists)
assert any(dist.metadata['Name'] == 'egginfo-pkg' for dist in dists)
+ assert any(dist.metadata['Name'] == 'egg_with_module-pkg' for dist in dists)
+ assert any(dist.metadata['Name'] == 'egg_with_no_modules-pkg' for dist in dists)
+ assert any(dist.metadata['Name'] == 'sources_fallback-pkg' for dist in dists)
assert any(dist.metadata['Name'] == 'distinfo-pkg' for dist in dists)
def test_invalid_usage(self):
@@ -324,3 +349,79 @@ def test_packages_distributions_neither_toplevel_nor_files(self):
prefix=self.site_dir,
)
packages_distributions()
+
+ def test_packages_distributions_all_module_types(self):
+ """
+        Test top-level modules detected on a package without 'top_level.txt'.
+ """
+ suffixes = importlib.machinery.all_suffixes()
+ metadata = dict(
+ METADATA="""
+ Name: all_distributions
+ Version: 1.0.0
+ """,
+ )
+ files = {
+ 'all_distributions-1.0.0.dist-info': metadata,
+ }
+ for i, suffix in enumerate(suffixes):
+ files.update(
+ {
+ f'importable-name {i}{suffix}': '',
+ f'in_namespace_{i}': {
+ f'mod{suffix}': '',
+ },
+ f'in_package_{i}': {
+ '__init__.py': '',
+ f'mod{suffix}': '',
+ },
+ }
+ )
+ metadata.update(RECORD=fixtures.build_record(files))
+ fixtures.build_files(files, prefix=self.site_dir)
+
+ distributions = packages_distributions()
+
+ for i in range(len(suffixes)):
+ assert distributions[f'importable-name {i}'] == ['all_distributions']
+ assert distributions[f'in_namespace_{i}'] == ['all_distributions']
+ assert distributions[f'in_package_{i}'] == ['all_distributions']
+
+ assert not any(name.endswith('.dist-info') for name in distributions)
+
+
+class PackagesDistributionsEggTest(
+ fixtures.EggInfoPkg,
+ fixtures.EggInfoPkgPipInstalledNoToplevel,
+ fixtures.EggInfoPkgPipInstalledNoModules,
+ fixtures.EggInfoPkgSourcesFallback,
+ unittest.TestCase,
+):
+ def test_packages_distributions_on_eggs(self):
+ """
+        Test old-style egg packages with varying combinations of
+        'top_level.txt', 'SOURCES.txt', and 'installed-files.txt' available.
+ """
+ distributions = packages_distributions()
+
+ def import_names_from_package(package_name):
+ return {
+ import_name
+ for import_name, package_names in distributions.items()
+ if package_name in package_names
+ }
+
+ # egginfo-pkg declares one import ('mod') via top_level.txt
+ assert import_names_from_package('egginfo-pkg') == {'mod'}
+
+ # egg_with_module-pkg has one import ('egg_with_module') inferred from
+ # installed-files.txt (top_level.txt is missing)
+ assert import_names_from_package('egg_with_module-pkg') == {'egg_with_module'}
+
+ # egg_with_no_modules-pkg should not be associated with any import names
+ # (top_level.txt is empty, and installed-files.txt has no .py files)
+ assert import_names_from_package('egg_with_no_modules-pkg') == set()
+
+ # sources_fallback-pkg has one import ('sources_fallback') inferred from
+        # SOURCES.txt (top_level.txt and installed-files.txt are missing)
+ assert import_names_from_package('sources_fallback-pkg') == {'sources_fallback'}
diff --git a/Lib/test/test_importlib/test_metadata_api.py b/Lib/test/test_importlib/test_metadata_api.py
index 71c47e62d27124..33c6e85ee94753 100644
--- a/Lib/test/test_importlib/test_metadata_api.py
+++ b/Lib/test/test_importlib/test_metadata_api.py
@@ -27,12 +27,14 @@ def suppress_known_deprecation():
class APITests(
fixtures.EggInfoPkg,
+ fixtures.EggInfoPkgPipInstalledNoToplevel,
+ fixtures.EggInfoPkgPipInstalledNoModules,
+ fixtures.EggInfoPkgSourcesFallback,
fixtures.DistInfoPkg,
fixtures.DistInfoPkgWithDot,
fixtures.EggInfoFile,
unittest.TestCase,
):
-
version_pattern = r'\d+\.\d+(\.\d)?'
def test_retrieves_version_of_self(self):
@@ -63,15 +65,28 @@ def test_prefix_not_matched(self):
distribution(prefix)
def test_for_top_level(self):
- self.assertEqual(
- distribution('egginfo-pkg').read_text('top_level.txt').strip(), 'mod'
- )
+ tests = [
+ ('egginfo-pkg', 'mod'),
+ ('egg_with_no_modules-pkg', ''),
+ ]
+ for pkg_name, expect_content in tests:
+ with self.subTest(pkg_name):
+ self.assertEqual(
+ distribution(pkg_name).read_text('top_level.txt').strip(),
+ expect_content,
+ )
def test_read_text(self):
- top_level = [
- path for path in files('egginfo-pkg') if path.name == 'top_level.txt'
- ][0]
- self.assertEqual(top_level.read_text(), 'mod\n')
+ tests = [
+ ('egginfo-pkg', 'mod\n'),
+ ('egg_with_no_modules-pkg', '\n'),
+ ]
+ for pkg_name, expect_content in tests:
+ with self.subTest(pkg_name):
+ top_level = [
+ path for path in files(pkg_name) if path.name == 'top_level.txt'
+ ][0]
+ self.assertEqual(top_level.read_text(), expect_content)
def test_entry_points(self):
eps = entry_points()
@@ -137,6 +152,28 @@ def test_metadata_for_this_package(self):
classifiers = md.get_all('Classifier')
assert 'Topic :: Software Development :: Libraries' in classifiers
+ def test_missing_key_legacy(self):
+ """
+ Requesting a missing key will still return None, but warn.
+ """
+ md = metadata('distinfo-pkg')
+ with suppress_known_deprecation():
+ assert md['does-not-exist'] is None
+
+ def test_get_key(self):
+ """
+ Getting a key gets the key.
+ """
+ md = metadata('egginfo-pkg')
+ assert md.get('Name') == 'egginfo-pkg'
+
+ def test_get_missing_key(self):
+ """
+ Requesting a missing key will return None.
+ """
+ md = metadata('distinfo-pkg')
+ assert md.get('does-not-exist') is None
+
@staticmethod
def _test_files(files):
root = files[0].root
@@ -159,6 +196,9 @@ def test_files_dist_info(self):
def test_files_egg_info(self):
self._test_files(files('egginfo-pkg'))
+ self._test_files(files('egg_with_module-pkg'))
+ self._test_files(files('egg_with_no_modules-pkg'))
+ self._test_files(files('sources_fallback-pkg'))
def test_version_egg_info_file(self):
self.assertEqual(version('egginfo-file'), '0.1')
diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py
index 6b342b1f00d654..42e3d709bd683f 100644
--- a/Lib/test/test_inspect.py
+++ b/Lib/test/test_inspect.py
@@ -2463,18 +2463,43 @@ def test_signature_object(self):
self.assertEqual(str(S()), '()')
self.assertEqual(repr(S().parameters), 'mappingproxy(OrderedDict())')
- def test(po, pk, pod=42, pkd=100, *args, ko, **kwargs):
+ def test(po, /, pk, pkd=100, *args, ko, kod=10, **kwargs):
pass
+
sig = inspect.signature(test)
- po = sig.parameters['po'].replace(kind=P.POSITIONAL_ONLY)
- pod = sig.parameters['pod'].replace(kind=P.POSITIONAL_ONLY)
+        self.assertTrue(repr(sig).startswith('<Signature'))
foo_partial = functools.partial(foo, a=1)
diff --git a/Lib/test/test_pdb.py b/Lib/test/test_pdb.py
index 94b441720f258c..b5c413af344c93 100644
--- a/Lib/test/test_pdb.py
+++ b/Lib/test/test_pdb.py
@@ -1715,8 +1715,8 @@ def test_pdb_issue_gh_101517():
... 'continue'
... ]):
... test_function()
- --Return--
-    > <doctest test.test_pdb.test_pdb_issue_gh_101517[0]>(None)test_function()->None
+    > <doctest test.test_pdb.test_pdb_issue_gh_101517[0]>(5)test_function()
+ -> import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace()
(Pdb) continue
"""
diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py
index 77f42f7f9c937b..444f8abe4607b7 100644
--- a/Lib/test/test_posix.py
+++ b/Lib/test/test_posix.py
@@ -231,6 +231,9 @@ def test_register_at_fork(self):
with self.assertRaises(TypeError, msg="Invalid arg was allowed"):
# Ensure a combination of valid and invalid is an error.
os.register_at_fork(before=None, after_in_parent=lambda: 3)
+ with self.assertRaises(TypeError, msg="At least one argument is required"):
+ # when no arg is passed
+ os.register_at_fork()
with self.assertRaises(TypeError, msg="Invalid arg was allowed"):
# Ensure a combination of valid and invalid is an error.
os.register_at_fork(before=lambda: None, after_in_child='')
diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py
index 9eaf167a9fa3c9..36f0b8a31a3715 100644
--- a/Lib/test/test_shutil.py
+++ b/Lib/test/test_shutil.py
@@ -33,6 +33,7 @@
from test import support
from test.support import os_helper
from test.support.os_helper import TESTFN, FakePath
+from test.support import warnings_helper
TESTFN2 = TESTFN + "2"
TESTFN_SRC = TESTFN + "_SRC"
@@ -1841,12 +1842,14 @@ def test_register_archive_format(self):
### shutil.unpack_archive
- def check_unpack_archive(self, format):
- self.check_unpack_archive_with_converter(format, lambda path: path)
- self.check_unpack_archive_with_converter(format, pathlib.Path)
- self.check_unpack_archive_with_converter(format, FakePath)
+ def check_unpack_archive(self, format, **kwargs):
+ self.check_unpack_archive_with_converter(
+ format, lambda path: path, **kwargs)
+ self.check_unpack_archive_with_converter(
+ format, pathlib.Path, **kwargs)
+ self.check_unpack_archive_with_converter(format, FakePath, **kwargs)
- def check_unpack_archive_with_converter(self, format, converter):
+ def check_unpack_archive_with_converter(self, format, converter, **kwargs):
root_dir, base_dir = self._create_files()
expected = rlistdir(root_dir)
expected.remove('outer')
@@ -1856,36 +1859,48 @@ def check_unpack_archive_with_converter(self, format, converter):
# let's try to unpack it now
tmpdir2 = self.mkdtemp()
- unpack_archive(converter(filename), converter(tmpdir2))
+ unpack_archive(converter(filename), converter(tmpdir2), **kwargs)
self.assertEqual(rlistdir(tmpdir2), expected)
# and again, this time with the format specified
tmpdir3 = self.mkdtemp()
- unpack_archive(converter(filename), converter(tmpdir3), format=format)
+ unpack_archive(converter(filename), converter(tmpdir3), format=format,
+ **kwargs)
self.assertEqual(rlistdir(tmpdir3), expected)
- self.assertRaises(shutil.ReadError, unpack_archive, converter(TESTFN))
- self.assertRaises(ValueError, unpack_archive, converter(TESTFN), format='xxx')
+ with self.assertRaises(shutil.ReadError):
+ unpack_archive(converter(TESTFN), **kwargs)
+ with self.assertRaises(ValueError):
+ unpack_archive(converter(TESTFN), format='xxx', **kwargs)
+
+ def check_unpack_tarball(self, format):
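+        # Tar-based formats take an extraction filter; calling without one
+        # should emit the DeprecationWarning about the Python 3.14 default.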
+ self.check_unpack_archive(format, filter='fully_trusted')
+ self.check_unpack_archive(format, filter='data')
+ with warnings_helper.check_warnings(
+ ('Python 3.14', DeprecationWarning)):
+ self.check_unpack_archive(format)
def test_unpack_archive_tar(self):
- self.check_unpack_archive('tar')
+ self.check_unpack_tarball('tar')
@support.requires_zlib()
def test_unpack_archive_gztar(self):
- self.check_unpack_archive('gztar')
+ self.check_unpack_tarball('gztar')
@support.requires_bz2()
def test_unpack_archive_bztar(self):
- self.check_unpack_archive('bztar')
+ self.check_unpack_tarball('bztar')
@support.requires_lzma()
@unittest.skipIf(AIX and not _maxdataOK(), "AIX MAXDATA must be 0x20000000 or larger")
def test_unpack_archive_xztar(self):
- self.check_unpack_archive('xztar')
+ self.check_unpack_tarball('xztar')
@support.requires_zlib()
def test_unpack_archive_zip(self):
self.check_unpack_archive('zip')
+ with self.assertRaises(TypeError):
+ self.check_unpack_archive('zip', filter='data')
def test_unpack_registry(self):
diff --git a/Lib/test/test_socketserver.py b/Lib/test/test_socketserver.py
index 2fa5069423327a..71ed4c7d58dafc 100644
--- a/Lib/test/test_socketserver.py
+++ b/Lib/test/test_socketserver.py
@@ -56,7 +56,7 @@ class ForkingUnixDatagramServer(socketserver.ForkingMixIn,
socketserver.UnixDatagramServer):
pass
-
+@test.support.requires_fork()
@contextlib.contextmanager
def simple_subprocess(testcase):
"""Tests that a custom child process is not waited on (Issue 1540386)"""
diff --git a/Lib/test/test_super.py b/Lib/test/test_super.py
index 2cdfd342f320f1..ed773a3cff2a6d 100644
--- a/Lib/test/test_super.py
+++ b/Lib/test/test_super.py
@@ -393,6 +393,23 @@ def method(self):
with self.assertRaisesRegex(TypeError, "argument 1 must be a type"):
C().method()
+ def test_super___class__(self):
+ class C:
+ def method(self):
+ return super().__class__
+
+ self.assertEqual(C().method(), super)
+
+ def test_super_subclass___class__(self):
+ class mysuper(super):
+ pass
+
+ class C:
+ def method(self):
+ return mysuper(C, self).__class__
+
+ self.assertEqual(C().method(), mysuper)
+
if __name__ == "__main__":
unittest.main()
diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py
index f23653558a9119..f959bbb4400702 100644
--- a/Lib/test/test_syntax.py
+++ b/Lib/test/test_syntax.py
@@ -1853,6 +1853,30 @@ def f(x: *b)
Traceback (most recent call last):
...
SyntaxError: invalid syntax
+
+Invalid bytes literals:
+
+ >>> b"Ā"
+ Traceback (most recent call last):
+ ...
+ b"Ā"
+ ^^^
+ SyntaxError: bytes can only contain ASCII literal characters
+
+ >>> b"абвгде"
+ Traceback (most recent call last):
+ ...
+ b"абвгде"
+ ^^^^^^^^
+ SyntaxError: bytes can only contain ASCII literal characters
+
+ >>> b"abc ъющый" # first 3 letters are ascii
+ Traceback (most recent call last):
+ ...
+ b"abc ъющый"
+ ^^^^^^^^^^^
+ SyntaxError: bytes can only contain ASCII literal characters
+
"""
import re
diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py
index 1aebe1b111f2e9..611cd27ecf1240 100644
--- a/Lib/test/test_sys.py
+++ b/Lib/test/test_sys.py
@@ -385,7 +385,8 @@ def test_refcount(self):
self.assertRaises(TypeError, sys.getrefcount)
c = sys.getrefcount(None)
n = None
- self.assertEqual(sys.getrefcount(None), c+1)
+ # Singleton refcnts don't change
+ self.assertEqual(sys.getrefcount(None), c)
del n
self.assertEqual(sys.getrefcount(None), c)
if hasattr(sys, "gettotalrefcount"):
diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py
index 39f6f499c818ef..ba25590b265ca4 100644
--- a/Lib/test/test_tarfile.py
+++ b/Lib/test/test_tarfile.py
@@ -2,9 +2,13 @@
import os
import io
from hashlib import sha256
-from contextlib import contextmanager
+from contextlib import contextmanager, ExitStack
from random import Random
import pathlib
+import shutil
+import re
+import warnings
+import stat
import unittest
import unittest.mock
@@ -13,6 +17,7 @@
from test import support
from test.support import os_helper
from test.support import script_helper
+from test.support import warnings_helper
# Check for our compression modules.
try:
@@ -108,7 +113,7 @@ def test_fileobj_regular_file(self):
"regular file extraction failed")
def test_fileobj_readlines(self):
- self.tar.extract("ustar/regtype", TEMPDIR)
+ self.tar.extract("ustar/regtype", TEMPDIR, filter='data')
tarinfo = self.tar.getmember("ustar/regtype")
with open(os.path.join(TEMPDIR, "ustar/regtype"), "r") as fobj1:
lines1 = fobj1.readlines()
@@ -126,7 +131,7 @@ def test_fileobj_readlines(self):
"fileobj.readlines() failed")
def test_fileobj_iter(self):
- self.tar.extract("ustar/regtype", TEMPDIR)
+ self.tar.extract("ustar/regtype", TEMPDIR, filter='data')
tarinfo = self.tar.getmember("ustar/regtype")
with open(os.path.join(TEMPDIR, "ustar/regtype"), "r") as fobj1:
lines1 = fobj1.readlines()
@@ -136,7 +141,8 @@ def test_fileobj_iter(self):
"fileobj.__iter__() failed")
def test_fileobj_seek(self):
- self.tar.extract("ustar/regtype", TEMPDIR)
+ self.tar.extract("ustar/regtype", TEMPDIR,
+ filter='data')
with open(os.path.join(TEMPDIR, "ustar/regtype"), "rb") as fobj:
data = fobj.read()
@@ -467,7 +473,7 @@ def test_premature_end_of_archive(self):
t = tar.next()
with self.assertRaisesRegex(tarfile.ReadError, "unexpected end of data"):
- tar.extract(t, TEMPDIR)
+ tar.extract(t, TEMPDIR, filter='data')
with self.assertRaisesRegex(tarfile.ReadError, "unexpected end of data"):
tar.extractfile(t).read()
@@ -629,16 +635,16 @@ def test_find_members(self):
def test_extract_hardlink(self):
# Test hardlink extraction (e.g. bug #857297).
with tarfile.open(tarname, errorlevel=1, encoding="iso8859-1") as tar:
- tar.extract("ustar/regtype", TEMPDIR)
+ tar.extract("ustar/regtype", TEMPDIR, filter='data')
self.addCleanup(os_helper.unlink, os.path.join(TEMPDIR, "ustar/regtype"))
- tar.extract("ustar/lnktype", TEMPDIR)
+ tar.extract("ustar/lnktype", TEMPDIR, filter='data')
self.addCleanup(os_helper.unlink, os.path.join(TEMPDIR, "ustar/lnktype"))
with open(os.path.join(TEMPDIR, "ustar/lnktype"), "rb") as f:
data = f.read()
self.assertEqual(sha256sum(data), sha256_regtype)
- tar.extract("ustar/symtype", TEMPDIR)
+ tar.extract("ustar/symtype", TEMPDIR, filter='data')
self.addCleanup(os_helper.unlink, os.path.join(TEMPDIR, "ustar/symtype"))
with open(os.path.join(TEMPDIR, "ustar/symtype"), "rb") as f:
data = f.read()
@@ -653,13 +659,14 @@ def test_extractall(self):
os.mkdir(DIR)
try:
directories = [t for t in tar if t.isdir()]
- tar.extractall(DIR, directories)
+ tar.extractall(DIR, directories, filter='fully_trusted')
for tarinfo in directories:
path = os.path.join(DIR, tarinfo.name)
if sys.platform != "win32":
# Win32 has no support for fine grained permissions.
self.assertEqual(tarinfo.mode & 0o777,
- os.stat(path).st_mode & 0o777)
+ os.stat(path).st_mode & 0o777,
+ tarinfo.name)
def format_mtime(mtime):
if isinstance(mtime, float):
return "{} ({})".format(mtime, mtime.hex())
@@ -683,7 +690,7 @@ def test_extract_directory(self):
try:
with tarfile.open(tarname, encoding="iso8859-1") as tar:
tarinfo = tar.getmember(dirtype)
- tar.extract(tarinfo, path=DIR)
+ tar.extract(tarinfo, path=DIR, filter='fully_trusted')
extracted = os.path.join(DIR, dirtype)
self.assertEqual(os.path.getmtime(extracted), tarinfo.mtime)
if sys.platform != "win32":
@@ -696,7 +703,7 @@ def test_extractall_pathlike_name(self):
with os_helper.temp_dir(DIR), \
tarfile.open(tarname, encoding="iso8859-1") as tar:
directories = [t for t in tar if t.isdir()]
- tar.extractall(DIR, directories)
+ tar.extractall(DIR, directories, filter='fully_trusted')
for tarinfo in directories:
path = DIR / tarinfo.name
self.assertEqual(os.path.getmtime(path), tarinfo.mtime)
@@ -707,7 +714,7 @@ def test_extract_pathlike_name(self):
with os_helper.temp_dir(DIR), \
tarfile.open(tarname, encoding="iso8859-1") as tar:
tarinfo = tar.getmember(dirtype)
- tar.extract(tarinfo, path=DIR)
+ tar.extract(tarinfo, path=DIR, filter='fully_trusted')
extracted = DIR / dirtype
self.assertEqual(os.path.getmtime(extracted), tarinfo.mtime)
@@ -1075,7 +1082,7 @@ class GNUReadTest(LongnameTest, ReadTest, unittest.TestCase):
     # on all platforms, and after that a test that will work only on
# platforms/filesystems that prove to support sparse files.
def _test_sparse_file(self, name):
- self.tar.extract(name, TEMPDIR)
+ self.tar.extract(name, TEMPDIR, filter='data')
filename = os.path.join(TEMPDIR, name)
with open(filename, "rb") as fobj:
data = fobj.read()
@@ -1442,7 +1449,8 @@ def test_extractall_symlinks(self):
with tarfile.open(temparchive, errorlevel=2) as tar:
# this should not raise OSError: [Errno 17] File exists
try:
- tar.extractall(path=tempdir)
+ tar.extractall(path=tempdir,
+ filter='fully_trusted')
except OSError:
self.fail("extractall failed with symlinked files")
finally:
@@ -2547,6 +2555,15 @@ def make_simple_tarfile(self, tar_name):
for tardata in files:
tf.add(tardata, arcname=os.path.basename(tardata))
+ def make_evil_tarfile(self, tar_name):
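+        # 'benign' is a normal member; '../evil' points outside the extraction
+        # directory, so the 'data' filter should refuse to extract it.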
+ files = [support.findfile('tokenize_tests.txt')]
+ self.addCleanup(os_helper.unlink, tar_name)
+ with tarfile.open(tar_name, 'w') as tf:
+ benign = tarfile.TarInfo('benign')
+ tf.addfile(benign, fileobj=io.BytesIO(b''))
+ evil = tarfile.TarInfo('../evil')
+ tf.addfile(evil, fileobj=io.BytesIO(b''))
+
def test_bad_use(self):
rc, out, err = self.tarfilecmd_failure()
self.assertEqual(out, b'')
@@ -2703,6 +2720,25 @@ def test_extract_command_verbose(self):
finally:
os_helper.rmtree(tarextdir)
+ def test_extract_command_filter(self):
+ self.make_evil_tarfile(tmpname)
+ # Make an inner directory, so the member named '../evil'
+ # is still extracted into `tarextdir`
+ destdir = os.path.join(tarextdir, 'dest')
+ os.mkdir(tarextdir)
+ try:
+ with os_helper.temp_cwd(destdir):
+ self.tarfilecmd_failure('-e', tmpname,
+ '-v',
+ '--filter', 'data')
+ out = self.tarfilecmd('-e', tmpname,
+ '-v',
+ '--filter', 'fully_trusted',
+ PYTHONIOENCODING='utf-8')
+ self.assertIn(b' file is extracted.', out)
+ finally:
+ os_helper.rmtree(tarextdir)
+
def test_extract_command_different_directory(self):
self.make_simple_tarfile(tmpname)
try:
@@ -2786,7 +2822,7 @@ class LinkEmulationTest(ReadTest, unittest.TestCase):
# symbolic or hard links tarfile tries to extract these types of members
# as the regular files they point to.
def _test_link_extraction(self, name):
- self.tar.extract(name, TEMPDIR)
+ self.tar.extract(name, TEMPDIR, filter='fully_trusted')
with open(os.path.join(TEMPDIR, name), "rb") as f:
data = f.read()
self.assertEqual(sha256sum(data), sha256_regtype)
@@ -2918,8 +2954,10 @@ def test_extract_with_numeric_owner(self, mock_geteuid, mock_chmod,
mock_chown):
with self._setup_test(mock_geteuid) as (tarfl, filename_1, _,
filename_2):
- tarfl.extract(filename_1, TEMPDIR, numeric_owner=True)
- tarfl.extract(filename_2 , TEMPDIR, numeric_owner=True)
+ tarfl.extract(filename_1, TEMPDIR, numeric_owner=True,
+ filter='fully_trusted')
+ tarfl.extract(filename_2 , TEMPDIR, numeric_owner=True,
+ filter='fully_trusted')
# convert to filesystem paths
f_filename_1 = os.path.join(TEMPDIR, filename_1)
@@ -2937,7 +2975,8 @@ def test_extractall_with_numeric_owner(self, mock_geteuid, mock_chmod,
mock_chown):
with self._setup_test(mock_geteuid) as (tarfl, filename_1, dirname_1,
filename_2):
- tarfl.extractall(TEMPDIR, numeric_owner=True)
+ tarfl.extractall(TEMPDIR, numeric_owner=True,
+ filter='fully_trusted')
# convert to filesystem paths
f_filename_1 = os.path.join(TEMPDIR, filename_1)
@@ -2962,7 +3001,8 @@ def test_extractall_with_numeric_owner(self, mock_geteuid, mock_chmod,
def test_extract_without_numeric_owner(self, mock_geteuid, mock_chmod,
mock_chown):
with self._setup_test(mock_geteuid) as (tarfl, filename_1, _, _):
- tarfl.extract(filename_1, TEMPDIR, numeric_owner=False)
+ tarfl.extract(filename_1, TEMPDIR, numeric_owner=False,
+ filter='fully_trusted')
# convert to filesystem paths
f_filename_1 = os.path.join(TEMPDIR, filename_1)
@@ -2976,6 +3016,890 @@ def test_keyword_only(self, mock_geteuid):
tarfl.extract, filename_1, TEMPDIR, False, True)
+class ReplaceTests(ReadTest, unittest.TestCase):
+ def test_replace_name(self):
+ member = self.tar.getmember('ustar/regtype')
+ replaced = member.replace(name='misc/other')
+ self.assertEqual(replaced.name, 'misc/other')
+ self.assertEqual(member.name, 'ustar/regtype')
+ self.assertEqual(self.tar.getmember('ustar/regtype').name,
+ 'ustar/regtype')
+
+ def test_replace_deep(self):
+ member = self.tar.getmember('pax/regtype1')
+ replaced = member.replace()
+ replaced.pax_headers['gname'] = 'not-bar'
+ self.assertEqual(member.pax_headers['gname'], 'bar')
+ self.assertEqual(
+ self.tar.getmember('pax/regtype1').pax_headers['gname'], 'bar')
+
+ def test_replace_shallow(self):
+ member = self.tar.getmember('pax/regtype1')
+ replaced = member.replace(deep=False)
+ replaced.pax_headers['gname'] = 'not-bar'
+ self.assertEqual(member.pax_headers['gname'], 'not-bar')
+ self.assertEqual(
+ self.tar.getmember('pax/regtype1').pax_headers['gname'], 'not-bar')
+
+ def test_replace_all(self):
+ member = self.tar.getmember('ustar/regtype')
+ for attr_name in ('name', 'mtime', 'mode', 'linkname',
+ 'uid', 'gid', 'uname', 'gname'):
+ with self.subTest(attr_name=attr_name):
+ replaced = member.replace(**{attr_name: None})
+ self.assertEqual(getattr(replaced, attr_name), None)
+ self.assertNotEqual(getattr(member, attr_name), None)
+
+ def test_replace_internal(self):
+ member = self.tar.getmember('ustar/regtype')
+ with self.assertRaises(TypeError):
+ member.replace(offset=123456789)
+
+
+class NoneInfoExtractTests(ReadTest):
+ # These mainly check that all kinds of members are extracted successfully
+ # if some metadata is None.
+ # Some of the methods do additional spot checks.
+
+ # We also test that the default filters can deal with None.
+
+ extraction_filter = None
+
+ @classmethod
+ def setUpClass(cls):
+ tar = tarfile.open(tarname, mode='r', encoding="iso8859-1")
+ cls.control_dir = pathlib.Path(TEMPDIR) / "extractall_ctrl"
+ tar.errorlevel = 0
+ with ExitStack() as cm:
+ if cls.extraction_filter is None:
+ cm.enter_context(warnings.catch_warnings(
+ action="ignore", category=DeprecationWarning))
+ tar.extractall(cls.control_dir, filter=cls.extraction_filter)
+ tar.close()
+ cls.control_paths = set(
+ p.relative_to(cls.control_dir)
+ for p in pathlib.Path(cls.control_dir).glob('**/*'))
+
+ @classmethod
+ def tearDownClass(cls):
+ shutil.rmtree(cls.control_dir)
+
+ def check_files_present(self, directory):
+ got_paths = set(
+ p.relative_to(directory)
+ for p in pathlib.Path(directory).glob('**/*'))
+ self.assertEqual(self.control_paths, got_paths)
+
+ @contextmanager
+ def extract_with_none(self, *attr_names):
+ DIR = pathlib.Path(TEMPDIR) / "extractall_none"
+ self.tar.errorlevel = 0
+ for member in self.tar.getmembers():
+ for attr_name in attr_names:
+ setattr(member, attr_name, None)
+ with os_helper.temp_dir(DIR):
+ self.tar.extractall(DIR, filter='fully_trusted')
+ self.check_files_present(DIR)
+ yield DIR
+
+ def test_extractall_none_mtime(self):
+ # mtimes of extracted files should be no earlier than 'now' -- the mtime
+ # of a previously created directory.
+ now = pathlib.Path(TEMPDIR).stat().st_mtime
+ with self.extract_with_none('mtime') as DIR:
+ for path in pathlib.Path(DIR).glob('**/*'):
+ with self.subTest(path=path):
+ try:
+ mtime = path.stat().st_mtime
+ except OSError:
+ # Some systems can't stat symlinks, ignore those
+ if not path.is_symlink():
+ raise
+ else:
+ self.assertGreaterEqual(path.stat().st_mtime, now)
+
+ def test_extractall_none_mode(self):
+ # modes of directories and regular files should match the mode
+ # of a "normally" created directory or regular file
+ dir_mode = pathlib.Path(TEMPDIR).stat().st_mode
+ regular_file = pathlib.Path(TEMPDIR) / 'regular_file'
+ regular_file.write_text('')
+ regular_file_mode = regular_file.stat().st_mode
+ with self.extract_with_none('mode') as DIR:
+ for path in pathlib.Path(DIR).glob('**/*'):
+ with self.subTest(path=path):
+ if path.is_dir():
+ self.assertEqual(path.stat().st_mode, dir_mode)
+ elif path.is_file():
+ self.assertEqual(path.stat().st_mode,
+ regular_file_mode)
+
+ def test_extractall_none_uid(self):
+ with self.extract_with_none('uid'):
+ pass
+
+ def test_extractall_none_gid(self):
+ with self.extract_with_none('gid'):
+ pass
+
+ def test_extractall_none_uname(self):
+ with self.extract_with_none('uname'):
+ pass
+
+ def test_extractall_none_gname(self):
+ with self.extract_with_none('gname'):
+ pass
+
+ def test_extractall_none_ownership(self):
+ with self.extract_with_none('uid', 'gid', 'uname', 'gname'):
+ pass
+
+class NoneInfoExtractTests_Data(NoneInfoExtractTests, unittest.TestCase):
+ extraction_filter = 'data'
+
+class NoneInfoExtractTests_FullyTrusted(NoneInfoExtractTests,
+ unittest.TestCase):
+ extraction_filter = 'fully_trusted'
+
+class NoneInfoExtractTests_Tar(NoneInfoExtractTests, unittest.TestCase):
+ extraction_filter = 'tar'
+
+class NoneInfoExtractTests_Default(NoneInfoExtractTests,
+ unittest.TestCase):
+ extraction_filter = None
+
+class NoneInfoTests_Misc(unittest.TestCase):
+ def test_add(self):
+ # When addfile() encounters None metadata, it raises a ValueError
+ bio = io.BytesIO()
+ for tarformat in (tarfile.USTAR_FORMAT, tarfile.GNU_FORMAT,
+ tarfile.PAX_FORMAT):
+ with self.subTest(tarformat=tarformat):
+ tar = tarfile.open(fileobj=bio, mode='w', format=tarformat)
+ tarinfo = tar.gettarinfo(tarname)
+ try:
+ tar.addfile(tarinfo)
+ except Exception:
+ if tarformat == tarfile.USTAR_FORMAT:
+ # In the old, limited format, adding might fail for
+ # reasons like the UID being too large
+ pass
+ else:
+ raise
+ else:
+ for attr_name in ('mtime', 'mode', 'uid', 'gid',
+ 'uname', 'gname'):
+ with self.subTest(attr_name=attr_name):
+ replaced = tarinfo.replace(**{attr_name: None})
+ with self.assertRaisesRegex(ValueError,
+ f"{attr_name}"):
+ tar.addfile(replaced)
+
+ def test_list(self):
+ # Change some metadata to None, then compare list() output
+ # word-for-word. We want list() to not raise, and to only change the
+ # printout for the affected piece of metadata.
+ # (n.b.: some contents of the test archive are hardcoded.)
+ for attr_names in ({'mtime'}, {'mode'}, {'uid'}, {'gid'},
+ {'uname'}, {'gname'},
+ {'uid', 'uname'}, {'gid', 'gname'}):
+ with (self.subTest(attr_names=attr_names),
+ tarfile.open(tarname, encoding="iso8859-1") as tar):
+ tio_prev = io.TextIOWrapper(io.BytesIO(), 'ascii', newline='\n')
+ with support.swap_attr(sys, 'stdout', tio_prev):
+ tar.list()
+ for member in tar.getmembers():
+ for attr_name in attr_names:
+ setattr(member, attr_name, None)
+ tio_new = io.TextIOWrapper(io.BytesIO(), 'ascii', newline='\n')
+ with support.swap_attr(sys, 'stdout', tio_new):
+ tar.list()
+ for expected, got in zip(tio_prev.detach().getvalue().split(),
+ tio_new.detach().getvalue().split()):
+ if attr_names == {'mtime'} and re.match(rb'2003-01-\d\d', expected):
+ self.assertEqual(got, b'????-??-??')
+ elif attr_names == {'mtime'} and re.match(rb'\d\d:\d\d:\d\d', expected):
+ self.assertEqual(got, b'??:??:??')
+ elif attr_names == {'mode'} and re.match(
+ rb'.([r-][w-][x-]){3}', expected):
+ self.assertEqual(got, b'??????????')
+ elif attr_names == {'uname'} and expected.startswith(
+ (b'tarfile/', b'lars/', b'foo/')):
+ exp_user, exp_group = expected.split(b'/')
+ got_user, got_group = got.split(b'/')
+ self.assertEqual(got_group, exp_group)
+ self.assertRegex(got_user, b'[0-9]+')
+ elif attr_names == {'gname'} and expected.endswith(
+ (b'/tarfile', b'/users', b'/bar')):
+ exp_user, exp_group = expected.split(b'/')
+ got_user, got_group = got.split(b'/')
+ self.assertEqual(got_user, exp_user)
+ self.assertRegex(got_group, b'[0-9]+')
+ elif attr_names == {'uid'} and expected.startswith(
+ (b'1000/')):
+ exp_user, exp_group = expected.split(b'/')
+ got_user, got_group = got.split(b'/')
+ self.assertEqual(got_group, exp_group)
+ self.assertEqual(got_user, b'None')
+ elif attr_names == {'gid'} and expected.endswith((b'/100')):
+ exp_user, exp_group = expected.split(b'/')
+ got_user, got_group = got.split(b'/')
+ self.assertEqual(got_user, exp_user)
+ self.assertEqual(got_group, b'None')
+ elif attr_names == {'uid', 'uname'} and expected.startswith(
+ (b'tarfile/', b'lars/', b'foo/', b'1000/')):
+ exp_user, exp_group = expected.split(b'/')
+ got_user, got_group = got.split(b'/')
+ self.assertEqual(got_group, exp_group)
+ self.assertEqual(got_user, b'None')
+ elif attr_names == {'gname', 'gid'} and expected.endswith(
+ (b'/tarfile', b'/users', b'/bar', b'/100')):
+ exp_user, exp_group = expected.split(b'/')
+ got_user, got_group = got.split(b'/')
+ self.assertEqual(got_user, exp_user)
+ self.assertEqual(got_group, b'None')
+ else:
+ # In other cases the output should be the same
+ self.assertEqual(expected, got)
+
+def _filemode_to_int(mode):
+ """Inverse of `stat.filemode` (for permission bits)
+
+ Using mode strings rather than numbers makes the later tests more readable.
+ """
+ str_mode = mode[1:]
+ result = (
+ {'r': stat.S_IRUSR, '-': 0}[str_mode[0]]
+ | {'w': stat.S_IWUSR, '-': 0}[str_mode[1]]
+ | {'x': stat.S_IXUSR, '-': 0,
+ 's': stat.S_IXUSR | stat.S_ISUID,
+ 'S': stat.S_ISUID}[str_mode[2]]
+ | {'r': stat.S_IRGRP, '-': 0}[str_mode[3]]
+ | {'w': stat.S_IWGRP, '-': 0}[str_mode[4]]
+ | {'x': stat.S_IXGRP, '-': 0,
+ 's': stat.S_IXGRP | stat.S_ISGID,
+ 'S': stat.S_ISGID}[str_mode[5]]
+ | {'r': stat.S_IROTH, '-': 0}[str_mode[6]]
+ | {'w': stat.S_IWOTH, '-': 0}[str_mode[7]]
+ | {'x': stat.S_IXOTH, '-': 0,
+ 't': stat.S_IXOTH | stat.S_ISVTX,
+ 'T': stat.S_ISVTX}[str_mode[8]]
+ )
+ # check we did this right
+ assert stat.filemode(result)[1:] == mode[1:]
+
+ return result
+
+class ArchiveMaker:
+ """Helper to create a tar file with specific contents
+
+ Usage:
+
+ with ArchiveMaker() as t:
+ t.add('filename', ...)
+
+ with t.open() as tar:
+ ... # `tar` is now a TarFile with 'filename' in it!
+ """
+ def __init__(self):
+ self.bio = io.BytesIO()
+
+ def __enter__(self):
+ self.tar_w = tarfile.TarFile(mode='w', fileobj=self.bio)
+ return self
+
+ def __exit__(self, *exc):
+ self.tar_w.close()
+ self.contents = self.bio.getvalue()
+ self.bio = None
+
+ def add(self, name, *, type=None, symlink_to=None, hardlink_to=None,
+ mode=None, **kwargs):
+ """Add a member to the test archive. Call within `with`."""
+ name = str(name)
+ tarinfo = tarfile.TarInfo(name).replace(**kwargs)
+ if mode:
+ tarinfo.mode = _filemode_to_int(mode)
+ if symlink_to is not None:
+ type = tarfile.SYMTYPE
+ tarinfo.linkname = str(symlink_to)
+ if hardlink_to is not None:
+ type = tarfile.LNKTYPE
+ tarinfo.linkname = str(hardlink_to)
+ if name.endswith('/') and type is None:
+ type = tarfile.DIRTYPE
+ if type is not None:
+ tarinfo.type = type
+ if tarinfo.isreg():
+ fileobj = io.BytesIO(bytes(tarinfo.size))
+ else:
+ fileobj = None
+ self.tar_w.addfile(tarinfo, fileobj)
+
+ def open(self, **kwargs):
+ """Open the resulting archive as TarFile. Call after `with`."""
+ bio = io.BytesIO(self.contents)
+ return tarfile.open(fileobj=bio, **kwargs)
+
+# Under WASI, `os_helper.can_symlink` is False to make
+# `skip_unless_symlink` skip symlink tests.
+# But in the following tests we use can_symlink to *determine* which
+# behavior is expected.
+# Like other symlink tests, skip these on WASI for now.
+if support.is_wasi:
+ def symlink_test(f):
+ return unittest.skip("WASI: Skip symlink test for now")(f)
+else:
+ def symlink_test(f):
+ return f
+
+
+class TestExtractionFilters(unittest.TestCase):
+
+ # A temporary directory for the extraction results.
+ # All files that "escape" the destination path should still end
+ # up in this directory.
+ outerdir = pathlib.Path(TEMPDIR) / 'outerdir'
+
+ # The destination for the extraction, within `outerdir`
+ destdir = outerdir / 'dest'
+
+ @contextmanager
+ def check_context(self, tar, filter):
+ """Extracts `tar` to `self.destdir` and allows checking the result
+
+ If an error occurs, it must be checked using `expect_exception`
+
+ Otherwise, all resulting files must be checked using `expect_file`,
+ except the destination directory itself and parent directories of
+ other files.
+ When checking directories, do so before their contents.
+ """
+ with os_helper.temp_dir(self.outerdir):
+ try:
+ tar.extractall(self.destdir, filter=filter)
+ except Exception as exc:
+ self.raised_exception = exc
+ self.expected_paths = set()
+ else:
+ self.raised_exception = None
+ self.expected_paths = set(self.outerdir.glob('**/*'))
+ self.expected_paths.discard(self.destdir)
+ try:
+ yield
+ finally:
+ tar.close()
+ if self.raised_exception:
+ raise self.raised_exception
+ self.assertEqual(self.expected_paths, set())
+
+ def expect_file(self, name, type=None, symlink_to=None, mode=None):
+ """Check a single file. See check_context."""
+ if self.raised_exception:
+ raise self.raised_exception
+ # use normpath() rather than resolve() so we don't follow symlinks
+ path = pathlib.Path(os.path.normpath(self.destdir / name))
+ self.assertIn(path, self.expected_paths)
+ self.expected_paths.remove(path)
+ if mode is not None and os_helper.can_chmod():
+ got = stat.filemode(stat.S_IMODE(path.stat().st_mode))
+ self.assertEqual(got, mode)
+ if type is None and isinstance(name, str) and name.endswith('/'):
+ type = tarfile.DIRTYPE
+ if symlink_to is not None:
+ got = (self.destdir / name).readlink()
+ expected = pathlib.Path(symlink_to)
+ # The symlink might be the same (textually) as what we expect,
+ # but some systems change the link to an equivalent path, so
+ # we fall back to samefile().
+ if expected != got:
+ self.assertTrue(got.samefile(expected))
+ elif type == tarfile.REGTYPE or type is None:
+ self.assertTrue(path.is_file())
+ elif type == tarfile.DIRTYPE:
+ self.assertTrue(path.is_dir())
+ elif type == tarfile.FIFOTYPE:
+ self.assertTrue(path.is_fifo())
+ else:
+ raise NotImplementedError(type)
+ for parent in path.parents:
+ self.expected_paths.discard(parent)
+
+ def expect_exception(self, exc_type, message_re='.'):
+ with self.assertRaisesRegex(exc_type, message_re):
+ if self.raised_exception is not None:
+ raise self.raised_exception
+ self.raised_exception = None
+
+ def test_benign_file(self):
+ with ArchiveMaker() as arc:
+ arc.add('benign.txt')
+ for filter in 'fully_trusted', 'tar', 'data':
+ with self.check_context(arc.open(), filter):
+ self.expect_file('benign.txt')
+
+ def test_absolute(self):
+ # Test handling a member with an absolute path
+ # Inspired by 'absolute1' in https://github.com/jwilk/traversal-archives
+ with ArchiveMaker() as arc:
+ arc.add(self.outerdir / 'escaped.evil')
+
+ with self.check_context(arc.open(), 'fully_trusted'):
+ self.expect_file('../escaped.evil')
+
+ for filter in 'tar', 'data':
+ with self.check_context(arc.open(), filter):
+ if str(self.outerdir).startswith('/'):
+ # We strip leading slashes, as e.g. GNU tar does
+ # (without --absolute-names).
+ outerdir_stripped = str(self.outerdir).lstrip('/')
+ self.expect_file(f'{outerdir_stripped}/escaped.evil')
+ else:
+ # On this system, absolute paths don't have leading
+ # slashes.
+ # So, there's nothing to strip. We refuse to unpack
+ # to an absolute path, nonetheless.
+ self.expect_exception(
+ tarfile.AbsolutePathError,
+ """['"].*escaped.evil['"] has an absolute path""")
+
+ @symlink_test
+ def test_parent_symlink(self):
+ # Test interplaying symlinks
+ # Inspired by 'dirsymlink2a' in jwilk/traversal-archives
+ with ArchiveMaker() as arc:
+ arc.add('current', symlink_to='.')
+ arc.add('parent', symlink_to='current/..')
+ arc.add('parent/evil')
+
+ if os_helper.can_symlink():
+ with self.check_context(arc.open(), 'fully_trusted'):
+ if self.raised_exception is not None:
+ # Windows will refuse to create a file that's a symlink to itself
+ # (and tarfile doesn't swallow that exception)
+ self.expect_exception(FileExistsError)
+ # The other cases will fail with this error too.
+ # Skip the rest of this test.
+ return
+ else:
+ self.expect_file('current', symlink_to='.')
+ self.expect_file('parent', symlink_to='current/..')
+ self.expect_file('../evil')
+
+ with self.check_context(arc.open(), 'tar'):
+ self.expect_exception(
+ tarfile.OutsideDestinationError,
+ """'parent/evil' would be extracted to ['"].*evil['"], """
+ + "which is outside the destination")
+
+ with self.check_context(arc.open(), 'data'):
+ self.expect_exception(
+ tarfile.LinkOutsideDestinationError,
+ """'parent' would link to ['"].*outerdir['"], """
+ + "which is outside the destination")
+
+ else:
+ # No symlink support. The symlinks are ignored.
+ with self.check_context(arc.open(), 'fully_trusted'):
+ self.expect_file('parent/evil')
+ with self.check_context(arc.open(), 'tar'):
+ self.expect_file('parent/evil')
+ with self.check_context(arc.open(), 'data'):
+ self.expect_file('parent/evil')
+
+ @symlink_test
+ def test_parent_symlink2(self):
+ # Test interplaying symlinks
+ # Inspired by 'dirsymlink2b' in jwilk/traversal-archives
+ with ArchiveMaker() as arc:
+ arc.add('current', symlink_to='.')
+ arc.add('current/parent', symlink_to='..')
+ arc.add('parent/evil')
+
+ with self.check_context(arc.open(), 'fully_trusted'):
+ if os_helper.can_symlink():
+ self.expect_file('current', symlink_to='.')
+ self.expect_file('parent', symlink_to='..')
+ self.expect_file('../evil')
+ else:
+ self.expect_file('current/')
+ self.expect_file('parent/evil')
+
+ with self.check_context(arc.open(), 'tar'):
+ if os_helper.can_symlink():
+ self.expect_exception(
+ tarfile.OutsideDestinationError,
+ "'parent/evil' would be extracted to "
+ + """['"].*evil['"], which is outside """
+ + "the destination")
+ else:
+ self.expect_file('current/')
+ self.expect_file('parent/evil')
+
+ with self.check_context(arc.open(), 'data'):
+ self.expect_exception(
+ tarfile.LinkOutsideDestinationError,
+ """'current/parent' would link to ['"].*['"], """
+ + "which is outside the destination")
+
+ @symlink_test
+ def test_absolute_symlink(self):
+ # Test symlink to an absolute path
+ # Inspired by 'dirsymlink' in jwilk/traversal-archives
+ with ArchiveMaker() as arc:
+ arc.add('parent', symlink_to=self.outerdir)
+ arc.add('parent/evil')
+
+ with self.check_context(arc.open(), 'fully_trusted'):
+ if os_helper.can_symlink():
+ self.expect_file('parent', symlink_to=self.outerdir)
+ self.expect_file('../evil')
+ else:
+ self.expect_file('parent/evil')
+
+ with self.check_context(arc.open(), 'tar'):
+ if os_helper.can_symlink():
+ self.expect_exception(
+ tarfile.OutsideDestinationError,
+ "'parent/evil' would be extracted to "
+ + """['"].*evil['"], which is outside """
+ + "the destination")
+ else:
+ self.expect_file('parent/evil')
+
+ with self.check_context(arc.open(), 'data'):
+ self.expect_exception(
+ tarfile.AbsoluteLinkError,
+ "'parent' is a symlink to an absolute path")
+
+ @symlink_test
+ def test_sly_relative0(self):
+ # Inspired by 'relative0' in jwilk/traversal-archives
+ with ArchiveMaker() as arc:
+ arc.add('../moo', symlink_to='..//tmp/moo')
+
+ try:
+ with self.check_context(arc.open(), filter='fully_trusted'):
+ if os_helper.can_symlink():
+ if isinstance(self.raised_exception, FileExistsError):
+ # XXX TarFile happens to fail creating a parent
+ # directory.
+ # This might be a bug, but fixing it would hurt
+ # security.
+ # Note that e.g. GNU `tar` rejects '..' components,
+ # so you could argue this is an invalid archive and we
+ # just raise the wrong type of exception.
+ self.expect_exception(FileExistsError)
+ else:
+ self.expect_file('../moo', symlink_to='..//tmp/moo')
+ else:
+ # The symlink can't be extracted and is ignored
+ pass
+ except FileExistsError:
+ pass
+
+ for filter in 'tar', 'data':
+ with self.check_context(arc.open(), filter):
+ self.expect_exception(
+ tarfile.OutsideDestinationError,
+ "'../moo' would be extracted to "
+ + "'.*moo', which is outside "
+ + "the destination")
+
+ @symlink_test
+ def test_sly_relative2(self):
+ # Inspired by 'relative2' in jwilk/traversal-archives
+ with ArchiveMaker() as arc:
+ arc.add('tmp/')
+ arc.add('tmp/../../moo', symlink_to='tmp/../..//tmp/moo')
+
+ with self.check_context(arc.open(), 'fully_trusted'):
+ self.expect_file('tmp', type=tarfile.DIRTYPE)
+ if os_helper.can_symlink():
+ self.expect_file('../moo', symlink_to='tmp/../../tmp/moo')
+
+ for filter in 'tar', 'data':
+ with self.check_context(arc.open(), filter):
+ self.expect_exception(
+ tarfile.OutsideDestinationError,
+ "'tmp/../../moo' would be extracted to "
+ + """['"].*moo['"], which is outside the """
+ + "destination")
+
+ def test_modes(self):
+ # Test how file modes are extracted
+ # (Note that the modes are ignored on platforms without working chmod)
+ with ArchiveMaker() as arc:
+ arc.add('all_bits', mode='?rwsrwsrwt')
+ arc.add('perm_bits', mode='?rwxrwxrwx')
+ arc.add('exec_group_other', mode='?rw-rwxrwx')
+ arc.add('read_group_only', mode='?---r-----')
+ arc.add('no_bits', mode='?---------')
+ arc.add('dir/', mode='?---rwsrwt', type=tarfile.DIRTYPE)
+
+ with self.check_context(arc.open(), 'fully_trusted'):
+ self.expect_file('all_bits', mode='?rwsrwsrwt')
+ self.expect_file('perm_bits', mode='?rwxrwxrwx')
+ self.expect_file('exec_group_other', mode='?rw-rwxrwx')
+ self.expect_file('read_group_only', mode='?---r-----')
+ self.expect_file('no_bits', mode='?---------')
+ self.expect_file('dir', type=tarfile.DIRTYPE, mode='?---rwsrwt')
+
+ with self.check_context(arc.open(), 'tar'):
+ self.expect_file('all_bits', mode='?rwxr-xr-x')
+ self.expect_file('perm_bits', mode='?rwxr-xr-x')
+ self.expect_file('exec_group_other', mode='?rw-r-xr-x')
+ self.expect_file('read_group_only', mode='?---r-----')
+ self.expect_file('no_bits', mode='?---------')
+ self.expect_file('dir/', type=tarfile.DIRTYPE, mode='?---r-xr-x')
+
+ with self.check_context(arc.open(), 'data'):
+ normal_dir_mode = stat.filemode(stat.S_IMODE(
+ self.outerdir.stat().st_mode))
+ self.expect_file('all_bits', mode='?rwxr-xr-x')
+ self.expect_file('perm_bits', mode='?rwxr-xr-x')
+ self.expect_file('exec_group_other', mode='?rw-r--r--')
+ self.expect_file('read_group_only', mode='?rw-r-----')
+ self.expect_file('no_bits', mode='?rw-------')
+ self.expect_file('dir/', type=tarfile.DIRTYPE, mode=normal_dir_mode)
+
+ def test_pipe(self):
+ # Test handling of a special file
+ with ArchiveMaker() as arc:
+ arc.add('foo', type=tarfile.FIFOTYPE)
+
+ for filter in 'fully_trusted', 'tar':
+ with self.check_context(arc.open(), filter):
+ if hasattr(os, 'mkfifo'):
+ self.expect_file('foo', type=tarfile.FIFOTYPE)
+ else:
+ # The pipe can't be extracted and is skipped.
+ pass
+
+ with self.check_context(arc.open(), 'data'):
+ self.expect_exception(
+ tarfile.SpecialFileError,
+ "'foo' is a special file")
+
+ def test_special_files(self):
+ # Creating device files is tricky. Instead of attempting that, let's
+ # only check the filter result.
+ for special_type in tarfile.FIFOTYPE, tarfile.CHRTYPE, tarfile.BLKTYPE:
+ tarinfo = tarfile.TarInfo('foo')
+ tarinfo.type = special_type
+ trusted = tarfile.fully_trusted_filter(tarinfo, '')
+ self.assertIs(trusted, tarinfo)
+ tar = tarfile.tar_filter(tarinfo, '')
+ self.assertEqual(tar.type, special_type)
+ with self.assertRaises(tarfile.SpecialFileError) as cm:
+ tarfile.data_filter(tarinfo, '')
+ self.assertIsInstance(cm.exception.tarinfo, tarfile.TarInfo)
+ self.assertEqual(cm.exception.tarinfo.name, 'foo')
+
+ def test_fully_trusted_filter(self):
+ # The 'fully_trusted' filter returns the original TarInfo objects.
+ with tarfile.TarFile.open(tarname) as tar:
+ for tarinfo in tar.getmembers():
+ filtered = tarfile.fully_trusted_filter(tarinfo, '')
+ self.assertIs(filtered, tarinfo)
+
+ def test_tar_filter(self):
+ # The 'tar' filter returns TarInfo objects with the same name/type.
+ # (It can also fail for particularly "evil" input, but we don't have
+ # that in the test archive.)
+ with tarfile.TarFile.open(tarname) as tar:
+ for tarinfo in tar.getmembers():
+ filtered = tarfile.tar_filter(tarinfo, '')
+ self.assertIs(filtered.name, tarinfo.name)
+ self.assertIs(filtered.type, tarinfo.type)
+
+ def test_data_filter(self):
+ # The 'data' filter either raises, or returns TarInfo with the same
+ # name/type.
+ with tarfile.TarFile.open(tarname) as tar:
+ for tarinfo in tar.getmembers():
+ try:
+ filtered = tarfile.data_filter(tarinfo, '')
+ except tarfile.FilterError:
+ continue
+ self.assertIs(filtered.name, tarinfo.name)
+ self.assertIs(filtered.type, tarinfo.type)
+
+ def test_default_filter_warns(self):
+ """Ensure the default filter warns"""
+ with ArchiveMaker() as arc:
+ arc.add('foo')
+ with warnings_helper.check_warnings(
+ ('Python 3.14', DeprecationWarning)):
+ with self.check_context(arc.open(), None):
+ self.expect_file('foo')
+
+ def test_change_default_filter_on_instance(self):
+ tar = tarfile.TarFile(tarname, 'r')
+ def strict_filter(tarinfo, path):
+ if tarinfo.name == 'ustar/regtype':
+ return tarinfo
+ else:
+ return None
+ tar.extraction_filter = strict_filter
+ with self.check_context(tar, None):
+ self.expect_file('ustar/regtype')
+
+ def test_change_default_filter_on_class(self):
+ def strict_filter(tarinfo, path):
+ if tarinfo.name == 'ustar/regtype':
+ return tarinfo
+ else:
+ return None
+ tar = tarfile.TarFile(tarname, 'r')
+ with support.swap_attr(tarfile.TarFile, 'extraction_filter',
+ staticmethod(strict_filter)):
+ with self.check_context(tar, None):
+ self.expect_file('ustar/regtype')
+
+ def test_change_default_filter_on_subclass(self):
+ class TarSubclass(tarfile.TarFile):
+ def extraction_filter(self, tarinfo, path):
+ if tarinfo.name == 'ustar/regtype':
+ return tarinfo
+ else:
+ return None
+
+ tar = TarSubclass(tarname, 'r')
+ with self.check_context(tar, None):
+ self.expect_file('ustar/regtype')
+
+ def test_change_default_filter_to_string(self):
+ tar = tarfile.TarFile(tarname, 'r')
+ tar.extraction_filter = 'data'
+ with self.check_context(tar, None):
+ self.expect_exception(TypeError)
+
+ def test_custom_filter(self):
+ def custom_filter(tarinfo, path):
+ self.assertIs(path, self.destdir)
+ if tarinfo.name == 'move_this':
+ return tarinfo.replace(name='moved')
+ if tarinfo.name == 'ignore_this':
+ return None
+ return tarinfo
+
+ with ArchiveMaker() as arc:
+ arc.add('move_this')
+ arc.add('ignore_this')
+ arc.add('keep')
+ with self.check_context(arc.open(), custom_filter):
+ self.expect_file('moved')
+ self.expect_file('keep')
+
+ def test_bad_filter_name(self):
+ with ArchiveMaker() as arc:
+ arc.add('foo')
+ with self.check_context(arc.open(), 'bad filter name'):
+ self.expect_exception(ValueError)
+
+ def test_stateful_filter(self):
+ # Stateful filters should be possible.
+ # (This doesn't really test tarfile. Rather, it demonstrates
+ # that third parties can implement a stateful filter.)
+ class StatefulFilter:
+ def __enter__(self):
+ self.num_files_processed = 0
+ return self
+
+ def __call__(self, tarinfo, path):
+ try:
+ tarinfo = tarfile.data_filter(tarinfo, path)
+ except tarfile.FilterError:
+ return None
+ self.num_files_processed += 1
+ return tarinfo
+
+ def __exit__(self, *exc_info):
+ self.done = True
+
+ with ArchiveMaker() as arc:
+ arc.add('good')
+ arc.add('bad', symlink_to='/')
+ arc.add('good')
+ with StatefulFilter() as custom_filter:
+ with self.check_context(arc.open(), custom_filter):
+ self.expect_file('good')
+ self.assertEqual(custom_filter.num_files_processed, 2)
+ self.assertEqual(custom_filter.done, True)
+
+ def test_errorlevel(self):
+ def extracterror_filter(tarinfo, path):
+ raise tarfile.ExtractError('failed with ExtractError')
+ def filtererror_filter(tarinfo, path):
+ raise tarfile.FilterError('failed with FilterError')
+ def oserror_filter(tarinfo, path):
+ raise OSError('failed with OSError')
+ def tarerror_filter(tarinfo, path):
+ raise tarfile.TarError('failed with base TarError')
+ def valueerror_filter(tarinfo, path):
+ raise ValueError('failed with ValueError')
+
+ with ArchiveMaker() as arc:
+ arc.add('file')
+
+ # If errorlevel is 0, errors affected by errorlevel are ignored
+
+ with self.check_context(arc.open(errorlevel=0), extracterror_filter):
+ self.expect_file('file')
+
+ with self.check_context(arc.open(errorlevel=0), filtererror_filter):
+ self.expect_file('file')
+
+ with self.check_context(arc.open(errorlevel=0), oserror_filter):
+ self.expect_file('file')
+
+ with self.check_context(arc.open(errorlevel=0), tarerror_filter):
+ self.expect_exception(tarfile.TarError)
+
+ with self.check_context(arc.open(errorlevel=0), valueerror_filter):
+ self.expect_exception(ValueError)
+
+ # If 1, all fatal errors are raised
+
+ with self.check_context(arc.open(errorlevel=1), extracterror_filter):
+ self.expect_file('file')
+
+ with self.check_context(arc.open(errorlevel=1), filtererror_filter):
+ self.expect_exception(tarfile.FilterError)
+
+ with self.check_context(arc.open(errorlevel=1), oserror_filter):
+ self.expect_exception(OSError)
+
+ with self.check_context(arc.open(errorlevel=1), tarerror_filter):
+ self.expect_exception(tarfile.TarError)
+
+ with self.check_context(arc.open(errorlevel=1), valueerror_filter):
+ self.expect_exception(ValueError)
+
+ # If 2, all non-fatal errors are raised as well.
+
+ with self.check_context(arc.open(errorlevel=2), extracterror_filter):
+ self.expect_exception(tarfile.ExtractError)
+
+ with self.check_context(arc.open(errorlevel=2), filtererror_filter):
+ self.expect_exception(tarfile.FilterError)
+
+ with self.check_context(arc.open(errorlevel=2), oserror_filter):
+ self.expect_exception(OSError)
+
+ with self.check_context(arc.open(errorlevel=2), tarerror_filter):
+ self.expect_exception(tarfile.TarError)
+
+ with self.check_context(arc.open(errorlevel=2), valueerror_filter):
+ self.expect_exception(ValueError)
+
+ # We only handle ExtractError, FilterError & OSError specially.
+
+ with self.check_context(arc.open(errorlevel='boo!'), filtererror_filter):
+ self.expect_exception(TypeError) # errorlevel is not int
+
+
def setUpModule():
os_helper.unlink(TEMPDIR)
os.makedirs(TEMPDIR)
diff --git a/Lib/test/test_tkinter/test_widgets.py b/Lib/test/test_tkinter/test_widgets.py
index 64c9472706549b..ba4ef49078c5a7 100644
--- a/Lib/test/test_tkinter/test_widgets.py
+++ b/Lib/test/test_tkinter/test_widgets.py
@@ -1377,6 +1377,11 @@ class MenuTest(AbstractWidgetTest, unittest.TestCase):
def create(self, **kwargs):
return tkinter.Menu(self.root, **kwargs)
+ def test_indexcommand_none(self):
+ widget = self.create()
+ i = widget.index('none')
+ self.assertIsNone(i)
+
def test_configure_postcommand(self):
widget = self.create()
self.checkCommandParam(widget, 'postcommand')
diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index 63c2501cfe2338..283a7c23609e67 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -1625,6 +1625,10 @@ def test_random_files(self):
# 7 more testfiles fail. Remove them also until the failure is diagnosed.
testfiles.remove(os.path.join(tempdir, "test_unicode_identifiers.py"))
+
+ # TODO: Remove this once we can unparse PEP 701 syntax
+ testfiles.remove(os.path.join(tempdir, "test_fstring.py"))
+
for f in ('buffer', 'builtin', 'fileio', 'inspect', 'os', 'platform', 'sys'):
testfiles.remove(os.path.join(tempdir, "test_%s.py") % f)
@@ -1937,25 +1941,39 @@ def test_string(self):
""")
self.check_tokenize('f"abc"', """\
- STRING 'f"abc"' (1, 0) (1, 6)
+ FSTRING_START 'f"' (1, 0) (1, 2)
+ FSTRING_MIDDLE 'abc' (1, 2) (1, 5)
+ FSTRING_END '"' (1, 5) (1, 6)
""")
self.check_tokenize('fR"a{b}c"', """\
- STRING 'fR"a{b}c"' (1, 0) (1, 9)
+ FSTRING_START 'fR"' (1, 0) (1, 3)
+ FSTRING_MIDDLE 'a' (1, 3) (1, 4)
+ LBRACE '{' (1, 4) (1, 5)
+ NAME 'b' (1, 5) (1, 6)
+ RBRACE '}' (1, 6) (1, 7)
+ FSTRING_MIDDLE 'c' (1, 7) (1, 8)
+ FSTRING_END '"' (1, 8) (1, 9)
""")
self.check_tokenize('f"""abc"""', """\
- STRING 'f\"\"\"abc\"\"\"' (1, 0) (1, 10)
+ FSTRING_START 'f\"""' (1, 0) (1, 4)
+ FSTRING_MIDDLE 'abc' (1, 4) (1, 7)
+ FSTRING_END '\"""' (1, 7) (1, 10)
""")
self.check_tokenize(r'f"abc\
def"', """\
- STRING 'f"abc\\\\\\ndef"' (1, 0) (2, 4)
+ FSTRING_START \'f"\' (1, 0) (1, 2)
+ FSTRING_MIDDLE 'abc\\\\\\ndef' (1, 2) (2, 3)
+ FSTRING_END '"' (2, 3) (2, 4)
""")
self.check_tokenize(r'Rf"abc\
def"', """\
- STRING 'Rf"abc\\\\\\ndef"' (1, 0) (2, 4)
+ FSTRING_START 'Rf"' (1, 0) (1, 3)
+ FSTRING_MIDDLE 'abc\\\\\\ndef' (1, 3) (2, 3)
+ FSTRING_END '"' (2, 3) (2, 4)
""")
def test_function(self):
diff --git a/Lib/test/test_type_cache.py b/Lib/test/test_type_cache.py
index 8502f6b0584b00..24f83cd3e172c7 100644
--- a/Lib/test/test_type_cache.py
+++ b/Lib/test/test_type_cache.py
@@ -9,6 +9,7 @@
# Skip this test if the _testcapi module isn't available.
type_get_version = import_helper.import_module('_testcapi').type_get_version
+type_assign_version = import_helper.import_module('_testcapi').type_assign_version
@support.cpython_only
@@ -42,6 +43,19 @@ def test_tp_version_tag_unique(self):
self.assertEqual(len(set(all_version_tags)), 30,
msg=f"{all_version_tags} contains non-unique versions")
+ def test_type_assign_version(self):
+ class C:
+ x = 5
+
+ self.assertEqual(type_assign_version(C), 1)
+ c_ver = type_get_version(C)
+
+ C.x = 6
+ self.assertEqual(type_get_version(C), 0)
+ self.assertEqual(type_assign_version(C), 1)
+ self.assertNotEqual(type_get_version(C), 0)
+ self.assertNotEqual(type_get_version(C), c_ver)
+
if __name__ == "__main__":
support.run_unittest(TypeCacheTests)
diff --git a/Lib/test/test_type_comments.py b/Lib/test/test_type_comments.py
index 8db7394d1512aa..aba4a44be9da96 100644
--- a/Lib/test/test_type_comments.py
+++ b/Lib/test/test_type_comments.py
@@ -272,7 +272,7 @@ def test_matmul(self):
pass
def test_fstring(self):
- for tree in self.parse_all(fstring, minver=6):
+ for tree in self.parse_all(fstring):
pass
def test_underscorednumber(self):
diff --git a/Lib/test/test_types.py b/Lib/test/test_types.py
index af095632a36fcb..89548100da62d7 100644
--- a/Lib/test/test_types.py
+++ b/Lib/test/test_types.py
@@ -925,6 +925,35 @@ def test_or_type_operator_with_SpecialForm(self):
assert typing.Optional[int] | str == typing.Union[int, str, None]
assert typing.Union[int, bool] | str == typing.Union[int, bool, str]
+ def test_or_type_operator_with_Literal(self):
+ Literal = typing.Literal
+ self.assertEqual((Literal[1] | Literal[2]).__args__,
+ (Literal[1], Literal[2]))
+
+ self.assertEqual((Literal[0] | Literal[False]).__args__,
+ (Literal[0], Literal[False]))
+ self.assertEqual((Literal[1] | Literal[True]).__args__,
+ (Literal[1], Literal[True]))
+
+ self.assertEqual(Literal[1] | Literal[1], Literal[1])
+ self.assertEqual(Literal['a'] | Literal['a'], Literal['a'])
+
+ import enum
+ class Ints(enum.IntEnum):
+ A = 0
+ B = 1
+
+ self.assertEqual(Literal[Ints.A] | Literal[Ints.A], Literal[Ints.A])
+ self.assertEqual(Literal[Ints.B] | Literal[Ints.B], Literal[Ints.B])
+
+ self.assertEqual((Literal[Ints.B] | Literal[Ints.A]).__args__,
+ (Literal[Ints.B], Literal[Ints.A]))
+
+ self.assertEqual((Literal[0] | Literal[Ints.A]).__args__,
+ (Literal[0], Literal[Ints.A]))
+ self.assertEqual((Literal[1] | Literal[Ints.B]).__args__,
+ (Literal[1], Literal[Ints.B]))
+
def test_or_type_repr(self):
assert repr(int | str) == "int | str"
assert repr((int | str) | list) == "int | str | list"
@@ -1360,6 +1389,67 @@ class C: pass
D = types.new_class('D', (A(), C, B()), {})
self.assertEqual(D.__bases__, (A1, A2, A3, C, B1, B2))
+ def test_get_original_bases(self):
+ T = typing.TypeVar('T')
+ class A: pass
+ class B(typing.Generic[T]): pass
+ class C(B[int]): pass
+ class D(B[str], float): pass
+ self.assertEqual(types.get_original_bases(A), (object,))
+ self.assertEqual(types.get_original_bases(B), (typing.Generic[T],))
+ self.assertEqual(types.get_original_bases(C), (B[int],))
+ self.assertEqual(types.get_original_bases(int), (object,))
+ self.assertEqual(types.get_original_bases(D), (B[str], float))
+
+ class E(list[T]): pass
+ class F(list[int]): pass
+
+ self.assertEqual(types.get_original_bases(E), (list[T],))
+ self.assertEqual(types.get_original_bases(F), (list[int],))
+
+ class ClassBasedNamedTuple(typing.NamedTuple):
+ x: int
+
+ class GenericNamedTuple(typing.NamedTuple, typing.Generic[T]):
+ x: T
+
+ CallBasedNamedTuple = typing.NamedTuple("CallBasedNamedTuple", [("x", int)])
+
+ self.assertIs(
+ types.get_original_bases(ClassBasedNamedTuple)[0], typing.NamedTuple
+ )
+ self.assertEqual(
+ types.get_original_bases(GenericNamedTuple),
+ (typing.NamedTuple, typing.Generic[T])
+ )
+ self.assertIs(
+ types.get_original_bases(CallBasedNamedTuple)[0], typing.NamedTuple
+ )
+
+ class ClassBasedTypedDict(typing.TypedDict):
+ x: int
+
+ class GenericTypedDict(typing.TypedDict, typing.Generic[T]):
+ x: T
+
+ CallBasedTypedDict = typing.TypedDict("CallBasedTypedDict", {"x": int})
+
+ self.assertIs(
+ types.get_original_bases(ClassBasedTypedDict)[0],
+ typing.TypedDict
+ )
+ self.assertEqual(
+ types.get_original_bases(GenericTypedDict),
+ (typing.TypedDict, typing.Generic[T])
+ )
+ self.assertIs(
+ types.get_original_bases(CallBasedTypedDict)[0],
+ typing.TypedDict
+ )
+
+ with self.assertRaisesRegex(TypeError, "Expected an instance of type"):
+ types.get_original_bases(object())
+
# Many of the following tests are derived from test_descr.py
def test_prepare_class(self):
# Basic test of metaclass derivation
diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py
index f983efe956f902..f36bb958c88ef9 100644
--- a/Lib/test/test_typing.py
+++ b/Lib/test/test_typing.py
@@ -1789,6 +1789,35 @@ def Elem(*args):
Union[Elem, str] # Nor should this
+ def test_union_of_literals(self):
+ self.assertEqual(Union[Literal[1], Literal[2]].__args__,
+ (Literal[1], Literal[2]))
+ self.assertEqual(Union[Literal[1], Literal[1]],
+ Literal[1])
+
+ self.assertEqual(Union[Literal[False], Literal[0]].__args__,
+ (Literal[False], Literal[0]))
+ self.assertEqual(Union[Literal[True], Literal[1]].__args__,
+ (Literal[True], Literal[1]))
+
+ import enum
+ class Ints(enum.IntEnum):
+ A = 0
+ B = 1
+
+ self.assertEqual(Union[Literal[Ints.A], Literal[Ints.A]],
+ Literal[Ints.A])
+ self.assertEqual(Union[Literal[Ints.B], Literal[Ints.B]],
+ Literal[Ints.B])
+
+ self.assertEqual(Union[Literal[Ints.A], Literal[Ints.B]].__args__,
+ (Literal[Ints.A], Literal[Ints.B]))
+
+ self.assertEqual(Union[Literal[0], Literal[Ints.A], Literal[False]].__args__,
+ (Literal[0], Literal[Ints.A], Literal[False]))
+ self.assertEqual(Union[Literal[1], Literal[Ints.B], Literal[True]].__args__,
+ (Literal[1], Literal[Ints.B], Literal[True]))
+
class TupleTests(BaseTestCase):
@@ -2156,6 +2185,13 @@ def test_basics(self):
Literal[Literal[1, 2], Literal[4, 5]]
Literal[b"foo", u"bar"]
+ def test_enum(self):
+ import enum
+ class My(enum.Enum):
+ A = 'A'
+
+ self.assertEqual(Literal[My.A].__args__, (My.A,))
+
def test_illegal_parameters_do_not_raise_runtime_errors(self):
# Type checkers should reject these types, but we do not
# raise errors at runtime to maintain maximum flexibility.
@@ -2245,6 +2281,20 @@ def test_flatten(self):
self.assertEqual(l, Literal[1, 2, 3])
self.assertEqual(l.__args__, (1, 2, 3))
+ def test_does_not_flatten_enum(self):
+ import enum
+ class Ints(enum.IntEnum):
+ A = 1
+ B = 2
+
+ l = Literal[
+ Literal[Ints.A],
+ Literal[Ints.B],
+ Literal[1],
+ Literal[2],
+ ]
+ self.assertEqual(l.__args__, (Ints.A, Ints.B, 1, 2))
+
XK = TypeVar('XK', str, bytes)
XV = TypeVar('XV')
@@ -6695,6 +6745,22 @@ def test_copy_and_pickle(self):
self.assertEqual(jane2, jane)
self.assertIsInstance(jane2, cls)
+ def test_orig_bases(self):
+ T = TypeVar('T')
+
+ class SimpleNamedTuple(NamedTuple):
+ pass
+
+ class GenericNamedTuple(NamedTuple, Generic[T]):
+ pass
+
+ self.assertEqual(SimpleNamedTuple.__orig_bases__, (NamedTuple,))
+ self.assertEqual(GenericNamedTuple.__orig_bases__, (NamedTuple, Generic[T]))
+
+ CallNamedTuple = NamedTuple('CallNamedTuple', [])
+
+ self.assertEqual(CallNamedTuple.__orig_bases__, (NamedTuple,))
+
class TypedDictTests(BaseTestCase):
def test_basics_functional_syntax(self):
@@ -7126,6 +7192,49 @@ class TD(TypedDict):
self.assertIs(type(a), dict)
self.assertEqual(a, {'a': 1})
+ def test_orig_bases(self):
+ T = TypeVar('T')
+
+ class Parent(TypedDict):
+ pass
+
+ class Child(Parent):
+ pass
+
+ class OtherChild(Parent):
+ pass
+
+ class MixedChild(Child, OtherChild, Parent):
+ pass
+
+ class GenericParent(TypedDict, Generic[T]):
+ pass
+
+ class GenericChild(GenericParent[int]):
+ pass
+
+ class OtherGenericChild(GenericParent[str]):
+ pass
+
+ class MixedGenericChild(GenericChild, OtherGenericChild, GenericParent[float]):
+ pass
+
+ class MultipleGenericBases(GenericParent[int], GenericParent[float]):
+ pass
+
+ CallTypedDict = TypedDict('CallTypedDict', {})
+
+ self.assertEqual(Parent.__orig_bases__, (TypedDict,))
+ self.assertEqual(Child.__orig_bases__, (Parent,))
+ self.assertEqual(OtherChild.__orig_bases__, (Parent,))
+ self.assertEqual(MixedChild.__orig_bases__, (Child, OtherChild, Parent,))
+ self.assertEqual(GenericParent.__orig_bases__, (TypedDict, Generic[T]))
+ self.assertEqual(GenericChild.__orig_bases__, (GenericParent[int],))
+ self.assertEqual(OtherGenericChild.__orig_bases__, (GenericParent[str],))
+ self.assertEqual(MixedGenericChild.__orig_bases__, (GenericChild, OtherGenericChild, GenericParent[float]))
+ self.assertEqual(MultipleGenericBases.__orig_bases__, (GenericParent[int], GenericParent[float]))
+ self.assertEqual(CallTypedDict.__orig_bases__, (TypedDict,))
+
class RequiredTests(BaseTestCase):
diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py
index 633d596ac3de3f..b7c6f6dd8f1b99 100644
--- a/Lib/test/test_urllib2.py
+++ b/Lib/test/test_urllib2.py
@@ -3,6 +3,7 @@
from test.support import os_helper
from test.support import warnings_helper
from test import test_urllib
+from unittest import mock
import os
import io
@@ -484,7 +485,18 @@ def build_test_opener(*handler_instances):
return opener
-class MockHTTPHandler(urllib.request.BaseHandler):
+class MockHTTPHandler(urllib.request.HTTPHandler):
+ # Very simple mock HTTP handler with no special behavior other than using a mock HTTP connection
+
+ def __init__(self, debuglevel=None):
+ super(MockHTTPHandler, self).__init__(debuglevel=debuglevel)
+ self.httpconn = MockHTTPClass()
+
+ def http_open(self, req):
+ return self.do_open(self.httpconn, req)
+
+
+class MockHTTPHandlerRedirect(urllib.request.BaseHandler):
# useful for testing redirections and auth
# sends supplied headers and code as first response
# sends 200 OK as second response
@@ -512,12 +524,12 @@ def http_open(self, req):
return MockResponse(200, "OK", msg, "", req.get_full_url())
-class MockHTTPSHandler(urllib.request.AbstractHTTPHandler):
+class MockHTTPSHandler(urllib.request.HTTPSHandler):
# Useful for testing the Proxy-Authorization request by verifying the
# properties of httpcon
- def __init__(self, debuglevel=0):
- urllib.request.AbstractHTTPHandler.__init__(self, debuglevel=debuglevel)
+ def __init__(self, debuglevel=None, context=None, check_hostname=None):
+ super(MockHTTPSHandler, self).__init__(debuglevel, context, check_hostname)
self.httpconn = MockHTTPClass()
def https_open(self, req):
@@ -1048,12 +1060,35 @@ def test_http_body_array(self):
newreq = h.do_request_(req)
self.assertEqual(int(newreq.get_header('Content-length')),16)
- def test_http_handler_debuglevel(self):
+ def test_http_handler_global_debuglevel(self):
+ with mock.patch.object(http.client.HTTPConnection, 'debuglevel', 6):
+ o = OpenerDirector()
+ h = MockHTTPHandler()
+ o.add_handler(h)
+ o.open("http://www.example.com")
+ self.assertEqual(h._debuglevel, 6)
+
+ def test_http_handler_local_debuglevel(self):
+ o = OpenerDirector()
+ h = MockHTTPHandler(debuglevel=5)
+ o.add_handler(h)
+ o.open("http://www.example.com")
+ self.assertEqual(h._debuglevel, 5)
+
+ def test_https_handler_global_debuglevel(self):
+ with mock.patch.object(http.client.HTTPSConnection, 'debuglevel', 7):
+ o = OpenerDirector()
+ h = MockHTTPSHandler()
+ o.add_handler(h)
+ o.open("https://www.example.com")
+ self.assertEqual(h._debuglevel, 7)
+
+ def test_https_handler_local_debuglevel(self):
o = OpenerDirector()
- h = MockHTTPSHandler(debuglevel=1)
+ h = MockHTTPSHandler(debuglevel=4)
o.add_handler(h)
o.open("https://www.example.com")
- self.assertEqual(h._debuglevel, 1)
+ self.assertEqual(h._debuglevel, 4)
def test_http_doubleslash(self):
# Checks the presence of any unnecessary double slash in url does not
@@ -1289,7 +1324,7 @@ def test_cookie_redirect(self):
cj = CookieJar()
interact_netscape(cj, "http://www.example.com/", "spam=eggs")
- hh = MockHTTPHandler(302, "Location: http://www.cracker.com/\r\n\r\n")
+ hh = MockHTTPHandlerRedirect(302, "Location: http://www.cracker.com/\r\n\r\n")
hdeh = urllib.request.HTTPDefaultErrorHandler()
hrh = urllib.request.HTTPRedirectHandler()
cp = urllib.request.HTTPCookieProcessor(cj)
@@ -1299,7 +1334,7 @@ def test_cookie_redirect(self):
def test_redirect_fragment(self):
redirected_url = 'http://www.example.com/index.html#OK\r\n\r\n'
- hh = MockHTTPHandler(302, 'Location: ' + redirected_url)
+ hh = MockHTTPHandlerRedirect(302, 'Location: ' + redirected_url)
hdeh = urllib.request.HTTPDefaultErrorHandler()
hrh = urllib.request.HTTPRedirectHandler()
o = build_test_opener(hh, hdeh, hrh)
@@ -1484,7 +1519,7 @@ def check_basic_auth(self, headers, realm):
password_manager = MockPasswordManager()
auth_handler = urllib.request.HTTPBasicAuthHandler(password_manager)
body = '\r\n'.join(headers) + '\r\n\r\n'
- http_handler = MockHTTPHandler(401, body)
+ http_handler = MockHTTPHandlerRedirect(401, body)
opener.add_handler(auth_handler)
opener.add_handler(http_handler)
self._test_basic_auth(opener, auth_handler, "Authorization",
@@ -1544,7 +1579,7 @@ def test_proxy_basic_auth(self):
password_manager = MockPasswordManager()
auth_handler = urllib.request.ProxyBasicAuthHandler(password_manager)
realm = "ACME Networks"
- http_handler = MockHTTPHandler(
+ http_handler = MockHTTPHandlerRedirect(
407, 'Proxy-Authenticate: Basic realm="%s"\r\n\r\n' % realm)
opener.add_handler(auth_handler)
opener.add_handler(http_handler)
@@ -1588,7 +1623,7 @@ def http_error_401(self, *args, **kwds):
digest_handler = TestDigestAuthHandler(password_manager)
basic_handler = TestBasicAuthHandler(password_manager)
realm = "ACME Networks"
- http_handler = MockHTTPHandler(
+ http_handler = MockHTTPHandlerRedirect(
401, 'WWW-Authenticate: Basic realm="%s"\r\n\r\n' % realm)
opener.add_handler(basic_handler)
opener.add_handler(digest_handler)
@@ -1608,7 +1643,7 @@ def test_unsupported_auth_digest_handler(self):
opener = OpenerDirector()
# While using DigestAuthHandler
digest_auth_handler = urllib.request.HTTPDigestAuthHandler(None)
- http_handler = MockHTTPHandler(
+ http_handler = MockHTTPHandlerRedirect(
401, 'WWW-Authenticate: Kerberos\r\n\r\n')
opener.add_handler(digest_auth_handler)
opener.add_handler(http_handler)
@@ -1618,7 +1653,7 @@ def test_unsupported_auth_basic_handler(self):
# While using BasicAuthHandler
opener = OpenerDirector()
basic_auth_handler = urllib.request.HTTPBasicAuthHandler(None)
- http_handler = MockHTTPHandler(
+ http_handler = MockHTTPHandlerRedirect(
401, 'WWW-Authenticate: NTLM\r\n\r\n')
opener.add_handler(basic_auth_handler)
opener.add_handler(http_handler)
@@ -1705,7 +1740,7 @@ def test_basic_prior_auth_send_after_first_success(self):
opener = OpenerDirector()
opener.add_handler(auth_prior_handler)
- http_handler = MockHTTPHandler(
+ http_handler = MockHTTPHandlerRedirect(
401, 'WWW-Authenticate: Basic realm="%s"\r\n\r\n' % None)
opener.add_handler(http_handler)
diff --git a/Lib/test/test_urllib2net.py b/Lib/test/test_urllib2net.py
index 5da41c37bbfb8e..d8d882b2d33589 100644
--- a/Lib/test/test_urllib2net.py
+++ b/Lib/test/test_urllib2net.py
@@ -134,7 +134,9 @@ def setUp(self):
# They do sometimes catch some major disasters, though.
def test_ftp(self):
+ # Testing the same URL twice exercises the caching in CacheFTPHandler
urls = [
+ 'ftp://www.pythontest.net/README',
'ftp://www.pythontest.net/README',
('ftp://www.pythontest.net/non-existent-file',
None, urllib.error.URLError),
diff --git a/Lib/test/test_venv.py b/Lib/test/test_venv.py
index 7cccbe84f4ebfa..95944c7c711620 100644
--- a/Lib/test/test_venv.py
+++ b/Lib/test/test_venv.py
@@ -227,7 +227,6 @@ def pip_cmd_checker(cmd, **kwargs):
'install',
'--upgrade',
'pip',
- 'setuptools'
]
)
@@ -601,9 +600,15 @@ def test_zippath_from_non_installed_posix(self):
ld_library_path_env = "DYLD_LIBRARY_PATH"
else:
ld_library_path_env = "LD_LIBRARY_PATH"
- subprocess.check_call(cmd,
- env={"PYTHONPATH": pythonpath,
- ld_library_path_env: ld_library_path})
+ # Note that in address sanitizer mode, the current runtime
+ # implementation leaks memory due to not being able to correctly
+ # clean all unicode objects during runtime shutdown. Therefore,
+ # this uses subprocess.run instead of subprocess.check_call to
+ # maintain the core of the test while not failing due to the refleaks.
+ # This should be able to use check_call once all refleaks are fixed.
+ subprocess.run(cmd,
+ env={"PYTHONPATH": pythonpath,
+ ld_library_path_env: ld_library_path})
envpy = os.path.join(self.env_dir, self.bindir, self.exe)
# Now check the venv created from the non-installed python has
# correct zip path in pythonpath.
@@ -745,7 +750,6 @@ def do_test_with_pip(self, system_site_packages):
# future pip versions, this test can likely be relaxed further.
out = out.decode("latin-1") # Force to text, prevent decoding errors
self.assertIn("Successfully uninstalled pip", out)
- self.assertIn("Successfully uninstalled setuptools", out)
# Check pip is now gone from the virtual environment. This only
# applies in the system_site_packages=False case, because in the
# other case, pip may still be available in the system site-packages
diff --git a/Lib/test/test_weakref.py b/Lib/test/test_weakref.py
index 7c5920797d2538..1bc1d05f7daba9 100644
--- a/Lib/test/test_weakref.py
+++ b/Lib/test/test_weakref.py
@@ -116,6 +116,17 @@ def test_basic_ref(self):
del o
repr(wr)
+ def test_repr_failure_gh99184(self):
+ class MyConfig(dict):
+ def __getattr__(self, x):
+ return self[x]
+
+ obj = MyConfig(offset=5)
+ obj_weakref = weakref.ref(obj)
+
+ self.assertIn('MyConfig', repr(obj_weakref))
+ self.assertIn('MyConfig', str(obj_weakref))
+
def test_basic_callback(self):
self.check_basic_callback(C)
self.check_basic_callback(create_function)
diff --git a/Lib/test/test_winreg.py b/Lib/test/test_winreg.py
index 769ab67b0f5611..924a962781a75b 100644
--- a/Lib/test/test_winreg.py
+++ b/Lib/test/test_winreg.py
@@ -1,11 +1,12 @@
# Test the windows specific win32reg module.
# Only win32reg functions not hit here: FlushKey, LoadKey and SaveKey
+import gc
import os, sys, errno
-import unittest
-from test.support import import_helper
import threading
+import unittest
from platform import machine, win32_edition
+from test.support import cpython_only, import_helper
# Do this first so test will be skipped if module doesn't exist
import_helper.import_module('winreg', required_on=['win'])
@@ -49,6 +50,17 @@
("Japanese 日本", "日本語", REG_SZ),
]
+
+@cpython_only
+class HeapTypeTests(unittest.TestCase):
+ def test_have_gc(self):
+ self.assertTrue(gc.is_tracked(HKEYType))
+
+ def test_immutable(self):
+ with self.assertRaisesRegex(TypeError, "immutable"):
+ HKEYType.foo = "bar"
+
+
class BaseWinregTests(unittest.TestCase):
def setUp(self):
diff --git a/Lib/tkinter/__init__.py b/Lib/tkinter/__init__.py
index 479daf0e5abfc3..bf0b3b92155938 100644
--- a/Lib/tkinter/__init__.py
+++ b/Lib/tkinter/__init__.py
@@ -3430,8 +3430,7 @@ def entryconfigure(self, index, cnf=None, **kw):
def index(self, index):
"""Return the index of a menu item identified by INDEX."""
i = self.tk.call(self._w, 'index', index)
- if i == 'none': return None
- return self.tk.getint(i)
+ return None if i in ('', 'none') else self.tk.getint(i) # GH-103685.
def invoke(self, index):
"""Invoke a menu item identified by INDEX and execute
diff --git a/Lib/token.py b/Lib/token.py
index 95b107c6643b3f..1459d12b376f82 100644
--- a/Lib/token.py
+++ b/Lib/token.py
@@ -57,18 +57,22 @@
RARROW = 51
ELLIPSIS = 52
COLONEQUAL = 53
-OP = 54
-AWAIT = 55
-ASYNC = 56
-TYPE_IGNORE = 57
-TYPE_COMMENT = 58
-SOFT_KEYWORD = 59
+EXCLAMATION = 54
+OP = 55
+AWAIT = 56
+ASYNC = 57
+TYPE_IGNORE = 58
+TYPE_COMMENT = 59
+SOFT_KEYWORD = 60
+FSTRING_START = 61
+FSTRING_MIDDLE = 62
+FSTRING_END = 63
# These aren't used by the C tokenizer but are needed for tokenize.py
-ERRORTOKEN = 60
-COMMENT = 61
-NL = 62
-ENCODING = 63
-N_TOKENS = 64
+ERRORTOKEN = 64
+COMMENT = 65
+NL = 66
+ENCODING = 67
+N_TOKENS = 68
# Special definitions for cooperation with parser
NT_OFFSET = 256
@@ -78,6 +82,7 @@
__all__.extend(tok_name.values())
EXACT_TOKEN_TYPES = {
+ '!': EXCLAMATION,
'!=': NOTEQUAL,
'%': PERCENT,
'%=': PERCENTEQUAL,
diff --git a/Lib/types.py b/Lib/types.py
index aa8a1c84722399..6110e6e1de7249 100644
--- a/Lib/types.py
+++ b/Lib/types.py
@@ -143,6 +143,38 @@ def _calculate_meta(meta, bases):
"of the metaclasses of all its bases")
return winner
+
+def get_original_bases(cls, /):
+ """Return the class's "original" bases prior to modification by `__mro_entries__`.
+
+ Examples::
+
+ from typing import TypeVar, Generic, NamedTuple, TypedDict
+
+ T = TypeVar("T")
+ class Foo(Generic[T]): ...
+ class Bar(Foo[int], float): ...
+ class Baz(list[str]): ...
+ Eggs = NamedTuple("Eggs", [("a", int), ("b", str)])
+ Spam = TypedDict("Spam", {"a": int, "b": str})
+
+ assert get_original_bases(Bar) == (Foo[int], float)
+ assert get_original_bases(Baz) == (list[str],)
+ assert get_original_bases(Eggs) == (NamedTuple,)
+ assert get_original_bases(Spam) == (TypedDict,)
+ assert get_original_bases(int) == (object,)
+ """
+ try:
+ return cls.__orig_bases__
+ except AttributeError:
+ try:
+ return cls.__bases__
+ except AttributeError:
+ raise TypeError(
+ f'Expected an instance of type, not {type(cls).__name__!r}'
+ ) from None
+
+
class DynamicClassAttribute:
"""Route attribute access on a class to __getattr__.
diff --git a/Lib/typing.py b/Lib/typing.py
index 7c165562c2b53d..354bc80eb3abfa 100644
--- a/Lib/typing.py
+++ b/Lib/typing.py
@@ -2962,7 +2962,9 @@ class Employee(NamedTuple):
elif kwargs:
raise TypeError("Either list of fields or keywords"
" can be provided to NamedTuple, not both")
- return _make_nmtuple(typename, fields, module=_caller())
+ nt = _make_nmtuple(typename, fields, module=_caller())
+ nt.__orig_bases__ = (NamedTuple,)
+ return nt
_NamedTuple = type.__new__(NamedTupleMeta, 'NamedTuple', (), {})
@@ -2994,6 +2996,9 @@ def __new__(cls, name, bases, ns, total=True):
tp_dict = type.__new__(_TypedDictMeta, name, (*generic_base, dict), ns)
+ if not hasattr(tp_dict, '__orig_bases__'):
+ tp_dict.__orig_bases__ = bases
+
annotations = {}
own_annotations = ns.get('__annotations__', {})
msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
@@ -3104,7 +3109,9 @@ class body be required.
# Setting correct module is necessary to make typed dict classes pickleable.
ns['__module__'] = module
- return _TypedDictMeta(typename, (), ns, total=total)
+ td = _TypedDictMeta(typename, (), ns, total=total)
+ td.__orig_bases__ = (TypedDict,)
+ return td
_TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {})
TypedDict.__mro_entries__ = lambda bases: (_TypedDict,)
diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py
index 151034e6a81bf9..5314b3f26021eb 100644
--- a/Lib/urllib/request.py
+++ b/Lib/urllib/request.py
@@ -1251,8 +1251,8 @@ def http_error_407(self, req, fp, code, msg, headers):
class AbstractHTTPHandler(BaseHandler):
- def __init__(self, debuglevel=0):
- self._debuglevel = debuglevel
+ def __init__(self, debuglevel=None):
+ self._debuglevel = debuglevel if debuglevel is not None else http.client.HTTPConnection.debuglevel
def set_http_debuglevel(self, level):
self._debuglevel = level
@@ -1378,7 +1378,8 @@ def http_open(self, req):
class HTTPSHandler(AbstractHTTPHandler):
- def __init__(self, debuglevel=0, context=None, check_hostname=None):
+ def __init__(self, debuglevel=None, context=None, check_hostname=None):
+ debuglevel = debuglevel if debuglevel is not None else http.client.HTTPSConnection.debuglevel
AbstractHTTPHandler.__init__(self, debuglevel)
if context is None:
http_version = http.client.HTTPSConnection._http_vsn
@@ -2474,7 +2475,13 @@ def retrfile(self, file, type):
return (ftpobj, retrlen)
def endtransfer(self):
+ if not self.busy:
+ return
self.busy = 0
+ try:
+ self.ftp.voidresp()
+ except ftperrors():
+ pass
def close(self):
self.keepalive = False
diff --git a/Lib/venv/__init__.py b/Lib/venv/__init__.py
index 2f87c62ccba866..2173c9b13e5cf7 100644
--- a/Lib/venv/__init__.py
+++ b/Lib/venv/__init__.py
@@ -13,7 +13,7 @@
import types
-CORE_VENV_DEPS = ('pip', 'setuptools')
+CORE_VENV_DEPS = ('pip',)
logger = logging.getLogger(__name__)
@@ -523,7 +523,7 @@ def main(args=None):
'this environment.')
parser.add_argument('--upgrade-deps', default=False, action='store_true',
dest='upgrade_deps',
- help=f'Upgrade core dependencies: {", ".join(CORE_VENV_DEPS)} '
+ help=f'Upgrade core dependencies ({", ".join(CORE_VENV_DEPS)}) '
'to the latest version in PyPI')
options = parser.parse_args(args)
if options.upgrade and options.clear:
diff --git a/Lib/venv/scripts/common/activate b/Lib/venv/scripts/common/activate
index cb898b39670c47..408df5cb93b9e9 100644
--- a/Lib/venv/scripts/common/activate
+++ b/Lib/venv/scripts/common/activate
@@ -1,5 +1,5 @@
# This file must be used with "source bin/activate" *from bash*
-# you cannot run it directly
+# You cannot run it directly
deactivate () {
# reset old environment variables
diff --git a/Lib/venv/scripts/posix/activate.csh b/Lib/venv/scripts/posix/activate.csh
index d6f697c55ed81c..5e8d66fa9e5061 100644
--- a/Lib/venv/scripts/posix/activate.csh
+++ b/Lib/venv/scripts/posix/activate.csh
@@ -1,5 +1,6 @@
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
+
# Created by Davide Di Blasi .
# Ported to Python 3.3 venv by Andrew Svetlov
diff --git a/Lib/venv/scripts/posix/activate.fish b/Lib/venv/scripts/posix/activate.fish
index 9aa4446005f4d8..91ad6442e05692 100644
--- a/Lib/venv/scripts/posix/activate.fish
+++ b/Lib/venv/scripts/posix/activate.fish
@@ -1,5 +1,5 @@
# This file must be used with "source /bin/activate.fish" *from fish*
-# (https://fishshell.com/); you cannot run it directly.
+# (https://fishshell.com/). You cannot run it directly.
function deactivate -d "Exit virtual environment and return to normal shell environment"
# reset old environment variables
diff --git a/Mac/BuildScript/scripts/postflight.ensurepip b/Mac/BuildScript/scripts/postflight.ensurepip
index 36d05945b6fd90..ce3c6c1c2bf9e6 100755
--- a/Mac/BuildScript/scripts/postflight.ensurepip
+++ b/Mac/BuildScript/scripts/postflight.ensurepip
@@ -56,19 +56,19 @@ if [ -d /usr/local/bin ] ; then
cd /usr/local/bin
- # Create pipx.y and easy_install-x.y links if /usr/local/bin/pythonx.y
+ # Create pipx.y links if /usr/local/bin/pythonx.y
# is linked to this framework version
install_links_if_our_fw "python${PYVER}" \
- "pip${PYVER}" "easy_install-${PYVER}"
+ "pip${PYVER}"
# Create pipx link if /usr/local/bin/pythonx is linked to this version
install_links_if_our_fw "python${PYMAJOR}" \
"pip${PYMAJOR}"
- # Create pip and easy_install link if /usr/local/bin/python
+ # Create pip link if /usr/local/bin/python
# is linked to this version
install_links_if_our_fw "python" \
- "pip" "easy_install"
+ "pip"
)
fi
exit 0
diff --git a/Mac/Makefile.in b/Mac/Makefile.in
index f9691288414538..69ab4198988570 100644
--- a/Mac/Makefile.in
+++ b/Mac/Makefile.in
@@ -166,7 +166,6 @@ altinstallunixtools:
-if test "x$(ENSUREPIP)" != "xno" ; then \
cd "$(DESTDIR)$(FRAMEWORKUNIXTOOLSPREFIX)/bin" && \
for fn in \
- easy_install-$(VERSION) \
pip$(VERSION) \
; \
do \
diff --git a/Makefile.pre.in b/Makefile.pre.in
index a58397f21f9bb2..b3eb00240f91e9 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -38,6 +38,7 @@ CC= @CC@
CXX= @CXX@
LINKCC= @LINKCC@
AR= @AR@
+READELF= @READELF@
SOABI= @SOABI@
LDVERSION= @LDVERSION@
LIBPYTHON= @LIBPYTHON@
@@ -670,13 +671,18 @@ profile-opt: profile-run-stamp
bolt-opt: @PREBOLT_RULE@
rm -f *.fdata
- @LLVM_BOLT@ ./$(BUILDPYTHON) -instrument -instrumentation-file-append-pid -instrumentation-file=$(abspath $(BUILDPYTHON).bolt) -o $(BUILDPYTHON).bolt_inst
- ./$(BUILDPYTHON).bolt_inst $(PROFILE_TASK) || true
- @MERGE_FDATA@ $(BUILDPYTHON).*.fdata > $(BUILDPYTHON).fdata
- @LLVM_BOLT@ ./$(BUILDPYTHON) -o $(BUILDPYTHON).bolt -data=$(BUILDPYTHON).fdata -update-debug-sections -reorder-blocks=ext-tsp -reorder-functions=hfsort+ -split-functions -icf=1 -inline-all -split-eh -reorder-functions-use-hot-size -peepholes=none -jump-tables=aggressive -inline-ap -indirect-call-promotion=all -dyno-stats -use-gnu-stack -frame-opt=hot
- rm -f *.fdata
- rm -f $(BUILDPYTHON).bolt_inst
- mv $(BUILDPYTHON).bolt $(BUILDPYTHON)
+ @if $(READELF) -p .note.bolt_info $(BUILDPYTHON) | grep BOLT > /dev/null; then\
+ echo "skip: $(BUILDPYTHON) is already BOLTed."; \
+ else \
+ @LLVM_BOLT@ ./$(BUILDPYTHON) -instrument -instrumentation-file-append-pid -instrumentation-file=$(abspath $(BUILDPYTHON).bolt) -o $(BUILDPYTHON).bolt_inst; \
+ ./$(BUILDPYTHON).bolt_inst $(PROFILE_TASK) || true; \
+ @MERGE_FDATA@ $(BUILDPYTHON).*.fdata > $(BUILDPYTHON).fdata; \
+ @LLVM_BOLT@ ./$(BUILDPYTHON) -o $(BUILDPYTHON).bolt -data=$(BUILDPYTHON).fdata -update-debug-sections -reorder-blocks=ext-tsp -reorder-functions=hfsort+ -split-functions -icf=1 -inline-all -split-eh -reorder-functions-use-hot-size -peepholes=none -jump-tables=aggressive -inline-ap -indirect-call-promotion=all -dyno-stats -use-gnu-stack -frame-opt=hot; \
+ rm -f *.fdata; \
+ rm -f $(BUILDPYTHON).bolt_inst; \
+ mv $(BUILDPYTHON).bolt $(BUILDPYTHON); \
+ fi
+
# Compile and run with gcov
.PHONY=coverage coverage-lcov coverage-report
@@ -976,7 +982,7 @@ Makefile Modules/config.c: Makefile.pre \
Modules/Setup.local \
Modules/Setup.bootstrap \
Modules/Setup.stdlib
- $(SHELL) $(MAKESETUP) -c $(srcdir)/Modules/config.c.in \
+ $(MAKESETUP) -c $(srcdir)/Modules/config.c.in \
-s Modules \
Modules/Setup.local \
Modules/Setup.stdlib \
@@ -2417,12 +2423,12 @@ frameworkinstallextras:
# Build the toplevel Makefile
Makefile.pre: $(srcdir)/Makefile.pre.in config.status
- CONFIG_FILES=Makefile.pre CONFIG_HEADERS= $(SHELL) config.status
+ CONFIG_FILES=Makefile.pre CONFIG_HEADERS= ./config.status
$(MAKE) -f Makefile.pre Makefile
# Run the configure script.
config.status: $(srcdir)/configure
- $(SHELL) $(srcdir)/configure $(CONFIG_ARGS)
+ $(srcdir)/configure $(CONFIG_ARGS)
.PRECIOUS: config.status $(BUILDPYTHON) Makefile Makefile.pre
@@ -2447,8 +2453,8 @@ reindent:
# Rerun configure with the same options as it was run last time,
# provided the config.status script exists
recheck:
- $(SHELL) config.status --recheck
- $(SHELL) config.status
+ ./config.status --recheck
+ ./config.status
# Regenerate configure and pyconfig.h.in
.PHONY: autoconf
diff --git a/Misc/NEWS.d/3.7.0b2.rst b/Misc/NEWS.d/3.7.0b2.rst
index b2ade206bd5f97..9590914599bb86 100644
--- a/Misc/NEWS.d/3.7.0b2.rst
+++ b/Misc/NEWS.d/3.7.0b2.rst
@@ -357,7 +357,7 @@ Wirtel
Add TLSVersion constants and SSLContext.maximum_version / minimum_version
attributes. The new API wraps OpenSSL 1.1
-https://www.openssl.org/docs/man1.1.0/ssl/SSL_CTX_set_min_proto_version.html
+https://web.archive.org/web/20180309043602/https://www.openssl.org/docs/man1.1.0/ssl/SSL_CTX_set_min_proto_version.html
feature.
..
diff --git a/Misc/NEWS.d/3.8.0a1.rst b/Misc/NEWS.d/3.8.0a1.rst
index 991bbc128670b2..db2eba32e6ea34 100644
--- a/Misc/NEWS.d/3.8.0a1.rst
+++ b/Misc/NEWS.d/3.8.0a1.rst
@@ -5951,7 +5951,7 @@ Wirtel
Add TLSVersion constants and SSLContext.maximum_version / minimum_version
attributes. The new API wraps OpenSSL 1.1
-https://www.openssl.org/docs/man1.1.0/ssl/SSL_CTX_set_min_proto_version.html
+https://web.archive.org/web/20180309043602/https://www.openssl.org/docs/man1.1.0/ssl/SSL_CTX_set_min_proto_version.html
feature.
..
diff --git a/Misc/NEWS.d/3.9.0a1.rst b/Misc/NEWS.d/3.9.0a1.rst
index 633620583838df..0888a5c43087b5 100644
--- a/Misc/NEWS.d/3.9.0a1.rst
+++ b/Misc/NEWS.d/3.9.0a1.rst
@@ -4887,7 +4887,7 @@ Fix use of registry values to launch Python from Microsoft Store app.
.. section: Windows
Fix memory leak on Windows in creating an SSLContext object or running
-urllib.request.urlopen('https://...').
+``urllib.request.urlopen('https://...')``.
..
diff --git a/Misc/NEWS.d/3.9.0a2.rst b/Misc/NEWS.d/3.9.0a2.rst
index 226ea0d3df2243..a03eb10f1d523a 100644
--- a/Misc/NEWS.d/3.9.0a2.rst
+++ b/Misc/NEWS.d/3.9.0a2.rst
@@ -686,7 +686,7 @@ added.
Update documentation to state that to activate virtual environments under
fish one should use `source`, not `.` as documented at
-https://fishshell.com/docs/current/commands.html#source.
+https://fishshell.com/docs/current/cmds/source.html.
..
diff --git a/Misc/NEWS.d/3.9.0a4.rst b/Misc/NEWS.d/3.9.0a4.rst
index 2aef8b26b01696..019b34c4082d10 100644
--- a/Misc/NEWS.d/3.9.0a4.rst
+++ b/Misc/NEWS.d/3.9.0a4.rst
@@ -392,7 +392,7 @@ The distutils ``bdist_msi`` command is deprecated in Python 3.9, use
Improved performance of zipfile.Path for files with a large number of
entries. Also improved performance and fixed minor issue as published with
`importlib_metadata 1.5
-`_.
+`_.
..
diff --git a/Misc/NEWS.d/next/C API/2023-02-09-23-09-29.gh-issue-101408._paFIF.rst b/Misc/NEWS.d/next/C API/2023-02-09-23-09-29.gh-issue-101408._paFIF.rst
new file mode 100644
index 00000000000000..172d66163d42e6
--- /dev/null
+++ b/Misc/NEWS.d/next/C API/2023-02-09-23-09-29.gh-issue-101408._paFIF.rst
@@ -0,0 +1,2 @@
+:c:func:`PyObject_GC_Resize` should calculate preheader size if needed.
+Patch by Dong-hee Na.
diff --git a/Misc/NEWS.d/next/C API/2023-03-28-12-31-51.gh-issue-103091.CzZyaZ.rst b/Misc/NEWS.d/next/C API/2023-03-28-12-31-51.gh-issue-103091.CzZyaZ.rst
new file mode 100644
index 00000000000000..28c77b6816af87
--- /dev/null
+++ b/Misc/NEWS.d/next/C API/2023-03-28-12-31-51.gh-issue-103091.CzZyaZ.rst
@@ -0,0 +1 @@
+Add a new C-API function to eagerly assign a version tag to a PyTypeObject: ``PyUnstable_Type_AssignVersionTag()``.
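A minimal sketch of exercising the new function from Python, using the ``_testcapi`` helpers added elsewhere in this patch (assumes an interpreter built with both changes):

    import _testcapi

    class Widget:
        pass

    # Eagerly assign a version tag; 1 means a tag was (or already is) assigned.
    print(_testcapi.type_assign_version(Widget))   # expected: 1
    print(_testcapi.type_get_version(Widget))      # the now non-zero tp_version_tag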
diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-02-11-15-54-40.bpo-39610.fvgsCl.rst b/Misc/NEWS.d/next/Core and Builtins/2020-02-11-15-54-40.bpo-39610.fvgsCl.rst
new file mode 100644
index 00000000000000..d65e0f3db9d6f5
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2020-02-11-15-54-40.bpo-39610.fvgsCl.rst
@@ -0,0 +1,2 @@
+``len()`` for 0-dimensional :class:`memoryview` objects (such as ``memoryview(ctypes.c_uint8(42))``) now raises a :exc:`TypeError`.
+Previously this returned ``1``, which was not consistent with ``mem_0d[0]`` raising an :exc:`IndexError`.
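A short sketch of the new behavior (assumes this patch is applied):

    import ctypes

    mem_0d = memoryview(ctypes.c_uint8(42))   # a 0-dimensional view
    try:
        len(mem_0d)
    except TypeError as exc:
        print(exc)                            # previously len() returned 1 here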
diff --git a/Misc/NEWS.d/next/Core and Builtins/2022-11-08-12-36-25.gh-issue-99184.KIaqzz.rst b/Misc/NEWS.d/next/Core and Builtins/2022-11-08-12-36-25.gh-issue-99184.KIaqzz.rst
new file mode 100644
index 00000000000000..80076831badfea
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2022-11-08-12-36-25.gh-issue-99184.KIaqzz.rst
@@ -0,0 +1,2 @@
+Bypass instance attribute access of ``__name__`` in ``repr`` of
+:class:`weakref.ref`.
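A rough illustration, assuming this patch; the exact repr text is an implementation detail:

    import weakref

    class Target:
        pass

    t = Target()
    t.__name__ = "per-instance name"   # set on the instance, not on the type
    print(repr(weakref.ref(t)))        # the instance-level __name__ is no longer consulted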
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-02-22-14-57.gh-issue-84436.hvMgwF.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-02-22-14-57.gh-issue-84436.hvMgwF.rst
new file mode 100644
index 00000000000000..c4d8ce75b35a30
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-02-22-14-57.gh-issue-84436.hvMgwF.rst
@@ -0,0 +1,3 @@
+Implement :pep:`683` (Immortal Objects): immortal objects use a fixed
+reference count that bypasses reference counting entirely, making them
+truly immutable.
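A rough illustration of what immortality looks like from Python; the reported numbers are an implementation detail and should be treated as an assumption:

    import sys

    # Immortal singletons report a large, fixed sentinel rather than a live count.
    print(sys.getrefcount(None))
    print(sys.getrefcount(True))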
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-07-12-18-41.gh-issue-103323.9802br.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-07-12-18-41.gh-issue-103323.9802br.rst
new file mode 100644
index 00000000000000..347c91d973e5ce
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-07-12-18-41.gh-issue-103323.9802br.rst
@@ -0,0 +1,3 @@
+We've replaced our use of ``_PyRuntime.tstate_current`` with a thread-local
+variable. This is a fairly low-level implementation detail, and there
+should be no change in behavior.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-14-22-35-23.gh-issue-101517.5EqM-S.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-14-22-35-23.gh-issue-101517.5EqM-S.rst
new file mode 100644
index 00000000000000..730c6cd40d7235
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-14-22-35-23.gh-issue-101517.5EqM-S.rst
@@ -0,0 +1 @@
+Fix bug in line numbers of instructions emitted for :keyword:`except* `.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-16-14-38-39.gh-issue-100530.OR6-sn.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-16-14-38-39.gh-issue-100530.OR6-sn.rst
new file mode 100644
index 00000000000000..5b1bcc4a680fc3
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-16-14-38-39.gh-issue-100530.OR6-sn.rst
@@ -0,0 +1 @@
+Clarify the error message raised when the called part of a class pattern isn't actually a class.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-17-16-00-32.gh-issue-102856.UunJ7y.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-17-16-00-32.gh-issue-102856.UunJ7y.rst
new file mode 100644
index 00000000000000..35eceb83816bcb
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-17-16-00-32.gh-issue-102856.UunJ7y.rst
@@ -0,0 +1 @@
+Implement the required C tokenizer changes for PEP 701. Patch by Pablo Galindo Salgado, Lysandros Nikolaou, Batuhan Taskaya, Marta Gómez Macías and sunmy2019.
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-21-17-03-14.gh-issue-102310.anLjDx.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-21-17-03-14.gh-issue-102310.anLjDx.rst
new file mode 100644
index 00000000000000..15cb6c64adbab1
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-21-17-03-14.gh-issue-102310.anLjDx.rst
@@ -0,0 +1 @@
+Change the error range for invalid bytes literals.
diff --git a/Misc/NEWS.d/next/Library/2022-07-03-23-13-28.gh-issue-94518.511Tbh.rst b/Misc/NEWS.d/next/Library/2022-07-03-23-13-28.gh-issue-94518.511Tbh.rst
new file mode 100644
index 00000000000000..7719b74b8e5ef1
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2022-07-03-23-13-28.gh-issue-94518.511Tbh.rst
@@ -0,0 +1 @@
+Convert private :meth:`_posixsubprocess.fork_exec` to use Argument Clinic.
diff --git a/Misc/NEWS.d/next/Library/2022-11-10-16-26-47.gh-issue-99353.DQFjnt.rst b/Misc/NEWS.d/next/Library/2022-11-10-16-26-47.gh-issue-99353.DQFjnt.rst
new file mode 100644
index 00000000000000..1ad42d5c9aa53d
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2022-11-10-16-26-47.gh-issue-99353.DQFjnt.rst
@@ -0,0 +1,3 @@
+Respect the :class:`http.client.HTTPConnection` ``.debuglevel`` flag
+in :class:`urllib.request.AbstractHTTPHandler` when its constructor
+parameter ``debuglevel`` is not set, and do the same for ``*HTTPS*``.
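A minimal sketch of the new default (assumes this patch is applied):

    import http.client
    import urllib.request

    http.client.HTTPConnection.debuglevel = 1   # class-level debugging default

    handler = urllib.request.HTTPHandler()      # no explicit debuglevel argument
    opener = urllib.request.build_opener(handler)
    # Requests made through `opener` now honor the class-level flag instead of defaulting to 0.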
diff --git a/Misc/NEWS.d/next/Library/2023-01-14-17-54-56.gh-issue-95299.vUhpKz.rst b/Misc/NEWS.d/next/Library/2023-01-14-17-54-56.gh-issue-95299.vUhpKz.rst
new file mode 100644
index 00000000000000..29c30848e09a83
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-01-14-17-54-56.gh-issue-95299.vUhpKz.rst
@@ -0,0 +1 @@
+Remove the bundled setuptools wheel from ``ensurepip``, and stop installing setuptools in environments created by ``venv``.
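An illustrative check of the new default; the ``bin/`` layout below assumes a POSIX platform:

    import subprocess, tempfile, venv

    with tempfile.TemporaryDirectory() as target:
        venv.create(target, with_pip=True)
        listing = subprocess.run(
            [f"{target}/bin/python", "-m", "pip", "list"],
            capture_output=True, text=True, check=True,
        ).stdout
        print("setuptools" in listing)   # expected: False with this change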
diff --git a/Misc/NEWS.d/next/Library/2023-02-06-16-45-18.gh-issue-83861.mMbIU3.rst b/Misc/NEWS.d/next/Library/2023-02-06-16-45-18.gh-issue-83861.mMbIU3.rst
new file mode 100644
index 00000000000000..e85e7a4ff2e73a
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-02-06-16-45-18.gh-issue-83861.mMbIU3.rst
@@ -0,0 +1,4 @@
+Fix datetime.astimezone method return value when invoked on a naive datetime
+instance that represents local time falling in a timezone transition gap.
+PEP 495 requires that instances with fold=1 produce earlier times than those
+with fold=0 in this case.
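A hypothetical sketch; the chosen date is a DST spring-forward gap in many European timezones, so the outcome depends on the machine's local zone:

    from datetime import datetime

    missing = datetime(2021, 3, 28, 2, 30)     # naive local time inside the gap
    fold0 = missing.replace(fold=0).astimezone()
    fold1 = missing.replace(fold=1).astimezone()
    print(fold1 <= fold0)   # with the fix, fold=1 maps to the earlier instant, per PEP 495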
diff --git a/Misc/NEWS.d/next/Library/2023-02-11-15-01-32.gh-issue-101688.kwXmfM.rst b/Misc/NEWS.d/next/Library/2023-02-11-15-01-32.gh-issue-101688.kwXmfM.rst
new file mode 100644
index 00000000000000..6df69463931494
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-02-11-15-01-32.gh-issue-101688.kwXmfM.rst
@@ -0,0 +1,2 @@
+Implement :func:`types.get_original_bases` to provide further introspection
+for types.
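A minimal sketch of the new helper:

    from types import get_original_bases
    from typing import Generic, TypeVar

    T = TypeVar("T")

    class Container(Generic[T]):
        pass

    class IntContainer(Container[int]):
        pass

    print(get_original_bases(IntContainer))   # (Container[int],) -- before __mro_entries__ resolution
    print(IntContainer.__bases__)             # (Container,)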
diff --git a/Misc/NEWS.d/next/Library/2023-03-23-15-24-38.gh-issue-102953.YR4KaK.rst b/Misc/NEWS.d/next/Library/2023-03-23-15-24-38.gh-issue-102953.YR4KaK.rst
new file mode 100644
index 00000000000000..48a105a4a17b29
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-03-23-15-24-38.gh-issue-102953.YR4KaK.rst
@@ -0,0 +1,4 @@
+The extraction methods in :mod:`tarfile`, and :func:`shutil.unpack_archive`,
+have a new *filter* argument that allows limiting tar features that may be
+surprising or dangerous, such as creating files outside the destination
+directory. See :ref:`tarfile-extraction-filter` for details.
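A minimal sketch, assuming an archive named ``backup.tar`` exists; the ``'data'`` filter rejects absolute paths, links escaping the destination, and similar surprises:

    import tarfile

    with tarfile.open("backup.tar") as archive:
        archive.extractall(path="restore", filter="data")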
diff --git a/Misc/NEWS.d/next/Library/2023-04-06-04-35-59.gh-issue-103285.rCZ9-G.rst b/Misc/NEWS.d/next/Library/2023-04-06-04-35-59.gh-issue-103285.rCZ9-G.rst
new file mode 100644
index 00000000000000..62b4364c2b1665
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-06-04-35-59.gh-issue-103285.rCZ9-G.rst
@@ -0,0 +1 @@
+Improve performance of :func:`ast.get_source_segment`.
diff --git a/Misc/NEWS.d/next/Library/2023-04-06-16-55-51.gh-issue-102778.BWeAmE.rst b/Misc/NEWS.d/next/Library/2023-04-06-16-55-51.gh-issue-102778.BWeAmE.rst
new file mode 100644
index 00000000000000..64ae5b5b6d564b
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-06-16-55-51.gh-issue-102778.BWeAmE.rst
@@ -0,0 +1 @@
+Support ``sys.last_exc`` in :mod:`idlelib`.
diff --git a/Misc/NEWS.d/next/Library/2023-04-08-00-48-40.gh-issue-103092.5EFts0.rst b/Misc/NEWS.d/next/Library/2023-04-08-00-48-40.gh-issue-103092.5EFts0.rst
new file mode 100644
index 00000000000000..0f2108fee763d0
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-08-00-48-40.gh-issue-103092.5EFts0.rst
@@ -0,0 +1 @@
+Adapt the :mod:`winreg` extension module to :pep:`687`.
diff --git a/Misc/NEWS.d/next/Library/2023-04-11-21-38-39.gh-issue-103449.-nxmhb.rst b/Misc/NEWS.d/next/Library/2023-04-11-21-38-39.gh-issue-103449.-nxmhb.rst
new file mode 100644
index 00000000000000..0b2b47af1cbaab
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-11-21-38-39.gh-issue-103449.-nxmhb.rst
@@ -0,0 +1 @@
+Fix a bug in doc string generation in :func:`dataclasses.dataclass`.
diff --git a/Misc/NEWS.d/next/Library/2023-04-15-11-21-38.gh-issue-103559.a9rYHG.rst b/Misc/NEWS.d/next/Library/2023-04-15-11-21-38.gh-issue-103559.a9rYHG.rst
new file mode 100644
index 00000000000000..2c9d67e2c4bf71
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-15-11-21-38.gh-issue-103559.a9rYHG.rst
@@ -0,0 +1 @@
+Update the bundled copy of pip to version 23.1.1.
diff --git a/Misc/NEWS.d/next/Library/2023-04-15-12-19-14.gh-issue-103556.TEf-2m.rst b/Misc/NEWS.d/next/Library/2023-04-15-12-19-14.gh-issue-103556.TEf-2m.rst
new file mode 100644
index 00000000000000..fe2267b7b79019
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-15-12-19-14.gh-issue-103556.TEf-2m.rst
@@ -0,0 +1,3 @@
+Creating :class:`inspect.Signature` objects with a positional-only
+parameter that has a default followed by a positional-or-keyword parameter
+without one is no longer possible.
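A sketch of the now-rejected parameter layout (the exception type is an assumption):

    from inspect import Parameter, Signature

    params = [
        Parameter("a", Parameter.POSITIONAL_ONLY, default=0),
        Parameter("b", Parameter.POSITIONAL_OR_KEYWORD),   # no default after a defaulted parameter
    ]
    try:
        Signature(params)
    except ValueError as exc:
        print("rejected:", exc)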
diff --git a/Misc/NEWS.d/next/Library/2023-04-16-19-48-21.gh-issue-103584.3mBTuM.rst b/Misc/NEWS.d/next/Library/2023-04-16-19-48-21.gh-issue-103584.3mBTuM.rst
new file mode 100644
index 00000000000000..6d7c93ade9cd94
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-16-19-48-21.gh-issue-103584.3mBTuM.rst
@@ -0,0 +1,12 @@
+Updated ``importlib.metadata`` with changes from ``importlib_metadata`` 5.2
+through 6.5.0, including: Support ``installed-files.txt`` for
+``Distribution.files`` when present. ``PackageMetadata`` now stipulates an
+additional ``get`` method allowing for easy querying of metadata keys that
+may not be present. ``packages_distributions`` now honors packages and
+modules with Python modules that are not ``.py`` sources (e.g. ``.pyc``,
+``.so``). Expand protocol for ``PackageMetadata.get_all`` to match the
+upstream implementation of ``email.message.Message.get_all`` in
+python/typeshed#9620. Deprecated use of ``Distribution`` without defining
+abstract methods. Deprecated expectation that
+``PackageMetadata.__getitem__`` will return ``None`` for missing keys. In
+the future, it will raise a ``KeyError``.
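A sketch of the added ``get`` accessor; ``"pip"`` is just an example distribution assumed to be installed:

    from importlib.metadata import metadata

    meta = metadata("pip")
    print(meta.get("Summary"))                     # value of a present metadata key
    print(meta.get("Does-Not-Exist", "missing"))   # default instead of an error for absent keys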
diff --git a/Misc/NEWS.d/next/Library/2023-04-17-14-47-28.gh-issue-103596.ME1y3_.rst b/Misc/NEWS.d/next/Library/2023-04-17-14-47-28.gh-issue-103596.ME1y3_.rst
new file mode 100644
index 00000000000000..2fa27e60b58efe
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-17-14-47-28.gh-issue-103596.ME1y3_.rst
@@ -0,0 +1,2 @@
+Attributes/methods are no longer shadowed by same-named enum members,
+although they may be shadowed by ``enum.property`` descriptors.
diff --git a/Misc/NEWS.d/next/Library/2023-04-22-22-37-39.gh-issue-103699.NizCjc.rst b/Misc/NEWS.d/next/Library/2023-04-22-22-37-39.gh-issue-103699.NizCjc.rst
new file mode 100644
index 00000000000000..60547a25a109bc
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-22-22-37-39.gh-issue-103699.NizCjc.rst
@@ -0,0 +1,2 @@
+Add ``__orig_bases__`` to non-generic TypedDicts, call-based TypedDicts, and
+call-based NamedTuples. Other TypedDicts and NamedTuples already had the attribute.
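A minimal sketch of the attribute on the call-based (functional) forms:

    from typing import NamedTuple, TypedDict

    Point = NamedTuple("Point", [("x", int), ("y", int)])
    Movie = TypedDict("Movie", {"title": str, "year": int})

    print(Point.__orig_bases__)   # (NamedTuple,)
    print(Movie.__orig_bases__)   # (TypedDict,)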
diff --git a/Misc/NEWS.d/next/Library/2023-04-23-15-39-17.gh-issue-81403.zVz9Td.rst b/Misc/NEWS.d/next/Library/2023-04-23-15-39-17.gh-issue-81403.zVz9Td.rst
new file mode 100644
index 00000000000000..6adb71f7677229
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-23-15-39-17.gh-issue-81403.zVz9Td.rst
@@ -0,0 +1,3 @@
+:class:`urllib.request.CacheFTPHandler` no longer raises :class:`URLError`
+if a cached FTP instance is reused. ``ftplib``'s ``endtransfer`` method now
+calls ``voidresp`` to drain the connection, so FTP instances are reused cleanly.
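An untested illustration; ``ftp.example.com`` is a placeholder host and a reachable FTP server is assumed:

    import urllib.request

    opener = urllib.request.build_opener(urllib.request.CacheFTPHandler())
    opener.open("ftp://ftp.example.com/pub/a.txt").read()
    opener.open("ftp://ftp.example.com/pub/b.txt").read()   # reuses the cached FTP instance without URLError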
diff --git a/Misc/NEWS.d/next/Library/2023-04-24-00-34-23.gh-issue-103685.U14jBM.rst b/Misc/NEWS.d/next/Library/2023-04-24-00-34-23.gh-issue-103685.U14jBM.rst
new file mode 100644
index 00000000000000..31df04790721a8
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2023-04-24-00-34-23.gh-issue-103685.U14jBM.rst
@@ -0,0 +1 @@
+Prepare :meth:`tkinter.Menu.index` for Tk 8.7 so that it does not raise ``TclError: expected integer but got ""`` when it should return ``None``.
diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c
index eda8c5610ba659..f317dc14e15bf1 100644
--- a/Modules/_datetimemodule.c
+++ b/Modules/_datetimemodule.c
@@ -6153,17 +6153,31 @@ local_to_seconds(int year, int month, int day,
static PyObject *
local_timezone_from_local(PyDateTime_DateTime *local_dt)
{
- long long seconds;
+ long long seconds, seconds2;
time_t timestamp;
+ int fold = DATE_GET_FOLD(local_dt);
seconds = local_to_seconds(GET_YEAR(local_dt),
GET_MONTH(local_dt),
GET_DAY(local_dt),
DATE_GET_HOUR(local_dt),
DATE_GET_MINUTE(local_dt),
DATE_GET_SECOND(local_dt),
- DATE_GET_FOLD(local_dt));
+ fold);
if (seconds == -1)
return NULL;
+ seconds2 = local_to_seconds(GET_YEAR(local_dt),
+ GET_MONTH(local_dt),
+ GET_DAY(local_dt),
+ DATE_GET_HOUR(local_dt),
+ DATE_GET_MINUTE(local_dt),
+ DATE_GET_SECOND(local_dt),
+ !fold);
+ if (seconds2 == -1)
+ return NULL;
+ /* Detect gap */
+ if (seconds2 != seconds && (seconds2 > seconds) == fold)
+ seconds = seconds2;
+
/* XXX: add bounds check */
timestamp = seconds - epoch;
return local_timezone_from_timestamp(timestamp);
diff --git a/Modules/_io/_iomodule.c b/Modules/_io/_iomodule.c
index 5644cc05c45800..7f4f1d939fb7e9 100644
--- a/Modules/_io/_iomodule.c
+++ b/Modules/_io/_iomodule.c
@@ -616,8 +616,9 @@ iomodule_clear(PyObject *mod) {
}
static void
-iomodule_free(PyObject *mod) {
- iomodule_clear(mod);
+iomodule_free(void *mod)
+{
+ (void)iomodule_clear((PyObject *)mod);
}
diff --git a/Modules/_posixsubprocess.c b/Modules/_posixsubprocess.c
index f3ff39215eab76..f5bce8cd7628ad 100644
--- a/Modules/_posixsubprocess.c
+++ b/Modules/_posixsubprocess.c
@@ -75,6 +75,28 @@
static struct PyModuleDef _posixsubprocessmodule;
+/*[clinic input]
+module _posixsubprocess
+[clinic start generated code]*/
+/*[clinic end generated code: output=da39a3ee5e6b4b0d input=c62211df27cf7334]*/
+
+/*[python input]
+class pid_t_converter(CConverter):
+ type = 'pid_t'
+ format_unit = '" _Py_PARSE_PID "'
+
+ def parse_arg(self, argname, displayname):
+ return """
+ {paramname} = PyLong_AsPid({argname});
+ if ({paramname} == -1 && PyErr_Occurred()) {{{{
+ goto exit;
+ }}}}
+ """.format(argname=argname, paramname=self.parser_name)
+[python start generated code]*/
+/*[python end generated code: output=da39a3ee5e6b4b0d input=5af1c116d56cbb5a]*/
+
+#include "clinic/_posixsubprocess.c.h"
+
/* Convert ASCII to a positive int, no libc call. no overflow. -1 on error. */
static int
_pos_int_from_ascii(const char *name)
@@ -744,7 +766,7 @@ do_fork_exec(char *const exec_array[],
assert(preexec_fn == Py_None);
pid = vfork();
- if (pid == -1) {
+ if (pid == (pid_t)-1) {
/* If vfork() fails, fall back to using fork(). When it isn't
* allowed in a process by the kernel, vfork can return -1
* with errno EINVAL. https://bugs.python.org/issue47151. */
@@ -784,44 +806,81 @@ do_fork_exec(char *const exec_array[],
return 0; /* Dead code to avoid a potential compiler warning. */
}
+/*[clinic input]
+_posixsubprocess.fork_exec as subprocess_fork_exec
+ args as process_args: object
+ executable_list: object
+ close_fds: bool
+ pass_fds as py_fds_to_keep: object(subclass_of='&PyTuple_Type')
+ cwd as cwd_obj: object
+ env as env_list: object
+ p2cread: int
+ p2cwrite: int
+ c2pread: int
+ c2pwrite: int
+ errread: int
+ errwrite: int
+ errpipe_read: int
+ errpipe_write: int
+ restore_signals: bool
+ call_setsid: bool
+ pgid_to_set: pid_t
+ gid as gid_object: object
+ extra_groups as extra_groups_packed: object
+ uid as uid_object: object
+ child_umask: int
+ preexec_fn: object
+ allow_vfork: bool
+ /
+
+Spawn a fresh new child process.
+
+Fork a child process, close parent file descriptors as appropriate in the
+child and duplicate the few that are needed before calling exec() in the
+child process.
+
+If close_fds is True, close file descriptors 3 and higher, except those listed
+in the sorted tuple pass_fds.
+
+The preexec_fn, if supplied, will be called immediately before closing file
+descriptors and exec.
+
+WARNING: preexec_fn is NOT SAFE if your application uses threads.
+ It may trigger infrequent, difficult to debug deadlocks.
+
+If an error occurs in the child process before the exec, it is
+serialized and written to the errpipe_write fd per subprocess.py.
+
+Returns: the child process's PID.
+
+Raises: Only on an error in the parent process.
+[clinic start generated code]*/
static PyObject *
-subprocess_fork_exec(PyObject *module, PyObject *args)
+subprocess_fork_exec_impl(PyObject *module, PyObject *process_args,
+ PyObject *executable_list, int close_fds,
+ PyObject *py_fds_to_keep, PyObject *cwd_obj,
+ PyObject *env_list, int p2cread, int p2cwrite,
+ int c2pread, int c2pwrite, int errread,
+ int errwrite, int errpipe_read, int errpipe_write,
+ int restore_signals, int call_setsid,
+ pid_t pgid_to_set, PyObject *gid_object,
+ PyObject *extra_groups_packed,
+ PyObject *uid_object, int child_umask,
+ PyObject *preexec_fn, int allow_vfork)
+/*[clinic end generated code: output=7ee4f6ee5cf22b5b input=51757287ef266ffa]*/
{
- PyObject *gc_module = NULL;
- PyObject *executable_list, *py_fds_to_keep;
- PyObject *env_list, *preexec_fn;
- PyObject *process_args, *converted_args = NULL, *fast_args = NULL;
+ PyObject *converted_args = NULL, *fast_args = NULL;
PyObject *preexec_fn_args_tuple = NULL;
- PyObject *extra_groups_packed;
- PyObject *uid_object, *gid_object;
- int p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite;
- int errpipe_read, errpipe_write, close_fds, restore_signals;
- int call_setsid;
- pid_t pgid_to_set = -1;
gid_t *extra_groups = NULL;
- int child_umask;
- PyObject *cwd_obj, *cwd_obj2 = NULL;
- const char *cwd;
+ PyObject *cwd_obj2 = NULL;
+ const char *cwd = NULL;
pid_t pid = -1;
int need_to_reenable_gc = 0;
- char *const *exec_array, *const *argv = NULL, *const *envp = NULL;
- Py_ssize_t arg_num, extra_group_size = 0;
+ char *const *argv = NULL, *const *envp = NULL;
+ Py_ssize_t extra_group_size = 0;
int need_after_fork = 0;
int saved_errno = 0;
- int allow_vfork;
-
- if (!PyArg_ParseTuple(
- args, "OOpO!OOiiiiiiiipp" _Py_PARSE_PID "OOOiOp:fork_exec",
- &process_args, &executable_list,
- &close_fds, &PyTuple_Type, &py_fds_to_keep,
- &cwd_obj, &env_list,
- &p2cread, &p2cwrite, &c2pread, &c2pwrite,
- &errread, &errwrite, &errpipe_read, &errpipe_write,
- &restore_signals, &call_setsid, &pgid_to_set,
- &gid_object, &extra_groups_packed, &uid_object, &child_umask,
- &preexec_fn, &allow_vfork))
- return NULL;
PyInterpreterState *interp = PyInterpreterState_Get();
if ((preexec_fn != Py_None) && (interp != PyInterpreterState_Main())) {
@@ -844,7 +903,7 @@ subprocess_fork_exec(PyObject *module, PyObject *args)
need_to_reenable_gc = PyGC_Disable();
}
- exec_array = _PySequence_BytesToCharpArray(executable_list);
+ char *const *exec_array = _PySequence_BytesToCharpArray(executable_list);
if (!exec_array)
goto cleanup;
@@ -862,7 +921,7 @@ subprocess_fork_exec(PyObject *module, PyObject *args)
converted_args = PyTuple_New(num_args);
if (converted_args == NULL)
goto cleanup;
- for (arg_num = 0; arg_num < num_args; ++arg_num) {
+ for (Py_ssize_t arg_num = 0; arg_num < num_args; ++arg_num) {
PyObject *borrowed_arg, *converted_arg;
if (PySequence_Fast_GET_SIZE(fast_args) != num_args) {
PyErr_SetString(PyExc_RuntimeError, "args changed during iteration");
@@ -891,8 +950,6 @@ subprocess_fork_exec(PyObject *module, PyObject *args)
if (PyUnicode_FSConverter(cwd_obj, &cwd_obj2) == 0)
goto cleanup;
cwd = PyBytes_AsString(cwd_obj2);
- } else {
- cwd = NULL;
}
if (extra_groups_packed != Py_None) {
@@ -1019,7 +1076,7 @@ subprocess_fork_exec(PyObject *module, PyObject *args)
py_fds_to_keep, preexec_fn, preexec_fn_args_tuple);
/* Parent (original) process */
- if (pid == -1) {
+ if (pid == (pid_t)-1) {
/* Capture errno for the exception. */
saved_errno = errno;
}
@@ -1068,47 +1125,17 @@ subprocess_fork_exec(PyObject *module, PyObject *args)
if (need_to_reenable_gc) {
PyGC_Enable();
}
- Py_XDECREF(gc_module);
return pid == -1 ? NULL : PyLong_FromPid(pid);
}
-
-PyDoc_STRVAR(subprocess_fork_exec_doc,
-"fork_exec(args, executable_list, close_fds, pass_fds, cwd, env,\n\
- p2cread, p2cwrite, c2pread, c2pwrite,\n\
- errread, errwrite, errpipe_read, errpipe_write,\n\
- restore_signals, call_setsid, pgid_to_set,\n\
- gid, extra_groups, uid,\n\
- preexec_fn)\n\
-\n\
-Forks a child process, closes parent file descriptors as appropriate in the\n\
-child and dups the few that are needed before calling exec() in the child\n\
-process.\n\
-\n\
-If close_fds is true, close file descriptors 3 and higher, except those listed\n\
-in the sorted tuple pass_fds.\n\
-\n\
-The preexec_fn, if supplied, will be called immediately before closing file\n\
-descriptors and exec.\n\
-WARNING: preexec_fn is NOT SAFE if your application uses threads.\n\
- It may trigger infrequent, difficult to debug deadlocks.\n\
-\n\
-If an error occurs in the child process before the exec, it is\n\
-serialized and written to the errpipe_write fd per subprocess.py.\n\
-\n\
-Returns: the child process's PID.\n\
-\n\
-Raises: Only on an error in the parent process.\n\
-");
-
/* module level code ********************************************************/
PyDoc_STRVAR(module_doc,
"A POSIX helper for the subprocess module.");
static PyMethodDef module_methods[] = {
- {"fork_exec", subprocess_fork_exec, METH_VARARGS, subprocess_fork_exec_doc},
+ SUBPROCESS_FORK_EXEC_METHODDEF
{NULL, NULL} /* sentinel */
};
diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c
index 557a6d46ed4632..30b2674d543c67 100644
--- a/Modules/_testcapimodule.c
+++ b/Modules/_testcapimodule.c
@@ -2733,6 +2733,18 @@ type_get_version(PyObject *self, PyObject *type)
}
+static PyObject *
+type_assign_version(PyObject *self, PyObject *type)
+{
+ if (!PyType_Check(type)) {
+ PyErr_SetString(PyExc_TypeError, "argument must be a type");
+ return NULL;
+ }
+ int res = PyUnstable_Type_AssignVersionTag((PyTypeObject *)type);
+ return PyLong_FromLong(res);
+}
+
+
// Test PyThreadState C API
static PyObject *
test_tstate_capi(PyObject *self, PyObject *Py_UNUSED(args))
@@ -3530,6 +3542,7 @@ static PyMethodDef TestMethods[] = {
{"test_py_is_macros", test_py_is_macros, METH_NOARGS},
{"test_py_is_funcs", test_py_is_funcs, METH_NOARGS},
{"type_get_version", type_get_version, METH_O, PyDoc_STR("type->tp_version_tag")},
+ {"type_assign_version", type_assign_version, METH_O, PyDoc_STR("PyUnstable_Type_AssignVersionTag")},
{"test_tstate_capi", test_tstate_capi, METH_NOARGS, NULL},
{"frame_getlocals", frame_getlocals, METH_O, NULL},
{"frame_getglobals", frame_getglobals, METH_O, NULL},
diff --git a/Modules/_threadmodule.c b/Modules/_threadmodule.c
index 9c12c696757439..fd2fd9ab25f113 100644
--- a/Modules/_threadmodule.c
+++ b/Modules/_threadmodule.c
@@ -946,7 +946,7 @@ local_setattro(localobject *self, PyObject *name, PyObject *v)
}
if (r == 1) {
PyErr_Format(PyExc_AttributeError,
- "'%.50s' object attribute '%U' is read-only",
+ "'%.100s' object attribute '%U' is read-only",
Py_TYPE(self)->tp_name, name);
return -1;
}
diff --git a/Modules/cjkcodecs/cjkcodecs.h b/Modules/cjkcodecs/cjkcodecs.h
index 646a9fd255ce20..1b0355310eddab 100644
--- a/Modules/cjkcodecs/cjkcodecs.h
+++ b/Modules/cjkcodecs/cjkcodecs.h
@@ -284,18 +284,45 @@ getmultibytecodec(void)
return _PyImport_GetModuleAttrString("_multibytecodec", "__create_codec");
}
+static void
+destroy_codec_capsule(PyObject *capsule)
+{
+ void *ptr = PyCapsule_GetPointer(capsule, CODEC_CAPSULE);
+ codec_capsule *data = (codec_capsule *)ptr;
+ Py_DECREF(data->cjk_module);
+ PyMem_Free(ptr);
+}
+
+static codec_capsule *
+capsulate_codec(PyObject *mod, const MultibyteCodec *codec)
+{
+ codec_capsule *data = PyMem_Malloc(sizeof(codec_capsule));
+ if (data == NULL) {
+ PyErr_NoMemory();
+ return NULL;
+ }
+ data->codec = codec;
+ data->cjk_module = Py_NewRef(mod);
+ return data;
+}
+
static PyObject *
-_getcodec(const MultibyteCodec *codec)
+_getcodec(PyObject *self, const MultibyteCodec *codec)
{
PyObject *cofunc = getmultibytecodec();
if (cofunc == NULL) {
return NULL;
}
- PyObject *codecobj = PyCapsule_New((void *)codec,
- PyMultibyteCodec_CAPSULE_NAME,
- NULL);
+ codec_capsule *data = capsulate_codec(self, codec);
+ if (data == NULL) {
+ Py_DECREF(cofunc);
+ return NULL;
+ }
+ PyObject *codecobj = PyCapsule_New(data, CODEC_CAPSULE,
+ destroy_codec_capsule);
if (codecobj == NULL) {
+ PyMem_Free(data);
Py_DECREF(cofunc);
return NULL;
}
@@ -323,7 +350,7 @@ getcodec(PyObject *self, PyObject *encoding)
for (int i = 0; i < st->num_codecs; i++) {
const MultibyteCodec *codec = &st->codec_list[i];
if (strcmp(codec->encoding, enc) == 0) {
- return _getcodec(codec);
+ return _getcodec(self, codec);
}
}
@@ -352,8 +379,7 @@ register_maps(PyObject *module)
char mhname[256] = "__map_";
strcpy(mhname + sizeof("__map_") - 1, h->charset);
- PyObject *capsule = PyCapsule_New((void *)h,
- PyMultibyteCodec_CAPSULE_NAME, NULL);
+ PyObject *capsule = PyCapsule_New((void *)h, MAP_CAPSULE, NULL);
if (capsule == NULL) {
return -1;
}
@@ -417,14 +443,14 @@ importmap(const char *modname, const char *symbol,
o = PyObject_GetAttrString(mod, symbol);
if (o == NULL)
goto errorexit;
- else if (!PyCapsule_IsValid(o, PyMultibyteCodec_CAPSULE_NAME)) {
+ else if (!PyCapsule_IsValid(o, MAP_CAPSULE)) {
PyErr_SetString(PyExc_ValueError,
"map data must be a Capsule.");
goto errorexit;
}
else {
struct dbcs_map *map;
- map = PyCapsule_GetPointer(o, PyMultibyteCodec_CAPSULE_NAME);
+ map = PyCapsule_GetPointer(o, MAP_CAPSULE);
if (encmap != NULL)
*encmap = map->encmap;
if (decmap != NULL)
diff --git a/Modules/cjkcodecs/multibytecodec.c b/Modules/cjkcodecs/multibytecodec.c
index 55778cdb59e4dc..8976ad331aaa2a 100644
--- a/Modules/cjkcodecs/multibytecodec.c
+++ b/Modules/cjkcodecs/multibytecodec.c
@@ -67,7 +67,7 @@ typedef struct {
static char *incnewkwarglist[] = {"errors", NULL};
static char *streamkwarglist[] = {"stream", "errors", NULL};
-static PyObject *multibytecodec_encode(MultibyteCodec *,
+static PyObject *multibytecodec_encode(const MultibyteCodec *,
MultibyteCodec_State *, PyObject *, Py_ssize_t *,
PyObject *, int);
@@ -221,7 +221,7 @@ expand_encodebuffer(MultibyteEncodeBuffer *buf, Py_ssize_t esize)
*/
static int
-multibytecodec_encerror(MultibyteCodec *codec,
+multibytecodec_encerror(const MultibyteCodec *codec,
MultibyteCodec_State *state,
MultibyteEncodeBuffer *buf,
PyObject *errors, Py_ssize_t e)
@@ -375,7 +375,7 @@ multibytecodec_encerror(MultibyteCodec *codec,
}
static int
-multibytecodec_decerror(MultibyteCodec *codec,
+multibytecodec_decerror(const MultibyteCodec *codec,
MultibyteCodec_State *state,
MultibyteDecodeBuffer *buf,
PyObject *errors, Py_ssize_t e)
@@ -479,7 +479,7 @@ multibytecodec_decerror(MultibyteCodec *codec,
}
static PyObject *
-multibytecodec_encode(MultibyteCodec *codec,
+multibytecodec_encode(const MultibyteCodec *codec,
MultibyteCodec_State *state,
PyObject *text, Py_ssize_t *inpos_t,
PyObject *errors, int flags)
@@ -720,9 +720,17 @@ static struct PyMethodDef multibytecodec_methods[] = {
};
static int
-multibytecodec_traverse(PyObject *self, visitproc visit, void *arg)
+multibytecodec_clear(MultibyteCodecObject *self)
+{
+ Py_CLEAR(self->cjk_module);
+ return 0;
+}
+
+static int
+multibytecodec_traverse(MultibyteCodecObject *self, visitproc visit, void *arg)
{
Py_VISIT(Py_TYPE(self));
+ Py_VISIT(self->cjk_module);
return 0;
}
@@ -731,6 +739,7 @@ multibytecodec_dealloc(MultibyteCodecObject *self)
{
PyObject_GC_UnTrack(self);
PyTypeObject *tp = Py_TYPE(self);
+ (void)multibytecodec_clear(self);
tp->tp_free(self);
Py_DECREF(tp);
}
@@ -740,6 +749,7 @@ static PyType_Slot multibytecodec_slots[] = {
{Py_tp_getattro, PyObject_GenericGetAttr},
{Py_tp_methods, multibytecodec_methods},
{Py_tp_traverse, multibytecodec_traverse},
+ {Py_tp_clear, multibytecodec_clear},
{0, NULL},
};
@@ -1953,14 +1963,14 @@ _multibytecodec___create_codec(PyObject *module, PyObject *arg)
/*[clinic end generated code: output=cfa3dce8260e809d input=6840b2a6b183fcfa]*/
{
MultibyteCodecObject *self;
- MultibyteCodec *codec;
- if (!PyCapsule_IsValid(arg, PyMultibyteCodec_CAPSULE_NAME)) {
+ if (!PyCapsule_IsValid(arg, CODEC_CAPSULE)) {
PyErr_SetString(PyExc_ValueError, "argument type invalid");
return NULL;
}
- codec = PyCapsule_GetPointer(arg, PyMultibyteCodec_CAPSULE_NAME);
+ codec_capsule *data = PyCapsule_GetPointer(arg, CODEC_CAPSULE);
+ const MultibyteCodec *codec = data->codec;
if (codec->codecinit != NULL && codec->codecinit(codec->config) != 0)
return NULL;
@@ -1969,6 +1979,7 @@ _multibytecodec___create_codec(PyObject *module, PyObject *arg)
if (self == NULL)
return NULL;
self->codec = codec;
+ self->cjk_module = Py_NewRef(data->cjk_module);
PyObject_GC_Track(self);
return (PyObject *)self;
diff --git a/Modules/cjkcodecs/multibytecodec.h b/Modules/cjkcodecs/multibytecodec.h
index 69404ba96aa1f0..327cb51129d945 100644
--- a/Modules/cjkcodecs/multibytecodec.h
+++ b/Modules/cjkcodecs/multibytecodec.h
@@ -62,14 +62,15 @@ typedef struct {
typedef struct {
PyObject_HEAD
- MultibyteCodec *codec;
+ const MultibyteCodec *codec;
+ PyObject *cjk_module;
} MultibyteCodecObject;
#define MultibyteCodec_Check(state, op) Py_IS_TYPE((op), state->multibytecodec_type)
#define _MultibyteStatefulCodec_HEAD \
PyObject_HEAD \
- MultibyteCodec *codec; \
+ const MultibyteCodec *codec; \
MultibyteCodec_State state; \
PyObject *errors;
typedef struct {
@@ -130,7 +131,13 @@ typedef struct {
#define MBENC_FLUSH 0x0001 /* encode all characters encodable */
#define MBENC_MAX MBENC_FLUSH
-#define PyMultibyteCodec_CAPSULE_NAME "multibytecodec.__map_*"
+typedef struct {
+ const MultibyteCodec *codec;
+ PyObject *cjk_module;
+} codec_capsule;
+
+#define MAP_CAPSULE "multibytecodec.map"
+#define CODEC_CAPSULE "multibytecodec.codec"
#ifdef __cplusplus
diff --git a/Modules/clinic/_posixsubprocess.c.h b/Modules/clinic/_posixsubprocess.c.h
new file mode 100644
index 00000000000000..f08878cf668908
--- /dev/null
+++ b/Modules/clinic/_posixsubprocess.c.h
@@ -0,0 +1,162 @@
+/*[clinic input]
+preserve
+[clinic start generated code]*/
+
+#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
+# include "pycore_gc.h" // PyGC_Head
+# include "pycore_runtime.h" // _Py_ID()
+#endif
+
+
+PyDoc_STRVAR(subprocess_fork_exec__doc__,
+"fork_exec($module, args, executable_list, close_fds, pass_fds, cwd,\n"
+" env, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite,\n"
+" errpipe_read, errpipe_write, restore_signals, call_setsid,\n"
+" pgid_to_set, gid, extra_groups, uid, child_umask, preexec_fn,\n"
+" allow_vfork, /)\n"
+"--\n"
+"\n"
+"Spawn a fresh new child process.\n"
+"\n"
+"Fork a child process, close parent file descriptors as appropriate in the\n"
+"child and duplicate the few that are needed before calling exec() in the\n"
+"child process.\n"
+"\n"
+"If close_fds is True, close file descriptors 3 and higher, except those listed\n"
+"in the sorted tuple pass_fds.\n"
+"\n"
+"The preexec_fn, if supplied, will be called immediately before closing file\n"
+"descriptors and exec.\n"
+"\n"
+"WARNING: preexec_fn is NOT SAFE if your application uses threads.\n"
+" It may trigger infrequent, difficult to debug deadlocks.\n"
+"\n"
+"If an error occurs in the child process before the exec, it is\n"
+"serialized and written to the errpipe_write fd per subprocess.py.\n"
+"\n"
+"Returns: the child process\'s PID.\n"
+"\n"
+"Raises: Only on an error in the parent process.");
+
+#define SUBPROCESS_FORK_EXEC_METHODDEF \
+ {"fork_exec", _PyCFunction_CAST(subprocess_fork_exec), METH_FASTCALL, subprocess_fork_exec__doc__},
+
+static PyObject *
+subprocess_fork_exec_impl(PyObject *module, PyObject *process_args,
+ PyObject *executable_list, int close_fds,
+ PyObject *py_fds_to_keep, PyObject *cwd_obj,
+ PyObject *env_list, int p2cread, int p2cwrite,
+ int c2pread, int c2pwrite, int errread,
+ int errwrite, int errpipe_read, int errpipe_write,
+ int restore_signals, int call_setsid,
+ pid_t pgid_to_set, PyObject *gid_object,
+ PyObject *extra_groups_packed,
+ PyObject *uid_object, int child_umask,
+ PyObject *preexec_fn, int allow_vfork);
+
+static PyObject *
+subprocess_fork_exec(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
+{
+ PyObject *return_value = NULL;
+ PyObject *process_args;
+ PyObject *executable_list;
+ int close_fds;
+ PyObject *py_fds_to_keep;
+ PyObject *cwd_obj;
+ PyObject *env_list;
+ int p2cread;
+ int p2cwrite;
+ int c2pread;
+ int c2pwrite;
+ int errread;
+ int errwrite;
+ int errpipe_read;
+ int errpipe_write;
+ int restore_signals;
+ int call_setsid;
+ pid_t pgid_to_set;
+ PyObject *gid_object;
+ PyObject *extra_groups_packed;
+ PyObject *uid_object;
+ int child_umask;
+ PyObject *preexec_fn;
+ int allow_vfork;
+
+ if (!_PyArg_CheckPositional("fork_exec", nargs, 23, 23)) {
+ goto exit;
+ }
+ process_args = args[0];
+ executable_list = args[1];
+ close_fds = PyObject_IsTrue(args[2]);
+ if (close_fds < 0) {
+ goto exit;
+ }
+ if (!PyTuple_Check(args[3])) {
+ _PyArg_BadArgument("fork_exec", "argument 4", "tuple", args[3]);
+ goto exit;
+ }
+ py_fds_to_keep = args[3];
+ cwd_obj = args[4];
+ env_list = args[5];
+ p2cread = _PyLong_AsInt(args[6]);
+ if (p2cread == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ p2cwrite = _PyLong_AsInt(args[7]);
+ if (p2cwrite == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ c2pread = _PyLong_AsInt(args[8]);
+ if (c2pread == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ c2pwrite = _PyLong_AsInt(args[9]);
+ if (c2pwrite == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ errread = _PyLong_AsInt(args[10]);
+ if (errread == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ errwrite = _PyLong_AsInt(args[11]);
+ if (errwrite == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ errpipe_read = _PyLong_AsInt(args[12]);
+ if (errpipe_read == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ errpipe_write = _PyLong_AsInt(args[13]);
+ if (errpipe_write == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ restore_signals = PyObject_IsTrue(args[14]);
+ if (restore_signals < 0) {
+ goto exit;
+ }
+ call_setsid = PyObject_IsTrue(args[15]);
+ if (call_setsid < 0) {
+ goto exit;
+ }
+ pgid_to_set = PyLong_AsPid(args[16]);
+ if (pgid_to_set == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ gid_object = args[17];
+ extra_groups_packed = args[18];
+ uid_object = args[19];
+ child_umask = _PyLong_AsInt(args[20]);
+ if (child_umask == -1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ preexec_fn = args[21];
+ allow_vfork = PyObject_IsTrue(args[22]);
+ if (allow_vfork < 0) {
+ goto exit;
+ }
+ return_value = subprocess_fork_exec_impl(module, process_args, executable_list, close_fds, py_fds_to_keep, cwd_obj, env_list, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, errpipe_read, errpipe_write, restore_signals, call_setsid, pgid_to_set, gid_object, extra_groups_packed, uid_object, child_umask, preexec_fn, allow_vfork);
+
+exit:
+ return return_value;
+}
+/*[clinic end generated code: output=46d71e86845c93d7 input=a9049054013a1b77]*/
diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c
index 4eaa5490b6134c..966c1e615502ef 100644
--- a/Modules/gcmodule.c
+++ b/Modules/gcmodule.c
@@ -418,8 +418,20 @@ validate_list(PyGC_Head *head, enum flagstates flags)
static void
update_refs(PyGC_Head *containers)
{
+ PyGC_Head *next;
PyGC_Head *gc = GC_NEXT(containers);
- for (; gc != containers; gc = GC_NEXT(gc)) {
+
+ while (gc != containers) {
+ next = GC_NEXT(gc);
+ /* Move any object that might have become immortal to the
+ * permanent generation as the reference count is not accurately
+ * reflecting the actual number of live references to this object
+ */
+ if (_Py_IsImmortal(FROM_GC(gc))) {
+ gc_list_move(gc, &get_gc_state()->permanent_generation.head);
+ gc = next;
+ continue;
+ }
gc_reset_refs(gc, Py_REFCNT(FROM_GC(gc)));
/* Python's cyclic gc should never see an incoming refcount
* of 0: if something decref'ed to 0, it should have been
@@ -440,6 +452,7 @@ update_refs(PyGC_Head *containers)
* check instead of an assert?
*/
_PyObject_ASSERT(FROM_GC(gc), gc_get_refs(gc) != 0);
+ gc = next;
}
}
@@ -2348,16 +2361,17 @@ PyVarObject *
_PyObject_GC_Resize(PyVarObject *op, Py_ssize_t nitems)
{
const size_t basicsize = _PyObject_VAR_SIZE(Py_TYPE(op), nitems);
+ const size_t presize = _PyType_PreHeaderSize(((PyObject *)op)->ob_type);
_PyObject_ASSERT((PyObject *)op, !_PyObject_GC_IS_TRACKED(op));
- if (basicsize > (size_t)PY_SSIZE_T_MAX - sizeof(PyGC_Head)) {
+ if (basicsize > (size_t)PY_SSIZE_T_MAX - presize) {
return (PyVarObject *)PyErr_NoMemory();
}
-
- PyGC_Head *g = AS_GC(op);
- g = (PyGC_Head *)PyObject_Realloc(g, sizeof(PyGC_Head) + basicsize);
- if (g == NULL)
+ char *mem = (char *)op - presize;
+ mem = (char *)PyObject_Realloc(mem, presize + basicsize);
+ if (mem == NULL) {
return (PyVarObject *)PyErr_NoMemory();
- op = (PyVarObject *) FROM_GC(g);
+ }
+ op = (PyVarObject *) (mem + presize);
Py_SET_SIZE(op, nitems);
return op;
}
diff --git a/Objects/boolobject.c b/Objects/boolobject.c
index 9d8e956e06f712..597a76fa5cb162 100644
--- a/Objects/boolobject.c
+++ b/Objects/boolobject.c
@@ -145,10 +145,14 @@ static PyNumberMethods bool_as_number = {
0, /* nb_index */
};
-static void _Py_NO_RETURN
-bool_dealloc(PyObject* Py_UNUSED(ignore))
+static void
+bool_dealloc(PyObject *boolean)
{
- _Py_FatalRefcountError("deallocating True or False");
+ /* This should never get called, but we also don't want to SEGV if
+ * we accidentally decref Booleans out of existence. Instead,
+ * since bools are immortal, re-set the reference count.
+ */
+ _Py_SetImmortal(boolean);
}
/* The type object for bool. Note that this cannot be subclassed! */
diff --git a/Objects/bytes_methods.c b/Objects/bytes_methods.c
index ef9e65e566ece9..33aa9c3db6e805 100644
--- a/Objects/bytes_methods.c
+++ b/Objects/bytes_methods.c
@@ -258,9 +258,12 @@ _Py_bytes_istitle(const char *cptr, Py_ssize_t len)
const unsigned char *e;
int cased, previous_is_cased;
- /* Shortcut for single character strings */
- if (len == 1)
- return PyBool_FromLong(Py_ISUPPER(*p));
+ if (len == 1) {
+ if (Py_ISUPPER(*p)) {
+ Py_RETURN_TRUE;
+ }
+ Py_RETURN_FALSE;
+ }
/* Special case for empty strings */
if (len == 0)
diff --git a/Objects/longobject.c b/Objects/longobject.c
index bb4eac0d932bb8..d98bbbb6d6ff46 100644
--- a/Objects/longobject.c
+++ b/Objects/longobject.c
@@ -52,8 +52,7 @@ static PyObject *
get_small_int(sdigit ival)
{
assert(IS_SMALL_INT(ival));
- PyObject *v = (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS + ival];
- return Py_NewRef(v);
+ return (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS + ival];
}
static PyLongObject *
@@ -3271,6 +3270,27 @@ long_richcompare(PyObject *self, PyObject *other, int op)
Py_RETURN_RICHCOMPARE(result, 0, op);
}
+static void
+long_dealloc(PyObject *self)
+{
+ /* This should never get called, but we also don't want to SEGV if
+ * we accidentally decref small Ints out of existence. Instead,
+ * since small Ints are immortal, re-set the reference count.
+ */
+ PyLongObject *pylong = (PyLongObject*)self;
+ if (pylong && _PyLong_IsCompact(pylong)) {
+ stwodigits ival = medium_value(pylong);
+ if (IS_SMALL_INT(ival)) {
+ PyLongObject *small_pylong = (PyLongObject *)get_small_int((sdigit)ival);
+ if (pylong == small_pylong) {
+ _Py_SetImmortal(self);
+ return;
+ }
+ }
+ }
+ Py_TYPE(self)->tp_free(self);
+}
+
static Py_hash_t
long_hash(PyLongObject *v)
{
@@ -6233,7 +6253,7 @@ PyTypeObject PyLong_Type = {
"int", /* tp_name */
offsetof(PyLongObject, long_value.ob_digit), /* tp_basicsize */
sizeof(digit), /* tp_itemsize */
- 0, /* tp_dealloc */
+ long_dealloc, /* tp_dealloc */
0, /* tp_vectorcall_offset */
0, /* tp_getattr */
0, /* tp_setattr */
diff --git a/Objects/memoryobject.c b/Objects/memoryobject.c
index 1d6cc3b508448d..34cc797b404cda 100644
--- a/Objects/memoryobject.c
+++ b/Objects/memoryobject.c
@@ -2642,7 +2642,11 @@ static Py_ssize_t
memory_length(PyMemoryViewObject *self)
{
CHECK_RELEASED_INT(self);
- return self->view.ndim == 0 ? 1 : self->view.shape[0];
+ if (self->view.ndim == 0) {
+ PyErr_SetString(PyExc_TypeError, "0-dim memory has no length");
+ return -1;
+ }
+ return self->view.shape[0];
}
/* As mapping */
diff --git a/Objects/object.c b/Objects/object.c
index 56747fa193e178..a784e6bcbf97f9 100644
--- a/Objects/object.c
+++ b/Objects/object.c
@@ -1033,7 +1033,7 @@ PyObject_GetAttr(PyObject *v, PyObject *name)
}
else {
PyErr_Format(PyExc_AttributeError,
- "'%.50s' object has no attribute '%U'",
+ "'%.100s' object has no attribute '%U'",
tp->tp_name, name);
}
@@ -1353,7 +1353,7 @@ _PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method)
}
PyErr_Format(PyExc_AttributeError,
- "'%.50s' object has no attribute '%U'",
+ "'%.100s' object has no attribute '%U'",
tp->tp_name, name);
set_attribute_error_context(obj, name);
@@ -1474,7 +1474,7 @@ _PyObject_GenericGetAttrWithDict(PyObject *obj, PyObject *name,
if (!suppress) {
PyErr_Format(PyExc_AttributeError,
- "'%.50s' object has no attribute '%U'",
+ "'%.100s' object has no attribute '%U'",
tp->tp_name, name);
set_attribute_error_context(obj, name);
@@ -1545,7 +1545,7 @@ _PyObject_GenericSetAttrWithDict(PyObject *obj, PyObject *name,
}
else {
PyErr_Format(PyExc_AttributeError,
- "'%.50s' object attribute '%U' is read-only",
+ "'%.100s' object attribute '%U' is read-only",
tp->tp_name, name);
}
goto done;
@@ -1754,10 +1754,14 @@ none_repr(PyObject *op)
return PyUnicode_FromString("None");
}
-static void _Py_NO_RETURN
-none_dealloc(PyObject* Py_UNUSED(ignore))
+static void
+none_dealloc(PyObject* none)
{
- _Py_FatalRefcountError("deallocating None");
+ /* This should never get called, but we also don't want to SEGV if
+ * we accidentally decref None out of existence. Instead,
+ * since None is an immortal object, re-set the reference count.
+ */
+ _Py_SetImmortal(none);
}
static PyObject *
@@ -1823,7 +1827,7 @@ PyTypeObject _PyNone_Type = {
"NoneType",
0,
0,
- none_dealloc, /*tp_dealloc*/ /*never called*/
+ none_dealloc, /*tp_dealloc*/
0, /*tp_vectorcall_offset*/
0, /*tp_getattr*/
0, /*tp_setattr*/
@@ -1860,8 +1864,9 @@ PyTypeObject _PyNone_Type = {
};
PyObject _Py_NoneStruct = {
- _PyObject_EXTRA_INIT
- 1, &_PyNone_Type
+ _PyObject_EXTRA_INIT
+ { _Py_IMMORTAL_REFCNT },
+ &_PyNone_Type
};
/* NotImplemented is an object that can be used to signal that an
@@ -1894,13 +1899,14 @@ notimplemented_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
Py_RETURN_NOTIMPLEMENTED;
}
-static void _Py_NO_RETURN
-notimplemented_dealloc(PyObject* ignore)
+static void
+notimplemented_dealloc(PyObject *notimplemented)
{
/* This should never get called, but we also don't want to SEGV if
- * we accidentally decref NotImplemented out of existence.
+ * we accidentally decref NotImplemented out of existence. Instead,
+ * since NotImplemented is an immortal object, re-set the reference count.
*/
- Py_FatalError("deallocating NotImplemented");
+ _Py_SetImmortal(notimplemented);
}
static int
@@ -1962,12 +1968,10 @@ PyTypeObject _PyNotImplemented_Type = {
PyObject _Py_NotImplementedStruct = {
_PyObject_EXTRA_INIT
- 1, &_PyNotImplemented_Type
+ { _Py_IMMORTAL_REFCNT },
+ &_PyNotImplemented_Type
};
-#ifdef MS_WINDOWS
-extern PyTypeObject PyHKEY_Type;
-#endif
extern PyTypeObject _Py_GenericAliasIterType;
extern PyTypeObject _PyMemoryIter_Type;
extern PyTypeObject _PyLineIterator;
@@ -2018,9 +2022,6 @@ static PyTypeObject* static_types[] = {
&PyFunction_Type,
&PyGen_Type,
&PyGetSetDescr_Type,
-#ifdef MS_WINDOWS
- &PyHKEY_Type,
-#endif
&PyInstanceMethod_Type,
&PyListIter_Type,
&PyListRevIter_Type,
@@ -2149,7 +2150,8 @@ new_reference(PyObject *op)
if (_PyRuntime.tracemalloc.config.tracing) {
_PyTraceMalloc_NewReference(op);
}
- Py_SET_REFCNT(op, 1);
+ // Skip the immortal object check in Py_SET_REFCNT; always set refcnt to 1
+ op->ob_refcnt = 1;
#ifdef Py_TRACE_REFS
_Py_AddToAllObjects(op, 1);
#endif
diff --git a/Objects/setobject.c b/Objects/setobject.c
index fcdda2a0bca2b6..58f0ae73c0c403 100644
--- a/Objects/setobject.c
+++ b/Objects/setobject.c
@@ -2543,6 +2543,7 @@ static PyTypeObject _PySetDummy_Type = {
};
static PyObject _dummy_struct = {
- _PyObject_EXTRA_INIT
- 2, &_PySetDummy_Type
+ _PyObject_EXTRA_INIT
+ { _Py_IMMORTAL_REFCNT },
+ &_PySetDummy_Type
};
diff --git a/Objects/sliceobject.c b/Objects/sliceobject.c
index 584ebce721faed..e6776ac92b669c 100644
--- a/Objects/sliceobject.c
+++ b/Objects/sliceobject.c
@@ -29,6 +29,16 @@ ellipsis_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
return Py_NewRef(Py_Ellipsis);
}
+static void
+ellipsis_dealloc(PyObject *ellipsis)
+{
+ /* This should never get called, but we also don't want to SEGV if
+ * we accidentally decref Ellipsis out of existence. Instead,
+ * since Ellipsis is an immortal object, re-set the reference count.
+ */
+ _Py_SetImmortal(ellipsis);
+}
+
static PyObject *
ellipsis_repr(PyObject *op)
{
@@ -51,7 +61,7 @@ PyTypeObject PyEllipsis_Type = {
"ellipsis", /* tp_name */
0, /* tp_basicsize */
0, /* tp_itemsize */
- 0, /*never called*/ /* tp_dealloc */
+ ellipsis_dealloc, /* tp_dealloc */
0, /* tp_vectorcall_offset */
0, /* tp_getattr */
0, /* tp_setattr */
@@ -89,7 +99,8 @@ PyTypeObject PyEllipsis_Type = {
PyObject _Py_EllipsisObject = {
_PyObject_EXTRA_INIT
- 1, &PyEllipsis_Type
+ { _Py_IMMORTAL_REFCNT },
+ &PyEllipsis_Type
};
diff --git a/Objects/typeobject.c b/Objects/typeobject.c
index f40a8fbd2e3382..e63016ba196416 100644
--- a/Objects/typeobject.c
+++ b/Objects/typeobject.c
@@ -318,27 +318,11 @@ _PyType_InitCache(PyInterpreterState *interp)
entry->version = 0;
// Set to None so _PyType_Lookup() can use Py_SETREF(),
// rather than using slower Py_XSETREF().
- // (See _PyType_FixCacheRefcounts() about the refcount.)
entry->name = Py_None;
entry->value = NULL;
}
}
-// This is the temporary fix used by pycore_create_interpreter(),
-// in pylifecycle.c. _PyType_InitCache() is called before the GIL
-// has been created (for the main interpreter) and without the
-// "current" thread state set. This causes crashes when the
-// reftotal is updated, so we don't modify the refcount in
-// _PyType_InitCache(), and instead do it later by calling
-// _PyType_FixCacheRefcounts().
-// XXX This workaround should be removed once we have immortal
-// objects (PEP 683).
-void
-_PyType_FixCacheRefcounts(void)
-{
- _Py_RefcntAdd(Py_None, (1 << MCACHE_SIZE_EXP));
-}
-
static unsigned int
_PyType_ClearCache(PyInterpreterState *interp)
@@ -608,6 +592,11 @@ assign_version_tag(PyTypeObject *type)
return 1;
}
+int PyUnstable_Type_AssignVersionTag(PyTypeObject *type)
+{
+ return assign_version_tag(type);
+}
+
static PyMemberDef type_members[] = {
{"__basicsize__", T_PYSSIZET, offsetof(PyTypeObject,tp_basicsize),READONLY},
@@ -4344,7 +4333,7 @@ _Py_type_getattro_impl(PyTypeObject *type, PyObject *name, int * suppress_missin
/* Give up */
if (suppress_missing_attribute == NULL) {
PyErr_Format(PyExc_AttributeError,
- "type object '%.50s' has no attribute '%U'",
+ "type object '%.100s' has no attribute '%U'",
type->tp_name, name);
} else {
// signal the caller we have not set an PyExc_AttributeError and gave up
@@ -9367,13 +9356,6 @@ _super_lookup_descr(PyTypeObject *su_type, PyTypeObject *su_obj_type, PyObject *
PyObject *mro, *res;
Py_ssize_t i, n;
- /* We want __class__ to return the class of the super object
- (i.e. super, or a subclass), not the class of su->obj. */
- if (PyUnicode_Check(name) &&
- PyUnicode_GET_LENGTH(name) == 9 &&
- _PyUnicode_Equal(name, &_Py_ID(__class__)))
- return NULL;
-
mro = su_obj_type->tp_mro;
if (mro == NULL)
return NULL;
@@ -9417,7 +9399,7 @@ _super_lookup_descr(PyTypeObject *su_type, PyTypeObject *su_obj_type, PyObject *
static PyObject *
do_super_lookup(superobject *su, PyTypeObject *su_type, PyObject *su_obj,
- PyTypeObject *su_obj_type, PyObject *name, int *meth_found)
+ PyTypeObject *su_obj_type, PyObject *name, int *method)
{
PyObject *res;
int temp_su = 0;
@@ -9428,8 +9410,8 @@ do_super_lookup(superobject *su, PyTypeObject *su_type, PyObject *su_obj,
res = _super_lookup_descr(su_type, su_obj_type, name);
if (res != NULL) {
- if (meth_found && _PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)) {
- *meth_found = 1;
+ if (method && _PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)) {
+ *method = 1;
}
else {
descrgetfunc f = Py_TYPE(res)->tp_descr_get;
@@ -9470,6 +9452,14 @@ static PyObject *
super_getattro(PyObject *self, PyObject *name)
{
superobject *su = (superobject *)self;
+
+ /* We want __class__ to return the class of the super object
+ (i.e. super, or a subclass), not the class of su->obj. */
+ if (PyUnicode_Check(name) &&
+ PyUnicode_GET_LENGTH(name) == 9 &&
+ _PyUnicode_Equal(name, &_Py_ID(__class__)))
+ return PyObject_GenericGetAttr(self, name);
+
return do_super_lookup(su, su->type, su->obj, su->obj_type, name, NULL);
}
@@ -9527,13 +9517,13 @@ supercheck(PyTypeObject *type, PyObject *obj)
}
PyObject *
-_PySuper_Lookup(PyTypeObject *su_type, PyObject *su_obj, PyObject *name, int *meth_found)
+_PySuper_Lookup(PyTypeObject *su_type, PyObject *su_obj, PyObject *name, int *method)
{
PyTypeObject *su_obj_type = supercheck(su_type, su_obj);
if (su_obj_type == NULL) {
return NULL;
}
- PyObject *res = do_super_lookup(NULL, su_type, su_obj, su_obj_type, name, meth_found);
+ PyObject *res = do_super_lookup(NULL, su_type, su_obj, su_obj_type, name, method);
Py_DECREF(su_obj_type);
return res;
}
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c
index 85e5ae735709fd..fd056e38f3f86b 100644
--- a/Objects/unicodeobject.c
+++ b/Objects/unicodeobject.c
@@ -228,14 +228,18 @@ static inline PyObject* unicode_new_empty(void)
to strings in this dictionary are *not* counted in the string's ob_refcnt.
When the interned string reaches a refcnt of 0 the string deallocation
function will delete the reference from this dictionary.
- Another way to look at this is that to say that the actual reference
- count of a string is: s->ob_refcnt + (s->state ? 2 : 0)
*/
static inline PyObject *get_interned_dict(PyInterpreterState *interp)
{
return _Py_INTERP_CACHED_OBJECT(interp, interned_strings);
}
+Py_ssize_t
+_PyUnicode_InternedSize()
+{
+ return PyObject_Length(get_interned_dict(_PyInterpreterState_GET()));
+}
+
static int
init_interned_dict(PyInterpreterState *interp)
{
@@ -1538,30 +1542,19 @@ find_maxchar_surrogates(const wchar_t *begin, const wchar_t *end,
static void
unicode_dealloc(PyObject *unicode)
{
- PyInterpreterState *interp = _PyInterpreterState_GET();
#ifdef Py_DEBUG
if (!unicode_is_finalizing() && unicode_is_singleton(unicode)) {
_Py_FatalRefcountError("deallocating an Unicode singleton");
}
#endif
+ /* This should never get called, but we also don't want to SEGV if
+ * we accidentally decref an immortal string out of existence. Since
+ * the string is an immortal object, just re-set the reference count.
+ */
if (PyUnicode_CHECK_INTERNED(unicode)) {
- /* Revive the dead object temporarily. PyDict_DelItem() removes two
- references (key and value) which were ignored by
- PyUnicode_InternInPlace(). Use refcnt=3 rather than refcnt=2
- to prevent calling unicode_dealloc() again. Adjust refcnt after
- PyDict_DelItem(). */
- assert(Py_REFCNT(unicode) == 0);
- Py_SET_REFCNT(unicode, 3);
- PyObject *interned = get_interned_dict(interp);
- assert(interned != NULL);
- if (PyDict_DelItem(interned, unicode) != 0) {
- _PyErr_WriteUnraisableMsg("deletion of interned string failed",
- NULL);
- }
- assert(Py_REFCNT(unicode) == 1);
- Py_SET_REFCNT(unicode, 0);
+ _Py_SetImmortal(unicode);
+ return;
}
-
if (_PyUnicode_HAS_UTF8_MEMORY(unicode)) {
PyObject_Free(_PyUnicode_UTF8(unicode));
}
@@ -14637,11 +14630,21 @@ _PyUnicode_InternInPlace(PyInterpreterState *interp, PyObject **p)
return;
}
- /* The two references in interned dict (key and value) are not counted by
- refcnt. unicode_dealloc() and _PyUnicode_ClearInterned() take care of
- this. */
- Py_SET_REFCNT(s, Py_REFCNT(s) - 2);
- _PyUnicode_STATE(s).interned = 1;
+ if (_Py_IsImmortal(s)) {
+ _PyUnicode_STATE(*p).interned = SSTATE_INTERNED_IMMORTAL_STATIC;
+ return;
+ }
+#ifdef Py_REF_DEBUG
+ /* The reference count value excluding the 2 references from the
+ interned dictionary should be excluded from the RefTotal. The
+ decrements to these objects will not be registered so they
+ need to be accounted for in here. */
+ for (Py_ssize_t i = 0; i < Py_REFCNT(s) - 2; i++) {
+ _Py_DecRefTotal(_PyInterpreterState_GET());
+ }
+#endif
+ _Py_SetImmortal(s);
+ _PyUnicode_STATE(*p).interned = SSTATE_INTERNED_IMMORTAL;
}
void
@@ -14681,10 +14684,20 @@ _PyUnicode_ClearInterned(PyInterpreterState *interp)
}
assert(PyDict_CheckExact(interned));
- /* Interned unicode strings are not forcibly deallocated; rather, we give
- them their stolen references back, and then clear and DECREF the
- interned dict. */
-
+ /* TODO:
+ * Currently, the runtime is not able to guarantee that it can exit without
+ * allocations that carry over to a future initialization of Python within
+ * the same process. i.e:
+ * ./python -X showrefcount -c 'import itertools'
+ * [237 refs, 237 blocks]
+ *
+ * Therefore, this should remain disabled until there is a strict guarantee
+ * that no memory will be left after `Py_Finalize`.
+ */
+#ifdef Py_DEBUG
+ /* For all non-singleton interned strings, restore the two valid references
+ to that instance from within the intern string dictionary and let the
+ normal reference counting process clean up these instances. */
#ifdef INTERNED_STATS
fprintf(stderr, "releasing %zd interned strings\n",
PyDict_GET_SIZE(interned));
@@ -14694,15 +14707,27 @@ _PyUnicode_ClearInterned(PyInterpreterState *interp)
Py_ssize_t pos = 0;
PyObject *s, *ignored_value;
while (PyDict_Next(interned, &pos, &s, &ignored_value)) {
- assert(PyUnicode_CHECK_INTERNED(s));
- // Restore the two references (key and value) ignored
- // by PyUnicode_InternInPlace().
- Py_SET_REFCNT(s, Py_REFCNT(s) + 2);
+ assert(PyUnicode_IS_READY(s));
+ switch (PyUnicode_CHECK_INTERNED(s)) {
+ case SSTATE_INTERNED_IMMORTAL:
+ // Skip the Immortal Instance check and restore
+ // the two references (key and value) ignored
+ // by PyUnicode_InternInPlace().
+ s->ob_refcnt = 2;
#ifdef INTERNED_STATS
- total_length += PyUnicode_GET_LENGTH(s);
+ total_length += PyUnicode_GET_LENGTH(s);
#endif
-
- _PyUnicode_STATE(s).interned = 0;
+ break;
+ case SSTATE_INTERNED_IMMORTAL_STATIC:
+ break;
+ case SSTATE_INTERNED_MORTAL:
+ /* fall through */
+ case SSTATE_NOT_INTERNED:
+ /* fall through */
+ default:
+ Py_UNREACHABLE();
+ }
+ _PyUnicode_STATE(s).interned = SSTATE_NOT_INTERNED;
}
#ifdef INTERNED_STATS
fprintf(stderr,
@@ -14710,6 +14735,12 @@ _PyUnicode_ClearInterned(PyInterpreterState *interp)
total_length);
#endif
+ struct _Py_unicode_state *state = &interp->unicode;
+ struct _Py_unicode_ids *ids = &state->ids;
+ for (Py_ssize_t i=0; i < ids->size; i++) {
+ Py_XINCREF(ids->array[i]);
+ }
+#endif /* Py_DEBUG */
clear_interned_dict(interp);
}
diff --git a/Objects/weakrefobject.c b/Objects/weakrefobject.c
index 5a3e49a6fe45e3..c1afe63ecf66f6 100644
--- a/Objects/weakrefobject.c
+++ b/Objects/weakrefobject.c
@@ -170,10 +170,7 @@ weakref_repr(PyWeakReference *self)
}
Py_INCREF(obj);
- if (_PyObject_LookupAttr(obj, &_Py_ID(__name__), &name) < 0) {
- Py_DECREF(obj);
- return NULL;
- }
+ name = _PyObject_LookupSpecial(obj, &_Py_ID(__name__));
if (name == NULL || !PyUnicode_Check(name)) {
repr = PyUnicode_FromFormat(
"",
diff --git a/PC/clinic/winreg.c.h b/PC/clinic/winreg.c.h
index 7a9474301da8a1..4109c85276f0a4 100644
--- a/PC/clinic/winreg.c.h
+++ b/PC/clinic/winreg.c.h
@@ -219,14 +219,14 @@ winreg_ConnectRegistry(PyObject *module, PyObject *const *args, Py_ssize_t nargs
_PyArg_BadArgument("ConnectRegistry", "argument 1", "str or None", args[0]);
goto exit;
}
- if (!clinic_HKEY_converter(args[1], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[1], &key)) {
goto exit;
}
_return_value = winreg_ConnectRegistry_impl(module, computer_name, key);
if (_return_value == NULL) {
goto exit;
}
- return_value = PyHKEY_FromHKEY(_return_value);
+ return_value = PyHKEY_FromHKEY(_PyModule_GetState(module), _return_value);
exit:
/* Cleanup for computer_name */
@@ -275,7 +275,7 @@ winreg_CreateKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (!_PyArg_CheckPositional("CreateKey", nargs, 2, 2)) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
if (args[1] == Py_None) {
@@ -295,7 +295,7 @@ winreg_CreateKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (_return_value == NULL) {
goto exit;
}
- return_value = PyHKEY_FromHKEY(_return_value);
+ return_value = PyHKEY_FromHKEY(_PyModule_GetState(module), _return_value);
exit:
/* Cleanup for sub_key */
@@ -382,7 +382,7 @@ winreg_CreateKeyEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs, Py
if (!args) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
if (args[1] == Py_None) {
@@ -419,7 +419,7 @@ winreg_CreateKeyEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs, Py
if (_return_value == NULL) {
goto exit;
}
- return_value = PyHKEY_FromHKEY(_return_value);
+ return_value = PyHKEY_FromHKEY(_PyModule_GetState(module), _return_value);
exit:
/* Cleanup for sub_key */
@@ -466,7 +466,7 @@ winreg_DeleteKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (!_PyArg_CheckPositional("DeleteKey", nargs, 2, 2)) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
if (!PyUnicode_Check(args[1])) {
@@ -566,7 +566,7 @@ winreg_DeleteKeyEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs, Py
if (!args) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
if (!PyUnicode_Check(args[1])) {
@@ -634,7 +634,7 @@ winreg_DeleteValue(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (!_PyArg_CheckPositional("DeleteValue", nargs, 2, 2)) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
if (args[1] == Py_None) {
@@ -694,7 +694,7 @@ winreg_EnumKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (!_PyArg_CheckPositional("EnumKey", nargs, 2, 2)) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
index = _PyLong_AsInt(args[1]);
@@ -751,7 +751,7 @@ winreg_EnumValue(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (!_PyArg_CheckPositional("EnumValue", nargs, 2, 2)) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
index = _PyLong_AsInt(args[1]);
@@ -839,7 +839,7 @@ winreg_FlushKey(PyObject *module, PyObject *arg)
PyObject *return_value = NULL;
HKEY key;
- if (!clinic_HKEY_converter(arg, &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), arg, &key)) {
goto exit;
}
return_value = winreg_FlushKey_impl(module, key);
@@ -898,7 +898,7 @@ winreg_LoadKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (!_PyArg_CheckPositional("LoadKey", nargs, 3, 3)) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
if (!PyUnicode_Check(args[1])) {
@@ -999,7 +999,7 @@ winreg_OpenKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje
if (!args) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
if (args[1] == Py_None) {
@@ -1036,7 +1036,7 @@ winreg_OpenKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje
if (_return_value == NULL) {
goto exit;
}
- return_value = PyHKEY_FromHKEY(_return_value);
+ return_value = PyHKEY_FromHKEY(_PyModule_GetState(module), _return_value);
exit:
/* Cleanup for sub_key */
@@ -1116,7 +1116,7 @@ winreg_OpenKeyEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyOb
if (!args) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
if (args[1] == Py_None) {
@@ -1153,7 +1153,7 @@ winreg_OpenKeyEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyOb
if (_return_value == NULL) {
goto exit;
}
- return_value = PyHKEY_FromHKEY(_return_value);
+ return_value = PyHKEY_FromHKEY(_PyModule_GetState(module), _return_value);
exit:
/* Cleanup for sub_key */
@@ -1193,7 +1193,7 @@ winreg_QueryInfoKey(PyObject *module, PyObject *arg)
PyObject *return_value = NULL;
HKEY key;
- if (!clinic_HKEY_converter(arg, &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), arg, &key)) {
goto exit;
}
return_value = winreg_QueryInfoKey_impl(module, key);
@@ -1242,7 +1242,7 @@ winreg_QueryValue(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (!_PyArg_CheckPositional("QueryValue", nargs, 2, 2)) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
if (args[1] == Py_None) {
@@ -1303,7 +1303,7 @@ winreg_QueryValueEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (!_PyArg_CheckPositional("QueryValueEx", nargs, 2, 2)) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
if (args[1] == Py_None) {
@@ -1369,7 +1369,7 @@ winreg_SaveKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (!_PyArg_CheckPositional("SaveKey", nargs, 2, 2)) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
if (!PyUnicode_Check(args[1])) {
@@ -1438,7 +1438,7 @@ winreg_SetValue(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (!_PyArg_CheckPositional("SetValue", nargs, 4, 4)) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
if (args[1] == Py_None) {
@@ -1542,7 +1542,7 @@ winreg_SetValueEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
if (!_PyArg_CheckPositional("SetValueEx", nargs, 5, 5)) {
goto exit;
}
- if (!clinic_HKEY_converter(args[0], &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), args[0], &key)) {
goto exit;
}
if (args[1] == Py_None) {
@@ -1603,7 +1603,7 @@ winreg_DisableReflectionKey(PyObject *module, PyObject *arg)
PyObject *return_value = NULL;
HKEY key;
- if (!clinic_HKEY_converter(arg, &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), arg, &key)) {
goto exit;
}
return_value = winreg_DisableReflectionKey_impl(module, key);
@@ -1641,7 +1641,7 @@ winreg_EnableReflectionKey(PyObject *module, PyObject *arg)
PyObject *return_value = NULL;
HKEY key;
- if (!clinic_HKEY_converter(arg, &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), arg, &key)) {
goto exit;
}
return_value = winreg_EnableReflectionKey_impl(module, key);
@@ -1677,7 +1677,7 @@ winreg_QueryReflectionKey(PyObject *module, PyObject *arg)
PyObject *return_value = NULL;
HKEY key;
- if (!clinic_HKEY_converter(arg, &key)) {
+ if (!clinic_HKEY_converter(_PyModule_GetState(module), arg, &key)) {
goto exit;
}
return_value = winreg_QueryReflectionKey_impl(module, key);
@@ -1795,4 +1795,4 @@ winreg_QueryReflectionKey(PyObject *module, PyObject *arg)
#ifndef WINREG_QUERYREFLECTIONKEY_METHODDEF
#define WINREG_QUERYREFLECTIONKEY_METHODDEF
#endif /* !defined(WINREG_QUERYREFLECTIONKEY_METHODDEF) */
-/*[clinic end generated code: output=715db416dc1321ee input=a9049054013a1b77]*/
+/*[clinic end generated code: output=15dc2e6c4d4e2ad5 input=a9049054013a1b77]*/
diff --git a/PC/layout/support/pip.py b/PC/layout/support/pip.py
index c54acb250a252e..0a6582acf348a3 100644
--- a/PC/layout/support/pip.py
+++ b/PC/layout/support/pip.py
@@ -67,7 +67,6 @@ def extract_pip_files(ns):
"--no-color",
"install",
"pip",
- "setuptools",
"--upgrade",
"--target",
str(dest),
diff --git a/PC/msvcrtmodule.c b/PC/msvcrtmodule.c
index de9a88946aff3e..090254befc934d 100644
--- a/PC/msvcrtmodule.c
+++ b/PC/msvcrtmodule.c
@@ -564,88 +564,81 @@ static struct PyMethodDef msvcrt_functions[] = {
{NULL, NULL}
};
-static void
-insertint(PyObject *d, char *name, int value)
-{
- PyObject *v = PyLong_FromLong((long) value);
- if (v == NULL) {
- /* Don't bother reporting this error */
- PyErr_Clear();
- }
- else {
- PyDict_SetItemString(d, name, v);
- Py_DECREF(v);
- }
-}
-
-static void
-insertptr(PyObject *d, char *name, void *value)
+static int
+insertptr(PyObject *mod, char *name, void *value)
{
PyObject *v = PyLong_FromVoidPtr(value);
if (v == NULL) {
- /* Don't bother reporting this error */
- PyErr_Clear();
- }
- else {
- PyDict_SetItemString(d, name, v);
- Py_DECREF(v);
+ return -1;
}
+ int rc = PyModule_AddObjectRef(mod, name, v);
+ Py_DECREF(v);
+ return rc;
}
+#define INSERTINT(MOD, NAME, VAL) do { \
+ if (PyModule_AddIntConstant(MOD, NAME, VAL) < 0) { \
+ return -1; \
+ } \
+} while (0)
+
+#define INSERTPTR(MOD, NAME, PTR) do { \
+ if (insertptr(MOD, NAME, PTR) < 0) { \
+ return -1; \
+ } \
+} while (0)
+
+#define INSERTSTR(MOD, NAME, CONST) do { \
+ if (PyModule_AddStringConstant(MOD, NAME, CONST) < 0) { \
+ return -1; \
+ } \
+} while (0)
+
static int
exec_module(PyObject* m)
{
- int st;
- PyObject *d = PyModule_GetDict(m); // Borrowed ref.
-
/* constants for the locking() function's mode argument */
- insertint(d, "LK_LOCK", _LK_LOCK);
- insertint(d, "LK_NBLCK", _LK_NBLCK);
- insertint(d, "LK_NBRLCK", _LK_NBRLCK);
- insertint(d, "LK_RLCK", _LK_RLCK);
- insertint(d, "LK_UNLCK", _LK_UNLCK);
+ INSERTINT(m, "LK_LOCK", _LK_LOCK);
+ INSERTINT(m, "LK_NBLCK", _LK_NBLCK);
+ INSERTINT(m, "LK_NBRLCK", _LK_NBRLCK);
+ INSERTINT(m, "LK_RLCK", _LK_RLCK);
+ INSERTINT(m, "LK_UNLCK", _LK_UNLCK);
#ifdef MS_WINDOWS_DESKTOP
- insertint(d, "SEM_FAILCRITICALERRORS", SEM_FAILCRITICALERRORS);
- insertint(d, "SEM_NOALIGNMENTFAULTEXCEPT", SEM_NOALIGNMENTFAULTEXCEPT);
- insertint(d, "SEM_NOGPFAULTERRORBOX", SEM_NOGPFAULTERRORBOX);
- insertint(d, "SEM_NOOPENFILEERRORBOX", SEM_NOOPENFILEERRORBOX);
+ INSERTINT(m, "SEM_FAILCRITICALERRORS", SEM_FAILCRITICALERRORS);
+ INSERTINT(m, "SEM_NOALIGNMENTFAULTEXCEPT", SEM_NOALIGNMENTFAULTEXCEPT);
+ INSERTINT(m, "SEM_NOGPFAULTERRORBOX", SEM_NOGPFAULTERRORBOX);
+ INSERTINT(m, "SEM_NOOPENFILEERRORBOX", SEM_NOOPENFILEERRORBOX);
#endif
#ifdef _DEBUG
- insertint(d, "CRT_WARN", _CRT_WARN);
- insertint(d, "CRT_ERROR", _CRT_ERROR);
- insertint(d, "CRT_ASSERT", _CRT_ASSERT);
- insertint(d, "CRTDBG_MODE_DEBUG", _CRTDBG_MODE_DEBUG);
- insertint(d, "CRTDBG_MODE_FILE", _CRTDBG_MODE_FILE);
- insertint(d, "CRTDBG_MODE_WNDW", _CRTDBG_MODE_WNDW);
- insertint(d, "CRTDBG_REPORT_MODE", _CRTDBG_REPORT_MODE);
- insertptr(d, "CRTDBG_FILE_STDERR", _CRTDBG_FILE_STDERR);
- insertptr(d, "CRTDBG_FILE_STDOUT", _CRTDBG_FILE_STDOUT);
- insertptr(d, "CRTDBG_REPORT_FILE", _CRTDBG_REPORT_FILE);
+ INSERTINT(m, "CRT_WARN", _CRT_WARN);
+ INSERTINT(m, "CRT_ERROR", _CRT_ERROR);
+ INSERTINT(m, "CRT_ASSERT", _CRT_ASSERT);
+ INSERTINT(m, "CRTDBG_MODE_DEBUG", _CRTDBG_MODE_DEBUG);
+ INSERTINT(m, "CRTDBG_MODE_FILE", _CRTDBG_MODE_FILE);
+ INSERTINT(m, "CRTDBG_MODE_WNDW", _CRTDBG_MODE_WNDW);
+ INSERTINT(m, "CRTDBG_REPORT_MODE", _CRTDBG_REPORT_MODE);
+ INSERTPTR(m, "CRTDBG_FILE_STDERR", _CRTDBG_FILE_STDERR);
+ INSERTPTR(m, "CRTDBG_FILE_STDOUT", _CRTDBG_FILE_STDOUT);
+ INSERTPTR(m, "CRTDBG_REPORT_FILE", _CRTDBG_REPORT_FILE);
#endif
+#undef INSERTINT
+#undef INSERTPTR
+
/* constants for the crt versions */
#ifdef _VC_ASSEMBLY_PUBLICKEYTOKEN
- st = PyModule_AddStringConstant(m, "VC_ASSEMBLY_PUBLICKEYTOKEN",
- _VC_ASSEMBLY_PUBLICKEYTOKEN);
- if (st < 0) {
- return -1;
- }
+ INSERTSTR(m, "VC_ASSEMBLY_PUBLICKEYTOKEN", _VC_ASSEMBLY_PUBLICKEYTOKEN);
#endif
#ifdef _CRT_ASSEMBLY_VERSION
- st = PyModule_AddStringConstant(m, "CRT_ASSEMBLY_VERSION",
- _CRT_ASSEMBLY_VERSION);
- if (st < 0) {
- return -1;
- }
+ INSERTSTR(m, "CRT_ASSEMBLY_VERSION", _CRT_ASSEMBLY_VERSION);
#endif
#ifdef __LIBRARIES_ASSEMBLY_NAME_PREFIX
- st = PyModule_AddStringConstant(m, "LIBRARIES_ASSEMBLY_NAME_PREFIX",
- __LIBRARIES_ASSEMBLY_NAME_PREFIX);
- if (st < 0) {
- return -1;
- }
+ INSERTSTR(m, "LIBRARIES_ASSEMBLY_NAME_PREFIX",
+ __LIBRARIES_ASSEMBLY_NAME_PREFIX);
#endif
+#undef INSERTSTR
+
/* constants for the 2010 crt versions */
#if defined(_VC_CRT_MAJOR_VERSION) && defined (_VC_CRT_MINOR_VERSION) && defined(_VC_CRT_BUILD_VERSION) && defined(_VC_CRT_RBUILD_VERSION)
PyObject *version = PyUnicode_FromFormat("%d.%d.%d.%d",
@@ -656,14 +649,12 @@ exec_module(PyObject* m)
if (version == NULL) {
return -1;
}
- st = PyModule_AddObjectRef(m, "CRT_ASSEMBLY_VERSION", version);
+ int st = PyModule_AddObjectRef(m, "CRT_ASSEMBLY_VERSION", version);
Py_DECREF(version);
if (st < 0) {
return -1;
}
#endif
- /* make compiler warning quiet if st is unused */
- (void)st;
return 0;
}
diff --git a/PC/winreg.c b/PC/winreg.c
index 15d32e7fcb99c9..4884125c3609ad 100644
--- a/PC/winreg.c
+++ b/PC/winreg.c
@@ -15,15 +15,22 @@
#define PY_SSIZE_T_CLEAN
#include "Python.h"
#include "pycore_object.h" // _PyObject_Init()
+#include "pycore_moduleobject.h"
#include "structmember.h" // PyMemberDef
#include <windows.h>
#if defined(MS_WINDOWS_DESKTOP) || defined(MS_WINDOWS_SYSTEM) || defined(MS_WINDOWS_GAMES)
-static BOOL PyHKEY_AsHKEY(PyObject *ob, HKEY *pRes, BOOL bNoneOK);
-static BOOL clinic_HKEY_converter(PyObject *ob, void *p);
-static PyObject *PyHKEY_FromHKEY(HKEY h);
-static BOOL PyHKEY_Close(PyObject *obHandle);
+typedef struct {
+ PyTypeObject *PyHKEY_Type;
+} winreg_state;
+
+/* Forward declares */
+
+static BOOL PyHKEY_AsHKEY(winreg_state *st, PyObject *ob, HKEY *pRes, BOOL bNoneOK);
+static BOOL clinic_HKEY_converter(winreg_state *st, PyObject *ob, void *p);
+static PyObject *PyHKEY_FromHKEY(winreg_state *st, HKEY h);
+static BOOL PyHKEY_Close(winreg_state *st, PyObject *obHandle);
static char errNotAHandle[] = "Object is not a handle";
@@ -35,8 +42,6 @@ static char errNotAHandle[] = "Object is not a handle";
#define PyErr_SetFromWindowsErrWithFunction(rc, fnname) \
PyErr_SetFromWindowsErr(rc)
-/* Forward declares */
-
/* Doc strings */
PyDoc_STRVAR(module_doc,
"This module provides access to the Windows registry API.\n"
@@ -114,7 +119,7 @@ typedef struct {
HKEY hkey;
} PyHKEYObject;
-#define PyHKEY_Check(op) Py_IS_TYPE(op, &PyHKEY_Type)
+#define PyHKEY_Check(st, op) Py_IS_TYPE(op, st->PyHKEY_Type)
static char *failMsg = "bad operand type";
@@ -147,7 +152,18 @@ PyHKEY_deallocFunc(PyObject *ob)
PyHKEYObject *obkey = (PyHKEYObject *)ob;
if (obkey->hkey)
RegCloseKey((HKEY)obkey->hkey);
- PyObject_Free(ob);
+
+ PyTypeObject *tp = Py_TYPE(ob);
+ PyObject_GC_UnTrack(ob);
+ PyObject_GC_Del(ob);
+ Py_DECREF(tp);
+}
+
+static int
+PyHKEY_traverseFunc(PyHKEYObject *self, visitproc visit, void *arg)
+{
+ Py_VISIT(Py_TYPE(self));
+ return 0;
}
static int
@@ -189,29 +205,6 @@ PyHKEY_hashFunc(PyObject *ob)
}
-static PyNumberMethods PyHKEY_NumberMethods =
-{
- PyHKEY_binaryFailureFunc, /* nb_add */
- PyHKEY_binaryFailureFunc, /* nb_subtract */
- PyHKEY_binaryFailureFunc, /* nb_multiply */
- PyHKEY_binaryFailureFunc, /* nb_remainder */
- PyHKEY_binaryFailureFunc, /* nb_divmod */
- PyHKEY_ternaryFailureFunc, /* nb_power */
- PyHKEY_unaryFailureFunc, /* nb_negative */
- PyHKEY_unaryFailureFunc, /* nb_positive */
- PyHKEY_unaryFailureFunc, /* nb_absolute */
- PyHKEY_boolFunc, /* nb_bool */
- PyHKEY_unaryFailureFunc, /* nb_invert */
- PyHKEY_binaryFailureFunc, /* nb_lshift */
- PyHKEY_binaryFailureFunc, /* nb_rshift */
- PyHKEY_binaryFailureFunc, /* nb_and */
- PyHKEY_binaryFailureFunc, /* nb_xor */
- PyHKEY_binaryFailureFunc, /* nb_or */
- PyHKEY_intFunc, /* nb_int */
- 0, /* nb_reserved */
- PyHKEY_unaryFailureFunc, /* nb_float */
-};
-
/*[clinic input]
module winreg
class winreg.HKEYType "PyHKEYObject *" "&PyHKEY_Type"
@@ -229,6 +222,14 @@ class HKEY_converter(CConverter):
type = 'HKEY'
converter = 'clinic_HKEY_converter'
+ def parse_arg(self, argname, displayname):
+ return """
+ if (!{converter}(_PyModule_GetState(module), {argname}, &{paramname})) {{{{
+ goto exit;
+ }}}}
+ """.format(argname=argname, paramname=self.parser_name,
+ converter=self.converter)
+
class HKEY_return_converter(CReturnConverter):
type = 'HKEY'
@@ -236,7 +237,7 @@ class HKEY_return_converter(CReturnConverter):
self.declare(data)
self.err_occurred_if_null_pointer("_return_value", data)
data.return_conversion.append(
- 'return_value = PyHKEY_FromHKEY(_return_value);\n')
+ 'return_value = PyHKEY_FromHKEY(_PyModule_GetState(module), _return_value);\n')
# HACK: this only works for PyHKEYObjects, nothing else.
# Should this be generalized and enshrined in clinic.py,
@@ -249,7 +250,7 @@ class self_return_converter(CReturnConverter):
data.return_conversion.append(
'return_value = (PyObject *)_return_value;\n')
[python start generated code]*/
-/*[python end generated code: output=da39a3ee5e6b4b0d input=2ebb7a4922d408d6]*/
+/*[python end generated code: output=da39a3ee5e6b4b0d input=17e645060c7b8ae1]*/
#include "clinic/winreg.c.h"
@@ -270,8 +271,11 @@ static PyObject *
winreg_HKEYType_Close_impl(PyHKEYObject *self)
/*[clinic end generated code: output=fced3a624fb0c344 input=6786ac75f6b89de6]*/
{
- if (!PyHKEY_Close((PyObject *)self))
+ winreg_state *st = _PyType_GetModuleState(Py_TYPE(self));
+ assert(st != NULL);
+ if (!PyHKEY_Close(st, (PyObject *)self)) {
return NULL;
+ }
Py_RETURN_NONE;
}
@@ -327,8 +331,11 @@ winreg_HKEYType___exit___impl(PyHKEYObject *self, PyObject *exc_type,
PyObject *exc_value, PyObject *traceback)
/*[clinic end generated code: output=923ebe7389e6a263 input=fb32489ee92403c7]*/
{
- if (!PyHKEY_Close((PyObject *)self))
+ winreg_state *st = _PyType_GetModuleState(Py_TYPE(self));
+ assert(st != NULL);
+ if (!PyHKEY_Close(st, (PyObject *)self)) {
return NULL;
+ }
Py_RETURN_NONE;
}
@@ -350,62 +357,71 @@ static PyMemberDef PyHKEY_memberlist[] = {
{NULL} /* Sentinel */
};
-/* The type itself */
-PyTypeObject PyHKEY_Type =
-{
- PyVarObject_HEAD_INIT(0, 0) /* fill in type at module init */
- "PyHKEY",
- sizeof(PyHKEYObject),
- 0,
- PyHKEY_deallocFunc, /* tp_dealloc */
- 0, /* tp_vectorcall_offset */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_as_async */
- 0, /* tp_repr */
- &PyHKEY_NumberMethods, /* tp_as_number */
- 0, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- PyHKEY_hashFunc, /* tp_hash */
- 0, /* tp_call */
- PyHKEY_strFunc, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- 0, /* tp_flags */
- PyHKEY_doc, /* tp_doc */
- 0, /*tp_traverse*/
- 0, /*tp_clear*/
- 0, /*tp_richcompare*/
- 0, /*tp_weaklistoffset*/
- 0, /*tp_iter*/
- 0, /*tp_iternext*/
- PyHKEY_methods, /*tp_methods*/
- PyHKEY_memberlist, /*tp_members*/
+static PyType_Slot pyhkey_type_slots[] = {
+ {Py_tp_dealloc, PyHKEY_deallocFunc},
+ {Py_tp_members, PyHKEY_memberlist},
+ {Py_tp_methods, PyHKEY_methods},
+ {Py_tp_doc, (char *)PyHKEY_doc},
+ {Py_tp_traverse, PyHKEY_traverseFunc},
+ {Py_tp_hash, PyHKEY_hashFunc},
+ {Py_tp_str, PyHKEY_strFunc},
+
+ // Number protocol
+ {Py_nb_add, PyHKEY_binaryFailureFunc},
+ {Py_nb_subtract, PyHKEY_binaryFailureFunc},
+ {Py_nb_multiply, PyHKEY_binaryFailureFunc},
+ {Py_nb_remainder, PyHKEY_binaryFailureFunc},
+ {Py_nb_divmod, PyHKEY_binaryFailureFunc},
+ {Py_nb_power, PyHKEY_ternaryFailureFunc},
+ {Py_nb_negative, PyHKEY_unaryFailureFunc},
+ {Py_nb_positive, PyHKEY_unaryFailureFunc},
+ {Py_nb_absolute, PyHKEY_unaryFailureFunc},
+ {Py_nb_bool, PyHKEY_boolFunc},
+ {Py_nb_invert, PyHKEY_unaryFailureFunc},
+ {Py_nb_lshift, PyHKEY_binaryFailureFunc},
+ {Py_nb_rshift, PyHKEY_binaryFailureFunc},
+ {Py_nb_and, PyHKEY_binaryFailureFunc},
+ {Py_nb_xor, PyHKEY_binaryFailureFunc},
+ {Py_nb_or, PyHKEY_binaryFailureFunc},
+ {Py_nb_int, PyHKEY_intFunc},
+ {Py_nb_float, PyHKEY_unaryFailureFunc},
+ {0, NULL},
+};
+
+static PyType_Spec pyhkey_type_spec = {
+ .name = "winreg.PyHKEY",
+ .basicsize = sizeof(PyHKEYObject),
+ .flags = (Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_IMMUTABLETYPE |
+ Py_TPFLAGS_DISALLOW_INSTANTIATION),
+ .slots = pyhkey_type_slots,
};
/************************************************************************
The public PyHKEY API (well, not public yet :-)
************************************************************************/
PyObject *
-PyHKEY_New(HKEY hInit)
+PyHKEY_New(PyObject *m, HKEY hInit)
{
- PyHKEYObject *key = PyObject_New(PyHKEYObject, &PyHKEY_Type);
- if (key)
- key->hkey = hInit;
+ winreg_state *st = _PyModule_GetState(m);
+ PyHKEYObject *key = PyObject_GC_New(PyHKEYObject, st->PyHKEY_Type);
+ if (key == NULL) {
+ return NULL;
+ }
+ key->hkey = hInit;
+ PyObject_GC_Track(key);
return (PyObject *)key;
}
BOOL
-PyHKEY_Close(PyObject *ob_handle)
+PyHKEY_Close(winreg_state *st, PyObject *ob_handle)
{
LONG rc;
HKEY key;
- if (!PyHKEY_AsHKEY(ob_handle, &key, TRUE)) {
+ if (!PyHKEY_AsHKEY(st, ob_handle, &key, TRUE)) {
return FALSE;
}
- if (PyHKEY_Check(ob_handle)) {
+ if (PyHKEY_Check(st, ob_handle)) {
((PyHKEYObject*)ob_handle)->hkey = 0;
}
rc = key ? RegCloseKey(key) : ERROR_SUCCESS;
@@ -415,7 +431,7 @@ PyHKEY_Close(PyObject *ob_handle)
}
BOOL
-PyHKEY_AsHKEY(PyObject *ob, HKEY *pHANDLE, BOOL bNoneOK)
+PyHKEY_AsHKEY(winreg_state *st, PyObject *ob, HKEY *pHANDLE, BOOL bNoneOK)
{
if (ob == Py_None) {
if (!bNoneOK) {
@@ -426,7 +442,7 @@ PyHKEY_AsHKEY(PyObject *ob, HKEY *pHANDLE, BOOL bNoneOK)
}
*pHANDLE = (HKEY)0;
}
- else if (PyHKEY_Check(ob)) {
+ else if (PyHKEY_Check(st, ob)) {
PyHKEYObject *pH = (PyHKEYObject *)ob;
*pHANDLE = pH->hkey;
}
@@ -447,23 +463,24 @@ PyHKEY_AsHKEY(PyObject *ob, HKEY *pHANDLE, BOOL bNoneOK)
}
BOOL
-clinic_HKEY_converter(PyObject *ob, void *p)
+clinic_HKEY_converter(winreg_state *st, PyObject *ob, void *p)
{
- if (!PyHKEY_AsHKEY(ob, (HKEY *)p, FALSE))
+ if (!PyHKEY_AsHKEY(st, ob, (HKEY *)p, FALSE)) {
return FALSE;
+ }
return TRUE;
}
PyObject *
-PyHKEY_FromHKEY(HKEY h)
+PyHKEY_FromHKEY(winreg_state *st, HKEY h)
{
- /* Inline PyObject_New */
- PyHKEYObject *op = (PyHKEYObject *) PyObject_Malloc(sizeof(PyHKEYObject));
+ PyHKEYObject *op = (PyHKEYObject *)PyObject_GC_New(PyHKEYObject,
+ st->PyHKEY_Type);
if (op == NULL) {
- return PyErr_NoMemory();
+ return NULL;
}
- _PyObject_Init((PyObject*)op, &PyHKEY_Type);
op->hkey = h;
+ PyObject_GC_Track(op);
return (PyObject *)op;
}
@@ -472,11 +489,11 @@ PyHKEY_FromHKEY(HKEY h)
The module methods
************************************************************************/
BOOL
-PyWinObject_CloseHKEY(PyObject *obHandle)
+PyWinObject_CloseHKEY(winreg_state *st, PyObject *obHandle)
{
BOOL ok;
- if (PyHKEY_Check(obHandle)) {
- ok = PyHKEY_Close(obHandle);
+ if (PyHKEY_Check(st, obHandle)) {
+ ok = PyHKEY_Close(st, obHandle);
}
#if SIZEOF_LONG >= SIZEOF_HKEY
else if (PyLong_Check(obHandle)) {
@@ -826,8 +843,9 @@ static PyObject *
winreg_CloseKey(PyObject *module, PyObject *hkey)
/*[clinic end generated code: output=a4fa537019a80d15 input=5b1aac65ba5127ad]*/
{
- if (!PyHKEY_Close(hkey))
+ if (!PyHKEY_Close(_PyModule_GetState(module), hkey)) {
return NULL;
+ }
Py_RETURN_NONE;
}
@@ -2061,7 +2079,7 @@ static struct PyMethodDef winreg_methods[] = {
#define ADD_INT(VAL) do { \
if (PyModule_AddIntConstant(m, #VAL, VAL) < 0) { \
- goto error; \
+ return -1; \
} \
} while (0)
@@ -2079,38 +2097,25 @@ inskey(PyObject *mod, char *name, HKEY key)
#define ADD_KEY(VAL) do { \
if (inskey(m, #VAL, VAL) < 0) { \
- goto error; \
+ return -1; \
} \
} while (0)
-
-static struct PyModuleDef winregmodule = {
- PyModuleDef_HEAD_INIT,
- "winreg",
- module_doc,
- -1,
- winreg_methods,
- NULL,
- NULL,
- NULL,
- NULL
-};
-
-PyMODINIT_FUNC PyInit_winreg(void)
+static int
+exec_module(PyObject *m)
{
- PyObject *m = PyModule_Create(&winregmodule);
- if (m == NULL) {
- return NULL;
- }
- PyHKEY_Type.tp_doc = PyHKEY_doc;
- if (PyType_Ready(&PyHKEY_Type) < 0) {
- goto error;
+ winreg_state *st = (winreg_state *)_PyModule_GetState(m);
+
+ st->PyHKEY_Type = (PyTypeObject *)
+ PyType_FromModuleAndSpec(m, &pyhkey_type_spec, NULL);
+ if (st->PyHKEY_Type == NULL) {
+ return -1;
}
- if (PyModule_AddObjectRef(m, "HKEYType", (PyObject *)&PyHKEY_Type) < 0) {
- goto error;
+ if (PyModule_AddObjectRef(m, "HKEYType", (PyObject *)st->PyHKEY_Type) < 0) {
+ return -1;
}
if (PyModule_AddObjectRef(m, "error", PyExc_OSError) < 0) {
- goto error;
+ return -1;
}
/* Add the relevant constants */
@@ -2174,12 +2179,44 @@ PyMODINIT_FUNC PyInit_winreg(void)
ADD_INT(REG_RESOURCE_REQUIREMENTS_LIST);
#undef ADD_INT
+ return 0;
+}
- return m;
+static PyModuleDef_Slot winreg_slots[] = {
+ {Py_mod_exec, exec_module},
+ {0, NULL}
+};
-error:
- Py_DECREF(m);
- return NULL;
+static int
+winreg_traverse(PyObject *module, visitproc visit, void *arg)
+{
+ winreg_state *state = _PyModule_GetState(module);
+ Py_VISIT(state->PyHKEY_Type);
+ return 0;
+}
+
+static int
+winreg_clear(PyObject *module)
+{
+ winreg_state *state = _PyModule_GetState(module);
+ Py_CLEAR(state->PyHKEY_Type);
+ return 0;
+}
+
+static struct PyModuleDef winregmodule = {
+ .m_base = PyModuleDef_HEAD_INIT,
+ .m_name = "winreg",
+ .m_doc = module_doc,
+ .m_size = sizeof(winreg_state),
+ .m_methods = winreg_methods,
+ .m_slots = winreg_slots,
+ .m_traverse = winreg_traverse,
+ .m_clear = winreg_clear,
+};
+
+PyMODINIT_FUNC PyInit_winreg(void)
+{
+ return PyModuleDef_Init(&winregmodule);
}
#endif /* MS_WINDOWS_DESKTOP || MS_WINDOWS_SYSTEM || MS_WINDOWS_GAMES */
diff --git a/Parser/action_helpers.c b/Parser/action_helpers.c
index 46390966892d16..55c0f6fdd620f4 100644
--- a/Parser/action_helpers.c
+++ b/Parser/action_helpers.c
@@ -1,6 +1,7 @@
#include <Python.h>
#include "pegen.h"
+#include "tokenizer.h"
#include "string_parser.h"
#include "pycore_runtime.h" // _PyRuntime
@@ -853,96 +854,6 @@ _PyPegen_seq_delete_starred_exprs(Parser *p, asdl_seq *kwargs)
return new_seq;
}
-expr_ty
-_PyPegen_concatenate_strings(Parser *p, asdl_seq *strings)
-{
- Py_ssize_t len = asdl_seq_LEN(strings);
- assert(len > 0);
-
- Token *first = asdl_seq_GET_UNTYPED(strings, 0);
- Token *last = asdl_seq_GET_UNTYPED(strings, len - 1);
-
- int bytesmode = 0;
- PyObject *bytes_str = NULL;
-
- FstringParser state;
- _PyPegen_FstringParser_Init(&state);
-
- for (Py_ssize_t i = 0; i < len; i++) {
- Token *t = asdl_seq_GET_UNTYPED(strings, i);
-
- int this_bytesmode;
- int this_rawmode;
- PyObject *s;
- const char *fstr;
- Py_ssize_t fstrlen = -1;
-
- if (_PyPegen_parsestr(p, &this_bytesmode, &this_rawmode, &s, &fstr, &fstrlen, t) != 0) {
- goto error;
- }
-
- /* Check that we are not mixing bytes with unicode. */
- if (i != 0 && bytesmode != this_bytesmode) {
- RAISE_SYNTAX_ERROR("cannot mix bytes and nonbytes literals");
- Py_XDECREF(s);
- goto error;
- }
- bytesmode = this_bytesmode;
-
- if (fstr != NULL) {
- assert(s == NULL && !bytesmode);
-
- int result = _PyPegen_FstringParser_ConcatFstring(p, &state, &fstr, fstr + fstrlen,
- this_rawmode, 0, first, t, last);
- if (result < 0) {
- goto error;
- }
- }
- else {
- /* String or byte string. */
- assert(s != NULL && fstr == NULL);
- assert(bytesmode ? PyBytes_CheckExact(s) : PyUnicode_CheckExact(s));
-
- if (bytesmode) {
- if (i == 0) {
- bytes_str = s;
- }
- else {
- PyBytes_ConcatAndDel(&bytes_str, s);
- if (!bytes_str) {
- goto error;
- }
- }
- }
- else {
- /* This is a regular string. Concatenate it. */
- if (_PyPegen_FstringParser_ConcatAndDel(&state, s) < 0) {
- goto error;
- }
- }
- }
- }
-
- if (bytesmode) {
- if (_PyArena_AddPyObject(p->arena, bytes_str) < 0) {
- goto error;
- }
- return _PyAST_Constant(bytes_str, NULL, first->lineno,
- first->col_offset, last->end_lineno,
- last->end_col_offset, p->arena);
- }
-
- return _PyPegen_FstringParser_Finish(p, &state, first, last);
-
-error:
- Py_XDECREF(bytes_str);
- _PyPegen_FstringParser_Dealloc(&state);
- if (PyErr_Occurred()) {
- _Pypegen_raise_decode_error(p);
- }
- return NULL;
-}
-
expr_ty
_PyPegen_ensure_imaginary(Parser *p, expr_ty exp)
{
@@ -1054,6 +965,18 @@ _PyPegen_check_legacy_stmt(Parser *p, expr_ty name) {
return 0;
}
+expr_ty
+_PyPegen_check_fstring_conversion(Parser *p, Token* symbol, expr_ty conv) {
+ if (symbol->lineno != conv->lineno || symbol->end_col_offset != conv->col_offset) {
+ return RAISE_SYNTAX_ERROR_KNOWN_RANGE(
+ symbol, conv,
+ "f-string: conversion type must come right after the exclamanation mark"
+ );
+ }
+ return conv;
+}
+
+
const char *
_PyPegen_get_expr_name(expr_ty e)
{
@@ -1271,3 +1194,439 @@ _PyPegen_nonparen_genexp_in_call(Parser *p, expr_ty args, asdl_comprehension_seq
"Generator expression must be parenthesized"
);
}
+
+// Fstring stuff
+
+static expr_ty
+decode_fstring_buffer(Parser *p, int lineno, int col_offset, int end_lineno,
+ int end_col_offset)
+{
+ tokenizer_mode *tok_mode = &(p->tok->tok_mode_stack[p->tok->tok_mode_stack_index]);
+ assert(tok_mode->last_expr_buffer != NULL);
+ assert(tok_mode->last_expr_size >= 0 && tok_mode->last_expr_end >= 0);
+
+ PyObject *res = PyUnicode_DecodeUTF8(
+ tok_mode->last_expr_buffer,
+ tok_mode->last_expr_size - tok_mode->last_expr_end,
+ NULL
+ );
+ if (!res || _PyArena_AddPyObject(p->arena, res) < 0) {
+ Py_XDECREF(res);
+ return NULL;
+ }
+
+ return _PyAST_Constant(res, NULL, lineno, col_offset, end_lineno, end_col_offset, p->arena);
+}
+
+static expr_ty
+_PyPegen_decode_fstring_part(Parser* p, int is_raw, expr_ty constant) {
+ assert(PyUnicode_CheckExact(constant->v.Constant.value));
+
+ const char* bstr = PyUnicode_AsUTF8(constant->v.Constant.value);
+ if (bstr == NULL) {
+ return NULL;
+ }
+
+ size_t len;
+ if (strcmp(bstr, "{{") == 0 || strcmp(bstr, "}}") == 0) {
+ len = 1;
+ } else {
+ len = strlen(bstr);
+ }
+
+ is_raw = is_raw || strchr(bstr, '\\') == NULL;
+ PyObject *str = _PyPegen_decode_string(p, is_raw, bstr, len, NULL);
+ if (str == NULL) {
+ _Pypegen_raise_decode_error(p);
+ return NULL;
+ }
+ if (_PyArena_AddPyObject(p->arena, str) < 0) {
+ Py_DECREF(str);
+ return NULL;
+ }
+ return _PyAST_Constant(str, NULL, constant->lineno, constant->col_offset,
+ constant->end_lineno, constant->end_col_offset,
+ p->arena);
+}
+
+static asdl_expr_seq *
+unpack_top_level_joined_strs(Parser *p, asdl_expr_seq *raw_expressions)
+{
+ /* The parser might put multiple f-string values into an individual
+ * JoinedStr node at the top level due to stuff like f-string debugging
+ * expressions. This function flattens those and promotes them to the
+ * upper level. Only simplifies AST, but the compiler already takes care
+ * of the regular output, so this is not necessary if you are not going
+ * to expose the output AST to Python level. */
+
+ Py_ssize_t i, req_size, raw_size;
+
+ req_size = raw_size = asdl_seq_LEN(raw_expressions);
+ expr_ty expr;
+ for (i = 0; i < raw_size; i++) {
+ expr = asdl_seq_GET(raw_expressions, i);
+ if (expr->kind == JoinedStr_kind) {
+ req_size += asdl_seq_LEN(expr->v.JoinedStr.values) - 1;
+ }
+ }
+
+ asdl_expr_seq *expressions = _Py_asdl_expr_seq_new(req_size, p->arena);
+
+ Py_ssize_t raw_index, req_index = 0;
+ for (raw_index = 0; raw_index < raw_size; raw_index++) {
+ expr = asdl_seq_GET(raw_expressions, raw_index);
+ if (expr->kind == JoinedStr_kind) {
+ asdl_expr_seq *values = expr->v.JoinedStr.values;
+ for (Py_ssize_t n = 0; n < asdl_seq_LEN(values); n++) {
+ asdl_seq_SET(expressions, req_index, asdl_seq_GET(values, n));
+ req_index++;
+ }
+ } else {
+ asdl_seq_SET(expressions, req_index, expr);
+ req_index++;
+ }
+ }
+ return expressions;
+}
+
+expr_ty
+_PyPegen_joined_str(Parser *p, Token* a, asdl_expr_seq* raw_expressions, Token*b) {
+ asdl_expr_seq *expr = unpack_top_level_joined_strs(p, raw_expressions);
+ Py_ssize_t n_items = asdl_seq_LEN(expr);
+
+ const char* quote_str = PyBytes_AsString(a->bytes);
+ if (quote_str == NULL) {
+ return NULL;
+ }
+ int is_raw = strpbrk(quote_str, "rR") != NULL;
+
+ asdl_expr_seq *seq = _Py_asdl_expr_seq_new(n_items, p->arena);
+ if (seq == NULL) {
+ return NULL;
+ }
+
+ Py_ssize_t index = 0;
+ for (Py_ssize_t i = 0; i < n_items; i++) {
+ expr_ty item = asdl_seq_GET(expr, i);
+ if (item->kind == Constant_kind) {
+ item = _PyPegen_decode_fstring_part(p, is_raw, item);
+ if (item == NULL) {
+ return NULL;
+ }
+
+ /* Tokenizer emits string parts even when the underlying string
+ might become an empty value (e.g. FSTRING_MIDDLE with the value \\n)
+ so we need to check for them and simplify it here. */
+ if (PyUnicode_CheckExact(item->v.Constant.value)
+ && PyUnicode_GET_LENGTH(item->v.Constant.value) == 0) {
+ continue;
+ }
+ }
+ asdl_seq_SET(seq, index++, item);
+ }
+
+ asdl_expr_seq *resized_exprs;
+ if (index != n_items) {
+ resized_exprs = _Py_asdl_expr_seq_new(index, p->arena);
+ if (resized_exprs == NULL) {
+ return NULL;
+ }
+ for (Py_ssize_t i = 0; i < index; i++) {
+ asdl_seq_SET(resized_exprs, i, asdl_seq_GET(seq, i));
+ }
+ }
+ else {
+ resized_exprs = seq;
+ }
+
+ return _PyAST_JoinedStr(resized_exprs, a->lineno, a->col_offset,
+ b->end_lineno, b->end_col_offset,
+ p->arena);
+}
+
+expr_ty _PyPegen_constant_from_token(Parser* p, Token* tok) {
+ char* bstr = PyBytes_AsString(tok->bytes);
+ if (bstr == NULL) {
+ return NULL;
+ }
+ PyObject* str = PyUnicode_FromString(bstr);
+ if (str == NULL) {
+ return NULL;
+ }
+ if (_PyArena_AddPyObject(p->arena, str) < 0) {
+ Py_DECREF(str);
+ return NULL;
+ }
+ return _PyAST_Constant(str, NULL, tok->lineno, tok->col_offset,
+ tok->end_lineno, tok->end_col_offset,
+ p->arena);
+}
+
+expr_ty _PyPegen_constant_from_string(Parser* p, Token* tok) {
+ char* the_str = PyBytes_AsString(tok->bytes);
+ if (the_str == NULL) {
+ return NULL;
+ }
+ PyObject *s = _PyPegen_parse_string(p, tok);
+ if (s == NULL) {
+ _Pypegen_raise_decode_error(p);
+ return NULL;
+ }
+ if (_PyArena_AddPyObject(p->arena, s) < 0) {
+ Py_DECREF(s);
+ return NULL;
+ }
+ PyObject *kind = NULL;
+ if (the_str && the_str[0] == 'u') {
+ kind = _PyPegen_new_identifier(p, "u");
+ if (kind == NULL) {
+ return NULL;
+ }
+ }
+ return _PyAST_Constant(s, kind, tok->lineno, tok->col_offset, tok->end_lineno, tok->end_col_offset, p->arena);
+}
+
+expr_ty _PyPegen_formatted_value(Parser *p, expr_ty expression, Token *debug, expr_ty conversion,
+ expr_ty format, int lineno, int col_offset, int end_lineno, int end_col_offset,
+ PyArena *arena) {
+ int conversion_val = -1;
+ if (conversion != NULL) {
+ assert(conversion->kind == Name_kind);
+ Py_UCS4 first = PyUnicode_READ_CHAR(conversion->v.Name.id, 0);
+
+ if (PyUnicode_GET_LENGTH(conversion->v.Name.id) > 1 ||
+ !(first == 's' || first == 'r' || first == 'a')) {
+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(conversion,
+ "f-string: invalid conversion character %R: expected 's', 'r', or 'a'",
+ conversion->v.Name.id);
+ return NULL;
+ }
+
+ conversion_val = Py_SAFE_DOWNCAST(first, Py_UCS4, int);
+ }
+ else if (debug && !format) {
+ /* If no conversion is specified, use !r for debug expressions */
+ conversion_val = (int)'r';
+ }
+
+ expr_ty formatted_value = _PyAST_FormattedValue(
+ expression, conversion_val, format,
+ lineno, col_offset, end_lineno,
+ end_col_offset, arena
+ );
+
+ if (debug) {
+ /* Find the non whitespace token after the "=" */
+ int debug_end_line, debug_end_offset;
+
+ if (conversion) {
+ debug_end_line = conversion->lineno;
+ debug_end_offset = conversion->col_offset;
+ }
+ else if (format) {
+ debug_end_line = format->lineno;
+ debug_end_offset = format->col_offset + 1; // HACK: ??
+ }
+ else {
+ debug_end_line = end_lineno;
+ debug_end_offset = end_col_offset;
+ }
+
+ expr_ty debug_text = decode_fstring_buffer(p, lineno, col_offset + 1,
+ debug_end_line, debug_end_offset - 1);
+ if (!debug_text) {
+ return NULL;
+ }
+
+ asdl_expr_seq *values = _Py_asdl_expr_seq_new(2, arena);
+ asdl_seq_SET(values, 0, debug_text);
+ asdl_seq_SET(values, 1, formatted_value);
+ return _PyAST_JoinedStr(values, lineno, col_offset, debug_end_line, debug_end_offset, p->arena);
+ }
+ else {
+ return formatted_value;
+ }
+}
+
+expr_ty
+_PyPegen_concatenate_strings(Parser *p, asdl_expr_seq *strings,
+ int lineno, int col_offset, int end_lineno,
+ int end_col_offset, PyArena *arena)
+{
+ Py_ssize_t len = asdl_seq_LEN(strings);
+ assert(len > 0);
+
+ int f_string_found = 0;
+ int unicode_string_found = 0;
+ int bytes_found = 0;
+
+ Py_ssize_t i = 0;
+ Py_ssize_t n_flattened_elements = 0;
+ for (i = 0; i < len; i++) {
+ expr_ty elem = asdl_seq_GET(strings, i);
+ if (elem->kind == Constant_kind) {
+ if (PyBytes_CheckExact(elem->v.Constant.value)) {
+ bytes_found = 1;
+ } else {
+ unicode_string_found = 1;
+ }
+ n_flattened_elements++;
+ } else {
+ n_flattened_elements += asdl_seq_LEN(elem->v.JoinedStr.values);
+ f_string_found = 1;
+ }
+ }
+
+ if ((unicode_string_found || f_string_found) && bytes_found) {
+ RAISE_SYNTAX_ERROR("cannot mix bytes and nonbytes literals");
+ return NULL;
+ }
+
+ if (bytes_found) {
+ PyObject* res = PyBytes_FromString("");
+
+ /* Bytes literals never get a kind, but just for consistency
+ since they are represented as Constant nodes, we'll mirror
+ the same behavior as unicode strings for determining the
+ kind. */
+ PyObject* kind = asdl_seq_GET(strings, 0)->v.Constant.kind;
+ for (i = 0; i < len; i++) {
+ expr_ty elem = asdl_seq_GET(strings, i);
+ PyBytes_Concat(&res, elem->v.Constant.value);
+ }
+ if (!res || _PyArena_AddPyObject(arena, res) < 0) {
+ Py_XDECREF(res);
+ return NULL;
+ }
+ return _PyAST_Constant(res, kind, lineno, col_offset, end_lineno, end_col_offset, p->arena);
+ }
+
+ if (!f_string_found && len == 1) {
+ return asdl_seq_GET(strings, 0);
+ }
+
+ asdl_expr_seq* flattened = _Py_asdl_expr_seq_new(n_flattened_elements, p->arena);
+ if (flattened == NULL) {
+ return NULL;
+ }
+
+ /* build flattened list */
+ Py_ssize_t current_pos = 0;
+ Py_ssize_t j = 0;
+ for (i = 0; i < len; i++) {
+ expr_ty elem = asdl_seq_GET(strings, i);
+ if (elem->kind == Constant_kind) {
+ asdl_seq_SET(flattened, current_pos++, elem);
+ } else {
+ for (j = 0; j < asdl_seq_LEN(elem->v.JoinedStr.values); j++) {
+ expr_ty subvalue = asdl_seq_GET(elem->v.JoinedStr.values, j);
+ if (subvalue == NULL) {
+ return NULL;
+ }
+ asdl_seq_SET(flattened, current_pos++, subvalue);
+ }
+ }
+ }
+
+ /* calculate folded element count */
+ Py_ssize_t n_elements = 0;
+ int prev_is_constant = 0;
+ for (i = 0; i < n_flattened_elements; i++) {
+ expr_ty elem = asdl_seq_GET(flattened, i);
+
+ /* The concatenation of a FormattedValue and an empty Constant should
+ lead to the FormattedValue itself. Thus, we will not take any empty
+ constants into account, just as in `_PyPegen_joined_str` */
+ if (f_string_found && elem->kind == Constant_kind &&
+ PyUnicode_CheckExact(elem->v.Constant.value) &&
+ PyUnicode_GET_LENGTH(elem->v.Constant.value) == 0)
+ continue;
+
+ if (!prev_is_constant || elem->kind != Constant_kind) {
+ n_elements++;
+ }
+ prev_is_constant = elem->kind == Constant_kind;
+ }
+
+ asdl_expr_seq* values = _Py_asdl_expr_seq_new(n_elements, p->arena);
+ if (values == NULL) {
+ return NULL;
+ }
+
+ /* build folded list */
+ _PyUnicodeWriter writer;
+ current_pos = 0;
+ for (i = 0; i < n_flattened_elements; i++) {
+ expr_ty elem = asdl_seq_GET(flattened, i);
+
+ /* if the current elem and the following are constants,
+ fold them and all consequent constants */
+ if (elem->kind == Constant_kind) {
+ if (i + 1 < n_flattened_elements &&
+ asdl_seq_GET(flattened, i + 1)->kind == Constant_kind) {
+ expr_ty first_elem = elem;
+
+ /* When a string is getting concatenated, the kind of the string
+ is determined by the first string in the concatenation
+ sequence.
+
+ u"abc" "def" -> u"abcdef"
+ "abc" u"abc" -> "abcabc" */
+ PyObject *kind = elem->v.Constant.kind;
+
+ _PyUnicodeWriter_Init(&writer);
+ expr_ty last_elem = elem;
+ for (j = i; j < n_flattened_elements; j++) {
+ expr_ty current_elem = asdl_seq_GET(flattened, j);
+ if (current_elem->kind == Constant_kind) {
+ if (_PyUnicodeWriter_WriteStr(
+ &writer, current_elem->v.Constant.value)) {
+ _PyUnicodeWriter_Dealloc(&writer);
+ return NULL;
+ }
+ last_elem = current_elem;
+ } else {
+ break;
+ }
+ }
+ i = j - 1;
+
+ PyObject *concat_str = _PyUnicodeWriter_Finish(&writer);
+ if (concat_str == NULL) {
+ _PyUnicodeWriter_Dealloc(&writer);
+ return NULL;
+ }
+ if (_PyArena_AddPyObject(p->arena, concat_str) < 0) {
+ Py_DECREF(concat_str);
+ return NULL;
+ }
+ elem = _PyAST_Constant(concat_str, kind, first_elem->lineno,
+ first_elem->col_offset,
+ last_elem->end_lineno,
+ last_elem->end_col_offset, p->arena);
+ if (elem == NULL) {
+ return NULL;
+ }
+ }
+
+ /* Drop all empty constant strings */
+ if (f_string_found &&
+ PyUnicode_CheckExact(elem->v.Constant.value) &&
+ PyUnicode_GET_LENGTH(elem->v.Constant.value) == 0) {
+ continue;
+ }
+ }
+
+ asdl_seq_SET(values, current_pos++, elem);
+ }
+
+ if (!f_string_found) {
+ assert(n_elements == 1);
+ expr_ty elem = asdl_seq_GET(values, 0);
+ assert(elem->kind == Constant_kind);
+ return elem;
+ }
+
+ assert(current_pos == n_elements);
+ return _PyAST_JoinedStr(values, lineno, col_offset, end_lineno, end_col_offset, p->arena);
+}
diff --git a/Parser/parser.c b/Parser/parser.c
index e0a88a9cc72c8b..771366844fc489 100644
--- a/Parser/parser.c
+++ b/Parser/parser.c
@@ -17,52 +17,52 @@ static KeywordToken *reserved_keywords[] = {
(KeywordToken[]) {{NULL, -1}},
(KeywordToken[]) {{NULL, -1}},
(KeywordToken[]) {
- {"if", 641},
- {"as", 639},
- {"in", 650},
+ {"if", 642},
+ {"as", 640},
+ {"in", 651},
{"or", 574},
{"is", 582},
{NULL, -1},
},
(KeywordToken[]) {
- {"del", 603},
- {"def", 651},
- {"for", 649},
- {"try", 623},
+ {"del", 604},
+ {"def", 652},
+ {"for", 650},
+ {"try", 624},
{"and", 575},
{"not", 581},
{NULL, -1},
},
(KeywordToken[]) {
- {"from", 607},
+ {"from", 608},
{"pass", 504},
- {"with", 614},
- {"elif", 643},
- {"else", 644},
- {"None", 601},
- {"True", 600},
+ {"with", 615},
+ {"elif", 644},
+ {"else", 645},
+ {"None", 602},
+ {"True", 601},
{NULL, -1},
},
(KeywordToken[]) {
{"raise", 522},
{"yield", 573},
{"break", 508},
- {"class", 653},
- {"while", 646},
- {"False", 602},
+ {"class", 654},
+ {"while", 647},
+ {"False", 603},
{NULL, -1},
},
(KeywordToken[]) {
{"return", 519},
- {"import", 606},
+ {"import", 607},
{"assert", 526},
{"global", 523},
- {"except", 636},
- {"lambda", 586},
+ {"except", 637},
+ {"lambda", 600},
{NULL, -1},
},
(KeywordToken[]) {
- {"finally", 632},
+ {"finally", 633},
{NULL, -1},
},
(KeywordToken[]) {
@@ -224,341 +224,370 @@ static char *soft_keywords[] = {
#define lambda_param_with_default_type 1144
#define lambda_param_maybe_default_type 1145
#define lambda_param_type 1146
-#define strings_type 1147
-#define list_type 1148
-#define tuple_type 1149
-#define set_type 1150
-#define dict_type 1151
-#define double_starred_kvpairs_type 1152
-#define double_starred_kvpair_type 1153
-#define kvpair_type 1154
-#define for_if_clauses_type 1155
-#define for_if_clause_type 1156
-#define listcomp_type 1157
-#define setcomp_type 1158
-#define genexp_type 1159
-#define dictcomp_type 1160
-#define arguments_type 1161
-#define args_type 1162
-#define kwargs_type 1163
-#define starred_expression_type 1164
-#define kwarg_or_starred_type 1165
-#define kwarg_or_double_starred_type 1166
-#define star_targets_type 1167
-#define star_targets_list_seq_type 1168
-#define star_targets_tuple_seq_type 1169
-#define star_target_type 1170
-#define target_with_star_atom_type 1171
-#define star_atom_type 1172
-#define single_target_type 1173
-#define single_subscript_attribute_target_type 1174
-#define t_primary_type 1175 // Left-recursive
-#define t_lookahead_type 1176
-#define del_targets_type 1177
-#define del_target_type 1178
-#define del_t_atom_type 1179
-#define type_expressions_type 1180
-#define func_type_comment_type 1181
-#define invalid_arguments_type 1182
-#define invalid_kwarg_type 1183
-#define expression_without_invalid_type 1184
-#define invalid_legacy_expression_type 1185
-#define invalid_expression_type 1186
-#define invalid_named_expression_type 1187
-#define invalid_assignment_type 1188
-#define invalid_ann_assign_target_type 1189
-#define invalid_del_stmt_type 1190
-#define invalid_block_type 1191
-#define invalid_comprehension_type 1192
-#define invalid_dict_comprehension_type 1193
-#define invalid_parameters_type 1194
-#define invalid_default_type 1195
-#define invalid_star_etc_type 1196
-#define invalid_kwds_type 1197
-#define invalid_parameters_helper_type 1198
-#define invalid_lambda_parameters_type 1199
-#define invalid_lambda_parameters_helper_type 1200
-#define invalid_lambda_star_etc_type 1201
-#define invalid_lambda_kwds_type 1202
-#define invalid_double_type_comments_type 1203
-#define invalid_with_item_type 1204
-#define invalid_for_target_type 1205
-#define invalid_group_type 1206
-#define invalid_import_type 1207
-#define invalid_import_from_targets_type 1208
-#define invalid_with_stmt_type 1209
-#define invalid_with_stmt_indent_type 1210
-#define invalid_try_stmt_type 1211
-#define invalid_except_stmt_type 1212
-#define invalid_finally_stmt_type 1213
-#define invalid_except_stmt_indent_type 1214
-#define invalid_except_star_stmt_indent_type 1215
-#define invalid_match_stmt_type 1216
-#define invalid_case_block_type 1217
-#define invalid_as_pattern_type 1218
-#define invalid_class_pattern_type 1219
-#define invalid_class_argument_pattern_type 1220
-#define invalid_if_stmt_type 1221
-#define invalid_elif_stmt_type 1222
-#define invalid_else_stmt_type 1223
-#define invalid_while_stmt_type 1224
-#define invalid_for_stmt_type 1225
-#define invalid_def_raw_type 1226
-#define invalid_class_def_raw_type 1227
-#define invalid_double_starred_kvpairs_type 1228
-#define invalid_kvpair_type 1229
-#define invalid_starred_expression_type 1230
-#define _loop0_1_type 1231
-#define _loop0_2_type 1232
-#define _loop1_3_type 1233
-#define _loop0_5_type 1234
-#define _gather_4_type 1235
-#define _tmp_6_type 1236
-#define _tmp_7_type 1237
-#define _tmp_8_type 1238
-#define _tmp_9_type 1239
-#define _tmp_10_type 1240
-#define _tmp_11_type 1241
-#define _tmp_12_type 1242
-#define _tmp_13_type 1243
-#define _loop1_14_type 1244
-#define _tmp_15_type 1245
-#define _tmp_16_type 1246
-#define _tmp_17_type 1247
-#define _loop0_19_type 1248
-#define _gather_18_type 1249
-#define _loop0_21_type 1250
-#define _gather_20_type 1251
-#define _tmp_22_type 1252
-#define _tmp_23_type 1253
-#define _loop0_24_type 1254
-#define _loop1_25_type 1255
-#define _loop0_27_type 1256
-#define _gather_26_type 1257
-#define _tmp_28_type 1258
-#define _loop0_30_type 1259
-#define _gather_29_type 1260
-#define _tmp_31_type 1261
-#define _loop1_32_type 1262
-#define _tmp_33_type 1263
-#define _tmp_34_type 1264
-#define _tmp_35_type 1265
-#define _loop0_36_type 1266
-#define _loop0_37_type 1267
-#define _loop0_38_type 1268
-#define _loop1_39_type 1269
-#define _loop0_40_type 1270
-#define _loop1_41_type 1271
-#define _loop1_42_type 1272
-#define _loop1_43_type 1273
-#define _loop0_44_type 1274
-#define _loop1_45_type 1275
-#define _loop0_46_type 1276
-#define _loop1_47_type 1277
-#define _loop0_48_type 1278
-#define _loop0_49_type 1279
-#define _loop1_50_type 1280
-#define _loop0_52_type 1281
-#define _gather_51_type 1282
-#define _loop0_54_type 1283
-#define _gather_53_type 1284
-#define _loop0_56_type 1285
-#define _gather_55_type 1286
-#define _loop0_58_type 1287
-#define _gather_57_type 1288
-#define _tmp_59_type 1289
-#define _loop1_60_type 1290
-#define _loop1_61_type 1291
-#define _tmp_62_type 1292
-#define _tmp_63_type 1293
-#define _loop1_64_type 1294
-#define _loop0_66_type 1295
-#define _gather_65_type 1296
-#define _tmp_67_type 1297
-#define _tmp_68_type 1298
-#define _tmp_69_type 1299
-#define _tmp_70_type 1300
-#define _loop0_72_type 1301
-#define _gather_71_type 1302
-#define _loop0_74_type 1303
-#define _gather_73_type 1304
-#define _tmp_75_type 1305
-#define _loop0_77_type 1306
-#define _gather_76_type 1307
-#define _loop0_79_type 1308
-#define _gather_78_type 1309
-#define _loop1_80_type 1310
-#define _loop1_81_type 1311
-#define _loop0_83_type 1312
-#define _gather_82_type 1313
-#define _loop1_84_type 1314
-#define _loop1_85_type 1315
-#define _loop1_86_type 1316
-#define _tmp_87_type 1317
-#define _loop0_89_type 1318
-#define _gather_88_type 1319
-#define _tmp_90_type 1320
-#define _tmp_91_type 1321
-#define _tmp_92_type 1322
-#define _tmp_93_type 1323
-#define _tmp_94_type 1324
-#define _loop0_95_type 1325
-#define _loop0_96_type 1326
-#define _loop0_97_type 1327
-#define _loop1_98_type 1328
-#define _loop0_99_type 1329
-#define _loop1_100_type 1330
-#define _loop1_101_type 1331
-#define _loop1_102_type 1332
-#define _loop0_103_type 1333
-#define _loop1_104_type 1334
-#define _loop0_105_type 1335
-#define _loop1_106_type 1336
-#define _loop0_107_type 1337
-#define _loop1_108_type 1338
-#define _loop1_109_type 1339
-#define _tmp_110_type 1340
-#define _loop0_112_type 1341
-#define _gather_111_type 1342
-#define _loop1_113_type 1343
-#define _loop0_114_type 1344
-#define _loop0_115_type 1345
-#define _tmp_116_type 1346
-#define _loop0_118_type 1347
-#define _gather_117_type 1348
-#define _tmp_119_type 1349
-#define _loop0_121_type 1350
-#define _gather_120_type 1351
-#define _loop0_123_type 1352
-#define _gather_122_type 1353
-#define _loop0_125_type 1354
-#define _gather_124_type 1355
-#define _loop0_127_type 1356
-#define _gather_126_type 1357
-#define _loop0_128_type 1358
-#define _loop0_130_type 1359
-#define _gather_129_type 1360
-#define _loop1_131_type 1361
-#define _tmp_132_type 1362
-#define _loop0_134_type 1363
-#define _gather_133_type 1364
-#define _loop0_136_type 1365
-#define _gather_135_type 1366
-#define _loop0_138_type 1367
-#define _gather_137_type 1368
-#define _loop0_140_type 1369
-#define _gather_139_type 1370
-#define _loop0_142_type 1371
-#define _gather_141_type 1372
-#define _tmp_143_type 1373
-#define _tmp_144_type 1374
-#define _tmp_145_type 1375
-#define _tmp_146_type 1376
-#define _tmp_147_type 1377
-#define _tmp_148_type 1378
-#define _tmp_149_type 1379
-#define _tmp_150_type 1380
-#define _tmp_151_type 1381
-#define _tmp_152_type 1382
-#define _tmp_153_type 1383
-#define _loop0_154_type 1384
-#define _loop0_155_type 1385
-#define _loop0_156_type 1386
-#define _tmp_157_type 1387
-#define _tmp_158_type 1388
-#define _tmp_159_type 1389
-#define _tmp_160_type 1390
-#define _tmp_161_type 1391
-#define _loop0_162_type 1392
-#define _loop0_163_type 1393
-#define _loop0_164_type 1394
-#define _loop1_165_type 1395
-#define _tmp_166_type 1396
-#define _loop0_167_type 1397
-#define _tmp_168_type 1398
-#define _loop0_169_type 1399
-#define _loop1_170_type 1400
-#define _tmp_171_type 1401
-#define _tmp_172_type 1402
-#define _tmp_173_type 1403
-#define _loop0_174_type 1404
-#define _tmp_175_type 1405
-#define _tmp_176_type 1406
-#define _loop1_177_type 1407
-#define _tmp_178_type 1408
-#define _loop0_179_type 1409
-#define _loop0_180_type 1410
-#define _loop0_181_type 1411
-#define _loop0_183_type 1412
-#define _gather_182_type 1413
-#define _tmp_184_type 1414
-#define _loop0_185_type 1415
-#define _tmp_186_type 1416
-#define _loop0_187_type 1417
-#define _loop1_188_type 1418
-#define _loop1_189_type 1419
-#define _tmp_190_type 1420
-#define _tmp_191_type 1421
-#define _loop0_192_type 1422
-#define _tmp_193_type 1423
-#define _tmp_194_type 1424
-#define _tmp_195_type 1425
-#define _loop0_197_type 1426
-#define _gather_196_type 1427
-#define _loop0_199_type 1428
-#define _gather_198_type 1429
-#define _loop0_201_type 1430
-#define _gather_200_type 1431
-#define _loop0_203_type 1432
-#define _gather_202_type 1433
-#define _tmp_204_type 1434
-#define _loop0_205_type 1435
-#define _loop1_206_type 1436
-#define _tmp_207_type 1437
-#define _loop0_208_type 1438
-#define _loop1_209_type 1439
-#define _tmp_210_type 1440
-#define _tmp_211_type 1441
-#define _tmp_212_type 1442
-#define _tmp_213_type 1443
-#define _tmp_214_type 1444
-#define _tmp_215_type 1445
-#define _tmp_216_type 1446
-#define _tmp_217_type 1447
-#define _tmp_218_type 1448
-#define _tmp_219_type 1449
-#define _loop0_221_type 1450
-#define _gather_220_type 1451
-#define _tmp_222_type 1452
-#define _tmp_223_type 1453
-#define _tmp_224_type 1454
-#define _tmp_225_type 1455
-#define _tmp_226_type 1456
-#define _tmp_227_type 1457
-#define _tmp_228_type 1458
-#define _tmp_229_type 1459
-#define _tmp_230_type 1460
-#define _tmp_231_type 1461
-#define _tmp_232_type 1462
-#define _tmp_233_type 1463
-#define _tmp_234_type 1464
-#define _tmp_235_type 1465
-#define _tmp_236_type 1466
-#define _tmp_237_type 1467
-#define _tmp_238_type 1468
-#define _tmp_239_type 1469
-#define _tmp_240_type 1470
-#define _tmp_241_type 1471
-#define _tmp_242_type 1472
-#define _tmp_243_type 1473
-#define _tmp_244_type 1474
-#define _tmp_245_type 1475
-#define _tmp_246_type 1476
-#define _tmp_247_type 1477
-#define _tmp_248_type 1478
-#define _tmp_249_type 1479
-#define _tmp_250_type 1480
-#define _tmp_251_type 1481
+#define fstring_middle_type 1147
+#define fstring_replacement_field_type 1148
+#define fstring_conversion_type 1149
+#define fstring_full_format_spec_type 1150
+#define fstring_format_spec_type 1151
+#define string_type 1152
+#define strings_type 1153
+#define list_type 1154
+#define tuple_type 1155
+#define set_type 1156
+#define dict_type 1157
+#define double_starred_kvpairs_type 1158
+#define double_starred_kvpair_type 1159
+#define kvpair_type 1160
+#define for_if_clauses_type 1161
+#define for_if_clause_type 1162
+#define listcomp_type 1163
+#define setcomp_type 1164
+#define genexp_type 1165
+#define dictcomp_type 1166
+#define arguments_type 1167
+#define args_type 1168
+#define kwargs_type 1169
+#define starred_expression_type 1170
+#define kwarg_or_starred_type 1171
+#define kwarg_or_double_starred_type 1172
+#define star_targets_type 1173
+#define star_targets_list_seq_type 1174
+#define star_targets_tuple_seq_type 1175
+#define star_target_type 1176
+#define target_with_star_atom_type 1177
+#define star_atom_type 1178
+#define single_target_type 1179
+#define single_subscript_attribute_target_type 1180
+#define t_primary_type 1181 // Left-recursive
+#define t_lookahead_type 1182
+#define del_targets_type 1183
+#define del_target_type 1184
+#define del_t_atom_type 1185
+#define type_expressions_type 1186
+#define func_type_comment_type 1187
+#define invalid_arguments_type 1188
+#define invalid_kwarg_type 1189
+#define expression_without_invalid_type 1190
+#define invalid_legacy_expression_type 1191
+#define invalid_expression_type 1192
+#define invalid_named_expression_type 1193
+#define invalid_assignment_type 1194
+#define invalid_ann_assign_target_type 1195
+#define invalid_del_stmt_type 1196
+#define invalid_block_type 1197
+#define invalid_comprehension_type 1198
+#define invalid_dict_comprehension_type 1199
+#define invalid_parameters_type 1200
+#define invalid_default_type 1201
+#define invalid_star_etc_type 1202
+#define invalid_kwds_type 1203
+#define invalid_parameters_helper_type 1204
+#define invalid_lambda_parameters_type 1205
+#define invalid_lambda_parameters_helper_type 1206
+#define invalid_lambda_star_etc_type 1207
+#define invalid_lambda_kwds_type 1208
+#define invalid_double_type_comments_type 1209
+#define invalid_with_item_type 1210
+#define invalid_for_target_type 1211
+#define invalid_group_type 1212
+#define invalid_import_type 1213
+#define invalid_import_from_targets_type 1214
+#define invalid_with_stmt_type 1215
+#define invalid_with_stmt_indent_type 1216
+#define invalid_try_stmt_type 1217
+#define invalid_except_stmt_type 1218
+#define invalid_finally_stmt_type 1219
+#define invalid_except_stmt_indent_type 1220
+#define invalid_except_star_stmt_indent_type 1221
+#define invalid_match_stmt_type 1222
+#define invalid_case_block_type 1223
+#define invalid_as_pattern_type 1224
+#define invalid_class_pattern_type 1225
+#define invalid_class_argument_pattern_type 1226
+#define invalid_if_stmt_type 1227
+#define invalid_elif_stmt_type 1228
+#define invalid_else_stmt_type 1229
+#define invalid_while_stmt_type 1230
+#define invalid_for_stmt_type 1231
+#define invalid_def_raw_type 1232
+#define invalid_class_def_raw_type 1233
+#define invalid_double_starred_kvpairs_type 1234
+#define invalid_kvpair_type 1235
+#define invalid_starred_expression_type 1236
+#define invalid_replacement_field_type 1237
+#define invalid_conversion_character_type 1238
+#define _loop0_1_type 1239
+#define _loop0_2_type 1240
+#define _loop0_3_type 1241
+#define _loop1_4_type 1242
+#define _loop0_6_type 1243
+#define _gather_5_type 1244
+#define _tmp_7_type 1245
+#define _tmp_8_type 1246
+#define _tmp_9_type 1247
+#define _tmp_10_type 1248
+#define _tmp_11_type 1249
+#define _tmp_12_type 1250
+#define _tmp_13_type 1251
+#define _tmp_14_type 1252
+#define _loop1_15_type 1253
+#define _tmp_16_type 1254
+#define _tmp_17_type 1255
+#define _tmp_18_type 1256
+#define _loop0_20_type 1257
+#define _gather_19_type 1258
+#define _loop0_22_type 1259
+#define _gather_21_type 1260
+#define _tmp_23_type 1261
+#define _tmp_24_type 1262
+#define _loop0_25_type 1263
+#define _loop1_26_type 1264
+#define _loop0_28_type 1265
+#define _gather_27_type 1266
+#define _tmp_29_type 1267
+#define _loop0_31_type 1268
+#define _gather_30_type 1269
+#define _tmp_32_type 1270
+#define _loop1_33_type 1271
+#define _tmp_34_type 1272
+#define _tmp_35_type 1273
+#define _tmp_36_type 1274
+#define _loop0_37_type 1275
+#define _loop0_38_type 1276
+#define _loop0_39_type 1277
+#define _loop1_40_type 1278
+#define _loop0_41_type 1279
+#define _loop1_42_type 1280
+#define _loop1_43_type 1281
+#define _loop1_44_type 1282
+#define _loop0_45_type 1283
+#define _loop1_46_type 1284
+#define _loop0_47_type 1285
+#define _loop1_48_type 1286
+#define _loop0_49_type 1287
+#define _loop0_50_type 1288
+#define _loop1_51_type 1289
+#define _loop0_53_type 1290
+#define _gather_52_type 1291
+#define _loop0_55_type 1292
+#define _gather_54_type 1293
+#define _loop0_57_type 1294
+#define _gather_56_type 1295
+#define _loop0_59_type 1296
+#define _gather_58_type 1297
+#define _tmp_60_type 1298
+#define _loop1_61_type 1299
+#define _loop1_62_type 1300
+#define _tmp_63_type 1301
+#define _tmp_64_type 1302
+#define _loop1_65_type 1303
+#define _loop0_67_type 1304
+#define _gather_66_type 1305
+#define _tmp_68_type 1306
+#define _tmp_69_type 1307
+#define _tmp_70_type 1308
+#define _tmp_71_type 1309
+#define _loop0_73_type 1310
+#define _gather_72_type 1311
+#define _loop0_75_type 1312
+#define _gather_74_type 1313
+#define _tmp_76_type 1314
+#define _loop0_78_type 1315
+#define _gather_77_type 1316
+#define _loop0_80_type 1317
+#define _gather_79_type 1318
+#define _loop1_81_type 1319
+#define _loop1_82_type 1320
+#define _loop0_84_type 1321
+#define _gather_83_type 1322
+#define _loop1_85_type 1323
+#define _loop1_86_type 1324
+#define _loop1_87_type 1325
+#define _tmp_88_type 1326
+#define _loop0_90_type 1327
+#define _gather_89_type 1328
+#define _tmp_91_type 1329
+#define _tmp_92_type 1330
+#define _tmp_93_type 1331
+#define _tmp_94_type 1332
+#define _tmp_95_type 1333
+#define _tmp_96_type 1334
+#define _loop0_97_type 1335
+#define _loop0_98_type 1336
+#define _loop0_99_type 1337
+#define _loop1_100_type 1338
+#define _loop0_101_type 1339
+#define _loop1_102_type 1340
+#define _loop1_103_type 1341
+#define _loop1_104_type 1342
+#define _loop0_105_type 1343
+#define _loop1_106_type 1344
+#define _loop0_107_type 1345
+#define _loop1_108_type 1346
+#define _loop0_109_type 1347
+#define _loop1_110_type 1348
+#define _tmp_111_type 1349
+#define _loop0_112_type 1350
+#define _loop1_113_type 1351
+#define _tmp_114_type 1352
+#define _loop0_116_type 1353
+#define _gather_115_type 1354
+#define _loop1_117_type 1355
+#define _loop0_118_type 1356
+#define _loop0_119_type 1357
+#define _tmp_120_type 1358
+#define _loop0_122_type 1359
+#define _gather_121_type 1360
+#define _tmp_123_type 1361
+#define _loop0_125_type 1362
+#define _gather_124_type 1363
+#define _loop0_127_type 1364
+#define _gather_126_type 1365
+#define _loop0_129_type 1366
+#define _gather_128_type 1367
+#define _loop0_131_type 1368
+#define _gather_130_type 1369
+#define _loop0_132_type 1370
+#define _loop0_134_type 1371
+#define _gather_133_type 1372
+#define _loop1_135_type 1373
+#define _tmp_136_type 1374
+#define _loop0_138_type 1375
+#define _gather_137_type 1376
+#define _loop0_140_type 1377
+#define _gather_139_type 1378
+#define _loop0_142_type 1379
+#define _gather_141_type 1380
+#define _loop0_144_type 1381
+#define _gather_143_type 1382
+#define _loop0_146_type 1383
+#define _gather_145_type 1384
+#define _tmp_147_type 1385
+#define _tmp_148_type 1386
+#define _tmp_149_type 1387
+#define _tmp_150_type 1388
+#define _tmp_151_type 1389
+#define _tmp_152_type 1390
+#define _tmp_153_type 1391
+#define _tmp_154_type 1392
+#define _tmp_155_type 1393
+#define _tmp_156_type 1394
+#define _tmp_157_type 1395
+#define _tmp_158_type 1396
+#define _loop0_159_type 1397
+#define _loop0_160_type 1398
+#define _loop0_161_type 1399
+#define _tmp_162_type 1400
+#define _tmp_163_type 1401
+#define _tmp_164_type 1402
+#define _tmp_165_type 1403
+#define _tmp_166_type 1404
+#define _loop0_167_type 1405
+#define _loop0_168_type 1406
+#define _loop0_169_type 1407
+#define _loop1_170_type 1408
+#define _tmp_171_type 1409
+#define _loop0_172_type 1410
+#define _tmp_173_type 1411
+#define _loop0_174_type 1412
+#define _loop1_175_type 1413
+#define _tmp_176_type 1414
+#define _tmp_177_type 1415
+#define _tmp_178_type 1416
+#define _loop0_179_type 1417
+#define _tmp_180_type 1418
+#define _tmp_181_type 1419
+#define _loop1_182_type 1420
+#define _tmp_183_type 1421
+#define _loop0_184_type 1422
+#define _loop0_185_type 1423
+#define _loop0_186_type 1424
+#define _loop0_188_type 1425
+#define _gather_187_type 1426
+#define _tmp_189_type 1427
+#define _loop0_190_type 1428
+#define _tmp_191_type 1429
+#define _loop0_192_type 1430
+#define _loop1_193_type 1431
+#define _loop1_194_type 1432
+#define _tmp_195_type 1433
+#define _tmp_196_type 1434
+#define _loop0_197_type 1435
+#define _tmp_198_type 1436
+#define _tmp_199_type 1437
+#define _tmp_200_type 1438
+#define _loop0_202_type 1439
+#define _gather_201_type 1440
+#define _loop0_204_type 1441
+#define _gather_203_type 1442
+#define _loop0_206_type 1443
+#define _gather_205_type 1444
+#define _loop0_208_type 1445
+#define _gather_207_type 1446
+#define _tmp_209_type 1447
+#define _loop0_210_type 1448
+#define _loop1_211_type 1449
+#define _tmp_212_type 1450
+#define _loop0_213_type 1451
+#define _loop1_214_type 1452
+#define _tmp_215_type 1453
+#define _tmp_216_type 1454
+#define _tmp_217_type 1455
+#define _tmp_218_type 1456
+#define _tmp_219_type 1457
+#define _tmp_220_type 1458
+#define _tmp_221_type 1459
+#define _tmp_222_type 1460
+#define _tmp_223_type 1461
+#define _tmp_224_type 1462
+#define _loop0_226_type 1463
+#define _gather_225_type 1464
+#define _tmp_227_type 1465
+#define _tmp_228_type 1466
+#define _tmp_229_type 1467
+#define _tmp_230_type 1468
+#define _tmp_231_type 1469
+#define _tmp_232_type 1470
+#define _tmp_233_type 1471
+#define _tmp_234_type 1472
+#define _tmp_235_type 1473
+#define _tmp_236_type 1474
+#define _tmp_237_type 1475
+#define _tmp_238_type 1476
+#define _tmp_239_type 1477
+#define _loop0_240_type 1478
+#define _tmp_241_type 1479
+#define _tmp_242_type 1480
+#define _tmp_243_type 1481
+#define _tmp_244_type 1482
+#define _tmp_245_type 1483
+#define _tmp_246_type 1484
+#define _tmp_247_type 1485
+#define _tmp_248_type 1486
+#define _tmp_249_type 1487
+#define _tmp_250_type 1488
+#define _tmp_251_type 1489
+#define _tmp_252_type 1490
+#define _tmp_253_type 1491
+#define _tmp_254_type 1492
+#define _tmp_255_type 1493
+#define _tmp_256_type 1494
+#define _tmp_257_type 1495
+#define _tmp_258_type 1496
+#define _tmp_259_type 1497
+#define _tmp_260_type 1498
+#define _tmp_261_type 1499
+#define _tmp_262_type 1500
+#define _tmp_263_type 1501
+#define _tmp_264_type 1502
+#define _tmp_265_type 1503
+#define _tmp_266_type 1504
+#define _tmp_267_type 1505
+#define _tmp_268_type 1506
+#define _tmp_269_type 1507
+#define _tmp_270_type 1508
+#define _tmp_271_type 1509
+#define _tmp_272_type 1510
static mod_ty file_rule(Parser *p);
static mod_ty interactive_rule(Parser *p);
@@ -707,6 +736,12 @@ static arg_ty lambda_param_no_default_rule(Parser *p);
static NameDefaultPair* lambda_param_with_default_rule(Parser *p);
static NameDefaultPair* lambda_param_maybe_default_rule(Parser *p);
static arg_ty lambda_param_rule(Parser *p);
+static expr_ty fstring_middle_rule(Parser *p);
+static expr_ty fstring_replacement_field_rule(Parser *p);
+static expr_ty fstring_conversion_rule(Parser *p);
+static expr_ty fstring_full_format_spec_rule(Parser *p);
+static expr_ty fstring_format_spec_rule(Parser *p);
+static expr_ty string_rule(Parser *p);
static expr_ty strings_rule(Parser *p);
static expr_ty list_rule(Parser *p);
static expr_ty tuple_rule(Parser *p);
@@ -791,12 +826,14 @@ static void *invalid_class_def_raw_rule(Parser *p);
static void *invalid_double_starred_kvpairs_rule(Parser *p);
static void *invalid_kvpair_rule(Parser *p);
static void *invalid_starred_expression_rule(Parser *p);
+static void *invalid_replacement_field_rule(Parser *p);
+static void *invalid_conversion_character_rule(Parser *p);
static asdl_seq *_loop0_1_rule(Parser *p);
static asdl_seq *_loop0_2_rule(Parser *p);
-static asdl_seq *_loop1_3_rule(Parser *p);
-static asdl_seq *_loop0_5_rule(Parser *p);
-static asdl_seq *_gather_4_rule(Parser *p);
-static void *_tmp_6_rule(Parser *p);
+static asdl_seq *_loop0_3_rule(Parser *p);
+static asdl_seq *_loop1_4_rule(Parser *p);
+static asdl_seq *_loop0_6_rule(Parser *p);
+static asdl_seq *_gather_5_rule(Parser *p);
static void *_tmp_7_rule(Parser *p);
static void *_tmp_8_rule(Parser *p);
static void *_tmp_9_rule(Parser *p);
@@ -804,139 +841,139 @@ static void *_tmp_10_rule(Parser *p);
static void *_tmp_11_rule(Parser *p);
static void *_tmp_12_rule(Parser *p);
static void *_tmp_13_rule(Parser *p);
-static asdl_seq *_loop1_14_rule(Parser *p);
-static void *_tmp_15_rule(Parser *p);
+static void *_tmp_14_rule(Parser *p);
+static asdl_seq *_loop1_15_rule(Parser *p);
static void *_tmp_16_rule(Parser *p);
static void *_tmp_17_rule(Parser *p);
-static asdl_seq *_loop0_19_rule(Parser *p);
-static asdl_seq *_gather_18_rule(Parser *p);
-static asdl_seq *_loop0_21_rule(Parser *p);
-static asdl_seq *_gather_20_rule(Parser *p);
-static void *_tmp_22_rule(Parser *p);
+static void *_tmp_18_rule(Parser *p);
+static asdl_seq *_loop0_20_rule(Parser *p);
+static asdl_seq *_gather_19_rule(Parser *p);
+static asdl_seq *_loop0_22_rule(Parser *p);
+static asdl_seq *_gather_21_rule(Parser *p);
static void *_tmp_23_rule(Parser *p);
-static asdl_seq *_loop0_24_rule(Parser *p);
-static asdl_seq *_loop1_25_rule(Parser *p);
-static asdl_seq *_loop0_27_rule(Parser *p);
-static asdl_seq *_gather_26_rule(Parser *p);
-static void *_tmp_28_rule(Parser *p);
-static asdl_seq *_loop0_30_rule(Parser *p);
-static asdl_seq *_gather_29_rule(Parser *p);
-static void *_tmp_31_rule(Parser *p);
-static asdl_seq *_loop1_32_rule(Parser *p);
-static void *_tmp_33_rule(Parser *p);
+static void *_tmp_24_rule(Parser *p);
+static asdl_seq *_loop0_25_rule(Parser *p);
+static asdl_seq *_loop1_26_rule(Parser *p);
+static asdl_seq *_loop0_28_rule(Parser *p);
+static asdl_seq *_gather_27_rule(Parser *p);
+static void *_tmp_29_rule(Parser *p);
+static asdl_seq *_loop0_31_rule(Parser *p);
+static asdl_seq *_gather_30_rule(Parser *p);
+static void *_tmp_32_rule(Parser *p);
+static asdl_seq *_loop1_33_rule(Parser *p);
static void *_tmp_34_rule(Parser *p);
static void *_tmp_35_rule(Parser *p);
-static asdl_seq *_loop0_36_rule(Parser *p);
+static void *_tmp_36_rule(Parser *p);
static asdl_seq *_loop0_37_rule(Parser *p);
static asdl_seq *_loop0_38_rule(Parser *p);
-static asdl_seq *_loop1_39_rule(Parser *p);
-static asdl_seq *_loop0_40_rule(Parser *p);
-static asdl_seq *_loop1_41_rule(Parser *p);
+static asdl_seq *_loop0_39_rule(Parser *p);
+static asdl_seq *_loop1_40_rule(Parser *p);
+static asdl_seq *_loop0_41_rule(Parser *p);
static asdl_seq *_loop1_42_rule(Parser *p);
static asdl_seq *_loop1_43_rule(Parser *p);
-static asdl_seq *_loop0_44_rule(Parser *p);
-static asdl_seq *_loop1_45_rule(Parser *p);
-static asdl_seq *_loop0_46_rule(Parser *p);
-static asdl_seq *_loop1_47_rule(Parser *p);
-static asdl_seq *_loop0_48_rule(Parser *p);
+static asdl_seq *_loop1_44_rule(Parser *p);
+static asdl_seq *_loop0_45_rule(Parser *p);
+static asdl_seq *_loop1_46_rule(Parser *p);
+static asdl_seq *_loop0_47_rule(Parser *p);
+static asdl_seq *_loop1_48_rule(Parser *p);
static asdl_seq *_loop0_49_rule(Parser *p);
-static asdl_seq *_loop1_50_rule(Parser *p);
-static asdl_seq *_loop0_52_rule(Parser *p);
-static asdl_seq *_gather_51_rule(Parser *p);
-static asdl_seq *_loop0_54_rule(Parser *p);
-static asdl_seq *_gather_53_rule(Parser *p);
-static asdl_seq *_loop0_56_rule(Parser *p);
-static asdl_seq *_gather_55_rule(Parser *p);
-static asdl_seq *_loop0_58_rule(Parser *p);
-static asdl_seq *_gather_57_rule(Parser *p);
-static void *_tmp_59_rule(Parser *p);
-static asdl_seq *_loop1_60_rule(Parser *p);
+static asdl_seq *_loop0_50_rule(Parser *p);
+static asdl_seq *_loop1_51_rule(Parser *p);
+static asdl_seq *_loop0_53_rule(Parser *p);
+static asdl_seq *_gather_52_rule(Parser *p);
+static asdl_seq *_loop0_55_rule(Parser *p);
+static asdl_seq *_gather_54_rule(Parser *p);
+static asdl_seq *_loop0_57_rule(Parser *p);
+static asdl_seq *_gather_56_rule(Parser *p);
+static asdl_seq *_loop0_59_rule(Parser *p);
+static asdl_seq *_gather_58_rule(Parser *p);
+static void *_tmp_60_rule(Parser *p);
static asdl_seq *_loop1_61_rule(Parser *p);
-static void *_tmp_62_rule(Parser *p);
+static asdl_seq *_loop1_62_rule(Parser *p);
static void *_tmp_63_rule(Parser *p);
-static asdl_seq *_loop1_64_rule(Parser *p);
-static asdl_seq *_loop0_66_rule(Parser *p);
-static asdl_seq *_gather_65_rule(Parser *p);
-static void *_tmp_67_rule(Parser *p);
+static void *_tmp_64_rule(Parser *p);
+static asdl_seq *_loop1_65_rule(Parser *p);
+static asdl_seq *_loop0_67_rule(Parser *p);
+static asdl_seq *_gather_66_rule(Parser *p);
static void *_tmp_68_rule(Parser *p);
static void *_tmp_69_rule(Parser *p);
static void *_tmp_70_rule(Parser *p);
-static asdl_seq *_loop0_72_rule(Parser *p);
-static asdl_seq *_gather_71_rule(Parser *p);
-static asdl_seq *_loop0_74_rule(Parser *p);
-static asdl_seq *_gather_73_rule(Parser *p);
-static void *_tmp_75_rule(Parser *p);
-static asdl_seq *_loop0_77_rule(Parser *p);
-static asdl_seq *_gather_76_rule(Parser *p);
-static asdl_seq *_loop0_79_rule(Parser *p);
-static asdl_seq *_gather_78_rule(Parser *p);
-static asdl_seq *_loop1_80_rule(Parser *p);
+static void *_tmp_71_rule(Parser *p);
+static asdl_seq *_loop0_73_rule(Parser *p);
+static asdl_seq *_gather_72_rule(Parser *p);
+static asdl_seq *_loop0_75_rule(Parser *p);
+static asdl_seq *_gather_74_rule(Parser *p);
+static void *_tmp_76_rule(Parser *p);
+static asdl_seq *_loop0_78_rule(Parser *p);
+static asdl_seq *_gather_77_rule(Parser *p);
+static asdl_seq *_loop0_80_rule(Parser *p);
+static asdl_seq *_gather_79_rule(Parser *p);
static asdl_seq *_loop1_81_rule(Parser *p);
-static asdl_seq *_loop0_83_rule(Parser *p);
-static asdl_seq *_gather_82_rule(Parser *p);
-static asdl_seq *_loop1_84_rule(Parser *p);
+static asdl_seq *_loop1_82_rule(Parser *p);
+static asdl_seq *_loop0_84_rule(Parser *p);
+static asdl_seq *_gather_83_rule(Parser *p);
static asdl_seq *_loop1_85_rule(Parser *p);
static asdl_seq *_loop1_86_rule(Parser *p);
-static void *_tmp_87_rule(Parser *p);
-static asdl_seq *_loop0_89_rule(Parser *p);
-static asdl_seq *_gather_88_rule(Parser *p);
-static void *_tmp_90_rule(Parser *p);
+static asdl_seq *_loop1_87_rule(Parser *p);
+static void *_tmp_88_rule(Parser *p);
+static asdl_seq *_loop0_90_rule(Parser *p);
+static asdl_seq *_gather_89_rule(Parser *p);
static void *_tmp_91_rule(Parser *p);
static void *_tmp_92_rule(Parser *p);
static void *_tmp_93_rule(Parser *p);
static void *_tmp_94_rule(Parser *p);
-static asdl_seq *_loop0_95_rule(Parser *p);
-static asdl_seq *_loop0_96_rule(Parser *p);
+static void *_tmp_95_rule(Parser *p);
+static void *_tmp_96_rule(Parser *p);
static asdl_seq *_loop0_97_rule(Parser *p);
-static asdl_seq *_loop1_98_rule(Parser *p);
+static asdl_seq *_loop0_98_rule(Parser *p);
static asdl_seq *_loop0_99_rule(Parser *p);
static asdl_seq *_loop1_100_rule(Parser *p);
-static asdl_seq *_loop1_101_rule(Parser *p);
+static asdl_seq *_loop0_101_rule(Parser *p);
static asdl_seq *_loop1_102_rule(Parser *p);
-static asdl_seq *_loop0_103_rule(Parser *p);
+static asdl_seq *_loop1_103_rule(Parser *p);
static asdl_seq *_loop1_104_rule(Parser *p);
static asdl_seq *_loop0_105_rule(Parser *p);
static asdl_seq *_loop1_106_rule(Parser *p);
static asdl_seq *_loop0_107_rule(Parser *p);
static asdl_seq *_loop1_108_rule(Parser *p);
-static asdl_seq *_loop1_109_rule(Parser *p);
-static void *_tmp_110_rule(Parser *p);
+static asdl_seq *_loop0_109_rule(Parser *p);
+static asdl_seq *_loop1_110_rule(Parser *p);
+static void *_tmp_111_rule(Parser *p);
static asdl_seq *_loop0_112_rule(Parser *p);
-static asdl_seq *_gather_111_rule(Parser *p);
static asdl_seq *_loop1_113_rule(Parser *p);
-static asdl_seq *_loop0_114_rule(Parser *p);
-static asdl_seq *_loop0_115_rule(Parser *p);
-static void *_tmp_116_rule(Parser *p);
+static void *_tmp_114_rule(Parser *p);
+static asdl_seq *_loop0_116_rule(Parser *p);
+static asdl_seq *_gather_115_rule(Parser *p);
+static asdl_seq *_loop1_117_rule(Parser *p);
static asdl_seq *_loop0_118_rule(Parser *p);
-static asdl_seq *_gather_117_rule(Parser *p);
-static void *_tmp_119_rule(Parser *p);
-static asdl_seq *_loop0_121_rule(Parser *p);
-static asdl_seq *_gather_120_rule(Parser *p);
-static asdl_seq *_loop0_123_rule(Parser *p);
-static asdl_seq *_gather_122_rule(Parser *p);
+static asdl_seq *_loop0_119_rule(Parser *p);
+static void *_tmp_120_rule(Parser *p);
+static asdl_seq *_loop0_122_rule(Parser *p);
+static asdl_seq *_gather_121_rule(Parser *p);
+static void *_tmp_123_rule(Parser *p);
static asdl_seq *_loop0_125_rule(Parser *p);
static asdl_seq *_gather_124_rule(Parser *p);
static asdl_seq *_loop0_127_rule(Parser *p);
static asdl_seq *_gather_126_rule(Parser *p);
-static asdl_seq *_loop0_128_rule(Parser *p);
-static asdl_seq *_loop0_130_rule(Parser *p);
-static asdl_seq *_gather_129_rule(Parser *p);
-static asdl_seq *_loop1_131_rule(Parser *p);
-static void *_tmp_132_rule(Parser *p);
+static asdl_seq *_loop0_129_rule(Parser *p);
+static asdl_seq *_gather_128_rule(Parser *p);
+static asdl_seq *_loop0_131_rule(Parser *p);
+static asdl_seq *_gather_130_rule(Parser *p);
+static asdl_seq *_loop0_132_rule(Parser *p);
static asdl_seq *_loop0_134_rule(Parser *p);
static asdl_seq *_gather_133_rule(Parser *p);
-static asdl_seq *_loop0_136_rule(Parser *p);
-static asdl_seq *_gather_135_rule(Parser *p);
+static asdl_seq *_loop1_135_rule(Parser *p);
+static void *_tmp_136_rule(Parser *p);
static asdl_seq *_loop0_138_rule(Parser *p);
static asdl_seq *_gather_137_rule(Parser *p);
static asdl_seq *_loop0_140_rule(Parser *p);
static asdl_seq *_gather_139_rule(Parser *p);
static asdl_seq *_loop0_142_rule(Parser *p);
static asdl_seq *_gather_141_rule(Parser *p);
-static void *_tmp_143_rule(Parser *p);
-static void *_tmp_144_rule(Parser *p);
-static void *_tmp_145_rule(Parser *p);
-static void *_tmp_146_rule(Parser *p);
+static asdl_seq *_loop0_144_rule(Parser *p);
+static asdl_seq *_gather_143_rule(Parser *p);
+static asdl_seq *_loop0_146_rule(Parser *p);
+static asdl_seq *_gather_145_rule(Parser *p);
static void *_tmp_147_rule(Parser *p);
static void *_tmp_148_rule(Parser *p);
static void *_tmp_149_rule(Parser *p);
@@ -944,79 +981,79 @@ static void *_tmp_150_rule(Parser *p);
static void *_tmp_151_rule(Parser *p);
static void *_tmp_152_rule(Parser *p);
static void *_tmp_153_rule(Parser *p);
-static asdl_seq *_loop0_154_rule(Parser *p);
-static asdl_seq *_loop0_155_rule(Parser *p);
-static asdl_seq *_loop0_156_rule(Parser *p);
+static void *_tmp_154_rule(Parser *p);
+static void *_tmp_155_rule(Parser *p);
+static void *_tmp_156_rule(Parser *p);
static void *_tmp_157_rule(Parser *p);
static void *_tmp_158_rule(Parser *p);
-static void *_tmp_159_rule(Parser *p);
-static void *_tmp_160_rule(Parser *p);
-static void *_tmp_161_rule(Parser *p);
-static asdl_seq *_loop0_162_rule(Parser *p);
-static asdl_seq *_loop0_163_rule(Parser *p);
-static asdl_seq *_loop0_164_rule(Parser *p);
-static asdl_seq *_loop1_165_rule(Parser *p);
+static asdl_seq *_loop0_159_rule(Parser *p);
+static asdl_seq *_loop0_160_rule(Parser *p);
+static asdl_seq *_loop0_161_rule(Parser *p);
+static void *_tmp_162_rule(Parser *p);
+static void *_tmp_163_rule(Parser *p);
+static void *_tmp_164_rule(Parser *p);
+static void *_tmp_165_rule(Parser *p);
static void *_tmp_166_rule(Parser *p);
static asdl_seq *_loop0_167_rule(Parser *p);
-static void *_tmp_168_rule(Parser *p);
+static asdl_seq *_loop0_168_rule(Parser *p);
static asdl_seq *_loop0_169_rule(Parser *p);
static asdl_seq *_loop1_170_rule(Parser *p);
static void *_tmp_171_rule(Parser *p);
-static void *_tmp_172_rule(Parser *p);
+static asdl_seq *_loop0_172_rule(Parser *p);
static void *_tmp_173_rule(Parser *p);
static asdl_seq *_loop0_174_rule(Parser *p);
-static void *_tmp_175_rule(Parser *p);
+static asdl_seq *_loop1_175_rule(Parser *p);
static void *_tmp_176_rule(Parser *p);
-static asdl_seq *_loop1_177_rule(Parser *p);
+static void *_tmp_177_rule(Parser *p);
static void *_tmp_178_rule(Parser *p);
static asdl_seq *_loop0_179_rule(Parser *p);
-static asdl_seq *_loop0_180_rule(Parser *p);
-static asdl_seq *_loop0_181_rule(Parser *p);
-static asdl_seq *_loop0_183_rule(Parser *p);
-static asdl_seq *_gather_182_rule(Parser *p);
-static void *_tmp_184_rule(Parser *p);
+static void *_tmp_180_rule(Parser *p);
+static void *_tmp_181_rule(Parser *p);
+static asdl_seq *_loop1_182_rule(Parser *p);
+static void *_tmp_183_rule(Parser *p);
+static asdl_seq *_loop0_184_rule(Parser *p);
static asdl_seq *_loop0_185_rule(Parser *p);
-static void *_tmp_186_rule(Parser *p);
-static asdl_seq *_loop0_187_rule(Parser *p);
-static asdl_seq *_loop1_188_rule(Parser *p);
-static asdl_seq *_loop1_189_rule(Parser *p);
-static void *_tmp_190_rule(Parser *p);
+static asdl_seq *_loop0_186_rule(Parser *p);
+static asdl_seq *_loop0_188_rule(Parser *p);
+static asdl_seq *_gather_187_rule(Parser *p);
+static void *_tmp_189_rule(Parser *p);
+static asdl_seq *_loop0_190_rule(Parser *p);
static void *_tmp_191_rule(Parser *p);
static asdl_seq *_loop0_192_rule(Parser *p);
-static void *_tmp_193_rule(Parser *p);
-static void *_tmp_194_rule(Parser *p);
+static asdl_seq *_loop1_193_rule(Parser *p);
+static asdl_seq *_loop1_194_rule(Parser *p);
static void *_tmp_195_rule(Parser *p);
+static void *_tmp_196_rule(Parser *p);
static asdl_seq *_loop0_197_rule(Parser *p);
-static asdl_seq *_gather_196_rule(Parser *p);
-static asdl_seq *_loop0_199_rule(Parser *p);
-static asdl_seq *_gather_198_rule(Parser *p);
-static asdl_seq *_loop0_201_rule(Parser *p);
-static asdl_seq *_gather_200_rule(Parser *p);
-static asdl_seq *_loop0_203_rule(Parser *p);
-static asdl_seq *_gather_202_rule(Parser *p);
-static void *_tmp_204_rule(Parser *p);
-static asdl_seq *_loop0_205_rule(Parser *p);
-static asdl_seq *_loop1_206_rule(Parser *p);
-static void *_tmp_207_rule(Parser *p);
+static void *_tmp_198_rule(Parser *p);
+static void *_tmp_199_rule(Parser *p);
+static void *_tmp_200_rule(Parser *p);
+static asdl_seq *_loop0_202_rule(Parser *p);
+static asdl_seq *_gather_201_rule(Parser *p);
+static asdl_seq *_loop0_204_rule(Parser *p);
+static asdl_seq *_gather_203_rule(Parser *p);
+static asdl_seq *_loop0_206_rule(Parser *p);
+static asdl_seq *_gather_205_rule(Parser *p);
static asdl_seq *_loop0_208_rule(Parser *p);
-static asdl_seq *_loop1_209_rule(Parser *p);
-static void *_tmp_210_rule(Parser *p);
-static void *_tmp_211_rule(Parser *p);
+static asdl_seq *_gather_207_rule(Parser *p);
+static void *_tmp_209_rule(Parser *p);
+static asdl_seq *_loop0_210_rule(Parser *p);
+static asdl_seq *_loop1_211_rule(Parser *p);
static void *_tmp_212_rule(Parser *p);
-static void *_tmp_213_rule(Parser *p);
-static void *_tmp_214_rule(Parser *p);
+static asdl_seq *_loop0_213_rule(Parser *p);
+static asdl_seq *_loop1_214_rule(Parser *p);
static void *_tmp_215_rule(Parser *p);
static void *_tmp_216_rule(Parser *p);
static void *_tmp_217_rule(Parser *p);
static void *_tmp_218_rule(Parser *p);
static void *_tmp_219_rule(Parser *p);
-static asdl_seq *_loop0_221_rule(Parser *p);
-static asdl_seq *_gather_220_rule(Parser *p);
+static void *_tmp_220_rule(Parser *p);
+static void *_tmp_221_rule(Parser *p);
static void *_tmp_222_rule(Parser *p);
static void *_tmp_223_rule(Parser *p);
static void *_tmp_224_rule(Parser *p);
-static void *_tmp_225_rule(Parser *p);
-static void *_tmp_226_rule(Parser *p);
+static asdl_seq *_loop0_226_rule(Parser *p);
+static asdl_seq *_gather_225_rule(Parser *p);
static void *_tmp_227_rule(Parser *p);
static void *_tmp_228_rule(Parser *p);
static void *_tmp_229_rule(Parser *p);
@@ -1030,7 +1067,7 @@ static void *_tmp_236_rule(Parser *p);
static void *_tmp_237_rule(Parser *p);
static void *_tmp_238_rule(Parser *p);
static void *_tmp_239_rule(Parser *p);
-static void *_tmp_240_rule(Parser *p);
+static asdl_seq *_loop0_240_rule(Parser *p);
static void *_tmp_241_rule(Parser *p);
static void *_tmp_242_rule(Parser *p);
static void *_tmp_243_rule(Parser *p);
@@ -1042,6 +1079,27 @@ static void *_tmp_248_rule(Parser *p);
static void *_tmp_249_rule(Parser *p);
static void *_tmp_250_rule(Parser *p);
static void *_tmp_251_rule(Parser *p);
+static void *_tmp_252_rule(Parser *p);
+static void *_tmp_253_rule(Parser *p);
+static void *_tmp_254_rule(Parser *p);
+static void *_tmp_255_rule(Parser *p);
+static void *_tmp_256_rule(Parser *p);
+static void *_tmp_257_rule(Parser *p);
+static void *_tmp_258_rule(Parser *p);
+static void *_tmp_259_rule(Parser *p);
+static void *_tmp_260_rule(Parser *p);
+static void *_tmp_261_rule(Parser *p);
+static void *_tmp_262_rule(Parser *p);
+static void *_tmp_263_rule(Parser *p);
+static void *_tmp_264_rule(Parser *p);
+static void *_tmp_265_rule(Parser *p);
+static void *_tmp_266_rule(Parser *p);
+static void *_tmp_267_rule(Parser *p);
+static void *_tmp_268_rule(Parser *p);
+static void *_tmp_269_rule(Parser *p);
+static void *_tmp_270_rule(Parser *p);
+static void *_tmp_271_rule(Parser *p);
+static void *_tmp_272_rule(Parser *p);
// file: statements? $
@@ -1247,7 +1305,7 @@ func_type_rule(Parser *p)
return _res;
}
-// fstring: star_expressions
+// fstring: FSTRING_START fstring_middle* FSTRING_END
static expr_ty
fstring_rule(Parser *p)
{
@@ -1261,24 +1319,35 @@ fstring_rule(Parser *p)
}
expr_ty _res = NULL;
int _mark = p->mark;
- { // star_expressions
+ { // FSTRING_START fstring_middle* FSTRING_END
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> fstring[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
- expr_ty star_expressions_var;
+ D(fprintf(stderr, "%*c> fstring[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "FSTRING_START fstring_middle* FSTRING_END"));
+ Token * a;
+ asdl_seq * b;
+ Token * c;
if (
- (star_expressions_var = star_expressions_rule(p)) // star_expressions
+ (a = _PyPegen_expect_token(p, FSTRING_START)) // token='FSTRING_START'
+ &&
+ (b = _loop0_3_rule(p)) // fstring_middle*
+ &&
+ (c = _PyPegen_expect_token(p, FSTRING_END)) // token='FSTRING_END'
)
{
- D(fprintf(stderr, "%*c+ fstring[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
- _res = star_expressions_var;
+ D(fprintf(stderr, "%*c+ fstring[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "FSTRING_START fstring_middle* FSTRING_END"));
+ _res = _PyPegen_joined_str ( p , a , ( asdl_expr_seq* ) b , c );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
goto done;
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s fstring[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "FSTRING_START fstring_middle* FSTRING_END"));
}
_res = NULL;
done:
@@ -1308,7 +1377,7 @@ statements_rule(Parser *p)
D(fprintf(stderr, "%*c> statements[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "statement+"));
asdl_seq * a;
if (
- (a = _loop1_3_rule(p)) // statement+
+ (a = _loop1_4_rule(p)) // statement+
)
{
D(fprintf(stderr, "%*c+ statements[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "statement+"));
@@ -1584,7 +1653,7 @@ simple_stmts_rule(Parser *p)
asdl_stmt_seq* a;
Token * newline_var;
if (
- (a = (asdl_stmt_seq*)_gather_4_rule(p)) // ';'.simple_stmt+
+ (a = (asdl_stmt_seq*)_gather_5_rule(p)) // ';'.simple_stmt+
&&
(_opt_var = _PyPegen_expect_token(p, 13), !p->error_indicator) // ';'?
&&
@@ -1731,7 +1800,7 @@ simple_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('import' | 'from') import_stmt"));
stmt_ty import_stmt_var;
if (
- _PyPegen_lookahead(1, _tmp_6_rule, p)
+ _PyPegen_lookahead(1, _tmp_7_rule, p)
&&
(import_stmt_var = import_stmt_rule(p)) // import_stmt
)
@@ -1806,7 +1875,7 @@ simple_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'del' del_stmt"));
stmt_ty del_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 603) // token='del'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 604) // token='del'
&&
(del_stmt_var = del_stmt_rule(p)) // del_stmt
)
@@ -2006,7 +2075,7 @@ compound_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('def' | '@' | ASYNC) function_def"));
stmt_ty function_def_var;
if (
- _PyPegen_lookahead(1, _tmp_7_rule, p)
+ _PyPegen_lookahead(1, _tmp_8_rule, p)
&&
(function_def_var = function_def_rule(p)) // function_def
)
@@ -2027,7 +2096,7 @@ compound_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'if' if_stmt"));
stmt_ty if_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 641) // token='if'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 642) // token='if'
&&
(if_stmt_var = if_stmt_rule(p)) // if_stmt
)
@@ -2048,7 +2117,7 @@ compound_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('class' | '@') class_def"));
stmt_ty class_def_var;
if (
- _PyPegen_lookahead(1, _tmp_8_rule, p)
+ _PyPegen_lookahead(1, _tmp_9_rule, p)
&&
(class_def_var = class_def_rule(p)) // class_def
)
@@ -2069,7 +2138,7 @@ compound_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('with' | ASYNC) with_stmt"));
stmt_ty with_stmt_var;
if (
- _PyPegen_lookahead(1, _tmp_9_rule, p)
+ _PyPegen_lookahead(1, _tmp_10_rule, p)
&&
(with_stmt_var = with_stmt_rule(p)) // with_stmt
)
@@ -2090,7 +2159,7 @@ compound_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('for' | ASYNC) for_stmt"));
stmt_ty for_stmt_var;
if (
- _PyPegen_lookahead(1, _tmp_10_rule, p)
+ _PyPegen_lookahead(1, _tmp_11_rule, p)
&&
(for_stmt_var = for_stmt_rule(p)) // for_stmt
)
@@ -2111,7 +2180,7 @@ compound_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'try' try_stmt"));
stmt_ty try_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 623) // token='try'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 624) // token='try'
&&
(try_stmt_var = try_stmt_rule(p)) // try_stmt
)
@@ -2132,7 +2201,7 @@ compound_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'while' while_stmt"));
stmt_ty while_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 646) // token='while'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 647) // token='while'
&&
(while_stmt_var = while_stmt_rule(p)) // while_stmt
)
@@ -2215,7 +2284,7 @@ assignment_rule(Parser *p)
&&
(b = expression_rule(p)) // expression
&&
- (c = _tmp_11_rule(p), !p->error_indicator) // ['=' annotated_rhs]
+ (c = _tmp_12_rule(p), !p->error_indicator) // ['=' annotated_rhs]
)
{
D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME ':' expression ['=' annotated_rhs]"));
@@ -2251,13 +2320,13 @@ assignment_rule(Parser *p)
expr_ty b;
void *c;
if (
- (a = _tmp_12_rule(p)) // '(' single_target ')' | single_subscript_attribute_target
+ (a = _tmp_13_rule(p)) // '(' single_target ')' | single_subscript_attribute_target
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
(b = expression_rule(p)) // expression
&&
- (c = _tmp_13_rule(p), !p->error_indicator) // ['=' annotated_rhs]
+ (c = _tmp_14_rule(p), !p->error_indicator) // ['=' annotated_rhs]
)
{
D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs]"));
@@ -2292,9 +2361,9 @@ assignment_rule(Parser *p)
void *b;
void *tc;
if (
- (a = (asdl_expr_seq*)_loop1_14_rule(p)) // ((star_targets '='))+
+ (a = (asdl_expr_seq*)_loop1_15_rule(p)) // ((star_targets '='))+
&&
- (b = _tmp_15_rule(p)) // yield_expr | star_expressions
+ (b = _tmp_16_rule(p)) // yield_expr | star_expressions
&&
_PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 22) // token='='
&&
@@ -2340,7 +2409,7 @@ assignment_rule(Parser *p)
&&
(_cut_var = 1)
&&
- (c = _tmp_16_rule(p)) // yield_expr | star_expressions
+ (c = _tmp_17_rule(p)) // yield_expr | star_expressions
)
{
D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "single_target augassign ~ (yield_expr | star_expressions)"));
@@ -2899,7 +2968,7 @@ raise_stmt_rule(Parser *p)
&&
(a = expression_rule(p)) // expression
&&
- (b = _tmp_17_rule(p), !p->error_indicator) // ['from' expression]
+ (b = _tmp_18_rule(p), !p->error_indicator) // ['from' expression]
)
{
D(fprintf(stderr, "%*c+ raise_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'raise' expression ['from' expression]"));
@@ -2997,7 +3066,7 @@ global_stmt_rule(Parser *p)
if (
(_keyword = _PyPegen_expect_token(p, 523)) // token='global'
&&
- (a = (asdl_expr_seq*)_gather_18_rule(p)) // ','.NAME+
+ (a = (asdl_expr_seq*)_gather_19_rule(p)) // ','.NAME+
)
{
D(fprintf(stderr, "%*c+ global_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'global' ','.NAME+"));
@@ -3062,7 +3131,7 @@ nonlocal_stmt_rule(Parser *p)
if (
(_keyword = _PyPegen_expect_token(p, 524)) // token='nonlocal'
&&
- (a = (asdl_expr_seq*)_gather_20_rule(p)) // ','.NAME+
+ (a = (asdl_expr_seq*)_gather_21_rule(p)) // ','.NAME+
)
{
D(fprintf(stderr, "%*c+ nonlocal_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'nonlocal' ','.NAME+"));
@@ -3125,11 +3194,11 @@ del_stmt_rule(Parser *p)
Token * _keyword;
asdl_expr_seq* a;
if (
- (_keyword = _PyPegen_expect_token(p, 603)) // token='del'
+ (_keyword = _PyPegen_expect_token(p, 604)) // token='del'
&&
(a = del_targets_rule(p)) // del_targets
&&
- _PyPegen_lookahead(1, _tmp_22_rule, p)
+ _PyPegen_lookahead(1, _tmp_23_rule, p)
)
{
D(fprintf(stderr, "%*c+ del_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'del' del_targets &(';' | NEWLINE)"));
@@ -3278,7 +3347,7 @@ assert_stmt_rule(Parser *p)
&&
(a = expression_rule(p)) // expression
&&
- (b = _tmp_23_rule(p), !p->error_indicator) // [',' expression]
+ (b = _tmp_24_rule(p), !p->error_indicator) // [',' expression]
)
{
D(fprintf(stderr, "%*c+ assert_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'assert' expression [',' expression]"));
@@ -3418,7 +3487,7 @@ import_name_rule(Parser *p)
Token * _keyword;
asdl_alias_seq* a;
if (
- (_keyword = _PyPegen_expect_token(p, 606)) // token='import'
+ (_keyword = _PyPegen_expect_token(p, 607)) // token='import'
&&
(a = dotted_as_names_rule(p)) // dotted_as_names
)
@@ -3488,13 +3557,13 @@ import_from_rule(Parser *p)
expr_ty b;
asdl_alias_seq* c;
if (
- (_keyword = _PyPegen_expect_token(p, 607)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 608)) // token='from'
&&
- (a = _loop0_24_rule(p)) // (('.' | '...'))*
+ (a = _loop0_25_rule(p)) // (('.' | '...'))*
&&
(b = dotted_name_rule(p)) // dotted_name
&&
- (_keyword_1 = _PyPegen_expect_token(p, 606)) // token='import'
+ (_keyword_1 = _PyPegen_expect_token(p, 607)) // token='import'
&&
(c = import_from_targets_rule(p)) // import_from_targets
)
@@ -3532,11 +3601,11 @@ import_from_rule(Parser *p)
asdl_seq * a;
asdl_alias_seq* b;
if (
- (_keyword = _PyPegen_expect_token(p, 607)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 608)) // token='from'
&&
- (a = _loop1_25_rule(p)) // (('.' | '...'))+
+ (a = _loop1_26_rule(p)) // (('.' | '...'))+
&&
- (_keyword_1 = _PyPegen_expect_token(p, 606)) // token='import'
+ (_keyword_1 = _PyPegen_expect_token(p, 607)) // token='import'
&&
(b = import_from_targets_rule(p)) // import_from_targets
)
@@ -3731,7 +3800,7 @@ import_from_as_names_rule(Parser *p)
D(fprintf(stderr, "%*c> import_from_as_names[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.import_from_as_name+"));
asdl_alias_seq* a;
if (
- (a = (asdl_alias_seq*)_gather_26_rule(p)) // ','.import_from_as_name+
+ (a = (asdl_alias_seq*)_gather_27_rule(p)) // ','.import_from_as_name+
)
{
D(fprintf(stderr, "%*c+ import_from_as_names[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.import_from_as_name+"));
@@ -3787,7 +3856,7 @@ import_from_as_name_rule(Parser *p)
if (
(a = _PyPegen_name_token(p)) // NAME
&&
- (b = _tmp_28_rule(p), !p->error_indicator) // ['as' NAME]
+ (b = _tmp_29_rule(p), !p->error_indicator) // ['as' NAME]
)
{
D(fprintf(stderr, "%*c+ import_from_as_name[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME ['as' NAME]"));
@@ -3840,7 +3909,7 @@ dotted_as_names_rule(Parser *p)
D(fprintf(stderr, "%*c> dotted_as_names[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.dotted_as_name+"));
asdl_alias_seq* a;
if (
- (a = (asdl_alias_seq*)_gather_29_rule(p)) // ','.dotted_as_name+
+ (a = (asdl_alias_seq*)_gather_30_rule(p)) // ','.dotted_as_name+
)
{
D(fprintf(stderr, "%*c+ dotted_as_names[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.dotted_as_name+"));
@@ -3896,7 +3965,7 @@ dotted_as_name_rule(Parser *p)
if (
(a = dotted_name_rule(p)) // dotted_name
&&
- (b = _tmp_31_rule(p), !p->error_indicator) // ['as' NAME]
+ (b = _tmp_32_rule(p), !p->error_indicator) // ['as' NAME]
)
{
D(fprintf(stderr, "%*c+ dotted_as_name[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_name ['as' NAME]"));
@@ -4151,7 +4220,7 @@ decorators_rule(Parser *p)
D(fprintf(stderr, "%*c> decorators[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(('@' named_expression NEWLINE))+"));
asdl_expr_seq* a;
if (
- (a = (asdl_expr_seq*)_loop1_32_rule(p)) // (('@' named_expression NEWLINE))+
+ (a = (asdl_expr_seq*)_loop1_33_rule(p)) // (('@' named_expression NEWLINE))+
)
{
D(fprintf(stderr, "%*c+ decorators[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(('@' named_expression NEWLINE))+"));
@@ -4293,11 +4362,11 @@ class_def_raw_rule(Parser *p)
void *b;
asdl_stmt_seq* c;
if (
- (_keyword = _PyPegen_expect_token(p, 653)) // token='class'
+ (_keyword = _PyPegen_expect_token(p, 654)) // token='class'
&&
(a = _PyPegen_name_token(p)) // NAME
&&
- (b = _tmp_33_rule(p), !p->error_indicator) // ['(' arguments? ')']
+ (b = _tmp_34_rule(p), !p->error_indicator) // ['(' arguments? ')']
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -4459,7 +4528,7 @@ function_def_raw_rule(Parser *p)
void *params;
void *tc;
if (
- (_keyword = _PyPegen_expect_token(p, 651)) // token='def'
+ (_keyword = _PyPegen_expect_token(p, 652)) // token='def'
&&
(n = _PyPegen_name_token(p)) // NAME
&&
@@ -4469,7 +4538,7 @@ function_def_raw_rule(Parser *p)
&&
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
&&
- (a = _tmp_34_rule(p), !p->error_indicator) // ['->' expression]
+ (a = _tmp_35_rule(p), !p->error_indicator) // ['->' expression]
&&
(_literal_2 = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
@@ -4519,7 +4588,7 @@ function_def_raw_rule(Parser *p)
if (
(async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
&&
- (_keyword = _PyPegen_expect_token(p, 651)) // token='def'
+ (_keyword = _PyPegen_expect_token(p, 652)) // token='def'
&&
(n = _PyPegen_name_token(p)) // NAME
&&
@@ -4529,7 +4598,7 @@ function_def_raw_rule(Parser *p)
&&
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
&&
- (a = _tmp_35_rule(p), !p->error_indicator) // ['->' expression]
+ (a = _tmp_36_rule(p), !p->error_indicator) // ['->' expression]
&&
(_literal_2 = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
@@ -4656,9 +4725,9 @@ parameters_rule(Parser *p)
if (
(a = slash_no_default_rule(p)) // slash_no_default
&&
- (b = (asdl_arg_seq*)_loop0_36_rule(p)) // param_no_default*
+ (b = (asdl_arg_seq*)_loop0_37_rule(p)) // param_no_default*
&&
- (c = _loop0_37_rule(p)) // param_with_default*
+ (c = _loop0_38_rule(p)) // param_with_default*
&&
(d = star_etc_rule(p), !p->error_indicator) // star_etc?
)
@@ -4688,7 +4757,7 @@ parameters_rule(Parser *p)
if (
(a = slash_with_default_rule(p)) // slash_with_default
&&
- (b = _loop0_38_rule(p)) // param_with_default*
+ (b = _loop0_39_rule(p)) // param_with_default*
&&
(c = star_etc_rule(p), !p->error_indicator) // star_etc?
)
@@ -4716,9 +4785,9 @@ parameters_rule(Parser *p)
asdl_seq * b;
void *c;
if (
- (a = (asdl_arg_seq*)_loop1_39_rule(p)) // param_no_default+
+ (a = (asdl_arg_seq*)_loop1_40_rule(p)) // param_no_default+
&&
- (b = _loop0_40_rule(p)) // param_with_default*
+ (b = _loop0_41_rule(p)) // param_with_default*
&&
(c = star_etc_rule(p), !p->error_indicator) // star_etc?
)
@@ -4745,7 +4814,7 @@ parameters_rule(Parser *p)
asdl_seq * a;
void *b;
if (
- (a = _loop1_41_rule(p)) // param_with_default+
+ (a = _loop1_42_rule(p)) // param_with_default+
&&
(b = star_etc_rule(p), !p->error_indicator) // star_etc?
)
@@ -4817,7 +4886,7 @@ slash_no_default_rule(Parser *p)
Token * _literal_1;
asdl_arg_seq* a;
if (
- (a = (asdl_arg_seq*)_loop1_42_rule(p)) // param_no_default+
+ (a = (asdl_arg_seq*)_loop1_43_rule(p)) // param_no_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -4846,7 +4915,7 @@ slash_no_default_rule(Parser *p)
Token * _literal;
asdl_arg_seq* a;
if (
- (a = (asdl_arg_seq*)_loop1_43_rule(p)) // param_no_default+
+ (a = (asdl_arg_seq*)_loop1_44_rule(p)) // param_no_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -4899,9 +4968,9 @@ slash_with_default_rule(Parser *p)
asdl_seq * a;
asdl_seq * b;
if (
- (a = _loop0_44_rule(p)) // param_no_default*
+ (a = _loop0_45_rule(p)) // param_no_default*
&&
- (b = _loop1_45_rule(p)) // param_with_default+
+ (b = _loop1_46_rule(p)) // param_with_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -4931,9 +5000,9 @@ slash_with_default_rule(Parser *p)
asdl_seq * a;
asdl_seq * b;
if (
- (a = _loop0_46_rule(p)) // param_no_default*
+ (a = _loop0_47_rule(p)) // param_no_default*
&&
- (b = _loop1_47_rule(p)) // param_with_default+
+ (b = _loop1_48_rule(p)) // param_with_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -5012,7 +5081,7 @@ star_etc_rule(Parser *p)
&&
(a = param_no_default_rule(p)) // param_no_default
&&
- (b = _loop0_48_rule(p)) // param_maybe_default*
+ (b = _loop0_49_rule(p)) // param_maybe_default*
&&
(c = kwds_rule(p), !p->error_indicator) // kwds?
)
@@ -5045,7 +5114,7 @@ star_etc_rule(Parser *p)
&&
(a = param_no_default_star_annotation_rule(p)) // param_no_default_star_annotation
&&
- (b = _loop0_49_rule(p)) // param_maybe_default*
+ (b = _loop0_50_rule(p)) // param_maybe_default*
&&
(c = kwds_rule(p), !p->error_indicator) // kwds?
)
@@ -5078,7 +5147,7 @@ star_etc_rule(Parser *p)
&&
(_literal_1 = _PyPegen_expect_token(p, 12)) // token=','
&&
- (b = _loop1_50_rule(p)) // param_maybe_default+
+ (b = _loop1_51_rule(p)) // param_maybe_default+
&&
(c = kwds_rule(p), !p->error_indicator) // kwds?
)
@@ -5871,7 +5940,7 @@ if_stmt_rule(Parser *p)
asdl_stmt_seq* b;
stmt_ty c;
if (
- (_keyword = _PyPegen_expect_token(p, 641)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
&&
(a = named_expression_rule(p)) // named_expression
&&
@@ -5916,7 +5985,7 @@ if_stmt_rule(Parser *p)
asdl_stmt_seq* b;
void *c;
if (
- (_keyword = _PyPegen_expect_token(p, 641)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
&&
(a = named_expression_rule(p)) // named_expression
&&
@@ -6012,7 +6081,7 @@ elif_stmt_rule(Parser *p)
asdl_stmt_seq* b;
stmt_ty c;
if (
- (_keyword = _PyPegen_expect_token(p, 643)) // token='elif'
+ (_keyword = _PyPegen_expect_token(p, 644)) // token='elif'
&&
(a = named_expression_rule(p)) // named_expression
&&
@@ -6057,7 +6126,7 @@ elif_stmt_rule(Parser *p)
asdl_stmt_seq* b;
void *c;
if (
- (_keyword = _PyPegen_expect_token(p, 643)) // token='elif'
+ (_keyword = _PyPegen_expect_token(p, 644)) // token='elif'
&&
(a = named_expression_rule(p)) // named_expression
&&
@@ -6139,7 +6208,7 @@ else_block_rule(Parser *p)
Token * _literal;
asdl_stmt_seq* b;
if (
- (_keyword = _PyPegen_expect_token(p, 644)) // token='else'
+ (_keyword = _PyPegen_expect_token(p, 645)) // token='else'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
@@ -6219,7 +6288,7 @@ while_stmt_rule(Parser *p)
asdl_stmt_seq* b;
void *c;
if (
- (_keyword = _PyPegen_expect_token(p, 646)) // token='while'
+ (_keyword = _PyPegen_expect_token(p, 647)) // token='while'
&&
(a = named_expression_rule(p)) // named_expression
&&
@@ -6320,11 +6389,11 @@ for_stmt_rule(Parser *p)
expr_ty t;
void *tc;
if (
- (_keyword = _PyPegen_expect_token(p, 649)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 650)) // token='for'
&&
(t = star_targets_rule(p)) // star_targets
&&
- (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in'
&&
(_cut_var = 1)
&&
@@ -6384,11 +6453,11 @@ for_stmt_rule(Parser *p)
if (
(async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
&&
- (_keyword = _PyPegen_expect_token(p, 649)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 650)) // token='for'
&&
(t = star_targets_rule(p)) // star_targets
&&
- (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in'
&&
(_cut_var = 1)
&&
@@ -6517,11 +6586,11 @@ with_stmt_rule(Parser *p)
asdl_withitem_seq* a;
asdl_stmt_seq* b;
if (
- (_keyword = _PyPegen_expect_token(p, 614)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 615)) // token='with'
&&
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
- (a = (asdl_withitem_seq*)_gather_51_rule(p)) // ','.with_item+
+ (a = (asdl_withitem_seq*)_gather_52_rule(p)) // ','.with_item+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
&&
@@ -6566,9 +6635,9 @@ with_stmt_rule(Parser *p)
asdl_stmt_seq* b;
void *tc;
if (
- (_keyword = _PyPegen_expect_token(p, 614)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 615)) // token='with'
&&
- (a = (asdl_withitem_seq*)_gather_53_rule(p)) // ','.with_item+
+ (a = (asdl_withitem_seq*)_gather_54_rule(p)) // ','.with_item+
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -6617,11 +6686,11 @@ with_stmt_rule(Parser *p)
if (
(async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
&&
- (_keyword = _PyPegen_expect_token(p, 614)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 615)) // token='with'
&&
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
- (a = (asdl_withitem_seq*)_gather_55_rule(p)) // ','.with_item+
+ (a = (asdl_withitem_seq*)_gather_56_rule(p)) // ','.with_item+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
&&
@@ -6669,9 +6738,9 @@ with_stmt_rule(Parser *p)
if (
(async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
&&
- (_keyword = _PyPegen_expect_token(p, 614)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 615)) // token='with'
&&
- (a = (asdl_withitem_seq*)_gather_57_rule(p)) // ','.with_item+
+ (a = (asdl_withitem_seq*)_gather_58_rule(p)) // ','.with_item+
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -6756,11 +6825,11 @@ with_item_rule(Parser *p)
if (
(e = expression_rule(p)) // expression
&&
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(t = star_target_rule(p)) // star_target
&&
- _PyPegen_lookahead(1, _tmp_59_rule, p)
+ _PyPegen_lookahead(1, _tmp_60_rule, p)
)
{
D(fprintf(stderr, "%*c+ with_item[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression 'as' star_target &(',' | ')' | ':')"));
@@ -6882,7 +6951,7 @@ try_stmt_rule(Parser *p)
asdl_stmt_seq* b;
asdl_stmt_seq* f;
if (
- (_keyword = _PyPegen_expect_token(p, 623)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 624)) // token='try'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
@@ -6926,13 +6995,13 @@ try_stmt_rule(Parser *p)
asdl_excepthandler_seq* ex;
void *f;
if (
- (_keyword = _PyPegen_expect_token(p, 623)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 624)) // token='try'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
(b = block_rule(p)) // block
&&
- (ex = (asdl_excepthandler_seq*)_loop1_60_rule(p)) // except_block+
+ (ex = (asdl_excepthandler_seq*)_loop1_61_rule(p)) // except_block+
&&
(el = else_block_rule(p), !p->error_indicator) // else_block?
&&
@@ -6974,13 +7043,13 @@ try_stmt_rule(Parser *p)
asdl_excepthandler_seq* ex;
void *f;
if (
- (_keyword = _PyPegen_expect_token(p, 623)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 624)) // token='try'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
(b = block_rule(p)) // block
&&
- (ex = (asdl_excepthandler_seq*)_loop1_61_rule(p)) // except_star_block+
+ (ex = (asdl_excepthandler_seq*)_loop1_62_rule(p)) // except_star_block+
&&
(el = else_block_rule(p), !p->error_indicator) // else_block?
&&
@@ -7073,11 +7142,11 @@ except_block_rule(Parser *p)
expr_ty e;
void *t;
if (
- (_keyword = _PyPegen_expect_token(p, 636)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 637)) // token='except'
&&
(e = expression_rule(p)) // expression
&&
- (t = _tmp_62_rule(p), !p->error_indicator) // ['as' NAME]
+ (t = _tmp_63_rule(p), !p->error_indicator) // ['as' NAME]
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -7116,7 +7185,7 @@ except_block_rule(Parser *p)
Token * _literal;
asdl_stmt_seq* b;
if (
- (_keyword = _PyPegen_expect_token(p, 636)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 637)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -7228,13 +7297,13 @@ except_star_block_rule(Parser *p)
expr_ty e;
void *t;
if (
- (_keyword = _PyPegen_expect_token(p, 636)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 637)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
(e = expression_rule(p)) // expression
&&
- (t = _tmp_63_rule(p), !p->error_indicator) // ['as' NAME]
+ (t = _tmp_64_rule(p), !p->error_indicator) // ['as' NAME]
&&
(_literal_1 = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -7331,7 +7400,7 @@ finally_block_rule(Parser *p)
Token * _literal;
asdl_stmt_seq* a;
if (
- (_keyword = _PyPegen_expect_token(p, 632)) // token='finally'
+ (_keyword = _PyPegen_expect_token(p, 633)) // token='finally'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
@@ -7406,7 +7475,7 @@ match_stmt_rule(Parser *p)
&&
(indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT'
&&
- (cases = (asdl_match_case_seq*)_loop1_64_rule(p)) // case_block+
+ (cases = (asdl_match_case_seq*)_loop1_65_rule(p)) // case_block+
&&
(dedent_var = _PyPegen_expect_token(p, DEDENT)) // token='DEDENT'
)
@@ -7643,7 +7712,7 @@ guard_rule(Parser *p)
Token * _keyword;
expr_ty guard;
if (
- (_keyword = _PyPegen_expect_token(p, 641)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
&&
(guard = named_expression_rule(p)) // named_expression
)
@@ -7841,7 +7910,7 @@ as_pattern_rule(Parser *p)
if (
(pattern = or_pattern_rule(p)) // or_pattern
&&
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(target = pattern_capture_target_rule(p)) // pattern_capture_target
)
@@ -7924,7 +7993,7 @@ or_pattern_rule(Parser *p)
D(fprintf(stderr, "%*c> or_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'|'.closed_pattern+"));
asdl_pattern_seq* patterns;
if (
- (patterns = (asdl_pattern_seq*)_gather_65_rule(p)) // '|'.closed_pattern+
+ (patterns = (asdl_pattern_seq*)_gather_66_rule(p)) // '|'.closed_pattern+
)
{
D(fprintf(stderr, "%*c+ or_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'|'.closed_pattern+"));
@@ -8179,7 +8248,7 @@ literal_pattern_rule(Parser *p)
if (
(value = signed_number_rule(p)) // signed_number
&&
- _PyPegen_lookahead(0, _tmp_67_rule, p)
+ _PyPegen_lookahead(0, _tmp_68_rule, p)
)
{
D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "signed_number !('+' | '-')"));
@@ -8278,7 +8347,7 @@ literal_pattern_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 601)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 602)) // token='None'
)
{
D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
@@ -8311,7 +8380,7 @@ literal_pattern_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 600)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 601)) // token='True'
)
{
D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
@@ -8344,7 +8413,7 @@ literal_pattern_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 602)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 603)) // token='False'
)
{
D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
@@ -8414,7 +8483,7 @@ literal_expr_rule(Parser *p)
if (
(signed_number_var = signed_number_rule(p)) // signed_number
&&
- _PyPegen_lookahead(0, _tmp_68_rule, p)
+ _PyPegen_lookahead(0, _tmp_69_rule, p)
)
{
D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "signed_number !('+' | '-')"));
@@ -8471,7 +8540,7 @@ literal_expr_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 601)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 602)) // token='None'
)
{
D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
@@ -8504,7 +8573,7 @@ literal_expr_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 600)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 601)) // token='True'
)
{
D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
@@ -8537,7 +8606,7 @@ literal_expr_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 602)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 603)) // token='False'
)
{
D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
@@ -9021,7 +9090,7 @@ pattern_capture_target_rule(Parser *p)
&&
(name = _PyPegen_name_token(p)) // NAME
&&
- _PyPegen_lookahead(0, _tmp_69_rule, p)
+ _PyPegen_lookahead(0, _tmp_70_rule, p)
)
{
D(fprintf(stderr, "%*c+ pattern_capture_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!\"_\" NAME !('.' | '(' | '=')"));
@@ -9138,7 +9207,7 @@ value_pattern_rule(Parser *p)
if (
(attr = attr_rule(p)) // attr
&&
- _PyPegen_lookahead(0, _tmp_70_rule, p)
+ _PyPegen_lookahead(0, _tmp_71_rule, p)
)
{
D(fprintf(stderr, "%*c+ value_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "attr !('.' | '(' | '=')"));
@@ -9564,7 +9633,7 @@ maybe_sequence_pattern_rule(Parser *p)
UNUSED(_opt_var); // Silence compiler warnings
asdl_seq * patterns;
if (
- (patterns = _gather_71_rule(p)) // ','.maybe_star_pattern+
+ (patterns = _gather_72_rule(p)) // ','.maybe_star_pattern+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -9976,13 +10045,13 @@ items_pattern_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> items_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.key_value_pattern+"));
- asdl_seq * _gather_73_var;
+ asdl_seq * _gather_74_var;
if (
- (_gather_73_var = _gather_73_rule(p)) // ','.key_value_pattern+
+ (_gather_74_var = _gather_74_rule(p)) // ','.key_value_pattern+
)
{
D(fprintf(stderr, "%*c+ items_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.key_value_pattern+"));
- _res = _gather_73_var;
+ _res = _gather_74_var;
goto done;
}
p->mark = _mark;
@@ -10019,7 +10088,7 @@ key_value_pattern_rule(Parser *p)
void *key;
pattern_ty pattern;
if (
- (key = _tmp_75_rule(p)) // literal_expr | attr
+ (key = _tmp_76_rule(p)) // literal_expr | attr
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -10350,7 +10419,7 @@ positional_patterns_rule(Parser *p)
D(fprintf(stderr, "%*c> positional_patterns[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.pattern+"));
asdl_pattern_seq* args;
if (
- (args = (asdl_pattern_seq*)_gather_76_rule(p)) // ','.pattern+
+ (args = (asdl_pattern_seq*)_gather_77_rule(p)) // ','.pattern+
)
{
D(fprintf(stderr, "%*c+ positional_patterns[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.pattern+"));
@@ -10392,13 +10461,13 @@ keyword_patterns_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> keyword_patterns[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.keyword_pattern+"));
- asdl_seq * _gather_78_var;
+ asdl_seq * _gather_79_var;
if (
- (_gather_78_var = _gather_78_rule(p)) // ','.keyword_pattern+
+ (_gather_79_var = _gather_79_rule(p)) // ','.keyword_pattern+
)
{
D(fprintf(stderr, "%*c+ keyword_patterns[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.keyword_pattern+"));
- _res = _gather_78_var;
+ _res = _gather_79_var;
goto done;
}
p->mark = _mark;
@@ -10497,7 +10566,7 @@ expressions_rule(Parser *p)
if (
(a = expression_rule(p)) // expression
&&
- (b = _loop1_80_rule(p)) // ((',' expression))+
+ (b = _loop1_81_rule(p)) // ((',' expression))+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -10669,11 +10738,11 @@ expression_rule(Parser *p)
if (
(a = disjunction_rule(p)) // disjunction
&&
- (_keyword = _PyPegen_expect_token(p, 641)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
&&
(b = disjunction_rule(p)) // disjunction
&&
- (_keyword_1 = _PyPegen_expect_token(p, 644)) // token='else'
+ (_keyword_1 = _PyPegen_expect_token(p, 645)) // token='else'
&&
(c = expression_rule(p)) // expression
)
@@ -10780,7 +10849,7 @@ yield_expr_rule(Parser *p)
if (
(_keyword = _PyPegen_expect_token(p, 573)) // token='yield'
&&
- (_keyword_1 = _PyPegen_expect_token(p, 607)) // token='from'
+ (_keyword_1 = _PyPegen_expect_token(p, 608)) // token='from'
&&
(a = expression_rule(p)) // expression
)
@@ -10888,7 +10957,7 @@ star_expressions_rule(Parser *p)
if (
(a = star_expression_rule(p)) // star_expression
&&
- (b = _loop1_81_rule(p)) // ((',' star_expression))+
+ (b = _loop1_82_rule(p)) // ((',' star_expression))+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -11089,7 +11158,7 @@ star_named_expressions_rule(Parser *p)
UNUSED(_opt_var); // Silence compiler warnings
asdl_expr_seq* a;
if (
- (a = (asdl_expr_seq*)_gather_82_rule(p)) // ','.star_named_expression+
+ (a = (asdl_expr_seq*)_gather_83_rule(p)) // ','.star_named_expression+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -11389,7 +11458,7 @@ disjunction_rule(Parser *p)
if (
(a = conjunction_rule(p)) // conjunction
&&
- (b = _loop1_84_rule(p)) // (('or' conjunction))+
+ (b = _loop1_85_rule(p)) // (('or' conjunction))+
)
{
D(fprintf(stderr, "%*c+ disjunction[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "conjunction (('or' conjunction))+"));
@@ -11478,7 +11547,7 @@ conjunction_rule(Parser *p)
if (
(a = inversion_rule(p)) // inversion
&&
- (b = _loop1_85_rule(p)) // (('and' inversion))+
+ (b = _loop1_86_rule(p)) // (('and' inversion))+
)
{
D(fprintf(stderr, "%*c+ conjunction[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "inversion (('and' inversion))+"));
@@ -11652,7 +11721,7 @@ comparison_rule(Parser *p)
if (
(a = bitwise_or_rule(p)) // bitwise_or
&&
- (b = _loop1_86_rule(p)) // compare_op_bitwise_or_pair+
+ (b = _loop1_87_rule(p)) // compare_op_bitwise_or_pair+
)
{
D(fprintf(stderr, "%*c+ comparison[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_or compare_op_bitwise_or_pair+"));
@@ -11989,10 +12058,10 @@ noteq_bitwise_or_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> noteq_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('!=') bitwise_or"));
- void *_tmp_87_var;
+ void *_tmp_88_var;
expr_ty a;
if (
- (_tmp_87_var = _tmp_87_rule(p)) // '!='
+ (_tmp_88_var = _tmp_88_rule(p)) // '!='
&&
(a = bitwise_or_rule(p)) // bitwise_or
)
@@ -12230,7 +12299,7 @@ notin_bitwise_or_rule(Parser *p)
if (
(_keyword = _PyPegen_expect_token(p, 581)) // token='not'
&&
- (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in'
&&
(a = bitwise_or_rule(p)) // bitwise_or
)
@@ -12277,7 +12346,7 @@ in_bitwise_or_rule(Parser *p)
Token * _keyword;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 650)) // token='in'
+ (_keyword = _PyPegen_expect_token(p, 651)) // token='in'
&&
(a = bitwise_or_rule(p)) // bitwise_or
)
@@ -14027,7 +14096,7 @@ slices_rule(Parser *p)
UNUSED(_opt_var); // Silence compiler warnings
asdl_expr_seq* a;
if (
- (a = (asdl_expr_seq*)_gather_88_rule(p)) // ','.(slice | starred_expression)+
+ (a = (asdl_expr_seq*)_gather_89_rule(p)) // ','.(slice | starred_expression)+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -14100,7 +14169,7 @@ slice_rule(Parser *p)
&&
(b = expression_rule(p), !p->error_indicator) // expression?
&&
- (c = _tmp_90_rule(p), !p->error_indicator) // [':' expression?]
+ (c = _tmp_91_rule(p), !p->error_indicator) // [':' expression?]
)
{
D(fprintf(stderr, "%*c+ slice[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression? ':' expression? [':' expression?]"));
@@ -14160,7 +14229,7 @@ slice_rule(Parser *p)
// | 'True'
// | 'False'
// | 'None'
-// | &STRING strings
+// | &(STRING | FSTRING_START) strings
// | NUMBER
// | &'(' (tuple | group | genexp)
// | &'[' (list | listcomp)
@@ -14215,7 +14284,7 @@ atom_rule(Parser *p)
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 600)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 601)) // token='True'
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
@@ -14248,7 +14317,7 @@ atom_rule(Parser *p)
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 602)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 603)) // token='False'
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
@@ -14281,7 +14350,7 @@ atom_rule(Parser *p)
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 601)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 602)) // token='None'
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
@@ -14306,26 +14375,26 @@ atom_rule(Parser *p)
D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'None'"));
}
- { // &STRING strings
+ { // &(STRING | FSTRING_START) strings
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&STRING strings"));
+ D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&(STRING | FSTRING_START) strings"));
expr_ty strings_var;
if (
- _PyPegen_lookahead(1, _PyPegen_string_token, p)
+ _PyPegen_lookahead(1, _tmp_92_rule, p)
&&
(strings_var = strings_rule(p)) // strings
)
{
- D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&STRING strings"));
+ D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&(STRING | FSTRING_START) strings"));
_res = strings_var;
goto done;
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&STRING strings"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&(STRING | FSTRING_START) strings"));
}
{ // NUMBER
if (p->error_indicator) {
@@ -14352,15 +14421,15 @@ atom_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'(' (tuple | group | genexp)"));
- void *_tmp_91_var;
+ void *_tmp_93_var;
if (
_PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 7) // token='('
&&
- (_tmp_91_var = _tmp_91_rule(p)) // tuple | group | genexp
+ (_tmp_93_var = _tmp_93_rule(p)) // tuple | group | genexp
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'(' (tuple | group | genexp)"));
- _res = _tmp_91_var;
+ _res = _tmp_93_var;
goto done;
}
p->mark = _mark;
@@ -14373,15 +14442,15 @@ atom_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'[' (list | listcomp)"));
- void *_tmp_92_var;
+ void *_tmp_94_var;
if (
_PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 9) // token='['
&&
- (_tmp_92_var = _tmp_92_rule(p)) // list | listcomp
+ (_tmp_94_var = _tmp_94_rule(p)) // list | listcomp
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'[' (list | listcomp)"));
- _res = _tmp_92_var;
+ _res = _tmp_94_var;
goto done;
}
p->mark = _mark;
@@ -14394,15 +14463,15 @@ atom_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'{' (dict | set | dictcomp | setcomp)"));
- void *_tmp_93_var;
+ void *_tmp_95_var;
if (
_PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 25) // token='{'
&&
- (_tmp_93_var = _tmp_93_rule(p)) // dict | set | dictcomp | setcomp
+ (_tmp_95_var = _tmp_95_rule(p)) // dict | set | dictcomp | setcomp
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'{' (dict | set | dictcomp | setcomp)"));
- _res = _tmp_93_var;
+ _res = _tmp_95_var;
goto done;
}
p->mark = _mark;
@@ -14474,7 +14543,7 @@ group_rule(Parser *p)
if (
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
- (a = _tmp_94_rule(p)) // yield_expr | named_expression
+ (a = _tmp_96_rule(p)) // yield_expr | named_expression
&&
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
)
@@ -14551,7 +14620,7 @@ lambdef_rule(Parser *p)
void *a;
expr_ty b;
if (
- (_keyword = _PyPegen_expect_token(p, 586)) // token='lambda'
+ (_keyword = _PyPegen_expect_token(p, 600)) // token='lambda'
&&
(a = lambda_params_rule(p), !p->error_indicator) // lambda_params?
&&
@@ -14678,9 +14747,9 @@ lambda_parameters_rule(Parser *p)
if (
(a = lambda_slash_no_default_rule(p)) // lambda_slash_no_default
&&
- (b = (asdl_arg_seq*)_loop0_95_rule(p)) // lambda_param_no_default*
+ (b = (asdl_arg_seq*)_loop0_97_rule(p)) // lambda_param_no_default*
&&
- (c = _loop0_96_rule(p)) // lambda_param_with_default*
+ (c = _loop0_98_rule(p)) // lambda_param_with_default*
&&
(d = lambda_star_etc_rule(p), !p->error_indicator) // lambda_star_etc?
)
@@ -14710,7 +14779,7 @@ lambda_parameters_rule(Parser *p)
if (
(a = lambda_slash_with_default_rule(p)) // lambda_slash_with_default
&&
- (b = _loop0_97_rule(p)) // lambda_param_with_default*
+ (b = _loop0_99_rule(p)) // lambda_param_with_default*
&&
(c = lambda_star_etc_rule(p), !p->error_indicator) // lambda_star_etc?
)
@@ -14738,9 +14807,9 @@ lambda_parameters_rule(Parser *p)
asdl_seq * b;
void *c;
if (
- (a = (asdl_arg_seq*)_loop1_98_rule(p)) // lambda_param_no_default+
+ (a = (asdl_arg_seq*)_loop1_100_rule(p)) // lambda_param_no_default+
&&
- (b = _loop0_99_rule(p)) // lambda_param_with_default*
+ (b = _loop0_101_rule(p)) // lambda_param_with_default*
&&
(c = lambda_star_etc_rule(p), !p->error_indicator) // lambda_star_etc?
)
@@ -14767,7 +14836,7 @@ lambda_parameters_rule(Parser *p)
asdl_seq * a;
void *b;
if (
- (a = _loop1_100_rule(p)) // lambda_param_with_default+
+ (a = _loop1_102_rule(p)) // lambda_param_with_default+
&&
(b = lambda_star_etc_rule(p), !p->error_indicator) // lambda_star_etc?
)
@@ -14841,7 +14910,7 @@ lambda_slash_no_default_rule(Parser *p)
Token * _literal_1;
asdl_arg_seq* a;
if (
- (a = (asdl_arg_seq*)_loop1_101_rule(p)) // lambda_param_no_default+
+ (a = (asdl_arg_seq*)_loop1_103_rule(p)) // lambda_param_no_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -14870,7 +14939,7 @@ lambda_slash_no_default_rule(Parser *p)
Token * _literal;
asdl_arg_seq* a;
if (
- (a = (asdl_arg_seq*)_loop1_102_rule(p)) // lambda_param_no_default+
+ (a = (asdl_arg_seq*)_loop1_104_rule(p)) // lambda_param_no_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -14923,9 +14992,9 @@ lambda_slash_with_default_rule(Parser *p)
asdl_seq * a;
asdl_seq * b;
if (
- (a = _loop0_103_rule(p)) // lambda_param_no_default*
+ (a = _loop0_105_rule(p)) // lambda_param_no_default*
&&
- (b = _loop1_104_rule(p)) // lambda_param_with_default+
+ (b = _loop1_106_rule(p)) // lambda_param_with_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -14955,9 +15024,9 @@ lambda_slash_with_default_rule(Parser *p)
asdl_seq * a;
asdl_seq * b;
if (
- (a = _loop0_105_rule(p)) // lambda_param_no_default*
+ (a = _loop0_107_rule(p)) // lambda_param_no_default*
&&
- (b = _loop1_106_rule(p)) // lambda_param_with_default+
+ (b = _loop1_108_rule(p)) // lambda_param_with_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -15035,7 +15104,7 @@ lambda_star_etc_rule(Parser *p)
&&
(a = lambda_param_no_default_rule(p)) // lambda_param_no_default
&&
- (b = _loop0_107_rule(p)) // lambda_param_maybe_default*
+ (b = _loop0_109_rule(p)) // lambda_param_maybe_default*
&&
(c = lambda_kwds_rule(p), !p->error_indicator) // lambda_kwds?
)
@@ -15068,7 +15137,7 @@ lambda_star_etc_rule(Parser *p)
&&
(_literal_1 = _PyPegen_expect_token(p, 12)) // token=','
&&
- (b = _loop1_108_rule(p)) // lambda_param_maybe_default+
+ (b = _loop1_110_rule(p)) // lambda_param_maybe_default+
&&
(c = lambda_kwds_rule(p), !p->error_indicator) // lambda_kwds?
)
@@ -15475,7 +15544,387 @@ lambda_param_rule(Parser *p)
return _res;
}
-// strings: STRING+
+// fstring_middle: fstring_replacement_field | FSTRING_MIDDLE
+static expr_ty
+fstring_middle_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ expr_ty _res = NULL;
+ int _mark = p->mark;
+ { // fstring_replacement_field
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> fstring_middle[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring_replacement_field"));
+ expr_ty fstring_replacement_field_var;
+ if (
+ (fstring_replacement_field_var = fstring_replacement_field_rule(p)) // fstring_replacement_field
+ )
+ {
+ D(fprintf(stderr, "%*c+ fstring_middle[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "fstring_replacement_field"));
+ _res = fstring_replacement_field_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s fstring_middle[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "fstring_replacement_field"));
+ }
+ { // FSTRING_MIDDLE
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> fstring_middle[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "FSTRING_MIDDLE"));
+ Token * t;
+ if (
+ (t = _PyPegen_expect_token(p, FSTRING_MIDDLE)) // token='FSTRING_MIDDLE'
+ )
+ {
+ D(fprintf(stderr, "%*c+ fstring_middle[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "FSTRING_MIDDLE"));
+ _res = _PyPegen_constant_from_token ( p , t );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s fstring_middle[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "FSTRING_MIDDLE"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// fstring_replacement_field:
+// | '{' (yield_expr | star_expressions) "="? fstring_conversion? fstring_full_format_spec? '}'
+// | invalid_replacement_field
+static expr_ty
+fstring_replacement_field_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ expr_ty _res = NULL;
+ int _mark = p->mark;
+ if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ int _start_lineno = p->tokens[_mark]->lineno;
+ UNUSED(_start_lineno); // Only used by EXTRA macro
+ int _start_col_offset = p->tokens[_mark]->col_offset;
+ UNUSED(_start_col_offset); // Only used by EXTRA macro
+ { // '{' (yield_expr | star_expressions) "="? fstring_conversion? fstring_full_format_spec? '}'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> fstring_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) \"=\"? fstring_conversion? fstring_full_format_spec? '}'"));
+ Token * _literal;
+ Token * _literal_1;
+ void *a;
+ void *conversion;
+ void *debug_expr;
+ void *format;
+ if (
+ (_literal = _PyPegen_expect_token(p, 25)) // token='{'
+ &&
+ (a = _tmp_111_rule(p)) // yield_expr | star_expressions
+ &&
+ (debug_expr = _PyPegen_expect_token(p, 22), !p->error_indicator) // "="?
+ &&
+ (conversion = fstring_conversion_rule(p), !p->error_indicator) // fstring_conversion?
+ &&
+ (format = fstring_full_format_spec_rule(p), !p->error_indicator) // fstring_full_format_spec?
+ &&
+ (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}'
+ )
+ {
+ D(fprintf(stderr, "%*c+ fstring_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) \"=\"? fstring_conversion? fstring_full_format_spec? '}'"));
+ Token *_token = _PyPegen_get_last_nonnwhitespace_token(p);
+ if (_token == NULL) {
+ p->level--;
+ return NULL;
+ }
+ int _end_lineno = _token->end_lineno;
+ UNUSED(_end_lineno); // Only used by EXTRA macro
+ int _end_col_offset = _token->end_col_offset;
+ UNUSED(_end_col_offset); // Only used by EXTRA macro
+ _res = _PyPegen_formatted_value ( p , a , debug_expr , conversion , format , EXTRA );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s fstring_replacement_field[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' (yield_expr | star_expressions) \"=\"? fstring_conversion? fstring_full_format_spec? '}'"));
+ }
+ if (p->call_invalid_rules) { // invalid_replacement_field
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> fstring_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_replacement_field"));
+ void *invalid_replacement_field_var;
+ if (
+ (invalid_replacement_field_var = invalid_replacement_field_rule(p)) // invalid_replacement_field
+ )
+ {
+ D(fprintf(stderr, "%*c+ fstring_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_replacement_field"));
+ _res = invalid_replacement_field_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s fstring_replacement_field[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_replacement_field"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// fstring_conversion: "!" NAME
+static expr_ty
+fstring_conversion_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ expr_ty _res = NULL;
+ int _mark = p->mark;
+ { // "!" NAME
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> fstring_conversion[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "\"!\" NAME"));
+ expr_ty conv;
+ Token * conv_token;
+ if (
+ (conv_token = _PyPegen_expect_token(p, 54)) // token='!'
+ &&
+ (conv = _PyPegen_name_token(p)) // NAME
+ )
+ {
+ D(fprintf(stderr, "%*c+ fstring_conversion[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "\"!\" NAME"));
+ _res = _PyPegen_check_fstring_conversion ( p , conv_token , conv );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s fstring_conversion[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "\"!\" NAME"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// fstring_full_format_spec: ':' fstring_format_spec*
+static expr_ty
+fstring_full_format_spec_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ expr_ty _res = NULL;
+ int _mark = p->mark;
+ if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ int _start_lineno = p->tokens[_mark]->lineno;
+ UNUSED(_start_lineno); // Only used by EXTRA macro
+ int _start_col_offset = p->tokens[_mark]->col_offset;
+ UNUSED(_start_col_offset); // Only used by EXTRA macro
+ { // ':' fstring_format_spec*
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> fstring_full_format_spec[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':' fstring_format_spec*"));
+ Token * _literal;
+ asdl_seq * spec;
+ if (
+ (_literal = _PyPegen_expect_token(p, 11)) // token=':'
+ &&
+ (spec = _loop0_112_rule(p)) // fstring_format_spec*
+ )
+ {
+ D(fprintf(stderr, "%*c+ fstring_full_format_spec[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':' fstring_format_spec*"));
+ Token *_token = _PyPegen_get_last_nonnwhitespace_token(p);
+ if (_token == NULL) {
+ p->level--;
+ return NULL;
+ }
+ int _end_lineno = _token->end_lineno;
+ UNUSED(_end_lineno); // Only used by EXTRA macro
+ int _end_col_offset = _token->end_col_offset;
+ UNUSED(_end_col_offset); // Only used by EXTRA macro
+ _res = spec ? _PyAST_JoinedStr ( ( asdl_expr_seq* ) spec , EXTRA ) : NULL;
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s fstring_full_format_spec[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':' fstring_format_spec*"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// fstring_format_spec: FSTRING_MIDDLE | fstring_replacement_field
+static expr_ty
+fstring_format_spec_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ expr_ty _res = NULL;
+ int _mark = p->mark;
+ { // FSTRING_MIDDLE
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> fstring_format_spec[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "FSTRING_MIDDLE"));
+ Token * t;
+ if (
+ (t = _PyPegen_expect_token(p, FSTRING_MIDDLE)) // token='FSTRING_MIDDLE'
+ )
+ {
+ D(fprintf(stderr, "%*c+ fstring_format_spec[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "FSTRING_MIDDLE"));
+ _res = _PyPegen_constant_from_token ( p , t );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s fstring_format_spec[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "FSTRING_MIDDLE"));
+ }
+ { // fstring_replacement_field
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> fstring_format_spec[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring_replacement_field"));
+ expr_ty fstring_replacement_field_var;
+ if (
+ (fstring_replacement_field_var = fstring_replacement_field_rule(p)) // fstring_replacement_field
+ )
+ {
+ D(fprintf(stderr, "%*c+ fstring_format_spec[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "fstring_replacement_field"));
+ _res = fstring_replacement_field_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s fstring_format_spec[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "fstring_replacement_field"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// string: STRING
+static expr_ty
+string_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ expr_ty _res = NULL;
+ int _mark = p->mark;
+ { // STRING
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> string[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "STRING"));
+ Token* s;
+ if (
+ (s = (Token*)_PyPegen_string_token(p)) // STRING
+ )
+ {
+ D(fprintf(stderr, "%*c+ string[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "STRING"));
+ _res = _PyPegen_constant_from_string ( p , s );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s string[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "STRING"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// strings: ((fstring | string))+
static expr_ty
strings_rule(Parser *p)
{
@@ -15493,19 +15942,37 @@ strings_rule(Parser *p)
return _res;
}
int _mark = p->mark;
- { // STRING+
+ if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ int _start_lineno = p->tokens[_mark]->lineno;
+ UNUSED(_start_lineno); // Only used by EXTRA macro
+ int _start_col_offset = p->tokens[_mark]->col_offset;
+ UNUSED(_start_col_offset); // Only used by EXTRA macro
+ { // ((fstring | string))+
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> strings[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "STRING+"));
- asdl_seq * a;
+ D(fprintf(stderr, "%*c> strings[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((fstring | string))+"));
+ asdl_expr_seq* a;
if (
- (a = _loop1_109_rule(p)) // STRING+
+ (a = (asdl_expr_seq*)_loop1_113_rule(p)) // ((fstring | string))+
)
{
- D(fprintf(stderr, "%*c+ strings[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "STRING+"));
- _res = _PyPegen_concatenate_strings ( p , a );
+ D(fprintf(stderr, "%*c+ strings[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "((fstring | string))+"));
+ Token *_token = _PyPegen_get_last_nonnwhitespace_token(p);
+ if (_token == NULL) {
+ p->level--;
+ return NULL;
+ }
+ int _end_lineno = _token->end_lineno;
+ UNUSED(_end_lineno); // Only used by EXTRA macro
+ int _end_col_offset = _token->end_col_offset;
+ UNUSED(_end_col_offset); // Only used by EXTRA macro
+ _res = _PyPegen_concatenate_strings ( p , a , EXTRA );
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
p->level--;
@@ -15515,7 +15982,7 @@ strings_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s strings[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "STRING+"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "((fstring | string))+"));
}
_res = NULL;
done:
@@ -15627,7 +16094,7 @@ tuple_rule(Parser *p)
if (
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
- (a = _tmp_110_rule(p), !p->error_indicator) // [star_named_expression ',' star_named_expressions?]
+ (a = _tmp_114_rule(p), !p->error_indicator) // [star_named_expression ',' star_named_expressions?]
&&
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
)
@@ -15845,7 +16312,7 @@ double_starred_kvpairs_rule(Parser *p)
UNUSED(_opt_var); // Silence compiler warnings
asdl_seq * a;
if (
- (a = _gather_111_rule(p)) // ','.double_starred_kvpair+
+ (a = _gather_115_rule(p)) // ','.double_starred_kvpair+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -16007,7 +16474,7 @@ for_if_clauses_rule(Parser *p)
D(fprintf(stderr, "%*c> for_if_clauses[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "for_if_clause+"));
asdl_comprehension_seq* a;
if (
- (a = (asdl_comprehension_seq*)_loop1_113_rule(p)) // for_if_clause+
+ (a = (asdl_comprehension_seq*)_loop1_117_rule(p)) // for_if_clause+
)
{
D(fprintf(stderr, "%*c+ for_if_clauses[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "for_if_clause+"));
@@ -16062,17 +16529,17 @@ for_if_clause_rule(Parser *p)
if (
(async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
&&
- (_keyword = _PyPegen_expect_token(p, 649)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 650)) // token='for'
&&
(a = star_targets_rule(p)) // star_targets
&&
- (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in'
&&
(_cut_var = 1)
&&
(b = disjunction_rule(p)) // disjunction
&&
- (c = (asdl_expr_seq*)_loop0_114_rule(p)) // (('if' disjunction))*
+ (c = (asdl_expr_seq*)_loop0_118_rule(p)) // (('if' disjunction))*
)
{
D(fprintf(stderr, "%*c+ for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' ~ disjunction (('if' disjunction))*"));
@@ -16105,17 +16572,17 @@ for_if_clause_rule(Parser *p)
expr_ty b;
asdl_expr_seq* c;
if (
- (_keyword = _PyPegen_expect_token(p, 649)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 650)) // token='for'
&&
(a = star_targets_rule(p)) // star_targets
&&
- (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in'
&&
(_cut_var = 1)
&&
(b = disjunction_rule(p)) // disjunction
&&
- (c = (asdl_expr_seq*)_loop0_115_rule(p)) // (('if' disjunction))*
+ (c = (asdl_expr_seq*)_loop0_119_rule(p)) // (('if' disjunction))*
)
{
D(fprintf(stderr, "%*c+ for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for' star_targets 'in' ~ disjunction (('if' disjunction))*"));
@@ -16378,7 +16845,7 @@ genexp_rule(Parser *p)
if (
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
- (a = _tmp_116_rule(p)) // assignment_expression | expression !':='
+ (a = _tmp_120_rule(p)) // assignment_expression | expression !':='
&&
(b = for_if_clauses_rule(p)) // for_if_clauses
&&
@@ -16630,9 +17097,9 @@ args_rule(Parser *p)
asdl_expr_seq* a;
void *b;
if (
- (a = (asdl_expr_seq*)_gather_117_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+
+ (a = (asdl_expr_seq*)_gather_121_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+
&&
- (b = _tmp_119_rule(p), !p->error_indicator) // [',' kwargs]
+ (b = _tmp_123_rule(p), !p->error_indicator) // [',' kwargs]
)
{
D(fprintf(stderr, "%*c+ args[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.(starred_expression | (assignment_expression | expression !':=') !'=')+ [',' kwargs]"));
@@ -16723,11 +17190,11 @@ kwargs_rule(Parser *p)
asdl_seq * a;
asdl_seq * b;
if (
- (a = _gather_120_rule(p)) // ','.kwarg_or_starred+
+ (a = _gather_124_rule(p)) // ','.kwarg_or_starred+
&&
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (b = _gather_122_rule(p)) // ','.kwarg_or_double_starred+
+ (b = _gather_126_rule(p)) // ','.kwarg_or_double_starred+
)
{
D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+"));
@@ -16749,13 +17216,13 @@ kwargs_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> kwargs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+"));
- asdl_seq * _gather_124_var;
+ asdl_seq * _gather_128_var;
if (
- (_gather_124_var = _gather_124_rule(p)) // ','.kwarg_or_starred+
+ (_gather_128_var = _gather_128_rule(p)) // ','.kwarg_or_starred+
)
{
D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+"));
- _res = _gather_124_var;
+ _res = _gather_128_var;
goto done;
}
p->mark = _mark;
@@ -16768,13 +17235,13 @@ kwargs_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> kwargs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_double_starred+"));
- asdl_seq * _gather_126_var;
+ asdl_seq * _gather_130_var;
if (
- (_gather_126_var = _gather_126_rule(p)) // ','.kwarg_or_double_starred+
+ (_gather_130_var = _gather_130_rule(p)) // ','.kwarg_or_double_starred+
)
{
D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_double_starred+"));
- _res = _gather_126_var;
+ _res = _gather_130_var;
goto done;
}
p->mark = _mark;
@@ -17167,7 +17634,7 @@ star_targets_rule(Parser *p)
if (
(a = star_target_rule(p)) // star_target
&&
- (b = _loop0_128_rule(p)) // ((',' star_target))*
+ (b = _loop0_132_rule(p)) // ((',' star_target))*
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -17224,7 +17691,7 @@ star_targets_list_seq_rule(Parser *p)
UNUSED(_opt_var); // Silence compiler warnings
asdl_expr_seq* a;
if (
- (a = (asdl_expr_seq*)_gather_129_rule(p)) // ','.star_target+
+ (a = (asdl_expr_seq*)_gather_133_rule(p)) // ','.star_target+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -17275,7 +17742,7 @@ star_targets_tuple_seq_rule(Parser *p)
if (
(a = star_target_rule(p)) // star_target
&&
- (b = _loop1_131_rule(p)) // ((',' star_target))+
+ (b = _loop1_135_rule(p)) // ((',' star_target))+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -17364,7 +17831,7 @@ star_target_rule(Parser *p)
if (
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (a = _tmp_132_rule(p)) // !'*' star_target
+ (a = _tmp_136_rule(p)) // !'*' star_target
)
{
D(fprintf(stderr, "%*c+ star_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (!'*' star_target)"));
@@ -18295,7 +18762,7 @@ del_targets_rule(Parser *p)
UNUSED(_opt_var); // Silence compiler warnings
asdl_expr_seq* a;
if (
- (a = (asdl_expr_seq*)_gather_133_rule(p)) // ','.del_target+
+ (a = (asdl_expr_seq*)_gather_137_rule(p)) // ','.del_target+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -18656,7 +19123,7 @@ type_expressions_rule(Parser *p)
expr_ty b;
expr_ty c;
if (
- (a = _gather_135_rule(p)) // ','.expression+
+ (a = _gather_139_rule(p)) // ','.expression+
&&
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
@@ -18695,7 +19162,7 @@ type_expressions_rule(Parser *p)
asdl_seq * a;
expr_ty b;
if (
- (a = _gather_137_rule(p)) // ','.expression+
+ (a = _gather_141_rule(p)) // ','.expression+
&&
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
@@ -18728,7 +19195,7 @@ type_expressions_rule(Parser *p)
asdl_seq * a;
expr_ty b;
if (
- (a = _gather_139_rule(p)) // ','.expression+
+ (a = _gather_143_rule(p)) // ','.expression+
&&
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
@@ -18848,7 +19315,7 @@ type_expressions_rule(Parser *p)
D(fprintf(stderr, "%*c> type_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.expression+"));
asdl_expr_seq* a;
if (
- (a = (asdl_expr_seq*)_gather_141_rule(p)) // ','.expression+
+ (a = (asdl_expr_seq*)_gather_145_rule(p)) // ','.expression+
)
{
D(fprintf(stderr, "%*c+ type_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.expression+"));
@@ -18900,7 +19367,7 @@ func_type_comment_rule(Parser *p)
&&
(t = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT'
&&
- _PyPegen_lookahead(1, _tmp_143_rule, p)
+ _PyPegen_lookahead(1, _tmp_147_rule, p)
)
{
D(fprintf(stderr, "%*c+ func_type_comment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE TYPE_COMMENT &(NEWLINE INDENT)"));
@@ -19029,7 +19496,7 @@ invalid_arguments_rule(Parser *p)
&&
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (_opt_var = _tmp_144_rule(p), !p->error_indicator) // [args | expression for_if_clauses]
+ (_opt_var = _tmp_148_rule(p), !p->error_indicator) // [args | expression for_if_clauses]
)
{
D(fprintf(stderr, "%*c+ invalid_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses ',' [args | expression for_if_clauses]"));
@@ -19089,13 +19556,13 @@ invalid_arguments_rule(Parser *p)
expr_ty a;
Token * b;
if (
- (_opt_var = _tmp_145_rule(p), !p->error_indicator) // [(args ',')]
+ (_opt_var = _tmp_149_rule(p), !p->error_indicator) // [(args ',')]
&&
(a = _PyPegen_name_token(p)) // NAME
&&
(b = _PyPegen_expect_token(p, 22)) // token='='
&&
- _PyPegen_lookahead(1, _tmp_146_rule, p)
+ _PyPegen_lookahead(1, _tmp_150_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "[(args ',')] NAME '=' &(',' | ')')"));
@@ -19234,7 +19701,7 @@ invalid_kwarg_rule(Parser *p)
Token* a;
Token * b;
if (
- (a = (Token*)_tmp_147_rule(p)) // 'True' | 'False' | 'None'
+ (a = (Token*)_tmp_151_rule(p)) // 'True' | 'False' | 'None'
&&
(b = _PyPegen_expect_token(p, 22)) // token='='
)
@@ -19294,7 +19761,7 @@ invalid_kwarg_rule(Parser *p)
expr_ty a;
Token * b;
if (
- _PyPegen_lookahead(0, _tmp_148_rule, p)
+ _PyPegen_lookahead(0, _tmp_152_rule, p)
&&
(a = expression_rule(p)) // expression
&&
@@ -19398,11 +19865,11 @@ expression_without_invalid_rule(Parser *p)
if (
(a = disjunction_rule(p)) // disjunction
&&
- (_keyword = _PyPegen_expect_token(p, 641)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
&&
(b = disjunction_rule(p)) // disjunction
&&
- (_keyword_1 = _PyPegen_expect_token(p, 644)) // token='else'
+ (_keyword_1 = _PyPegen_expect_token(p, 645)) // token='else'
&&
(c = expression_rule(p)) // expression
)
@@ -19530,6 +19997,7 @@ invalid_legacy_expression_rule(Parser *p)
// invalid_expression:
// | !(NAME STRING | SOFT_KEYWORD) disjunction expression_without_invalid
// | disjunction 'if' disjunction !('else' | ':')
+// | 'lambda' lambda_params? ':' &(FSTRING_MIDDLE | fstring_replacement_field)
static void *
invalid_expression_rule(Parser *p)
{
@@ -19552,7 +20020,7 @@ invalid_expression_rule(Parser *p)
expr_ty a;
expr_ty b;
if (
- _PyPegen_lookahead(0, _tmp_149_rule, p)
+ _PyPegen_lookahead(0, _tmp_153_rule, p)
&&
(a = disjunction_rule(p)) // disjunction
&&
@@ -19584,11 +20052,11 @@ invalid_expression_rule(Parser *p)
if (
(a = disjunction_rule(p)) // disjunction
&&
- (_keyword = _PyPegen_expect_token(p, 641)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
&&
(b = disjunction_rule(p)) // disjunction
&&
- _PyPegen_lookahead(0, _tmp_150_rule, p)
+ _PyPegen_lookahead(0, _tmp_154_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "disjunction 'if' disjunction !('else' | ':')"));
@@ -19604,6 +20072,39 @@ invalid_expression_rule(Parser *p)
D(fprintf(stderr, "%*c%s invalid_expression[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "disjunction 'if' disjunction !('else' | ':')"));
}
+ { // 'lambda' lambda_params? ':' &(FSTRING_MIDDLE | fstring_replacement_field)
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'lambda' lambda_params? ':' &(FSTRING_MIDDLE | fstring_replacement_field)"));
+ void *_opt_var;
+ UNUSED(_opt_var); // Silence compiler warnings
+ Token * a;
+ Token * b;
+ if (
+ (a = _PyPegen_expect_token(p, 600)) // token='lambda'
+ &&
+ (_opt_var = lambda_params_rule(p), !p->error_indicator) // lambda_params?
+ &&
+ (b = _PyPegen_expect_token(p, 11)) // token=':'
+ &&
+ _PyPegen_lookahead(1, _tmp_155_rule, p)
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'lambda' lambda_params? ':' &(FSTRING_MIDDLE | fstring_replacement_field)"));
+ _res = RAISE_SYNTAX_ERROR_KNOWN_RANGE ( a , b , "f-string: lambda expressions are not allowed without parentheses" );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_expression[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'lambda' lambda_params? ':' &(FSTRING_MIDDLE | fstring_replacement_field)"));
+ }
_res = NULL;
done:
p->level--;
@@ -19677,7 +20178,7 @@ invalid_named_expression_rule(Parser *p)
&&
(b = bitwise_or_rule(p)) // bitwise_or
&&
- _PyPegen_lookahead(0, _tmp_151_rule, p)
+ _PyPegen_lookahead(0, _tmp_156_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '=' bitwise_or !('=' | ':=')"));
@@ -19703,7 +20204,7 @@ invalid_named_expression_rule(Parser *p)
Token * b;
expr_ty bitwise_or_var;
if (
- _PyPegen_lookahead(0, _tmp_152_rule, p)
+ _PyPegen_lookahead(0, _tmp_157_rule, p)
&&
(a = bitwise_or_rule(p)) // bitwise_or
&&
@@ -19711,7 +20212,7 @@ invalid_named_expression_rule(Parser *p)
&&
(bitwise_or_var = bitwise_or_rule(p)) // bitwise_or
&&
- _PyPegen_lookahead(0, _tmp_153_rule, p)
+ _PyPegen_lookahead(0, _tmp_158_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!(list | tuple | genexp | 'True' | 'None' | 'False') bitwise_or '=' bitwise_or !('=' | ':=')"));
@@ -19792,7 +20293,7 @@ invalid_assignment_rule(Parser *p)
D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions* ':' expression"));
Token * _literal;
Token * _literal_1;
- asdl_seq * _loop0_154_var;
+ asdl_seq * _loop0_159_var;
expr_ty a;
expr_ty expression_var;
if (
@@ -19800,7 +20301,7 @@ invalid_assignment_rule(Parser *p)
&&
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (_loop0_154_var = _loop0_154_rule(p)) // star_named_expressions*
+ (_loop0_159_var = _loop0_159_rule(p)) // star_named_expressions*
&&
(_literal_1 = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -19857,10 +20358,10 @@ invalid_assignment_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* star_expressions '='"));
Token * _literal;
- asdl_seq * _loop0_155_var;
+ asdl_seq * _loop0_160_var;
expr_ty a;
if (
- (_loop0_155_var = _loop0_155_rule(p)) // ((star_targets '='))*
+ (_loop0_160_var = _loop0_160_rule(p)) // ((star_targets '='))*
&&
(a = star_expressions_rule(p)) // star_expressions
&&
@@ -19887,10 +20388,10 @@ invalid_assignment_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* yield_expr '='"));
Token * _literal;
- asdl_seq * _loop0_156_var;
+ asdl_seq * _loop0_161_var;
expr_ty a;
if (
- (_loop0_156_var = _loop0_156_rule(p)) // ((star_targets '='))*
+ (_loop0_161_var = _loop0_161_rule(p)) // ((star_targets '='))*
&&
(a = yield_expr_rule(p)) // yield_expr
&&
@@ -19916,7 +20417,7 @@ invalid_assignment_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)"));
- void *_tmp_157_var;
+ void *_tmp_162_var;
expr_ty a;
AugOperator* augassign_var;
if (
@@ -19924,7 +20425,7 @@ invalid_assignment_rule(Parser *p)
&&
(augassign_var = augassign_rule(p)) // augassign
&&
- (_tmp_157_var = _tmp_157_rule(p)) // yield_expr | star_expressions
+ (_tmp_162_var = _tmp_162_rule(p)) // yield_expr | star_expressions
)
{
D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)"));
@@ -20057,7 +20558,7 @@ invalid_del_stmt_rule(Parser *p)
Token * _keyword;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 603)) // token='del'
+ (_keyword = _PyPegen_expect_token(p, 604)) // token='del'
&&
(a = star_expressions_rule(p)) // star_expressions
)
@@ -20150,11 +20651,11 @@ invalid_comprehension_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '(' | '{') starred_expression for_if_clauses"));
- void *_tmp_158_var;
+ void *_tmp_163_var;
expr_ty a;
asdl_comprehension_seq* for_if_clauses_var;
if (
- (_tmp_158_var = _tmp_158_rule(p)) // '[' | '(' | '{'
+ (_tmp_163_var = _tmp_163_rule(p)) // '[' | '(' | '{'
&&
(a = starred_expression_rule(p)) // starred_expression
&&
@@ -20181,12 +20682,12 @@ invalid_comprehension_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '{') star_named_expression ',' star_named_expressions for_if_clauses"));
Token * _literal;
- void *_tmp_159_var;
+ void *_tmp_164_var;
expr_ty a;
asdl_expr_seq* b;
asdl_comprehension_seq* for_if_clauses_var;
if (
- (_tmp_159_var = _tmp_159_rule(p)) // '[' | '{'
+ (_tmp_164_var = _tmp_164_rule(p)) // '[' | '{'
&&
(a = star_named_expression_rule(p)) // star_named_expression
&&
@@ -20216,12 +20717,12 @@ invalid_comprehension_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '{') star_named_expression ',' for_if_clauses"));
- void *_tmp_160_var;
+ void *_tmp_165_var;
expr_ty a;
Token * b;
asdl_comprehension_seq* for_if_clauses_var;
if (
- (_tmp_160_var = _tmp_160_rule(p)) // '[' | '{'
+ (_tmp_165_var = _tmp_165_rule(p)) // '[' | '{'
&&
(a = star_named_expression_rule(p)) // star_named_expression
&&
@@ -20358,13 +20859,13 @@ invalid_parameters_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(slash_no_default | slash_with_default) param_maybe_default* '/'"));
- asdl_seq * _loop0_162_var;
- void *_tmp_161_var;
+ asdl_seq * _loop0_167_var;
+ void *_tmp_166_var;
Token * a;
if (
- (_tmp_161_var = _tmp_161_rule(p)) // slash_no_default | slash_with_default
+ (_tmp_166_var = _tmp_166_rule(p)) // slash_no_default | slash_with_default
&&
- (_loop0_162_var = _loop0_162_rule(p)) // param_maybe_default*
+ (_loop0_167_var = _loop0_167_rule(p)) // param_maybe_default*
&&
(a = _PyPegen_expect_token(p, 17)) // token='/'
)
@@ -20388,7 +20889,7 @@ invalid_parameters_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default? param_no_default* invalid_parameters_helper param_no_default"));
- asdl_seq * _loop0_163_var;
+ asdl_seq * _loop0_168_var;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
arg_ty a;
@@ -20396,7 +20897,7 @@ invalid_parameters_rule(Parser *p)
if (
(_opt_var = slash_no_default_rule(p), !p->error_indicator) // slash_no_default?
&&
- (_loop0_163_var = _loop0_163_rule(p)) // param_no_default*
+ (_loop0_168_var = _loop0_168_rule(p)) // param_no_default*
&&
(invalid_parameters_helper_var = invalid_parameters_helper_rule(p)) // invalid_parameters_helper
&&
@@ -20422,18 +20923,18 @@ invalid_parameters_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default* '(' param_no_default+ ','? ')'"));
- asdl_seq * _loop0_164_var;
- asdl_seq * _loop1_165_var;
+ asdl_seq * _loop0_169_var;
+ asdl_seq * _loop1_170_var;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
Token * a;
Token * b;
if (
- (_loop0_164_var = _loop0_164_rule(p)) // param_no_default*
+ (_loop0_169_var = _loop0_169_rule(p)) // param_no_default*
&&
(a = _PyPegen_expect_token(p, 7)) // token='('
&&
- (_loop1_165_var = _loop1_165_rule(p)) // param_no_default+
+ (_loop1_170_var = _loop1_170_rule(p)) // param_no_default+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
&&
@@ -20460,22 +20961,22 @@ invalid_parameters_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "[(slash_no_default | slash_with_default)] param_maybe_default* '*' (',' | param_no_default) param_maybe_default* '/'"));
Token * _literal;
- asdl_seq * _loop0_167_var;
- asdl_seq * _loop0_169_var;
+ asdl_seq * _loop0_172_var;
+ asdl_seq * _loop0_174_var;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
- void *_tmp_168_var;
+ void *_tmp_173_var;
Token * a;
if (
- (_opt_var = _tmp_166_rule(p), !p->error_indicator) // [(slash_no_default | slash_with_default)]
+ (_opt_var = _tmp_171_rule(p), !p->error_indicator) // [(slash_no_default | slash_with_default)]
&&
- (_loop0_167_var = _loop0_167_rule(p)) // param_maybe_default*
+ (_loop0_172_var = _loop0_172_rule(p)) // param_maybe_default*
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_168_var = _tmp_168_rule(p)) // ',' | param_no_default
+ (_tmp_173_var = _tmp_173_rule(p)) // ',' | param_no_default
&&
- (_loop0_169_var = _loop0_169_rule(p)) // param_maybe_default*
+ (_loop0_174_var = _loop0_174_rule(p)) // param_maybe_default*
&&
(a = _PyPegen_expect_token(p, 17)) // token='/'
)
@@ -20500,10 +21001,10 @@ invalid_parameters_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default+ '/' '*'"));
Token * _literal;
- asdl_seq * _loop1_170_var;
+ asdl_seq * _loop1_175_var;
Token * a;
if (
- (_loop1_170_var = _loop1_170_rule(p)) // param_maybe_default+
+ (_loop1_175_var = _loop1_175_rule(p)) // param_maybe_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -20553,7 +21054,7 @@ invalid_default_rule(Parser *p)
if (
(a = _PyPegen_expect_token(p, 22)) // token='='
&&
- _PyPegen_lookahead(1, _tmp_171_rule, p)
+ _PyPegen_lookahead(1, _tmp_176_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' &(')' | ',')"));
@@ -20599,12 +21100,12 @@ invalid_star_etc_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))"));
- void *_tmp_172_var;
+ void *_tmp_177_var;
Token * a;
if (
(a = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_172_var = _tmp_172_rule(p)) // ')' | ',' (')' | '**')
+ (_tmp_177_var = _tmp_177_rule(p)) // ')' | ',' (')' | '**')
)
{
D(fprintf(stderr, "%*c+ invalid_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))"));
@@ -20687,20 +21188,20 @@ invalid_star_etc_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (param_no_default | ',') param_maybe_default* '*' (param_no_default | ',')"));
Token * _literal;
- asdl_seq * _loop0_174_var;
- void *_tmp_173_var;
- void *_tmp_175_var;
+ asdl_seq * _loop0_179_var;
+ void *_tmp_178_var;
+ void *_tmp_180_var;
Token * a;
if (
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_173_var = _tmp_173_rule(p)) // param_no_default | ','
+ (_tmp_178_var = _tmp_178_rule(p)) // param_no_default | ','
&&
- (_loop0_174_var = _loop0_174_rule(p)) // param_maybe_default*
+ (_loop0_179_var = _loop0_179_rule(p)) // param_maybe_default*
&&
(a = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_175_var = _tmp_175_rule(p)) // param_no_default | ','
+ (_tmp_180_var = _tmp_180_rule(p)) // param_no_default | ','
)
{
D(fprintf(stderr, "%*c+ invalid_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (param_no_default | ',') param_maybe_default* '*' (param_no_default | ',')"));
@@ -20816,7 +21317,7 @@ invalid_kwds_rule(Parser *p)
&&
(_literal_1 = _PyPegen_expect_token(p, 12)) // token=','
&&
- (a = (Token*)_tmp_176_rule(p)) // '*' | '**' | '/'
+ (a = (Token*)_tmp_181_rule(p)) // '*' | '**' | '/'
)
{
D(fprintf(stderr, "%*c+ invalid_kwds[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' param ',' ('*' | '**' | '/')"));
@@ -20882,13 +21383,13 @@ invalid_parameters_helper_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_parameters_helper[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default+"));
- asdl_seq * _loop1_177_var;
+ asdl_seq * _loop1_182_var;
if (
- (_loop1_177_var = _loop1_177_rule(p)) // param_with_default+
+ (_loop1_182_var = _loop1_182_rule(p)) // param_with_default+
)
{
D(fprintf(stderr, "%*c+ invalid_parameters_helper[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_with_default+"));
- _res = _loop1_177_var;
+ _res = _loop1_182_var;
goto done;
}
p->mark = _mark;
@@ -20954,13 +21455,13 @@ invalid_lambda_parameters_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(lambda_slash_no_default | lambda_slash_with_default) lambda_param_maybe_default* '/'"));
- asdl_seq * _loop0_179_var;
- void *_tmp_178_var;
+ asdl_seq * _loop0_184_var;
+ void *_tmp_183_var;
Token * a;
if (
- (_tmp_178_var = _tmp_178_rule(p)) // lambda_slash_no_default | lambda_slash_with_default
+ (_tmp_183_var = _tmp_183_rule(p)) // lambda_slash_no_default | lambda_slash_with_default
&&
- (_loop0_179_var = _loop0_179_rule(p)) // lambda_param_maybe_default*
+ (_loop0_184_var = _loop0_184_rule(p)) // lambda_param_maybe_default*
&&
(a = _PyPegen_expect_token(p, 17)) // token='/'
)
@@ -20984,7 +21485,7 @@ invalid_lambda_parameters_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default? lambda_param_no_default* invalid_lambda_parameters_helper lambda_param_no_default"));
- asdl_seq * _loop0_180_var;
+ asdl_seq * _loop0_185_var;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
arg_ty a;
@@ -20992,7 +21493,7 @@ invalid_lambda_parameters_rule(Parser *p)
if (
(_opt_var = lambda_slash_no_default_rule(p), !p->error_indicator) // lambda_slash_no_default?
&&
- (_loop0_180_var = _loop0_180_rule(p)) // lambda_param_no_default*
+ (_loop0_185_var = _loop0_185_rule(p)) // lambda_param_no_default*
&&
(invalid_lambda_parameters_helper_var = invalid_lambda_parameters_helper_rule(p)) // invalid_lambda_parameters_helper
&&
@@ -21018,18 +21519,18 @@ invalid_lambda_parameters_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* '(' ','.lambda_param+ ','? ')'"));
- asdl_seq * _gather_182_var;
- asdl_seq * _loop0_181_var;
+ asdl_seq * _gather_187_var;
+ asdl_seq * _loop0_186_var;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
Token * a;
Token * b;
if (
- (_loop0_181_var = _loop0_181_rule(p)) // lambda_param_no_default*
+ (_loop0_186_var = _loop0_186_rule(p)) // lambda_param_no_default*
&&
(a = _PyPegen_expect_token(p, 7)) // token='('
&&
- (_gather_182_var = _gather_182_rule(p)) // ','.lambda_param+
+ (_gather_187_var = _gather_187_rule(p)) // ','.lambda_param+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
&&
@@ -21056,22 +21557,22 @@ invalid_lambda_parameters_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "[(lambda_slash_no_default | lambda_slash_with_default)] lambda_param_maybe_default* '*' (',' | lambda_param_no_default) lambda_param_maybe_default* '/'"));
Token * _literal;
- asdl_seq * _loop0_185_var;
- asdl_seq * _loop0_187_var;
+ asdl_seq * _loop0_190_var;
+ asdl_seq * _loop0_192_var;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
- void *_tmp_186_var;
+ void *_tmp_191_var;
Token * a;
if (
- (_opt_var = _tmp_184_rule(p), !p->error_indicator) // [(lambda_slash_no_default | lambda_slash_with_default)]
+ (_opt_var = _tmp_189_rule(p), !p->error_indicator) // [(lambda_slash_no_default | lambda_slash_with_default)]
&&
- (_loop0_185_var = _loop0_185_rule(p)) // lambda_param_maybe_default*
+ (_loop0_190_var = _loop0_190_rule(p)) // lambda_param_maybe_default*
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_186_var = _tmp_186_rule(p)) // ',' | lambda_param_no_default
+ (_tmp_191_var = _tmp_191_rule(p)) // ',' | lambda_param_no_default
&&
- (_loop0_187_var = _loop0_187_rule(p)) // lambda_param_maybe_default*
+ (_loop0_192_var = _loop0_192_rule(p)) // lambda_param_maybe_default*
&&
(a = _PyPegen_expect_token(p, 17)) // token='/'
)
@@ -21096,10 +21597,10 @@ invalid_lambda_parameters_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default+ '/' '*'"));
Token * _literal;
- asdl_seq * _loop1_188_var;
+ asdl_seq * _loop1_193_var;
Token * a;
if (
- (_loop1_188_var = _loop1_188_rule(p)) // lambda_param_maybe_default+
+ (_loop1_193_var = _loop1_193_rule(p)) // lambda_param_maybe_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -21171,13 +21672,13 @@ invalid_lambda_parameters_helper_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_lambda_parameters_helper[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+"));
- asdl_seq * _loop1_189_var;
+ asdl_seq * _loop1_194_var;
if (
- (_loop1_189_var = _loop1_189_rule(p)) // lambda_param_with_default+
+ (_loop1_194_var = _loop1_194_rule(p)) // lambda_param_with_default+
)
{
D(fprintf(stderr, "%*c+ invalid_lambda_parameters_helper[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+"));
- _res = _loop1_189_var;
+ _res = _loop1_194_var;
goto done;
}
p->mark = _mark;
@@ -21214,11 +21715,11 @@ invalid_lambda_star_etc_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))"));
Token * _literal;
- void *_tmp_190_var;
+ void *_tmp_195_var;
if (
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_190_var = _tmp_190_rule(p)) // ':' | ',' (':' | '**')
+ (_tmp_195_var = _tmp_195_rule(p)) // ':' | ',' (':' | '**')
)
{
D(fprintf(stderr, "%*c+ invalid_lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))"));
@@ -21271,20 +21772,20 @@ invalid_lambda_star_etc_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (lambda_param_no_default | ',') lambda_param_maybe_default* '*' (lambda_param_no_default | ',')"));
Token * _literal;
- asdl_seq * _loop0_192_var;
- void *_tmp_191_var;
- void *_tmp_193_var;
+ asdl_seq * _loop0_197_var;
+ void *_tmp_196_var;
+ void *_tmp_198_var;
Token * a;
if (
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_191_var = _tmp_191_rule(p)) // lambda_param_no_default | ','
+ (_tmp_196_var = _tmp_196_rule(p)) // lambda_param_no_default | ','
&&
- (_loop0_192_var = _loop0_192_rule(p)) // lambda_param_maybe_default*
+ (_loop0_197_var = _loop0_197_rule(p)) // lambda_param_maybe_default*
&&
(a = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_193_var = _tmp_193_rule(p)) // lambda_param_no_default | ','
+ (_tmp_198_var = _tmp_198_rule(p)) // lambda_param_no_default | ','
)
{
D(fprintf(stderr, "%*c+ invalid_lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (lambda_param_no_default | ',') lambda_param_maybe_default* '*' (lambda_param_no_default | ',')"));
@@ -21403,7 +21904,7 @@ invalid_lambda_kwds_rule(Parser *p)
&&
(_literal_1 = _PyPegen_expect_token(p, 12)) // token=','
&&
- (a = (Token*)_tmp_194_rule(p)) // '*' | '**' | '/'
+ (a = (Token*)_tmp_199_rule(p)) // '*' | '**' | '/'
)
{
D(fprintf(stderr, "%*c+ invalid_lambda_kwds[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' lambda_param ',' ('*' | '**' | '/')"));
@@ -21507,11 +22008,11 @@ invalid_with_item_rule(Parser *p)
if (
(expression_var = expression_rule(p)) // expression
&&
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(a = expression_rule(p)) // expression
&&
- _PyPegen_lookahead(1, _tmp_195_rule, p)
+ _PyPegen_lookahead(1, _tmp_200_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_with_item[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression 'as' expression &(',' | ')' | ':')"));
@@ -21560,7 +22061,7 @@ invalid_for_target_rule(Parser *p)
if (
(_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC?
&&
- (_keyword = _PyPegen_expect_token(p, 649)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 650)) // token='for'
&&
(a = star_expressions_rule(p)) // star_expressions
)
@@ -21692,11 +22193,11 @@ invalid_import_rule(Parser *p)
expr_ty dotted_name_var;
expr_ty dotted_name_var_1;
if (
- (a = _PyPegen_expect_token(p, 606)) // token='import'
+ (a = _PyPegen_expect_token(p, 607)) // token='import'
&&
(dotted_name_var = dotted_name_rule(p)) // dotted_name
&&
- (_keyword = _PyPegen_expect_token(p, 607)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 608)) // token='from'
&&
(dotted_name_var_1 = dotted_name_rule(p)) // dotted_name
)
@@ -21792,7 +22293,7 @@ invalid_with_stmt_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' ','.(expression ['as' star_target])+ NEWLINE"));
- asdl_seq * _gather_196_var;
+ asdl_seq * _gather_201_var;
Token * _keyword;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
@@ -21800,9 +22301,9 @@ invalid_with_stmt_rule(Parser *p)
if (
(_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC?
&&
- (_keyword = _PyPegen_expect_token(p, 614)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 615)) // token='with'
&&
- (_gather_196_var = _gather_196_rule(p)) // ','.(expression ['as' star_target])+
+ (_gather_201_var = _gather_201_rule(p)) // ','.(expression ['as' star_target])+
&&
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
@@ -21826,7 +22327,7 @@ invalid_with_stmt_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE"));
- asdl_seq * _gather_198_var;
+ asdl_seq * _gather_203_var;
Token * _keyword;
Token * _literal;
Token * _literal_1;
@@ -21838,11 +22339,11 @@ invalid_with_stmt_rule(Parser *p)
if (
(_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC?
&&
- (_keyword = _PyPegen_expect_token(p, 614)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 615)) // token='with'
&&
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
- (_gather_198_var = _gather_198_rule(p)) // ','.(expressions ['as' star_target])+
+ (_gather_203_var = _gather_203_rule(p)) // ','.(expressions ['as' star_target])+
&&
(_opt_var_1 = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
&&
@@ -21892,7 +22393,7 @@ invalid_with_stmt_indent_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_with_stmt_indent[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT"));
- asdl_seq * _gather_200_var;
+ asdl_seq * _gather_205_var;
Token * _literal;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
@@ -21901,9 +22402,9 @@ invalid_with_stmt_indent_rule(Parser *p)
if (
(_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC?
&&
- (a = _PyPegen_expect_token(p, 614)) // token='with'
+ (a = _PyPegen_expect_token(p, 615)) // token='with'
&&
- (_gather_200_var = _gather_200_rule(p)) // ','.(expression ['as' star_target])+
+ (_gather_205_var = _gather_205_rule(p)) // ','.(expression ['as' star_target])+
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -21931,7 +22432,7 @@ invalid_with_stmt_indent_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_with_stmt_indent[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT"));
- asdl_seq * _gather_202_var;
+ asdl_seq * _gather_207_var;
Token * _literal;
Token * _literal_1;
Token * _literal_2;
@@ -21944,11 +22445,11 @@ invalid_with_stmt_indent_rule(Parser *p)
if (
(_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC?
&&
- (a = _PyPegen_expect_token(p, 614)) // token='with'
+ (a = _PyPegen_expect_token(p, 615)) // token='with'
&&
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
- (_gather_202_var = _gather_202_rule(p)) // ','.(expressions ['as' star_target])+
+ (_gather_207_var = _gather_207_rule(p)) // ','.(expressions ['as' star_target])+
&&
(_opt_var_1 = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
&&
@@ -22008,7 +22509,7 @@ invalid_try_stmt_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 623)) // token='try'
+ (a = _PyPegen_expect_token(p, 624)) // token='try'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -22040,13 +22541,13 @@ invalid_try_stmt_rule(Parser *p)
Token * _literal;
asdl_stmt_seq* block_var;
if (
- (_keyword = _PyPegen_expect_token(p, 623)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 624)) // token='try'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
(block_var = block_rule(p)) // block
&&
- _PyPegen_lookahead(0, _tmp_204_rule, p)
+ _PyPegen_lookahead(0, _tmp_209_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_try_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'try' ':' block !('except' | 'finally')"));
@@ -22071,29 +22572,29 @@ invalid_try_stmt_rule(Parser *p)
Token * _keyword;
Token * _literal;
Token * _literal_1;
- asdl_seq * _loop0_205_var;
- asdl_seq * _loop1_206_var;
+ asdl_seq * _loop0_210_var;
+ asdl_seq * _loop1_211_var;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
Token * a;
Token * b;
expr_ty expression_var;
if (
- (_keyword = _PyPegen_expect_token(p, 623)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 624)) // token='try'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
- (_loop0_205_var = _loop0_205_rule(p)) // block*
+ (_loop0_210_var = _loop0_210_rule(p)) // block*
&&
- (_loop1_206_var = _loop1_206_rule(p)) // except_block+
+ (_loop1_211_var = _loop1_211_rule(p)) // except_block+
&&
- (a = _PyPegen_expect_token(p, 636)) // token='except'
+ (a = _PyPegen_expect_token(p, 637)) // token='except'
&&
(b = _PyPegen_expect_token(p, 16)) // token='*'
&&
(expression_var = expression_rule(p)) // expression
&&
- (_opt_var = _tmp_207_rule(p), !p->error_indicator) // ['as' NAME]
+ (_opt_var = _tmp_212_rule(p), !p->error_indicator) // ['as' NAME]
&&
(_literal_1 = _PyPegen_expect_token(p, 11)) // token=':'
)
@@ -22120,23 +22621,23 @@ invalid_try_stmt_rule(Parser *p)
Token * _keyword;
Token * _literal;
Token * _literal_1;
- asdl_seq * _loop0_208_var;
- asdl_seq * _loop1_209_var;
+ asdl_seq * _loop0_213_var;
+ asdl_seq * _loop1_214_var;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
Token * a;
if (
- (_keyword = _PyPegen_expect_token(p, 623)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 624)) // token='try'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
- (_loop0_208_var = _loop0_208_rule(p)) // block*
+ (_loop0_213_var = _loop0_213_rule(p)) // block*
&&
- (_loop1_209_var = _loop1_209_rule(p)) // except_star_block+
+ (_loop1_214_var = _loop1_214_rule(p)) // except_star_block+
&&
- (a = _PyPegen_expect_token(p, 636)) // token='except'
+ (a = _PyPegen_expect_token(p, 637)) // token='except'
&&
- (_opt_var = _tmp_210_rule(p), !p->error_indicator) // [expression ['as' NAME]]
+ (_opt_var = _tmp_215_rule(p), !p->error_indicator) // [expression ['as' NAME]]
&&
(_literal_1 = _PyPegen_expect_token(p, 11)) // token=':'
)
@@ -22194,7 +22695,7 @@ invalid_except_stmt_rule(Parser *p)
expr_ty a;
expr_ty expressions_var;
if (
- (_keyword = _PyPegen_expect_token(p, 636)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 637)) // token='except'
&&
(_opt_var = _PyPegen_expect_token(p, 16), !p->error_indicator) // '*'?
&&
@@ -22204,7 +22705,7 @@ invalid_except_stmt_rule(Parser *p)
&&
(expressions_var = expressions_rule(p)) // expressions
&&
- (_opt_var_1 = _tmp_211_rule(p), !p->error_indicator) // ['as' NAME]
+ (_opt_var_1 = _tmp_216_rule(p), !p->error_indicator) // ['as' NAME]
&&
(_literal_1 = _PyPegen_expect_token(p, 11)) // token=':'
)
@@ -22236,13 +22737,13 @@ invalid_except_stmt_rule(Parser *p)
expr_ty expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 636)) // token='except'
+ (a = _PyPegen_expect_token(p, 637)) // token='except'
&&
(_opt_var = _PyPegen_expect_token(p, 16), !p->error_indicator) // '*'?
&&
(expression_var = expression_rule(p)) // expression
&&
- (_opt_var_1 = _tmp_212_rule(p), !p->error_indicator) // ['as' NAME]
+ (_opt_var_1 = _tmp_217_rule(p), !p->error_indicator) // ['as' NAME]
&&
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
@@ -22269,7 +22770,7 @@ invalid_except_stmt_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 636)) // token='except'
+ (a = _PyPegen_expect_token(p, 637)) // token='except'
&&
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
@@ -22294,14 +22795,14 @@ invalid_except_stmt_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_except_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' '*' (NEWLINE | ':')"));
Token * _literal;
- void *_tmp_213_var;
+ void *_tmp_218_var;
Token * a;
if (
- (a = _PyPegen_expect_token(p, 636)) // token='except'
+ (a = _PyPegen_expect_token(p, 637)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_213_var = _tmp_213_rule(p)) // NEWLINE | ':'
+ (_tmp_218_var = _tmp_218_rule(p)) // NEWLINE | ':'
)
{
D(fprintf(stderr, "%*c+ invalid_except_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' '*' (NEWLINE | ':')"));
@@ -22347,7 +22848,7 @@ invalid_finally_stmt_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 632)) // token='finally'
+ (a = _PyPegen_expect_token(p, 633)) // token='finally'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -22404,11 +22905,11 @@ invalid_except_stmt_indent_rule(Parser *p)
expr_ty expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 636)) // token='except'
+ (a = _PyPegen_expect_token(p, 637)) // token='except'
&&
(expression_var = expression_rule(p)) // expression
&&
- (_opt_var = _tmp_214_rule(p), !p->error_indicator) // ['as' NAME]
+ (_opt_var = _tmp_219_rule(p), !p->error_indicator) // ['as' NAME]
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -22440,7 +22941,7 @@ invalid_except_stmt_indent_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 636)) // token='except'
+ (a = _PyPegen_expect_token(p, 637)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -22497,13 +22998,13 @@ invalid_except_star_stmt_indent_rule(Parser *p)
expr_ty expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 636)) // token='except'
+ (a = _PyPegen_expect_token(p, 637)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
(expression_var = expression_rule(p)) // expression
&&
- (_opt_var = _tmp_215_rule(p), !p->error_indicator) // ['as' NAME]
+ (_opt_var = _tmp_220_rule(p), !p->error_indicator) // ['as' NAME]
&&
(_literal_1 = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -22739,7 +23240,7 @@ invalid_as_pattern_rule(Parser *p)
if (
(or_pattern_var = or_pattern_rule(p)) // or_pattern
&&
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(a = _PyPegen_expect_soft_keyword(p, "_")) // soft_keyword='"_"'
)
@@ -22769,7 +23270,7 @@ invalid_as_pattern_rule(Parser *p)
if (
(or_pattern_var = or_pattern_rule(p)) // or_pattern
&&
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
_PyPegen_lookahead_with_name(0, _PyPegen_name_token, p)
&&
@@ -22872,7 +23373,7 @@ invalid_class_argument_pattern_rule(Parser *p)
asdl_pattern_seq* a;
asdl_seq* keyword_patterns_var;
if (
- (_opt_var = _tmp_216_rule(p), !p->error_indicator) // [positional_patterns ',']
+ (_opt_var = _tmp_221_rule(p), !p->error_indicator) // [positional_patterns ',']
&&
(keyword_patterns_var = keyword_patterns_rule(p)) // keyword_patterns
&&
@@ -22926,7 +23427,7 @@ invalid_if_stmt_rule(Parser *p)
expr_ty named_expression_var;
Token * newline_var;
if (
- (_keyword = _PyPegen_expect_token(p, 641)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
@@ -22957,7 +23458,7 @@ invalid_if_stmt_rule(Parser *p)
expr_ty a_1;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 641)) // token='if'
+ (a = _PyPegen_expect_token(p, 642)) // token='if'
&&
(a_1 = named_expression_rule(p)) // named_expression
&&
@@ -23013,7 +23514,7 @@ invalid_elif_stmt_rule(Parser *p)
expr_ty named_expression_var;
Token * newline_var;
if (
- (_keyword = _PyPegen_expect_token(p, 643)) // token='elif'
+ (_keyword = _PyPegen_expect_token(p, 644)) // token='elif'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
@@ -23044,7 +23545,7 @@ invalid_elif_stmt_rule(Parser *p)
expr_ty named_expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 643)) // token='elif'
+ (a = _PyPegen_expect_token(p, 644)) // token='elif'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
@@ -23098,7 +23599,7 @@ invalid_else_stmt_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 644)) // token='else'
+ (a = _PyPegen_expect_token(p, 645)) // token='else'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -23152,7 +23653,7 @@ invalid_while_stmt_rule(Parser *p)
expr_ty named_expression_var;
Token * newline_var;
if (
- (_keyword = _PyPegen_expect_token(p, 646)) // token='while'
+ (_keyword = _PyPegen_expect_token(p, 647)) // token='while'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
@@ -23183,7 +23684,7 @@ invalid_while_stmt_rule(Parser *p)
expr_ty named_expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 646)) // token='while'
+ (a = _PyPegen_expect_token(p, 647)) // token='while'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
@@ -23245,11 +23746,11 @@ invalid_for_stmt_rule(Parser *p)
if (
(_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC?
&&
- (_keyword = _PyPegen_expect_token(p, 649)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 650)) // token='for'
&&
(star_targets_var = star_targets_rule(p)) // star_targets
&&
- (_keyword_1 = _PyPegen_expect_token(p, 650)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in'
&&
(star_expressions_var = star_expressions_rule(p)) // star_expressions
&&
@@ -23286,11 +23787,11 @@ invalid_for_stmt_rule(Parser *p)
if (
(_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC?
&&
- (a = _PyPegen_expect_token(p, 649)) // token='for'
+ (a = _PyPegen_expect_token(p, 650)) // token='for'
&&
(star_targets_var = star_targets_rule(p)) // star_targets
&&
- (_keyword = _PyPegen_expect_token(p, 650)) // token='in'
+ (_keyword = _PyPegen_expect_token(p, 651)) // token='in'
&&
(star_expressions_var = star_expressions_rule(p)) // star_expressions
&&
@@ -23356,7 +23857,7 @@ invalid_def_raw_rule(Parser *p)
if (
(_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC?
&&
- (a = _PyPegen_expect_token(p, 651)) // token='def'
+ (a = _PyPegen_expect_token(p, 652)) // token='def'
&&
(name_var = _PyPegen_name_token(p)) // NAME
&&
@@ -23366,7 +23867,7 @@ invalid_def_raw_rule(Parser *p)
&&
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
&&
- (_opt_var_2 = _tmp_217_rule(p), !p->error_indicator) // ['->' expression]
+ (_opt_var_2 = _tmp_222_rule(p), !p->error_indicator) // ['->' expression]
&&
(_literal_2 = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -23422,11 +23923,11 @@ invalid_class_def_raw_rule(Parser *p)
expr_ty name_var;
Token * newline_var;
if (
- (_keyword = _PyPegen_expect_token(p, 653)) // token='class'
+ (_keyword = _PyPegen_expect_token(p, 654)) // token='class'
&&
(name_var = _PyPegen_name_token(p)) // NAME
&&
- (_opt_var = _tmp_218_rule(p), !p->error_indicator) // ['(' arguments? ')']
+ (_opt_var = _tmp_223_rule(p), !p->error_indicator) // ['(' arguments? ')']
&&
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
@@ -23457,11 +23958,11 @@ invalid_class_def_raw_rule(Parser *p)
expr_ty name_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 653)) // token='class'
+ (a = _PyPegen_expect_token(p, 654)) // token='class'
&&
(name_var = _PyPegen_name_token(p)) // NAME
&&
- (_opt_var = _tmp_219_rule(p), !p->error_indicator) // ['(' arguments? ')']
+ (_opt_var = _tmp_224_rule(p), !p->error_indicator) // ['(' arguments? ')']
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -23512,11 +24013,11 @@ invalid_double_starred_kvpairs_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_double_starred_kvpairs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.double_starred_kvpair+ ',' invalid_kvpair"));
- asdl_seq * _gather_220_var;
+ asdl_seq * _gather_225_var;
Token * _literal;
void *invalid_kvpair_var;
if (
- (_gather_220_var = _gather_220_rule(p)) // ','.double_starred_kvpair+
+ (_gather_225_var = _gather_225_rule(p)) // ','.double_starred_kvpair+
&&
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
@@ -23524,7 +24025,7 @@ invalid_double_starred_kvpairs_rule(Parser *p)
)
{
D(fprintf(stderr, "%*c+ invalid_double_starred_kvpairs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.double_starred_kvpair+ ',' invalid_kvpair"));
- _res = _PyPegen_dummy_name(p, _gather_220_var, _literal, invalid_kvpair_var);
+ _res = _PyPegen_dummy_name(p, _gather_225_var, _literal, invalid_kvpair_var);
goto done;
}
p->mark = _mark;
@@ -23577,7 +24078,7 @@ invalid_double_starred_kvpairs_rule(Parser *p)
&&
(a = _PyPegen_expect_token(p, 11)) // token=':'
&&
- _PyPegen_lookahead(1, _tmp_222_rule, p)
+ _PyPegen_lookahead(1, _tmp_227_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_double_starred_kvpairs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ':' &('}' | ',')"));
@@ -23688,7 +24189,7 @@ invalid_kvpair_rule(Parser *p)
&&
(a = _PyPegen_expect_token(p, 11)) // token=':'
&&
- _PyPegen_lookahead(1, _tmp_223_rule, p)
+ _PyPegen_lookahead(1, _tmp_228_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_kvpair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ':' &('}' | ',')"));
@@ -23763,6 +24264,450 @@ invalid_starred_expression_rule(Parser *p)
return _res;
}
+// invalid_replacement_field:
+// | '{' '='
+// | '{' '!'
+// | '{' ':'
+// | '{' '}'
+// | '{' !(yield_expr | star_expressions)
+// | '{' (yield_expr | star_expressions) !('=' | '!' | ':' | '}')
+// | '{' (yield_expr | star_expressions) '=' !('!' | ':' | '}')
+// | '{' (yield_expr | star_expressions) '='? invalid_conversion_character
+// | '{' (yield_expr | star_expressions) '='? ['!' NAME] !(':' | '}')
+// | '{' (yield_expr | star_expressions) '='? ['!' NAME] ':' fstring_format_spec* !'}'
+// | '{' (yield_expr | star_expressions) '='? ['!' NAME] !'}'
+static void *
+invalid_replacement_field_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // '{' '='
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' '='"));
+ Token * _literal;
+ Token * a;
+ if (
+ (_literal = _PyPegen_expect_token(p, 25)) // token='{'
+ &&
+ (a = _PyPegen_expect_token(p, 22)) // token='='
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' '='"));
+ _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "f-string: valid expression required before '='" );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' '='"));
+ }
+ { // '{' '!'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' '!'"));
+ Token * _literal;
+ Token * a;
+ if (
+ (_literal = _PyPegen_expect_token(p, 25)) // token='{'
+ &&
+ (a = _PyPegen_expect_token(p, 54)) // token='!'
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' '!'"));
+ _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "f-string: valid expression required before '!'" );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' '!'"));
+ }
+ { // '{' ':'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' ':'"));
+ Token * _literal;
+ Token * a;
+ if (
+ (_literal = _PyPegen_expect_token(p, 25)) // token='{'
+ &&
+ (a = _PyPegen_expect_token(p, 11)) // token=':'
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' ':'"));
+ _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "f-string: valid expression required before ':'" );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' ':'"));
+ }
+ { // '{' '}'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' '}'"));
+ Token * _literal;
+ Token * a;
+ if (
+ (_literal = _PyPegen_expect_token(p, 25)) // token='{'
+ &&
+ (a = _PyPegen_expect_token(p, 26)) // token='}'
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' '}'"));
+ _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "f-string: valid expression required before '}'" );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' '}'"));
+ }
+ { // '{' !(yield_expr | star_expressions)
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' !(yield_expr | star_expressions)"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 25)) // token='{'
+ &&
+ _PyPegen_lookahead(0, _tmp_229_rule, p)
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' !(yield_expr | star_expressions)"));
+ _res = RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: expecting a valid expression after '{'" );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' !(yield_expr | star_expressions)"));
+ }
+ { // '{' (yield_expr | star_expressions) !('=' | '!' | ':' | '}')
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) !('=' | '!' | ':' | '}')"));
+ Token * _literal;
+ void *_tmp_230_var;
+ if (
+ (_literal = _PyPegen_expect_token(p, 25)) // token='{'
+ &&
+ (_tmp_230_var = _tmp_230_rule(p)) // yield_expr | star_expressions
+ &&
+ _PyPegen_lookahead(0, _tmp_231_rule, p)
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) !('=' | '!' | ':' | '}')"));
+ _res = PyErr_Occurred ( ) ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: expecting '=', or '!', or ':', or '}'" );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' (yield_expr | star_expressions) !('=' | '!' | ':' | '}')"));
+ }
+ { // '{' (yield_expr | star_expressions) '=' !('!' | ':' | '}')
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '=' !('!' | ':' | '}')"));
+ Token * _literal;
+ Token * _literal_1;
+ void *_tmp_232_var;
+ if (
+ (_literal = _PyPegen_expect_token(p, 25)) // token='{'
+ &&
+ (_tmp_232_var = _tmp_232_rule(p)) // yield_expr | star_expressions
+ &&
+ (_literal_1 = _PyPegen_expect_token(p, 22)) // token='='
+ &&
+ _PyPegen_lookahead(0, _tmp_233_rule, p)
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '=' !('!' | ':' | '}')"));
+ _res = PyErr_Occurred ( ) ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: expecting '!', or ':', or '}'" );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' (yield_expr | star_expressions) '=' !('!' | ':' | '}')"));
+ }
+ { // '{' (yield_expr | star_expressions) '='? invalid_conversion_character
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? invalid_conversion_character"));
+ Token * _literal;
+ void *_opt_var;
+ UNUSED(_opt_var); // Silence compiler warnings
+ void *_tmp_234_var;
+ void *invalid_conversion_character_var;
+ if (
+ (_literal = _PyPegen_expect_token(p, 25)) // token='{'
+ &&
+ (_tmp_234_var = _tmp_234_rule(p)) // yield_expr | star_expressions
+ &&
+ (_opt_var = _PyPegen_expect_token(p, 22), !p->error_indicator) // '='?
+ &&
+ (invalid_conversion_character_var = invalid_conversion_character_rule(p)) // invalid_conversion_character
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? invalid_conversion_character"));
+ _res = _PyPegen_dummy_name(p, _literal, _tmp_234_var, _opt_var, invalid_conversion_character_var);
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' (yield_expr | star_expressions) '='? invalid_conversion_character"));
+ }
+ { // '{' (yield_expr | star_expressions) '='? ['!' NAME] !(':' | '}')
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] !(':' | '}')"));
+ Token * _literal;
+ void *_opt_var;
+ UNUSED(_opt_var); // Silence compiler warnings
+ void *_opt_var_1;
+ UNUSED(_opt_var_1); // Silence compiler warnings
+ void *_tmp_235_var;
+ if (
+ (_literal = _PyPegen_expect_token(p, 25)) // token='{'
+ &&
+ (_tmp_235_var = _tmp_235_rule(p)) // yield_expr | star_expressions
+ &&
+ (_opt_var = _PyPegen_expect_token(p, 22), !p->error_indicator) // '='?
+ &&
+ (_opt_var_1 = _tmp_236_rule(p), !p->error_indicator) // ['!' NAME]
+ &&
+ _PyPegen_lookahead(0, _tmp_237_rule, p)
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] !(':' | '}')"));
+ _res = PyErr_Occurred ( ) ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: expecting ':' or '}'" );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] !(':' | '}')"));
+ }
+ { // '{' (yield_expr | star_expressions) '='? ['!' NAME] ':' fstring_format_spec* !'}'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] ':' fstring_format_spec* !'}'"));
+ Token * _literal;
+ Token * _literal_1;
+ asdl_seq * _loop0_240_var;
+ void *_opt_var;
+ UNUSED(_opt_var); // Silence compiler warnings
+ void *_opt_var_1;
+ UNUSED(_opt_var_1); // Silence compiler warnings
+ void *_tmp_238_var;
+ if (
+ (_literal = _PyPegen_expect_token(p, 25)) // token='{'
+ &&
+ (_tmp_238_var = _tmp_238_rule(p)) // yield_expr | star_expressions
+ &&
+ (_opt_var = _PyPegen_expect_token(p, 22), !p->error_indicator) // '='?
+ &&
+ (_opt_var_1 = _tmp_239_rule(p), !p->error_indicator) // ['!' NAME]
+ &&
+ (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':'
+ &&
+ (_loop0_240_var = _loop0_240_rule(p)) // fstring_format_spec*
+ &&
+ _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 26) // token='}'
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] ':' fstring_format_spec* !'}'"));
+ _res = PyErr_Occurred ( ) ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: expecting '}', or format specs" );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] ':' fstring_format_spec* !'}'"));
+ }
+ { // '{' (yield_expr | star_expressions) '='? ['!' NAME] !'}'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] !'}'"));
+ Token * _literal;
+ void *_opt_var;
+ UNUSED(_opt_var); // Silence compiler warnings
+ void *_opt_var_1;
+ UNUSED(_opt_var_1); // Silence compiler warnings
+ void *_tmp_241_var;
+ if (
+ (_literal = _PyPegen_expect_token(p, 25)) // token='{'
+ &&
+ (_tmp_241_var = _tmp_241_rule(p)) // yield_expr | star_expressions
+ &&
+ (_opt_var = _PyPegen_expect_token(p, 22), !p->error_indicator) // '='?
+ &&
+ (_opt_var_1 = _tmp_242_rule(p), !p->error_indicator) // ['!' NAME]
+ &&
+ _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 26) // token='}'
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] !'}'"));
+ _res = PyErr_Occurred ( ) ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: expecting '}'" );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_replacement_field[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' (yield_expr | star_expressions) '='? ['!' NAME] !'}'"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// invalid_conversion_character: '!' &(':' | '}') | '!' !NAME
+static void *
+invalid_conversion_character_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // '!' &(':' | '}')
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_conversion_character[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!' &(':' | '}')"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 54)) // token='!'
+ &&
+ _PyPegen_lookahead(1, _tmp_243_rule, p)
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_conversion_character[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!' &(':' | '}')"));
+ _res = RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: missing conversion character" );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_conversion_character[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!' &(':' | '}')"));
+ }
+ { // '!' !NAME
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> invalid_conversion_character[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!' !NAME"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 54)) // token='!'
+ &&
+ _PyPegen_lookahead_with_name(0, _PyPegen_name_token, p)
+ )
+ {
+ D(fprintf(stderr, "%*c+ invalid_conversion_character[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!' !NAME"));
+ _res = RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN ( "f-string: invalid conversion character" );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s invalid_conversion_character[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!' !NAME"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
// _loop0_1: NEWLINE
static asdl_seq *
_loop0_1_rule(Parser *p)
@@ -23899,9 +24844,77 @@ _loop0_2_rule(Parser *p)
return _seq;
}
-// _loop1_3: statement
+// _loop0_3: fstring_middle
static asdl_seq *
-_loop1_3_rule(Parser *p)
+_loop0_3_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void *_res = NULL;
+ int _mark = p->mark;
+ void **_children = PyMem_Malloc(sizeof(void *));
+ if (!_children) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ Py_ssize_t _children_capacity = 1;
+ Py_ssize_t _n = 0;
+ { // fstring_middle
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _loop0_3[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring_middle"));
+ expr_ty fstring_middle_var;
+ while (
+ (fstring_middle_var = fstring_middle_rule(p)) // fstring_middle
+ )
+ {
+ _res = fstring_middle_var;
+ if (_n == _children_capacity) {
+ _children_capacity *= 2;
+ void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
+ if (!_new_children) {
+ PyMem_Free(_children);
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ _children = _new_children;
+ }
+ _children[_n++] = _res;
+ _mark = p->mark;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _loop0_3[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "fstring_middle"));
+ }
+ asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
+ if (!_seq) {
+ PyMem_Free(_children);
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]);
+ PyMem_Free(_children);
+ p->level--;
+ return _seq;
+}
+
+// _loop1_4: statement
+static asdl_seq *
+_loop1_4_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -23927,7 +24940,7 @@ _loop1_3_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_3[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "statement"));
+ D(fprintf(stderr, "%*c> _loop1_4[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "statement"));
asdl_stmt_seq* statement_var;
while (
(statement_var = statement_rule(p)) // statement
@@ -23950,7 +24963,7 @@ _loop1_3_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_3[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_4[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "statement"));
}
if (_n == 0 || p->error_indicator) {
@@ -23972,9 +24985,9 @@ _loop1_3_rule(Parser *p)
return _seq;
}
-// _loop0_5: ';' simple_stmt
+// _loop0_6: ';' simple_stmt
static asdl_seq *
-_loop0_5_rule(Parser *p)
+_loop0_6_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24000,7 +25013,7 @@ _loop0_5_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_5[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "';' simple_stmt"));
+ D(fprintf(stderr, "%*c> _loop0_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "';' simple_stmt"));
Token * _literal;
stmt_ty elem;
while (
@@ -24032,7 +25045,7 @@ _loop0_5_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_5[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_6[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "';' simple_stmt"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -24049,9 +25062,9 @@ _loop0_5_rule(Parser *p)
return _seq;
}
-// _gather_4: simple_stmt _loop0_5
+// _gather_5: simple_stmt _loop0_6
static asdl_seq *
-_gather_4_rule(Parser *p)
+_gather_5_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24063,27 +25076,27 @@ _gather_4_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // simple_stmt _loop0_5
+ { // simple_stmt _loop0_6
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_4[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "simple_stmt _loop0_5"));
+ D(fprintf(stderr, "%*c> _gather_5[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "simple_stmt _loop0_6"));
stmt_ty elem;
asdl_seq * seq;
if (
(elem = simple_stmt_rule(p)) // simple_stmt
&&
- (seq = _loop0_5_rule(p)) // _loop0_5
+ (seq = _loop0_6_rule(p)) // _loop0_6
)
{
- D(fprintf(stderr, "%*c+ _gather_4[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "simple_stmt _loop0_5"));
+ D(fprintf(stderr, "%*c+ _gather_5[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "simple_stmt _loop0_6"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_4[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "simple_stmt _loop0_5"));
+ D(fprintf(stderr, "%*c%s _gather_5[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "simple_stmt _loop0_6"));
}
_res = NULL;
done:
@@ -24091,9 +25104,9 @@ _gather_4_rule(Parser *p)
return _res;
}
-// _tmp_6: 'import' | 'from'
+// _tmp_7: 'import' | 'from'
static void *
-_tmp_6_rule(Parser *p)
+_tmp_7_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24110,18 +25123,18 @@ _tmp_6_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'import'"));
+ D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'import'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 606)) // token='import'
+ (_keyword = _PyPegen_expect_token(p, 607)) // token='import'
)
{
- D(fprintf(stderr, "%*c+ _tmp_6[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'import'"));
+ D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'import'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_6[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_7[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'import'"));
}
{ // 'from'
@@ -24129,18 +25142,18 @@ _tmp_6_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from'"));
+ D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 607)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 608)) // token='from'
)
{
- D(fprintf(stderr, "%*c+ _tmp_6[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from'"));
+ D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_6[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_7[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'from'"));
}
_res = NULL;
@@ -24149,9 +25162,9 @@ _tmp_6_rule(Parser *p)
return _res;
}
-// _tmp_7: 'def' | '@' | ASYNC
+// _tmp_8: 'def' | '@' | ASYNC
static void *
-_tmp_7_rule(Parser *p)
+_tmp_8_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24168,18 +25181,18 @@ _tmp_7_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'def'"));
+ D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'def'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 651)) // token='def'
+ (_keyword = _PyPegen_expect_token(p, 652)) // token='def'
)
{
- D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'def'"));
+ D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'def'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_7[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_8[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'def'"));
}
{ // '@'
@@ -24187,18 +25200,18 @@ _tmp_7_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@'"));
+ D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 49)) // token='@'
)
{
- D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@'"));
+ D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_7[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_8[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'@'"));
}
{ // ASYNC
@@ -24206,18 +25219,18 @@ _tmp_7_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC"));
+ D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC"));
Token * async_var;
if (
(async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
)
{
- D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC"));
+ D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC"));
_res = async_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_7[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_8[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC"));
}
_res = NULL;
@@ -24226,9 +25239,9 @@ _tmp_7_rule(Parser *p)
return _res;
}
-// _tmp_8: 'class' | '@'
+// _tmp_9: 'class' | '@'
static void *
-_tmp_8_rule(Parser *p)
+_tmp_9_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24245,18 +25258,18 @@ _tmp_8_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'class'"));
+ D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'class'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 653)) // token='class'
+ (_keyword = _PyPegen_expect_token(p, 654)) // token='class'
)
{
- D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'class'"));
+ D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'class'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_8[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_9[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'class'"));
}
{ // '@'
@@ -24264,18 +25277,18 @@ _tmp_8_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@'"));
+ D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 49)) // token='@'
)
{
- D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@'"));
+ D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_8[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_9[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'@'"));
}
_res = NULL;
@@ -24284,9 +25297,9 @@ _tmp_8_rule(Parser *p)
return _res;
}
-// _tmp_9: 'with' | ASYNC
+// _tmp_10: 'with' | ASYNC
static void *
-_tmp_9_rule(Parser *p)
+_tmp_10_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24303,18 +25316,18 @@ _tmp_9_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with'"));
+ D(fprintf(stderr, "%*c> _tmp_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 614)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 615)) // token='with'
)
{
- D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with'"));
+ D(fprintf(stderr, "%*c+ _tmp_10[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_9[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_10[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'with'"));
}
{ // ASYNC
@@ -24322,18 +25335,18 @@ _tmp_9_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC"));
+ D(fprintf(stderr, "%*c> _tmp_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC"));
Token * async_var;
if (
(async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
)
{
- D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC"));
+ D(fprintf(stderr, "%*c+ _tmp_10[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC"));
_res = async_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_9[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_10[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC"));
}
_res = NULL;
@@ -24342,9 +25355,9 @@ _tmp_9_rule(Parser *p)
return _res;
}
-// _tmp_10: 'for' | ASYNC
+// _tmp_11: 'for' | ASYNC
static void *
-_tmp_10_rule(Parser *p)
+_tmp_11_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24361,18 +25374,18 @@ _tmp_10_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for'"));
+ D(fprintf(stderr, "%*c> _tmp_11[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 649)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 650)) // token='for'
)
{
- D(fprintf(stderr, "%*c+ _tmp_10[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for'"));
+ D(fprintf(stderr, "%*c+ _tmp_11[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_10[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_11[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'for'"));
}
{ // ASYNC
@@ -24380,18 +25393,18 @@ _tmp_10_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC"));
+ D(fprintf(stderr, "%*c> _tmp_11[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC"));
Token * async_var;
if (
(async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC'
)
{
- D(fprintf(stderr, "%*c+ _tmp_10[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC"));
+ D(fprintf(stderr, "%*c+ _tmp_11[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC"));
_res = async_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_10[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_11[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC"));
}
_res = NULL;
@@ -24400,9 +25413,9 @@ _tmp_10_rule(Parser *p)
return _res;
}
-// _tmp_11: '=' annotated_rhs
+// _tmp_12: '=' annotated_rhs
static void *
-_tmp_11_rule(Parser *p)
+_tmp_12_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24419,7 +25432,7 @@ _tmp_11_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_11[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs"));
+ D(fprintf(stderr, "%*c> _tmp_12[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs"));
Token * _literal;
expr_ty d;
if (
@@ -24428,7 +25441,7 @@ _tmp_11_rule(Parser *p)
(d = annotated_rhs_rule(p)) // annotated_rhs
)
{
- D(fprintf(stderr, "%*c+ _tmp_11[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs"));
+ D(fprintf(stderr, "%*c+ _tmp_12[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs"));
_res = d;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -24438,7 +25451,7 @@ _tmp_11_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_11[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_12[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'=' annotated_rhs"));
}
_res = NULL;
@@ -24447,9 +25460,9 @@ _tmp_11_rule(Parser *p)
return _res;
}
-// _tmp_12: '(' single_target ')' | single_subscript_attribute_target
+// _tmp_13: '(' single_target ')' | single_subscript_attribute_target
static void *
-_tmp_12_rule(Parser *p)
+_tmp_13_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24466,7 +25479,7 @@ _tmp_12_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_12[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' single_target ')'"));
+ D(fprintf(stderr, "%*c> _tmp_13[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' single_target ')'"));
Token * _literal;
Token * _literal_1;
expr_ty b;
@@ -24478,7 +25491,7 @@ _tmp_12_rule(Parser *p)
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_12[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' single_target ')'"));
+ D(fprintf(stderr, "%*c+ _tmp_13[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' single_target ')'"));
_res = b;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -24488,7 +25501,7 @@ _tmp_12_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_12[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_13[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' single_target ')'"));
}
{ // single_subscript_attribute_target
@@ -24496,18 +25509,18 @@ _tmp_12_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_12[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "single_subscript_attribute_target"));
+ D(fprintf(stderr, "%*c> _tmp_13[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "single_subscript_attribute_target"));
expr_ty single_subscript_attribute_target_var;
if (
(single_subscript_attribute_target_var = single_subscript_attribute_target_rule(p)) // single_subscript_attribute_target
)
{
- D(fprintf(stderr, "%*c+ _tmp_12[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "single_subscript_attribute_target"));
+ D(fprintf(stderr, "%*c+ _tmp_13[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "single_subscript_attribute_target"));
_res = single_subscript_attribute_target_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_12[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_13[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "single_subscript_attribute_target"));
}
_res = NULL;
@@ -24516,9 +25529,9 @@ _tmp_12_rule(Parser *p)
return _res;
}
-// _tmp_13: '=' annotated_rhs
+// _tmp_14: '=' annotated_rhs
static void *
-_tmp_13_rule(Parser *p)
+_tmp_14_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24535,7 +25548,7 @@ _tmp_13_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_13[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs"));
+ D(fprintf(stderr, "%*c> _tmp_14[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs"));
Token * _literal;
expr_ty d;
if (
@@ -24544,7 +25557,7 @@ _tmp_13_rule(Parser *p)
(d = annotated_rhs_rule(p)) // annotated_rhs
)
{
- D(fprintf(stderr, "%*c+ _tmp_13[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs"));
+ D(fprintf(stderr, "%*c+ _tmp_14[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs"));
_res = d;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -24554,7 +25567,7 @@ _tmp_13_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_13[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_14[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'=' annotated_rhs"));
}
_res = NULL;
@@ -24563,9 +25576,9 @@ _tmp_13_rule(Parser *p)
return _res;
}
-// _loop1_14: (star_targets '=')
+// _loop1_15: (star_targets '=')
static asdl_seq *
-_loop1_14_rule(Parser *p)
+_loop1_15_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24591,13 +25604,13 @@ _loop1_14_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_14[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')"));
- void *_tmp_224_var;
+ D(fprintf(stderr, "%*c> _loop1_15[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')"));
+ void *_tmp_244_var;
while (
- (_tmp_224_var = _tmp_224_rule(p)) // star_targets '='
+ (_tmp_244_var = _tmp_244_rule(p)) // star_targets '='
)
{
- _res = _tmp_224_var;
+ _res = _tmp_244_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -24614,7 +25627,7 @@ _loop1_14_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_14[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_15[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(star_targets '=')"));
}
if (_n == 0 || p->error_indicator) {
@@ -24636,9 +25649,9 @@ _loop1_14_rule(Parser *p)
return _seq;
}
-// _tmp_15: yield_expr | star_expressions
+// _tmp_16: yield_expr | star_expressions
static void *
-_tmp_15_rule(Parser *p)
+_tmp_16_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24655,18 +25668,18 @@ _tmp_15_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_15[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ D(fprintf(stderr, "%*c> _tmp_16[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
expr_ty yield_expr_var;
if (
(yield_expr_var = yield_expr_rule(p)) // yield_expr
)
{
- D(fprintf(stderr, "%*c+ _tmp_15[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ D(fprintf(stderr, "%*c+ _tmp_16[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
_res = yield_expr_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_15[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_16[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr"));
}
{ // star_expressions
@@ -24674,18 +25687,18 @@ _tmp_15_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_15[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ D(fprintf(stderr, "%*c> _tmp_16[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
expr_ty star_expressions_var;
if (
(star_expressions_var = star_expressions_rule(p)) // star_expressions
)
{
- D(fprintf(stderr, "%*c+ _tmp_15[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ D(fprintf(stderr, "%*c+ _tmp_16[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
_res = star_expressions_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_15[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_16[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions"));
}
_res = NULL;
@@ -24694,9 +25707,9 @@ _tmp_15_rule(Parser *p)
return _res;
}
-// _tmp_16: yield_expr | star_expressions
+// _tmp_17: yield_expr | star_expressions
static void *
-_tmp_16_rule(Parser *p)
+_tmp_17_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24713,18 +25726,18 @@ _tmp_16_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_16[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ D(fprintf(stderr, "%*c> _tmp_17[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
expr_ty yield_expr_var;
if (
(yield_expr_var = yield_expr_rule(p)) // yield_expr
)
{
- D(fprintf(stderr, "%*c+ _tmp_16[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ D(fprintf(stderr, "%*c+ _tmp_17[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
_res = yield_expr_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_16[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_17[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr"));
}
{ // star_expressions
@@ -24732,18 +25745,18 @@ _tmp_16_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_16[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ D(fprintf(stderr, "%*c> _tmp_17[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
expr_ty star_expressions_var;
if (
(star_expressions_var = star_expressions_rule(p)) // star_expressions
)
{
- D(fprintf(stderr, "%*c+ _tmp_16[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ D(fprintf(stderr, "%*c+ _tmp_17[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
_res = star_expressions_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_16[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_17[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions"));
}
_res = NULL;
@@ -24752,9 +25765,9 @@ _tmp_16_rule(Parser *p)
return _res;
}
-// _tmp_17: 'from' expression
+// _tmp_18: 'from' expression
static void *
-_tmp_17_rule(Parser *p)
+_tmp_18_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24771,16 +25784,16 @@ _tmp_17_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_17[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from' expression"));
+ D(fprintf(stderr, "%*c> _tmp_18[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from' expression"));
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 607)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 608)) // token='from'
&&
(z = expression_rule(p)) // expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_17[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from' expression"));
+ D(fprintf(stderr, "%*c+ _tmp_18[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from' expression"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -24790,7 +25803,7 @@ _tmp_17_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_17[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_18[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'from' expression"));
}
_res = NULL;
@@ -24799,9 +25812,9 @@ _tmp_17_rule(Parser *p)
return _res;
}
-// _loop0_19: ',' NAME
+// _loop0_20: ',' NAME
static asdl_seq *
-_loop0_19_rule(Parser *p)
+_loop0_20_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24827,7 +25840,7 @@ _loop0_19_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_19[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' NAME"));
+ D(fprintf(stderr, "%*c> _loop0_20[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' NAME"));
Token * _literal;
expr_ty elem;
while (
@@ -24859,7 +25872,7 @@ _loop0_19_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_19[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_20[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' NAME"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -24876,9 +25889,9 @@ _loop0_19_rule(Parser *p)
return _seq;
}
-// _gather_18: NAME _loop0_19
+// _gather_19: NAME _loop0_20
static asdl_seq *
-_gather_18_rule(Parser *p)
+_gather_19_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24890,27 +25903,27 @@ _gather_18_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // NAME _loop0_19
+ { // NAME _loop0_20
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_18[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME _loop0_19"));
+ D(fprintf(stderr, "%*c> _gather_19[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME _loop0_20"));
expr_ty elem;
asdl_seq * seq;
if (
(elem = _PyPegen_name_token(p)) // NAME
&&
- (seq = _loop0_19_rule(p)) // _loop0_19
+ (seq = _loop0_20_rule(p)) // _loop0_20
)
{
- D(fprintf(stderr, "%*c+ _gather_18[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME _loop0_19"));
+ D(fprintf(stderr, "%*c+ _gather_19[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME _loop0_20"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_18[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME _loop0_19"));
+ D(fprintf(stderr, "%*c%s _gather_19[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME _loop0_20"));
}
_res = NULL;
done:
@@ -24918,9 +25931,9 @@ _gather_18_rule(Parser *p)
return _res;
}
-// _loop0_21: ',' NAME
+// _loop0_22: ',' NAME
static asdl_seq *
-_loop0_21_rule(Parser *p)
+_loop0_22_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -24946,7 +25959,7 @@ _loop0_21_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_21[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' NAME"));
+ D(fprintf(stderr, "%*c> _loop0_22[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' NAME"));
Token * _literal;
expr_ty elem;
while (
@@ -24978,7 +25991,7 @@ _loop0_21_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_21[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_22[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' NAME"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -24995,9 +26008,9 @@ _loop0_21_rule(Parser *p)
return _seq;
}
-// _gather_20: NAME _loop0_21
+// _gather_21: NAME _loop0_22
static asdl_seq *
-_gather_20_rule(Parser *p)
+_gather_21_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25009,27 +26022,27 @@ _gather_20_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // NAME _loop0_21
+ { // NAME _loop0_22
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_20[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME _loop0_21"));
+ D(fprintf(stderr, "%*c> _gather_21[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME _loop0_22"));
expr_ty elem;
asdl_seq * seq;
if (
(elem = _PyPegen_name_token(p)) // NAME
&&
- (seq = _loop0_21_rule(p)) // _loop0_21
+ (seq = _loop0_22_rule(p)) // _loop0_22
)
{
- D(fprintf(stderr, "%*c+ _gather_20[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME _loop0_21"));
+ D(fprintf(stderr, "%*c+ _gather_21[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME _loop0_22"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_20[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME _loop0_21"));
+ D(fprintf(stderr, "%*c%s _gather_21[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME _loop0_22"));
}
_res = NULL;
done:
@@ -25037,9 +26050,9 @@ _gather_20_rule(Parser *p)
return _res;
}
-// _tmp_22: ';' | NEWLINE
+// _tmp_23: ';' | NEWLINE
static void *
-_tmp_22_rule(Parser *p)
+_tmp_23_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25056,18 +26069,18 @@ _tmp_22_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_22[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "';'"));
+ D(fprintf(stderr, "%*c> _tmp_23[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "';'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 13)) // token=';'
)
{
- D(fprintf(stderr, "%*c+ _tmp_22[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "';'"));
+ D(fprintf(stderr, "%*c+ _tmp_23[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "';'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_22[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_23[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "';'"));
}
{ // NEWLINE
@@ -25075,18 +26088,18 @@ _tmp_22_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_22[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
+ D(fprintf(stderr, "%*c> _tmp_23[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
Token * newline_var;
if (
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
{
- D(fprintf(stderr, "%*c+ _tmp_22[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
+ D(fprintf(stderr, "%*c+ _tmp_23[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
_res = newline_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_22[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_23[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE"));
}
_res = NULL;
@@ -25095,9 +26108,9 @@ _tmp_22_rule(Parser *p)
return _res;
}
-// _tmp_23: ',' expression
+// _tmp_24: ',' expression
static void *
-_tmp_23_rule(Parser *p)
+_tmp_24_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25114,7 +26127,7 @@ _tmp_23_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_23[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
+ D(fprintf(stderr, "%*c> _tmp_24[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
Token * _literal;
expr_ty z;
if (
@@ -25123,7 +26136,7 @@ _tmp_23_rule(Parser *p)
(z = expression_rule(p)) // expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_23[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression"));
+ D(fprintf(stderr, "%*c+ _tmp_24[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -25133,7 +26146,7 @@ _tmp_23_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_23[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_24[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression"));
}
_res = NULL;
@@ -25142,9 +26155,9 @@ _tmp_23_rule(Parser *p)
return _res;
}
-// _loop0_24: ('.' | '...')
+// _loop0_25: ('.' | '...')
static asdl_seq *
-_loop0_24_rule(Parser *p)
+_loop0_25_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25170,13 +26183,13 @@ _loop0_24_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_24[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')"));
- void *_tmp_225_var;
+ D(fprintf(stderr, "%*c> _loop0_25[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')"));
+ void *_tmp_245_var;
while (
- (_tmp_225_var = _tmp_225_rule(p)) // '.' | '...'
+ (_tmp_245_var = _tmp_245_rule(p)) // '.' | '...'
)
{
- _res = _tmp_225_var;
+ _res = _tmp_245_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -25193,7 +26206,7 @@ _loop0_24_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_24[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_25[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('.' | '...')"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -25210,9 +26223,9 @@ _loop0_24_rule(Parser *p)
return _seq;
}
-// _loop1_25: ('.' | '...')
+// _loop1_26: ('.' | '...')
static asdl_seq *
-_loop1_25_rule(Parser *p)
+_loop1_26_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25238,13 +26251,13 @@ _loop1_25_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_25[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')"));
- void *_tmp_226_var;
+ D(fprintf(stderr, "%*c> _loop1_26[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')"));
+ void *_tmp_246_var;
while (
- (_tmp_226_var = _tmp_226_rule(p)) // '.' | '...'
+ (_tmp_246_var = _tmp_246_rule(p)) // '.' | '...'
)
{
- _res = _tmp_226_var;
+ _res = _tmp_246_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -25261,7 +26274,7 @@ _loop1_25_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_25[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_26[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('.' | '...')"));
}
if (_n == 0 || p->error_indicator) {
@@ -25283,9 +26296,9 @@ _loop1_25_rule(Parser *p)
return _seq;
}
-// _loop0_27: ',' import_from_as_name
+// _loop0_28: ',' import_from_as_name
static asdl_seq *
-_loop0_27_rule(Parser *p)
+_loop0_28_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25311,7 +26324,7 @@ _loop0_27_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_27[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' import_from_as_name"));
+ D(fprintf(stderr, "%*c> _loop0_28[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' import_from_as_name"));
Token * _literal;
alias_ty elem;
while (
@@ -25343,7 +26356,7 @@ _loop0_27_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_27[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_28[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' import_from_as_name"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -25360,9 +26373,9 @@ _loop0_27_rule(Parser *p)
return _seq;
}
-// _gather_26: import_from_as_name _loop0_27
+// _gather_27: import_from_as_name _loop0_28
static asdl_seq *
-_gather_26_rule(Parser *p)
+_gather_27_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25374,27 +26387,27 @@ _gather_26_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // import_from_as_name _loop0_27
+ { // import_from_as_name _loop0_28
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_26[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_from_as_name _loop0_27"));
+ D(fprintf(stderr, "%*c> _gather_27[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_from_as_name _loop0_28"));
alias_ty elem;
asdl_seq * seq;
if (
(elem = import_from_as_name_rule(p)) // import_from_as_name
&&
- (seq = _loop0_27_rule(p)) // _loop0_27
+ (seq = _loop0_28_rule(p)) // _loop0_28
)
{
- D(fprintf(stderr, "%*c+ _gather_26[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_from_as_name _loop0_27"));
+ D(fprintf(stderr, "%*c+ _gather_27[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_from_as_name _loop0_28"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_26[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "import_from_as_name _loop0_27"));
+ D(fprintf(stderr, "%*c%s _gather_27[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "import_from_as_name _loop0_28"));
}
_res = NULL;
done:
@@ -25402,9 +26415,9 @@ _gather_26_rule(Parser *p)
return _res;
}
-// _tmp_28: 'as' NAME
+// _tmp_29: 'as' NAME
static void *
-_tmp_28_rule(Parser *p)
+_tmp_29_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25421,16 +26434,16 @@ _tmp_28_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_28[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c> _tmp_29[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(z = _PyPegen_name_token(p)) // NAME
)
{
- D(fprintf(stderr, "%*c+ _tmp_28[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c+ _tmp_29[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -25440,7 +26453,7 @@ _tmp_28_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_28[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_29[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME"));
}
_res = NULL;
@@ -25449,9 +26462,9 @@ _tmp_28_rule(Parser *p)
return _res;
}
-// _loop0_30: ',' dotted_as_name
+// _loop0_31: ',' dotted_as_name
static asdl_seq *
-_loop0_30_rule(Parser *p)
+_loop0_31_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25477,7 +26490,7 @@ _loop0_30_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_30[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' dotted_as_name"));
+ D(fprintf(stderr, "%*c> _loop0_31[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' dotted_as_name"));
Token * _literal;
alias_ty elem;
while (
@@ -25509,7 +26522,7 @@ _loop0_30_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_30[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_31[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' dotted_as_name"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -25526,9 +26539,9 @@ _loop0_30_rule(Parser *p)
return _seq;
}
-// _gather_29: dotted_as_name _loop0_30
+// _gather_30: dotted_as_name _loop0_31
static asdl_seq *
-_gather_29_rule(Parser *p)
+_gather_30_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25540,27 +26553,27 @@ _gather_29_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // dotted_as_name _loop0_30
+ { // dotted_as_name _loop0_31
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_29[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dotted_as_name _loop0_30"));
+ D(fprintf(stderr, "%*c> _gather_30[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dotted_as_name _loop0_31"));
alias_ty elem;
asdl_seq * seq;
if (
(elem = dotted_as_name_rule(p)) // dotted_as_name
&&
- (seq = _loop0_30_rule(p)) // _loop0_30
+ (seq = _loop0_31_rule(p)) // _loop0_31
)
{
- D(fprintf(stderr, "%*c+ _gather_29[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_as_name _loop0_30"));
+ D(fprintf(stderr, "%*c+ _gather_30[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_as_name _loop0_31"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_29[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dotted_as_name _loop0_30"));
+ D(fprintf(stderr, "%*c%s _gather_30[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dotted_as_name _loop0_31"));
}
_res = NULL;
done:
@@ -25568,9 +26581,9 @@ _gather_29_rule(Parser *p)
return _res;
}
-// _tmp_31: 'as' NAME
+// _tmp_32: 'as' NAME
static void *
-_tmp_31_rule(Parser *p)
+_tmp_32_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25587,16 +26600,16 @@ _tmp_31_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_31[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c> _tmp_32[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(z = _PyPegen_name_token(p)) // NAME
)
{
- D(fprintf(stderr, "%*c+ _tmp_31[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c+ _tmp_32[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -25606,7 +26619,7 @@ _tmp_31_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_31[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_32[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME"));
}
_res = NULL;
@@ -25615,9 +26628,9 @@ _tmp_31_rule(Parser *p)
return _res;
}
-// _loop1_32: ('@' named_expression NEWLINE)
+// _loop1_33: ('@' named_expression NEWLINE)
static asdl_seq *
-_loop1_32_rule(Parser *p)
+_loop1_33_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25643,13 +26656,13 @@ _loop1_32_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_32[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('@' named_expression NEWLINE)"));
- void *_tmp_227_var;
+ D(fprintf(stderr, "%*c> _loop1_33[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('@' named_expression NEWLINE)"));
+ void *_tmp_247_var;
while (
- (_tmp_227_var = _tmp_227_rule(p)) // '@' named_expression NEWLINE
+ (_tmp_247_var = _tmp_247_rule(p)) // '@' named_expression NEWLINE
)
{
- _res = _tmp_227_var;
+ _res = _tmp_247_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -25666,7 +26679,7 @@ _loop1_32_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_32[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_33[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('@' named_expression NEWLINE)"));
}
if (_n == 0 || p->error_indicator) {
@@ -25688,9 +26701,9 @@ _loop1_32_rule(Parser *p)
return _seq;
}
-// _tmp_33: '(' arguments? ')'
+// _tmp_34: '(' arguments? ')'
static void *
-_tmp_33_rule(Parser *p)
+_tmp_34_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25707,7 +26720,7 @@ _tmp_33_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_33[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
+ D(fprintf(stderr, "%*c> _tmp_34[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
Token * _literal;
Token * _literal_1;
void *z;
@@ -25719,7 +26732,7 @@ _tmp_33_rule(Parser *p)
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_33[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
+ D(fprintf(stderr, "%*c+ _tmp_34[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -25729,7 +26742,7 @@ _tmp_33_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_33[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_34[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' arguments? ')'"));
}
_res = NULL;
@@ -25738,9 +26751,9 @@ _tmp_33_rule(Parser *p)
return _res;
}
-// _tmp_34: '->' expression
+// _tmp_35: '->' expression
static void *
-_tmp_34_rule(Parser *p)
+_tmp_35_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25757,7 +26770,7 @@ _tmp_34_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_34[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression"));
+ D(fprintf(stderr, "%*c> _tmp_35[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression"));
Token * _literal;
expr_ty z;
if (
@@ -25766,7 +26779,7 @@ _tmp_34_rule(Parser *p)
(z = expression_rule(p)) // expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_34[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression"));
+ D(fprintf(stderr, "%*c+ _tmp_35[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -25776,7 +26789,7 @@ _tmp_34_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_34[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_35[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'->' expression"));
}
_res = NULL;
@@ -25785,9 +26798,9 @@ _tmp_34_rule(Parser *p)
return _res;
}
-// _tmp_35: '->' expression
+// _tmp_36: '->' expression
static void *
-_tmp_35_rule(Parser *p)
+_tmp_36_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25804,7 +26817,7 @@ _tmp_35_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_35[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression"));
+ D(fprintf(stderr, "%*c> _tmp_36[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression"));
Token * _literal;
expr_ty z;
if (
@@ -25813,7 +26826,7 @@ _tmp_35_rule(Parser *p)
(z = expression_rule(p)) // expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_35[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression"));
+ D(fprintf(stderr, "%*c+ _tmp_36[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -25823,7 +26836,7 @@ _tmp_35_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_35[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_36[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'->' expression"));
}
_res = NULL;
@@ -25832,9 +26845,9 @@ _tmp_35_rule(Parser *p)
return _res;
}
-// _loop0_36: param_no_default
+// _loop0_37: param_no_default
static asdl_seq *
-_loop0_36_rule(Parser *p)
+_loop0_37_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25860,7 +26873,7 @@ _loop0_36_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_36[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _loop0_37[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
while (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
@@ -25883,7 +26896,7 @@ _loop0_36_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_36[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_37[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -25900,9 +26913,9 @@ _loop0_36_rule(Parser *p)
return _seq;
}
-// _loop0_37: param_with_default
+// _loop0_38: param_with_default
static asdl_seq *
-_loop0_37_rule(Parser *p)
+_loop0_38_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25928,7 +26941,7 @@ _loop0_37_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_37[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
+ D(fprintf(stderr, "%*c> _loop0_38[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
NameDefaultPair* param_with_default_var;
while (
(param_with_default_var = param_with_default_rule(p)) // param_with_default
@@ -25951,7 +26964,7 @@ _loop0_37_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_37[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_38[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -25968,9 +26981,9 @@ _loop0_37_rule(Parser *p)
return _seq;
}
-// _loop0_38: param_with_default
+// _loop0_39: param_with_default
static asdl_seq *
-_loop0_38_rule(Parser *p)
+_loop0_39_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -25996,7 +27009,7 @@ _loop0_38_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_38[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
+ D(fprintf(stderr, "%*c> _loop0_39[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
NameDefaultPair* param_with_default_var;
while (
(param_with_default_var = param_with_default_rule(p)) // param_with_default
@@ -26019,7 +27032,7 @@ _loop0_38_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_38[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_39[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -26036,9 +27049,9 @@ _loop0_38_rule(Parser *p)
return _seq;
}
-// _loop1_39: param_no_default
+// _loop1_40: param_no_default
static asdl_seq *
-_loop1_39_rule(Parser *p)
+_loop1_40_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26064,7 +27077,7 @@ _loop1_39_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_39[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _loop1_40[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
while (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
@@ -26087,7 +27100,7 @@ _loop1_39_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_39[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_40[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -26109,9 +27122,9 @@ _loop1_39_rule(Parser *p)
return _seq;
}
-// _loop0_40: param_with_default
+// _loop0_41: param_with_default
static asdl_seq *
-_loop0_40_rule(Parser *p)
+_loop0_41_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26137,7 +27150,7 @@ _loop0_40_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_40[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
+ D(fprintf(stderr, "%*c> _loop0_41[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
NameDefaultPair* param_with_default_var;
while (
(param_with_default_var = param_with_default_rule(p)) // param_with_default
@@ -26160,7 +27173,7 @@ _loop0_40_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_40[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_41[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -26177,9 +27190,9 @@ _loop0_40_rule(Parser *p)
return _seq;
}
-// _loop1_41: param_with_default
+// _loop1_42: param_with_default
static asdl_seq *
-_loop1_41_rule(Parser *p)
+_loop1_42_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26205,7 +27218,7 @@ _loop1_41_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_41[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
+ D(fprintf(stderr, "%*c> _loop1_42[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
NameDefaultPair* param_with_default_var;
while (
(param_with_default_var = param_with_default_rule(p)) // param_with_default
@@ -26228,7 +27241,7 @@ _loop1_41_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_41[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_42[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -26250,9 +27263,9 @@ _loop1_41_rule(Parser *p)
return _seq;
}
-// _loop1_42: param_no_default
+// _loop1_43: param_no_default
static asdl_seq *
-_loop1_42_rule(Parser *p)
+_loop1_43_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26278,7 +27291,7 @@ _loop1_42_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_42[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _loop1_43[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
while (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
@@ -26301,7 +27314,7 @@ _loop1_42_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_42[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_43[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -26323,9 +27336,9 @@ _loop1_42_rule(Parser *p)
return _seq;
}
-// _loop1_43: param_no_default
+// _loop1_44: param_no_default
static asdl_seq *
-_loop1_43_rule(Parser *p)
+_loop1_44_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26351,7 +27364,7 @@ _loop1_43_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_43[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _loop1_44[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
while (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
@@ -26374,7 +27387,7 @@ _loop1_43_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_43[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_44[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -26396,9 +27409,9 @@ _loop1_43_rule(Parser *p)
return _seq;
}
-// _loop0_44: param_no_default
+// _loop0_45: param_no_default
static asdl_seq *
-_loop0_44_rule(Parser *p)
+_loop0_45_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26424,7 +27437,7 @@ _loop0_44_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_44[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _loop0_45[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
while (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
@@ -26447,7 +27460,7 @@ _loop0_44_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_44[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_45[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -26464,9 +27477,9 @@ _loop0_44_rule(Parser *p)
return _seq;
}
-// _loop1_45: param_with_default
+// _loop1_46: param_with_default
static asdl_seq *
-_loop1_45_rule(Parser *p)
+_loop1_46_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26492,7 +27505,7 @@ _loop1_45_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_45[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
+ D(fprintf(stderr, "%*c> _loop1_46[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
NameDefaultPair* param_with_default_var;
while (
(param_with_default_var = param_with_default_rule(p)) // param_with_default
@@ -26515,7 +27528,7 @@ _loop1_45_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_45[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_46[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -26537,9 +27550,9 @@ _loop1_45_rule(Parser *p)
return _seq;
}
-// _loop0_46: param_no_default
+// _loop0_47: param_no_default
static asdl_seq *
-_loop0_46_rule(Parser *p)
+_loop0_47_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26565,7 +27578,7 @@ _loop0_46_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_46[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _loop0_47[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
while (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
@@ -26588,7 +27601,7 @@ _loop0_46_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_46[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_47[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -26605,9 +27618,9 @@ _loop0_46_rule(Parser *p)
return _seq;
}
-// _loop1_47: param_with_default
+// _loop1_48: param_with_default
static asdl_seq *
-_loop1_47_rule(Parser *p)
+_loop1_48_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26633,7 +27646,7 @@ _loop1_47_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_47[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
+ D(fprintf(stderr, "%*c> _loop1_48[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
NameDefaultPair* param_with_default_var;
while (
(param_with_default_var = param_with_default_rule(p)) // param_with_default
@@ -26656,7 +27669,7 @@ _loop1_47_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_47[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_48[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -26678,9 +27691,9 @@ _loop1_47_rule(Parser *p)
return _seq;
}
-// _loop0_48: param_maybe_default
+// _loop0_49: param_maybe_default
static asdl_seq *
-_loop0_48_rule(Parser *p)
+_loop0_49_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26706,7 +27719,7 @@ _loop0_48_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_48[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop0_49[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
NameDefaultPair* param_maybe_default_var;
while (
(param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default
@@ -26729,7 +27742,7 @@ _loop0_48_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_48[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_49[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -26746,9 +27759,9 @@ _loop0_48_rule(Parser *p)
return _seq;
}
-// _loop0_49: param_maybe_default
+// _loop0_50: param_maybe_default
static asdl_seq *
-_loop0_49_rule(Parser *p)
+_loop0_50_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26774,7 +27787,7 @@ _loop0_49_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_49[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop0_50[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
NameDefaultPair* param_maybe_default_var;
while (
(param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default
@@ -26797,7 +27810,7 @@ _loop0_49_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_49[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_50[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -26814,9 +27827,9 @@ _loop0_49_rule(Parser *p)
return _seq;
}
-// _loop1_50: param_maybe_default
+// _loop1_51: param_maybe_default
static asdl_seq *
-_loop1_50_rule(Parser *p)
+_loop1_51_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26842,7 +27855,7 @@ _loop1_50_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_50[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop1_51[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
NameDefaultPair* param_maybe_default_var;
while (
(param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default
@@ -26865,7 +27878,7 @@ _loop1_50_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_50[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_51[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -26887,9 +27900,9 @@ _loop1_50_rule(Parser *p)
return _seq;
}
-// _loop0_52: ',' with_item
+// _loop0_53: ',' with_item
static asdl_seq *
-_loop0_52_rule(Parser *p)
+_loop0_53_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26915,7 +27928,7 @@ _loop0_52_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_52[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item"));
+ D(fprintf(stderr, "%*c> _loop0_53[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item"));
Token * _literal;
withitem_ty elem;
while (
@@ -26947,7 +27960,7 @@ _loop0_52_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_52[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_53[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' with_item"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -26964,9 +27977,9 @@ _loop0_52_rule(Parser *p)
return _seq;
}
-// _gather_51: with_item _loop0_52
+// _gather_52: with_item _loop0_53
static asdl_seq *
-_gather_51_rule(Parser *p)
+_gather_52_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -26978,27 +27991,27 @@ _gather_51_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // with_item _loop0_52
+ { // with_item _loop0_53
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_51[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_52"));
+ D(fprintf(stderr, "%*c> _gather_52[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_53"));
withitem_ty elem;
asdl_seq * seq;
if (
(elem = with_item_rule(p)) // with_item
&&
- (seq = _loop0_52_rule(p)) // _loop0_52
+ (seq = _loop0_53_rule(p)) // _loop0_53
)
{
- D(fprintf(stderr, "%*c+ _gather_51[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_52"));
+ D(fprintf(stderr, "%*c+ _gather_52[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_53"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_51[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_52"));
+ D(fprintf(stderr, "%*c%s _gather_52[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_53"));
}
_res = NULL;
done:
@@ -27006,9 +28019,9 @@ _gather_51_rule(Parser *p)
return _res;
}
-// _loop0_54: ',' with_item
+// _loop0_55: ',' with_item
static asdl_seq *
-_loop0_54_rule(Parser *p)
+_loop0_55_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27034,7 +28047,7 @@ _loop0_54_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_54[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item"));
+ D(fprintf(stderr, "%*c> _loop0_55[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item"));
Token * _literal;
withitem_ty elem;
while (
@@ -27066,7 +28079,7 @@ _loop0_54_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_54[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_55[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' with_item"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -27083,9 +28096,9 @@ _loop0_54_rule(Parser *p)
return _seq;
}
-// _gather_53: with_item _loop0_54
+// _gather_54: with_item _loop0_55
static asdl_seq *
-_gather_53_rule(Parser *p)
+_gather_54_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27097,27 +28110,27 @@ _gather_53_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // with_item _loop0_54
+ { // with_item _loop0_55
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_53[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_54"));
+ D(fprintf(stderr, "%*c> _gather_54[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_55"));
withitem_ty elem;
asdl_seq * seq;
if (
(elem = with_item_rule(p)) // with_item
&&
- (seq = _loop0_54_rule(p)) // _loop0_54
+ (seq = _loop0_55_rule(p)) // _loop0_55
)
{
- D(fprintf(stderr, "%*c+ _gather_53[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_54"));
+ D(fprintf(stderr, "%*c+ _gather_54[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_55"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_53[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_54"));
+ D(fprintf(stderr, "%*c%s _gather_54[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_55"));
}
_res = NULL;
done:
@@ -27125,9 +28138,9 @@ _gather_53_rule(Parser *p)
return _res;
}
-// _loop0_56: ',' with_item
+// _loop0_57: ',' with_item
static asdl_seq *
-_loop0_56_rule(Parser *p)
+_loop0_57_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27153,7 +28166,7 @@ _loop0_56_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_56[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item"));
+ D(fprintf(stderr, "%*c> _loop0_57[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item"));
Token * _literal;
withitem_ty elem;
while (
@@ -27185,7 +28198,7 @@ _loop0_56_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_56[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_57[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' with_item"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -27202,9 +28215,9 @@ _loop0_56_rule(Parser *p)
return _seq;
}
-// _gather_55: with_item _loop0_56
+// _gather_56: with_item _loop0_57
static asdl_seq *
-_gather_55_rule(Parser *p)
+_gather_56_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27216,27 +28229,27 @@ _gather_55_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // with_item _loop0_56
+ { // with_item _loop0_57
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_55[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_56"));
+ D(fprintf(stderr, "%*c> _gather_56[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_57"));
withitem_ty elem;
asdl_seq * seq;
if (
(elem = with_item_rule(p)) // with_item
&&
- (seq = _loop0_56_rule(p)) // _loop0_56
+ (seq = _loop0_57_rule(p)) // _loop0_57
)
{
- D(fprintf(stderr, "%*c+ _gather_55[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_56"));
+ D(fprintf(stderr, "%*c+ _gather_56[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_57"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_55[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_56"));
+ D(fprintf(stderr, "%*c%s _gather_56[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_57"));
}
_res = NULL;
done:
@@ -27244,9 +28257,9 @@ _gather_55_rule(Parser *p)
return _res;
}
-// _loop0_58: ',' with_item
+// _loop0_59: ',' with_item
static asdl_seq *
-_loop0_58_rule(Parser *p)
+_loop0_59_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27272,7 +28285,7 @@ _loop0_58_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_58[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item"));
+ D(fprintf(stderr, "%*c> _loop0_59[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item"));
Token * _literal;
withitem_ty elem;
while (
@@ -27304,7 +28317,7 @@ _loop0_58_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_58[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_59[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' with_item"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -27321,9 +28334,9 @@ _loop0_58_rule(Parser *p)
return _seq;
}
-// _gather_57: with_item _loop0_58
+// _gather_58: with_item _loop0_59
static asdl_seq *
-_gather_57_rule(Parser *p)
+_gather_58_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27335,27 +28348,27 @@ _gather_57_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // with_item _loop0_58
+ { // with_item _loop0_59
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_57[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_58"));
+ D(fprintf(stderr, "%*c> _gather_58[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_59"));
withitem_ty elem;
asdl_seq * seq;
if (
(elem = with_item_rule(p)) // with_item
&&
- (seq = _loop0_58_rule(p)) // _loop0_58
+ (seq = _loop0_59_rule(p)) // _loop0_59
)
{
- D(fprintf(stderr, "%*c+ _gather_57[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_58"));
+ D(fprintf(stderr, "%*c+ _gather_58[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_59"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_57[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_58"));
+ D(fprintf(stderr, "%*c%s _gather_58[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_59"));
}
_res = NULL;
done:
@@ -27363,9 +28376,9 @@ _gather_57_rule(Parser *p)
return _res;
}
-// _tmp_59: ',' | ')' | ':'
+// _tmp_60: ',' | ')' | ':'
static void *
-_tmp_59_rule(Parser *p)
+_tmp_60_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27382,18 +28395,18 @@ _tmp_59_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_59[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_60[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_59[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_60[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_59[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_60[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
{ // ')'
@@ -27401,18 +28414,18 @@ _tmp_59_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_59[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c> _tmp_60[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_59[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c+ _tmp_60[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_59[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_60[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'"));
}
{ // ':'
@@ -27420,18 +28433,18 @@ _tmp_59_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_59[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c> _tmp_60[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
)
{
- D(fprintf(stderr, "%*c+ _tmp_59[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c+ _tmp_60[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_59[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_60[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
}
_res = NULL;
@@ -27440,9 +28453,9 @@ _tmp_59_rule(Parser *p)
return _res;
}
-// _loop1_60: except_block
+// _loop1_61: except_block
static asdl_seq *
-_loop1_60_rule(Parser *p)
+_loop1_61_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27468,7 +28481,7 @@ _loop1_60_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_60[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block"));
+ D(fprintf(stderr, "%*c> _loop1_61[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block"));
excepthandler_ty except_block_var;
while (
(except_block_var = except_block_rule(p)) // except_block
@@ -27491,7 +28504,7 @@ _loop1_60_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_60[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_61[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "except_block"));
}
if (_n == 0 || p->error_indicator) {
@@ -27513,9 +28526,9 @@ _loop1_60_rule(Parser *p)
return _seq;
}
-// _loop1_61: except_star_block
+// _loop1_62: except_star_block
static asdl_seq *
-_loop1_61_rule(Parser *p)
+_loop1_62_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27541,7 +28554,7 @@ _loop1_61_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_61[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_star_block"));
+ D(fprintf(stderr, "%*c> _loop1_62[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_star_block"));
excepthandler_ty except_star_block_var;
while (
(except_star_block_var = except_star_block_rule(p)) // except_star_block
@@ -27564,7 +28577,7 @@ _loop1_61_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_61[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_62[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "except_star_block"));
}
if (_n == 0 || p->error_indicator) {
@@ -27586,9 +28599,9 @@ _loop1_61_rule(Parser *p)
return _seq;
}
-// _tmp_62: 'as' NAME
+// _tmp_63: 'as' NAME
static void *
-_tmp_62_rule(Parser *p)
+_tmp_63_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27605,16 +28618,16 @@ _tmp_62_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_62[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c> _tmp_63[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(z = _PyPegen_name_token(p)) // NAME
)
{
- D(fprintf(stderr, "%*c+ _tmp_62[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c+ _tmp_63[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -27624,7 +28637,7 @@ _tmp_62_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_62[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_63[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME"));
}
_res = NULL;
@@ -27633,9 +28646,9 @@ _tmp_62_rule(Parser *p)
return _res;
}
-// _tmp_63: 'as' NAME
+// _tmp_64: 'as' NAME
static void *
-_tmp_63_rule(Parser *p)
+_tmp_64_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27652,16 +28665,16 @@ _tmp_63_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_63[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c> _tmp_64[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(z = _PyPegen_name_token(p)) // NAME
)
{
- D(fprintf(stderr, "%*c+ _tmp_63[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c+ _tmp_64[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -27671,7 +28684,7 @@ _tmp_63_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_63[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_64[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME"));
}
_res = NULL;
@@ -27680,9 +28693,9 @@ _tmp_63_rule(Parser *p)
return _res;
}
-// _loop1_64: case_block
+// _loop1_65: case_block
static asdl_seq *
-_loop1_64_rule(Parser *p)
+_loop1_65_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27708,7 +28721,7 @@ _loop1_64_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_64[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "case_block"));
+ D(fprintf(stderr, "%*c> _loop1_65[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "case_block"));
match_case_ty case_block_var;
while (
(case_block_var = case_block_rule(p)) // case_block
@@ -27731,7 +28744,7 @@ _loop1_64_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_64[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_65[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "case_block"));
}
if (_n == 0 || p->error_indicator) {
@@ -27753,9 +28766,9 @@ _loop1_64_rule(Parser *p)
return _seq;
}
-// _loop0_66: '|' closed_pattern
+// _loop0_67: '|' closed_pattern
static asdl_seq *
-_loop0_66_rule(Parser *p)
+_loop0_67_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27781,7 +28794,7 @@ _loop0_66_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_66[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'|' closed_pattern"));
+ D(fprintf(stderr, "%*c> _loop0_67[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'|' closed_pattern"));
Token * _literal;
pattern_ty elem;
while (
@@ -27813,7 +28826,7 @@ _loop0_66_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_66[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_67[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'|' closed_pattern"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -27830,9 +28843,9 @@ _loop0_66_rule(Parser *p)
return _seq;
}
-// _gather_65: closed_pattern _loop0_66
+// _gather_66: closed_pattern _loop0_67
static asdl_seq *
-_gather_65_rule(Parser *p)
+_gather_66_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -27844,85 +28857,27 @@ _gather_65_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // closed_pattern _loop0_66
+ { // closed_pattern _loop0_67
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_65[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "closed_pattern _loop0_66"));
+ D(fprintf(stderr, "%*c> _gather_66[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "closed_pattern _loop0_67"));
pattern_ty elem;
asdl_seq * seq;
if (
(elem = closed_pattern_rule(p)) // closed_pattern
&&
- (seq = _loop0_66_rule(p)) // _loop0_66
+ (seq = _loop0_67_rule(p)) // _loop0_67
)
{
- D(fprintf(stderr, "%*c+ _gather_65[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "closed_pattern _loop0_66"));
+ D(fprintf(stderr, "%*c+ _gather_66[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "closed_pattern _loop0_67"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_65[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "closed_pattern _loop0_66"));
- }
- _res = NULL;
- done:
- p->level--;
- return _res;
-}
-
-// _tmp_67: '+' | '-'
-static void *
-_tmp_67_rule(Parser *p)
-{
- if (p->level++ == MAXSTACK) {
- p->error_indicator = 1;
- PyErr_NoMemory();
- }
- if (p->error_indicator) {
- p->level--;
- return NULL;
- }
- void * _res = NULL;
- int _mark = p->mark;
- { // '+'
- if (p->error_indicator) {
- p->level--;
- return NULL;
- }
- D(fprintf(stderr, "%*c> _tmp_67[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'+'"));
- Token * _literal;
- if (
- (_literal = _PyPegen_expect_token(p, 14)) // token='+'
- )
- {
- D(fprintf(stderr, "%*c+ _tmp_67[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'+'"));
- _res = _literal;
- goto done;
- }
- p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_67[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'+'"));
- }
- { // '-'
- if (p->error_indicator) {
- p->level--;
- return NULL;
- }
- D(fprintf(stderr, "%*c> _tmp_67[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'-'"));
- Token * _literal;
- if (
- (_literal = _PyPegen_expect_token(p, 15)) // token='-'
- )
- {
- D(fprintf(stderr, "%*c+ _tmp_67[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'-'"));
- _res = _literal;
- goto done;
- }
- p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_67[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'-'"));
+ D(fprintf(stderr, "%*c%s _gather_66[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "closed_pattern _loop0_67"));
}
_res = NULL;
done:
@@ -27988,7 +28943,7 @@ _tmp_68_rule(Parser *p)
return _res;
}
-// _tmp_69: '.' | '(' | '='
+// _tmp_69: '+' | '-'
static void *
_tmp_69_rule(Parser *p)
{
@@ -28002,62 +28957,43 @@ _tmp_69_rule(Parser *p)
}
void * _res = NULL;
int _mark = p->mark;
- { // '.'
- if (p->error_indicator) {
- p->level--;
- return NULL;
- }
- D(fprintf(stderr, "%*c> _tmp_69[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'"));
- Token * _literal;
- if (
- (_literal = _PyPegen_expect_token(p, 23)) // token='.'
- )
- {
- D(fprintf(stderr, "%*c+ _tmp_69[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'"));
- _res = _literal;
- goto done;
- }
- p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_69[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'"));
- }
- { // '('
+ { // '+'
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_69[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('"));
+ D(fprintf(stderr, "%*c> _tmp_69[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'+'"));
Token * _literal;
if (
- (_literal = _PyPegen_expect_token(p, 7)) // token='('
+ (_literal = _PyPegen_expect_token(p, 14)) // token='+'
)
{
- D(fprintf(stderr, "%*c+ _tmp_69[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('"));
+ D(fprintf(stderr, "%*c+ _tmp_69[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'+'"));
_res = _literal;
goto done;
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s _tmp_69[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'('"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'+'"));
}
- { // '='
+ { // '-'
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_69[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='"));
+ D(fprintf(stderr, "%*c> _tmp_69[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'-'"));
Token * _literal;
if (
- (_literal = _PyPegen_expect_token(p, 22)) // token='='
+ (_literal = _PyPegen_expect_token(p, 15)) // token='-'
)
{
- D(fprintf(stderr, "%*c+ _tmp_69[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='"));
+ D(fprintf(stderr, "%*c+ _tmp_69[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'-'"));
_res = _literal;
goto done;
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s _tmp_69[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'='"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'-'"));
}
_res = NULL;
done:
@@ -28142,9 +29078,86 @@ _tmp_70_rule(Parser *p)
return _res;
}
-// _loop0_72: ',' maybe_star_pattern
+// _tmp_71: '.' | '(' | '='
+static void *
+_tmp_71_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // '.'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_71[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 23)) // token='.'
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_71[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_71[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'"));
+ }
+ { // '('
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_71[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 7)) // token='('
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_71[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_71[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'('"));
+ }
+ { // '='
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_71[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 22)) // token='='
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_71[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_71[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'='"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _loop0_73: ',' maybe_star_pattern
static asdl_seq *
-_loop0_72_rule(Parser *p)
+_loop0_73_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28170,7 +29183,7 @@ _loop0_72_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_72[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' maybe_star_pattern"));
+ D(fprintf(stderr, "%*c> _loop0_73[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' maybe_star_pattern"));
Token * _literal;
pattern_ty elem;
while (
@@ -28202,7 +29215,7 @@ _loop0_72_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_72[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_73[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' maybe_star_pattern"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -28219,9 +29232,9 @@ _loop0_72_rule(Parser *p)
return _seq;
}
-// _gather_71: maybe_star_pattern _loop0_72
+// _gather_72: maybe_star_pattern _loop0_73
static asdl_seq *
-_gather_71_rule(Parser *p)
+_gather_72_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28233,27 +29246,27 @@ _gather_71_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // maybe_star_pattern _loop0_72
+ { // maybe_star_pattern _loop0_73
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_71[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "maybe_star_pattern _loop0_72"));
+ D(fprintf(stderr, "%*c> _gather_72[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "maybe_star_pattern _loop0_73"));
pattern_ty elem;
asdl_seq * seq;
if (
(elem = maybe_star_pattern_rule(p)) // maybe_star_pattern
&&
- (seq = _loop0_72_rule(p)) // _loop0_72
+ (seq = _loop0_73_rule(p)) // _loop0_73
)
{
- D(fprintf(stderr, "%*c+ _gather_71[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "maybe_star_pattern _loop0_72"));
+ D(fprintf(stderr, "%*c+ _gather_72[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "maybe_star_pattern _loop0_73"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_71[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "maybe_star_pattern _loop0_72"));
+ D(fprintf(stderr, "%*c%s _gather_72[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "maybe_star_pattern _loop0_73"));
}
_res = NULL;
done:
@@ -28261,9 +29274,9 @@ _gather_71_rule(Parser *p)
return _res;
}
-// _loop0_74: ',' key_value_pattern
+// _loop0_75: ',' key_value_pattern
static asdl_seq *
-_loop0_74_rule(Parser *p)
+_loop0_75_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28289,7 +29302,7 @@ _loop0_74_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_74[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' key_value_pattern"));
+ D(fprintf(stderr, "%*c> _loop0_75[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' key_value_pattern"));
Token * _literal;
KeyPatternPair* elem;
while (
@@ -28321,7 +29334,7 @@ _loop0_74_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_74[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_75[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' key_value_pattern"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -28338,9 +29351,9 @@ _loop0_74_rule(Parser *p)
return _seq;
}
-// _gather_73: key_value_pattern _loop0_74
+// _gather_74: key_value_pattern _loop0_75
static asdl_seq *
-_gather_73_rule(Parser *p)
+_gather_74_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28352,27 +29365,27 @@ _gather_73_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // key_value_pattern _loop0_74
+ { // key_value_pattern _loop0_75
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_73[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "key_value_pattern _loop0_74"));
+ D(fprintf(stderr, "%*c> _gather_74[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "key_value_pattern _loop0_75"));
KeyPatternPair* elem;
asdl_seq * seq;
if (
(elem = key_value_pattern_rule(p)) // key_value_pattern
&&
- (seq = _loop0_74_rule(p)) // _loop0_74
+ (seq = _loop0_75_rule(p)) // _loop0_75
)
{
- D(fprintf(stderr, "%*c+ _gather_73[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "key_value_pattern _loop0_74"));
+ D(fprintf(stderr, "%*c+ _gather_74[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "key_value_pattern _loop0_75"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_73[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "key_value_pattern _loop0_74"));
+ D(fprintf(stderr, "%*c%s _gather_74[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "key_value_pattern _loop0_75"));
}
_res = NULL;
done:
@@ -28380,9 +29393,9 @@ _gather_73_rule(Parser *p)
return _res;
}
-// _tmp_75: literal_expr | attr
+// _tmp_76: literal_expr | attr
static void *
-_tmp_75_rule(Parser *p)
+_tmp_76_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28399,18 +29412,18 @@ _tmp_75_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_75[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "literal_expr"));
+ D(fprintf(stderr, "%*c> _tmp_76[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "literal_expr"));
expr_ty literal_expr_var;
if (
(literal_expr_var = literal_expr_rule(p)) // literal_expr
)
{
- D(fprintf(stderr, "%*c+ _tmp_75[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "literal_expr"));
+ D(fprintf(stderr, "%*c+ _tmp_76[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "literal_expr"));
_res = literal_expr_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_75[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_76[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "literal_expr"));
}
{ // attr
@@ -28418,18 +29431,18 @@ _tmp_75_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_75[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "attr"));
+ D(fprintf(stderr, "%*c> _tmp_76[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "attr"));
expr_ty attr_var;
if (
(attr_var = attr_rule(p)) // attr
)
{
- D(fprintf(stderr, "%*c+ _tmp_75[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "attr"));
+ D(fprintf(stderr, "%*c+ _tmp_76[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "attr"));
_res = attr_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_75[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_76[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "attr"));
}
_res = NULL;
@@ -28438,9 +29451,9 @@ _tmp_75_rule(Parser *p)
return _res;
}
-// _loop0_77: ',' pattern
+// _loop0_78: ',' pattern
static asdl_seq *
-_loop0_77_rule(Parser *p)
+_loop0_78_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28466,7 +29479,7 @@ _loop0_77_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_77[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' pattern"));
+ D(fprintf(stderr, "%*c> _loop0_78[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' pattern"));
Token * _literal;
pattern_ty elem;
while (
@@ -28498,7 +29511,7 @@ _loop0_77_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_77[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_78[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' pattern"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -28515,9 +29528,9 @@ _loop0_77_rule(Parser *p)
return _seq;
}
-// _gather_76: pattern _loop0_77
+// _gather_77: pattern _loop0_78
static asdl_seq *
-_gather_76_rule(Parser *p)
+_gather_77_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28529,27 +29542,27 @@ _gather_76_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // pattern _loop0_77
+ { // pattern _loop0_78
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_76[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "pattern _loop0_77"));
+ D(fprintf(stderr, "%*c> _gather_77[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "pattern _loop0_78"));
pattern_ty elem;
asdl_seq * seq;
if (
(elem = pattern_rule(p)) // pattern
&&
- (seq = _loop0_77_rule(p)) // _loop0_77
+ (seq = _loop0_78_rule(p)) // _loop0_78
)
{
- D(fprintf(stderr, "%*c+ _gather_76[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "pattern _loop0_77"));
+ D(fprintf(stderr, "%*c+ _gather_77[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "pattern _loop0_78"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_76[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "pattern _loop0_77"));
+ D(fprintf(stderr, "%*c%s _gather_77[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "pattern _loop0_78"));
}
_res = NULL;
done:
@@ -28557,9 +29570,9 @@ _gather_76_rule(Parser *p)
return _res;
}
-// _loop0_79: ',' keyword_pattern
+// _loop0_80: ',' keyword_pattern
static asdl_seq *
-_loop0_79_rule(Parser *p)
+_loop0_80_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28585,7 +29598,7 @@ _loop0_79_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_79[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' keyword_pattern"));
+ D(fprintf(stderr, "%*c> _loop0_80[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' keyword_pattern"));
Token * _literal;
KeyPatternPair* elem;
while (
@@ -28617,7 +29630,7 @@ _loop0_79_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_79[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_80[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' keyword_pattern"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -28634,9 +29647,9 @@ _loop0_79_rule(Parser *p)
return _seq;
}
-// _gather_78: keyword_pattern _loop0_79
+// _gather_79: keyword_pattern _loop0_80
static asdl_seq *
-_gather_78_rule(Parser *p)
+_gather_79_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28648,27 +29661,27 @@ _gather_78_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // keyword_pattern _loop0_79
+ { // keyword_pattern _loop0_80
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_78[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "keyword_pattern _loop0_79"));
+ D(fprintf(stderr, "%*c> _gather_79[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "keyword_pattern _loop0_80"));
KeyPatternPair* elem;
asdl_seq * seq;
if (
(elem = keyword_pattern_rule(p)) // keyword_pattern
&&
- (seq = _loop0_79_rule(p)) // _loop0_79
+ (seq = _loop0_80_rule(p)) // _loop0_80
)
{
- D(fprintf(stderr, "%*c+ _gather_78[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "keyword_pattern _loop0_79"));
+ D(fprintf(stderr, "%*c+ _gather_79[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "keyword_pattern _loop0_80"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_78[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "keyword_pattern _loop0_79"));
+ D(fprintf(stderr, "%*c%s _gather_79[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "keyword_pattern _loop0_80"));
}
_res = NULL;
done:
@@ -28676,9 +29689,9 @@ _gather_78_rule(Parser *p)
return _res;
}
-// _loop1_80: (',' expression)
+// _loop1_81: (',' expression)
static asdl_seq *
-_loop1_80_rule(Parser *p)
+_loop1_81_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28704,13 +29717,13 @@ _loop1_80_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_80[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' expression)"));
- void *_tmp_228_var;
+ D(fprintf(stderr, "%*c> _loop1_81[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' expression)"));
+ void *_tmp_248_var;
while (
- (_tmp_228_var = _tmp_228_rule(p)) // ',' expression
+ (_tmp_248_var = _tmp_248_rule(p)) // ',' expression
)
{
- _res = _tmp_228_var;
+ _res = _tmp_248_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -28727,7 +29740,7 @@ _loop1_80_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_80[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_81[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(',' expression)"));
}
if (_n == 0 || p->error_indicator) {
@@ -28749,9 +29762,9 @@ _loop1_80_rule(Parser *p)
return _seq;
}
-// _loop1_81: (',' star_expression)
+// _loop1_82: (',' star_expression)
static asdl_seq *
-_loop1_81_rule(Parser *p)
+_loop1_82_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28777,13 +29790,13 @@ _loop1_81_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_81[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_expression)"));
- void *_tmp_229_var;
+ D(fprintf(stderr, "%*c> _loop1_82[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_expression)"));
+ void *_tmp_249_var;
while (
- (_tmp_229_var = _tmp_229_rule(p)) // ',' star_expression
+ (_tmp_249_var = _tmp_249_rule(p)) // ',' star_expression
)
{
- _res = _tmp_229_var;
+ _res = _tmp_249_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -28800,7 +29813,7 @@ _loop1_81_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_81[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_82[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(',' star_expression)"));
}
if (_n == 0 || p->error_indicator) {
@@ -28822,9 +29835,9 @@ _loop1_81_rule(Parser *p)
return _seq;
}
-// _loop0_83: ',' star_named_expression
+// _loop0_84: ',' star_named_expression
static asdl_seq *
-_loop0_83_rule(Parser *p)
+_loop0_84_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28850,7 +29863,7 @@ _loop0_83_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_83[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_named_expression"));
+ D(fprintf(stderr, "%*c> _loop0_84[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_named_expression"));
Token * _literal;
expr_ty elem;
while (
@@ -28882,7 +29895,7 @@ _loop0_83_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_83[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_84[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_named_expression"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -28899,9 +29912,9 @@ _loop0_83_rule(Parser *p)
return _seq;
}
-// _gather_82: star_named_expression _loop0_83
+// _gather_83: star_named_expression _loop0_84
static asdl_seq *
-_gather_82_rule(Parser *p)
+_gather_83_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28913,27 +29926,27 @@ _gather_82_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // star_named_expression _loop0_83
+ { // star_named_expression _loop0_84
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_82[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression _loop0_83"));
+ D(fprintf(stderr, "%*c> _gather_83[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression _loop0_84"));
expr_ty elem;
asdl_seq * seq;
if (
(elem = star_named_expression_rule(p)) // star_named_expression
&&
- (seq = _loop0_83_rule(p)) // _loop0_83
+ (seq = _loop0_84_rule(p)) // _loop0_84
)
{
- D(fprintf(stderr, "%*c+ _gather_82[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression _loop0_83"));
+ D(fprintf(stderr, "%*c+ _gather_83[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression _loop0_84"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_82[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_named_expression _loop0_83"));
+ D(fprintf(stderr, "%*c%s _gather_83[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_named_expression _loop0_84"));
}
_res = NULL;
done:
@@ -28941,9 +29954,9 @@ _gather_82_rule(Parser *p)
return _res;
}
-// _loop1_84: ('or' conjunction)
+// _loop1_85: ('or' conjunction)
static asdl_seq *
-_loop1_84_rule(Parser *p)
+_loop1_85_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -28969,13 +29982,13 @@ _loop1_84_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_84[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('or' conjunction)"));
- void *_tmp_230_var;
+ D(fprintf(stderr, "%*c> _loop1_85[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('or' conjunction)"));
+ void *_tmp_250_var;
while (
- (_tmp_230_var = _tmp_230_rule(p)) // 'or' conjunction
+ (_tmp_250_var = _tmp_250_rule(p)) // 'or' conjunction
)
{
- _res = _tmp_230_var;
+ _res = _tmp_250_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -28992,7 +30005,7 @@ _loop1_84_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_84[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_85[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('or' conjunction)"));
}
if (_n == 0 || p->error_indicator) {
@@ -29014,9 +30027,9 @@ _loop1_84_rule(Parser *p)
return _seq;
}
-// _loop1_85: ('and' inversion)
+// _loop1_86: ('and' inversion)
static asdl_seq *
-_loop1_85_rule(Parser *p)
+_loop1_86_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -29042,13 +30055,13 @@ _loop1_85_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_85[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('and' inversion)"));
- void *_tmp_231_var;
+ D(fprintf(stderr, "%*c> _loop1_86[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('and' inversion)"));
+ void *_tmp_251_var;
while (
- (_tmp_231_var = _tmp_231_rule(p)) // 'and' inversion
+ (_tmp_251_var = _tmp_251_rule(p)) // 'and' inversion
)
{
- _res = _tmp_231_var;
+ _res = _tmp_251_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -29065,7 +30078,7 @@ _loop1_85_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_85[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_86[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('and' inversion)"));
}
if (_n == 0 || p->error_indicator) {
@@ -29087,9 +30100,9 @@ _loop1_85_rule(Parser *p)
return _seq;
}
-// _loop1_86: compare_op_bitwise_or_pair
+// _loop1_87: compare_op_bitwise_or_pair
static asdl_seq *
-_loop1_86_rule(Parser *p)
+_loop1_87_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -29115,7 +30128,7 @@ _loop1_86_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_86[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "compare_op_bitwise_or_pair"));
+ D(fprintf(stderr, "%*c> _loop1_87[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "compare_op_bitwise_or_pair"));
CmpopExprPair* compare_op_bitwise_or_pair_var;
while (
(compare_op_bitwise_or_pair_var = compare_op_bitwise_or_pair_rule(p)) // compare_op_bitwise_or_pair
@@ -29138,7 +30151,7 @@ _loop1_86_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_86[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_87[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "compare_op_bitwise_or_pair"));
}
if (_n == 0 || p->error_indicator) {
@@ -29160,9 +30173,9 @@ _loop1_86_rule(Parser *p)
return _seq;
}
-// _tmp_87: '!='
+// _tmp_88: '!='
static void *
-_tmp_87_rule(Parser *p)
+_tmp_88_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -29179,13 +30192,13 @@ _tmp_87_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_87[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!='"));
+ D(fprintf(stderr, "%*c> _tmp_88[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!='"));
Token * tok;
if (
(tok = _PyPegen_expect_token(p, 28)) // token='!='
)
{
- D(fprintf(stderr, "%*c+ _tmp_87[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!='"));
+ D(fprintf(stderr, "%*c+ _tmp_88[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!='"));
_res = _PyPegen_check_barry_as_flufl ( p , tok ) ? NULL : tok;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -29195,7 +30208,7 @@ _tmp_87_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_87[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_88[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!='"));
}
_res = NULL;
@@ -29204,9 +30217,9 @@ _tmp_87_rule(Parser *p)
return _res;
}
-// _loop0_89: ',' (slice | starred_expression)
+// _loop0_90: ',' (slice | starred_expression)
static asdl_seq *
-_loop0_89_rule(Parser *p)
+_loop0_90_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -29232,13 +30245,13 @@ _loop0_89_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_89[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (slice | starred_expression)"));
+ D(fprintf(stderr, "%*c> _loop0_90[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (slice | starred_expression)"));
Token * _literal;
void *elem;
while (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (elem = _tmp_232_rule(p)) // slice | starred_expression
+ (elem = _tmp_252_rule(p)) // slice | starred_expression
)
{
_res = elem;
@@ -29264,7 +30277,7 @@ _loop0_89_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_89[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_90[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (slice | starred_expression)"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -29281,9 +30294,9 @@ _loop0_89_rule(Parser *p)
return _seq;
}
-// _gather_88: (slice | starred_expression) _loop0_89
+// _gather_89: (slice | starred_expression) _loop0_90
static asdl_seq *
-_gather_88_rule(Parser *p)
+_gather_89_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -29295,27 +30308,27 @@ _gather_88_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // (slice | starred_expression) _loop0_89
+ { // (slice | starred_expression) _loop0_90
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_88[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(slice | starred_expression) _loop0_89"));
+ D(fprintf(stderr, "%*c> _gather_89[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(slice | starred_expression) _loop0_90"));
void *elem;
asdl_seq * seq;
if (
- (elem = _tmp_232_rule(p)) // slice | starred_expression
+ (elem = _tmp_252_rule(p)) // slice | starred_expression
&&
- (seq = _loop0_89_rule(p)) // _loop0_89
+ (seq = _loop0_90_rule(p)) // _loop0_90
)
{
- D(fprintf(stderr, "%*c+ _gather_88[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(slice | starred_expression) _loop0_89"));
+ D(fprintf(stderr, "%*c+ _gather_89[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(slice | starred_expression) _loop0_90"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_88[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(slice | starred_expression) _loop0_89"));
+ D(fprintf(stderr, "%*c%s _gather_89[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(slice | starred_expression) _loop0_90"));
}
_res = NULL;
done:
@@ -29323,9 +30336,9 @@ _gather_88_rule(Parser *p)
return _res;
}
-// _tmp_90: ':' expression?
+// _tmp_91: ':' expression?
static void *
-_tmp_90_rule(Parser *p)
+_tmp_91_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -29342,7 +30355,7 @@ _tmp_90_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_90[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':' expression?"));
+ D(fprintf(stderr, "%*c> _tmp_91[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':' expression?"));
Token * _literal;
void *d;
if (
@@ -29351,7 +30364,7 @@ _tmp_90_rule(Parser *p)
(d = expression_rule(p), !p->error_indicator) // expression?
)
{
- D(fprintf(stderr, "%*c+ _tmp_90[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':' expression?"));
+ D(fprintf(stderr, "%*c+ _tmp_91[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':' expression?"));
_res = d;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -29361,7 +30374,7 @@ _tmp_90_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_90[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_91[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':' expression?"));
}
_res = NULL;
@@ -29370,9 +30383,67 @@ _tmp_90_rule(Parser *p)
return _res;
}
-// _tmp_91: tuple | group | genexp
+// _tmp_92: STRING | FSTRING_START
static void *
-_tmp_91_rule(Parser *p)
+_tmp_92_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // STRING
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_92[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "STRING"));
+ expr_ty string_var;
+ if (
+ (string_var = _PyPegen_string_token(p)) // STRING
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_92[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "STRING"));
+ _res = string_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_92[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "STRING"));
+ }
+ { // FSTRING_START
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_92[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "FSTRING_START"));
+ Token * fstring_start_var;
+ if (
+ (fstring_start_var = _PyPegen_expect_token(p, FSTRING_START)) // token='FSTRING_START'
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_92[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "FSTRING_START"));
+ _res = fstring_start_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_92[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "FSTRING_START"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_93: tuple | group | genexp
+static void *
+_tmp_93_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -29389,18 +30460,18 @@ _tmp_91_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_91[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple"));
+ D(fprintf(stderr, "%*c> _tmp_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple"));
expr_ty tuple_var;
if (
(tuple_var = tuple_rule(p)) // tuple
)
{
- D(fprintf(stderr, "%*c+ _tmp_91[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple"));
+ D(fprintf(stderr, "%*c+ _tmp_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple"));
_res = tuple_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_91[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_93[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "tuple"));
}
{ // group
@@ -29408,18 +30479,18 @@ _tmp_91_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_91[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "group"));
+ D(fprintf(stderr, "%*c> _tmp_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "group"));
expr_ty group_var;
if (
(group_var = group_rule(p)) // group
)
{
- D(fprintf(stderr, "%*c+ _tmp_91[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "group"));
+ D(fprintf(stderr, "%*c+ _tmp_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "group"));
_res = group_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_91[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_93[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "group"));
}
{ // genexp
@@ -29427,18 +30498,18 @@ _tmp_91_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_91[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp"));
+ D(fprintf(stderr, "%*c> _tmp_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp"));
expr_ty genexp_var;
if (
(genexp_var = genexp_rule(p)) // genexp
)
{
- D(fprintf(stderr, "%*c+ _tmp_91[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp"));
+ D(fprintf(stderr, "%*c+ _tmp_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp"));
_res = genexp_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_91[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_93[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "genexp"));
}
_res = NULL;
@@ -29447,9 +30518,9 @@ _tmp_91_rule(Parser *p)
return _res;
}
-// _tmp_92: list | listcomp
+// _tmp_94: list | listcomp
static void *
-_tmp_92_rule(Parser *p)
+_tmp_94_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -29466,18 +30537,18 @@ _tmp_92_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_92[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list"));
+ D(fprintf(stderr, "%*c> _tmp_94[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list"));
expr_ty list_var;
if (
(list_var = list_rule(p)) // list
)
{
- D(fprintf(stderr, "%*c+ _tmp_92[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list"));
+ D(fprintf(stderr, "%*c+ _tmp_94[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list"));
_res = list_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_92[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_94[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "list"));
}
{ // listcomp
@@ -29485,18 +30556,18 @@ _tmp_92_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_92[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "listcomp"));
+ D(fprintf(stderr, "%*c> _tmp_94[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "listcomp"));
expr_ty listcomp_var;
if (
(listcomp_var = listcomp_rule(p)) // listcomp
)
{
- D(fprintf(stderr, "%*c+ _tmp_92[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "listcomp"));
+ D(fprintf(stderr, "%*c+ _tmp_94[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "listcomp"));
_res = listcomp_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_92[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_94[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "listcomp"));
}
_res = NULL;
@@ -29505,9 +30576,9 @@ _tmp_92_rule(Parser *p)
return _res;
}
-// _tmp_93: dict | set | dictcomp | setcomp
+// _tmp_95: dict | set | dictcomp | setcomp
static void *
-_tmp_93_rule(Parser *p)
+_tmp_95_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -29524,18 +30595,18 @@ _tmp_93_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dict"));
+ D(fprintf(stderr, "%*c> _tmp_95[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dict"));
expr_ty dict_var;
if (
(dict_var = dict_rule(p)) // dict
)
{
- D(fprintf(stderr, "%*c+ _tmp_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dict"));
+ D(fprintf(stderr, "%*c+ _tmp_95[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dict"));
_res = dict_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_93[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_95[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dict"));
}
{ // set
@@ -29543,18 +30614,18 @@ _tmp_93_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "set"));
+ D(fprintf(stderr, "%*c> _tmp_95[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "set"));
expr_ty set_var;
if (
(set_var = set_rule(p)) // set
)
{
- D(fprintf(stderr, "%*c+ _tmp_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "set"));
+ D(fprintf(stderr, "%*c+ _tmp_95[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "set"));
_res = set_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_93[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_95[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "set"));
}
{ // dictcomp
@@ -29562,18 +30633,18 @@ _tmp_93_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dictcomp"));
+ D(fprintf(stderr, "%*c> _tmp_95[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dictcomp"));
expr_ty dictcomp_var;
if (
(dictcomp_var = dictcomp_rule(p)) // dictcomp
)
{
- D(fprintf(stderr, "%*c+ _tmp_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dictcomp"));
+ D(fprintf(stderr, "%*c+ _tmp_95[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dictcomp"));
_res = dictcomp_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_93[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_95[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dictcomp"));
}
{ // setcomp
@@ -29581,18 +30652,18 @@ _tmp_93_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "setcomp"));
+ D(fprintf(stderr, "%*c> _tmp_95[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "setcomp"));
expr_ty setcomp_var;
if (
(setcomp_var = setcomp_rule(p)) // setcomp
)
{
- D(fprintf(stderr, "%*c+ _tmp_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "setcomp"));
+ D(fprintf(stderr, "%*c+ _tmp_95[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "setcomp"));
_res = setcomp_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_93[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_95[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "setcomp"));
}
_res = NULL;
@@ -29601,9 +30672,9 @@ _tmp_93_rule(Parser *p)
return _res;
}
-// _tmp_94: yield_expr | named_expression
+// _tmp_96: yield_expr | named_expression
static void *
-_tmp_94_rule(Parser *p)
+_tmp_96_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -29620,18 +30691,18 @@ _tmp_94_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_94[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ D(fprintf(stderr, "%*c> _tmp_96[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
expr_ty yield_expr_var;
if (
(yield_expr_var = yield_expr_rule(p)) // yield_expr
)
{
- D(fprintf(stderr, "%*c+ _tmp_94[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ D(fprintf(stderr, "%*c+ _tmp_96[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
_res = yield_expr_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_94[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_96[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr"));
}
{ // named_expression
@@ -29639,18 +30710,18 @@ _tmp_94_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_94[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "named_expression"));
+ D(fprintf(stderr, "%*c> _tmp_96[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "named_expression"));
expr_ty named_expression_var;
if (
(named_expression_var = named_expression_rule(p)) // named_expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_94[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "named_expression"));
+ D(fprintf(stderr, "%*c+ _tmp_96[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "named_expression"));
_res = named_expression_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_94[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_96[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "named_expression"));
}
_res = NULL;
@@ -29659,143 +30730,7 @@ _tmp_94_rule(Parser *p)
return _res;
}
-// _loop0_95: lambda_param_no_default
-static asdl_seq *
-_loop0_95_rule(Parser *p)
-{
- if (p->level++ == MAXSTACK) {
- p->error_indicator = 1;
- PyErr_NoMemory();
- }
- if (p->error_indicator) {
- p->level--;
- return NULL;
- }
- void *_res = NULL;
- int _mark = p->mark;
- void **_children = PyMem_Malloc(sizeof(void *));
- if (!_children) {
- p->error_indicator = 1;
- PyErr_NoMemory();
- p->level--;
- return NULL;
- }
- Py_ssize_t _children_capacity = 1;
- Py_ssize_t _n = 0;
- { // lambda_param_no_default
- if (p->error_indicator) {
- p->level--;
- return NULL;
- }
- D(fprintf(stderr, "%*c> _loop0_95[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
- arg_ty lambda_param_no_default_var;
- while (
- (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
- )
- {
- _res = lambda_param_no_default_var;
- if (_n == _children_capacity) {
- _children_capacity *= 2;
- void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
- if (!_new_children) {
- PyMem_Free(_children);
- p->error_indicator = 1;
- PyErr_NoMemory();
- p->level--;
- return NULL;
- }
- _children = _new_children;
- }
- _children[_n++] = _res;
- _mark = p->mark;
- }
- p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_95[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
- }
- asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
- if (!_seq) {
- PyMem_Free(_children);
- p->error_indicator = 1;
- PyErr_NoMemory();
- p->level--;
- return NULL;
- }
- for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]);
- PyMem_Free(_children);
- p->level--;
- return _seq;
-}
-
-// _loop0_96: lambda_param_with_default
-static asdl_seq *
-_loop0_96_rule(Parser *p)
-{
- if (p->level++ == MAXSTACK) {
- p->error_indicator = 1;
- PyErr_NoMemory();
- }
- if (p->error_indicator) {
- p->level--;
- return NULL;
- }
- void *_res = NULL;
- int _mark = p->mark;
- void **_children = PyMem_Malloc(sizeof(void *));
- if (!_children) {
- p->error_indicator = 1;
- PyErr_NoMemory();
- p->level--;
- return NULL;
- }
- Py_ssize_t _children_capacity = 1;
- Py_ssize_t _n = 0;
- { // lambda_param_with_default
- if (p->error_indicator) {
- p->level--;
- return NULL;
- }
- D(fprintf(stderr, "%*c> _loop0_96[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default"));
- NameDefaultPair* lambda_param_with_default_var;
- while (
- (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default
- )
- {
- _res = lambda_param_with_default_var;
- if (_n == _children_capacity) {
- _children_capacity *= 2;
- void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
- if (!_new_children) {
- PyMem_Free(_children);
- p->error_indicator = 1;
- PyErr_NoMemory();
- p->level--;
- return NULL;
- }
- _children = _new_children;
- }
- _children[_n++] = _res;
- _mark = p->mark;
- }
- p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_96[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default"));
- }
- asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
- if (!_seq) {
- PyMem_Free(_children);
- p->error_indicator = 1;
- PyErr_NoMemory();
- p->level--;
- return NULL;
- }
- for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]);
- PyMem_Free(_children);
- p->level--;
- return _seq;
-}
-
-// _loop0_97: lambda_param_with_default
+// _loop0_97: lambda_param_no_default
static asdl_seq *
_loop0_97_rule(Parser *p)
{
@@ -29818,18 +30753,18 @@ _loop0_97_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // lambda_param_with_default
+ { // lambda_param_no_default
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_97[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default"));
- NameDefaultPair* lambda_param_with_default_var;
+ D(fprintf(stderr, "%*c> _loop0_97[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ arg_ty lambda_param_no_default_var;
while (
- (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default
+ (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
)
{
- _res = lambda_param_with_default_var;
+ _res = lambda_param_no_default_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -29847,7 +30782,7 @@ _loop0_97_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s _loop0_97[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
if (!_seq) {
@@ -29863,9 +30798,9 @@ _loop0_97_rule(Parser *p)
return _seq;
}
-// _loop1_98: lambda_param_no_default
+// _loop0_98: lambda_param_with_default
static asdl_seq *
-_loop1_98_rule(Parser *p)
+_loop0_98_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -29886,18 +30821,18 @@ _loop1_98_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // lambda_param_no_default
+ { // lambda_param_with_default
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_98[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
- arg_ty lambda_param_no_default_var;
+ D(fprintf(stderr, "%*c> _loop0_98[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default"));
+ NameDefaultPair* lambda_param_with_default_var;
while (
- (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
+ (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default
)
{
- _res = lambda_param_no_default_var;
+ _res = lambda_param_with_default_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -29914,13 +30849,8 @@ _loop1_98_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_98[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
- }
- if (_n == 0 || p->error_indicator) {
- PyMem_Free(_children);
- p->level--;
- return NULL;
+ D(fprintf(stderr, "%*c%s _loop0_98[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
if (!_seq) {
@@ -30004,7 +30934,7 @@ _loop0_99_rule(Parser *p)
return _seq;
}
-// _loop1_100: lambda_param_with_default
+// _loop1_100: lambda_param_no_default
static asdl_seq *
_loop1_100_rule(Parser *p)
{
@@ -30027,18 +30957,18 @@ _loop1_100_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // lambda_param_with_default
+ { // lambda_param_no_default
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_100[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default"));
- NameDefaultPair* lambda_param_with_default_var;
+ D(fprintf(stderr, "%*c> _loop1_100[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ arg_ty lambda_param_no_default_var;
while (
- (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default
+ (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
)
{
- _res = lambda_param_with_default_var;
+ _res = lambda_param_no_default_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -30056,7 +30986,7 @@ _loop1_100_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s _loop1_100[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
}
if (_n == 0 || p->error_indicator) {
PyMem_Free(_children);
@@ -30077,9 +31007,9 @@ _loop1_100_rule(Parser *p)
return _seq;
}
-// _loop1_101: lambda_param_no_default
+// _loop0_101: lambda_param_with_default
static asdl_seq *
-_loop1_101_rule(Parser *p)
+_loop0_101_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -30100,18 +31030,18 @@ _loop1_101_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // lambda_param_no_default
+ { // lambda_param_with_default
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_101[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
- arg_ty lambda_param_no_default_var;
+ D(fprintf(stderr, "%*c> _loop0_101[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default"));
+ NameDefaultPair* lambda_param_with_default_var;
while (
- (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
+ (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default
)
{
- _res = lambda_param_no_default_var;
+ _res = lambda_param_with_default_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -30128,13 +31058,8 @@ _loop1_101_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_101[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
- }
- if (_n == 0 || p->error_indicator) {
- PyMem_Free(_children);
- p->level--;
- return NULL;
+ D(fprintf(stderr, "%*c%s _loop0_101[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
if (!_seq) {
@@ -30150,7 +31075,7 @@ _loop1_101_rule(Parser *p)
return _seq;
}
-// _loop1_102: lambda_param_no_default
+// _loop1_102: lambda_param_with_default
static asdl_seq *
_loop1_102_rule(Parser *p)
{
@@ -30173,18 +31098,18 @@ _loop1_102_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // lambda_param_no_default
+ { // lambda_param_with_default
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_102[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
- arg_ty lambda_param_no_default_var;
+ D(fprintf(stderr, "%*c> _loop1_102[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default"));
+ NameDefaultPair* lambda_param_with_default_var;
while (
- (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
+ (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default
)
{
- _res = lambda_param_no_default_var;
+ _res = lambda_param_with_default_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -30202,7 +31127,7 @@ _loop1_102_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s _loop1_102[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default"));
}
if (_n == 0 || p->error_indicator) {
PyMem_Free(_children);
@@ -30223,9 +31148,9 @@ _loop1_102_rule(Parser *p)
return _seq;
}
-// _loop0_103: lambda_param_no_default
+// _loop1_103: lambda_param_no_default
static asdl_seq *
-_loop0_103_rule(Parser *p)
+_loop1_103_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -30251,7 +31176,7 @@ _loop0_103_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_103[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ D(fprintf(stderr, "%*c> _loop1_103[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
arg_ty lambda_param_no_default_var;
while (
(lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
@@ -30274,9 +31199,14 @@ _loop0_103_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_103[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_103[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
}
+ if (_n == 0 || p->error_indicator) {
+ PyMem_Free(_children);
+ p->level--;
+ return NULL;
+ }
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
if (!_seq) {
PyMem_Free(_children);
@@ -30291,7 +31221,7 @@ _loop0_103_rule(Parser *p)
return _seq;
}
-// _loop1_104: lambda_param_with_default
+// _loop1_104: lambda_param_no_default
static asdl_seq *
_loop1_104_rule(Parser *p)
{
@@ -30314,18 +31244,18 @@ _loop1_104_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // lambda_param_with_default
+ { // lambda_param_no_default
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_104[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default"));
- NameDefaultPair* lambda_param_with_default_var;
+ D(fprintf(stderr, "%*c> _loop1_104[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ arg_ty lambda_param_no_default_var;
while (
- (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default
+ (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
)
{
- _res = lambda_param_with_default_var;
+ _res = lambda_param_no_default_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -30343,7 +31273,7 @@ _loop1_104_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s _loop1_104[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
}
if (_n == 0 || p->error_indicator) {
PyMem_Free(_children);
@@ -30505,7 +31435,7 @@ _loop1_106_rule(Parser *p)
return _seq;
}
-// _loop0_107: lambda_param_maybe_default
+// _loop0_107: lambda_param_no_default
static asdl_seq *
_loop0_107_rule(Parser *p)
{
@@ -30528,18 +31458,18 @@ _loop0_107_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // lambda_param_maybe_default
+ { // lambda_param_no_default
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_107[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
- NameDefaultPair* lambda_param_maybe_default_var;
+ D(fprintf(stderr, "%*c> _loop0_107[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ arg_ty lambda_param_no_default_var;
while (
- (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default
+ (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
)
{
- _res = lambda_param_maybe_default_var;
+ _res = lambda_param_no_default_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -30557,7 +31487,7 @@ _loop0_107_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s _loop0_107[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
if (!_seq) {
@@ -30573,7 +31503,7 @@ _loop0_107_rule(Parser *p)
return _seq;
}
-// _loop1_108: lambda_param_maybe_default
+// _loop1_108: lambda_param_with_default
static asdl_seq *
_loop1_108_rule(Parser *p)
{
@@ -30596,18 +31526,18 @@ _loop1_108_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // lambda_param_maybe_default
+ { // lambda_param_with_default
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_108[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
- NameDefaultPair* lambda_param_maybe_default_var;
+ D(fprintf(stderr, "%*c> _loop1_108[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default"));
+ NameDefaultPair* lambda_param_with_default_var;
while (
- (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default
+ (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default
)
{
- _res = lambda_param_maybe_default_var;
+ _res = lambda_param_with_default_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -30625,7 +31555,7 @@ _loop1_108_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s _loop1_108[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default"));
}
if (_n == 0 || p->error_indicator) {
PyMem_Free(_children);
@@ -30646,9 +31576,9 @@ _loop1_108_rule(Parser *p)
return _seq;
}
-// _loop1_109: STRING
+// _loop0_109: lambda_param_maybe_default
static asdl_seq *
-_loop1_109_rule(Parser *p)
+_loop0_109_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -30669,18 +31599,18 @@ _loop1_109_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // STRING
+ { // lambda_param_maybe_default
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_109[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "STRING"));
- expr_ty string_var;
+ D(fprintf(stderr, "%*c> _loop0_109[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
+ NameDefaultPair* lambda_param_maybe_default_var;
while (
- (string_var = _PyPegen_string_token(p)) // STRING
+ (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default
)
{
- _res = string_var;
+ _res = lambda_param_maybe_default_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -30697,13 +31627,8 @@ _loop1_109_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_109[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "STRING"));
- }
- if (_n == 0 || p->error_indicator) {
- PyMem_Free(_children);
- p->level--;
- return NULL;
+ D(fprintf(stderr, "%*c%s _loop0_109[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
if (!_seq) {
@@ -30719,59 +31644,9 @@ _loop1_109_rule(Parser *p)
return _seq;
}
-// _tmp_110: star_named_expression ',' star_named_expressions?
-static void *
-_tmp_110_rule(Parser *p)
-{
- if (p->level++ == MAXSTACK) {
- p->error_indicator = 1;
- PyErr_NoMemory();
- }
- if (p->error_indicator) {
- p->level--;
- return NULL;
- }
- void * _res = NULL;
- int _mark = p->mark;
- { // star_named_expression ',' star_named_expressions?
- if (p->error_indicator) {
- p->level--;
- return NULL;
- }
- D(fprintf(stderr, "%*c> _tmp_110[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions?"));
- Token * _literal;
- expr_ty y;
- void *z;
- if (
- (y = star_named_expression_rule(p)) // star_named_expression
- &&
- (_literal = _PyPegen_expect_token(p, 12)) // token=','
- &&
- (z = star_named_expressions_rule(p), !p->error_indicator) // star_named_expressions?
- )
- {
- D(fprintf(stderr, "%*c+ _tmp_110[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions?"));
- _res = _PyPegen_seq_insert_in_front ( p , y , z );
- if (_res == NULL && PyErr_Occurred()) {
- p->error_indicator = 1;
- p->level--;
- return NULL;
- }
- goto done;
- }
- p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_110[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_named_expression ',' star_named_expressions?"));
- }
- _res = NULL;
- done:
- p->level--;
- return _res;
-}
-
-// _loop0_112: ',' double_starred_kvpair
+// _loop1_110: lambda_param_maybe_default
static asdl_seq *
-_loop0_112_rule(Parser *p)
+_loop1_110_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -30792,27 +31667,18 @@ _loop0_112_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // ',' double_starred_kvpair
+ { // lambda_param_maybe_default
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_112[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' double_starred_kvpair"));
- Token * _literal;
- KeyValuePair* elem;
+ D(fprintf(stderr, "%*c> _loop1_110[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
+ NameDefaultPair* lambda_param_maybe_default_var;
while (
- (_literal = _PyPegen_expect_token(p, 12)) // token=','
- &&
- (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair
+ (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default
)
{
- _res = elem;
- if (_res == NULL && PyErr_Occurred()) {
- p->error_indicator = 1;
- PyMem_Free(_children);
- p->level--;
- return NULL;
- }
+ _res = lambda_param_maybe_default_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -30829,8 +31695,13 @@ _loop0_112_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_112[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' double_starred_kvpair"));
+ D(fprintf(stderr, "%*c%s _loop1_110[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default"));
+ }
+ if (_n == 0 || p->error_indicator) {
+ PyMem_Free(_children);
+ p->level--;
+ return NULL;
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
if (!_seq) {
@@ -30846,9 +31717,9 @@ _loop0_112_rule(Parser *p)
return _seq;
}
-// _gather_111: double_starred_kvpair _loop0_112
-static asdl_seq *
-_gather_111_rule(Parser *p)
+// _tmp_111: yield_expr | star_expressions
+static void *
+_tmp_111_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -30858,112 +31729,55 @@ _gather_111_rule(Parser *p)
p->level--;
return NULL;
}
- asdl_seq * _res = NULL;
+ void * _res = NULL;
int _mark = p->mark;
- { // double_starred_kvpair _loop0_112
+ { // yield_expr
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_111[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_112"));
- KeyValuePair* elem;
- asdl_seq * seq;
+ D(fprintf(stderr, "%*c> _tmp_111[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ expr_ty yield_expr_var;
if (
- (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair
- &&
- (seq = _loop0_112_rule(p)) // _loop0_112
+ (yield_expr_var = yield_expr_rule(p)) // yield_expr
)
{
- D(fprintf(stderr, "%*c+ _gather_111[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_112"));
- _res = _PyPegen_seq_insert_in_front(p, elem, seq);
+ D(fprintf(stderr, "%*c+ _tmp_111[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ _res = yield_expr_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_111[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "double_starred_kvpair _loop0_112"));
- }
- _res = NULL;
- done:
- p->level--;
- return _res;
-}
-
-// _loop1_113: for_if_clause
-static asdl_seq *
-_loop1_113_rule(Parser *p)
-{
- if (p->level++ == MAXSTACK) {
- p->error_indicator = 1;
- PyErr_NoMemory();
- }
- if (p->error_indicator) {
- p->level--;
- return NULL;
- }
- void *_res = NULL;
- int _mark = p->mark;
- void **_children = PyMem_Malloc(sizeof(void *));
- if (!_children) {
- p->error_indicator = 1;
- PyErr_NoMemory();
- p->level--;
- return NULL;
+ D(fprintf(stderr, "%*c%s _tmp_111[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr"));
}
- Py_ssize_t _children_capacity = 1;
- Py_ssize_t _n = 0;
- { // for_if_clause
+ { // star_expressions
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_113[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "for_if_clause"));
- comprehension_ty for_if_clause_var;
- while (
- (for_if_clause_var = for_if_clause_rule(p)) // for_if_clause
+ D(fprintf(stderr, "%*c> _tmp_111[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ expr_ty star_expressions_var;
+ if (
+ (star_expressions_var = star_expressions_rule(p)) // star_expressions
)
{
- _res = for_if_clause_var;
- if (_n == _children_capacity) {
- _children_capacity *= 2;
- void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
- if (!_new_children) {
- PyMem_Free(_children);
- p->error_indicator = 1;
- PyErr_NoMemory();
- p->level--;
- return NULL;
- }
- _children = _new_children;
- }
- _children[_n++] = _res;
- _mark = p->mark;
+ D(fprintf(stderr, "%*c+ _tmp_111[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ _res = star_expressions_var;
+ goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_113[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "for_if_clause"));
- }
- if (_n == 0 || p->error_indicator) {
- PyMem_Free(_children);
- p->level--;
- return NULL;
- }
- asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
- if (!_seq) {
- PyMem_Free(_children);
- p->error_indicator = 1;
- PyErr_NoMemory();
- p->level--;
- return NULL;
+ D(fprintf(stderr, "%*c%s _tmp_111[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions"));
}
- for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]);
- PyMem_Free(_children);
+ _res = NULL;
+ done:
p->level--;
- return _seq;
+ return _res;
}
-// _loop0_114: ('if' disjunction)
+// _loop0_112: fstring_format_spec
static asdl_seq *
-_loop0_114_rule(Parser *p)
+_loop0_112_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -30984,18 +31798,18 @@ _loop0_114_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // ('if' disjunction)
+ { // fstring_format_spec
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_114[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)"));
- void *_tmp_233_var;
+ D(fprintf(stderr, "%*c> _loop0_112[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring_format_spec"));
+ expr_ty fstring_format_spec_var;
while (
- (_tmp_233_var = _tmp_233_rule(p)) // 'if' disjunction
+ (fstring_format_spec_var = fstring_format_spec_rule(p)) // fstring_format_spec
)
{
- _res = _tmp_233_var;
+ _res = fstring_format_spec_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -31012,8 +31826,8 @@ _loop0_114_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_114[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('if' disjunction)"));
+ D(fprintf(stderr, "%*c%s _loop0_112[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "fstring_format_spec"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
if (!_seq) {
@@ -31029,9 +31843,9 @@ _loop0_114_rule(Parser *p)
return _seq;
}
-// _loop0_115: ('if' disjunction)
+// _loop1_113: (fstring | string)
static asdl_seq *
-_loop0_115_rule(Parser *p)
+_loop1_113_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -31052,18 +31866,18 @@ _loop0_115_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // ('if' disjunction)
+ { // (fstring | string)
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_115[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)"));
- void *_tmp_234_var;
+ D(fprintf(stderr, "%*c> _loop1_113[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(fstring | string)"));
+ void *_tmp_253_var;
while (
- (_tmp_234_var = _tmp_234_rule(p)) // 'if' disjunction
+ (_tmp_253_var = _tmp_253_rule(p)) // fstring | string
)
{
- _res = _tmp_234_var;
+ _res = _tmp_253_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -31080,8 +31894,13 @@ _loop0_115_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_115[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('if' disjunction)"));
+ D(fprintf(stderr, "%*c%s _loop1_113[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(fstring | string)"));
+ }
+ if (_n == 0 || p->error_indicator) {
+ PyMem_Free(_children);
+ p->level--;
+ return NULL;
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
if (!_seq) {
@@ -31097,9 +31916,9 @@ _loop0_115_rule(Parser *p)
return _seq;
}
-// _tmp_116: assignment_expression | expression !':='
+// _tmp_114: star_named_expression ',' star_named_expressions?
static void *
-_tmp_116_rule(Parser *p)
+_tmp_114_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -31111,45 +31930,35 @@ _tmp_116_rule(Parser *p)
}
void * _res = NULL;
int _mark = p->mark;
- { // assignment_expression
- if (p->error_indicator) {
- p->level--;
- return NULL;
- }
- D(fprintf(stderr, "%*c> _tmp_116[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression"));
- expr_ty assignment_expression_var;
- if (
- (assignment_expression_var = assignment_expression_rule(p)) // assignment_expression
- )
- {
- D(fprintf(stderr, "%*c+ _tmp_116[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression"));
- _res = assignment_expression_var;
- goto done;
- }
- p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_116[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "assignment_expression"));
- }
- { // expression !':='
+ { // star_named_expression ',' star_named_expressions?
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_116[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='"));
- expr_ty expression_var;
+ D(fprintf(stderr, "%*c> _tmp_114[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions?"));
+ Token * _literal;
+ expr_ty y;
+ void *z;
if (
- (expression_var = expression_rule(p)) // expression
+ (y = star_named_expression_rule(p)) // star_named_expression
&&
- _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) // token=':='
+ (_literal = _PyPegen_expect_token(p, 12)) // token=','
+ &&
+ (z = star_named_expressions_rule(p), !p->error_indicator) // star_named_expressions?
)
{
- D(fprintf(stderr, "%*c+ _tmp_116[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='"));
- _res = expression_var;
+ D(fprintf(stderr, "%*c+ _tmp_114[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions?"));
+ _res = _PyPegen_seq_insert_in_front ( p , y , z );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_116[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression !':='"));
+ D(fprintf(stderr, "%*c%s _tmp_114[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_named_expression ',' star_named_expressions?"));
}
_res = NULL;
done:
@@ -31157,9 +31966,9 @@ _tmp_116_rule(Parser *p)
return _res;
}
-// _loop0_118: ',' (starred_expression | (assignment_expression | expression !':=') !'=')
+// _loop0_116: ',' double_starred_kvpair
static asdl_seq *
-_loop0_118_rule(Parser *p)
+_loop0_116_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -31180,18 +31989,18 @@ _loop0_118_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // ',' (starred_expression | (assignment_expression | expression !':=') !'=')
+ { // ',' double_starred_kvpair
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_118[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (starred_expression | (assignment_expression | expression !':=') !'=')"));
+ D(fprintf(stderr, "%*c> _loop0_116[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' double_starred_kvpair"));
Token * _literal;
- void *elem;
+ KeyValuePair* elem;
while (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (elem = _tmp_235_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'='
+ (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair
)
{
_res = elem;
@@ -31217,8 +32026,8 @@ _loop0_118_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_118[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (starred_expression | (assignment_expression | expression !':=') !'=')"));
+ D(fprintf(stderr, "%*c%s _loop0_116[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' double_starred_kvpair"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
if (!_seq) {
@@ -31234,10 +32043,9 @@ _loop0_118_rule(Parser *p)
return _seq;
}
-// _gather_117:
-// | (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_118
+// _gather_115: double_starred_kvpair _loop0_116
static asdl_seq *
-_gather_117_rule(Parser *p)
+_gather_115_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -31249,27 +32057,27 @@ _gather_117_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_118
+ { // double_starred_kvpair _loop0_116
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_117[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_118"));
- void *elem;
+ D(fprintf(stderr, "%*c> _gather_115[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_116"));
+ KeyValuePair* elem;
asdl_seq * seq;
if (
- (elem = _tmp_235_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'='
+ (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair
&&
- (seq = _loop0_118_rule(p)) // _loop0_118
+ (seq = _loop0_116_rule(p)) // _loop0_116
)
{
- D(fprintf(stderr, "%*c+ _gather_117[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_118"));
+ D(fprintf(stderr, "%*c+ _gather_115[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_116"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_117[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_118"));
+ D(fprintf(stderr, "%*c%s _gather_115[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "double_starred_kvpair _loop0_116"));
}
_res = NULL;
done:
@@ -31277,9 +32085,9 @@ _gather_117_rule(Parser *p)
return _res;
}
-// _tmp_119: ',' kwargs
-static void *
-_tmp_119_rule(Parser *p)
+// _loop1_117: for_if_clause
+static asdl_seq *
+_loop1_117_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -31289,44 +32097,70 @@ _tmp_119_rule(Parser *p)
p->level--;
return NULL;
}
- void * _res = NULL;
+ void *_res = NULL;
int _mark = p->mark;
- { // ',' kwargs
+ void **_children = PyMem_Malloc(sizeof(void *));
+ if (!_children) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ Py_ssize_t _children_capacity = 1;
+ Py_ssize_t _n = 0;
+ { // for_if_clause
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_119[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwargs"));
- Token * _literal;
- asdl_seq* k;
- if (
- (_literal = _PyPegen_expect_token(p, 12)) // token=','
- &&
- (k = kwargs_rule(p)) // kwargs
+ D(fprintf(stderr, "%*c> _loop1_117[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "for_if_clause"));
+ comprehension_ty for_if_clause_var;
+ while (
+ (for_if_clause_var = for_if_clause_rule(p)) // for_if_clause
)
{
- D(fprintf(stderr, "%*c+ _tmp_119[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' kwargs"));
- _res = k;
- if (_res == NULL && PyErr_Occurred()) {
- p->error_indicator = 1;
- p->level--;
- return NULL;
+ _res = for_if_clause_var;
+ if (_n == _children_capacity) {
+ _children_capacity *= 2;
+ void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
+ if (!_new_children) {
+ PyMem_Free(_children);
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ _children = _new_children;
}
- goto done;
+ _children[_n++] = _res;
+ _mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_119[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwargs"));
+ D(fprintf(stderr, "%*c%s _loop1_117[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "for_if_clause"));
}
- _res = NULL;
- done:
+ if (_n == 0 || p->error_indicator) {
+ PyMem_Free(_children);
+ p->level--;
+ return NULL;
+ }
+ asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
+ if (!_seq) {
+ PyMem_Free(_children);
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]);
+ PyMem_Free(_children);
p->level--;
- return _res;
+ return _seq;
}
-// _loop0_121: ',' kwarg_or_starred
+// _loop0_118: ('if' disjunction)
static asdl_seq *
-_loop0_121_rule(Parser *p)
+_loop0_118_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -31347,27 +32181,18 @@ _loop0_121_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // ',' kwarg_or_starred
+ { // ('if' disjunction)
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_121[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_starred"));
- Token * _literal;
- KeywordOrStarred* elem;
+ D(fprintf(stderr, "%*c> _loop0_118[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)"));
+ void *_tmp_254_var;
while (
- (_literal = _PyPegen_expect_token(p, 12)) // token=','
- &&
- (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred
+ (_tmp_254_var = _tmp_254_rule(p)) // 'if' disjunction
)
{
- _res = elem;
- if (_res == NULL && PyErr_Occurred()) {
- p->error_indicator = 1;
- PyMem_Free(_children);
- p->level--;
- return NULL;
- }
+ _res = _tmp_254_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -31384,8 +32209,8 @@ _loop0_121_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_121[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_starred"));
+ D(fprintf(stderr, "%*c%s _loop0_118[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('if' disjunction)"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
if (!_seq) {
@@ -31401,9 +32226,9 @@ _loop0_121_rule(Parser *p)
return _seq;
}
-// _gather_120: kwarg_or_starred _loop0_121
+// _loop0_119: ('if' disjunction)
static asdl_seq *
-_gather_120_rule(Parser *p)
+_loop0_119_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -31413,29 +32238,115 @@ _gather_120_rule(Parser *p)
p->level--;
return NULL;
}
- asdl_seq * _res = NULL;
+ void *_res = NULL;
int _mark = p->mark;
- { // kwarg_or_starred _loop0_121
+ void **_children = PyMem_Malloc(sizeof(void *));
+ if (!_children) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ Py_ssize_t _children_capacity = 1;
+ Py_ssize_t _n = 0;
+ { // ('if' disjunction)
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_120[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_121"));
- KeywordOrStarred* elem;
- asdl_seq * seq;
+ D(fprintf(stderr, "%*c> _loop0_119[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)"));
+ void *_tmp_255_var;
+ while (
+ (_tmp_255_var = _tmp_255_rule(p)) // 'if' disjunction
+ )
+ {
+ _res = _tmp_255_var;
+ if (_n == _children_capacity) {
+ _children_capacity *= 2;
+ void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
+ if (!_new_children) {
+ PyMem_Free(_children);
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ _children = _new_children;
+ }
+ _children[_n++] = _res;
+ _mark = p->mark;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _loop0_119[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('if' disjunction)"));
+ }
+ asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
+ if (!_seq) {
+ PyMem_Free(_children);
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]);
+ PyMem_Free(_children);
+ p->level--;
+ return _seq;
+}
+
+// _tmp_120: assignment_expression | expression !':='
+static void *
+_tmp_120_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // assignment_expression
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_120[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression"));
+ expr_ty assignment_expression_var;
if (
- (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred
+ (assignment_expression_var = assignment_expression_rule(p)) // assignment_expression
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_120[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression"));
+ _res = assignment_expression_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_120[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "assignment_expression"));
+ }
+ { // expression !':='
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_120[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='"));
+ expr_ty expression_var;
+ if (
+ (expression_var = expression_rule(p)) // expression
&&
- (seq = _loop0_121_rule(p)) // _loop0_121
+ _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) // token=':='
)
{
- D(fprintf(stderr, "%*c+ _gather_120[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_121"));
- _res = _PyPegen_seq_insert_in_front(p, elem, seq);
+ D(fprintf(stderr, "%*c+ _tmp_120[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='"));
+ _res = expression_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_120[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwarg_or_starred _loop0_121"));
+ D(fprintf(stderr, "%*c%s _tmp_120[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression !':='"));
}
_res = NULL;
done:
@@ -31443,9 +32354,9 @@ _gather_120_rule(Parser *p)
return _res;
}
-// _loop0_123: ',' kwarg_or_double_starred
+// _loop0_122: ',' (starred_expression | (assignment_expression | expression !':=') !'=')
static asdl_seq *
-_loop0_123_rule(Parser *p)
+_loop0_122_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -31466,18 +32377,18 @@ _loop0_123_rule(Parser *p)
}
Py_ssize_t _children_capacity = 1;
Py_ssize_t _n = 0;
- { // ',' kwarg_or_double_starred
+ { // ',' (starred_expression | (assignment_expression | expression !':=') !'=')
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_123[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_double_starred"));
+ D(fprintf(stderr, "%*c> _loop0_122[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (starred_expression | (assignment_expression | expression !':=') !'=')"));
Token * _literal;
- KeywordOrStarred* elem;
+ void *elem;
while (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred
+ (elem = _tmp_256_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'='
)
{
_res = elem;
@@ -31503,8 +32414,8 @@ _loop0_123_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_123[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_double_starred"));
+ D(fprintf(stderr, "%*c%s _loop0_122[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (starred_expression | (assignment_expression | expression !':=') !'=')"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
if (!_seq) {
@@ -31520,9 +32431,10 @@ _loop0_123_rule(Parser *p)
return _seq;
}
-// _gather_122: kwarg_or_double_starred _loop0_123
+// _gather_121:
+// | (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_122
static asdl_seq *
-_gather_122_rule(Parser *p)
+_gather_121_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -31534,27 +32446,74 @@ _gather_122_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // kwarg_or_double_starred _loop0_123
+ { // (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_122
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_122[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_123"));
- KeywordOrStarred* elem;
+ D(fprintf(stderr, "%*c> _gather_121[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_122"));
+ void *elem;
asdl_seq * seq;
if (
- (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred
+ (elem = _tmp_256_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'='
&&
- (seq = _loop0_123_rule(p)) // _loop0_123
+ (seq = _loop0_122_rule(p)) // _loop0_122
)
{
- D(fprintf(stderr, "%*c+ _gather_122[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_123"));
+ D(fprintf(stderr, "%*c+ _gather_121[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_122"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_122[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwarg_or_double_starred _loop0_123"));
+ D(fprintf(stderr, "%*c%s _gather_121[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_122"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_123: ',' kwargs
+static void *
+_tmp_123_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // ',' kwargs
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_123[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwargs"));
+ Token * _literal;
+ asdl_seq* k;
+ if (
+ (_literal = _PyPegen_expect_token(p, 12)) // token=','
+ &&
+ (k = kwargs_rule(p)) // kwargs
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_123[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' kwargs"));
+ _res = k;
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_123[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwargs"));
}
_res = NULL;
done:
@@ -31800,9 +32759,247 @@ _gather_126_rule(Parser *p)
return _res;
}
-// _loop0_128: (',' star_target)
+// _loop0_129: ',' kwarg_or_starred
static asdl_seq *
-_loop0_128_rule(Parser *p)
+_loop0_129_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void *_res = NULL;
+ int _mark = p->mark;
+ void **_children = PyMem_Malloc(sizeof(void *));
+ if (!_children) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ Py_ssize_t _children_capacity = 1;
+ Py_ssize_t _n = 0;
+ { // ',' kwarg_or_starred
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _loop0_129[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_starred"));
+ Token * _literal;
+ KeywordOrStarred* elem;
+ while (
+ (_literal = _PyPegen_expect_token(p, 12)) // token=','
+ &&
+ (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred
+ )
+ {
+ _res = elem;
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ PyMem_Free(_children);
+ p->level--;
+ return NULL;
+ }
+ if (_n == _children_capacity) {
+ _children_capacity *= 2;
+ void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
+ if (!_new_children) {
+ PyMem_Free(_children);
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ _children = _new_children;
+ }
+ _children[_n++] = _res;
+ _mark = p->mark;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _loop0_129[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_starred"));
+ }
+ asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
+ if (!_seq) {
+ PyMem_Free(_children);
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]);
+ PyMem_Free(_children);
+ p->level--;
+ return _seq;
+}
+
+// _gather_128: kwarg_or_starred _loop0_129
+static asdl_seq *
+_gather_128_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ asdl_seq * _res = NULL;
+ int _mark = p->mark;
+ { // kwarg_or_starred _loop0_129
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _gather_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_129"));
+ KeywordOrStarred* elem;
+ asdl_seq * seq;
+ if (
+ (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred
+ &&
+ (seq = _loop0_129_rule(p)) // _loop0_129
+ )
+ {
+ D(fprintf(stderr, "%*c+ _gather_128[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_129"));
+ _res = _PyPegen_seq_insert_in_front(p, elem, seq);
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _gather_128[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwarg_or_starred _loop0_129"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _loop0_131: ',' kwarg_or_double_starred
+static asdl_seq *
+_loop0_131_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void *_res = NULL;
+ int _mark = p->mark;
+ void **_children = PyMem_Malloc(sizeof(void *));
+ if (!_children) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ Py_ssize_t _children_capacity = 1;
+ Py_ssize_t _n = 0;
+ { // ',' kwarg_or_double_starred
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _loop0_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_double_starred"));
+ Token * _literal;
+ KeywordOrStarred* elem;
+ while (
+ (_literal = _PyPegen_expect_token(p, 12)) // token=','
+ &&
+ (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred
+ )
+ {
+ _res = elem;
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ PyMem_Free(_children);
+ p->level--;
+ return NULL;
+ }
+ if (_n == _children_capacity) {
+ _children_capacity *= 2;
+ void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
+ if (!_new_children) {
+ PyMem_Free(_children);
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ _children = _new_children;
+ }
+ _children[_n++] = _res;
+ _mark = p->mark;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _loop0_131[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_double_starred"));
+ }
+ asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
+ if (!_seq) {
+ PyMem_Free(_children);
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]);
+ PyMem_Free(_children);
+ p->level--;
+ return _seq;
+}
+
+// _gather_130: kwarg_or_double_starred _loop0_131
+static asdl_seq *
+_gather_130_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ asdl_seq * _res = NULL;
+ int _mark = p->mark;
+ { // kwarg_or_double_starred _loop0_131
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _gather_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_131"));
+ KeywordOrStarred* elem;
+ asdl_seq * seq;
+ if (
+ (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred
+ &&
+ (seq = _loop0_131_rule(p)) // _loop0_131
+ )
+ {
+ D(fprintf(stderr, "%*c+ _gather_130[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_131"));
+ _res = _PyPegen_seq_insert_in_front(p, elem, seq);
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _gather_130[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwarg_or_double_starred _loop0_131"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _loop0_132: (',' star_target)
+static asdl_seq *
+_loop0_132_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -31828,13 +33025,13 @@ _loop0_128_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)"));
- void *_tmp_236_var;
+ D(fprintf(stderr, "%*c> _loop0_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)"));
+ void *_tmp_257_var;
while (
- (_tmp_236_var = _tmp_236_rule(p)) // ',' star_target
+ (_tmp_257_var = _tmp_257_rule(p)) // ',' star_target
)
{
- _res = _tmp_236_var;
+ _res = _tmp_257_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -31851,7 +33048,7 @@ _loop0_128_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_128[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_132[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(',' star_target)"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -31868,9 +33065,9 @@ _loop0_128_rule(Parser *p)
return _seq;
}
-// _loop0_130: ',' star_target
+// _loop0_134: ',' star_target
static asdl_seq *
-_loop0_130_rule(Parser *p)
+_loop0_134_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -31896,7 +33093,7 @@ _loop0_130_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target"));
+ D(fprintf(stderr, "%*c> _loop0_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target"));
Token * _literal;
expr_ty elem;
while (
@@ -31928,7 +33125,7 @@ _loop0_130_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_130[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_134[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_target"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -31945,9 +33142,9 @@ _loop0_130_rule(Parser *p)
return _seq;
}
-// _gather_129: star_target _loop0_130
+// _gather_133: star_target _loop0_134
static asdl_seq *
-_gather_129_rule(Parser *p)
+_gather_133_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -31959,27 +33156,27 @@ _gather_129_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // star_target _loop0_130
+ { // star_target _loop0_134
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_129[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_target _loop0_130"));
+ D(fprintf(stderr, "%*c> _gather_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_target _loop0_134"));
expr_ty elem;
asdl_seq * seq;
if (
(elem = star_target_rule(p)) // star_target
&&
- (seq = _loop0_130_rule(p)) // _loop0_130
+ (seq = _loop0_134_rule(p)) // _loop0_134
)
{
- D(fprintf(stderr, "%*c+ _gather_129[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_target _loop0_130"));
+ D(fprintf(stderr, "%*c+ _gather_133[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_target _loop0_134"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_129[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_target _loop0_130"));
+ D(fprintf(stderr, "%*c%s _gather_133[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_target _loop0_134"));
}
_res = NULL;
done:
@@ -31987,9 +33184,9 @@ _gather_129_rule(Parser *p)
return _res;
}
-// _loop1_131: (',' star_target)
+// _loop1_135: (',' star_target)
static asdl_seq *
-_loop1_131_rule(Parser *p)
+_loop1_135_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32015,13 +33212,13 @@ _loop1_131_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)"));
- void *_tmp_237_var;
+ D(fprintf(stderr, "%*c> _loop1_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)"));
+ void *_tmp_258_var;
while (
- (_tmp_237_var = _tmp_237_rule(p)) // ',' star_target
+ (_tmp_258_var = _tmp_258_rule(p)) // ',' star_target
)
{
- _res = _tmp_237_var;
+ _res = _tmp_258_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -32038,7 +33235,7 @@ _loop1_131_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_131[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_135[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(',' star_target)"));
}
if (_n == 0 || p->error_indicator) {
@@ -32060,9 +33257,9 @@ _loop1_131_rule(Parser *p)
return _seq;
}
-// _tmp_132: !'*' star_target
+// _tmp_136: !'*' star_target
static void *
-_tmp_132_rule(Parser *p)
+_tmp_136_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32079,7 +33276,7 @@ _tmp_132_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "!'*' star_target"));
+ D(fprintf(stderr, "%*c> _tmp_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "!'*' star_target"));
expr_ty star_target_var;
if (
_PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 16) // token='*'
@@ -32087,12 +33284,12 @@ _tmp_132_rule(Parser *p)
(star_target_var = star_target_rule(p)) // star_target
)
{
- D(fprintf(stderr, "%*c+ _tmp_132[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!'*' star_target"));
+ D(fprintf(stderr, "%*c+ _tmp_136[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!'*' star_target"));
_res = star_target_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_132[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_136[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "!'*' star_target"));
}
_res = NULL;
@@ -32101,9 +33298,9 @@ _tmp_132_rule(Parser *p)
return _res;
}
-// _loop0_134: ',' del_target
+// _loop0_138: ',' del_target
static asdl_seq *
-_loop0_134_rule(Parser *p)
+_loop0_138_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32129,7 +33326,7 @@ _loop0_134_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' del_target"));
+ D(fprintf(stderr, "%*c> _loop0_138[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' del_target"));
Token * _literal;
expr_ty elem;
while (
@@ -32161,7 +33358,7 @@ _loop0_134_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_134[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_138[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' del_target"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -32178,9 +33375,9 @@ _loop0_134_rule(Parser *p)
return _seq;
}
-// _gather_133: del_target _loop0_134
+// _gather_137: del_target _loop0_138
static asdl_seq *
-_gather_133_rule(Parser *p)
+_gather_137_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32192,27 +33389,27 @@ _gather_133_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // del_target _loop0_134
+ { // del_target _loop0_138
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "del_target _loop0_134"));
+ D(fprintf(stderr, "%*c> _gather_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "del_target _loop0_138"));
expr_ty elem;
asdl_seq * seq;
if (
(elem = del_target_rule(p)) // del_target
&&
- (seq = _loop0_134_rule(p)) // _loop0_134
+ (seq = _loop0_138_rule(p)) // _loop0_138
)
{
- D(fprintf(stderr, "%*c+ _gather_133[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "del_target _loop0_134"));
+ D(fprintf(stderr, "%*c+ _gather_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "del_target _loop0_138"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_133[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "del_target _loop0_134"));
+ D(fprintf(stderr, "%*c%s _gather_137[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "del_target _loop0_138"));
}
_res = NULL;
done:
@@ -32220,9 +33417,9 @@ _gather_133_rule(Parser *p)
return _res;
}
-// _loop0_136: ',' expression
+// _loop0_140: ',' expression
static asdl_seq *
-_loop0_136_rule(Parser *p)
+_loop0_140_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32248,7 +33445,7 @@ _loop0_136_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
+ D(fprintf(stderr, "%*c> _loop0_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
Token * _literal;
expr_ty elem;
while (
@@ -32280,7 +33477,7 @@ _loop0_136_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_136[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_140[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -32297,9 +33494,9 @@ _loop0_136_rule(Parser *p)
return _seq;
}
-// _gather_135: expression _loop0_136
+// _gather_139: expression _loop0_140
static asdl_seq *
-_gather_135_rule(Parser *p)
+_gather_139_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32311,27 +33508,27 @@ _gather_135_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // expression _loop0_136
+ { // expression _loop0_140
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_136"));
+ D(fprintf(stderr, "%*c> _gather_139[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_140"));
expr_ty elem;
asdl_seq * seq;
if (
(elem = expression_rule(p)) // expression
&&
- (seq = _loop0_136_rule(p)) // _loop0_136
+ (seq = _loop0_140_rule(p)) // _loop0_140
)
{
- D(fprintf(stderr, "%*c+ _gather_135[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_136"));
+ D(fprintf(stderr, "%*c+ _gather_139[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_140"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_135[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_136"));
+ D(fprintf(stderr, "%*c%s _gather_139[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_140"));
}
_res = NULL;
done:
@@ -32339,9 +33536,9 @@ _gather_135_rule(Parser *p)
return _res;
}
-// _loop0_138: ',' expression
+// _loop0_142: ',' expression
static asdl_seq *
-_loop0_138_rule(Parser *p)
+_loop0_142_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32367,7 +33564,7 @@ _loop0_138_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_138[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
+ D(fprintf(stderr, "%*c> _loop0_142[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
Token * _literal;
expr_ty elem;
while (
@@ -32399,7 +33596,7 @@ _loop0_138_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_138[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_142[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -32416,9 +33613,9 @@ _loop0_138_rule(Parser *p)
return _seq;
}
-// _gather_137: expression _loop0_138
+// _gather_141: expression _loop0_142
static asdl_seq *
-_gather_137_rule(Parser *p)
+_gather_141_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32430,27 +33627,27 @@ _gather_137_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // expression _loop0_138
+ { // expression _loop0_142
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_138"));
+ D(fprintf(stderr, "%*c> _gather_141[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_142"));
expr_ty elem;
asdl_seq * seq;
if (
(elem = expression_rule(p)) // expression
&&
- (seq = _loop0_138_rule(p)) // _loop0_138
+ (seq = _loop0_142_rule(p)) // _loop0_142
)
{
- D(fprintf(stderr, "%*c+ _gather_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_138"));
+ D(fprintf(stderr, "%*c+ _gather_141[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_142"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_137[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_138"));
+ D(fprintf(stderr, "%*c%s _gather_141[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_142"));
}
_res = NULL;
done:
@@ -32458,9 +33655,9 @@ _gather_137_rule(Parser *p)
return _res;
}
-// _loop0_140: ',' expression
+// _loop0_144: ',' expression
static asdl_seq *
-_loop0_140_rule(Parser *p)
+_loop0_144_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32486,7 +33683,7 @@ _loop0_140_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
+ D(fprintf(stderr, "%*c> _loop0_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
Token * _literal;
expr_ty elem;
while (
@@ -32518,7 +33715,7 @@ _loop0_140_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_140[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_144[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -32535,9 +33732,9 @@ _loop0_140_rule(Parser *p)
return _seq;
}
-// _gather_139: expression _loop0_140
+// _gather_143: expression _loop0_144
static asdl_seq *
-_gather_139_rule(Parser *p)
+_gather_143_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32549,27 +33746,27 @@ _gather_139_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // expression _loop0_140
+ { // expression _loop0_144
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_139[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_140"));
+ D(fprintf(stderr, "%*c> _gather_143[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_144"));
expr_ty elem;
asdl_seq * seq;
if (
(elem = expression_rule(p)) // expression
&&
- (seq = _loop0_140_rule(p)) // _loop0_140
+ (seq = _loop0_144_rule(p)) // _loop0_144
)
{
- D(fprintf(stderr, "%*c+ _gather_139[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_140"));
+ D(fprintf(stderr, "%*c+ _gather_143[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_144"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_139[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_140"));
+ D(fprintf(stderr, "%*c%s _gather_143[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_144"));
}
_res = NULL;
done:
@@ -32577,9 +33774,9 @@ _gather_139_rule(Parser *p)
return _res;
}
-// _loop0_142: ',' expression
+// _loop0_146: ',' expression
static asdl_seq *
-_loop0_142_rule(Parser *p)
+_loop0_146_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32605,7 +33802,7 @@ _loop0_142_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_142[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
+ D(fprintf(stderr, "%*c> _loop0_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
Token * _literal;
expr_ty elem;
while (
@@ -32637,7 +33834,7 @@ _loop0_142_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_142[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_146[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -32654,9 +33851,9 @@ _loop0_142_rule(Parser *p)
return _seq;
}
-// _gather_141: expression _loop0_142
+// _gather_145: expression _loop0_146
static asdl_seq *
-_gather_141_rule(Parser *p)
+_gather_145_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32668,27 +33865,27 @@ _gather_141_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // expression _loop0_142
+ { // expression _loop0_146
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_141[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_142"));
+ D(fprintf(stderr, "%*c> _gather_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_146"));
expr_ty elem;
asdl_seq * seq;
if (
(elem = expression_rule(p)) // expression
&&
- (seq = _loop0_142_rule(p)) // _loop0_142
+ (seq = _loop0_146_rule(p)) // _loop0_146
)
{
- D(fprintf(stderr, "%*c+ _gather_141[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_142"));
+ D(fprintf(stderr, "%*c+ _gather_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_146"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_141[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_142"));
+ D(fprintf(stderr, "%*c%s _gather_145[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_146"));
}
_res = NULL;
done:
@@ -32696,9 +33893,9 @@ _gather_141_rule(Parser *p)
return _res;
}
-// _tmp_143: NEWLINE INDENT
+// _tmp_147: NEWLINE INDENT
static void *
-_tmp_143_rule(Parser *p)
+_tmp_147_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32715,7 +33912,7 @@ _tmp_143_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_143[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT"));
+ D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT"));
Token * indent_var;
Token * newline_var;
if (
@@ -32724,12 +33921,12 @@ _tmp_143_rule(Parser *p)
(indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT'
)
{
- D(fprintf(stderr, "%*c+ _tmp_143[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT"));
+ D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT"));
_res = _PyPegen_dummy_name(p, newline_var, indent_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_143[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE INDENT"));
}
_res = NULL;
@@ -32738,9 +33935,9 @@ _tmp_143_rule(Parser *p)
return _res;
}
-// _tmp_144: args | expression for_if_clauses
+// _tmp_148: args | expression for_if_clauses
static void *
-_tmp_144_rule(Parser *p)
+_tmp_148_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32757,18 +33954,18 @@ _tmp_144_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args"));
+ D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args"));
expr_ty args_var;
if (
(args_var = args_rule(p)) // args
)
{
- D(fprintf(stderr, "%*c+ _tmp_144[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args"));
+ D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args"));
_res = args_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_144[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "args"));
}
{ // expression for_if_clauses
@@ -32776,7 +33973,7 @@ _tmp_144_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses"));
+ D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses"));
expr_ty expression_var;
asdl_comprehension_seq* for_if_clauses_var;
if (
@@ -32785,12 +33982,12 @@ _tmp_144_rule(Parser *p)
(for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses
)
{
- D(fprintf(stderr, "%*c+ _tmp_144[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses"));
+ D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses"));
_res = _PyPegen_dummy_name(p, expression_var, for_if_clauses_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_144[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression for_if_clauses"));
}
_res = NULL;
@@ -32799,9 +33996,9 @@ _tmp_144_rule(Parser *p)
return _res;
}
-// _tmp_145: args ','
+// _tmp_149: args ','
static void *
-_tmp_145_rule(Parser *p)
+_tmp_149_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32818,7 +34015,7 @@ _tmp_145_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args ','"));
+ D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args ','"));
Token * _literal;
expr_ty args_var;
if (
@@ -32827,12 +34024,12 @@ _tmp_145_rule(Parser *p)
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args ','"));
+ D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args ','"));
_res = _PyPegen_dummy_name(p, args_var, _literal);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_145[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "args ','"));
}
_res = NULL;
@@ -32841,9 +34038,9 @@ _tmp_145_rule(Parser *p)
return _res;
}
-// _tmp_146: ',' | ')'
+// _tmp_150: ',' | ')'
static void *
-_tmp_146_rule(Parser *p)
+_tmp_150_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32860,18 +34057,18 @@ _tmp_146_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_146[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
{ // ')'
@@ -32879,18 +34076,18 @@ _tmp_146_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_146[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'"));
}
_res = NULL;
@@ -32899,9 +34096,9 @@ _tmp_146_rule(Parser *p)
return _res;
}
-// _tmp_147: 'True' | 'False' | 'None'
+// _tmp_151: 'True' | 'False' | 'None'
static void *
-_tmp_147_rule(Parser *p)
+_tmp_151_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32918,18 +34115,18 @@ _tmp_147_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
+ D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 600)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 601)) // token='True'
)
{
- D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
+ D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'True'"));
}
{ // 'False'
@@ -32937,18 +34134,18 @@ _tmp_147_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
+ D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 602)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 603)) // token='False'
)
{
- D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
+ D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'False'"));
}
{ // 'None'
@@ -32956,18 +34153,18 @@ _tmp_147_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
+ D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 601)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 602)) // token='None'
)
{
- D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
+ D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'None'"));
}
_res = NULL;
@@ -32976,9 +34173,9 @@ _tmp_147_rule(Parser *p)
return _res;
}
-// _tmp_148: NAME '='
+// _tmp_152: NAME '='
static void *
-_tmp_148_rule(Parser *p)
+_tmp_152_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -32995,7 +34192,7 @@ _tmp_148_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME '='"));
+ D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME '='"));
Token * _literal;
expr_ty name_var;
if (
@@ -33004,12 +34201,12 @@ _tmp_148_rule(Parser *p)
(_literal = _PyPegen_expect_token(p, 22)) // token='='
)
{
- D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '='"));
+ D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '='"));
_res = _PyPegen_dummy_name(p, name_var, _literal);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME '='"));
}
_res = NULL;
@@ -33018,9 +34215,9 @@ _tmp_148_rule(Parser *p)
return _res;
}
-// _tmp_149: NAME STRING | SOFT_KEYWORD
+// _tmp_153: NAME STRING | SOFT_KEYWORD
static void *
-_tmp_149_rule(Parser *p)
+_tmp_153_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33037,7 +34234,7 @@ _tmp_149_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME STRING"));
+ D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME STRING"));
expr_ty name_var;
expr_ty string_var;
if (
@@ -33046,12 +34243,12 @@ _tmp_149_rule(Parser *p)
(string_var = _PyPegen_string_token(p)) // STRING
)
{
- D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME STRING"));
+ D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME STRING"));
_res = _PyPegen_dummy_name(p, name_var, string_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME STRING"));
}
{ // SOFT_KEYWORD
@@ -33059,18 +34256,18 @@ _tmp_149_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD"));
+ D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD"));
expr_ty soft_keyword_var;
if (
(soft_keyword_var = _PyPegen_soft_keyword_token(p)) // SOFT_KEYWORD
)
{
- D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD"));
+ D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD"));
_res = soft_keyword_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "SOFT_KEYWORD"));
}
_res = NULL;
@@ -33079,9 +34276,9 @@ _tmp_149_rule(Parser *p)
return _res;
}
-// _tmp_150: 'else' | ':'
+// _tmp_154: 'else' | ':'
static void *
-_tmp_150_rule(Parser *p)
+_tmp_154_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33098,18 +34295,18 @@ _tmp_150_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'else'"));
+ D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'else'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 644)) // token='else'
+ (_keyword = _PyPegen_expect_token(p, 645)) // token='else'
)
{
- D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else'"));
+ D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'else'"));
}
{ // ':'
@@ -33117,18 +34314,18 @@ _tmp_150_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
)
{
- D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
}
_res = NULL;
@@ -33137,9 +34334,67 @@ _tmp_150_rule(Parser *p)
return _res;
}
-// _tmp_151: '=' | ':='
+// _tmp_155: FSTRING_MIDDLE | fstring_replacement_field
static void *
-_tmp_151_rule(Parser *p)
+_tmp_155_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // FSTRING_MIDDLE
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_155[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "FSTRING_MIDDLE"));
+ Token * fstring_middle_var;
+ if (
+ (fstring_middle_var = _PyPegen_expect_token(p, FSTRING_MIDDLE)) // token='FSTRING_MIDDLE'
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_155[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "FSTRING_MIDDLE"));
+ _res = fstring_middle_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_155[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "FSTRING_MIDDLE"));
+ }
+ { // fstring_replacement_field
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_155[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring_replacement_field"));
+ expr_ty fstring_replacement_field_var;
+ if (
+ (fstring_replacement_field_var = fstring_replacement_field_rule(p)) // fstring_replacement_field
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_155[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "fstring_replacement_field"));
+ _res = fstring_replacement_field_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_155[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "fstring_replacement_field"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_156: '=' | ':='
+static void *
+_tmp_156_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33156,18 +34411,18 @@ _tmp_151_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='"));
+ D(fprintf(stderr, "%*c> _tmp_156[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 22)) // token='='
)
{
- D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='"));
+ D(fprintf(stderr, "%*c+ _tmp_156[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_156[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'='"));
}
{ // ':='
@@ -33175,18 +34430,18 @@ _tmp_151_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':='"));
+ D(fprintf(stderr, "%*c> _tmp_156[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':='"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 53)) // token=':='
)
{
- D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':='"));
+ D(fprintf(stderr, "%*c+ _tmp_156[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':='"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_156[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':='"));
}
_res = NULL;
@@ -33195,9 +34450,9 @@ _tmp_151_rule(Parser *p)
return _res;
}
-// _tmp_152: list | tuple | genexp | 'True' | 'None' | 'False'
+// _tmp_157: list | tuple | genexp | 'True' | 'None' | 'False'
static void *
-_tmp_152_rule(Parser *p)
+_tmp_157_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33214,18 +34469,18 @@ _tmp_152_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list"));
+ D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list"));
expr_ty list_var;
if (
(list_var = list_rule(p)) // list
)
{
- D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list"));
+ D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list"));
_res = list_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "list"));
}
{ // tuple
@@ -33233,18 +34488,18 @@ _tmp_152_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple"));
+ D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple"));
expr_ty tuple_var;
if (
(tuple_var = tuple_rule(p)) // tuple
)
{
- D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple"));
+ D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple"));
_res = tuple_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "tuple"));
}
{ // genexp
@@ -33252,18 +34507,18 @@ _tmp_152_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp"));
+ D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp"));
expr_ty genexp_var;
if (
(genexp_var = genexp_rule(p)) // genexp
)
{
- D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp"));
+ D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp"));
_res = genexp_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "genexp"));
}
{ // 'True'
@@ -33271,18 +34526,18 @@ _tmp_152_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
+ D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 600)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 601)) // token='True'
)
{
- D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
+ D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'True'"));
}
{ // 'None'
@@ -33290,18 +34545,18 @@ _tmp_152_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
+ D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 601)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 602)) // token='None'
)
{
- D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
+ D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'None'"));
}
{ // 'False'
@@ -33309,18 +34564,18 @@ _tmp_152_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
+ D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 602)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 603)) // token='False'
)
{
- D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
+ D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'False'"));
}
_res = NULL;
@@ -33329,9 +34584,9 @@ _tmp_152_rule(Parser *p)
return _res;
}
-// _tmp_153: '=' | ':='
+// _tmp_158: '=' | ':='
static void *
-_tmp_153_rule(Parser *p)
+_tmp_158_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33348,18 +34603,18 @@ _tmp_153_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='"));
+ D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 22)) // token='='
)
{
- D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='"));
+ D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'='"));
}
{ // ':='
@@ -33367,18 +34622,18 @@ _tmp_153_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':='"));
+ D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':='"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 53)) // token=':='
)
{
- D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':='"));
+ D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':='"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':='"));
}
_res = NULL;
@@ -33387,9 +34642,9 @@ _tmp_153_rule(Parser *p)
return _res;
}
-// _loop0_154: star_named_expressions
+// _loop0_159: star_named_expressions
static asdl_seq *
-_loop0_154_rule(Parser *p)
+_loop0_159_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33415,7 +34670,7 @@ _loop0_154_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expressions"));
+ D(fprintf(stderr, "%*c> _loop0_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expressions"));
asdl_expr_seq* star_named_expressions_var;
while (
(star_named_expressions_var = star_named_expressions_rule(p)) // star_named_expressions
@@ -33438,7 +34693,7 @@ _loop0_154_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_154[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_159[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_named_expressions"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -33455,9 +34710,9 @@ _loop0_154_rule(Parser *p)
return _seq;
}
-// _loop0_155: (star_targets '=')
+// _loop0_160: (star_targets '=')
static asdl_seq *
-_loop0_155_rule(Parser *p)
+_loop0_160_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33483,13 +34738,13 @@ _loop0_155_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_155[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')"));
- void *_tmp_238_var;
+ D(fprintf(stderr, "%*c> _loop0_160[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')"));
+ void *_tmp_259_var;
while (
- (_tmp_238_var = _tmp_238_rule(p)) // star_targets '='
+ (_tmp_259_var = _tmp_259_rule(p)) // star_targets '='
)
{
- _res = _tmp_238_var;
+ _res = _tmp_259_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -33506,7 +34761,7 @@ _loop0_155_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_155[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_160[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(star_targets '=')"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -33523,9 +34778,9 @@ _loop0_155_rule(Parser *p)
return _seq;
}
-// _loop0_156: (star_targets '=')
+// _loop0_161: (star_targets '=')
static asdl_seq *
-_loop0_156_rule(Parser *p)
+_loop0_161_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33551,13 +34806,13 @@ _loop0_156_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_156[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')"));
- void *_tmp_239_var;
+ D(fprintf(stderr, "%*c> _loop0_161[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')"));
+ void *_tmp_260_var;
while (
- (_tmp_239_var = _tmp_239_rule(p)) // star_targets '='
+ (_tmp_260_var = _tmp_260_rule(p)) // star_targets '='
)
{
- _res = _tmp_239_var;
+ _res = _tmp_260_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -33574,7 +34829,7 @@ _loop0_156_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_156[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_161[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(star_targets '=')"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -33591,9 +34846,9 @@ _loop0_156_rule(Parser *p)
return _seq;
}
-// _tmp_157: yield_expr | star_expressions
+// _tmp_162: yield_expr | star_expressions
static void *
-_tmp_157_rule(Parser *p)
+_tmp_162_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33610,18 +34865,18 @@ _tmp_157_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ D(fprintf(stderr, "%*c> _tmp_162[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
expr_ty yield_expr_var;
if (
(yield_expr_var = yield_expr_rule(p)) // yield_expr
)
{
- D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ D(fprintf(stderr, "%*c+ _tmp_162[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
_res = yield_expr_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_162[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr"));
}
{ // star_expressions
@@ -33629,18 +34884,18 @@ _tmp_157_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ D(fprintf(stderr, "%*c> _tmp_162[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
expr_ty star_expressions_var;
if (
(star_expressions_var = star_expressions_rule(p)) // star_expressions
)
{
- D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ D(fprintf(stderr, "%*c+ _tmp_162[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
_res = star_expressions_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_162[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions"));
}
_res = NULL;
@@ -33649,9 +34904,9 @@ _tmp_157_rule(Parser *p)
return _res;
}
-// _tmp_158: '[' | '(' | '{'
+// _tmp_163: '[' | '(' | '{'
static void *
-_tmp_158_rule(Parser *p)
+_tmp_163_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33668,18 +34923,18 @@ _tmp_158_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['"));
+ D(fprintf(stderr, "%*c> _tmp_163[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 9)) // token='['
)
{
- D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['"));
+ D(fprintf(stderr, "%*c+ _tmp_163[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_163[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'['"));
}
{ // '('
@@ -33687,18 +34942,18 @@ _tmp_158_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('"));
+ D(fprintf(stderr, "%*c> _tmp_163[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 7)) // token='('
)
{
- D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('"));
+ D(fprintf(stderr, "%*c+ _tmp_163[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_163[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'('"));
}
{ // '{'
@@ -33706,18 +34961,18 @@ _tmp_158_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'"));
+ D(fprintf(stderr, "%*c> _tmp_163[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 25)) // token='{'
)
{
- D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'"));
+ D(fprintf(stderr, "%*c+ _tmp_163[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_163[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{'"));
}
_res = NULL;
@@ -33726,9 +34981,9 @@ _tmp_158_rule(Parser *p)
return _res;
}
-// _tmp_159: '[' | '{'
+// _tmp_164: '[' | '{'
static void *
-_tmp_159_rule(Parser *p)
+_tmp_164_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33745,18 +35000,18 @@ _tmp_159_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['"));
+ D(fprintf(stderr, "%*c> _tmp_164[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 9)) // token='['
)
{
- D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['"));
+ D(fprintf(stderr, "%*c+ _tmp_164[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_159[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_164[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'['"));
}
{ // '{'
@@ -33764,18 +35019,18 @@ _tmp_159_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'"));
+ D(fprintf(stderr, "%*c> _tmp_164[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 25)) // token='{'
)
{
- D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'"));
+ D(fprintf(stderr, "%*c+ _tmp_164[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_159[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_164[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{'"));
}
_res = NULL;
@@ -33784,9 +35039,9 @@ _tmp_159_rule(Parser *p)
return _res;
}
-// _tmp_160: '[' | '{'
+// _tmp_165: '[' | '{'
static void *
-_tmp_160_rule(Parser *p)
+_tmp_165_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33803,18 +35058,18 @@ _tmp_160_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_160[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['"));
+ D(fprintf(stderr, "%*c> _tmp_165[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 9)) // token='['
)
{
- D(fprintf(stderr, "%*c+ _tmp_160[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['"));
+ D(fprintf(stderr, "%*c+ _tmp_165[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_160[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_165[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'['"));
}
{ // '{'
@@ -33822,18 +35077,18 @@ _tmp_160_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_160[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'"));
+ D(fprintf(stderr, "%*c> _tmp_165[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 25)) // token='{'
)
{
- D(fprintf(stderr, "%*c+ _tmp_160[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'"));
+ D(fprintf(stderr, "%*c+ _tmp_165[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_160[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_165[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{'"));
}
_res = NULL;
@@ -33842,9 +35097,9 @@ _tmp_160_rule(Parser *p)
return _res;
}
-// _tmp_161: slash_no_default | slash_with_default
+// _tmp_166: slash_no_default | slash_with_default
static void *
-_tmp_161_rule(Parser *p)
+_tmp_166_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33861,18 +35116,18 @@ _tmp_161_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_161[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default"));
+ D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default"));
asdl_arg_seq* slash_no_default_var;
if (
(slash_no_default_var = slash_no_default_rule(p)) // slash_no_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_161[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default"));
+ D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default"));
_res = slash_no_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_161[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_no_default"));
}
{ // slash_with_default
@@ -33880,18 +35135,18 @@ _tmp_161_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_161[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default"));
+ D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default"));
SlashWithDefault* slash_with_default_var;
if (
(slash_with_default_var = slash_with_default_rule(p)) // slash_with_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_161[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default"));
+ D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default"));
_res = slash_with_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_161[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_with_default"));
}
_res = NULL;
@@ -33900,9 +35155,9 @@ _tmp_161_rule(Parser *p)
return _res;
}
-// _loop0_162: param_maybe_default
+// _loop0_167: param_maybe_default
static asdl_seq *
-_loop0_162_rule(Parser *p)
+_loop0_167_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33928,7 +35183,7 @@ _loop0_162_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_162[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop0_167[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
NameDefaultPair* param_maybe_default_var;
while (
(param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default
@@ -33951,7 +35206,7 @@ _loop0_162_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_162[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_167[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -33968,9 +35223,9 @@ _loop0_162_rule(Parser *p)
return _seq;
}
-// _loop0_163: param_no_default
+// _loop0_168: param_no_default
static asdl_seq *
-_loop0_163_rule(Parser *p)
+_loop0_168_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -33996,7 +35251,7 @@ _loop0_163_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_163[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _loop0_168[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
while (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
@@ -34019,7 +35274,7 @@ _loop0_163_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_163[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_168[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -34036,9 +35291,9 @@ _loop0_163_rule(Parser *p)
return _seq;
}
-// _loop0_164: param_no_default
+// _loop0_169: param_no_default
static asdl_seq *
-_loop0_164_rule(Parser *p)
+_loop0_169_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34064,7 +35319,7 @@ _loop0_164_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_164[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _loop0_169[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
while (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
@@ -34087,7 +35342,7 @@ _loop0_164_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_164[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_169[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -34104,9 +35359,9 @@ _loop0_164_rule(Parser *p)
return _seq;
}
-// _loop1_165: param_no_default
+// _loop1_170: param_no_default
static asdl_seq *
-_loop1_165_rule(Parser *p)
+_loop1_170_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34132,7 +35387,7 @@ _loop1_165_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_165[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _loop1_170[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
while (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
@@ -34155,7 +35410,7 @@ _loop1_165_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_165[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_170[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -34177,9 +35432,9 @@ _loop1_165_rule(Parser *p)
return _seq;
}
-// _tmp_166: slash_no_default | slash_with_default
+// _tmp_171: slash_no_default | slash_with_default
static void *
-_tmp_166_rule(Parser *p)
+_tmp_171_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34196,18 +35451,18 @@ _tmp_166_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default"));
+ D(fprintf(stderr, "%*c> _tmp_171[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default"));
asdl_arg_seq* slash_no_default_var;
if (
(slash_no_default_var = slash_no_default_rule(p)) // slash_no_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default"));
+ D(fprintf(stderr, "%*c+ _tmp_171[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default"));
_res = slash_no_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_171[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_no_default"));
}
{ // slash_with_default
@@ -34215,18 +35470,18 @@ _tmp_166_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default"));
+ D(fprintf(stderr, "%*c> _tmp_171[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default"));
SlashWithDefault* slash_with_default_var;
if (
(slash_with_default_var = slash_with_default_rule(p)) // slash_with_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default"));
+ D(fprintf(stderr, "%*c+ _tmp_171[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default"));
_res = slash_with_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_171[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_with_default"));
}
_res = NULL;
@@ -34235,9 +35490,9 @@ _tmp_166_rule(Parser *p)
return _res;
}
-// _loop0_167: param_maybe_default
+// _loop0_172: param_maybe_default
static asdl_seq *
-_loop0_167_rule(Parser *p)
+_loop0_172_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34263,7 +35518,7 @@ _loop0_167_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_167[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop0_172[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
NameDefaultPair* param_maybe_default_var;
while (
(param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default
@@ -34286,7 +35541,7 @@ _loop0_167_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_167[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_172[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -34303,9 +35558,9 @@ _loop0_167_rule(Parser *p)
return _seq;
}
-// _tmp_168: ',' | param_no_default
+// _tmp_173: ',' | param_no_default
static void *
-_tmp_168_rule(Parser *p)
+_tmp_173_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34322,18 +35577,18 @@ _tmp_168_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_168[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_173[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_168[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_173[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_168[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_173[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
{ // param_no_default
@@ -34341,18 +35596,18 @@ _tmp_168_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_168[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _tmp_173[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
if (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_168[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c+ _tmp_173[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default"));
_res = param_no_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_168[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_173[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
_res = NULL;
@@ -34361,9 +35616,9 @@ _tmp_168_rule(Parser *p)
return _res;
}
-// _loop0_169: param_maybe_default
+// _loop0_174: param_maybe_default
static asdl_seq *
-_loop0_169_rule(Parser *p)
+_loop0_174_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34389,7 +35644,7 @@ _loop0_169_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_169[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop0_174[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
NameDefaultPair* param_maybe_default_var;
while (
(param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default
@@ -34412,7 +35667,7 @@ _loop0_169_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_169[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_174[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -34429,9 +35684,9 @@ _loop0_169_rule(Parser *p)
return _seq;
}
-// _loop1_170: param_maybe_default
+// _loop1_175: param_maybe_default
static asdl_seq *
-_loop1_170_rule(Parser *p)
+_loop1_175_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34457,7 +35712,7 @@ _loop1_170_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_170[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop1_175[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
NameDefaultPair* param_maybe_default_var;
while (
(param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default
@@ -34480,7 +35735,7 @@ _loop1_170_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_170[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_175[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -34502,9 +35757,9 @@ _loop1_170_rule(Parser *p)
return _seq;
}
-// _tmp_171: ')' | ','
+// _tmp_176: ')' | ','
static void *
-_tmp_171_rule(Parser *p)
+_tmp_176_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34521,18 +35776,18 @@ _tmp_171_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_171[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_171[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_171[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'"));
}
{ // ','
@@ -34540,18 +35795,18 @@ _tmp_171_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_171[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_171[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_171[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
_res = NULL;
@@ -34560,9 +35815,9 @@ _tmp_171_rule(Parser *p)
return _res;
}
-// _tmp_172: ')' | ',' (')' | '**')
+// _tmp_177: ')' | ',' (')' | '**')
static void *
-_tmp_172_rule(Parser *p)
+_tmp_177_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34579,18 +35834,18 @@ _tmp_172_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_172[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c> _tmp_177[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_172[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c+ _tmp_177[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_172[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_177[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'"));
}
{ // ',' (')' | '**')
@@ -34598,21 +35853,21 @@ _tmp_172_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_172[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')"));
+ D(fprintf(stderr, "%*c> _tmp_177[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')"));
Token * _literal;
- void *_tmp_240_var;
+ void *_tmp_261_var;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (_tmp_240_var = _tmp_240_rule(p)) // ')' | '**'
+ (_tmp_261_var = _tmp_261_rule(p)) // ')' | '**'
)
{
- D(fprintf(stderr, "%*c+ _tmp_172[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')"));
- _res = _PyPegen_dummy_name(p, _literal, _tmp_240_var);
+ D(fprintf(stderr, "%*c+ _tmp_177[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')"));
+ _res = _PyPegen_dummy_name(p, _literal, _tmp_261_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_172[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_177[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (')' | '**')"));
}
_res = NULL;
@@ -34621,9 +35876,9 @@ _tmp_172_rule(Parser *p)
return _res;
}
-// _tmp_173: param_no_default | ','
+// _tmp_178: param_no_default | ','
static void *
-_tmp_173_rule(Parser *p)
+_tmp_178_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34640,18 +35895,18 @@ _tmp_173_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_173[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _tmp_178[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
if (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_173[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c+ _tmp_178[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default"));
_res = param_no_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_173[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_178[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
{ // ','
@@ -34659,18 +35914,18 @@ _tmp_173_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_173[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_178[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_173[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_178[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_173[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_178[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
_res = NULL;
@@ -34679,9 +35934,9 @@ _tmp_173_rule(Parser *p)
return _res;
}
-// _loop0_174: param_maybe_default
+// _loop0_179: param_maybe_default
static asdl_seq *
-_loop0_174_rule(Parser *p)
+_loop0_179_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34707,7 +35962,7 @@ _loop0_174_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_174[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop0_179[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
NameDefaultPair* param_maybe_default_var;
while (
(param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default
@@ -34730,7 +35985,7 @@ _loop0_174_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_174[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_179[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -34747,9 +36002,9 @@ _loop0_174_rule(Parser *p)
return _seq;
}
-// _tmp_175: param_no_default | ','
+// _tmp_180: param_no_default | ','
static void *
-_tmp_175_rule(Parser *p)
+_tmp_180_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34766,18 +36021,18 @@ _tmp_175_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_175[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _tmp_180[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
if (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_175[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c+ _tmp_180[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default"));
_res = param_no_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_175[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_180[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
{ // ','
@@ -34785,18 +36040,18 @@ _tmp_175_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_175[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_180[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_175[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_180[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_175[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_180[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
_res = NULL;
@@ -34805,9 +36060,9 @@ _tmp_175_rule(Parser *p)
return _res;
}
-// _tmp_176: '*' | '**' | '/'
+// _tmp_181: '*' | '**' | '/'
static void *
-_tmp_176_rule(Parser *p)
+_tmp_181_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34824,18 +36079,18 @@ _tmp_176_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'"));
+ D(fprintf(stderr, "%*c> _tmp_181[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
)
{
- D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'"));
+ D(fprintf(stderr, "%*c+ _tmp_181[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_181[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*'"));
}
{ // '**'
@@ -34843,18 +36098,18 @@ _tmp_176_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'"));
+ D(fprintf(stderr, "%*c> _tmp_181[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 35)) // token='**'
)
{
- D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'"));
+ D(fprintf(stderr, "%*c+ _tmp_181[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_181[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'"));
}
{ // '/'
@@ -34862,18 +36117,18 @@ _tmp_176_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'"));
+ D(fprintf(stderr, "%*c> _tmp_181[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
)
{
- D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'"));
+ D(fprintf(stderr, "%*c+ _tmp_181[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_181[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'/'"));
}
_res = NULL;
@@ -34882,9 +36137,9 @@ _tmp_176_rule(Parser *p)
return _res;
}
-// _loop1_177: param_with_default
+// _loop1_182: param_with_default
static asdl_seq *
-_loop1_177_rule(Parser *p)
+_loop1_182_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34910,7 +36165,7 @@ _loop1_177_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_177[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
+ D(fprintf(stderr, "%*c> _loop1_182[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
NameDefaultPair* param_with_default_var;
while (
(param_with_default_var = param_with_default_rule(p)) // param_with_default
@@ -34933,7 +36188,7 @@ _loop1_177_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_177[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_182[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -34955,9 +36210,9 @@ _loop1_177_rule(Parser *p)
return _seq;
}
-// _tmp_178: lambda_slash_no_default | lambda_slash_with_default
+// _tmp_183: lambda_slash_no_default | lambda_slash_with_default
static void *
-_tmp_178_rule(Parser *p)
+_tmp_183_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -34974,18 +36229,18 @@ _tmp_178_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_178[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default"));
+ D(fprintf(stderr, "%*c> _tmp_183[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default"));
asdl_arg_seq* lambda_slash_no_default_var;
if (
(lambda_slash_no_default_var = lambda_slash_no_default_rule(p)) // lambda_slash_no_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_178[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default"));
+ D(fprintf(stderr, "%*c+ _tmp_183[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default"));
_res = lambda_slash_no_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_178[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_183[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_no_default"));
}
{ // lambda_slash_with_default
@@ -34993,18 +36248,18 @@ _tmp_178_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_178[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default"));
+ D(fprintf(stderr, "%*c> _tmp_183[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default"));
SlashWithDefault* lambda_slash_with_default_var;
if (
(lambda_slash_with_default_var = lambda_slash_with_default_rule(p)) // lambda_slash_with_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_178[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default"));
+ D(fprintf(stderr, "%*c+ _tmp_183[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default"));
_res = lambda_slash_with_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_178[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_183[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_with_default"));
}
_res = NULL;
@@ -35013,9 +36268,9 @@ _tmp_178_rule(Parser *p)
return _res;
}
-// _loop0_179: lambda_param_maybe_default
+// _loop0_184: lambda_param_maybe_default
static asdl_seq *
-_loop0_179_rule(Parser *p)
+_loop0_184_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35041,7 +36296,7 @@ _loop0_179_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_179[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop0_184[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
NameDefaultPair* lambda_param_maybe_default_var;
while (
(lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default
@@ -35064,7 +36319,7 @@ _loop0_179_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_179[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_184[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -35081,9 +36336,9 @@ _loop0_179_rule(Parser *p)
return _seq;
}
-// _loop0_180: lambda_param_no_default
+// _loop0_185: lambda_param_no_default
static asdl_seq *
-_loop0_180_rule(Parser *p)
+_loop0_185_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35109,7 +36364,7 @@ _loop0_180_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_180[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ D(fprintf(stderr, "%*c> _loop0_185[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
arg_ty lambda_param_no_default_var;
while (
(lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
@@ -35132,7 +36387,7 @@ _loop0_180_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_180[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_185[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -35149,9 +36404,9 @@ _loop0_180_rule(Parser *p)
return _seq;
}
-// _loop0_181: lambda_param_no_default
+// _loop0_186: lambda_param_no_default
static asdl_seq *
-_loop0_181_rule(Parser *p)
+_loop0_186_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35177,7 +36432,7 @@ _loop0_181_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_181[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ D(fprintf(stderr, "%*c> _loop0_186[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
arg_ty lambda_param_no_default_var;
while (
(lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
@@ -35200,7 +36455,7 @@ _loop0_181_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_181[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_186[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -35217,9 +36472,9 @@ _loop0_181_rule(Parser *p)
return _seq;
}
-// _loop0_183: ',' lambda_param
+// _loop0_188: ',' lambda_param
static asdl_seq *
-_loop0_183_rule(Parser *p)
+_loop0_188_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35245,7 +36500,7 @@ _loop0_183_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_183[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' lambda_param"));
+ D(fprintf(stderr, "%*c> _loop0_188[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' lambda_param"));
Token * _literal;
arg_ty elem;
while (
@@ -35277,7 +36532,7 @@ _loop0_183_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_183[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_188[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' lambda_param"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -35294,9 +36549,9 @@ _loop0_183_rule(Parser *p)
return _seq;
}
-// _gather_182: lambda_param _loop0_183
+// _gather_187: lambda_param _loop0_188
static asdl_seq *
-_gather_182_rule(Parser *p)
+_gather_187_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35308,27 +36563,27 @@ _gather_182_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // lambda_param _loop0_183
+ { // lambda_param _loop0_188
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_182[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_183"));
+ D(fprintf(stderr, "%*c> _gather_187[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_188"));
arg_ty elem;
asdl_seq * seq;
if (
(elem = lambda_param_rule(p)) // lambda_param
&&
- (seq = _loop0_183_rule(p)) // _loop0_183
+ (seq = _loop0_188_rule(p)) // _loop0_188
)
{
- D(fprintf(stderr, "%*c+ _gather_182[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_183"));
+ D(fprintf(stderr, "%*c+ _gather_187[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_188"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_182[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param _loop0_183"));
+ D(fprintf(stderr, "%*c%s _gather_187[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param _loop0_188"));
}
_res = NULL;
done:
@@ -35336,9 +36591,9 @@ _gather_182_rule(Parser *p)
return _res;
}
-// _tmp_184: lambda_slash_no_default | lambda_slash_with_default
+// _tmp_189: lambda_slash_no_default | lambda_slash_with_default
static void *
-_tmp_184_rule(Parser *p)
+_tmp_189_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35355,18 +36610,18 @@ _tmp_184_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_184[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default"));
+ D(fprintf(stderr, "%*c> _tmp_189[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default"));
asdl_arg_seq* lambda_slash_no_default_var;
if (
(lambda_slash_no_default_var = lambda_slash_no_default_rule(p)) // lambda_slash_no_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_184[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default"));
+ D(fprintf(stderr, "%*c+ _tmp_189[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default"));
_res = lambda_slash_no_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_184[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_189[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_no_default"));
}
{ // lambda_slash_with_default
@@ -35374,18 +36629,18 @@ _tmp_184_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_184[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default"));
+ D(fprintf(stderr, "%*c> _tmp_189[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default"));
SlashWithDefault* lambda_slash_with_default_var;
if (
(lambda_slash_with_default_var = lambda_slash_with_default_rule(p)) // lambda_slash_with_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_184[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default"));
+ D(fprintf(stderr, "%*c+ _tmp_189[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default"));
_res = lambda_slash_with_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_184[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_189[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_with_default"));
}
_res = NULL;
@@ -35394,9 +36649,9 @@ _tmp_184_rule(Parser *p)
return _res;
}
-// _loop0_185: lambda_param_maybe_default
+// _loop0_190: lambda_param_maybe_default
static asdl_seq *
-_loop0_185_rule(Parser *p)
+_loop0_190_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35422,7 +36677,7 @@ _loop0_185_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_185[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop0_190[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
NameDefaultPair* lambda_param_maybe_default_var;
while (
(lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default
@@ -35445,7 +36700,7 @@ _loop0_185_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_185[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_190[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -35462,9 +36717,9 @@ _loop0_185_rule(Parser *p)
return _seq;
}
-// _tmp_186: ',' | lambda_param_no_default
+// _tmp_191: ',' | lambda_param_no_default
static void *
-_tmp_186_rule(Parser *p)
+_tmp_191_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35481,18 +36736,18 @@ _tmp_186_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_186[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_191[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_186[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_191[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_186[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_191[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
{ // lambda_param_no_default
@@ -35500,18 +36755,18 @@ _tmp_186_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_186[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ D(fprintf(stderr, "%*c> _tmp_191[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
arg_ty lambda_param_no_default_var;
if (
(lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_186[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ D(fprintf(stderr, "%*c+ _tmp_191[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
_res = lambda_param_no_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_186[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_191[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
}
_res = NULL;
@@ -35520,9 +36775,9 @@ _tmp_186_rule(Parser *p)
return _res;
}
-// _loop0_187: lambda_param_maybe_default
+// _loop0_192: lambda_param_maybe_default
static asdl_seq *
-_loop0_187_rule(Parser *p)
+_loop0_192_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35548,7 +36803,7 @@ _loop0_187_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_187[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop0_192[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
NameDefaultPair* lambda_param_maybe_default_var;
while (
(lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default
@@ -35571,7 +36826,7 @@ _loop0_187_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_187[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_192[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -35588,9 +36843,9 @@ _loop0_187_rule(Parser *p)
return _seq;
}
-// _loop1_188: lambda_param_maybe_default
+// _loop1_193: lambda_param_maybe_default
static asdl_seq *
-_loop1_188_rule(Parser *p)
+_loop1_193_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35616,7 +36871,7 @@ _loop1_188_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_188[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop1_193[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
NameDefaultPair* lambda_param_maybe_default_var;
while (
(lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default
@@ -35639,7 +36894,7 @@ _loop1_188_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_188[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_193[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -35661,9 +36916,9 @@ _loop1_188_rule(Parser *p)
return _seq;
}
-// _loop1_189: lambda_param_with_default
+// _loop1_194: lambda_param_with_default
static asdl_seq *
-_loop1_189_rule(Parser *p)
+_loop1_194_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35689,7 +36944,7 @@ _loop1_189_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_189[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default"));
+ D(fprintf(stderr, "%*c> _loop1_194[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default"));
NameDefaultPair* lambda_param_with_default_var;
while (
(lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default
@@ -35712,7 +36967,7 @@ _loop1_189_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_189[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_194[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -35734,9 +36989,9 @@ _loop1_189_rule(Parser *p)
return _seq;
}
-// _tmp_190: ':' | ',' (':' | '**')
+// _tmp_195: ':' | ',' (':' | '**')
static void *
-_tmp_190_rule(Parser *p)
+_tmp_195_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35753,18 +37008,18 @@ _tmp_190_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_190[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c> _tmp_195[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
)
{
- D(fprintf(stderr, "%*c+ _tmp_190[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c+ _tmp_195[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_190[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_195[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
}
{ // ',' (':' | '**')
@@ -35772,21 +37027,21 @@ _tmp_190_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_190[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')"));
+ D(fprintf(stderr, "%*c> _tmp_195[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')"));
Token * _literal;
- void *_tmp_241_var;
+ void *_tmp_262_var;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (_tmp_241_var = _tmp_241_rule(p)) // ':' | '**'
+ (_tmp_262_var = _tmp_262_rule(p)) // ':' | '**'
)
{
- D(fprintf(stderr, "%*c+ _tmp_190[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')"));
- _res = _PyPegen_dummy_name(p, _literal, _tmp_241_var);
+ D(fprintf(stderr, "%*c+ _tmp_195[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')"));
+ _res = _PyPegen_dummy_name(p, _literal, _tmp_262_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_190[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_195[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (':' | '**')"));
}
_res = NULL;
@@ -35795,9 +37050,9 @@ _tmp_190_rule(Parser *p)
return _res;
}
-// _tmp_191: lambda_param_no_default | ','
+// _tmp_196: lambda_param_no_default | ','
static void *
-_tmp_191_rule(Parser *p)
+_tmp_196_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35814,18 +37069,18 @@ _tmp_191_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_191[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ D(fprintf(stderr, "%*c> _tmp_196[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
arg_ty lambda_param_no_default_var;
if (
(lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_191[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ D(fprintf(stderr, "%*c+ _tmp_196[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
_res = lambda_param_no_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_191[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_196[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
}
{ // ','
@@ -35833,18 +37088,18 @@ _tmp_191_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_191[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_196[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_191[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_196[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_191[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_196[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
_res = NULL;
@@ -35853,9 +37108,9 @@ _tmp_191_rule(Parser *p)
return _res;
}
-// _loop0_192: lambda_param_maybe_default
+// _loop0_197: lambda_param_maybe_default
static asdl_seq *
-_loop0_192_rule(Parser *p)
+_loop0_197_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35881,7 +37136,7 @@ _loop0_192_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_192[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop0_197[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
NameDefaultPair* lambda_param_maybe_default_var;
while (
(lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default
@@ -35904,7 +37159,7 @@ _loop0_192_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_192[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_197[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -35921,9 +37176,9 @@ _loop0_192_rule(Parser *p)
return _seq;
}
-// _tmp_193: lambda_param_no_default | ','
+// _tmp_198: lambda_param_no_default | ','
static void *
-_tmp_193_rule(Parser *p)
+_tmp_198_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35940,18 +37195,18 @@ _tmp_193_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_193[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ D(fprintf(stderr, "%*c> _tmp_198[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
arg_ty lambda_param_no_default_var;
if (
(lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_193[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ D(fprintf(stderr, "%*c+ _tmp_198[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
_res = lambda_param_no_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_193[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_198[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
}
{ // ','
@@ -35959,18 +37214,18 @@ _tmp_193_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_193[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_198[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_193[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_198[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_193[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_198[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
_res = NULL;
@@ -35979,9 +37234,9 @@ _tmp_193_rule(Parser *p)
return _res;
}
-// _tmp_194: '*' | '**' | '/'
+// _tmp_199: '*' | '**' | '/'
static void *
-_tmp_194_rule(Parser *p)
+_tmp_199_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -35998,18 +37253,18 @@ _tmp_194_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_194[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'"));
+ D(fprintf(stderr, "%*c> _tmp_199[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
)
{
- D(fprintf(stderr, "%*c+ _tmp_194[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'"));
+ D(fprintf(stderr, "%*c+ _tmp_199[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_194[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_199[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*'"));
}
{ // '**'
@@ -36017,18 +37272,18 @@ _tmp_194_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_194[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'"));
+ D(fprintf(stderr, "%*c> _tmp_199[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 35)) // token='**'
)
{
- D(fprintf(stderr, "%*c+ _tmp_194[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'"));
+ D(fprintf(stderr, "%*c+ _tmp_199[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_194[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_199[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'"));
}
{ // '/'
@@ -36036,18 +37291,18 @@ _tmp_194_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_194[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'"));
+ D(fprintf(stderr, "%*c> _tmp_199[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
)
{
- D(fprintf(stderr, "%*c+ _tmp_194[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'"));
+ D(fprintf(stderr, "%*c+ _tmp_199[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_194[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_199[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'/'"));
}
_res = NULL;
@@ -36056,9 +37311,9 @@ _tmp_194_rule(Parser *p)
return _res;
}
-// _tmp_195: ',' | ')' | ':'
+// _tmp_200: ',' | ')' | ':'
static void *
-_tmp_195_rule(Parser *p)
+_tmp_200_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36075,18 +37330,18 @@ _tmp_195_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_195[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_200[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_195[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_200[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_195[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_200[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
{ // ')'
@@ -36094,18 +37349,18 @@ _tmp_195_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_195[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c> _tmp_200[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_195[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c+ _tmp_200[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_195[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_200[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'"));
}
{ // ':'
@@ -36113,18 +37368,18 @@ _tmp_195_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_195[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c> _tmp_200[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
)
{
- D(fprintf(stderr, "%*c+ _tmp_195[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c+ _tmp_200[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_195[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_200[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
}
_res = NULL;
@@ -36133,9 +37388,9 @@ _tmp_195_rule(Parser *p)
return _res;
}
-// _loop0_197: ',' (expression ['as' star_target])
+// _loop0_202: ',' (expression ['as' star_target])
static asdl_seq *
-_loop0_197_rule(Parser *p)
+_loop0_202_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36161,13 +37416,13 @@ _loop0_197_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_197[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expression ['as' star_target])"));
+ D(fprintf(stderr, "%*c> _loop0_202[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expression ['as' star_target])"));
Token * _literal;
void *elem;
while (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (elem = _tmp_242_rule(p)) // expression ['as' star_target]
+ (elem = _tmp_263_rule(p)) // expression ['as' star_target]
)
{
_res = elem;
@@ -36193,7 +37448,7 @@ _loop0_197_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_197[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_202[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (expression ['as' star_target])"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -36210,9 +37465,9 @@ _loop0_197_rule(Parser *p)
return _seq;
}
-// _gather_196: (expression ['as' star_target]) _loop0_197
+// _gather_201: (expression ['as' star_target]) _loop0_202
static asdl_seq *
-_gather_196_rule(Parser *p)
+_gather_201_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36224,27 +37479,27 @@ _gather_196_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // (expression ['as' star_target]) _loop0_197
+ { // (expression ['as' star_target]) _loop0_202
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_196[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_197"));
+ D(fprintf(stderr, "%*c> _gather_201[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_202"));
void *elem;
asdl_seq * seq;
if (
- (elem = _tmp_242_rule(p)) // expression ['as' star_target]
+ (elem = _tmp_263_rule(p)) // expression ['as' star_target]
&&
- (seq = _loop0_197_rule(p)) // _loop0_197
+ (seq = _loop0_202_rule(p)) // _loop0_202
)
{
- D(fprintf(stderr, "%*c+ _gather_196[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_197"));
+ D(fprintf(stderr, "%*c+ _gather_201[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_202"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_196[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expression ['as' star_target]) _loop0_197"));
+ D(fprintf(stderr, "%*c%s _gather_201[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expression ['as' star_target]) _loop0_202"));
}
_res = NULL;
done:
@@ -36252,9 +37507,9 @@ _gather_196_rule(Parser *p)
return _res;
}
-// _loop0_199: ',' (expressions ['as' star_target])
+// _loop0_204: ',' (expressions ['as' star_target])
static asdl_seq *
-_loop0_199_rule(Parser *p)
+_loop0_204_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36280,13 +37535,13 @@ _loop0_199_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_199[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expressions ['as' star_target])"));
+ D(fprintf(stderr, "%*c> _loop0_204[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expressions ['as' star_target])"));
Token * _literal;
void *elem;
while (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (elem = _tmp_243_rule(p)) // expressions ['as' star_target]
+ (elem = _tmp_264_rule(p)) // expressions ['as' star_target]
)
{
_res = elem;
@@ -36312,7 +37567,7 @@ _loop0_199_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_199[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_204[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (expressions ['as' star_target])"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -36329,9 +37584,9 @@ _loop0_199_rule(Parser *p)
return _seq;
}
-// _gather_198: (expressions ['as' star_target]) _loop0_199
+// _gather_203: (expressions ['as' star_target]) _loop0_204
static asdl_seq *
-_gather_198_rule(Parser *p)
+_gather_203_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36343,27 +37598,27 @@ _gather_198_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // (expressions ['as' star_target]) _loop0_199
+ { // (expressions ['as' star_target]) _loop0_204
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_198[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_199"));
+ D(fprintf(stderr, "%*c> _gather_203[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_204"));
void *elem;
asdl_seq * seq;
if (
- (elem = _tmp_243_rule(p)) // expressions ['as' star_target]
+ (elem = _tmp_264_rule(p)) // expressions ['as' star_target]
&&
- (seq = _loop0_199_rule(p)) // _loop0_199
+ (seq = _loop0_204_rule(p)) // _loop0_204
)
{
- D(fprintf(stderr, "%*c+ _gather_198[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_199"));
+ D(fprintf(stderr, "%*c+ _gather_203[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_204"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_198[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expressions ['as' star_target]) _loop0_199"));
+ D(fprintf(stderr, "%*c%s _gather_203[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expressions ['as' star_target]) _loop0_204"));
}
_res = NULL;
done:
@@ -36371,9 +37626,9 @@ _gather_198_rule(Parser *p)
return _res;
}
-// _loop0_201: ',' (expression ['as' star_target])
+// _loop0_206: ',' (expression ['as' star_target])
static asdl_seq *
-_loop0_201_rule(Parser *p)
+_loop0_206_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36399,13 +37654,13 @@ _loop0_201_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_201[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expression ['as' star_target])"));
+ D(fprintf(stderr, "%*c> _loop0_206[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expression ['as' star_target])"));
Token * _literal;
void *elem;
while (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (elem = _tmp_244_rule(p)) // expression ['as' star_target]
+ (elem = _tmp_265_rule(p)) // expression ['as' star_target]
)
{
_res = elem;
@@ -36431,7 +37686,7 @@ _loop0_201_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_201[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_206[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (expression ['as' star_target])"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -36448,9 +37703,9 @@ _loop0_201_rule(Parser *p)
return _seq;
}
-// _gather_200: (expression ['as' star_target]) _loop0_201
+// _gather_205: (expression ['as' star_target]) _loop0_206
static asdl_seq *
-_gather_200_rule(Parser *p)
+_gather_205_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36462,27 +37717,27 @@ _gather_200_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // (expression ['as' star_target]) _loop0_201
+ { // (expression ['as' star_target]) _loop0_206
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_200[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_201"));
+ D(fprintf(stderr, "%*c> _gather_205[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_206"));
void *elem;
asdl_seq * seq;
if (
- (elem = _tmp_244_rule(p)) // expression ['as' star_target]
+ (elem = _tmp_265_rule(p)) // expression ['as' star_target]
&&
- (seq = _loop0_201_rule(p)) // _loop0_201
+ (seq = _loop0_206_rule(p)) // _loop0_206
)
{
- D(fprintf(stderr, "%*c+ _gather_200[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_201"));
+ D(fprintf(stderr, "%*c+ _gather_205[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_206"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_200[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expression ['as' star_target]) _loop0_201"));
+ D(fprintf(stderr, "%*c%s _gather_205[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expression ['as' star_target]) _loop0_206"));
}
_res = NULL;
done:
@@ -36490,9 +37745,9 @@ _gather_200_rule(Parser *p)
return _res;
}
-// _loop0_203: ',' (expressions ['as' star_target])
+// _loop0_208: ',' (expressions ['as' star_target])
static asdl_seq *
-_loop0_203_rule(Parser *p)
+_loop0_208_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36518,13 +37773,13 @@ _loop0_203_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_203[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expressions ['as' star_target])"));
+ D(fprintf(stderr, "%*c> _loop0_208[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expressions ['as' star_target])"));
Token * _literal;
void *elem;
while (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (elem = _tmp_245_rule(p)) // expressions ['as' star_target]
+ (elem = _tmp_266_rule(p)) // expressions ['as' star_target]
)
{
_res = elem;
@@ -36550,7 +37805,7 @@ _loop0_203_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_203[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_208[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (expressions ['as' star_target])"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -36567,9 +37822,9 @@ _loop0_203_rule(Parser *p)
return _seq;
}
-// _gather_202: (expressions ['as' star_target]) _loop0_203
+// _gather_207: (expressions ['as' star_target]) _loop0_208
static asdl_seq *
-_gather_202_rule(Parser *p)
+_gather_207_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36581,27 +37836,27 @@ _gather_202_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // (expressions ['as' star_target]) _loop0_203
+ { // (expressions ['as' star_target]) _loop0_208
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_202[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_203"));
+ D(fprintf(stderr, "%*c> _gather_207[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_208"));
void *elem;
asdl_seq * seq;
if (
- (elem = _tmp_245_rule(p)) // expressions ['as' star_target]
+ (elem = _tmp_266_rule(p)) // expressions ['as' star_target]
&&
- (seq = _loop0_203_rule(p)) // _loop0_203
+ (seq = _loop0_208_rule(p)) // _loop0_208
)
{
- D(fprintf(stderr, "%*c+ _gather_202[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_203"));
+ D(fprintf(stderr, "%*c+ _gather_207[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_208"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_202[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expressions ['as' star_target]) _loop0_203"));
+ D(fprintf(stderr, "%*c%s _gather_207[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expressions ['as' star_target]) _loop0_208"));
}
_res = NULL;
done:
@@ -36609,9 +37864,9 @@ _gather_202_rule(Parser *p)
return _res;
}
-// _tmp_204: 'except' | 'finally'
+// _tmp_209: 'except' | 'finally'
static void *
-_tmp_204_rule(Parser *p)
+_tmp_209_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36628,18 +37883,18 @@ _tmp_204_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_204[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except'"));
+ D(fprintf(stderr, "%*c> _tmp_209[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 636)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 637)) // token='except'
)
{
- D(fprintf(stderr, "%*c+ _tmp_204[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except'"));
+ D(fprintf(stderr, "%*c+ _tmp_209[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_204[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_209[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'except'"));
}
{ // 'finally'
@@ -36647,18 +37902,18 @@ _tmp_204_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_204[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'finally'"));
+ D(fprintf(stderr, "%*c> _tmp_209[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'finally'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 632)) // token='finally'
+ (_keyword = _PyPegen_expect_token(p, 633)) // token='finally'
)
{
- D(fprintf(stderr, "%*c+ _tmp_204[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'finally'"));
+ D(fprintf(stderr, "%*c+ _tmp_209[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'finally'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_204[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_209[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'finally'"));
}
_res = NULL;
@@ -36667,9 +37922,9 @@ _tmp_204_rule(Parser *p)
return _res;
}
-// _loop0_205: block
+// _loop0_210: block
static asdl_seq *
-_loop0_205_rule(Parser *p)
+_loop0_210_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36695,7 +37950,7 @@ _loop0_205_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_205[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "block"));
+ D(fprintf(stderr, "%*c> _loop0_210[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "block"));
asdl_stmt_seq* block_var;
while (
(block_var = block_rule(p)) // block
@@ -36718,7 +37973,7 @@ _loop0_205_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_205[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_210[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "block"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -36735,9 +37990,9 @@ _loop0_205_rule(Parser *p)
return _seq;
}
-// _loop1_206: except_block
+// _loop1_211: except_block
static asdl_seq *
-_loop1_206_rule(Parser *p)
+_loop1_211_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36763,7 +38018,7 @@ _loop1_206_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_206[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block"));
+ D(fprintf(stderr, "%*c> _loop1_211[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block"));
excepthandler_ty except_block_var;
while (
(except_block_var = except_block_rule(p)) // except_block
@@ -36786,7 +38041,7 @@ _loop1_206_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_206[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_211[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "except_block"));
}
if (_n == 0 || p->error_indicator) {
@@ -36808,9 +38063,9 @@ _loop1_206_rule(Parser *p)
return _seq;
}
-// _tmp_207: 'as' NAME
+// _tmp_212: 'as' NAME
static void *
-_tmp_207_rule(Parser *p)
+_tmp_212_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36827,21 +38082,21 @@ _tmp_207_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_207[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c> _tmp_212[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
Token * _keyword;
expr_ty name_var;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(name_var = _PyPegen_name_token(p)) // NAME
)
{
- D(fprintf(stderr, "%*c+ _tmp_207[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c+ _tmp_212[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
_res = _PyPegen_dummy_name(p, _keyword, name_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_207[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_212[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME"));
}
_res = NULL;
@@ -36850,9 +38105,9 @@ _tmp_207_rule(Parser *p)
return _res;
}
-// _loop0_208: block
+// _loop0_213: block
static asdl_seq *
-_loop0_208_rule(Parser *p)
+_loop0_213_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36878,7 +38133,7 @@ _loop0_208_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_208[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "block"));
+ D(fprintf(stderr, "%*c> _loop0_213[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "block"));
asdl_stmt_seq* block_var;
while (
(block_var = block_rule(p)) // block
@@ -36901,7 +38156,7 @@ _loop0_208_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_208[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_213[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "block"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -36918,9 +38173,9 @@ _loop0_208_rule(Parser *p)
return _seq;
}
-// _loop1_209: except_star_block
+// _loop1_214: except_star_block
static asdl_seq *
-_loop1_209_rule(Parser *p)
+_loop1_214_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -36946,7 +38201,7 @@ _loop1_209_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_209[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_star_block"));
+ D(fprintf(stderr, "%*c> _loop1_214[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_star_block"));
excepthandler_ty except_star_block_var;
while (
(except_star_block_var = except_star_block_rule(p)) // except_star_block
@@ -36969,7 +38224,7 @@ _loop1_209_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_209[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_214[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "except_star_block"));
}
if (_n == 0 || p->error_indicator) {
@@ -36991,9 +38246,9 @@ _loop1_209_rule(Parser *p)
return _seq;
}
-// _tmp_210: expression ['as' NAME]
+// _tmp_215: expression ['as' NAME]
static void *
-_tmp_210_rule(Parser *p)
+_tmp_215_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37010,22 +38265,22 @@ _tmp_210_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_210[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' NAME]"));
+ D(fprintf(stderr, "%*c> _tmp_215[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' NAME]"));
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
expr_ty expression_var;
if (
(expression_var = expression_rule(p)) // expression
&&
- (_opt_var = _tmp_246_rule(p), !p->error_indicator) // ['as' NAME]
+ (_opt_var = _tmp_267_rule(p), !p->error_indicator) // ['as' NAME]
)
{
- D(fprintf(stderr, "%*c+ _tmp_210[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' NAME]"));
+ D(fprintf(stderr, "%*c+ _tmp_215[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' NAME]"));
_res = _PyPegen_dummy_name(p, expression_var, _opt_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_210[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_215[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression ['as' NAME]"));
}
_res = NULL;
@@ -37034,9 +38289,9 @@ _tmp_210_rule(Parser *p)
return _res;
}
-// _tmp_211: 'as' NAME
+// _tmp_216: 'as' NAME
static void *
-_tmp_211_rule(Parser *p)
+_tmp_216_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37053,21 +38308,21 @@ _tmp_211_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_211[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c> _tmp_216[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
Token * _keyword;
expr_ty name_var;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(name_var = _PyPegen_name_token(p)) // NAME
)
{
- D(fprintf(stderr, "%*c+ _tmp_211[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c+ _tmp_216[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
_res = _PyPegen_dummy_name(p, _keyword, name_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_211[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_216[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME"));
}
_res = NULL;
@@ -37076,9 +38331,9 @@ _tmp_211_rule(Parser *p)
return _res;
}
-// _tmp_212: 'as' NAME
+// _tmp_217: 'as' NAME
static void *
-_tmp_212_rule(Parser *p)
+_tmp_217_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37095,21 +38350,21 @@ _tmp_212_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_212[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c> _tmp_217[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
Token * _keyword;
expr_ty name_var;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(name_var = _PyPegen_name_token(p)) // NAME
)
{
- D(fprintf(stderr, "%*c+ _tmp_212[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c+ _tmp_217[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
_res = _PyPegen_dummy_name(p, _keyword, name_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_212[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_217[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME"));
}
_res = NULL;
@@ -37118,9 +38373,9 @@ _tmp_212_rule(Parser *p)
return _res;
}
-// _tmp_213: NEWLINE | ':'
+// _tmp_218: NEWLINE | ':'
static void *
-_tmp_213_rule(Parser *p)
+_tmp_218_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37137,18 +38392,18 @@ _tmp_213_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_213[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
+ D(fprintf(stderr, "%*c> _tmp_218[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
Token * newline_var;
if (
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
{
- D(fprintf(stderr, "%*c+ _tmp_213[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
+ D(fprintf(stderr, "%*c+ _tmp_218[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
_res = newline_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_213[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_218[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE"));
}
{ // ':'
@@ -37156,18 +38411,18 @@ _tmp_213_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_213[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c> _tmp_218[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
)
{
- D(fprintf(stderr, "%*c+ _tmp_213[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c+ _tmp_218[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_213[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_218[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
}
_res = NULL;
@@ -37176,9 +38431,9 @@ _tmp_213_rule(Parser *p)
return _res;
}
-// _tmp_214: 'as' NAME
+// _tmp_219: 'as' NAME
static void *
-_tmp_214_rule(Parser *p)
+_tmp_219_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37195,21 +38450,21 @@ _tmp_214_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_214[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c> _tmp_219[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
Token * _keyword;
expr_ty name_var;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(name_var = _PyPegen_name_token(p)) // NAME
)
{
- D(fprintf(stderr, "%*c+ _tmp_214[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c+ _tmp_219[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
_res = _PyPegen_dummy_name(p, _keyword, name_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_214[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_219[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME"));
}
_res = NULL;
@@ -37218,9 +38473,9 @@ _tmp_214_rule(Parser *p)
return _res;
}
-// _tmp_215: 'as' NAME
+// _tmp_220: 'as' NAME
static void *
-_tmp_215_rule(Parser *p)
+_tmp_220_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37237,21 +38492,21 @@ _tmp_215_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_215[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c> _tmp_220[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
Token * _keyword;
expr_ty name_var;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(name_var = _PyPegen_name_token(p)) // NAME
)
{
- D(fprintf(stderr, "%*c+ _tmp_215[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c+ _tmp_220[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
_res = _PyPegen_dummy_name(p, _keyword, name_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_215[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_220[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME"));
}
_res = NULL;
@@ -37260,9 +38515,9 @@ _tmp_215_rule(Parser *p)
return _res;
}
-// _tmp_216: positional_patterns ','
+// _tmp_221: positional_patterns ','
static void *
-_tmp_216_rule(Parser *p)
+_tmp_221_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37279,7 +38534,7 @@ _tmp_216_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_216[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "positional_patterns ','"));
+ D(fprintf(stderr, "%*c> _tmp_221[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "positional_patterns ','"));
Token * _literal;
asdl_pattern_seq* positional_patterns_var;
if (
@@ -37288,12 +38543,12 @@ _tmp_216_rule(Parser *p)
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_216[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "positional_patterns ','"));
+ D(fprintf(stderr, "%*c+ _tmp_221[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "positional_patterns ','"));
_res = _PyPegen_dummy_name(p, positional_patterns_var, _literal);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_216[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_221[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "positional_patterns ','"));
}
_res = NULL;
@@ -37302,9 +38557,9 @@ _tmp_216_rule(Parser *p)
return _res;
}
-// _tmp_217: '->' expression
+// _tmp_222: '->' expression
static void *
-_tmp_217_rule(Parser *p)
+_tmp_222_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37321,7 +38576,7 @@ _tmp_217_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_217[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression"));
+ D(fprintf(stderr, "%*c> _tmp_222[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression"));
Token * _literal;
expr_ty expression_var;
if (
@@ -37330,12 +38585,12 @@ _tmp_217_rule(Parser *p)
(expression_var = expression_rule(p)) // expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_217[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression"));
+ D(fprintf(stderr, "%*c+ _tmp_222[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression"));
_res = _PyPegen_dummy_name(p, _literal, expression_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_217[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_222[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'->' expression"));
}
_res = NULL;
@@ -37344,9 +38599,9 @@ _tmp_217_rule(Parser *p)
return _res;
}
-// _tmp_218: '(' arguments? ')'
+// _tmp_223: '(' arguments? ')'
static void *
-_tmp_218_rule(Parser *p)
+_tmp_223_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37363,7 +38618,7 @@ _tmp_218_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_218[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
+ D(fprintf(stderr, "%*c> _tmp_223[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
Token * _literal;
Token * _literal_1;
void *_opt_var;
@@ -37376,12 +38631,12 @@ _tmp_218_rule(Parser *p)
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_218[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
+ D(fprintf(stderr, "%*c+ _tmp_223[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
_res = _PyPegen_dummy_name(p, _literal, _opt_var, _literal_1);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_218[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_223[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' arguments? ')'"));
}
_res = NULL;
@@ -37390,9 +38645,9 @@ _tmp_218_rule(Parser *p)
return _res;
}
-// _tmp_219: '(' arguments? ')'
+// _tmp_224: '(' arguments? ')'
static void *
-_tmp_219_rule(Parser *p)
+_tmp_224_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37409,7 +38664,7 @@ _tmp_219_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_219[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
+ D(fprintf(stderr, "%*c> _tmp_224[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
Token * _literal;
Token * _literal_1;
void *_opt_var;
@@ -37422,12 +38677,12 @@ _tmp_219_rule(Parser *p)
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_219[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
+ D(fprintf(stderr, "%*c+ _tmp_224[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
_res = _PyPegen_dummy_name(p, _literal, _opt_var, _literal_1);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_219[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_224[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' arguments? ')'"));
}
_res = NULL;
@@ -37436,9 +38691,9 @@ _tmp_219_rule(Parser *p)
return _res;
}
-// _loop0_221: ',' double_starred_kvpair
+// _loop0_226: ',' double_starred_kvpair
static asdl_seq *
-_loop0_221_rule(Parser *p)
+_loop0_226_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37464,7 +38719,7 @@ _loop0_221_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_221[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' double_starred_kvpair"));
+ D(fprintf(stderr, "%*c> _loop0_226[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' double_starred_kvpair"));
Token * _literal;
KeyValuePair* elem;
while (
@@ -37496,7 +38751,7 @@ _loop0_221_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_221[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_226[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' double_starred_kvpair"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -37513,9 +38768,9 @@ _loop0_221_rule(Parser *p)
return _seq;
}
-// _gather_220: double_starred_kvpair _loop0_221
+// _gather_225: double_starred_kvpair _loop0_226
static asdl_seq *
-_gather_220_rule(Parser *p)
+_gather_225_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37527,27 +38782,27 @@ _gather_220_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // double_starred_kvpair _loop0_221
+ { // double_starred_kvpair _loop0_226
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_220[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_221"));
+ D(fprintf(stderr, "%*c> _gather_225[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_226"));
KeyValuePair* elem;
asdl_seq * seq;
if (
(elem = double_starred_kvpair_rule(p)) // double_starred_kvpair
&&
- (seq = _loop0_221_rule(p)) // _loop0_221
+ (seq = _loop0_226_rule(p)) // _loop0_226
)
{
- D(fprintf(stderr, "%*c+ _gather_220[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_221"));
+ D(fprintf(stderr, "%*c+ _gather_225[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_226"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_220[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "double_starred_kvpair _loop0_221"));
+ D(fprintf(stderr, "%*c%s _gather_225[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "double_starred_kvpair _loop0_226"));
}
_res = NULL;
done:
@@ -37555,9 +38810,9 @@ _gather_220_rule(Parser *p)
return _res;
}
-// _tmp_222: '}' | ','
+// _tmp_227: '}' | ','
static void *
-_tmp_222_rule(Parser *p)
+_tmp_227_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37574,18 +38829,18 @@ _tmp_222_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_222[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'"));
+ D(fprintf(stderr, "%*c> _tmp_227[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 26)) // token='}'
)
{
- D(fprintf(stderr, "%*c+ _tmp_222[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'"));
+ D(fprintf(stderr, "%*c+ _tmp_227[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_222[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_227[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'"));
}
{ // ','
@@ -37593,18 +38848,18 @@ _tmp_222_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_222[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_227[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_222[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_227[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_222[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_227[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
_res = NULL;
@@ -37613,9 +38868,9 @@ _tmp_222_rule(Parser *p)
return _res;
}
-// _tmp_223: '}' | ','
+// _tmp_228: '}' | ','
static void *
-_tmp_223_rule(Parser *p)
+_tmp_228_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37632,18 +38887,18 @@ _tmp_223_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_223[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'"));
+ D(fprintf(stderr, "%*c> _tmp_228[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 26)) // token='}'
)
{
- D(fprintf(stderr, "%*c+ _tmp_223[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'"));
+ D(fprintf(stderr, "%*c+ _tmp_228[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_223[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_228[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'"));
}
{ // ','
@@ -37651,18 +38906,18 @@ _tmp_223_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_223[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_228[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_223[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_228[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_223[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_228[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
_res = NULL;
@@ -37671,9 +38926,898 @@ _tmp_223_rule(Parser *p)
return _res;
}
-// _tmp_224: star_targets '='
+// _tmp_229: yield_expr | star_expressions
static void *
-_tmp_224_rule(Parser *p)
+_tmp_229_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // yield_expr
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_229[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ expr_ty yield_expr_var;
+ if (
+ (yield_expr_var = yield_expr_rule(p)) // yield_expr
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_229[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ _res = yield_expr_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_229[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr"));
+ }
+ { // star_expressions
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_229[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ expr_ty star_expressions_var;
+ if (
+ (star_expressions_var = star_expressions_rule(p)) // star_expressions
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_229[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ _res = star_expressions_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_229[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_230: yield_expr | star_expressions
+static void *
+_tmp_230_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // yield_expr
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_230[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ expr_ty yield_expr_var;
+ if (
+ (yield_expr_var = yield_expr_rule(p)) // yield_expr
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_230[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ _res = yield_expr_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_230[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr"));
+ }
+ { // star_expressions
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_230[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ expr_ty star_expressions_var;
+ if (
+ (star_expressions_var = star_expressions_rule(p)) // star_expressions
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_230[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ _res = star_expressions_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_230[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_231: '=' | '!' | ':' | '}'
+static void *
+_tmp_231_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // '='
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_231[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 22)) // token='='
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_231[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_231[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'='"));
+ }
+ { // '!'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_231[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!'"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 54)) // token='!'
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_231[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!'"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_231[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!'"));
+ }
+ { // ':'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_231[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 11)) // token=':'
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_231[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_231[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
+ }
+ { // '}'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_231[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 26)) // token='}'
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_231[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_231[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_232: yield_expr | star_expressions
+static void *
+_tmp_232_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // yield_expr
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_232[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ expr_ty yield_expr_var;
+ if (
+ (yield_expr_var = yield_expr_rule(p)) // yield_expr
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_232[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ _res = yield_expr_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_232[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr"));
+ }
+ { // star_expressions
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_232[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ expr_ty star_expressions_var;
+ if (
+ (star_expressions_var = star_expressions_rule(p)) // star_expressions
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_232[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ _res = star_expressions_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_232[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_233: '!' | ':' | '}'
+static void *
+_tmp_233_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // '!'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_233[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!'"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 54)) // token='!'
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_233[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!'"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_233[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!'"));
+ }
+ { // ':'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_233[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 11)) // token=':'
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_233[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_233[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
+ }
+ { // '}'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_233[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 26)) // token='}'
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_233[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_233[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_234: yield_expr | star_expressions
+static void *
+_tmp_234_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // yield_expr
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_234[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ expr_ty yield_expr_var;
+ if (
+ (yield_expr_var = yield_expr_rule(p)) // yield_expr
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_234[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ _res = yield_expr_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_234[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr"));
+ }
+ { // star_expressions
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_234[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ expr_ty star_expressions_var;
+ if (
+ (star_expressions_var = star_expressions_rule(p)) // star_expressions
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_234[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ _res = star_expressions_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_234[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_235: yield_expr | star_expressions
+static void *
+_tmp_235_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // yield_expr
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_235[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ expr_ty yield_expr_var;
+ if (
+ (yield_expr_var = yield_expr_rule(p)) // yield_expr
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_235[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ _res = yield_expr_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_235[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr"));
+ }
+ { // star_expressions
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_235[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ expr_ty star_expressions_var;
+ if (
+ (star_expressions_var = star_expressions_rule(p)) // star_expressions
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_235[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ _res = star_expressions_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_235[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_236: '!' NAME
+static void *
+_tmp_236_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // '!' NAME
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_236[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!' NAME"));
+ Token * _literal;
+ expr_ty name_var;
+ if (
+ (_literal = _PyPegen_expect_token(p, 54)) // token='!'
+ &&
+ (name_var = _PyPegen_name_token(p)) // NAME
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_236[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!' NAME"));
+ _res = _PyPegen_dummy_name(p, _literal, name_var);
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_236[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!' NAME"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_237: ':' | '}'
+static void *
+_tmp_237_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // ':'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_237[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 11)) // token=':'
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_237[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_237[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
+ }
+ { // '}'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_237[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 26)) // token='}'
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_237[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_237[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_238: yield_expr | star_expressions
+static void *
+_tmp_238_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // yield_expr
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_238[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ expr_ty yield_expr_var;
+ if (
+ (yield_expr_var = yield_expr_rule(p)) // yield_expr
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_238[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ _res = yield_expr_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_238[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr"));
+ }
+ { // star_expressions
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_238[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ expr_ty star_expressions_var;
+ if (
+ (star_expressions_var = star_expressions_rule(p)) // star_expressions
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_238[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ _res = star_expressions_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_238[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_239: '!' NAME
+static void *
+_tmp_239_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // '!' NAME
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_239[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!' NAME"));
+ Token * _literal;
+ expr_ty name_var;
+ if (
+ (_literal = _PyPegen_expect_token(p, 54)) // token='!'
+ &&
+ (name_var = _PyPegen_name_token(p)) // NAME
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_239[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!' NAME"));
+ _res = _PyPegen_dummy_name(p, _literal, name_var);
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_239[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!' NAME"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _loop0_240: fstring_format_spec
+static asdl_seq *
+_loop0_240_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void *_res = NULL;
+ int _mark = p->mark;
+ void **_children = PyMem_Malloc(sizeof(void *));
+ if (!_children) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ Py_ssize_t _children_capacity = 1;
+ Py_ssize_t _n = 0;
+ { // fstring_format_spec
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _loop0_240[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring_format_spec"));
+ expr_ty fstring_format_spec_var;
+ while (
+ (fstring_format_spec_var = fstring_format_spec_rule(p)) // fstring_format_spec
+ )
+ {
+ _res = fstring_format_spec_var;
+ if (_n == _children_capacity) {
+ _children_capacity *= 2;
+ void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
+ if (!_new_children) {
+ PyMem_Free(_children);
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ _children = _new_children;
+ }
+ _children[_n++] = _res;
+ _mark = p->mark;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _loop0_240[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "fstring_format_spec"));
+ }
+ asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
+ if (!_seq) {
+ PyMem_Free(_children);
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ p->level--;
+ return NULL;
+ }
+ for (int i = 0; i < _n; i++) asdl_seq_SET_UNTYPED(_seq, i, _children[i]);
+ PyMem_Free(_children);
+ p->level--;
+ return _seq;
+}
+
+// _tmp_241: yield_expr | star_expressions
+static void *
+_tmp_241_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // yield_expr
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_241[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ expr_ty yield_expr_var;
+ if (
+ (yield_expr_var = yield_expr_rule(p)) // yield_expr
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_241[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ _res = yield_expr_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_241[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr"));
+ }
+ { // star_expressions
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_241[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ expr_ty star_expressions_var;
+ if (
+ (star_expressions_var = star_expressions_rule(p)) // star_expressions
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_241[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions"));
+ _res = star_expressions_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_241[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_242: '!' NAME
+static void *
+_tmp_242_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // '!' NAME
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_242[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!' NAME"));
+ Token * _literal;
+ expr_ty name_var;
+ if (
+ (_literal = _PyPegen_expect_token(p, 54)) // token='!'
+ &&
+ (name_var = _PyPegen_name_token(p)) // NAME
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_242[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!' NAME"));
+ _res = _PyPegen_dummy_name(p, _literal, name_var);
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_242[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!' NAME"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_243: ':' | '}'
+static void *
+_tmp_243_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // ':'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_243[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 11)) // token=':'
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_243[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_243[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
+ }
+ { // '}'
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_243[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'"));
+ Token * _literal;
+ if (
+ (_literal = _PyPegen_expect_token(p, 26)) // token='}'
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_243[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'"));
+ _res = _literal;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_243[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_244: star_targets '='
+static void *
+_tmp_244_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37690,7 +39834,7 @@ _tmp_224_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_224[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
+ D(fprintf(stderr, "%*c> _tmp_244[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
Token * _literal;
expr_ty z;
if (
@@ -37699,7 +39843,7 @@ _tmp_224_rule(Parser *p)
(_literal = _PyPegen_expect_token(p, 22)) // token='='
)
{
- D(fprintf(stderr, "%*c+ _tmp_224[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
+ D(fprintf(stderr, "%*c+ _tmp_244[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -37709,7 +39853,7 @@ _tmp_224_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_224[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_244[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='"));
}
_res = NULL;
@@ -37718,9 +39862,9 @@ _tmp_224_rule(Parser *p)
return _res;
}
-// _tmp_225: '.' | '...'
+// _tmp_245: '.' | '...'
static void *
-_tmp_225_rule(Parser *p)
+_tmp_245_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37737,18 +39881,18 @@ _tmp_225_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_225[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'"));
+ D(fprintf(stderr, "%*c> _tmp_245[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 23)) // token='.'
)
{
- D(fprintf(stderr, "%*c+ _tmp_225[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'"));
+ D(fprintf(stderr, "%*c+ _tmp_245[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_225[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_245[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'"));
}
{ // '...'
@@ -37756,18 +39900,18 @@ _tmp_225_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_225[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'"));
+ D(fprintf(stderr, "%*c> _tmp_245[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 52)) // token='...'
)
{
- D(fprintf(stderr, "%*c+ _tmp_225[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'"));
+ D(fprintf(stderr, "%*c+ _tmp_245[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_225[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_245[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'...'"));
}
_res = NULL;
@@ -37776,9 +39920,9 @@ _tmp_225_rule(Parser *p)
return _res;
}
-// _tmp_226: '.' | '...'
+// _tmp_246: '.' | '...'
static void *
-_tmp_226_rule(Parser *p)
+_tmp_246_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37795,18 +39939,18 @@ _tmp_226_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_226[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'"));
+ D(fprintf(stderr, "%*c> _tmp_246[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 23)) // token='.'
)
{
- D(fprintf(stderr, "%*c+ _tmp_226[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'"));
+ D(fprintf(stderr, "%*c+ _tmp_246[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_226[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_246[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'"));
}
{ // '...'
@@ -37814,18 +39958,18 @@ _tmp_226_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_226[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'"));
+ D(fprintf(stderr, "%*c> _tmp_246[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 52)) // token='...'
)
{
- D(fprintf(stderr, "%*c+ _tmp_226[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'"));
+ D(fprintf(stderr, "%*c+ _tmp_246[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_226[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_246[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'...'"));
}
_res = NULL;
@@ -37834,9 +39978,9 @@ _tmp_226_rule(Parser *p)
return _res;
}
-// _tmp_227: '@' named_expression NEWLINE
+// _tmp_247: '@' named_expression NEWLINE
static void *
-_tmp_227_rule(Parser *p)
+_tmp_247_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37853,7 +39997,7 @@ _tmp_227_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_227[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE"));
+ D(fprintf(stderr, "%*c> _tmp_247[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE"));
Token * _literal;
expr_ty f;
Token * newline_var;
@@ -37865,7 +40009,7 @@ _tmp_227_rule(Parser *p)
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
{
- D(fprintf(stderr, "%*c+ _tmp_227[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE"));
+ D(fprintf(stderr, "%*c+ _tmp_247[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE"));
_res = f;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -37875,7 +40019,7 @@ _tmp_227_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_227[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_247[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'@' named_expression NEWLINE"));
}
_res = NULL;
@@ -37884,9 +40028,9 @@ _tmp_227_rule(Parser *p)
return _res;
}
-// _tmp_228: ',' expression
+// _tmp_248: ',' expression
static void *
-_tmp_228_rule(Parser *p)
+_tmp_248_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37903,7 +40047,7 @@ _tmp_228_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_228[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
+ D(fprintf(stderr, "%*c> _tmp_248[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
Token * _literal;
expr_ty c;
if (
@@ -37912,7 +40056,7 @@ _tmp_228_rule(Parser *p)
(c = expression_rule(p)) // expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_228[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression"));
+ D(fprintf(stderr, "%*c+ _tmp_248[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression"));
_res = c;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -37922,7 +40066,7 @@ _tmp_228_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_228[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_248[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression"));
}
_res = NULL;
@@ -37931,9 +40075,9 @@ _tmp_228_rule(Parser *p)
return _res;
}
-// _tmp_229: ',' star_expression
+// _tmp_249: ',' star_expression
static void *
-_tmp_229_rule(Parser *p)
+_tmp_249_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37950,7 +40094,7 @@ _tmp_229_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_229[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression"));
+ D(fprintf(stderr, "%*c> _tmp_249[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression"));
Token * _literal;
expr_ty c;
if (
@@ -37959,7 +40103,7 @@ _tmp_229_rule(Parser *p)
(c = star_expression_rule(p)) // star_expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_229[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression"));
+ D(fprintf(stderr, "%*c+ _tmp_249[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression"));
_res = c;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -37969,7 +40113,7 @@ _tmp_229_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_229[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_249[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_expression"));
}
_res = NULL;
@@ -37978,9 +40122,9 @@ _tmp_229_rule(Parser *p)
return _res;
}
-// _tmp_230: 'or' conjunction
+// _tmp_250: 'or' conjunction
static void *
-_tmp_230_rule(Parser *p)
+_tmp_250_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -37997,7 +40141,7 @@ _tmp_230_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_230[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction"));
+ D(fprintf(stderr, "%*c> _tmp_250[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction"));
Token * _keyword;
expr_ty c;
if (
@@ -38006,7 +40150,7 @@ _tmp_230_rule(Parser *p)
(c = conjunction_rule(p)) // conjunction
)
{
- D(fprintf(stderr, "%*c+ _tmp_230[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction"));
+ D(fprintf(stderr, "%*c+ _tmp_250[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction"));
_res = c;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -38016,7 +40160,7 @@ _tmp_230_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_230[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_250[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'or' conjunction"));
}
_res = NULL;
@@ -38025,9 +40169,9 @@ _tmp_230_rule(Parser *p)
return _res;
}
-// _tmp_231: 'and' inversion
+// _tmp_251: 'and' inversion
static void *
-_tmp_231_rule(Parser *p)
+_tmp_251_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38044,7 +40188,7 @@ _tmp_231_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_231[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion"));
+ D(fprintf(stderr, "%*c> _tmp_251[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion"));
Token * _keyword;
expr_ty c;
if (
@@ -38053,7 +40197,7 @@ _tmp_231_rule(Parser *p)
(c = inversion_rule(p)) // inversion
)
{
- D(fprintf(stderr, "%*c+ _tmp_231[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion"));
+ D(fprintf(stderr, "%*c+ _tmp_251[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion"));
_res = c;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -38063,7 +40207,7 @@ _tmp_231_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_231[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_251[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'and' inversion"));
}
_res = NULL;
@@ -38072,9 +40216,9 @@ _tmp_231_rule(Parser *p)
return _res;
}
-// _tmp_232: slice | starred_expression
+// _tmp_252: slice | starred_expression
static void *
-_tmp_232_rule(Parser *p)
+_tmp_252_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38091,18 +40235,18 @@ _tmp_232_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_232[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slice"));
+ D(fprintf(stderr, "%*c> _tmp_252[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slice"));
expr_ty slice_var;
if (
(slice_var = slice_rule(p)) // slice
)
{
- D(fprintf(stderr, "%*c+ _tmp_232[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slice"));
+ D(fprintf(stderr, "%*c+ _tmp_252[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slice"));
_res = slice_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_232[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_252[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slice"));
}
{ // starred_expression
@@ -38110,18 +40254,18 @@ _tmp_232_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_232[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression"));
+ D(fprintf(stderr, "%*c> _tmp_252[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression"));
expr_ty starred_expression_var;
if (
(starred_expression_var = starred_expression_rule(p)) // starred_expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_232[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression"));
+ D(fprintf(stderr, "%*c+ _tmp_252[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression"));
_res = starred_expression_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_232[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_252[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "starred_expression"));
}
_res = NULL;
@@ -38130,9 +40274,67 @@ _tmp_232_rule(Parser *p)
return _res;
}
-// _tmp_233: 'if' disjunction
+// _tmp_253: fstring | string
static void *
-_tmp_233_rule(Parser *p)
+_tmp_253_rule(Parser *p)
+{
+ if (p->level++ == MAXSTACK) {
+ p->error_indicator = 1;
+ PyErr_NoMemory();
+ }
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ void * _res = NULL;
+ int _mark = p->mark;
+ { // fstring
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_253[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring"));
+ expr_ty fstring_var;
+ if (
+ (fstring_var = fstring_rule(p)) // fstring
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_253[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "fstring"));
+ _res = fstring_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_253[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "fstring"));
+ }
+ { // string
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> _tmp_253[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "string"));
+ expr_ty string_var;
+ if (
+ (string_var = string_rule(p)) // string
+ )
+ {
+ D(fprintf(stderr, "%*c+ _tmp_253[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "string"));
+ _res = string_var;
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s _tmp_253[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "string"));
+ }
+ _res = NULL;
+ done:
+ p->level--;
+ return _res;
+}
+
+// _tmp_254: 'if' disjunction
+static void *
+_tmp_254_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38149,16 +40351,16 @@ _tmp_233_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_233[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction"));
+ D(fprintf(stderr, "%*c> _tmp_254[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction"));
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 641)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
&&
(z = disjunction_rule(p)) // disjunction
)
{
- D(fprintf(stderr, "%*c+ _tmp_233[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction"));
+ D(fprintf(stderr, "%*c+ _tmp_254[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -38168,7 +40370,7 @@ _tmp_233_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_233[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_254[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'if' disjunction"));
}
_res = NULL;
@@ -38177,9 +40379,9 @@ _tmp_233_rule(Parser *p)
return _res;
}
-// _tmp_234: 'if' disjunction
+// _tmp_255: 'if' disjunction
static void *
-_tmp_234_rule(Parser *p)
+_tmp_255_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38196,16 +40398,16 @@ _tmp_234_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_234[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction"));
+ D(fprintf(stderr, "%*c> _tmp_255[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction"));
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 641)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 642)) // token='if'
&&
(z = disjunction_rule(p)) // disjunction
)
{
- D(fprintf(stderr, "%*c+ _tmp_234[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction"));
+ D(fprintf(stderr, "%*c+ _tmp_255[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -38215,7 +40417,7 @@ _tmp_234_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_234[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_255[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'if' disjunction"));
}
_res = NULL;
@@ -38224,9 +40426,9 @@ _tmp_234_rule(Parser *p)
return _res;
}
-// _tmp_235: starred_expression | (assignment_expression | expression !':=') !'='
+// _tmp_256: starred_expression | (assignment_expression | expression !':=') !'='
static void *
-_tmp_235_rule(Parser *p)
+_tmp_256_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38243,18 +40445,18 @@ _tmp_235_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_235[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression"));
+ D(fprintf(stderr, "%*c> _tmp_256[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression"));
expr_ty starred_expression_var;
if (
(starred_expression_var = starred_expression_rule(p)) // starred_expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_235[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression"));
+ D(fprintf(stderr, "%*c+ _tmp_256[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression"));
_res = starred_expression_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_235[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_256[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "starred_expression"));
}
{ // (assignment_expression | expression !':=') !'='
@@ -38262,20 +40464,20 @@ _tmp_235_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_235[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='"));
- void *_tmp_247_var;
+ D(fprintf(stderr, "%*c> _tmp_256[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='"));
+ void *_tmp_268_var;
if (
- (_tmp_247_var = _tmp_247_rule(p)) // assignment_expression | expression !':='
+ (_tmp_268_var = _tmp_268_rule(p)) // assignment_expression | expression !':='
&&
_PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 22) // token='='
)
{
- D(fprintf(stderr, "%*c+ _tmp_235[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='"));
- _res = _tmp_247_var;
+ D(fprintf(stderr, "%*c+ _tmp_256[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='"));
+ _res = _tmp_268_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_235[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_256[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(assignment_expression | expression !':=') !'='"));
}
_res = NULL;
@@ -38284,9 +40486,9 @@ _tmp_235_rule(Parser *p)
return _res;
}
-// _tmp_236: ',' star_target
+// _tmp_257: ',' star_target
static void *
-_tmp_236_rule(Parser *p)
+_tmp_257_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38303,7 +40505,7 @@ _tmp_236_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_236[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target"));
+ D(fprintf(stderr, "%*c> _tmp_257[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target"));
Token * _literal;
expr_ty c;
if (
@@ -38312,7 +40514,7 @@ _tmp_236_rule(Parser *p)
(c = star_target_rule(p)) // star_target
)
{
- D(fprintf(stderr, "%*c+ _tmp_236[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target"));
+ D(fprintf(stderr, "%*c+ _tmp_257[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target"));
_res = c;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -38322,7 +40524,7 @@ _tmp_236_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_236[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_257[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_target"));
}
_res = NULL;
@@ -38331,9 +40533,9 @@ _tmp_236_rule(Parser *p)
return _res;
}
-// _tmp_237: ',' star_target
+// _tmp_258: ',' star_target
static void *
-_tmp_237_rule(Parser *p)
+_tmp_258_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38350,7 +40552,7 @@ _tmp_237_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_237[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target"));
+ D(fprintf(stderr, "%*c> _tmp_258[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target"));
Token * _literal;
expr_ty c;
if (
@@ -38359,7 +40561,7 @@ _tmp_237_rule(Parser *p)
(c = star_target_rule(p)) // star_target
)
{
- D(fprintf(stderr, "%*c+ _tmp_237[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target"));
+ D(fprintf(stderr, "%*c+ _tmp_258[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target"));
_res = c;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -38369,7 +40571,7 @@ _tmp_237_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_237[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_258[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_target"));
}
_res = NULL;
@@ -38378,9 +40580,9 @@ _tmp_237_rule(Parser *p)
return _res;
}
-// _tmp_238: star_targets '='
+// _tmp_259: star_targets '='
static void *
-_tmp_238_rule(Parser *p)
+_tmp_259_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38397,7 +40599,7 @@ _tmp_238_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_238[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
+ D(fprintf(stderr, "%*c> _tmp_259[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
Token * _literal;
expr_ty star_targets_var;
if (
@@ -38406,12 +40608,12 @@ _tmp_238_rule(Parser *p)
(_literal = _PyPegen_expect_token(p, 22)) // token='='
)
{
- D(fprintf(stderr, "%*c+ _tmp_238[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
+ D(fprintf(stderr, "%*c+ _tmp_259[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
_res = _PyPegen_dummy_name(p, star_targets_var, _literal);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_238[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_259[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='"));
}
_res = NULL;
@@ -38420,9 +40622,9 @@ _tmp_238_rule(Parser *p)
return _res;
}
-// _tmp_239: star_targets '='
+// _tmp_260: star_targets '='
static void *
-_tmp_239_rule(Parser *p)
+_tmp_260_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38439,7 +40641,7 @@ _tmp_239_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_239[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
+ D(fprintf(stderr, "%*c> _tmp_260[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
Token * _literal;
expr_ty star_targets_var;
if (
@@ -38448,12 +40650,12 @@ _tmp_239_rule(Parser *p)
(_literal = _PyPegen_expect_token(p, 22)) // token='='
)
{
- D(fprintf(stderr, "%*c+ _tmp_239[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
+ D(fprintf(stderr, "%*c+ _tmp_260[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
_res = _PyPegen_dummy_name(p, star_targets_var, _literal);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_239[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_260[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='"));
}
_res = NULL;
@@ -38462,9 +40664,9 @@ _tmp_239_rule(Parser *p)
return _res;
}
-// _tmp_240: ')' | '**'
+// _tmp_261: ')' | '**'
static void *
-_tmp_240_rule(Parser *p)
+_tmp_261_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38481,18 +40683,18 @@ _tmp_240_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_240[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c> _tmp_261[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_240[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c+ _tmp_261[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_240[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_261[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'"));
}
{ // '**'
@@ -38500,18 +40702,18 @@ _tmp_240_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_240[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'"));
+ D(fprintf(stderr, "%*c> _tmp_261[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 35)) // token='**'
)
{
- D(fprintf(stderr, "%*c+ _tmp_240[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'"));
+ D(fprintf(stderr, "%*c+ _tmp_261[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_240[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_261[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'"));
}
_res = NULL;
@@ -38520,9 +40722,9 @@ _tmp_240_rule(Parser *p)
return _res;
}
-// _tmp_241: ':' | '**'
+// _tmp_262: ':' | '**'
static void *
-_tmp_241_rule(Parser *p)
+_tmp_262_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38539,18 +40741,18 @@ _tmp_241_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_241[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c> _tmp_262[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
)
{
- D(fprintf(stderr, "%*c+ _tmp_241[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c+ _tmp_262[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_241[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_262[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
}
{ // '**'
@@ -38558,18 +40760,18 @@ _tmp_241_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_241[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'"));
+ D(fprintf(stderr, "%*c> _tmp_262[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 35)) // token='**'
)
{
- D(fprintf(stderr, "%*c+ _tmp_241[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'"));
+ D(fprintf(stderr, "%*c+ _tmp_262[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_241[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_262[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'"));
}
_res = NULL;
@@ -38578,9 +40780,9 @@ _tmp_241_rule(Parser *p)
return _res;
}
-// _tmp_242: expression ['as' star_target]
+// _tmp_263: expression ['as' star_target]
static void *
-_tmp_242_rule(Parser *p)
+_tmp_263_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38597,22 +40799,22 @@ _tmp_242_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_242[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]"));
+ D(fprintf(stderr, "%*c> _tmp_263[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]"));
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
expr_ty expression_var;
if (
(expression_var = expression_rule(p)) // expression
&&
- (_opt_var = _tmp_248_rule(p), !p->error_indicator) // ['as' star_target]
+ (_opt_var = _tmp_269_rule(p), !p->error_indicator) // ['as' star_target]
)
{
- D(fprintf(stderr, "%*c+ _tmp_242[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]"));
+ D(fprintf(stderr, "%*c+ _tmp_263[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]"));
_res = _PyPegen_dummy_name(p, expression_var, _opt_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_242[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_263[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression ['as' star_target]"));
}
_res = NULL;
@@ -38621,9 +40823,9 @@ _tmp_242_rule(Parser *p)
return _res;
}
-// _tmp_243: expressions ['as' star_target]
+// _tmp_264: expressions ['as' star_target]
static void *
-_tmp_243_rule(Parser *p)
+_tmp_264_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38640,22 +40842,22 @@ _tmp_243_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_243[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]"));
+ D(fprintf(stderr, "%*c> _tmp_264[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]"));
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
expr_ty expressions_var;
if (
(expressions_var = expressions_rule(p)) // expressions
&&
- (_opt_var = _tmp_249_rule(p), !p->error_indicator) // ['as' star_target]
+ (_opt_var = _tmp_270_rule(p), !p->error_indicator) // ['as' star_target]
)
{
- D(fprintf(stderr, "%*c+ _tmp_243[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]"));
+ D(fprintf(stderr, "%*c+ _tmp_264[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]"));
_res = _PyPegen_dummy_name(p, expressions_var, _opt_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_243[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_264[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expressions ['as' star_target]"));
}
_res = NULL;
@@ -38664,9 +40866,9 @@ _tmp_243_rule(Parser *p)
return _res;
}
-// _tmp_244: expression ['as' star_target]
+// _tmp_265: expression ['as' star_target]
static void *
-_tmp_244_rule(Parser *p)
+_tmp_265_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38683,22 +40885,22 @@ _tmp_244_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_244[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]"));
+ D(fprintf(stderr, "%*c> _tmp_265[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]"));
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
expr_ty expression_var;
if (
(expression_var = expression_rule(p)) // expression
&&
- (_opt_var = _tmp_250_rule(p), !p->error_indicator) // ['as' star_target]
+ (_opt_var = _tmp_271_rule(p), !p->error_indicator) // ['as' star_target]
)
{
- D(fprintf(stderr, "%*c+ _tmp_244[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]"));
+ D(fprintf(stderr, "%*c+ _tmp_265[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]"));
_res = _PyPegen_dummy_name(p, expression_var, _opt_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_244[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_265[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression ['as' star_target]"));
}
_res = NULL;
@@ -38707,9 +40909,9 @@ _tmp_244_rule(Parser *p)
return _res;
}
-// _tmp_245: expressions ['as' star_target]
+// _tmp_266: expressions ['as' star_target]
static void *
-_tmp_245_rule(Parser *p)
+_tmp_266_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38726,22 +40928,22 @@ _tmp_245_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_245[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]"));
+ D(fprintf(stderr, "%*c> _tmp_266[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]"));
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
expr_ty expressions_var;
if (
(expressions_var = expressions_rule(p)) // expressions
&&
- (_opt_var = _tmp_251_rule(p), !p->error_indicator) // ['as' star_target]
+ (_opt_var = _tmp_272_rule(p), !p->error_indicator) // ['as' star_target]
)
{
- D(fprintf(stderr, "%*c+ _tmp_245[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]"));
+ D(fprintf(stderr, "%*c+ _tmp_266[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]"));
_res = _PyPegen_dummy_name(p, expressions_var, _opt_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_245[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_266[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expressions ['as' star_target]"));
}
_res = NULL;
@@ -38750,9 +40952,9 @@ _tmp_245_rule(Parser *p)
return _res;
}
-// _tmp_246: 'as' NAME
+// _tmp_267: 'as' NAME
static void *
-_tmp_246_rule(Parser *p)
+_tmp_267_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38769,21 +40971,21 @@ _tmp_246_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_246[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c> _tmp_267[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
Token * _keyword;
expr_ty name_var;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(name_var = _PyPegen_name_token(p)) // NAME
)
{
- D(fprintf(stderr, "%*c+ _tmp_246[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c+ _tmp_267[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
_res = _PyPegen_dummy_name(p, _keyword, name_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_246[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_267[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME"));
}
_res = NULL;
@@ -38792,9 +40994,9 @@ _tmp_246_rule(Parser *p)
return _res;
}
-// _tmp_247: assignment_expression | expression !':='
+// _tmp_268: assignment_expression | expression !':='
static void *
-_tmp_247_rule(Parser *p)
+_tmp_268_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38811,18 +41013,18 @@ _tmp_247_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_247[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression"));
+ D(fprintf(stderr, "%*c> _tmp_268[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression"));
expr_ty assignment_expression_var;
if (
(assignment_expression_var = assignment_expression_rule(p)) // assignment_expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_247[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression"));
+ D(fprintf(stderr, "%*c+ _tmp_268[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression"));
_res = assignment_expression_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_247[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_268[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "assignment_expression"));
}
{ // expression !':='
@@ -38830,7 +41032,7 @@ _tmp_247_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_247[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='"));
+ D(fprintf(stderr, "%*c> _tmp_268[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='"));
expr_ty expression_var;
if (
(expression_var = expression_rule(p)) // expression
@@ -38838,12 +41040,12 @@ _tmp_247_rule(Parser *p)
_PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) // token=':='
)
{
- D(fprintf(stderr, "%*c+ _tmp_247[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='"));
+ D(fprintf(stderr, "%*c+ _tmp_268[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='"));
_res = expression_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_247[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_268[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression !':='"));
}
_res = NULL;
@@ -38852,9 +41054,9 @@ _tmp_247_rule(Parser *p)
return _res;
}
-// _tmp_248: 'as' star_target
+// _tmp_269: 'as' star_target
static void *
-_tmp_248_rule(Parser *p)
+_tmp_269_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38871,21 +41073,21 @@ _tmp_248_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_248[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
+ D(fprintf(stderr, "%*c> _tmp_269[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
Token * _keyword;
expr_ty star_target_var;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(star_target_var = star_target_rule(p)) // star_target
)
{
- D(fprintf(stderr, "%*c+ _tmp_248[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
+ D(fprintf(stderr, "%*c+ _tmp_269[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
_res = _PyPegen_dummy_name(p, _keyword, star_target_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_248[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_269[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' star_target"));
}
_res = NULL;
@@ -38894,9 +41096,9 @@ _tmp_248_rule(Parser *p)
return _res;
}
-// _tmp_249: 'as' star_target
+// _tmp_270: 'as' star_target
static void *
-_tmp_249_rule(Parser *p)
+_tmp_270_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38913,21 +41115,21 @@ _tmp_249_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_249[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
+ D(fprintf(stderr, "%*c> _tmp_270[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
Token * _keyword;
expr_ty star_target_var;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(star_target_var = star_target_rule(p)) // star_target
)
{
- D(fprintf(stderr, "%*c+ _tmp_249[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
+ D(fprintf(stderr, "%*c+ _tmp_270[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
_res = _PyPegen_dummy_name(p, _keyword, star_target_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_249[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_270[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' star_target"));
}
_res = NULL;
@@ -38936,9 +41138,9 @@ _tmp_249_rule(Parser *p)
return _res;
}
-// _tmp_250: 'as' star_target
+// _tmp_271: 'as' star_target
static void *
-_tmp_250_rule(Parser *p)
+_tmp_271_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38955,21 +41157,21 @@ _tmp_250_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_250[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
+ D(fprintf(stderr, "%*c> _tmp_271[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
Token * _keyword;
expr_ty star_target_var;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(star_target_var = star_target_rule(p)) // star_target
)
{
- D(fprintf(stderr, "%*c+ _tmp_250[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
+ D(fprintf(stderr, "%*c+ _tmp_271[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
_res = _PyPegen_dummy_name(p, _keyword, star_target_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_250[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_271[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' star_target"));
}
_res = NULL;
@@ -38978,9 +41180,9 @@ _tmp_250_rule(Parser *p)
return _res;
}
-// _tmp_251: 'as' star_target
+// _tmp_272: 'as' star_target
static void *
-_tmp_251_rule(Parser *p)
+_tmp_272_rule(Parser *p)
{
if (p->level++ == MAXSTACK) {
p->error_indicator = 1;
@@ -38997,21 +41199,21 @@ _tmp_251_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_251[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
+ D(fprintf(stderr, "%*c> _tmp_272[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
Token * _keyword;
expr_ty star_target_var;
if (
- (_keyword = _PyPegen_expect_token(p, 639)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 640)) // token='as'
&&
(star_target_var = star_target_rule(p)) // star_target
)
{
- D(fprintf(stderr, "%*c+ _tmp_251[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
+ D(fprintf(stderr, "%*c+ _tmp_272[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
_res = _PyPegen_dummy_name(p, _keyword, star_target_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_251[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_272[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' star_target"));
}
_res = NULL;
diff --git a/Parser/pegen.c b/Parser/pegen.c
index b79ae4cb1fb370..262bfabfba7a25 100644
--- a/Parser/pegen.c
+++ b/Parser/pegen.c
@@ -359,7 +359,7 @@ _PyPegen_expect_token(Parser *p, int type)
}
Token *t = p->tokens[p->mark];
if (t->type != type) {
- return NULL;
+ return NULL;
}
p->mark += 1;
return t;
diff --git a/Parser/pegen.h b/Parser/pegen.h
index ad5c97f5f7e5d1..6962013c2d18b4 100644
--- a/Parser/pegen.h
+++ b/Parser/pegen.h
@@ -138,6 +138,7 @@ void* _PyPegen_expect_forced_result(Parser *p, void* result, const char* expecte
Token *_PyPegen_expect_forced_token(Parser *p, int type, const char* expected);
expr_ty _PyPegen_expect_soft_keyword(Parser *p, const char *keyword);
expr_ty _PyPegen_soft_keyword_token(Parser *p);
+expr_ty _PyPegen_fstring_middle_token(Parser* p);
Token *_PyPegen_get_last_nonnwhitespace_token(Parser *);
int _PyPegen_fill_token(Parser *p);
expr_ty _PyPegen_name_token(Parser *p);
@@ -155,7 +156,7 @@ typedef enum {
int _Pypegen_raise_decode_error(Parser *p);
void _PyPegen_raise_tokenizer_init_error(PyObject *filename);
int _Pypegen_tokenizer_error(Parser *p);
-void *_PyPegen_raise_error(Parser *p, PyObject *errtype, const char *errmsg, ...);
+void *_PyPegen_raise_error(Parser *p, PyObject *errtype, int use_mark, const char *errmsg, ...);
void *_PyPegen_raise_error_known_location(Parser *p, PyObject *errtype,
Py_ssize_t lineno, Py_ssize_t col_offset,
Py_ssize_t end_lineno, Py_ssize_t end_col_offset,
@@ -175,8 +176,9 @@ RAISE_ERROR_KNOWN_LOCATION(Parser *p, PyObject *errtype,
va_end(va);
return NULL;
}
-#define RAISE_SYNTAX_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_SyntaxError, msg, ##__VA_ARGS__)
-#define RAISE_INDENTATION_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_IndentationError, msg, ##__VA_ARGS__)
+#define RAISE_SYNTAX_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_SyntaxError, 0, msg, ##__VA_ARGS__)
+#define RAISE_INDENTATION_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_IndentationError, 0, msg, ##__VA_ARGS__)
+#define RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN(msg, ...) _PyPegen_raise_error(p, PyExc_SyntaxError, 1, msg, ##__VA_ARGS__)
#define RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, msg, ...) \
RAISE_ERROR_KNOWN_LOCATION(p, PyExc_SyntaxError, (a)->lineno, (a)->col_offset, (b)->end_lineno, (b)->end_col_offset, msg, ##__VA_ARGS__)
#define RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, msg, ...) \
@@ -308,6 +310,7 @@ StarEtc *_PyPegen_star_etc(Parser *, arg_ty, asdl_seq *, arg_ty);
arguments_ty _PyPegen_make_arguments(Parser *, asdl_arg_seq *, SlashWithDefault *,
asdl_arg_seq *, asdl_seq *, StarEtc *);
arguments_ty _PyPegen_empty_arguments(Parser *);
+expr_ty _PyPegen_formatted_value(Parser *, expr_ty, Token *, expr_ty, expr_ty, int, int, int, int, PyArena *);
AugOperator *_PyPegen_augoperator(Parser*, operator_ty type);
stmt_ty _PyPegen_function_def_decorators(Parser *, asdl_expr_seq *, stmt_ty);
stmt_ty _PyPegen_class_def_decorators(Parser *, asdl_expr_seq *, stmt_ty);
@@ -317,12 +320,16 @@ asdl_keyword_seq *_PyPegen_seq_delete_starred_exprs(Parser *, asdl_seq *);
expr_ty _PyPegen_collect_call_seqs(Parser *, asdl_expr_seq *, asdl_seq *,
int lineno, int col_offset, int end_lineno,
int end_col_offset, PyArena *arena);
-expr_ty _PyPegen_concatenate_strings(Parser *p, asdl_seq *);
+expr_ty _PyPegen_constant_from_token(Parser* p, Token* tok);
+expr_ty _PyPegen_constant_from_string(Parser* p, Token* tok);
+expr_ty _PyPegen_concatenate_strings(Parser *p, asdl_expr_seq *, int, int, int, int, PyArena *);
+expr_ty _PyPegen_FetchRawForm(Parser *p, int, int, int, int);
expr_ty _PyPegen_ensure_imaginary(Parser *p, expr_ty);
expr_ty _PyPegen_ensure_real(Parser *p, expr_ty);
asdl_seq *_PyPegen_join_sequences(Parser *, asdl_seq *, asdl_seq *);
int _PyPegen_check_barry_as_flufl(Parser *, Token *);
int _PyPegen_check_legacy_stmt(Parser *p, expr_ty t);
+expr_ty _PyPegen_check_fstring_conversion(Parser *p, Token *, expr_ty t);
mod_ty _PyPegen_make_module(Parser *, asdl_stmt_seq *);
void *_PyPegen_arguments_parsing_error(Parser *, expr_ty);
expr_ty _PyPegen_get_last_comprehension_item(comprehension_ty comprehension);
@@ -338,6 +345,9 @@ void *_PyPegen_run_parser(Parser *);
mod_ty _PyPegen_run_parser_from_string(const char *, int, PyObject *, PyCompilerFlags *, PyArena *);
asdl_stmt_seq *_PyPegen_interactive_exit(Parser *);
+// TODO: move to the correct place in this file
+expr_ty _PyPegen_joined_str(Parser *p, Token* a, asdl_expr_seq* expr, Token*b);
+
// Generated function in parse.c - function definition in python.gram
void *_PyPegen_parse(Parser *);
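The new use_mark parameter threaded through _PyPegen_raise_error (and the RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN macro built on top of it) controls whether an error is attributed to the last token already read (p->fill - 1) or to the token the parser is about to consume (p->mark); the real function also refills the token buffer when the mark is past the last filled token. A minimal standalone sketch of that selection, using made-up types that only mirror the parser's bookkeeping (not the CPython API):

    #include <stdio.h>

    /* Illustrative stand-ins for the parser's token bookkeeping; these are
       not the CPython structures. */
    typedef struct { int lineno; const char *text; } Token;

    typedef struct {
        Token tokens[8];
        int fill;   /* number of tokens already read from the tokenizer */
        int mark;   /* index of the token the parser will look at next */
    } MiniParser;

    /* Pick the token an error should point at: with use_mark the error lands
       on the upcoming token, otherwise on the last token already read. */
    static const Token *
    error_token(const MiniParser *p, int use_mark)
    {
        return &p->tokens[use_mark ? p->mark : p->fill - 1];
    }

    int main(void)
    {
        MiniParser p = {
            .tokens = {{1, "f\"{"}, {1, "1 +"}, {1, "}\""}},
            .fill = 3,
            .mark = 1,
        };
        printf("default : %s\n", error_token(&p, 0)->text);  /* last read token */
        printf("use_mark: %s\n", error_token(&p, 1)->text);  /* upcoming token  */
        return 0;
    }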
diff --git a/Parser/pegen_errors.c b/Parser/pegen_errors.c
index 6ea7600119b643..e26bad20a27575 100644
--- a/Parser/pegen_errors.c
+++ b/Parser/pegen_errors.c
@@ -192,7 +192,10 @@ _PyPegen_tokenize_full_source_to_check_for_errors(Parser *p) {
exit:
- if (PyErr_Occurred()) {
+ // If we're in an f-string, we want the syntax error in the expression part
+ // to propagate, so that tokenizer errors (like expecting '}') that happen afterwards
+ // do not swallow it.
+ if (PyErr_Occurred() && p->tok->tok_mode_stack_index <= 0) {
Py_XDECREF(value);
Py_XDECREF(type);
Py_XDECREF(traceback);
@@ -205,7 +208,7 @@ _PyPegen_tokenize_full_source_to_check_for_errors(Parser *p) {
// PARSER ERRORS
void *
-_PyPegen_raise_error(Parser *p, PyObject *errtype, const char *errmsg, ...)
+_PyPegen_raise_error(Parser *p, PyObject *errtype, int use_mark, const char *errmsg, ...)
{
if (p->fill == 0) {
va_list va;
@@ -214,8 +217,13 @@ _PyPegen_raise_error(Parser *p, PyObject *errtype, const char *errmsg, ...)
va_end(va);
return NULL;
}
-
- Token *t = p->known_err_token != NULL ? p->known_err_token : p->tokens[p->fill - 1];
+ if (use_mark && p->mark == p->fill && _PyPegen_fill_token(p) < 0) {
+ p->error_indicator = 1;
+ return NULL;
+ }
+ Token *t = p->known_err_token != NULL
+ ? p->known_err_token
+ : p->tokens[use_mark ? p->mark : p->fill - 1];
Py_ssize_t col_offset;
Py_ssize_t end_col_offset = -1;
if (t->col_offset == -1) {
diff --git a/Parser/string_parser.c b/Parser/string_parser.c
index c096bea7426e5c..d4ce33850f7c58 100644
--- a/Parser/string_parser.c
+++ b/Parser/string_parser.c
@@ -135,7 +135,9 @@ decode_unicode_with_escapes(Parser *parser, const char *s, size_t len, Token *t)
const char *first_invalid_escape;
v = _PyUnicode_DecodeUnicodeEscapeInternal(s, len, NULL, NULL, &first_invalid_escape);
- if (v != NULL && first_invalid_escape != NULL) {
+    // HACK: later we can simply pass the line number, since we don't preserve the
+    // tokens when decoding the string, but we do preserve the line numbers.
+ if (v != NULL && first_invalid_escape != NULL && t != NULL) {
if (warn_invalid_escape_sequence(parser, first_invalid_escape, t) < 0) {
/* We have not decref u before because first_invalid_escape points
inside u. */
@@ -166,43 +168,43 @@ decode_bytes_with_escapes(Parser *p, const char *s, Py_ssize_t len, Token *t)
return result;
}
-/* s must include the bracketing quote characters, and r, b, u,
- &/or f prefixes (if any), and embedded escape sequences (if any).
- _PyPegen_parsestr parses it, and sets *result to decoded Python string object.
- If the string is an f-string, set *fstr and *fstrlen to the unparsed
- string object. Return 0 if no errors occurred. */
-int
-_PyPegen_parsestr(Parser *p, int *bytesmode, int *rawmode, PyObject **result,
- const char **fstr, Py_ssize_t *fstrlen, Token *t)
+PyObject *
+_PyPegen_decode_string(Parser *p, int raw, const char *s, size_t len, Token *t)
+{
+ if (raw) {
+ return PyUnicode_DecodeUTF8Stateful(s, len, NULL, NULL);
+ }
+ return decode_unicode_with_escapes(p, s, len, t);
+}
+
+/* s must include the bracketing quote characters, and r, b &/or f prefixes
+ (if any), and embedded escape sequences (if any). (f-strings are handled by the parser)
+ _PyPegen_parse_string parses it, and returns the decoded Python string object. */
+PyObject *
+_PyPegen_parse_string(Parser *p, Token *t)
{
const char *s = PyBytes_AsString(t->bytes);
if (s == NULL) {
- return -1;
+ return NULL;
}
size_t len;
int quote = Py_CHARMASK(*s);
- int fmode = 0;
- *bytesmode = 0;
- *rawmode = 0;
- *result = NULL;
- *fstr = NULL;
+ int bytesmode = 0;
+ int rawmode = 0;
+
if (Py_ISALPHA(quote)) {
- while (!*bytesmode || !*rawmode) {
+ while (!bytesmode || !rawmode) {
if (quote == 'b' || quote == 'B') {
quote =(unsigned char)*++s;
- *bytesmode = 1;
+ bytesmode = 1;
}
else if (quote == 'u' || quote == 'U') {
quote = (unsigned char)*++s;
}
else if (quote == 'r' || quote == 'R') {
quote = (unsigned char)*++s;
- *rawmode = 1;
- }
- else if (quote == 'f' || quote == 'F') {
- quote = (unsigned char)*++s;
- fmode = 1;
+ rawmode = 1;
}
else {
break;
@@ -210,32 +212,21 @@ _PyPegen_parsestr(Parser *p, int *bytesmode, int *rawmode, PyObject **result,
}
}
- /* fstrings are only allowed in Python 3.6 and greater */
- if (fmode && p->feature_version < 6) {
- p->error_indicator = 1;
- RAISE_SYNTAX_ERROR("Format strings are only supported in Python 3.6 and greater");
- return -1;
- }
-
- if (fmode && *bytesmode) {
- PyErr_BadInternalCall();
- return -1;
- }
if (quote != '\'' && quote != '\"') {
PyErr_BadInternalCall();
- return -1;
+ return NULL;
}
/* Skip the leading quote char. */
s++;
len = strlen(s);
if (len > INT_MAX) {
PyErr_SetString(PyExc_OverflowError, "string to parse is too long");
- return -1;
+ return NULL;
}
if (s[--len] != quote) {
/* Last quote char must match the first. */
PyErr_BadInternalCall();
- return -1;
+ return NULL;
}
if (len >= 4 && s[0] == quote && s[1] == quote) {
/* A triple quoted string. We've already skipped one quote at
@@ -246,1037 +237,28 @@ _PyPegen_parsestr(Parser *p, int *bytesmode, int *rawmode, PyObject **result,
/* And check that the last two match. */
if (s[--len] != quote || s[--len] != quote) {
PyErr_BadInternalCall();
- return -1;
+ return NULL;
}
}
- if (fmode) {
- /* Just return the bytes. The caller will parse the resulting
- string. */
- *fstr = s;
- *fstrlen = len;
- return 0;
- }
-
- /* Not an f-string. */
/* Avoid invoking escape decoding routines if possible. */
- *rawmode = *rawmode || strchr(s, '\\') == NULL;
- if (*bytesmode) {
+ rawmode = rawmode || strchr(s, '\\') == NULL;
+ if (bytesmode) {
/* Disallow non-ASCII characters. */
const char *ch;
for (ch = s; *ch; ch++) {
if (Py_CHARMASK(*ch) >= 0x80) {
- RAISE_SYNTAX_ERROR(
+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(
+ t,
"bytes can only contain ASCII "
"literal characters");
- return -1;
- }
- }
- if (*rawmode) {
- *result = PyBytes_FromStringAndSize(s, len);
- }
- else {
- *result = decode_bytes_with_escapes(p, s, len, t);
- }
- }
- else {
- if (*rawmode) {
- *result = PyUnicode_DecodeUTF8Stateful(s, len, NULL, NULL);
- }
- else {
- *result = decode_unicode_with_escapes(p, s, len, t);
- }
- }
- return *result == NULL ? -1 : 0;
-}
-
-
-
-// FSTRING STUFF
-
-/* Fix locations for the given node and its children.
-
- `parent` is the enclosing node.
- `expr_start` is the starting position of the expression (pointing to the open brace).
- `n` is the node which locations are going to be fixed relative to parent.
- `expr_str` is the child node's string representation, including braces.
-*/
-static bool
-fstring_find_expr_location(Token *parent, const char* expr_start, char *expr_str, int *p_lines, int *p_cols)
-{
- *p_lines = 0;
- *p_cols = 0;
- assert(expr_start != NULL && *expr_start == '{');
- if (parent && parent->bytes) {
- const char *parent_str = PyBytes_AsString(parent->bytes);
- if (!parent_str) {
- return false;
- }
- // The following is needed, in order to correctly shift the column
- // offset, in the case that (disregarding any whitespace) a newline
- // immediately follows the opening curly brace of the fstring expression.
- bool newline_after_brace = 1;
- const char *start = expr_start + 1;
- while (start && *start != '}' && *start != '\n') {
- if (*start != ' ' && *start != '\t' && *start != '\f') {
- newline_after_brace = 0;
- break;
- }
- start++;
- }
-
- // Account for the characters from the last newline character to our
- // left until the beginning of expr_start.
- if (!newline_after_brace) {
- start = expr_start;
- while (start > parent_str && *start != '\n') {
- start--;
- }
- *p_cols += (int)(expr_start - start);
- if (*start == '\n') {
- *p_cols -= 1;
- }
- }
- /* adjust the start based on the number of newlines encountered
- before the f-string expression */
- for (const char *p = parent_str; p < expr_start; p++) {
- if (*p == '\n') {
- (*p_lines)++;
- }
- }
- }
- return true;
-}
-
-
-/* Compile this expression in to an expr_ty. Add parens around the
- expression, in order to allow leading spaces in the expression. */
-static expr_ty
-fstring_compile_expr(Parser *p, const char *expr_start, const char *expr_end,
- Token *t)
-{
- expr_ty expr = NULL;
- char *str;
- Py_ssize_t len;
- const char *s;
- expr_ty result = NULL;
-
- assert(expr_end >= expr_start);
- assert(*(expr_start-1) == '{');
- assert(*expr_end == '}' || *expr_end == '!' || *expr_end == ':' ||
- *expr_end == '=');
-
- /* If the substring is all whitespace, it's an error. We need to catch this
- here, and not when we call PyParser_SimpleParseStringFlagsFilename,
- because turning the expression '' in to '()' would go from being invalid
- to valid. */
- for (s = expr_start; s != expr_end; s++) {
- char c = *s;
- /* The Python parser ignores only the following whitespace
- characters (\r already is converted to \n). */
- if (!(c == ' ' || c == '\t' || c == '\n' || c == '\f')) {
- break;
- }
- }
-
- if (s == expr_end) {
- if (*expr_end == '!' || *expr_end == ':' || *expr_end == '=') {
- RAISE_SYNTAX_ERROR("f-string: expression required before '%c'", *expr_end);
- return NULL;
- }
- RAISE_SYNTAX_ERROR("f-string: empty expression not allowed");
- return NULL;
- }
-
- len = expr_end - expr_start;
- /* Allocate 3 extra bytes: open paren, close paren, null byte. */
- str = PyMem_Calloc(len + 3, sizeof(char));
- if (str == NULL) {
- PyErr_NoMemory();
- return NULL;
- }
-
- // The call to fstring_find_expr_location is responsible for finding the column offset
- // the generated AST nodes need to be shifted to the right, which is equal to the number
- // of the f-string characters before the expression starts.
- memcpy(str+1, expr_start, len);
- int lines, cols;
- if (!fstring_find_expr_location(t, expr_start-1, str+1, &lines, &cols)) {
- PyMem_Free(str);
- return NULL;
- }
-
- // The parentheses are needed in order to allow for leading whitespace within
- // the f-string expression. This consequently gets parsed as a group (see the
- // group rule in python.gram).
- str[0] = '(';
- str[len+1] = ')';
-
- struct tok_state* tok = _PyTokenizer_FromString(str, 1);
- if (tok == NULL) {
- PyMem_Free(str);
- return NULL;
- }
- tok->filename = Py_NewRef(p->tok->filename);
- tok->lineno = t->lineno + lines - 1;
-
- Parser *p2 = _PyPegen_Parser_New(tok, Py_fstring_input, p->flags, p->feature_version,
- NULL, p->arena);
-
- p2->starting_lineno = t->lineno + lines;
- p2->starting_col_offset = lines != 0 ? cols : t->col_offset + cols;
-
- expr = _PyPegen_run_parser(p2);
-
- if (expr == NULL) {
- goto exit;
- }
- result = expr;
-
-exit:
- PyMem_Free(str);
- _PyPegen_Parser_Free(p2);
- _PyTokenizer_Free(tok);
- return result;
-}
-
-/* Return -1 on error.
-
- Return 0 if we reached the end of the literal.
-
- Return 1 if we haven't reached the end of the literal, but we want
- the caller to process the literal up to this point. Used for
- doubled braces.
-*/
-static int
-fstring_find_literal(Parser *p, const char **str, const char *end, int raw,
- PyObject **literal, int recurse_lvl, Token *t)
-{
- /* Get any literal string. It ends when we hit an un-doubled left
- brace (which isn't part of a unicode name escape such as
- "\N{EULER CONSTANT}"), or the end of the string. */
-
- const char *s = *str;
- const char *literal_start = s;
- int result = 0;
-
- assert(*literal == NULL);
- while (s < end) {
- char ch = *s++;
- if (!raw && ch == '\\' && s < end) {
- ch = *s++;
- if (ch == 'N') {
- /* We need to look at and skip matching braces for "\N{name}"
- sequences because otherwise we'll think the opening '{'
- starts an expression, which is not the case with "\N".
- Keep looking for either a matched '{' '}' pair, or the end
- of the string. */
-
- if (s < end && *s++ == '{') {
- while (s < end && *s++ != '}') {
- }
- continue;
- }
-
- /* This is an invalid "\N" sequence, since it's a "\N" not
- followed by a "{". Just keep parsing this literal. This
- error will be caught later by
- decode_unicode_with_escapes(). */
- continue;
- }
- if (ch == '{' && warn_invalid_escape_sequence(p, s-1, t) < 0) {
- return -1;
- }
- }
- if (ch == '{' || ch == '}') {
- /* Check for doubled braces, but only at the top level. If
- we checked at every level, then f'{0:{3}}' would fail
- with the two closing braces. */
- if (recurse_lvl == 0) {
- if (s < end && *s == ch) {
- /* We're going to tell the caller that the literal ends
- here, but that they should continue scanning. But also
- skip over the second brace when we resume scanning. */
- *str = s + 1;
- result = 1;
- goto done;
- }
-
- /* Where a single '{' is the start of a new expression, a
- single '}' is not allowed. */
- if (ch == '}') {
- *str = s - 1;
- RAISE_SYNTAX_ERROR("f-string: single '}' is not allowed");
- return -1;
- }
- }
- /* We're either at a '{', which means we're starting another
- expression; or a '}', which means we're at the end of this
- f-string (for a nested format_spec). */
- s--;
- break;
- }
- }
- *str = s;
- assert(s <= end);
- assert(s == end || *s == '{' || *s == '}');
-done:
- if (literal_start != s) {
- if (raw) {
- *literal = PyUnicode_DecodeUTF8Stateful(literal_start,
- s - literal_start,
- NULL, NULL);
- }
- else {
- *literal = decode_unicode_with_escapes(p, literal_start,
- s - literal_start, t);
- }
- if (!*literal) {
- return -1;
- }
- }
- return result;
-}
-
-/* Forward declaration because parsing is recursive. */
-static expr_ty
-fstring_parse(Parser *p, const char **str, const char *end, int raw, int recurse_lvl,
- Token *first_token, Token* t, Token *last_token);
-
-/* Parse the f-string at *str, ending at end. We know *str starts an
- expression (so it must be a '{'). Returns the FormattedValue node, which
- includes the expression, conversion character, format_spec expression, and
- optionally the text of the expression (if = is used).
-
- Note that I don't do a perfect job here: I don't make sure that a
- closing brace doesn't match an opening paren, for example. It
- doesn't need to error on all invalid expressions, just correctly
- find the end of all valid ones. Any errors inside the expression
- will be caught when we parse it later.
-
- *expression is set to the expression. For an '=' "debug" expression,
- *expr_text is set to the debug text (the original text of the expression,
- including the '=' and any whitespace around it, as a string object). If
- not a debug expression, *expr_text set to NULL. */
-static int
-fstring_find_expr(Parser *p, const char **str, const char *end, int raw, int recurse_lvl,
- PyObject **expr_text, expr_ty *expression, Token *first_token,
- Token *t, Token *last_token)
-{
- /* Return -1 on error, else 0. */
-
- const char *expr_start;
- const char *expr_end;
- expr_ty simple_expression;
- expr_ty format_spec = NULL; /* Optional format specifier. */
- int conversion = -1; /* The conversion char. Use default if not
- specified, or !r if using = and no format
- spec. */
-
- /* 0 if we're not in a string, else the quote char we're trying to
- match (single or double quote). */
- char quote_char = 0;
-
- /* If we're inside a string, 1=normal, 3=triple-quoted. */
- int string_type = 0;
-
- /* Keep track of nesting level for braces/parens/brackets in
- expressions. */
- Py_ssize_t nested_depth = 0;
- char parenstack[MAXLEVEL];
-
- *expr_text = NULL;
-
- /* Can only nest one level deep. */
- if (recurse_lvl >= 2) {
- RAISE_SYNTAX_ERROR("f-string: expressions nested too deeply");
- goto error;
- }
-
- /* The first char must be a left brace, or we wouldn't have gotten
- here. Skip over it. */
- assert(**str == '{');
- *str += 1;
-
- expr_start = *str;
- for (; *str < end; (*str)++) {
- char ch;
-
- /* Loop invariants. */
- assert(nested_depth >= 0);
- assert(*str >= expr_start && *str < end);
- if (quote_char) {
- assert(string_type == 1 || string_type == 3);
- } else {
- assert(string_type == 0);
- }
-
- ch = **str;
- /* Nowhere inside an expression is a backslash allowed. */
- if (ch == '\\') {
- /* Error: can't include a backslash character, inside
- parens or strings or not. */
- RAISE_SYNTAX_ERROR(
- "f-string expression part "
- "cannot include a backslash");
- goto error;
- }
- if (quote_char) {
- /* We're inside a string. See if we're at the end. */
- /* This code needs to implement the same non-error logic
- as tok_get from tokenizer.c, at the letter_quote
- label. To actually share that code would be a
- nightmare. But, it's unlikely to change and is small,
- so duplicate it here. Note we don't need to catch all
- of the errors, since they'll be caught when parsing the
- expression. We just need to match the non-error
- cases. Thus we can ignore \n in single-quoted strings,
- for example. Or non-terminated strings. */
- if (ch == quote_char) {
- /* Does this match the string_type (single or triple
- quoted)? */
- if (string_type == 3) {
- if (*str+2 < end && *(*str+1) == ch && *(*str+2) == ch) {
- /* We're at the end of a triple quoted string. */
- *str += 2;
- string_type = 0;
- quote_char = 0;
- continue;
- }
- } else {
- /* We're at the end of a normal string. */
- quote_char = 0;
- string_type = 0;
- continue;
- }
- }
- } else if (ch == '\'' || ch == '"') {
- /* Is this a triple quoted string? */
- if (*str+2 < end && *(*str+1) == ch && *(*str+2) == ch) {
- string_type = 3;
- *str += 2;
- } else {
- /* Start of a normal string. */
- string_type = 1;
- }
- /* Start looking for the end of the string. */
- quote_char = ch;
- } else if (ch == '[' || ch == '{' || ch == '(') {
- if (nested_depth >= MAXLEVEL) {
- RAISE_SYNTAX_ERROR("f-string: too many nested parenthesis");
- goto error;
- }
- parenstack[nested_depth] = ch;
- nested_depth++;
- } else if (ch == '#') {
- /* Error: can't include a comment character, inside parens
- or not. */
- RAISE_SYNTAX_ERROR("f-string expression part cannot include '#'");
- goto error;
- } else if (nested_depth == 0 &&
- (ch == '!' || ch == ':' || ch == '}' ||
- ch == '=' || ch == '>' || ch == '<')) {
- /* See if there's a next character. */
- if (*str+1 < end) {
- char next = *(*str+1);
-
- /* For "!=". since '=' is not an allowed conversion character,
- nothing is lost in this test. */
- if ((ch == '!' && next == '=') || /* != */
- (ch == '=' && next == '=') || /* == */
- (ch == '<' && next == '=') || /* <= */
- (ch == '>' && next == '=') /* >= */
- ) {
- *str += 1;
- continue;
- }
- }
- /* Don't get out of the loop for these, if they're single
- chars (not part of 2-char tokens). If by themselves, they
- don't end an expression (unlike say '!'). */
- if (ch == '>' || ch == '<') {
- continue;
- }
-
- /* Normal way out of this loop. */
- break;
- } else if (ch == ']' || ch == '}' || ch == ')') {
- if (!nested_depth) {
- RAISE_SYNTAX_ERROR("f-string: unmatched '%c'", ch);
- goto error;
- }
- nested_depth--;
- int opening = (unsigned char)parenstack[nested_depth];
- if (!((opening == '(' && ch == ')') ||
- (opening == '[' && ch == ']') ||
- (opening == '{' && ch == '}')))
- {
- RAISE_SYNTAX_ERROR(
- "f-string: closing parenthesis '%c' "
- "does not match opening parenthesis '%c'",
- ch, opening);
- goto error;
- }
- } else {
- /* Just consume this char and loop around. */
- }
- }
- expr_end = *str;
- /* If we leave the above loop in a string or with mismatched parens, we
- don't really care. We'll get a syntax error when compiling the
- expression. But, we can produce a better error message, so let's just
- do that.*/
- if (quote_char) {
- RAISE_SYNTAX_ERROR("f-string: unterminated string");
- goto error;
- }
- if (nested_depth) {
- int opening = (unsigned char)parenstack[nested_depth - 1];
- RAISE_SYNTAX_ERROR("f-string: unmatched '%c'", opening);
- goto error;
- }
-
- if (*str >= end) {
- goto unexpected_end_of_string;
- }
-
- /* Compile the expression as soon as possible, so we show errors
- related to the expression before errors related to the
- conversion or format_spec. */
- simple_expression = fstring_compile_expr(p, expr_start, expr_end, t);
- if (!simple_expression) {
- goto error;
- }
-
- /* Check for =, which puts the text value of the expression in
- expr_text. */
- if (**str == '=') {
- if (p->feature_version < 8) {
- RAISE_SYNTAX_ERROR("f-string: self documenting expressions are "
- "only supported in Python 3.8 and greater");
- goto error;
- }
- *str += 1;
-
- /* Skip over ASCII whitespace. No need to test for end of string
- here, since we know there's at least a trailing quote somewhere
- ahead. */
- while (Py_ISSPACE(**str)) {
- *str += 1;
- }
- if (*str >= end) {
- goto unexpected_end_of_string;
- }
- /* Set *expr_text to the text of the expression. */
- *expr_text = PyUnicode_FromStringAndSize(expr_start, *str-expr_start);
- if (!*expr_text) {
- goto error;
- }
- }
-
- /* Check for a conversion char, if present. */
- if (**str == '!') {
- *str += 1;
- const char *conv_start = *str;
- while (1) {
- if (*str >= end) {
- goto unexpected_end_of_string;
- }
- if (**str == '}' || **str == ':') {
- break;
- }
- *str += 1;
- }
- if (*str == conv_start) {
- RAISE_SYNTAX_ERROR(
- "f-string: missed conversion character");
- goto error;
- }
-
- conversion = (unsigned char)*conv_start;
- /* Validate the conversion. */
- if ((*str != conv_start + 1) ||
- !(conversion == 's' || conversion == 'r' || conversion == 'a'))
- {
- PyObject *conv_obj = PyUnicode_FromStringAndSize(conv_start,
- *str-conv_start);
- if (conv_obj) {
- RAISE_SYNTAX_ERROR(
- "f-string: invalid conversion character %R: "
- "expected 's', 'r', or 'a'",
- conv_obj);
- Py_DECREF(conv_obj);
- }
- goto error;
- }
-
- }
-
- /* Check for the format spec, if present. */
- assert(*str < end);
- if (**str == ':') {
- *str += 1;
- if (*str >= end) {
- goto unexpected_end_of_string;
- }
-
- /* Parse the format spec. */
- format_spec = fstring_parse(p, str, end, raw, recurse_lvl+1,
- first_token, t, last_token);
- if (!format_spec) {
- goto error;
- }
- }
-
- if (*str >= end || **str != '}') {
- goto unexpected_end_of_string;
- }
-
- /* We're at a right brace. Consume it. */
- assert(*str < end);
- assert(**str == '}');
- *str += 1;
-
- /* If we're in = mode (detected by non-NULL expr_text), and have no format
- spec and no explicit conversion, set the conversion to 'r'. */
- if (*expr_text && format_spec == NULL && conversion == -1) {
- conversion = 'r';
- }
-
- /* And now create the FormattedValue node that represents this
- entire expression with the conversion and format spec. */
- //TODO: Fix this
- *expression = _PyAST_FormattedValue(simple_expression, conversion,
- format_spec, first_token->lineno,
- first_token->col_offset,
- last_token->end_lineno,
- last_token->end_col_offset, p->arena);
- if (!*expression) {
- goto error;
- }
-
- return 0;
-
-unexpected_end_of_string:
- RAISE_SYNTAX_ERROR("f-string: expecting '}'");
- /* Falls through to error. */
-
-error:
- Py_XDECREF(*expr_text);
- return -1;
-
-}
-
-/* Return -1 on error.
-
- Return 0 if we have a literal (possible zero length) and an
- expression (zero length if at the end of the string.
-
- Return 1 if we have a literal, but no expression, and we want the
- caller to call us again. This is used to deal with doubled
- braces.
-
- When called multiple times on the string 'a{{b{0}c', this function
- will return:
-
- 1. the literal 'a{' with no expression, and a return value
- of 1. Despite the fact that there's no expression, the return
- value of 1 means we're not finished yet.
-
- 2. the literal 'b' and the expression '0', with a return value of
- 0. The fact that there's an expression means we're not finished.
-
- 3. literal 'c' with no expression and a return value of 0. The
- combination of the return value of 0 with no expression means
- we're finished.
-*/
-static int
-fstring_find_literal_and_expr(Parser *p, const char **str, const char *end, int raw,
- int recurse_lvl, PyObject **literal,
- PyObject **expr_text, expr_ty *expression,
- Token *first_token, Token *t, Token *last_token)
-{
- int result;
-
- assert(*literal == NULL && *expression == NULL);
-
- /* Get any literal string. */
- result = fstring_find_literal(p, str, end, raw, literal, recurse_lvl, t);
- if (result < 0) {
- goto error;
- }
-
- assert(result == 0 || result == 1);
-
- if (result == 1) {
- /* We have a literal, but don't look at the expression. */
- return 1;
- }
-
- if (*str >= end || **str == '}') {
- /* We're at the end of the string or the end of a nested
- f-string: no expression. The top-level error case where we
- expect to be at the end of the string but we're at a '}' is
- handled later. */
- return 0;
- }
-
- /* We must now be the start of an expression, on a '{'. */
- assert(**str == '{');
-
- if (fstring_find_expr(p, str, end, raw, recurse_lvl, expr_text,
- expression, first_token, t, last_token) < 0) {
- goto error;
- }
-
- return 0;
-
-error:
- Py_CLEAR(*literal);
- return -1;
-}
-
-#ifdef NDEBUG
-#define ExprList_check_invariants(l)
-#else
-static void
-ExprList_check_invariants(ExprList *l)
-{
- /* Check our invariants. Make sure this object is "live", and
- hasn't been deallocated. */
- assert(l->size >= 0);
- assert(l->p != NULL);
- if (l->size <= EXPRLIST_N_CACHED) {
- assert(l->data == l->p);
- }
-}
-#endif
-
-static void
-ExprList_Init(ExprList *l)
-{
- l->allocated = EXPRLIST_N_CACHED;
- l->size = 0;
-
- /* Until we start allocating dynamically, p points to data. */
- l->p = l->data;
-
- ExprList_check_invariants(l);
-}
-
-static int
-ExprList_Append(ExprList *l, expr_ty exp)
-{
- ExprList_check_invariants(l);
- if (l->size >= l->allocated) {
- /* We need to alloc (or realloc) the memory. */
- Py_ssize_t new_size = l->allocated * 2;
-
- /* See if we've ever allocated anything dynamically. */
- if (l->p == l->data) {
- Py_ssize_t i;
- /* We're still using the cached data. Switch to
- alloc-ing. */
- l->p = PyMem_Malloc(sizeof(expr_ty) * new_size);
- if (!l->p) {
- return -1;
- }
- /* Copy the cached data into the new buffer. */
- for (i = 0; i < l->size; i++) {
- l->p[i] = l->data[i];
- }
- } else {
- /* Just realloc. */
- expr_ty *tmp = PyMem_Realloc(l->p, sizeof(expr_ty) * new_size);
- if (!tmp) {
- PyMem_Free(l->p);
- l->p = NULL;
- return -1;
- }
- l->p = tmp;
- }
-
- l->allocated = new_size;
- assert(l->allocated == 2 * l->size);
- }
-
- l->p[l->size++] = exp;
-
- ExprList_check_invariants(l);
- return 0;
-}
-
-static void
-ExprList_Dealloc(ExprList *l)
-{
- ExprList_check_invariants(l);
-
- /* If there's been an error, or we've never dynamically allocated,
- do nothing. */
- if (!l->p || l->p == l->data) {
- /* Do nothing. */
- } else {
- /* We have dynamically allocated. Free the memory. */
- PyMem_Free(l->p);
- }
- l->p = NULL;
- l->size = -1;
-}
-
-static asdl_expr_seq *
-ExprList_Finish(ExprList *l, PyArena *arena)
-{
- asdl_expr_seq *seq;
-
- ExprList_check_invariants(l);
-
- /* Allocate the asdl_seq and copy the expressions in to it. */
- seq = _Py_asdl_expr_seq_new(l->size, arena);
- if (seq) {
- Py_ssize_t i;
- for (i = 0; i < l->size; i++) {
- asdl_seq_SET(seq, i, l->p[i]);
- }
- }
- ExprList_Dealloc(l);
- return seq;
-}
-
-#ifdef NDEBUG
-#define FstringParser_check_invariants(state)
-#else
-static void
-FstringParser_check_invariants(FstringParser *state)
-{
- if (state->last_str) {
- assert(PyUnicode_CheckExact(state->last_str));
- }
- ExprList_check_invariants(&state->expr_list);
-}
-#endif
-
-void
-_PyPegen_FstringParser_Init(FstringParser *state)
-{
- state->last_str = NULL;
- state->fmode = 0;
- ExprList_Init(&state->expr_list);
- FstringParser_check_invariants(state);
-}
-
-void
-_PyPegen_FstringParser_Dealloc(FstringParser *state)
-{
- FstringParser_check_invariants(state);
-
- Py_XDECREF(state->last_str);
- ExprList_Dealloc(&state->expr_list);
-}
-
-/* Make a Constant node, but decref the PyUnicode object being added. */
-static expr_ty
-make_str_node_and_del(Parser *p, PyObject **str, Token* first_token, Token *last_token)
-{
- PyObject *s = *str;
- PyObject *kind = NULL;
- *str = NULL;
- assert(PyUnicode_CheckExact(s));
- if (_PyArena_AddPyObject(p->arena, s) < 0) {
- Py_DECREF(s);
- return NULL;
- }
- const char* the_str = PyBytes_AsString(first_token->bytes);
- if (the_str && the_str[0] == 'u') {
- kind = _PyPegen_new_identifier(p, "u");
- }
-
- if (kind == NULL && PyErr_Occurred()) {
- return NULL;
- }
-
- return _PyAST_Constant(s, kind, first_token->lineno, first_token->col_offset,
- last_token->end_lineno, last_token->end_col_offset,
- p->arena);
-
-}
-
-
-/* Add a non-f-string (that is, a regular literal string). str is
- decref'd. */
-int
-_PyPegen_FstringParser_ConcatAndDel(FstringParser *state, PyObject *str)
-{
- FstringParser_check_invariants(state);
-
- assert(PyUnicode_CheckExact(str));
-
- if (PyUnicode_GET_LENGTH(str) == 0) {
- Py_DECREF(str);
- return 0;
- }
-
- if (!state->last_str) {
- /* We didn't have a string before, so just remember this one. */
- state->last_str = str;
- } else {
- /* Concatenate this with the previous string. */
- PyUnicode_AppendAndDel(&state->last_str, str);
- if (!state->last_str) {
- return -1;
- }
- }
- FstringParser_check_invariants(state);
- return 0;
-}
-
-/* Parse an f-string. The f-string is in *str to end, with no
- 'f' or quotes. */
-int
-_PyPegen_FstringParser_ConcatFstring(Parser *p, FstringParser *state, const char **str,
- const char *end, int raw, int recurse_lvl,
- Token *first_token, Token* t, Token *last_token)
-{
- FstringParser_check_invariants(state);
- state->fmode = 1;
-
- /* Parse the f-string. */
- while (1) {
- PyObject *literal = NULL;
- PyObject *expr_text = NULL;
- expr_ty expression = NULL;
-
- /* If there's a zero length literal in front of the
- expression, literal will be NULL. If we're at the end of
- the f-string, expression will be NULL (unless result == 1,
- see below). */
- int result = fstring_find_literal_and_expr(p, str, end, raw, recurse_lvl,
- &literal, &expr_text,
- &expression, first_token, t, last_token);
- if (result < 0) {
- return -1;
- }
-
- /* Add the literal, if any. */
- if (literal && _PyPegen_FstringParser_ConcatAndDel(state, literal) < 0) {
- Py_XDECREF(expr_text);
- return -1;
- }
- /* Add the expr_text, if any. */
- if (expr_text && _PyPegen_FstringParser_ConcatAndDel(state, expr_text) < 0) {
- return -1;
- }
-
- /* We've dealt with the literal and expr_text, their ownership has
- been transferred to the state object. Don't look at them again. */
-
- /* See if we should just loop around to get the next literal
- and expression, while ignoring the expression this
- time. This is used for un-doubling braces, as an
- optimization. */
- if (result == 1) {
- continue;
- }
-
- if (!expression) {
- /* We're done with this f-string. */
- break;
- }
-
- /* We know we have an expression. Convert any existing string
- to a Constant node. */
- if (state->last_str) {
- /* Convert the existing last_str literal to a Constant node. */
- expr_ty last_str = make_str_node_and_del(p, &state->last_str, first_token, last_token);
- if (!last_str || ExprList_Append(&state->expr_list, last_str) < 0) {
- return -1;
- }
- }
-
- if (ExprList_Append(&state->expr_list, expression) < 0) {
- return -1;
- }
- }
-
- /* If recurse_lvl is zero, then we must be at the end of the
- string. Otherwise, we must be at a right brace. */
-
- if (recurse_lvl == 0 && *str < end-1) {
- RAISE_SYNTAX_ERROR("f-string: unexpected end of string");
- return -1;
- }
- if (recurse_lvl != 0 && **str != '}') {
- RAISE_SYNTAX_ERROR("f-string: expecting '}'");
- return -1;
- }
-
- FstringParser_check_invariants(state);
- return 0;
-}
-
-/* Convert the partial state reflected in last_str and expr_list to an
- expr_ty. The expr_ty can be a Constant, or a JoinedStr. */
-expr_ty
-_PyPegen_FstringParser_Finish(Parser *p, FstringParser *state, Token* first_token,
- Token *last_token)
-{
- asdl_expr_seq *seq;
-
- FstringParser_check_invariants(state);
-
- /* If we're just a constant string with no expressions, return
- that. */
- if (!state->fmode) {
- assert(!state->expr_list.size);
- if (!state->last_str) {
- /* Create a zero length string. */
- state->last_str = PyUnicode_FromStringAndSize(NULL, 0);
- if (!state->last_str) {
- goto error;
+ return NULL;
}
}
- return make_str_node_and_del(p, &state->last_str, first_token, last_token);
- }
-
- /* Create a Constant node out of last_str, if needed. It will be the
- last node in our expression list. */
- if (state->last_str) {
- expr_ty str = make_str_node_and_del(p, &state->last_str, first_token, last_token);
- if (!str || ExprList_Append(&state->expr_list, str) < 0) {
- goto error;
+ if (rawmode) {
+ return PyBytes_FromStringAndSize(s, len);
}
+ return decode_bytes_with_escapes(p, s, len, t);
}
- /* This has already been freed. */
- assert(state->last_str == NULL);
-
- seq = ExprList_Finish(&state->expr_list, p->arena);
- if (!seq) {
- goto error;
- }
-
- return _PyAST_JoinedStr(seq, first_token->lineno, first_token->col_offset,
- last_token->end_lineno, last_token->end_col_offset,
- p->arena);
-
-error:
- _PyPegen_FstringParser_Dealloc(state);
- return NULL;
-}
-
-/* Given an f-string (with no 'f' or quotes) that's in *str and ends
- at end, parse it into an expr_ty. Return NULL on error. Adjust
- str to point past the parsed portion. */
-static expr_ty
-fstring_parse(Parser *p, const char **str, const char *end, int raw,
- int recurse_lvl, Token *first_token, Token* t, Token *last_token)
-{
- FstringParser state;
-
- _PyPegen_FstringParser_Init(&state);
- if (_PyPegen_FstringParser_ConcatFstring(p, &state, str, end, raw, recurse_lvl,
- first_token, t, last_token) < 0) {
- _PyPegen_FstringParser_Dealloc(&state);
- return NULL;
- }
-
- return _PyPegen_FstringParser_Finish(p, &state, t, t);
+ return _PyPegen_decode_string(p, rawmode, s, len, t);
}
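After this change, string_parser.c only handles plain string and bytes literals: _PyPegen_parse_string strips the r/b/u prefixes and the quotes, then _PyPegen_decode_string chooses between a raw UTF-8 decode and escape decoding, while f-strings are left to the parser. A rough standalone approximation of that prefix and quote handling in plain C (names and simplifications are mine; triple quotes and error reporting are omitted):

    #include <stdio.h>
    #include <string.h>
    #include <ctype.h>

    /* Strip r/b/u prefixes and the surrounding quotes from a literal,
       reporting whether it is a bytes literal and whether it is raw.
       Returns a pointer into s and stores the payload length in *len. */
    static const char *
    strip_literal(const char *s, size_t *len, int *bytesmode, int *rawmode)
    {
        *bytesmode = 0;
        *rawmode = 0;
        while (isalpha((unsigned char)*s)) {
            if (*s == 'b' || *s == 'B') *bytesmode = 1;
            else if (*s == 'r' || *s == 'R') *rawmode = 1;
            /* 'u'/'U' is accepted and ignored */
            s++;
        }
        char quote = *s++;              /* skip the opening quote */
        size_t n = strlen(s);
        if (n == 0 || s[n - 1] != quote) {
            return NULL;                /* malformed literal */
        }
        *len = n - 1;                   /* drop the closing quote */
        /* escape decoding can be skipped when raw or when no backslash occurs */
        *rawmode = *rawmode || memchr(s, '\\', *len) == NULL;
        return s;
    }

    int main(void)
    {
        size_t len; int b, r;
        const char *body = strip_literal("rb'abc\\n'", &len, &b, &r);
        printf("bytes=%d raw=%d payload=%.*s\n", b, r, (int)len, body);
        return 0;
    }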
diff --git a/Parser/string_parser.h b/Parser/string_parser.h
index 4a22f3d3086f47..0b34de1b4e41e9 100644
--- a/Parser/string_parser.h
+++ b/Parser/string_parser.h
@@ -5,42 +5,7 @@
#include <Python.h>
#include "pegen.h"
-#define EXPRLIST_N_CACHED 64
-
-typedef struct {
- /* Incrementally build an array of expr_ty, so be used in an
- asdl_seq. Cache some small but reasonably sized number of
- expr_ty's, and then after that start dynamically allocating,
- doubling the number allocated each time. Note that the f-string
- f'{0}a{1}' contains 3 expr_ty's: 2 FormattedValue's, and one
- Constant for the literal 'a'. So you add expr_ty's about twice as
- fast as you add expressions in an f-string. */
-
- Py_ssize_t allocated; /* Number we've allocated. */
- Py_ssize_t size; /* Number we've used. */
- expr_ty *p; /* Pointer to the memory we're actually
- using. Will point to 'data' until we
- start dynamically allocating. */
- expr_ty data[EXPRLIST_N_CACHED];
-} ExprList;
-
-/* The FstringParser is designed to add a mix of strings and
- f-strings, and concat them together as needed. Ultimately, it
- generates an expr_ty. */
-typedef struct {
- PyObject *last_str;
- ExprList expr_list;
- int fmode;
-} FstringParser;
-
-void _PyPegen_FstringParser_Init(FstringParser *);
-int _PyPegen_parsestr(Parser *, int *, int *, PyObject **,
- const char **, Py_ssize_t *, Token *);
-int _PyPegen_FstringParser_ConcatFstring(Parser *, FstringParser *, const char **,
- const char *, int, int, Token *, Token *,
- Token *);
-int _PyPegen_FstringParser_ConcatAndDel(FstringParser *, PyObject *);
-expr_ty _PyPegen_FstringParser_Finish(Parser *, FstringParser *, Token *, Token *);
-void _PyPegen_FstringParser_Dealloc(FstringParser *);
+PyObject *_PyPegen_parse_string(Parser *, Token *);
+PyObject *_PyPegen_decode_string(Parser *, int, const char *, size_t, Token *);
#endif
diff --git a/Parser/token.c b/Parser/token.c
index 6299ad2f563144..82267fbfcd0c54 100644
--- a/Parser/token.c
+++ b/Parser/token.c
@@ -60,12 +60,16 @@ const char * const _PyParser_TokenNames[] = {
"RARROW",
"ELLIPSIS",
"COLONEQUAL",
+ "EXCLAMATION",
"OP",
"AWAIT",
"ASYNC",
"TYPE_IGNORE",
"TYPE_COMMENT",
"SOFT_KEYWORD",
+ "FSTRING_START",
+ "FSTRING_MIDDLE",
+ "FSTRING_END",
"",
"",
"",
@@ -79,6 +83,7 @@ int
_PyToken_OneChar(int c1)
{
switch (c1) {
+ case '!': return EXCLAMATION;
case '%': return PERCENT;
case '&': return AMPER;
case '(': return LPAR;
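With the EXCLAMATION, FSTRING_START, FSTRING_MIDDLE and FSTRING_END token kinds in place, an f-string is no longer tokenized as a single opaque STRING: the literal pieces and the expression parts come out as separate tokens. The table below is a hand-written illustration of the rough stream one would expect for a small input under this scheme, not output captured from this build, and the exact kinds and spellings may differ:

    #include <stdio.h>

    /* Expected rough token stream for:  f"hello {name!r}!"  */
    static const struct { const char *kind; const char *text; } stream[] = {
        {"FSTRING_START",  "f\""},
        {"FSTRING_MIDDLE", "hello "},
        {"LBRACE",         "{"},
        {"NAME",           "name"},
        {"EXCLAMATION",    "!"},
        {"NAME",           "r"},
        {"RBRACE",         "}"},
        {"FSTRING_MIDDLE", "!"},
        {"FSTRING_END",    "\""},
    };

    int main(void)
    {
        for (size_t i = 0; i < sizeof(stream) / sizeof(stream[0]); i++) {
            printf("%-15s %s\n", stream[i].kind, stream[i].text);
        }
        return 0;
    }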
diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c
index 463c0e00ca1411..5244ab7d4f7e02 100644
--- a/Parser/tokenizer.c
+++ b/Parser/tokenizer.c
@@ -11,11 +11,6 @@
#include "tokenizer.h"
#include "errcode.h"
-#include "unicodeobject.h"
-#include "bytesobject.h"
-#include "fileobject.h"
-#include "abstract.h"
-
/* Alternate tab spacing */
#define ALTTABSIZE 1
@@ -43,6 +38,24 @@
tok->lineno++; \
tok->col_offset = 0;
+#define INSIDE_FSTRING(tok) (tok->tok_mode_stack_index > 0)
+#define INSIDE_FSTRING_EXPR(tok) (tok->curly_bracket_expr_start_depth >= 0)
+#ifdef Py_DEBUG
+static inline tokenizer_mode* TOK_GET_MODE(struct tok_state* tok) {
+ assert(tok->tok_mode_stack_index >= 0);
+ assert(tok->tok_mode_stack_index < MAXLEVEL);
+ return &(tok->tok_mode_stack[tok->tok_mode_stack_index]);
+}
+static inline tokenizer_mode* TOK_NEXT_MODE(struct tok_state* tok) {
+ assert(tok->tok_mode_stack_index >= 0);
+ assert(tok->tok_mode_stack_index < MAXLEVEL);
+ return &(tok->tok_mode_stack[++tok->tok_mode_stack_index]);
+}
+#else
+#define TOK_GET_MODE(tok) (&(tok->tok_mode_stack[tok->tok_mode_stack_index]))
+#define TOK_NEXT_MODE(tok) (&(tok->tok_mode_stack[++tok->tok_mode_stack_index]))
+#endif
+
/* Forward */
static struct tok_state *tok_new(void);
static int tok_nextc(struct tok_state *tok);
@@ -98,6 +111,9 @@ tok_new(void)
tok->interactive_underflow = IUNDERFLOW_NORMAL;
tok->str = NULL;
tok->report_warnings = 1;
+    tok->tok_mode_stack[0] = (tokenizer_mode){.kind = TOK_REGULAR_MODE, .f_string_quote = '\0', .f_string_quote_size = 0};
+ tok->tok_mode_stack_index = 0;
+ tok->tok_report_warnings = 1;
#ifdef Py_DEBUG
tok->debug = _Py_GetConfig()->parser_debug;
#endif
@@ -345,6 +361,108 @@ tok_concatenate_interactive_new_line(struct tok_state *tok, const char *line) {
return 0;
}
+/* Traverse and remember all f-string buffers, in order to be able to restore
+ them after reallocating tok->buf */
+static void
+remember_fstring_buffers(struct tok_state *tok)
+{
+ int index;
+ tokenizer_mode *mode;
+
+ for (index = tok->tok_mode_stack_index; index >= 0; --index) {
+ mode = &(tok->tok_mode_stack[index]);
+ if (mode->kind == TOK_FSTRING_MODE) {
+ mode->f_string_start_offset = mode->f_string_start - tok->buf;
+ mode->f_string_multi_line_start_offset = mode->f_string_multi_line_start - tok->buf;
+ }
+ }
+}
+
+/* Traverse and restore all f-string buffers after reallocating tok->buf */
+static void
+restore_fstring_buffers(struct tok_state *tok)
+{
+ int index;
+ tokenizer_mode *mode;
+
+ for (index = tok->tok_mode_stack_index; index >= 0; --index) {
+ mode = &(tok->tok_mode_stack[index]);
+ if (mode->kind == TOK_FSTRING_MODE) {
+ mode->f_string_start = tok->buf + mode->f_string_start_offset;
+ mode->f_string_multi_line_start = tok->buf + mode->f_string_multi_line_start_offset;
+ }
+ }
+}
+
+static int
+update_fstring_expr(struct tok_state *tok, char cur)
+{
+ assert(tok->cur != NULL);
+
+ Py_ssize_t size = strlen(tok->cur);
+ tokenizer_mode *tok_mode = TOK_GET_MODE(tok);
+
+ switch (cur) {
+ case 0:
+ if (!tok_mode->last_expr_buffer || tok_mode->last_expr_end >= 0) {
+ return 1;
+ }
+ char *new_buffer = PyMem_Realloc(
+ tok_mode->last_expr_buffer,
+ tok_mode->last_expr_size + size
+ );
+ if (new_buffer == NULL) {
+ PyMem_Free(tok_mode->last_expr_buffer);
+ goto error;
+ }
+ tok_mode->last_expr_buffer = new_buffer;
+ strncpy(tok_mode->last_expr_buffer + tok_mode->last_expr_size, tok->cur, size);
+ tok_mode->last_expr_size += size;
+ break;
+ case '{':
+ if (tok_mode->last_expr_buffer != NULL) {
+ PyMem_Free(tok_mode->last_expr_buffer);
+ }
+ tok_mode->last_expr_buffer = PyMem_Malloc(size);
+ if (tok_mode->last_expr_buffer == NULL) {
+ goto error;
+ }
+ tok_mode->last_expr_size = size;
+ tok_mode->last_expr_end = -1;
+ strncpy(tok_mode->last_expr_buffer, tok->cur, size);
+ break;
+ case '}':
+ case '!':
+ case ':':
+ if (tok_mode->last_expr_end == -1) {
+ tok_mode->last_expr_end = strlen(tok->start);
+ }
+ break;
+ default:
+ Py_UNREACHABLE();
+ }
+ return 1;
+error:
+ tok->done = E_NOMEM;
+ return 0;
+}
+
+static void
+free_fstring_expressions(struct tok_state *tok)
+{
+ int index;
+ tokenizer_mode *mode;
+
+ for (index = tok->tok_mode_stack_index; index >= 0; --index) {
+ mode = &(tok->tok_mode_stack[index]);
+ if (mode->last_expr_buffer != NULL) {
+ PyMem_Free(mode->last_expr_buffer);
+ mode->last_expr_buffer = NULL;
+ mode->last_expr_size = 0;
+ mode->last_expr_end = -1;
+ }
+ }
+}
/* Read a line of text from TOK into S, using the stream in TOK.
Return NULL on failure, else S.
@@ -372,6 +490,7 @@ tok_reserve_buf(struct tok_state *tok, Py_ssize_t size)
Py_ssize_t start = tok->start == NULL ? -1 : tok->start - tok->buf;
Py_ssize_t line_start = tok->start == NULL ? -1 : tok->line_start - tok->buf;
Py_ssize_t multi_line_start = tok->multi_line_start - tok->buf;
+ remember_fstring_buffers(tok);
newbuf = (char *)PyMem_Realloc(newbuf, newsize);
if (newbuf == NULL) {
tok->done = E_NOMEM;
@@ -384,6 +503,7 @@ tok_reserve_buf(struct tok_state *tok, Py_ssize_t size)
tok->start = start < 0 ? NULL : tok->buf + start;
tok->line_start = line_start < 0 ? NULL : tok->buf + line_start;
tok->multi_line_start = multi_line_start < 0 ? NULL : tok->buf + multi_line_start;
+ restore_fstring_buffers(tok);
}
return 1;
}
@@ -838,6 +958,7 @@ _PyTokenizer_Free(struct tok_state *tok)
if (tok->interactive_src_start != NULL) {
PyMem_Free(tok->interactive_src_start);
}
+ free_fstring_expressions(tok);
PyMem_Free(tok);
}
@@ -854,6 +975,9 @@ tok_readline_raw(struct tok_state *tok)
if (line == NULL) {
return 1;
}
+ if (tok->tok_mode_stack_index && !update_fstring_expr(tok, 0)) {
+ return 0;
+ }
if (tok->fp_interactive &&
tok_concatenate_interactive_new_line(tok, line) == -1) {
return 0;
@@ -941,6 +1065,7 @@ tok_underflow_interactive(struct tok_state *tok) {
}
else if (tok->start != NULL) {
Py_ssize_t cur_multi_line_start = tok->multi_line_start - tok->buf;
+ remember_fstring_buffers(tok);
size_t size = strlen(newtok);
ADVANCE_LINENO();
if (!tok_reserve_buf(tok, size + 1)) {
@@ -953,6 +1078,7 @@ tok_underflow_interactive(struct tok_state *tok) {
PyMem_Free(newtok);
tok->inp += size;
tok->multi_line_start = tok->buf + cur_multi_line_start;
+ restore_fstring_buffers(tok);
}
else {
ADVANCE_LINENO();
@@ -969,6 +1095,10 @@ tok_underflow_interactive(struct tok_state *tok) {
}
return 0;
}
+
+ if (tok->tok_mode_stack_index && !update_fstring_expr(tok, 0)) {
+ return 0;
+ }
return 1;
}
@@ -1073,7 +1203,7 @@ tok_nextc(struct tok_state *tok)
return Py_CHARMASK(*tok->cur++); /* Fast path */
}
if (tok->done != E_OK) {
- return EOF;
+ return EOF;
}
if (tok->fp == NULL) {
rc = tok_underflow_string(tok);
@@ -1115,7 +1245,7 @@ tok_backup(struct tok_state *tok, int c)
if (--tok->cur < tok->buf) {
Py_FatalError("tokenizer beginning of buffer");
}
- if ((int)(unsigned char)*tok->cur != c) {
+ if ((int)(unsigned char)*tok->cur != Py_CHARMASK(c)) {
Py_FatalError("tok_backup: wrong character");
}
tok->col_offset--;
@@ -1172,6 +1302,7 @@ _syntaxerror_range(struct tok_state *tok, const char *format,
static int
syntaxerror(struct tok_state *tok, const char *format, ...)
{
+    // These errors are cleaned on startup. TODO: Fix it.
va_list vargs;
va_start(vargs, format);
int ret = _syntaxerror_range(tok, format, -1, -1, vargs);
@@ -1234,6 +1365,41 @@ parser_warn(struct tok_state *tok, PyObject *category, const char *format, ...)
return -1;
}
+static int
+warn_invalid_escape_sequence(struct tok_state *tok, int first_invalid_escape_char)
+{
+
+ if (!tok->tok_report_warnings) {
+ return 0;
+ }
+
+ PyObject *msg = PyUnicode_FromFormat(
+ "invalid escape sequence '\\%c'",
+ (char) first_invalid_escape_char
+ );
+
+ if (msg == NULL) {
+ return -1;
+ }
+
+ if (PyErr_WarnExplicitObject(PyExc_DeprecationWarning, msg, tok->filename,
+ tok->lineno, NULL, NULL) < 0) {
+ Py_DECREF(msg);
+
+ if (PyErr_ExceptionMatches(PyExc_DeprecationWarning)) {
+ /* Replace the DeprecationWarning exception with a SyntaxError
+ to get a more accurate error report */
+ PyErr_Clear();
+ return syntaxerror(tok, "invalid escape sequence '\\%c'", (char) first_invalid_escape_char);
+ }
+
+ return -1;
+ }
+
+ Py_DECREF(msg);
+ return 0;
+}
+
static int
lookahead(struct tok_state *tok, const char *test)
{
@@ -1389,7 +1555,6 @@ tok_decimal_tail(struct tok_state *tok)
return c;
}
-/* Get next token, after space stripping etc. */
static inline int
tok_continuation_line(struct tok_state *tok) {
@@ -1427,7 +1592,12 @@ token_setup(struct tok_state *tok, struct token *token, int type, const char *st
{
assert((start == NULL && end == NULL) || (start != NULL && end != NULL));
token->level = tok->level;
- token->lineno = type == STRING ? tok->first_lineno : tok->lineno;
+ if (ISSTRINGLIT(type)) {
+ token->lineno = tok->first_lineno;
+ }
+ else {
+ token->lineno = tok->lineno;
+ }
token->end_lineno = tok->lineno;
token->col_offset = token->end_col_offset = -1;
token->start = start;
@@ -1441,7 +1611,7 @@ token_setup(struct tok_state *tok, struct token *token, int type, const char *st
}
static int
-tok_get(struct tok_state *tok, struct token *token)
+tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct token *token)
{
int c;
int blankline, nonascii;
@@ -1602,6 +1772,11 @@ tok_get(struct tok_state *tok, struct token *token)
/* Skip comment, unless it's a type comment */
if (c == '#') {
+
+ if (INSIDE_FSTRING(tok)) {
+ return MAKE_TOKEN(syntaxerror(tok, "f-string expression part cannot include '#'"));
+ }
+
const char *prefix, *p, *type_start;
int current_starting_col_offset;
@@ -1703,6 +1878,9 @@ tok_get(struct tok_state *tok, struct token *token)
}
c = tok_nextc(tok);
if (c == '"' || c == '\'') {
+ if (saw_f) {
+ goto f_string_quote;
+ }
goto letter_quote;
}
}
@@ -1748,7 +1926,9 @@ tok_get(struct tok_state *tok, struct token *token)
int ahead_tok_kind;
memcpy(&ahead_tok, tok, sizeof(ahead_tok));
- ahead_tok_kind = tok_get(&ahead_tok, &ahead_token);
+ ahead_tok_kind = tok_get_normal_mode(&ahead_tok,
+ current_tok,
+ &ahead_token);
if (ahead_tok_kind == NAME
&& ahead_tok.cur - ahead_tok.start == 3
@@ -2003,6 +2183,68 @@ tok_get(struct tok_state *tok, struct token *token)
return MAKE_TOKEN(NUMBER);
}
+ f_string_quote:
+ if (((tolower(*tok->start) == 'f' || tolower(*tok->start) == 'r') && (c == '\'' || c == '"'))) {
+ int quote = c;
+ int quote_size = 1; /* 1 or 3 */
+
+ /* Nodes of type STRING, especially multi line strings
+ must be handled differently in order to get both
+ the starting line number and the column offset right.
+ (cf. issue 16806) */
+ tok->first_lineno = tok->lineno;
+ tok->multi_line_start = tok->line_start;
+
+ /* Find the quote size and start of string */
+ int after_quote = tok_nextc(tok);
+ if (after_quote == quote) {
+ int after_after_quote = tok_nextc(tok);
+ if (after_after_quote == quote) {
+ quote_size = 3;
+ }
+ else {
+ // TODO: Check this
+ tok_backup(tok, after_after_quote);
+ tok_backup(tok, after_quote);
+ }
+ }
+ if (after_quote != quote) {
+ tok_backup(tok, after_quote);
+ }
+
+
+ p_start = tok->start;
+ p_end = tok->cur;
+ tokenizer_mode *the_current_tok = TOK_NEXT_MODE(tok);
+ the_current_tok->kind = TOK_FSTRING_MODE;
+ the_current_tok->f_string_quote = quote;
+ the_current_tok->f_string_quote_size = quote_size;
+ the_current_tok->f_string_start = tok->start;
+ the_current_tok->f_string_multi_line_start = tok->line_start;
+ the_current_tok->f_string_start_offset = -1;
+ the_current_tok->f_string_multi_line_start_offset = -1;
+ the_current_tok->last_expr_buffer = NULL;
+ the_current_tok->last_expr_size = 0;
+ the_current_tok->last_expr_end = -1;
+
+ switch (*tok->start) {
+ case 'F':
+ case 'f':
+ the_current_tok->f_string_raw = tolower(*(tok->start + 1)) == 'r';
+ break;
+ case 'R':
+ case 'r':
+ the_current_tok->f_string_raw = 1;
+ break;
+ default:
+ Py_UNREACHABLE();
+ }
+
+ the_current_tok->curly_bracket_depth = 0;
+ the_current_tok->curly_bracket_expr_start_depth = -1;
+ return MAKE_TOKEN(FSTRING_START);
+ }
+
letter_quote:
/* String */
if (c == '\'' || c == '"') {
@@ -2047,6 +2289,20 @@ tok_get(struct tok_state *tok, struct token *token)
tok->line_start = tok->multi_line_start;
int start = tok->lineno;
tok->lineno = tok->first_lineno;
+
+ if (INSIDE_FSTRING(tok)) {
+            /* When we are in an f-string, before raising the
+             * unterminated string literal error, check whether the
+             * initial quote matches the f-string's quotes; if it does,
+             * this must be a missing '}' token, so raise the proper
+             * error instead. */
+ tokenizer_mode *the_current_tok = TOK_GET_MODE(tok);
+ if (the_current_tok->f_string_quote == quote &&
+ the_current_tok->f_string_quote_size == quote_size) {
+ return MAKE_TOKEN(syntaxerror(tok, "f-string: expecting '}'", start));
+ }
+ }
+
if (quote_size == 3) {
syntaxerror(tok, "unterminated triple-quoted string literal"
" (detected at line %d)", start);
@@ -2089,6 +2345,26 @@ tok_get(struct tok_state *tok, struct token *token)
goto again; /* Read next line */
}
+ /* Punctuation character */
+ int is_punctuation = (c == ':' || c == '}' || c == '!' || c == '{');
+ if (is_punctuation && INSIDE_FSTRING(tok) && INSIDE_FSTRING_EXPR(current_tok)) {
+        /* This code block gets executed before curly_bracket_depth is incremented
+         * by the `{` case, so to check that we are at the 0th level we have to
+         * adjust it manually here. */
+ int cursor = current_tok->curly_bracket_depth - (c != '{');
+
+ if (cursor == 0 && !update_fstring_expr(tok, c)) {
+ return MAKE_TOKEN(ENDMARKER);
+ }
+
+ if (c == ':' && cursor == current_tok->curly_bracket_expr_start_depth) {
+ current_tok->kind = TOK_FSTRING_MODE;
+ p_start = tok->start;
+ p_end = tok->cur;
+ return MAKE_TOKEN(_PyToken_OneChar(c));
+ }
+ }
+
/* Check for two-character token */
{
int c2 = tok_nextc(tok);
@@ -2121,11 +2397,17 @@ tok_get(struct tok_state *tok, struct token *token)
tok->parenlinenostack[tok->level] = tok->lineno;
tok->parencolstack[tok->level] = (int)(tok->start - tok->line_start);
tok->level++;
+ if (INSIDE_FSTRING(tok)) {
+ current_tok->curly_bracket_depth++;
+ }
break;
case ')':
case ']':
case '}':
if (!tok->level) {
+ if (INSIDE_FSTRING(tok) && !current_tok->curly_bracket_depth && c == '}') {
+ return MAKE_TOKEN(syntaxerror(tok, "f-string: single '}' is not allowed"));
+ }
return MAKE_TOKEN(syntaxerror(tok, "unmatched '%c'", c));
}
tok->level--;
@@ -2134,6 +2416,18 @@ tok_get(struct tok_state *tok, struct token *token)
(opening == '[' && c == ']') ||
(opening == '{' && c == '}')))
{
+                /* If the opening bracket belongs to an f-string's expression
+                   part (e.g. f"{)}") and the closing bracket does not match it,
+                   then instead of reporting it against a different syntactical
+                   construct, raise an unmatched parentheses error for the
+                   f-string right away. */
+ if (INSIDE_FSTRING(tok) && opening == '{') {
+ assert(current_tok->curly_bracket_depth >= 0);
+ int previous_bracket = current_tok->curly_bracket_depth - 1;
+ if (previous_bracket == current_tok->curly_bracket_expr_start_depth) {
+ return MAKE_TOKEN(syntaxerror(tok, "f-string: unmatched '%c'", c));
+ }
+ }
if (tok->parenlinenostack[tok->level] != tok->lineno) {
return MAKE_TOKEN(syntaxerror(tok,
"closing parenthesis '%c' does not match "
@@ -2147,6 +2441,16 @@ tok_get(struct tok_state *tok, struct token *token)
c, opening));
}
}
+
+ if (INSIDE_FSTRING(tok)) {
+ current_tok->curly_bracket_depth--;
+ if (c == '}' && current_tok->curly_bracket_depth == current_tok->curly_bracket_expr_start_depth) {
+ current_tok->curly_bracket_expr_start_depth--;
+ current_tok->kind = TOK_FSTRING_MODE;
+ }
+ }
+ break;
+ default:
break;
}
@@ -2162,6 +2466,191 @@ tok_get(struct tok_state *tok, struct token *token)
return MAKE_TOKEN(_PyToken_OneChar(c));
}
+static int
+tok_get_fstring_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct token *token)
+{
+ const char *p_start = NULL;
+ const char *p_end = NULL;
+ int end_quote_size = 0;
+ int unicode_escape = 0;
+
+ tok->start = tok->cur;
+ tok->first_lineno = tok->lineno;
+ tok->starting_col_offset = tok->col_offset;
+
+ // If we start with a bracket, we defer to the normal mode as there is nothing for us to tokenize
+ // before it.
+ int start_char = tok_nextc(tok);
+ if (start_char == '{') {
+ int peek1 = tok_nextc(tok);
+ tok_backup(tok, peek1);
+ tok_backup(tok, start_char);
+ if (peek1 != '{') {
+ current_tok->curly_bracket_expr_start_depth++;
+ if (current_tok->curly_bracket_expr_start_depth >= MAX_EXPR_NESTING) {
+ return MAKE_TOKEN(syntaxerror(tok, "f-string: expressions nested too deeply"));
+ }
+ TOK_GET_MODE(tok)->kind = TOK_REGULAR_MODE;
+ return tok_get_normal_mode(tok, current_tok, token);
+ }
+ }
+ else {
+ tok_backup(tok, start_char);
+ }
+
+ // Check if we are at the end of the string
+ for (int i = 0; i < current_tok->f_string_quote_size; i++) {
+ int quote = tok_nextc(tok);
+ if (quote != current_tok->f_string_quote) {
+ tok_backup(tok, quote);
+ goto f_string_middle;
+ }
+ }
+
+ if (current_tok->last_expr_buffer != NULL) {
+ PyMem_Free(current_tok->last_expr_buffer);
+ current_tok->last_expr_buffer = NULL;
+ current_tok->last_expr_size = 0;
+ current_tok->last_expr_end = -1;
+ }
+
+ p_start = tok->start;
+ p_end = tok->cur;
+ tok->tok_mode_stack_index--;
+ return MAKE_TOKEN(FSTRING_END);
+
+f_string_middle:
+
+ while (end_quote_size != current_tok->f_string_quote_size) {
+ int c = tok_nextc(tok);
+ if (c == EOF || (current_tok->f_string_quote_size == 1 && c == '\n')) {
+ assert(tok->multi_line_start != NULL);
+            // shift the tok_state's location to
+            // the start of the string, and report the error
+            // from the initial quote character
+ tok->cur = (char *)current_tok->f_string_start;
+ tok->cur++;
+ tok->line_start = current_tok->f_string_multi_line_start;
+ int start = tok->lineno;
+ tok->lineno = tok->first_lineno;
+
+ if (current_tok->f_string_quote_size == 3) {
+ return MAKE_TOKEN(syntaxerror(tok,
+ "unterminated triple-quoted f-string literal"
+ " (detected at line %d)", start));
+ }
+ else {
+ return MAKE_TOKEN(syntaxerror(tok,
+ "unterminated f-string literal (detected at"
+ " line %d)", start));
+ }
+ }
+
+ if (c == current_tok->f_string_quote) {
+ end_quote_size += 1;
+ continue;
+ } else {
+ end_quote_size = 0;
+ }
+
+ int in_format_spec = (
+ current_tok->last_expr_end != -1
+ &&
+ INSIDE_FSTRING_EXPR(current_tok)
+ );
+ if (c == '{') {
+ int peek = tok_nextc(tok);
+ if (peek != '{' || in_format_spec) {
+ tok_backup(tok, peek);
+ tok_backup(tok, c);
+ current_tok->curly_bracket_expr_start_depth++;
+ if (current_tok->curly_bracket_expr_start_depth >= MAX_EXPR_NESTING) {
+ return MAKE_TOKEN(syntaxerror(tok, "f-string: expressions nested too deeply"));
+ }
+ TOK_GET_MODE(tok)->kind = TOK_REGULAR_MODE;
+ p_start = tok->start;
+ p_end = tok->cur;
+ } else {
+ p_start = tok->start;
+ p_end = tok->cur - 1;
+ }
+ return MAKE_TOKEN(FSTRING_MIDDLE);
+ } else if (c == '}') {
+ if (unicode_escape) {
+ p_start = tok->start;
+ p_end = tok->cur;
+ return MAKE_TOKEN(FSTRING_MIDDLE);
+ }
+ int peek = tok_nextc(tok);
+
+ // The tokenizer can only be in the format spec if we have already completed the expression
+ // scanning (indicated by the end of the expression being set) and we are not at the top level
+ // of the bracket stack (-1 is the top level). Since format specifiers can't legally use double
+ // brackets, we can bypass it here.
+ if (peek == '}' && !in_format_spec) {
+ p_start = tok->start;
+ p_end = tok->cur - 1;
+ } else {
+ tok_backup(tok, peek);
+ tok_backup(tok, c);
+ TOK_GET_MODE(tok)->kind = TOK_REGULAR_MODE;
+ p_start = tok->start;
+ p_end = tok->cur;
+ }
+ return MAKE_TOKEN(FSTRING_MIDDLE);
+ } else if (c == '\\') {
+ int peek = tok_nextc(tok);
+ // Special case when the backslash is right before a curly
+            // brace. We have to back up the peeked character and return
+            // control to the loop for the next iteration.
+ if (peek == '{' || peek == '}') {
+ if (!current_tok->f_string_raw) {
+ if (warn_invalid_escape_sequence(tok, peek)) {
+ return MAKE_TOKEN(ERRORTOKEN);
+ }
+ }
+ tok_backup(tok, peek);
+ continue;
+ }
+
+ if (!current_tok->f_string_raw) {
+ if (peek == 'N') {
+ /* Handle named unicode escapes (\N{BULLET}) */
+ peek = tok_nextc(tok);
+ if (peek == '{') {
+ unicode_escape = 1;
+ } else {
+ tok_backup(tok, peek);
+ }
+ }
+ } /* else {
+ skip the escaped character
+ }*/
+ }
+ }
+
+    // Back up the f-string quotes to emit a final FSTRING_MIDDLE and
+    // add the quotes to the FSTRING_END in the next tokenizer iteration.
+ for (int i = 0; i < current_tok->f_string_quote_size; i++) {
+ tok_backup(tok, current_tok->f_string_quote);
+ }
+ p_start = tok->start;
+ p_end = tok->cur;
+ return MAKE_TOKEN(FSTRING_MIDDLE);
+}
+
+
+static int
+tok_get(struct tok_state *tok, struct token *token)
+{
+ tokenizer_mode *current_tok = TOK_GET_MODE(tok);
+ if (current_tok->kind == TOK_REGULAR_MODE) {
+ return tok_get_normal_mode(tok, current_tok, token);
+ } else {
+ return tok_get_fstring_mode(tok, current_tok, token);
+ }
+}
+
int
_PyTokenizer_Get(struct tok_state *tok, struct token *token)
{
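
The tokenizer changes above split f-string literals into FSTRING_START / FSTRING_MIDDLE / FSTRING_END tokens and hand the replacement fields back to the regular tokenizer mode. As a hedged sketch of the observable effect, the pure-Python tokenize module should surface the new token names once it sits on top of this tokenizer; the exact token stream is an assumption, not output captured from this build:

    import io
    import tokenize

    src = 'greeting = f"hello {name!r:>{width}}"\n'
    for tok in tokenize.generate_tokens(io.StringIO(src).readline):
        # On a build with this tokenizer, expect FSTRING_START for the opening
        # f", FSTRING_MIDDLE for literal text, and ordinary OP/NAME tokens for
        # the code inside the replacement fields.
        print(tokenize.tok_name[tok.type], repr(tok.string))
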
diff --git a/Parser/tokenizer.h b/Parser/tokenizer.h
index 16a94d5f51d664..2b94aecce626c3 100644
--- a/Parser/tokenizer.h
+++ b/Parser/tokenizer.h
@@ -33,6 +33,33 @@ struct token {
const char *start, *end;
};
+enum tokenizer_mode_kind_t {
+ TOK_REGULAR_MODE,
+ TOK_FSTRING_MODE,
+};
+
+#define MAX_EXPR_NESTING 3
+
+typedef struct _tokenizer_mode {
+ enum tokenizer_mode_kind_t kind;
+
+ int curly_bracket_depth;
+ int curly_bracket_expr_start_depth;
+
+ char f_string_quote;
+ int f_string_quote_size;
+ int f_string_raw;
+ const char* f_string_start;
+ const char* f_string_multi_line_start;
+
+ Py_ssize_t f_string_start_offset;
+ Py_ssize_t f_string_multi_line_start_offset;
+
+ Py_ssize_t last_expr_size;
+ Py_ssize_t last_expr_end;
+ char* last_expr_buffer;
+} tokenizer_mode;
+
/* Tokenizer state */
struct tok_state {
/* Input state; buf <= cur <= inp <= end */
@@ -93,6 +120,10 @@ struct tok_state {
/* How to proceed when asked for a new token in interactive mode */
enum interactive_underflow_t interactive_underflow;
int report_warnings;
+ // TODO: Factor this into its own thing
+ tokenizer_mode tok_mode_stack[MAXLEVEL];
+ int tok_mode_stack_index;
+ int tok_report_warnings;
#ifdef Py_DEBUG
int debug;
#endif
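
MAX_EXPR_NESTING above caps how deep replacement fields may nest inside a format spec before tok_get_fstring_mode raises "f-string: expressions nested too deeply". A small sketch of the two sides of that limit; the precise depth at which the SyntaxError fires is inferred from the #define above and is otherwise an assumption:

    value, width, precision = 3.14159, 12, 4

    # One level of nesting in the format spec: well within MAX_EXPR_NESTING.
    print(f"{value:{width}.{precision}}")

    # Stacking replacement fields several levels deep inside the spec is what
    # the curly_bracket_expr_start_depth counter rejects, e.g. a form like
    #   f"{value:{width:{precision:{1}}}}"
    # is expected to be reported as a SyntaxError when compiled.
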
diff --git a/Programs/_testembed.c b/Programs/_testembed.c
index 00717114b40286..f78ba41fe7b4eb 100644
--- a/Programs/_testembed.c
+++ b/Programs/_testembed.c
@@ -1911,14 +1911,13 @@ static int test_unicode_id_init(void)
str1 = _PyUnicode_FromId(&PyId_test_unicode_id_init);
assert(str1 != NULL);
- assert(Py_REFCNT(str1) == 1);
+ assert(_Py_IsImmortal(str1));
str2 = PyUnicode_FromString("test_unicode_id_init");
assert(str2 != NULL);
assert(PyUnicode_Compare(str1, str2) == 0);
- // str1 is a borrowed reference
Py_DECREF(str2);
Py_Finalize();
diff --git a/Programs/test_frozenmain.h b/Programs/test_frozenmain.h
index 4ac472a88261e1..cd9d1032629f49 100644
--- a/Programs/test_frozenmain.h
+++ b/Programs/test_frozenmain.h
@@ -27,12 +27,12 @@ unsigned char M_test_frozenmain[] = {
218,3,107,101,121,169,0,243,0,0,0,0,250,18,116,101,
115,116,95,102,114,111,122,101,110,109,97,105,110,46,112,121,
250,8,60,109,111,100,117,108,101,62,114,18,0,0,0,1,
- 0,0,0,115,100,0,0,0,240,3,1,1,1,243,8,0,
+ 0,0,0,115,102,0,0,0,240,3,1,1,1,243,8,0,
1,11,219,0,24,225,0,5,208,6,26,212,0,27,217,0,
5,128,106,144,35,151,40,145,40,212,0,27,216,9,38,208,
9,26,215,9,38,209,9,38,211,9,40,168,24,209,9,50,
128,6,240,2,6,12,2,242,0,7,1,42,128,67,241,14,
- 0,5,10,208,10,40,144,67,209,10,40,152,54,160,35,153,
- 59,209,10,40,213,4,41,241,15,7,1,42,114,16,0,0,
- 0,
+ 0,5,10,136,71,144,67,144,53,152,2,152,54,160,35,153,
+ 59,152,45,208,10,40,213,4,41,241,15,7,1,42,114,16,
+ 0,0,0,
};
diff --git a/Python/Python-tokenize.c b/Python/Python-tokenize.c
index 8daa9877254e2e..416dc5971bca3d 100644
--- a/Python/Python-tokenize.c
+++ b/Python/Python-tokenize.c
@@ -86,8 +86,8 @@ tokenizeriter_next(tokenizeriterobject *it)
Py_DECREF(str);
return NULL;
}
- const char *line_start = type == STRING ? it->tok->multi_line_start : it->tok->line_start;
- int lineno = type == STRING ? it->tok->first_lineno : it->tok->lineno;
+ const char *line_start = ISSTRINGLIT(type) ? it->tok->multi_line_start : it->tok->line_start;
+ int lineno = ISSTRINGLIT(type) ? it->tok->first_lineno : it->tok->lineno;
int end_lineno = it->tok->lineno;
int col_offset = -1;
int end_col_offset = -1;
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index c267d5bc031496..d48a1711963058 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -1573,16 +1573,16 @@ dummy_func(
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
#endif /* ENABLE_SPECIALIZATION */
if (global_super == (PyObject *)&PySuper_Type && PyType_Check(class)) {
- int meth_found = 0;
+ int method = 0;
Py_DECREF(global_super);
- res = _PySuper_Lookup((PyTypeObject *)class, self, name, load_method ? &meth_found : NULL);
+ res = _PySuper_Lookup((PyTypeObject *)class, self, name, load_method ? &method : NULL);
Py_DECREF(class);
if (res == NULL) {
Py_DECREF(self);
ERROR_IF(true, error);
}
// Works with CALL, pushes two values: either `meth | self` or `NULL | meth`.
- if (meth_found) {
+ if (method) {
res2 = res;
res = self; // transfer ownership
} else {
@@ -1590,13 +1590,8 @@ dummy_func(
Py_DECREF(self);
}
} else {
- PyObject *super;
- if (oparg & 2) {
- super = PyObject_CallNoArgs(global_super);
- } else {
- PyObject *stack[] = {class, self};
- super = PyObject_Vectorcall(global_super, stack, 2, NULL);
- }
+ PyObject *stack[] = {class, self};
+ PyObject *super = PyObject_Vectorcall(global_super, stack, oparg & 2, NULL);
DECREF_INPUTS();
ERROR_IF(super == NULL, error);
res = PyObject_GetAttr(super, name);
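
The rewritten else branch above folds the zero-argument and two-argument super() fallbacks into one vectorcall: oparg & 2 is 2 when class and self should be passed and 0 for the no-argument form, matching the re-encoded flag bits in the compile.c hunk further down. The two source forms that map onto those oparg variants look like this:

    class Base:
        def greet(self):
            return "base"

    class Child(Base):
        def greet(self):
            # Zero-argument super(): the variant with bit 2 of the oparg clear.
            return super().greet() + " (implicit)"

        def greet_explicit(self):
            # Explicit two-argument super(Child, self): the variant selected by
            # `oparg & 2` above, called with class and self on the stack.
            return super(Child, self).greet() + " (explicit)"

    print(Child().greet(), Child().greet_explicit())
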
diff --git a/Python/ceval.c b/Python/ceval.c
index c6dce354956462..5d5221b2e40990 100644
--- a/Python/ceval.c
+++ b/Python/ceval.c
@@ -54,8 +54,11 @@
#undef Py_DECREF
#define Py_DECREF(arg) \
do { \
- _Py_DECREF_STAT_INC(); \
PyObject *op = _PyObject_CAST(arg); \
+ if (_Py_IsImmortal(op)) { \
+ break; \
+ } \
+ _Py_DECREF_STAT_INC(); \
if (--op->ob_refcnt == 0) { \
destructor dealloc = Py_TYPE(op)->tp_dealloc; \
(*dealloc)(op); \
@@ -78,8 +81,11 @@
#undef _Py_DECREF_SPECIALIZED
#define _Py_DECREF_SPECIALIZED(arg, dealloc) \
do { \
- _Py_DECREF_STAT_INC(); \
PyObject *op = _PyObject_CAST(arg); \
+ if (_Py_IsImmortal(op)) { \
+ break; \
+ } \
+ _Py_DECREF_STAT_INC(); \
if (--op->ob_refcnt == 0) { \
destructor d = (destructor)(dealloc); \
d(op); \
@@ -417,7 +423,7 @@ match_class(PyThreadState *tstate, PyObject *subject, PyObject *type,
Py_ssize_t nargs, PyObject *kwargs)
{
if (!PyType_Check(type)) {
- const char *e = "called match pattern must be a type";
+ const char *e = "called match pattern must be a class";
_PyErr_Format(tstate, PyExc_TypeError, e);
return NULL;
}
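
The Py_DECREF and _Py_DECREF_SPECIALIZED overrides above make the interpreter loop skip refcount updates on immortal objects entirely (the _Py_IsImmortal early-out), which is also why the _testembed assertion earlier changed from Py_REFCNT(str1) == 1 to _Py_IsImmortal(str1). A rough way to observe this from Python, assuming singletons such as None are immortal in this build (the sentinel refcount value itself is an implementation detail):

    import sys

    before = sys.getrefcount(None)
    refs = [None] * 1_000_000          # a million extra references to None
    after = sys.getrefcount(None)

    # With immortal objects, incref/decref on None are no-ops, so the two
    # counts are expected to stay at the same fixed sentinel value.
    print(before, after, before == after)
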
diff --git a/Python/clinic/sysmodule.c.h b/Python/clinic/sysmodule.c.h
index 46252dd404325b..7a7c188bcccc37 100644
--- a/Python/clinic/sysmodule.c.h
+++ b/Python/clinic/sysmodule.c.h
@@ -912,6 +912,34 @@ sys_getallocatedblocks(PyObject *module, PyObject *Py_UNUSED(ignored))
return return_value;
}
+PyDoc_STRVAR(sys_getunicodeinternedsize__doc__,
+"getunicodeinternedsize($module, /)\n"
+"--\n"
+"\n"
+"Return the number of elements of the unicode interned dictionary");
+
+#define SYS_GETUNICODEINTERNEDSIZE_METHODDEF \
+ {"getunicodeinternedsize", (PyCFunction)sys_getunicodeinternedsize, METH_NOARGS, sys_getunicodeinternedsize__doc__},
+
+static Py_ssize_t
+sys_getunicodeinternedsize_impl(PyObject *module);
+
+static PyObject *
+sys_getunicodeinternedsize(PyObject *module, PyObject *Py_UNUSED(ignored))
+{
+ PyObject *return_value = NULL;
+ Py_ssize_t _return_value;
+
+ _return_value = sys_getunicodeinternedsize_impl(module);
+ if ((_return_value == -1) && PyErr_Occurred()) {
+ goto exit;
+ }
+ return_value = PyLong_FromSsize_t(_return_value);
+
+exit:
+ return return_value;
+}
+
PyDoc_STRVAR(sys__getframe__doc__,
"_getframe($module, depth=0, /)\n"
"--\n"
@@ -1387,4 +1415,4 @@ sys__getframemodulename(PyObject *module, PyObject *const *args, Py_ssize_t narg
#ifndef SYS_GETANDROIDAPILEVEL_METHODDEF
#define SYS_GETANDROIDAPILEVEL_METHODDEF
#endif /* !defined(SYS_GETANDROIDAPILEVEL_METHODDEF) */
-/*[clinic end generated code: output=5c761f14326ced54 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=6d598acc26237fbe input=a9049054013a1b77]*/
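
Per the argument-clinic stub above, the new helper takes no arguments and returns a Py_ssize_t with the size of the unicode interned dictionary. A hedged usage sketch, assuming it is exposed as sys.getunicodeinternedsize() as the METHODDEF suggests:

    import sys

    n_before = sys.getunicodeinternedsize()
    sys.intern("a string that is unlikely to be interned already 0123456789")
    n_after = sys.getunicodeinternedsize()

    # Interning a brand-new string should grow the interned dictionary,
    # normally by exactly one entry between the two calls.
    print(n_before, n_after)
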
diff --git a/Python/compile.c b/Python/compile.c
index bcef92665bcb9c..2f7072827241da 100644
--- a/Python/compile.c
+++ b/Python/compile.c
@@ -1053,21 +1053,21 @@ compiler_addop_name(struct compiler_unit *u, location loc,
}
if (opcode == LOAD_SUPER_ATTR) {
arg <<= 2;
+ arg |= 2;
}
if (opcode == LOAD_SUPER_METHOD) {
opcode = LOAD_SUPER_ATTR;
arg <<= 2;
- arg |= 1;
+ arg |= 3;
}
if (opcode == LOAD_ZERO_SUPER_ATTR) {
opcode = LOAD_SUPER_ATTR;
arg <<= 2;
- arg |= 2;
}
if (opcode == LOAD_ZERO_SUPER_METHOD) {
opcode = LOAD_SUPER_ATTR;
arg <<= 2;
- arg |= 3;
+ arg |= 1;
}
return codegen_addop_i(&u->u_instr_sequence, opcode, arg, loc);
}
@@ -3067,11 +3067,9 @@ compiler_try_except(struct compiler *c, stmt_ty s)
[orig, res, exc]
[orig, res, exc, E1] CHECK_EG_MATCH
[orig, res, rest/exc, match?] COPY 1
- [orig, res, rest/exc, match?, match?] POP_JUMP_IF_NOT_NONE H1
- [orig, res, exc, None] POP_TOP
- [orig, res, exc] JUMP L2
+ [orig, res, rest/exc, match?, match?] POP_JUMP_IF_NONE C1
- [orig, res, rest, match] H1: (or POP if no V1)
+ [orig, res, rest, match] (or POP if no V1)
[orig, res, rest] SETUP_FINALLY R1
[orig, res, rest]
@@ -3079,8 +3077,14 @@ compiler_try_except(struct compiler *c, stmt_ty s)
[orig, res, rest, i, v] R1: LIST_APPEND 3 ) exc raised in except* body - add to res
[orig, res, rest, i] POP
+ [orig, res, rest] JUMP LE2
+
+ [orig, res, rest] L2: NOP ) for lineno
+ [orig, res, rest] JUMP LE2
+
+ [orig, res, rest/exc, None] C1: POP
- [orig, res, rest] L2:
+ [orig, res, rest] LE2:
.............................etc.......................
[orig, res, rest] Ln+1: LIST_APPEND 1 ) add unhandled exc to res (could be None)
@@ -3136,7 +3140,8 @@ compiler_try_star_except(struct compiler *c, stmt_ty s)
location loc = LOC(handler);
NEW_JUMP_TARGET_LABEL(c, next_except);
except = next_except;
- NEW_JUMP_TARGET_LABEL(c, handle_match);
+ NEW_JUMP_TARGET_LABEL(c, except_with_error);
+ NEW_JUMP_TARGET_LABEL(c, no_match);
if (i == 0) {
/* create empty list for exceptions raised/reraise in the except* blocks */
/*
@@ -3154,13 +3159,9 @@ compiler_try_star_except(struct compiler *c, stmt_ty s)
VISIT(c, expr, handler->v.ExceptHandler.type);
ADDOP(c, loc, CHECK_EG_MATCH);
ADDOP_I(c, loc, COPY, 1);
- ADDOP_JUMP(c, loc, POP_JUMP_IF_NOT_NONE, handle_match);
- ADDOP(c, loc, POP_TOP); // match
- ADDOP_JUMP(c, loc, JUMP, except);
+ ADDOP_JUMP(c, loc, POP_JUMP_IF_NONE, no_match);
}
- USE_LABEL(c, handle_match);
-
NEW_JUMP_TARGET_LABEL(c, cleanup_end);
NEW_JUMP_TARGET_LABEL(c, cleanup_body);
@@ -3219,9 +3220,16 @@ compiler_try_star_except(struct compiler *c, stmt_ty s)
/* add exception raised to the res list */
ADDOP_I(c, NO_LOCATION, LIST_APPEND, 3); // exc
ADDOP(c, NO_LOCATION, POP_TOP); // lasti
- ADDOP_JUMP(c, NO_LOCATION, JUMP, except);
+ ADDOP_JUMP(c, NO_LOCATION, JUMP, except_with_error);
USE_LABEL(c, except);
+ ADDOP(c, NO_LOCATION, NOP); // to hold a propagated location info
+ ADDOP_JUMP(c, NO_LOCATION, JUMP, except_with_error);
+
+ USE_LABEL(c, no_match);
+ ADDOP(c, loc, POP_TOP); // match (None)
+
+ USE_LABEL(c, except_with_error);
if (i == n - 1) {
/* Add exc to the list (if not None it's the unhandled part of the EG) */
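
The relabelled jump targets above (no_match, except_with_error, and the lineno-carrying NOP) only reshape the code generated for except* handlers; the construct itself is unchanged. For reference, the kind of source that compiler_try_star_except compiles:

    def handle(eg):
        try:
            raise eg
        except* ValueError as sub:
            # sub is an ExceptionGroup holding only the ValueError part
            print("ValueError part:", sub.exceptions)
        except* KeyError as sub:
            print("KeyError part:", sub.exceptions)

    handle(ExceptionGroup("demo", [ValueError(1), KeyError("k")]))
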
@@ -4237,18 +4245,20 @@ is_import_originated(struct compiler *c, expr_ty e)
}
static int
-can_optimize_super_call(struct compiler *c, expr_ty e)
+can_optimize_super_call(struct compiler *c, expr_ty attr)
{
+ expr_ty e = attr->v.Attribute.value;
if (e->kind != Call_kind ||
e->v.Call.func->kind != Name_kind ||
!_PyUnicode_EqualToASCIIString(e->v.Call.func->v.Name.id, "super") ||
+ _PyUnicode_EqualToASCIIString(attr->v.Attribute.attr, "__class__") ||
asdl_seq_LEN(e->v.Call.keywords) != 0) {
return 0;
}
Py_ssize_t num_args = asdl_seq_LEN(e->v.Call.args);
PyObject *super_name = e->v.Call.func->v.Name.id;
- // try to detect statically-visible shadowing of 'super' name
+ // detect statically-visible shadowing of 'super' name
int scope = _PyST_GetScope(c->u->u_ste, super_name);
if (scope != GLOBAL_IMPLICIT) {
return 0;
@@ -4388,7 +4398,7 @@ maybe_optimize_method_call(struct compiler *c, expr_ty e)
/* Alright, we can optimize the code. */
location loc = LOC(meth);
- if (can_optimize_super_call(c, meth->v.Attribute.value)) {
+ if (can_optimize_super_call(c, meth)) {
RETURN_IF_ERROR(load_args_for_super(c, meth->v.Attribute.value));
int opcode = asdl_seq_LEN(meth->v.Attribute.value->v.Call.args) ?
LOAD_SUPER_METHOD : LOAD_ZERO_SUPER_METHOD;
@@ -5406,7 +5416,7 @@ compiler_visit_expr1(struct compiler *c, expr_ty e)
return compiler_formatted_value(c, e);
/* The following exprs can be assignment targets. */
case Attribute_kind:
- if (e->v.Attribute.ctx == Load && can_optimize_super_call(c, e->v.Attribute.value)) {
+ if (e->v.Attribute.ctx == Load && can_optimize_super_call(c, e)) {
RETURN_IF_ERROR(load_args_for_super(c, e->v.Attribute.value));
int opcode = asdl_seq_LEN(e->v.Attribute.value->v.Call.args) ?
LOAD_SUPER_ATTR : LOAD_ZERO_SUPER_ATTR;
@@ -6808,7 +6818,10 @@ insert_prefix_instructions(struct compiler_unit *u, basicblock *entryblock,
.i_loc = NO_LOCATION,
.i_target = NULL,
};
- RETURN_IF_ERROR(_PyBasicblock_InsertInstruction(entryblock, ncellsused, &make_cell));
+ if (_PyBasicblock_InsertInstruction(entryblock, ncellsused, &make_cell) < 0) {
+ PyMem_RawFree(sorted);
+ return ERROR;
+ }
ncellsused += 1;
}
PyMem_RawFree(sorted);
@@ -6980,7 +6993,7 @@ optimize_and_assemble_code_unit(struct compiler_unit *u, PyObject *const_cache,
maxdepth, g.g_entryblock, nlocalsplus,
code_flags, filename);
- error:
+error:
Py_XDECREF(consts);
instr_sequence_fini(&optimized_instrs);
_PyCfgBuilder_Fini(&g);
@@ -7078,7 +7091,9 @@ instructions_to_instr_sequence(PyObject *instructions, instr_sequence *seq)
for (int i = 0; i < num_insts; i++) {
if (is_target[i]) {
- RETURN_IF_ERROR(instr_sequence_use_label(seq, i));
+ if (instr_sequence_use_label(seq, i) < 0) {
+ goto error;
+ }
}
PyObject *item = PyList_GET_ITEM(instructions, i);
if (!PyTuple_Check(item) || PyTuple_GET_SIZE(item) != 6) {
@@ -7116,10 +7131,14 @@ instructions_to_instr_sequence(PyObject *instructions, instr_sequence *seq)
if (PyErr_Occurred()) {
goto error;
}
- RETURN_IF_ERROR(instr_sequence_addop(seq, opcode, oparg, loc));
+ if (instr_sequence_addop(seq, opcode, oparg, loc) < 0) {
+ goto error;
+ }
}
if (seq->s_used && !IS_TERMINATOR_OPCODE(seq->s_instrs[seq->s_used-1].i_opcode)) {
- RETURN_IF_ERROR(instr_sequence_addop(seq, RETURN_VALUE, 0, NO_LOCATION));
+ if (instr_sequence_addop(seq, RETURN_VALUE, 0, NO_LOCATION) < 0) {
+ goto error;
+ }
}
PyMem_Free(is_target);
return SUCCESS;
@@ -7134,12 +7153,17 @@ instructions_to_cfg(PyObject *instructions, cfg_builder *g)
instr_sequence seq;
memset(&seq, 0, sizeof(instr_sequence));
- RETURN_IF_ERROR(
- instructions_to_instr_sequence(instructions, &seq));
-
- RETURN_IF_ERROR(instr_sequence_to_cfg(&seq, g));
+ if (instructions_to_instr_sequence(instructions, &seq) < 0) {
+ goto error;
+ }
+ if (instr_sequence_to_cfg(&seq, g) < 0) {
+ goto error;
+ }
instr_sequence_fini(&seq);
return SUCCESS;
+error:
+ instr_sequence_fini(&seq);
+ return ERROR;
}
static PyObject *
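
The can_optimize_super_call change above (the extra _PyUnicode_EqualToASCIIString check on "__class__") keeps attribute loads of __class__ off the LOAD_SUPER_ATTR fast path, so they keep resolving through ordinary attribute lookup on the super proxy. A small sketch of the form that must stay unspecialized; the expectation that it yields the proxy's own class is the historical behaviour being preserved, not something asserted by this diff:

    class Base:
        pass

    class Child(Base):
        def proxy_class(self):
            # __class__ on the super proxy is deliberately excluded from the
            # optimized super lookup; it resolves like a normal attribute.
            return super().__class__

    print(Child().proxy_class())   # expected: <class 'super'>
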
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h
index 016a9523a1ab7e..6c636ec32b59c6 100644
--- a/Python/generated_cases.c.h
+++ b/Python/generated_cases.c.h
@@ -2189,16 +2189,16 @@
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
#endif /* ENABLE_SPECIALIZATION */
if (global_super == (PyObject *)&PySuper_Type && PyType_Check(class)) {
- int meth_found = 0;
+ int method = 0;
Py_DECREF(global_super);
- res = _PySuper_Lookup((PyTypeObject *)class, self, name, load_method ? &meth_found : NULL);
+ res = _PySuper_Lookup((PyTypeObject *)class, self, name, load_method ? &method : NULL);
Py_DECREF(class);
if (res == NULL) {
Py_DECREF(self);
if (true) goto pop_3_error;
}
// Works with CALL, pushes two values: either `meth | self` or `NULL | meth`.
- if (meth_found) {
+ if (method) {
res2 = res;
res = self; // transfer ownership
} else {
@@ -2206,24 +2206,19 @@
Py_DECREF(self);
}
} else {
- PyObject *super;
- if (oparg & 2) {
- super = PyObject_CallNoArgs(global_super);
- } else {
- PyObject *stack[] = {class, self};
- super = PyObject_Vectorcall(global_super, stack, 2, NULL);
- }
- #line 2217 "Python/generated_cases.c.h"
+ PyObject *stack[] = {class, self};
+ PyObject *super = PyObject_Vectorcall(global_super, stack, oparg & 2, NULL);
+ #line 2212 "Python/generated_cases.c.h"
Py_DECREF(global_super);
Py_DECREF(class);
Py_DECREF(self);
- #line 1601 "Python/bytecodes.c"
+ #line 1596 "Python/bytecodes.c"
if (super == NULL) goto pop_3_error;
res = PyObject_GetAttr(super, name);
Py_DECREF(super);
if (res == NULL) goto pop_3_error;
}
- #line 2227 "Python/generated_cases.c.h"
+ #line 2222 "Python/generated_cases.c.h"
STACK_SHRINK(2);
STACK_GROW(((oparg & 1) ? 1 : 0));
stack_pointer[-1] = res;
@@ -2241,7 +2236,7 @@
uint32_t class_version = read_u32(&next_instr[1].cache);
uint32_t self_type_version = read_u32(&next_instr[3].cache);
PyObject *method = read_obj(&next_instr[5].cache);
- #line 1609 "Python/bytecodes.c"
+ #line 1604 "Python/bytecodes.c"
DEOPT_IF(global_super != (PyObject *)&PySuper_Type, LOAD_SUPER_ATTR);
DEOPT_IF(!PyType_Check(class), LOAD_SUPER_ATTR);
DEOPT_IF(((PyTypeObject *)class)->tp_version_tag != class_version, LOAD_SUPER_ATTR);
@@ -2252,7 +2247,7 @@
Py_INCREF(res2);
Py_DECREF(global_super);
Py_DECREF(class);
- #line 2256 "Python/generated_cases.c.h"
+ #line 2251 "Python/generated_cases.c.h"
STACK_SHRINK(1);
stack_pointer[-1] = res;
stack_pointer[-2] = res2;
@@ -2266,7 +2261,7 @@
PyObject *owner = stack_pointer[-1];
PyObject *res2 = NULL;
PyObject *res;
- #line 1636 "Python/bytecodes.c"
+ #line 1631 "Python/bytecodes.c"
#if ENABLE_SPECIALIZATION
_PyAttrCache *cache = (_PyAttrCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
@@ -2300,9 +2295,9 @@
NULL | meth | arg1 | ... | argN
*/
- #line 2304 "Python/generated_cases.c.h"
+ #line 2299 "Python/generated_cases.c.h"
Py_DECREF(owner);
- #line 1670 "Python/bytecodes.c"
+ #line 1665 "Python/bytecodes.c"
if (meth == NULL) goto pop_1_error;
res2 = NULL;
res = meth;
@@ -2311,12 +2306,12 @@
else {
/* Classic, pushes one value. */
res = PyObject_GetAttr(owner, name);
- #line 2315 "Python/generated_cases.c.h"
+ #line 2310 "Python/generated_cases.c.h"
Py_DECREF(owner);
- #line 1679 "Python/bytecodes.c"
+ #line 1674 "Python/bytecodes.c"
if (res == NULL) goto pop_1_error;
}
- #line 2320 "Python/generated_cases.c.h"
+ #line 2315 "Python/generated_cases.c.h"
STACK_GROW(((oparg & 1) ? 1 : 0));
stack_pointer[-1] = res;
if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; }
@@ -2330,7 +2325,7 @@
PyObject *res;
uint32_t type_version = read_u32(&next_instr[1].cache);
uint16_t index = read_u16(&next_instr[3].cache);
- #line 1684 "Python/bytecodes.c"
+ #line 1679 "Python/bytecodes.c"
PyTypeObject *tp = Py_TYPE(owner);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR);
@@ -2343,7 +2338,7 @@
STAT_INC(LOAD_ATTR, hit);
Py_INCREF(res);
res2 = NULL;
- #line 2347 "Python/generated_cases.c.h"
+ #line 2342 "Python/generated_cases.c.h"
Py_DECREF(owner);
STACK_GROW(((oparg & 1) ? 1 : 0));
stack_pointer[-1] = res;
@@ -2358,7 +2353,7 @@
PyObject *res;
uint32_t type_version = read_u32(&next_instr[1].cache);
uint16_t index = read_u16(&next_instr[3].cache);
- #line 1700 "Python/bytecodes.c"
+ #line 1695 "Python/bytecodes.c"
DEOPT_IF(!PyModule_CheckExact(owner), LOAD_ATTR);
PyDictObject *dict = (PyDictObject *)((PyModuleObject *)owner)->md_dict;
assert(dict != NULL);
@@ -2371,7 +2366,7 @@
STAT_INC(LOAD_ATTR, hit);
Py_INCREF(res);
res2 = NULL;
- #line 2375 "Python/generated_cases.c.h"
+ #line 2370 "Python/generated_cases.c.h"
Py_DECREF(owner);
STACK_GROW(((oparg & 1) ? 1 : 0));
stack_pointer[-1] = res;
@@ -2386,7 +2381,7 @@
PyObject *res;
uint32_t type_version = read_u32(&next_instr[1].cache);
uint16_t index = read_u16(&next_instr[3].cache);
- #line 1716 "Python/bytecodes.c"
+ #line 1711 "Python/bytecodes.c"
PyTypeObject *tp = Py_TYPE(owner);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR);
@@ -2413,7 +2408,7 @@
STAT_INC(LOAD_ATTR, hit);
Py_INCREF(res);
res2 = NULL;
- #line 2417 "Python/generated_cases.c.h"
+ #line 2412 "Python/generated_cases.c.h"
Py_DECREF(owner);
STACK_GROW(((oparg & 1) ? 1 : 0));
stack_pointer[-1] = res;
@@ -2428,7 +2423,7 @@
PyObject *res;
uint32_t type_version = read_u32(&next_instr[1].cache);
uint16_t index = read_u16(&next_instr[3].cache);
- #line 1746 "Python/bytecodes.c"
+ #line 1741 "Python/bytecodes.c"
PyTypeObject *tp = Py_TYPE(owner);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR);
@@ -2438,7 +2433,7 @@
STAT_INC(LOAD_ATTR, hit);
Py_INCREF(res);
res2 = NULL;
- #line 2442 "Python/generated_cases.c.h"
+ #line 2437 "Python/generated_cases.c.h"
Py_DECREF(owner);
STACK_GROW(((oparg & 1) ? 1 : 0));
stack_pointer[-1] = res;
@@ -2453,7 +2448,7 @@
PyObject *res;
uint32_t type_version = read_u32(&next_instr[1].cache);
PyObject *descr = read_obj(&next_instr[5].cache);
- #line 1759 "Python/bytecodes.c"
+ #line 1754 "Python/bytecodes.c"
DEOPT_IF(!PyType_Check(cls), LOAD_ATTR);
DEOPT_IF(((PyTypeObject *)cls)->tp_version_tag != type_version,
@@ -2465,7 +2460,7 @@
res = descr;
assert(res != NULL);
Py_INCREF(res);
- #line 2469 "Python/generated_cases.c.h"
+ #line 2464 "Python/generated_cases.c.h"
Py_DECREF(cls);
STACK_GROW(((oparg & 1) ? 1 : 0));
stack_pointer[-1] = res;
@@ -2479,7 +2474,7 @@
uint32_t type_version = read_u32(&next_instr[1].cache);
uint32_t func_version = read_u32(&next_instr[3].cache);
PyObject *fget = read_obj(&next_instr[5].cache);
- #line 1774 "Python/bytecodes.c"
+ #line 1769 "Python/bytecodes.c"
DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR);
PyTypeObject *cls = Py_TYPE(owner);
@@ -2503,7 +2498,7 @@
JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR);
frame->return_offset = 0;
DISPATCH_INLINED(new_frame);
- #line 2507 "Python/generated_cases.c.h"
+ #line 2502 "Python/generated_cases.c.h"
}
TARGET(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN) {
@@ -2511,7 +2506,7 @@
uint32_t type_version = read_u32(&next_instr[1].cache);
uint32_t func_version = read_u32(&next_instr[3].cache);
PyObject *getattribute = read_obj(&next_instr[5].cache);
- #line 1800 "Python/bytecodes.c"
+ #line 1795 "Python/bytecodes.c"
DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR);
PyTypeObject *cls = Py_TYPE(owner);
DEOPT_IF(cls->tp_version_tag != type_version, LOAD_ATTR);
@@ -2537,7 +2532,7 @@
JUMPBY(INLINE_CACHE_ENTRIES_LOAD_ATTR);
frame->return_offset = 0;
DISPATCH_INLINED(new_frame);
- #line 2541 "Python/generated_cases.c.h"
+ #line 2536 "Python/generated_cases.c.h"
}
TARGET(STORE_ATTR_INSTANCE_VALUE) {
@@ -2545,7 +2540,7 @@
PyObject *value = stack_pointer[-2];
uint32_t type_version = read_u32(&next_instr[1].cache);
uint16_t index = read_u16(&next_instr[3].cache);
- #line 1828 "Python/bytecodes.c"
+ #line 1823 "Python/bytecodes.c"
PyTypeObject *tp = Py_TYPE(owner);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
@@ -2563,7 +2558,7 @@
Py_DECREF(old_value);
}
Py_DECREF(owner);
- #line 2567 "Python/generated_cases.c.h"
+ #line 2562 "Python/generated_cases.c.h"
STACK_SHRINK(2);
next_instr += 4;
DISPATCH();
@@ -2574,7 +2569,7 @@
PyObject *value = stack_pointer[-2];
uint32_t type_version = read_u32(&next_instr[1].cache);
uint16_t hint = read_u16(&next_instr[3].cache);
- #line 1848 "Python/bytecodes.c"
+ #line 1843 "Python/bytecodes.c"
PyTypeObject *tp = Py_TYPE(owner);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
@@ -2613,7 +2608,7 @@
/* PEP 509 */
dict->ma_version_tag = new_version;
Py_DECREF(owner);
- #line 2617 "Python/generated_cases.c.h"
+ #line 2612 "Python/generated_cases.c.h"
STACK_SHRINK(2);
next_instr += 4;
DISPATCH();
@@ -2624,7 +2619,7 @@
PyObject *value = stack_pointer[-2];
uint32_t type_version = read_u32(&next_instr[1].cache);
uint16_t index = read_u16(&next_instr[3].cache);
- #line 1889 "Python/bytecodes.c"
+ #line 1884 "Python/bytecodes.c"
PyTypeObject *tp = Py_TYPE(owner);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
@@ -2634,7 +2629,7 @@
*(PyObject **)addr = value;
Py_XDECREF(old_value);
Py_DECREF(owner);
- #line 2638 "Python/generated_cases.c.h"
+ #line 2633 "Python/generated_cases.c.h"
STACK_SHRINK(2);
next_instr += 4;
DISPATCH();
@@ -2646,7 +2641,7 @@
PyObject *right = stack_pointer[-1];
PyObject *left = stack_pointer[-2];
PyObject *res;
- #line 1908 "Python/bytecodes.c"
+ #line 1903 "Python/bytecodes.c"
#if ENABLE_SPECIALIZATION
_PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
@@ -2659,12 +2654,12 @@
#endif /* ENABLE_SPECIALIZATION */
assert((oparg >> 4) <= Py_GE);
res = PyObject_RichCompare(left, right, oparg>>4);
- #line 2663 "Python/generated_cases.c.h"
+ #line 2658 "Python/generated_cases.c.h"
Py_DECREF(left);
Py_DECREF(right);
- #line 1921 "Python/bytecodes.c"
+ #line 1916 "Python/bytecodes.c"
if (res == NULL) goto pop_2_error;
- #line 2668 "Python/generated_cases.c.h"
+ #line 2663 "Python/generated_cases.c.h"
STACK_SHRINK(1);
stack_pointer[-1] = res;
next_instr += 1;
@@ -2675,7 +2670,7 @@
PyObject *right = stack_pointer[-1];
PyObject *left = stack_pointer[-2];
PyObject *res;
- #line 1925 "Python/bytecodes.c"
+ #line 1920 "Python/bytecodes.c"
DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_OP);
DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_OP);
STAT_INC(COMPARE_OP, hit);
@@ -2687,7 +2682,7 @@
_Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc);
res = (sign_ish & oparg) ? Py_True : Py_False;
Py_INCREF(res);
- #line 2691 "Python/generated_cases.c.h"
+ #line 2686 "Python/generated_cases.c.h"
STACK_SHRINK(1);
stack_pointer[-1] = res;
next_instr += 1;
@@ -2698,7 +2693,7 @@
PyObject *right = stack_pointer[-1];
PyObject *left = stack_pointer[-2];
PyObject *res;
- #line 1940 "Python/bytecodes.c"
+ #line 1935 "Python/bytecodes.c"
DEOPT_IF(!PyLong_CheckExact(left), COMPARE_OP);
DEOPT_IF(!PyLong_CheckExact(right), COMPARE_OP);
DEOPT_IF(!_PyLong_IsCompact((PyLongObject *)left), COMPARE_OP);
@@ -2714,7 +2709,7 @@
_Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free);
res = (sign_ish & oparg) ? Py_True : Py_False;
Py_INCREF(res);
- #line 2718 "Python/generated_cases.c.h"
+ #line 2713 "Python/generated_cases.c.h"
STACK_SHRINK(1);
stack_pointer[-1] = res;
next_instr += 1;
@@ -2725,7 +2720,7 @@
PyObject *right = stack_pointer[-1];
PyObject *left = stack_pointer[-2];
PyObject *res;
- #line 1959 "Python/bytecodes.c"
+ #line 1954 "Python/bytecodes.c"
DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_OP);
DEOPT_IF(!PyUnicode_CheckExact(right), COMPARE_OP);
STAT_INC(COMPARE_OP, hit);
@@ -2738,7 +2733,7 @@
assert(COMPARISON_NOT_EQUALS + 1 == COMPARISON_EQUALS);
res = ((COMPARISON_NOT_EQUALS + eq) & oparg) ? Py_True : Py_False;
Py_INCREF(res);
- #line 2742 "Python/generated_cases.c.h"
+ #line 2737 "Python/generated_cases.c.h"
STACK_SHRINK(1);
stack_pointer[-1] = res;
next_instr += 1;
@@ -2749,14 +2744,14 @@
PyObject *right = stack_pointer[-1];
PyObject *left = stack_pointer[-2];
PyObject *b;
- #line 1974 "Python/bytecodes.c"
+ #line 1969 "Python/bytecodes.c"
int res = Py_Is(left, right) ^ oparg;
- #line 2755 "Python/generated_cases.c.h"
+ #line 2750 "Python/generated_cases.c.h"
Py_DECREF(left);
Py_DECREF(right);
- #line 1976 "Python/bytecodes.c"
+ #line 1971 "Python/bytecodes.c"
b = Py_NewRef(res ? Py_True : Py_False);
- #line 2760 "Python/generated_cases.c.h"
+ #line 2755 "Python/generated_cases.c.h"
STACK_SHRINK(1);
stack_pointer[-1] = b;
DISPATCH();
@@ -2766,15 +2761,15 @@
PyObject *right = stack_pointer[-1];
PyObject *left = stack_pointer[-2];
PyObject *b;
- #line 1980 "Python/bytecodes.c"
+ #line 1975 "Python/bytecodes.c"
int res = PySequence_Contains(right, left);
- #line 2772 "Python/generated_cases.c.h"
+ #line 2767 "Python/generated_cases.c.h"
Py_DECREF(left);
Py_DECREF(right);
- #line 1982 "Python/bytecodes.c"
+ #line 1977 "Python/bytecodes.c"
if (res < 0) goto pop_2_error;
b = Py_NewRef((res^oparg) ? Py_True : Py_False);
- #line 2778 "Python/generated_cases.c.h"
+ #line 2773 "Python/generated_cases.c.h"
STACK_SHRINK(1);
stack_pointer[-1] = b;
DISPATCH();
@@ -2785,12 +2780,12 @@
PyObject *exc_value = stack_pointer[-2];
PyObject *rest;
PyObject *match;
- #line 1987 "Python/bytecodes.c"
+ #line 1982 "Python/bytecodes.c"
if (check_except_star_type_valid(tstate, match_type) < 0) {
- #line 2791 "Python/generated_cases.c.h"
+ #line 2786 "Python/generated_cases.c.h"
Py_DECREF(exc_value);
Py_DECREF(match_type);
- #line 1989 "Python/bytecodes.c"
+ #line 1984 "Python/bytecodes.c"
if (true) goto pop_2_error;
}
@@ -2798,10 +2793,10 @@
rest = NULL;
int res = exception_group_match(exc_value, match_type,
&match, &rest);
- #line 2802 "Python/generated_cases.c.h"
+ #line 2797 "Python/generated_cases.c.h"
Py_DECREF(exc_value);
Py_DECREF(match_type);
- #line 1997 "Python/bytecodes.c"
+ #line 1992 "Python/bytecodes.c"
if (res < 0) goto pop_2_error;
assert((match == NULL) == (rest == NULL));
@@ -2810,7 +2805,7 @@
if (!Py_IsNone(match)) {
PyErr_SetHandledException(match);
}
- #line 2814 "Python/generated_cases.c.h"
+ #line 2809 "Python/generated_cases.c.h"
stack_pointer[-1] = match;
stack_pointer[-2] = rest;
DISPATCH();
@@ -2820,21 +2815,21 @@
PyObject *right = stack_pointer[-1];
PyObject *left = stack_pointer[-2];
PyObject *b;
- #line 2008 "Python/bytecodes.c"
+ #line 2003 "Python/bytecodes.c"
assert(PyExceptionInstance_Check(left));
if (check_except_type_valid(tstate, right) < 0) {
- #line 2827 "Python/generated_cases.c.h"
+ #line 2822 "Python/generated_cases.c.h"
Py_DECREF(right);
- #line 2011 "Python/bytecodes.c"
+ #line 2006 "Python/bytecodes.c"
if (true) goto pop_1_error;
}
int res = PyErr_GivenExceptionMatches(left, right);
- #line 2834 "Python/generated_cases.c.h"
+ #line 2829 "Python/generated_cases.c.h"
Py_DECREF(right);
- #line 2016 "Python/bytecodes.c"
+ #line 2011 "Python/bytecodes.c"
b = Py_NewRef(res ? Py_True : Py_False);
- #line 2838 "Python/generated_cases.c.h"
+ #line 2833 "Python/generated_cases.c.h"
stack_pointer[-1] = b;
DISPATCH();
}
@@ -2843,15 +2838,15 @@
PyObject *fromlist = stack_pointer[-1];
PyObject *level = stack_pointer[-2];
PyObject *res;
- #line 2020 "Python/bytecodes.c"
+ #line 2015 "Python/bytecodes.c"
PyObject *name = GETITEM(frame->f_code->co_names, oparg);
res = import_name(tstate, frame, name, fromlist, level);
- #line 2850 "Python/generated_cases.c.h"
+ #line 2845 "Python/generated_cases.c.h"
Py_DECREF(level);
Py_DECREF(fromlist);
- #line 2023 "Python/bytecodes.c"
+ #line 2018 "Python/bytecodes.c"
if (res == NULL) goto pop_2_error;
- #line 2855 "Python/generated_cases.c.h"
+ #line 2850 "Python/generated_cases.c.h"
STACK_SHRINK(1);
stack_pointer[-1] = res;
DISPATCH();
@@ -2860,29 +2855,29 @@
TARGET(IMPORT_FROM) {
PyObject *from = stack_pointer[-1];
PyObject *res;
- #line 2027 "Python/bytecodes.c"
+ #line 2022 "Python/bytecodes.c"
PyObject *name = GETITEM(frame->f_code->co_names, oparg);
res = import_from(tstate, from, name);
if (res == NULL) goto error;
- #line 2868 "Python/generated_cases.c.h"
+ #line 2863 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = res;
DISPATCH();
}
TARGET(JUMP_FORWARD) {
- #line 2033 "Python/bytecodes.c"
+ #line 2028 "Python/bytecodes.c"
JUMPBY(oparg);
- #line 2877 "Python/generated_cases.c.h"
+ #line 2872 "Python/generated_cases.c.h"
DISPATCH();
}
TARGET(JUMP_BACKWARD) {
PREDICTED(JUMP_BACKWARD);
- #line 2037 "Python/bytecodes.c"
+ #line 2032 "Python/bytecodes.c"
assert(oparg < INSTR_OFFSET());
JUMPBY(-oparg);
- #line 2886 "Python/generated_cases.c.h"
+ #line 2881 "Python/generated_cases.c.h"
CHECK_EVAL_BREAKER();
DISPATCH();
}
@@ -2890,7 +2885,7 @@
TARGET(POP_JUMP_IF_FALSE) {
PREDICTED(POP_JUMP_IF_FALSE);
PyObject *cond = stack_pointer[-1];
- #line 2043 "Python/bytecodes.c"
+ #line 2038 "Python/bytecodes.c"
if (Py_IsTrue(cond)) {
_Py_DECREF_NO_DEALLOC(cond);
}
@@ -2900,9 +2895,9 @@
}
else {
int err = PyObject_IsTrue(cond);
- #line 2904 "Python/generated_cases.c.h"
+ #line 2899 "Python/generated_cases.c.h"
Py_DECREF(cond);
- #line 2053 "Python/bytecodes.c"
+ #line 2048 "Python/bytecodes.c"
if (err == 0) {
JUMPBY(oparg);
}
@@ -2910,14 +2905,14 @@
if (err < 0) goto pop_1_error;
}
}
- #line 2914 "Python/generated_cases.c.h"
+ #line 2909 "Python/generated_cases.c.h"
STACK_SHRINK(1);
DISPATCH();
}
TARGET(POP_JUMP_IF_TRUE) {
PyObject *cond = stack_pointer[-1];
- #line 2063 "Python/bytecodes.c"
+ #line 2058 "Python/bytecodes.c"
if (Py_IsFalse(cond)) {
_Py_DECREF_NO_DEALLOC(cond);
}
@@ -2927,9 +2922,9 @@
}
else {
int err = PyObject_IsTrue(cond);
- #line 2931 "Python/generated_cases.c.h"
+ #line 2926 "Python/generated_cases.c.h"
Py_DECREF(cond);
- #line 2073 "Python/bytecodes.c"
+ #line 2068 "Python/bytecodes.c"
if (err > 0) {
JUMPBY(oparg);
}
@@ -2937,67 +2932,67 @@
if (err < 0) goto pop_1_error;
}
}
- #line 2941 "Python/generated_cases.c.h"
+ #line 2936 "Python/generated_cases.c.h"
STACK_SHRINK(1);
DISPATCH();
}
TARGET(POP_JUMP_IF_NOT_NONE) {
PyObject *value = stack_pointer[-1];
- #line 2083 "Python/bytecodes.c"
+ #line 2078 "Python/bytecodes.c"
if (!Py_IsNone(value)) {
- #line 2950 "Python/generated_cases.c.h"
+ #line 2945 "Python/generated_cases.c.h"
Py_DECREF(value);
- #line 2085 "Python/bytecodes.c"
+ #line 2080 "Python/bytecodes.c"
JUMPBY(oparg);
}
else {
_Py_DECREF_NO_DEALLOC(value);
}
- #line 2958 "Python/generated_cases.c.h"
+ #line 2953 "Python/generated_cases.c.h"
STACK_SHRINK(1);
DISPATCH();
}
TARGET(POP_JUMP_IF_NONE) {
PyObject *value = stack_pointer[-1];
- #line 2093 "Python/bytecodes.c"
+ #line 2088 "Python/bytecodes.c"
if (Py_IsNone(value)) {
_Py_DECREF_NO_DEALLOC(value);
JUMPBY(oparg);
}
else {
- #line 2971 "Python/generated_cases.c.h"
+ #line 2966 "Python/generated_cases.c.h"
Py_DECREF(value);
- #line 2099 "Python/bytecodes.c"
+ #line 2094 "Python/bytecodes.c"
}
- #line 2975 "Python/generated_cases.c.h"
+ #line 2970 "Python/generated_cases.c.h"
STACK_SHRINK(1);
DISPATCH();
}
TARGET(JUMP_BACKWARD_NO_INTERRUPT) {
- #line 2103 "Python/bytecodes.c"
+ #line 2098 "Python/bytecodes.c"
/* This bytecode is used in the `yield from` or `await` loop.
* If there is an interrupt, we want it handled in the innermost
* generator or coroutine, so we deliberately do not check it here.
* (see bpo-30039).
*/
JUMPBY(-oparg);
- #line 2988 "Python/generated_cases.c.h"
+ #line 2983 "Python/generated_cases.c.h"
DISPATCH();
}
TARGET(GET_LEN) {
PyObject *obj = stack_pointer[-1];
PyObject *len_o;
- #line 2112 "Python/bytecodes.c"
+ #line 2107 "Python/bytecodes.c"
// PUSH(len(TOS))
Py_ssize_t len_i = PyObject_Length(obj);
if (len_i < 0) goto error;
len_o = PyLong_FromSsize_t(len_i);
if (len_o == NULL) goto error;
- #line 3001 "Python/generated_cases.c.h"
+ #line 2996 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = len_o;
DISPATCH();
@@ -3008,16 +3003,16 @@
PyObject *type = stack_pointer[-2];
PyObject *subject = stack_pointer[-3];
PyObject *attrs;
- #line 2120 "Python/bytecodes.c"
+ #line 2115 "Python/bytecodes.c"
// Pop TOS and TOS1. Set TOS to a tuple of attributes on success, or
// None on failure.
assert(PyTuple_CheckExact(names));
attrs = match_class(tstate, subject, type, oparg, names);
- #line 3017 "Python/generated_cases.c.h"
+ #line 3012 "Python/generated_cases.c.h"
Py_DECREF(subject);
Py_DECREF(type);
Py_DECREF(names);
- #line 2125 "Python/bytecodes.c"
+ #line 2120 "Python/bytecodes.c"
if (attrs) {
assert(PyTuple_CheckExact(attrs)); // Success!
}
@@ -3025,7 +3020,7 @@
if (_PyErr_Occurred(tstate)) goto pop_3_error;
attrs = Py_NewRef(Py_None); // Failure!
}
- #line 3029 "Python/generated_cases.c.h"
+ #line 3024 "Python/generated_cases.c.h"
STACK_SHRINK(2);
stack_pointer[-1] = attrs;
DISPATCH();
@@ -3034,10 +3029,10 @@
TARGET(MATCH_MAPPING) {
PyObject *subject = stack_pointer[-1];
PyObject *res;
- #line 2135 "Python/bytecodes.c"
+ #line 2130 "Python/bytecodes.c"
int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_MAPPING;
res = Py_NewRef(match ? Py_True : Py_False);
- #line 3041 "Python/generated_cases.c.h"
+ #line 3036 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = res;
PREDICT(POP_JUMP_IF_FALSE);
@@ -3047,10 +3042,10 @@
TARGET(MATCH_SEQUENCE) {
PyObject *subject = stack_pointer[-1];
PyObject *res;
- #line 2141 "Python/bytecodes.c"
+ #line 2136 "Python/bytecodes.c"
int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_SEQUENCE;
res = Py_NewRef(match ? Py_True : Py_False);
- #line 3054 "Python/generated_cases.c.h"
+ #line 3049 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = res;
PREDICT(POP_JUMP_IF_FALSE);
@@ -3061,11 +3056,11 @@
PyObject *keys = stack_pointer[-1];
PyObject *subject = stack_pointer[-2];
PyObject *values_or_none;
- #line 2147 "Python/bytecodes.c"
+ #line 2142 "Python/bytecodes.c"
// On successful match, PUSH(values). Otherwise, PUSH(None).
values_or_none = match_keys(tstate, subject, keys);
if (values_or_none == NULL) goto error;
- #line 3069 "Python/generated_cases.c.h"
+ #line 3064 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = values_or_none;
DISPATCH();
@@ -3074,14 +3069,14 @@
TARGET(GET_ITER) {
PyObject *iterable = stack_pointer[-1];
PyObject *iter;
- #line 2153 "Python/bytecodes.c"
+ #line 2148 "Python/bytecodes.c"
/* before: [obj]; after [getiter(obj)] */
iter = PyObject_GetIter(iterable);
- #line 3081 "Python/generated_cases.c.h"
+ #line 3076 "Python/generated_cases.c.h"
Py_DECREF(iterable);
- #line 2156 "Python/bytecodes.c"
+ #line 2151 "Python/bytecodes.c"
if (iter == NULL) goto pop_1_error;
- #line 3085 "Python/generated_cases.c.h"
+ #line 3080 "Python/generated_cases.c.h"
stack_pointer[-1] = iter;
DISPATCH();
}
@@ -3089,7 +3084,7 @@
TARGET(GET_YIELD_FROM_ITER) {
PyObject *iterable = stack_pointer[-1];
PyObject *iter;
- #line 2160 "Python/bytecodes.c"
+ #line 2155 "Python/bytecodes.c"
/* before: [obj]; after [getiter(obj)] */
if (PyCoro_CheckExact(iterable)) {
/* `iterable` is a coroutine */
@@ -3112,11 +3107,11 @@
if (iter == NULL) {
goto error;
}
- #line 3116 "Python/generated_cases.c.h"
+ #line 3111 "Python/generated_cases.c.h"
Py_DECREF(iterable);
- #line 2183 "Python/bytecodes.c"
+ #line 2178 "Python/bytecodes.c"
}
- #line 3120 "Python/generated_cases.c.h"
+ #line 3115 "Python/generated_cases.c.h"
stack_pointer[-1] = iter;
PREDICT(LOAD_CONST);
DISPATCH();
@@ -3127,7 +3122,7 @@
static_assert(INLINE_CACHE_ENTRIES_FOR_ITER == 1, "incorrect cache size");
PyObject *iter = stack_pointer[-1];
PyObject *next;
- #line 2202 "Python/bytecodes.c"
+ #line 2197 "Python/bytecodes.c"
#if ENABLE_SPECIALIZATION
_PyForIterCache *cache = (_PyForIterCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
@@ -3158,7 +3153,7 @@
DISPATCH();
}
// Common case: no jump, leave it to the code generator
- #line 3162 "Python/generated_cases.c.h"
+ #line 3157 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = next;
next_instr += 1;
@@ -3166,7 +3161,7 @@
}
TARGET(INSTRUMENTED_FOR_ITER) {
- #line 2235 "Python/bytecodes.c"
+ #line 2230 "Python/bytecodes.c"
_Py_CODEUNIT *here = next_instr-1;
_Py_CODEUNIT *target;
PyObject *iter = TOP();
@@ -3192,14 +3187,14 @@
target = next_instr + INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1;
}
INSTRUMENTED_JUMP(here, target, PY_MONITORING_EVENT_BRANCH);
- #line 3196 "Python/generated_cases.c.h"
+ #line 3191 "Python/generated_cases.c.h"
DISPATCH();
}
TARGET(FOR_ITER_LIST) {
PyObject *iter = stack_pointer[-1];
PyObject *next;
- #line 2263 "Python/bytecodes.c"
+ #line 2258 "Python/bytecodes.c"
DEOPT_IF(Py_TYPE(iter) != &PyListIter_Type, FOR_ITER);
_PyListIterObject *it = (_PyListIterObject *)iter;
STAT_INC(FOR_ITER, hit);
@@ -3219,7 +3214,7 @@
DISPATCH();
end_for_iter_list:
// Common case: no jump, leave it to the code generator
- #line 3223 "Python/generated_cases.c.h"
+ #line 3218 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = next;
next_instr += 1;
@@ -3229,7 +3224,7 @@
TARGET(FOR_ITER_TUPLE) {
PyObject *iter = stack_pointer[-1];
PyObject *next;
- #line 2285 "Python/bytecodes.c"
+ #line 2280 "Python/bytecodes.c"
_PyTupleIterObject *it = (_PyTupleIterObject *)iter;
DEOPT_IF(Py_TYPE(it) != &PyTupleIter_Type, FOR_ITER);
STAT_INC(FOR_ITER, hit);
@@ -3249,7 +3244,7 @@
DISPATCH();
end_for_iter_tuple:
// Common case: no jump, leave it to the code generator
- #line 3253 "Python/generated_cases.c.h"
+ #line 3248 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = next;
next_instr += 1;
@@ -3259,7 +3254,7 @@
TARGET(FOR_ITER_RANGE) {
PyObject *iter = stack_pointer[-1];
PyObject *next;
- #line 2307 "Python/bytecodes.c"
+ #line 2302 "Python/bytecodes.c"
_PyRangeIterObject *r = (_PyRangeIterObject *)iter;
DEOPT_IF(Py_TYPE(r) != &PyRangeIter_Type, FOR_ITER);
STAT_INC(FOR_ITER, hit);
@@ -3277,7 +3272,7 @@
if (next == NULL) {
goto error;
}
- #line 3281 "Python/generated_cases.c.h"
+ #line 3276 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = next;
next_instr += 1;
@@ -3286,7 +3281,7 @@
TARGET(FOR_ITER_GEN) {
PyObject *iter = stack_pointer[-1];
- #line 2327 "Python/bytecodes.c"
+ #line 2322 "Python/bytecodes.c"
PyGenObject *gen = (PyGenObject *)iter;
DEOPT_IF(Py_TYPE(gen) != &PyGen_Type, FOR_ITER);
DEOPT_IF(gen->gi_frame_state >= FRAME_EXECUTING, FOR_ITER);
@@ -3301,14 +3296,14 @@
assert(next_instr[oparg].op.code == END_FOR ||
next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
DISPATCH_INLINED(gen_frame);
- #line 3305 "Python/generated_cases.c.h"
+ #line 3300 "Python/generated_cases.c.h"
}
TARGET(BEFORE_ASYNC_WITH) {
PyObject *mgr = stack_pointer[-1];
PyObject *exit;
PyObject *res;
- #line 2344 "Python/bytecodes.c"
+ #line 2339 "Python/bytecodes.c"
PyObject *enter = _PyObject_LookupSpecial(mgr, &_Py_ID(__aenter__));
if (enter == NULL) {
if (!_PyErr_Occurred(tstate)) {
@@ -3331,16 +3326,16 @@
Py_DECREF(enter);
goto error;
}
- #line 3335 "Python/generated_cases.c.h"
+ #line 3330 "Python/generated_cases.c.h"
Py_DECREF(mgr);
- #line 2367 "Python/bytecodes.c"
+ #line 2362 "Python/bytecodes.c"
res = _PyObject_CallNoArgs(enter);
Py_DECREF(enter);
if (res == NULL) {
Py_DECREF(exit);
if (true) goto pop_1_error;
}
- #line 3344 "Python/generated_cases.c.h"
+ #line 3339 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = res;
stack_pointer[-2] = exit;
@@ -3352,7 +3347,7 @@
PyObject *mgr = stack_pointer[-1];
PyObject *exit;
PyObject *res;
- #line 2377 "Python/bytecodes.c"
+ #line 2372 "Python/bytecodes.c"
/* pop the context manager, push its __exit__ and the
* value returned from calling its __enter__
*/
@@ -3378,16 +3373,16 @@
Py_DECREF(enter);
goto error;
}
- #line 3382 "Python/generated_cases.c.h"
+ #line 3377 "Python/generated_cases.c.h"
Py_DECREF(mgr);
- #line 2403 "Python/bytecodes.c"
+ #line 2398 "Python/bytecodes.c"
res = _PyObject_CallNoArgs(enter);
Py_DECREF(enter);
if (res == NULL) {
Py_DECREF(exit);
if (true) goto pop_1_error;
}
- #line 3391 "Python/generated_cases.c.h"
+ #line 3386 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = res;
stack_pointer[-2] = exit;
@@ -3399,7 +3394,7 @@
PyObject *lasti = stack_pointer[-3];
PyObject *exit_func = stack_pointer[-4];
PyObject *res;
- #line 2412 "Python/bytecodes.c"
+ #line 2407 "Python/bytecodes.c"
/* At the top of the stack are 4 values:
- val: TOP = exc_info()
- unused: SECOND = previous exception
@@ -3420,7 +3415,7 @@
res = PyObject_Vectorcall(exit_func, stack + 1,
3 | PY_VECTORCALL_ARGUMENTS_OFFSET, NULL);
if (res == NULL) goto error;
- #line 3424 "Python/generated_cases.c.h"
+ #line 3419 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = res;
DISPATCH();
@@ -3429,7 +3424,7 @@
TARGET(PUSH_EXC_INFO) {
PyObject *new_exc = stack_pointer[-1];
PyObject *prev_exc;
- #line 2435 "Python/bytecodes.c"
+ #line 2430 "Python/bytecodes.c"
_PyErr_StackItem *exc_info = tstate->exc_info;
if (exc_info->exc_value != NULL) {
prev_exc = exc_info->exc_value;
@@ -3439,7 +3434,7 @@
}
assert(PyExceptionInstance_Check(new_exc));
exc_info->exc_value = Py_NewRef(new_exc);
- #line 3443 "Python/generated_cases.c.h"
+ #line 3438 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = new_exc;
stack_pointer[-2] = prev_exc;
@@ -3453,7 +3448,7 @@
uint32_t type_version = read_u32(&next_instr[1].cache);
uint32_t keys_version = read_u32(&next_instr[3].cache);
PyObject *descr = read_obj(&next_instr[5].cache);
- #line 2447 "Python/bytecodes.c"
+ #line 2442 "Python/bytecodes.c"
/* Cached method object */
PyTypeObject *self_cls = Py_TYPE(self);
assert(type_version != 0);
@@ -3470,7 +3465,7 @@
assert(_PyType_HasFeature(Py_TYPE(res2), Py_TPFLAGS_METHOD_DESCRIPTOR));
res = self;
assert(oparg & 1);
- #line 3474 "Python/generated_cases.c.h"
+ #line 3469 "Python/generated_cases.c.h"
STACK_GROW(((oparg & 1) ? 1 : 0));
stack_pointer[-1] = res;
if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; }
@@ -3484,7 +3479,7 @@
PyObject *res;
uint32_t type_version = read_u32(&next_instr[1].cache);
PyObject *descr = read_obj(&next_instr[5].cache);
- #line 2466 "Python/bytecodes.c"
+ #line 2461 "Python/bytecodes.c"
PyTypeObject *self_cls = Py_TYPE(self);
DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR);
assert(self_cls->tp_dictoffset == 0);
@@ -3494,7 +3489,7 @@
res2 = Py_NewRef(descr);
res = self;
assert(oparg & 1);
- #line 3498 "Python/generated_cases.c.h"
+ #line 3493 "Python/generated_cases.c.h"
STACK_GROW(((oparg & 1) ? 1 : 0));
stack_pointer[-1] = res;
if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; }
@@ -3508,7 +3503,7 @@
PyObject *res;
uint32_t type_version = read_u32(&next_instr[1].cache);
PyObject *descr = read_obj(&next_instr[5].cache);
- #line 2478 "Python/bytecodes.c"
+ #line 2473 "Python/bytecodes.c"
PyTypeObject *self_cls = Py_TYPE(self);
DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR);
Py_ssize_t dictoffset = self_cls->tp_dictoffset;
@@ -3522,7 +3517,7 @@
res2 = Py_NewRef(descr);
res = self;
assert(oparg & 1);
- #line 3526 "Python/generated_cases.c.h"
+ #line 3521 "Python/generated_cases.c.h"
STACK_GROW(((oparg & 1) ? 1 : 0));
stack_pointer[-1] = res;
if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; }
@@ -3531,16 +3526,16 @@
}
TARGET(KW_NAMES) {
- #line 2494 "Python/bytecodes.c"
+ #line 2489 "Python/bytecodes.c"
assert(kwnames == NULL);
assert(oparg < PyTuple_GET_SIZE(frame->f_code->co_consts));
kwnames = GETITEM(frame->f_code->co_consts, oparg);
- #line 3539 "Python/generated_cases.c.h"
+ #line 3534 "Python/generated_cases.c.h"
DISPATCH();
}
TARGET(INSTRUMENTED_CALL) {
- #line 2500 "Python/bytecodes.c"
+ #line 2495 "Python/bytecodes.c"
int is_meth = PEEK(oparg+2) != NULL;
int total_args = oparg + is_meth;
PyObject *function = PEEK(total_args + 1);
@@ -3553,7 +3548,7 @@
_PyCallCache *cache = (_PyCallCache *)next_instr;
INCREMENT_ADAPTIVE_COUNTER(cache->counter);
GO_TO_INSTRUCTION(CALL);
- #line 3557 "Python/generated_cases.c.h"
+ #line 3552 "Python/generated_cases.c.h"
}
TARGET(CALL) {
@@ -3563,7 +3558,7 @@
PyObject *callable = stack_pointer[-(1 + oparg)];
PyObject *method = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 2545 "Python/bytecodes.c"
+ #line 2540 "Python/bytecodes.c"
int is_meth = method != NULL;
int total_args = oparg;
if (is_meth) {
@@ -3645,7 +3640,7 @@
Py_DECREF(args[i]);
}
if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
- #line 3649 "Python/generated_cases.c.h"
+ #line 3644 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -3657,7 +3652,7 @@
TARGET(CALL_BOUND_METHOD_EXACT_ARGS) {
PyObject *callable = stack_pointer[-(1 + oparg)];
PyObject *method = stack_pointer[-(2 + oparg)];
- #line 2633 "Python/bytecodes.c"
+ #line 2628 "Python/bytecodes.c"
DEOPT_IF(method != NULL, CALL);
DEOPT_IF(Py_TYPE(callable) != &PyMethod_Type, CALL);
STAT_INC(CALL, hit);
@@ -3667,7 +3662,7 @@
PEEK(oparg + 2) = Py_NewRef(meth); // method
Py_DECREF(callable);
GO_TO_INSTRUCTION(CALL_PY_EXACT_ARGS);
- #line 3671 "Python/generated_cases.c.h"
+ #line 3666 "Python/generated_cases.c.h"
}
TARGET(CALL_PY_EXACT_ARGS) {
@@ -3676,7 +3671,7 @@
PyObject *callable = stack_pointer[-(1 + oparg)];
PyObject *method = stack_pointer[-(2 + oparg)];
uint32_t func_version = read_u32(&next_instr[1].cache);
- #line 2645 "Python/bytecodes.c"
+ #line 2640 "Python/bytecodes.c"
assert(kwnames == NULL);
DEOPT_IF(tstate->interp->eval_frame, CALL);
int is_meth = method != NULL;
@@ -3702,7 +3697,7 @@
JUMPBY(INLINE_CACHE_ENTRIES_CALL);
frame->return_offset = 0;
DISPATCH_INLINED(new_frame);
- #line 3706 "Python/generated_cases.c.h"
+ #line 3701 "Python/generated_cases.c.h"
}
TARGET(CALL_PY_WITH_DEFAULTS) {
@@ -3710,7 +3705,7 @@
PyObject *callable = stack_pointer[-(1 + oparg)];
PyObject *method = stack_pointer[-(2 + oparg)];
uint32_t func_version = read_u32(&next_instr[1].cache);
- #line 2673 "Python/bytecodes.c"
+ #line 2668 "Python/bytecodes.c"
assert(kwnames == NULL);
DEOPT_IF(tstate->interp->eval_frame, CALL);
int is_meth = method != NULL;
@@ -3746,7 +3741,7 @@
JUMPBY(INLINE_CACHE_ENTRIES_CALL);
frame->return_offset = 0;
DISPATCH_INLINED(new_frame);
- #line 3750 "Python/generated_cases.c.h"
+ #line 3745 "Python/generated_cases.c.h"
}
TARGET(CALL_NO_KW_TYPE_1) {
@@ -3754,7 +3749,7 @@
PyObject *callable = stack_pointer[-(1 + oparg)];
PyObject *null = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 2711 "Python/bytecodes.c"
+ #line 2706 "Python/bytecodes.c"
assert(kwnames == NULL);
assert(oparg == 1);
DEOPT_IF(null != NULL, CALL);
@@ -3764,7 +3759,7 @@
res = Py_NewRef(Py_TYPE(obj));
Py_DECREF(obj);
Py_DECREF(&PyType_Type); // I.e., callable
- #line 3768 "Python/generated_cases.c.h"
+ #line 3763 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -3777,7 +3772,7 @@
PyObject *callable = stack_pointer[-(1 + oparg)];
PyObject *null = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 2723 "Python/bytecodes.c"
+ #line 2718 "Python/bytecodes.c"
assert(kwnames == NULL);
assert(oparg == 1);
DEOPT_IF(null != NULL, CALL);
@@ -3788,7 +3783,7 @@
Py_DECREF(arg);
Py_DECREF(&PyUnicode_Type); // I.e., callable
if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
- #line 3792 "Python/generated_cases.c.h"
+ #line 3787 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -3802,7 +3797,7 @@
PyObject *callable = stack_pointer[-(1 + oparg)];
PyObject *null = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 2737 "Python/bytecodes.c"
+ #line 2732 "Python/bytecodes.c"
assert(kwnames == NULL);
assert(oparg == 1);
DEOPT_IF(null != NULL, CALL);
@@ -3813,7 +3808,7 @@
Py_DECREF(arg);
Py_DECREF(&PyTuple_Type); // I.e., tuple
if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
- #line 3817 "Python/generated_cases.c.h"
+ #line 3812 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -3827,7 +3822,7 @@
PyObject *callable = stack_pointer[-(1 + oparg)];
PyObject *method = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 2751 "Python/bytecodes.c"
+ #line 2746 "Python/bytecodes.c"
int is_meth = method != NULL;
int total_args = oparg;
if (is_meth) {
@@ -3849,7 +3844,7 @@
}
Py_DECREF(tp);
if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
- #line 3853 "Python/generated_cases.c.h"
+ #line 3848 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -3863,7 +3858,7 @@
PyObject *callable = stack_pointer[-(1 + oparg)];
PyObject *method = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 2776 "Python/bytecodes.c"
+ #line 2771 "Python/bytecodes.c"
/* Builtin METH_O functions */
assert(kwnames == NULL);
int is_meth = method != NULL;
@@ -3891,7 +3886,7 @@
Py_DECREF(arg);
Py_DECREF(callable);
if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
- #line 3895 "Python/generated_cases.c.h"
+ #line 3890 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -3905,7 +3900,7 @@
PyObject *callable = stack_pointer[-(1 + oparg)];
PyObject *method = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 2807 "Python/bytecodes.c"
+ #line 2802 "Python/bytecodes.c"
/* Builtin METH_FASTCALL functions, without keywords */
assert(kwnames == NULL);
int is_meth = method != NULL;
@@ -3937,7 +3932,7 @@
'invalid'). In those cases an exception is set, so we must
handle it.
*/
- #line 3941 "Python/generated_cases.c.h"
+ #line 3936 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -3951,7 +3946,7 @@
PyObject *callable = stack_pointer[-(1 + oparg)];
PyObject *method = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 2842 "Python/bytecodes.c"
+ #line 2837 "Python/bytecodes.c"
/* Builtin METH_FASTCALL | METH_KEYWORDS functions */
int is_meth = method != NULL;
int total_args = oparg;
@@ -3983,7 +3978,7 @@
}
Py_DECREF(callable);
if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
- #line 3987 "Python/generated_cases.c.h"
+ #line 3982 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -3997,7 +3992,7 @@
PyObject *callable = stack_pointer[-(1 + oparg)];
PyObject *method = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 2877 "Python/bytecodes.c"
+ #line 2872 "Python/bytecodes.c"
assert(kwnames == NULL);
/* len(o) */
int is_meth = method != NULL;
@@ -4022,7 +4017,7 @@
Py_DECREF(callable);
Py_DECREF(arg);
if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
- #line 4026 "Python/generated_cases.c.h"
+ #line 4021 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -4035,7 +4030,7 @@
PyObject *callable = stack_pointer[-(1 + oparg)];
PyObject *method = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 2904 "Python/bytecodes.c"
+ #line 2899 "Python/bytecodes.c"
assert(kwnames == NULL);
/* isinstance(o, o2) */
int is_meth = method != NULL;
@@ -4062,7 +4057,7 @@
Py_DECREF(cls);
Py_DECREF(callable);
if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
- #line 4066 "Python/generated_cases.c.h"
+ #line 4061 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -4074,7 +4069,7 @@
PyObject **args = (stack_pointer - oparg);
PyObject *self = stack_pointer[-(1 + oparg)];
PyObject *method = stack_pointer[-(2 + oparg)];
- #line 2934 "Python/bytecodes.c"
+ #line 2929 "Python/bytecodes.c"
assert(kwnames == NULL);
assert(oparg == 1);
assert(method != NULL);
@@ -4092,14 +4087,14 @@
JUMPBY(INLINE_CACHE_ENTRIES_CALL + 1);
assert(next_instr[-1].op.code == POP_TOP);
DISPATCH();
- #line 4096 "Python/generated_cases.c.h"
+ #line 4091 "Python/generated_cases.c.h"
}
TARGET(CALL_NO_KW_METHOD_DESCRIPTOR_O) {
PyObject **args = (stack_pointer - oparg);
PyObject *method = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 2954 "Python/bytecodes.c"
+ #line 2949 "Python/bytecodes.c"
assert(kwnames == NULL);
int is_meth = method != NULL;
int total_args = oparg;
@@ -4130,7 +4125,7 @@
Py_DECREF(arg);
Py_DECREF(callable);
if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
- #line 4134 "Python/generated_cases.c.h"
+ #line 4129 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -4143,7 +4138,7 @@
PyObject **args = (stack_pointer - oparg);
PyObject *method = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 2988 "Python/bytecodes.c"
+ #line 2983 "Python/bytecodes.c"
int is_meth = method != NULL;
int total_args = oparg;
if (is_meth) {
@@ -4172,7 +4167,7 @@
}
Py_DECREF(callable);
if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
- #line 4176 "Python/generated_cases.c.h"
+ #line 4171 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -4185,7 +4180,7 @@
PyObject **args = (stack_pointer - oparg);
PyObject *method = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 3020 "Python/bytecodes.c"
+ #line 3015 "Python/bytecodes.c"
assert(kwnames == NULL);
assert(oparg == 0 || oparg == 1);
int is_meth = method != NULL;
@@ -4214,7 +4209,7 @@
Py_DECREF(self);
Py_DECREF(callable);
if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
- #line 4218 "Python/generated_cases.c.h"
+ #line 4213 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -4227,7 +4222,7 @@
PyObject **args = (stack_pointer - oparg);
PyObject *method = stack_pointer[-(2 + oparg)];
PyObject *res;
- #line 3052 "Python/bytecodes.c"
+ #line 3047 "Python/bytecodes.c"
assert(kwnames == NULL);
int is_meth = method != NULL;
int total_args = oparg;
@@ -4255,7 +4250,7 @@
}
Py_DECREF(callable);
if (res == NULL) { STACK_SHRINK(oparg); goto pop_2_error; }
- #line 4259 "Python/generated_cases.c.h"
+ #line 4254 "Python/generated_cases.c.h"
STACK_SHRINK(oparg);
STACK_SHRINK(1);
stack_pointer[-1] = res;
@@ -4265,9 +4260,9 @@
}
TARGET(INSTRUMENTED_CALL_FUNCTION_EX) {
- #line 3083 "Python/bytecodes.c"
+ #line 3078 "Python/bytecodes.c"
GO_TO_INSTRUCTION(CALL_FUNCTION_EX);
- #line 4271 "Python/generated_cases.c.h"
+ #line 4266 "Python/generated_cases.c.h"
}
TARGET(CALL_FUNCTION_EX) {
@@ -4276,7 +4271,7 @@
PyObject *callargs = stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))];
PyObject *func = stack_pointer[-(2 + ((oparg & 1) ? 1 : 0))];
PyObject *result;
- #line 3087 "Python/bytecodes.c"
+ #line 3082 "Python/bytecodes.c"
// DICT_MERGE is called before this opcode if there are kwargs.
// It converts all dict subtypes in kwargs into regular dicts.
assert(kwargs == NULL || PyDict_CheckExact(kwargs));
@@ -4319,14 +4314,14 @@
else {
result = PyObject_Call(func, callargs, kwargs);
}
- #line 4323 "Python/generated_cases.c.h"
+ #line 4318 "Python/generated_cases.c.h"
Py_DECREF(func);
Py_DECREF(callargs);
Py_XDECREF(kwargs);
- #line 3130 "Python/bytecodes.c"
+ #line 3125 "Python/bytecodes.c"
assert(PEEK(3 + (oparg & 1)) == NULL);
if (result == NULL) { STACK_SHRINK(((oparg & 1) ? 1 : 0)); goto pop_3_error; }
- #line 4330 "Python/generated_cases.c.h"
+ #line 4325 "Python/generated_cases.c.h"
STACK_SHRINK(((oparg & 1) ? 1 : 0));
STACK_SHRINK(2);
stack_pointer[-1] = result;
@@ -4341,7 +4336,7 @@
PyObject *kwdefaults = (oparg & 0x02) ? stack_pointer[-(1 + ((oparg & 0x08) ? 1 : 0) + ((oparg & 0x04) ? 1 : 0) + ((oparg & 0x02) ? 1 : 0))] : NULL;
PyObject *defaults = (oparg & 0x01) ? stack_pointer[-(1 + ((oparg & 0x08) ? 1 : 0) + ((oparg & 0x04) ? 1 : 0) + ((oparg & 0x02) ? 1 : 0) + ((oparg & 0x01) ? 1 : 0))] : NULL;
PyObject *func;
- #line 3140 "Python/bytecodes.c"
+ #line 3135 "Python/bytecodes.c"
PyFunctionObject *func_obj = (PyFunctionObject *)
PyFunction_New(codeobj, GLOBALS());
@@ -4370,14 +4365,14 @@
func_obj->func_version = ((PyCodeObject *)codeobj)->co_version;
func = (PyObject *)func_obj;
- #line 4374 "Python/generated_cases.c.h"
+ #line 4369 "Python/generated_cases.c.h"
STACK_SHRINK(((oparg & 0x01) ? 1 : 0) + ((oparg & 0x02) ? 1 : 0) + ((oparg & 0x04) ? 1 : 0) + ((oparg & 0x08) ? 1 : 0));
stack_pointer[-1] = func;
DISPATCH();
}
TARGET(RETURN_GENERATOR) {
- #line 3171 "Python/bytecodes.c"
+ #line 3166 "Python/bytecodes.c"
assert(PyFunction_Check(frame->f_funcobj));
PyFunctionObject *func = (PyFunctionObject *)frame->f_funcobj;
PyGenObject *gen = (PyGenObject *)_Py_MakeCoro(func);
@@ -4398,7 +4393,7 @@
frame = cframe.current_frame = prev;
_PyFrame_StackPush(frame, (PyObject *)gen);
goto resume_frame;
- #line 4402 "Python/generated_cases.c.h"
+ #line 4397 "Python/generated_cases.c.h"
}
TARGET(BUILD_SLICE) {
@@ -4406,15 +4401,15 @@
PyObject *stop = stack_pointer[-(1 + ((oparg == 3) ? 1 : 0))];
PyObject *start = stack_pointer[-(2 + ((oparg == 3) ? 1 : 0))];
PyObject *slice;
- #line 3194 "Python/bytecodes.c"
+ #line 3189 "Python/bytecodes.c"
slice = PySlice_New(start, stop, step);
- #line 4412 "Python/generated_cases.c.h"
+ #line 4407 "Python/generated_cases.c.h"
Py_DECREF(start);
Py_DECREF(stop);
Py_XDECREF(step);
- #line 3196 "Python/bytecodes.c"
+ #line 3191 "Python/bytecodes.c"
if (slice == NULL) { STACK_SHRINK(((oparg == 3) ? 1 : 0)); goto pop_2_error; }
- #line 4418 "Python/generated_cases.c.h"
+ #line 4413 "Python/generated_cases.c.h"
STACK_SHRINK(((oparg == 3) ? 1 : 0));
STACK_SHRINK(1);
stack_pointer[-1] = slice;
@@ -4425,7 +4420,7 @@
PyObject *fmt_spec = ((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? stack_pointer[-((((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 1 : 0))] : NULL;
PyObject *value = stack_pointer[-(1 + (((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 1 : 0))];
PyObject *result;
- #line 3200 "Python/bytecodes.c"
+ #line 3195 "Python/bytecodes.c"
/* Handles f-string value formatting. */
PyObject *(*conv_fn)(PyObject *);
int which_conversion = oparg & FVC_MASK;
@@ -4460,7 +4455,7 @@
Py_DECREF(value);
Py_XDECREF(fmt_spec);
if (result == NULL) { STACK_SHRINK((((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 1 : 0)); goto pop_1_error; }
- #line 4464 "Python/generated_cases.c.h"
+ #line 4459 "Python/generated_cases.c.h"
STACK_SHRINK((((oparg & FVS_MASK) == FVS_HAVE_SPEC) ? 1 : 0));
stack_pointer[-1] = result;
DISPATCH();
@@ -4469,10 +4464,10 @@
TARGET(COPY) {
PyObject *bottom = stack_pointer[-(1 + (oparg-1))];
PyObject *top;
- #line 3237 "Python/bytecodes.c"
+ #line 3232 "Python/bytecodes.c"
assert(oparg > 0);
top = Py_NewRef(bottom);
- #line 4476 "Python/generated_cases.c.h"
+ #line 4471 "Python/generated_cases.c.h"
STACK_GROW(1);
stack_pointer[-1] = top;
DISPATCH();
@@ -4484,7 +4479,7 @@
PyObject *rhs = stack_pointer[-1];
PyObject *lhs = stack_pointer[-2];
PyObject *res;
- #line 3242 "Python/bytecodes.c"
+ #line 3237 "Python/bytecodes.c"
#if ENABLE_SPECIALIZATION
_PyBinaryOpCache *cache = (_PyBinaryOpCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
@@ -4499,12 +4494,12 @@
assert((unsigned)oparg < Py_ARRAY_LENGTH(binary_ops));
assert(binary_ops[oparg]);
res = binary_ops[oparg](lhs, rhs);
- #line 4503 "Python/generated_cases.c.h"
+ #line 4498 "Python/generated_cases.c.h"
Py_DECREF(lhs);
Py_DECREF(rhs);
- #line 3257 "Python/bytecodes.c"
+ #line 3252 "Python/bytecodes.c"
if (res == NULL) goto pop_2_error;
- #line 4508 "Python/generated_cases.c.h"
+ #line 4503 "Python/generated_cases.c.h"
STACK_SHRINK(1);
stack_pointer[-1] = res;
next_instr += 1;
@@ -4514,16 +4509,16 @@
TARGET(SWAP) {
PyObject *top = stack_pointer[-1];
PyObject *bottom = stack_pointer[-(2 + (oparg-2))];
- #line 3262 "Python/bytecodes.c"
+ #line 3257 "Python/bytecodes.c"
assert(oparg >= 2);
- #line 4520 "Python/generated_cases.c.h"
+ #line 4515 "Python/generated_cases.c.h"
stack_pointer[-1] = bottom;
stack_pointer[-(2 + (oparg-2))] = top;
DISPATCH();
}
TARGET(INSTRUMENTED_LINE) {
- #line 3266 "Python/bytecodes.c"
+ #line 3261 "Python/bytecodes.c"
_Py_CODEUNIT *here = next_instr-1;
_PyFrame_SetStackPointer(frame, stack_pointer);
int original_opcode = _Py_call_instrumentation_line(
@@ -4543,11 +4538,11 @@
}
opcode = original_opcode;
DISPATCH_GOTO();
- #line 4547 "Python/generated_cases.c.h"
+ #line 4542 "Python/generated_cases.c.h"
}
TARGET(INSTRUMENTED_INSTRUCTION) {
- #line 3288 "Python/bytecodes.c"
+ #line 3283 "Python/bytecodes.c"
int next_opcode = _Py_call_instrumentation_instruction(
tstate, frame, next_instr-1);
if (next_opcode < 0) goto error;
@@ -4559,26 +4554,26 @@
assert(next_opcode > 0 && next_opcode < 256);
opcode = next_opcode;
DISPATCH_GOTO();
- #line 4563 "Python/generated_cases.c.h"
+ #line 4558 "Python/generated_cases.c.h"
}
TARGET(INSTRUMENTED_JUMP_FORWARD) {
- #line 3302 "Python/bytecodes.c"
+ #line 3297 "Python/bytecodes.c"
INSTRUMENTED_JUMP(next_instr-1, next_instr+oparg, PY_MONITORING_EVENT_JUMP);
- #line 4569 "Python/generated_cases.c.h"
+ #line 4564 "Python/generated_cases.c.h"
DISPATCH();
}
TARGET(INSTRUMENTED_JUMP_BACKWARD) {
- #line 3306 "Python/bytecodes.c"
+ #line 3301 "Python/bytecodes.c"
INSTRUMENTED_JUMP(next_instr-1, next_instr-oparg, PY_MONITORING_EVENT_JUMP);
- #line 4576 "Python/generated_cases.c.h"
+ #line 4571 "Python/generated_cases.c.h"
CHECK_EVAL_BREAKER();
DISPATCH();
}
TARGET(INSTRUMENTED_POP_JUMP_IF_TRUE) {
- #line 3311 "Python/bytecodes.c"
+ #line 3306 "Python/bytecodes.c"
PyObject *cond = POP();
int err = PyObject_IsTrue(cond);
Py_DECREF(cond);
@@ -4587,12 +4582,12 @@
assert(err == 0 || err == 1);
int offset = err*oparg;
INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
- #line 4591 "Python/generated_cases.c.h"
+ #line 4586 "Python/generated_cases.c.h"
DISPATCH();
}
TARGET(INSTRUMENTED_POP_JUMP_IF_FALSE) {
- #line 3322 "Python/bytecodes.c"
+ #line 3317 "Python/bytecodes.c"
PyObject *cond = POP();
int err = PyObject_IsTrue(cond);
Py_DECREF(cond);
@@ -4601,12 +4596,12 @@
assert(err == 0 || err == 1);
int offset = (1-err)*oparg;
INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
- #line 4605 "Python/generated_cases.c.h"
+ #line 4600 "Python/generated_cases.c.h"
DISPATCH();
}
TARGET(INSTRUMENTED_POP_JUMP_IF_NONE) {
- #line 3333 "Python/bytecodes.c"
+ #line 3328 "Python/bytecodes.c"
PyObject *value = POP();
_Py_CODEUNIT *here = next_instr-1;
int offset;
@@ -4619,12 +4614,12 @@
offset = 0;
}
INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
- #line 4623 "Python/generated_cases.c.h"
+ #line 4618 "Python/generated_cases.c.h"
DISPATCH();
}
TARGET(INSTRUMENTED_POP_JUMP_IF_NOT_NONE) {
- #line 3348 "Python/bytecodes.c"
+ #line 3343 "Python/bytecodes.c"
PyObject *value = POP();
_Py_CODEUNIT *here = next_instr-1;
int offset;
@@ -4637,30 +4632,30 @@
offset = oparg;
}
INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
- #line 4641 "Python/generated_cases.c.h"
+ #line 4636 "Python/generated_cases.c.h"
DISPATCH();
}
TARGET(EXTENDED_ARG) {
- #line 3363 "Python/bytecodes.c"
+ #line 3358 "Python/bytecodes.c"
assert(oparg);
opcode = next_instr->op.code;
oparg = oparg << 8 | next_instr->op.arg;
PRE_DISPATCH_GOTO();
DISPATCH_GOTO();
- #line 4652 "Python/generated_cases.c.h"
+ #line 4647 "Python/generated_cases.c.h"
}
TARGET(CACHE) {
- #line 3371 "Python/bytecodes.c"
+ #line 3366 "Python/bytecodes.c"
assert(0 && "Executing a cache.");
Py_UNREACHABLE();
- #line 4659 "Python/generated_cases.c.h"
+ #line 4654 "Python/generated_cases.c.h"
}
TARGET(RESERVED) {
- #line 3376 "Python/bytecodes.c"
+ #line 3371 "Python/bytecodes.c"
assert(0 && "Executing RESERVED instruction.");
Py_UNREACHABLE();
- #line 4666 "Python/generated_cases.c.h"
+ #line 4661 "Python/generated_cases.c.h"
}
diff --git a/Python/instrumentation.c b/Python/instrumentation.c
index 853e8a10e81463..8334f596eb3e19 100644
--- a/Python/instrumentation.c
+++ b/Python/instrumentation.c
@@ -16,14 +16,14 @@
static PyObject DISABLE =
{
- _PyObject_IMMORTAL_REFCNT,
- &PyBaseObject_Type
+ .ob_refcnt = _Py_IMMORTAL_REFCNT,
+ .ob_type = &PyBaseObject_Type
};
PyObject _PyInstrumentation_MISSING =
{
- _PyObject_IMMORTAL_REFCNT,
- &PyBaseObject_Type
+ .ob_refcnt = _Py_IMMORTAL_REFCNT,
+ .ob_type = &PyBaseObject_Type
};
static const int8_t EVENT_FOR_OPCODE[256] = {
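/*
 * Illustrative sketch (not part of the diff above): the instrumentation.c
 * hunk switches the two static singletons to designated initializers and to
 * the shared _Py_IMMORTAL_REFCNT constant.  The same pattern in isolation,
 * assuming the CPython 3.12 headers are on the include path; the object name
 * below is hypothetical.
 */
#include "Python.h"

static PyObject my_sentinel = {
    .ob_refcnt = _Py_IMMORTAL_REFCNT,   /* refcount pinned; never deallocated */
    .ob_type = &PyBaseObject_Type,      /* behaves like a bare object() */
};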
diff --git a/Python/legacy_tracing.c b/Python/legacy_tracing.c
index cf345bddda79b0..e509e63a087a52 100644
--- a/Python/legacy_tracing.c
+++ b/Python/legacy_tracing.c
@@ -324,7 +324,7 @@ sys_trace_exception_handled(
PyTypeObject _PyLegacyEventHandler_Type = {
- _PyVarObject_IMMORTAL_INIT(&PyType_Type, 0),
+ PyVarObject_HEAD_INIT(&PyType_Type, 0)
"sys.legacy_event_handler",
sizeof(_PyLegacyEventHandler),
.tp_dealloc = (destructor)PyObject_Free,
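/*
 * Illustrative sketch (not part of the diff above): PyVarObject_HEAD_INIT is
 * the public macro the legacy_tracing.c hunk switches to; it fills the
 * refcount, type and size fields of a statically allocated type.  A minimal
 * hypothetical type using the same header initializer:
 */
#include "Python.h"

static PyTypeObject Example_Type = {
    PyVarObject_HEAD_INIT(&PyType_Type, 0)
    .tp_name = "example.Example",
    .tp_basicsize = sizeof(PyObject),
    .tp_flags = Py_TPFLAGS_DEFAULT,
};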
diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c
index d6627bc6b7e86b..a510c9b22168bc 100644
--- a/Python/pylifecycle.c
+++ b/Python/pylifecycle.c
@@ -808,11 +808,6 @@ pycore_interp_init(PyThreadState *tstate)
PyStatus status;
PyObject *sysmod = NULL;
- // This is a temporary fix until we have immortal objects.
- // (See _PyType_InitCache() in typeobject.c.)
- extern void _PyType_FixCacheRefcounts(void);
- _PyType_FixCacheRefcounts();
-
// Create singletons before the first PyType_Ready() call, since
// PyType_Ready() uses singletons like the Unicode empty string (tp_doc)
// and the empty tuple singletons (tp_bases).
diff --git a/Python/pystate.c b/Python/pystate.c
index 1e04887ef04a2c..d108cfc7e50a0a 100644
--- a/Python/pystate.c
+++ b/Python/pystate.c
@@ -60,23 +60,43 @@ extern "C" {
For each of these functions, the GIL must be held by the current thread.
*/
+
+#ifdef HAVE_THREAD_LOCAL
+_Py_thread_local PyThreadState *_Py_tss_tstate = NULL;
+#endif
+
static inline PyThreadState *
-current_fast_get(_PyRuntimeState *runtime)
+current_fast_get(_PyRuntimeState *Py_UNUSED(runtime))
{
- return (PyThreadState*)_Py_atomic_load_relaxed(&runtime->tstate_current);
+#ifdef HAVE_THREAD_LOCAL
+ return _Py_tss_tstate;
+#else
+ // XXX Fall back to the PyThread_tss_*() API.
+# error "no supported thread-local variable storage classifier"
+#endif
}
static inline void
-current_fast_set(_PyRuntimeState *runtime, PyThreadState *tstate)
+current_fast_set(_PyRuntimeState *Py_UNUSED(runtime), PyThreadState *tstate)
{
assert(tstate != NULL);
- _Py_atomic_store_relaxed(&runtime->tstate_current, (uintptr_t)tstate);
+#ifdef HAVE_THREAD_LOCAL
+ _Py_tss_tstate = tstate;
+#else
+ // XXX Fall back to the PyThread_tss_*() API.
+# error "no supported thread-local variable storage classifier"
+#endif
}
static inline void
-current_fast_clear(_PyRuntimeState *runtime)
+current_fast_clear(_PyRuntimeState *Py_UNUSED(runtime))
{
- _Py_atomic_store_relaxed(&runtime->tstate_current, (uintptr_t)NULL);
+#ifdef HAVE_THREAD_LOCAL
+ _Py_tss_tstate = NULL;
+#else
+ // XXX Fall back to the PyThread_tss_*() API.
+# error "no supported thread-local variable storage classifier"
+#endif
}
#define tstate_verify_not_active(tstate) \
@@ -84,6 +104,12 @@ current_fast_clear(_PyRuntimeState *runtime)
_Py_FatalErrorFormat(__func__, "tstate %p is still current", tstate); \
}
+PyThreadState *
+_PyThreadState_GetCurrent(void)
+{
+ return current_fast_get(&_PyRuntime);
+}
+
//------------------------------------------------
// the thread state bound to the current OS thread
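/*
 * Illustrative sketch (not part of the diff above): the pystate.c hunks
 * replace the atomic runtime->tstate_current slot with a native thread-local
 * variable, so reading the current thread state needs no atomics.  The same
 * idea in stand-alone C11; the names below are hypothetical, not CPython's.
 */
typedef struct example_tstate { int id; } example_tstate;

static _Thread_local example_tstate *current = NULL;   /* one slot per OS thread */

static inline example_tstate *
example_get_current(void)
{
    return current;              /* plain load; only this thread writes it */
}

static inline void
example_set_current(example_tstate *ts)
{
    current = ts;
}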
diff --git a/Python/sysmodule.c b/Python/sysmodule.c
index 4d693a1be1f89e..1e42e8dfceb5cc 100644
--- a/Python/sysmodule.c
+++ b/Python/sysmodule.c
@@ -1874,6 +1874,18 @@ sys_getallocatedblocks_impl(PyObject *module)
return _Py_GetAllocatedBlocks();
}
+/*[clinic input]
+sys.getunicodeinternedsize -> Py_ssize_t
+
+Return the number of elements of the unicode interned dictionary
+[clinic start generated code]*/
+
+static Py_ssize_t
+sys_getunicodeinternedsize_impl(PyObject *module)
+/*[clinic end generated code: output=ad0e4c9738ed4129 input=726298eaa063347a]*/
+{
+ return _PyUnicode_InternedSize();
+}
/*[clinic input]
sys._getframe
@@ -2243,6 +2255,7 @@ static PyMethodDef sys_methods[] = {
SYS_GETDEFAULTENCODING_METHODDEF
SYS_GETDLOPENFLAGS_METHODDEF
SYS_GETALLOCATEDBLOCKS_METHODDEF
+ SYS_GETUNICODEINTERNEDSIZE_METHODDEF
SYS_GETFILESYSTEMENCODING_METHODDEF
SYS_GETFILESYSTEMENCODEERRORS_METHODDEF
#ifdef Py_TRACE_REFS
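/*
 * Illustrative sketch (not part of the diff above): after Argument Clinic
 * processes the block added to sysmodule.c, sys.getunicodeinternedsize() is
 * an ordinary sys function.  One hedged way to read it from C through the
 * generic call API (the helper name is hypothetical):
 */
#include "Python.h"

static Py_ssize_t
get_interned_size(void)
{
    PyObject *sys = PyImport_ImportModule("sys");
    if (sys == NULL) {
        return -1;
    }
    PyObject *n = PyObject_CallMethod(sys, "getunicodeinternedsize", NULL);
    Py_DECREF(sys);
    if (n == NULL) {
        return -1;
    }
    Py_ssize_t size = PyLong_AsSsize_t(n);
    Py_DECREF(n);
    return size;   /* -1 with an exception set on failure */
}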
diff --git a/Tools/build/deepfreeze.py b/Tools/build/deepfreeze.py
index aba5fecd8b1a99..5cfef5c572c4ae 100644
--- a/Tools/build/deepfreeze.py
+++ b/Tools/build/deepfreeze.py
@@ -142,7 +142,7 @@ def block(self, prefix: str, suffix: str = "") -> None:
def object_head(self, typename: str) -> None:
with self.block(".ob_base =", ","):
- self.write(f".ob_refcnt = 999999999,")
+ self.write(f".ob_refcnt = _Py_IMMORTAL_REFCNT,")
self.write(f".ob_type = &{typename},")
def object_var_head(self, typename: str, size: int) -> None:
diff --git a/Tools/build/generate_token.py b/Tools/build/generate_token.py
index fc12835b7762ad..3bd307c1733867 100755
--- a/Tools/build/generate_token.py
+++ b/Tools/build/generate_token.py
@@ -80,6 +80,8 @@ def update_file(file, content):
(x) == NEWLINE || \\
(x) == INDENT || \\
(x) == DEDENT)
+#define ISSTRINGLIT(x) ((x) == STRING || \\
+ (x) == FSTRING_MIDDLE)
// Symbols exported for test_peg_generator
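/*
 * Illustrative sketch (not part of the diff above): the generator now emits
 * an ISSTRINGLIT() macro grouping STRING with the new FSTRING_MIDDLE token.
 * How a consumer of the generated token header might use it (the include
 * path and function name are assumptions, not taken from this patch):
 */
#include "pycore_token.h"

static int
token_is_string_literal(int type)
{
    return ISSTRINGLIT(type);    /* true for STRING and FSTRING_MIDDLE */
}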
diff --git a/Tools/build/verify_ensurepip_wheels.py b/Tools/build/verify_ensurepip_wheels.py
index 044d1fd6b3cf2d..09fd5d9e3103ac 100755
--- a/Tools/build/verify_ensurepip_wheels.py
+++ b/Tools/build/verify_ensurepip_wheels.py
@@ -14,7 +14,7 @@
from pathlib import Path
from urllib.request import urlopen
-PACKAGE_NAMES = ("pip", "setuptools")
+PACKAGE_NAMES = ("pip",)
ENSURE_PIP_ROOT = Path(__file__).parent.parent.parent / "Lib/ensurepip"
WHEEL_DIR = ENSURE_PIP_ROOT / "_bundled"
ENSURE_PIP_INIT_PY_TEXT = (ENSURE_PIP_ROOT / "__init__.py").read_text(encoding="utf-8")
diff --git a/Tools/peg_generator/pegen/c_generator.py b/Tools/peg_generator/pegen/c_generator.py
index e72ce7afdc4796..f57b6275f671d3 100644
--- a/Tools/peg_generator/pegen/c_generator.py
+++ b/Tools/peg_generator/pegen/c_generator.py
@@ -68,6 +68,7 @@ class NodeTypes(Enum):
KEYWORD = 4
SOFT_KEYWORD = 5
CUT_OPERATOR = 6
+ F_STRING_CHUNK = 7
BASE_NODETYPES = {
diff --git a/configure b/configure
index 4ae8258438e620..8133d47f61355b 100755
--- a/configure
+++ b/configure
@@ -892,6 +892,8 @@ PGO_PROF_USE_FLAG
PGO_PROF_GEN_FLAG
MERGE_FDATA
LLVM_BOLT
+ac_ct_READELF
+READELF
PREBOLT_RULE
LLVM_AR_FOUND
LLVM_AR
@@ -7916,6 +7918,112 @@ if test "$Py_BOLT" = 'true' ; then
DEF_MAKE_ALL_RULE="bolt-opt"
DEF_MAKE_RULE="build_all"
+
+ if test -n "$ac_tool_prefix"; then
+ for ac_prog in readelf
+ do
+ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
+set dummy $ac_tool_prefix$ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_READELF+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$READELF"; then
+ ac_cv_prog_READELF="$READELF" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_prog_READELF="$ac_tool_prefix$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+done
+ done
+IFS=$as_save_IFS
+
+fi
+fi
+READELF=$ac_cv_prog_READELF
+if test -n "$READELF"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $READELF" >&5
+$as_echo "$READELF" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+ test -n "$READELF" && break
+ done
+fi
+if test -z "$READELF"; then
+ ac_ct_READELF=$READELF
+ for ac_prog in readelf
+do
+ # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
+$as_echo_n "checking for $ac_word... " >&6; }
+if ${ac_cv_prog_ac_ct_READELF+:} false; then :
+ $as_echo_n "(cached) " >&6
+else
+ if test -n "$ac_ct_READELF"; then
+ ac_cv_prog_ac_ct_READELF="$ac_ct_READELF" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+ IFS=$as_save_IFS
+ test -z "$as_dir" && as_dir=.
+ for ac_exec_ext in '' $ac_executable_extensions; do
+ if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+ ac_cv_prog_ac_ct_READELF="$ac_prog"
+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
+ break 2
+ fi
+done
+ done
+IFS=$as_save_IFS
+
+fi
+fi
+ac_ct_READELF=$ac_cv_prog_ac_ct_READELF
+if test -n "$ac_ct_READELF"; then
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_READELF" >&5
+$as_echo "$ac_ct_READELF" >&6; }
+else
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
+$as_echo "no" >&6; }
+fi
+
+
+ test -n "$ac_ct_READELF" && break
+done
+
+ if test "x$ac_ct_READELF" = x; then
+ READELF=""notfound""
+ else
+ case $cross_compiling:$ac_tool_warned in
+yes:)
+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
+ac_tool_warned=yes ;;
+esac
+ READELF=$ac_ct_READELF
+ fi
+fi
+
+ if test "$READELF" == "notfound"
+ then
+ as_fn_error $? "readelf is required for a --enable-bolt build but could not be found." "$LINENO" 5
+ fi
+
# -fno-reorder-blocks-and-partition is required for bolt to work.
# Possibly GCC only.
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether C compiler accepts -fno-reorder-blocks-and-partition" >&5
diff --git a/configure.ac b/configure.ac
index 4d9eb46f5ce7d8..3f20d8980d8abc 100644
--- a/configure.ac
+++ b/configure.ac
@@ -1938,6 +1938,13 @@ if test "$Py_BOLT" = 'true' ; then
DEF_MAKE_ALL_RULE="bolt-opt"
DEF_MAKE_RULE="build_all"
+ AC_SUBST(READELF)
+ AC_CHECK_TOOLS(READELF, [readelf], "notfound")
+ if test "$READELF" == "notfound"
+ then
+ AC_MSG_ERROR([readelf is required for a --enable-bolt build but could not be found.])
+ fi
+
# -fno-reorder-blocks-and-partition is required for bolt to work.
# Possibly GCC only.
AX_CHECK_COMPILE_FLAG([-fno-reorder-blocks-and-partition],[