diff --git a/Doc/_static/og-image.png b/Doc/_static/og-image.png
new file mode 100644
index 00000000000000..0e80751e740387
Binary files /dev/null and b/Doc/_static/og-image.png differ
diff --git a/Doc/c-api/refcounting.rst b/Doc/c-api/refcounting.rst
index cd1f2ef7076836..d8e9c2da6f3ff3 100644
--- a/Doc/c-api/refcounting.rst
+++ b/Doc/c-api/refcounting.rst
@@ -7,8 +7,8 @@
Reference Counting
******************
-The macros in this section are used for managing reference counts of Python
-objects.
+The functions and macros in this section are used for managing reference counts
+of Python objects.
.. c:function:: Py_ssize_t Py_REFCNT(PyObject *o)
@@ -129,6 +129,11 @@ objects.
It is a good idea to use this macro whenever decrementing the reference
count of an object that might be traversed during garbage collection.
+ .. versionchanged:: 3.12
+ The macro argument is now only evaluated once. If the argument has side
+ effects, these are no longer duplicated.
+
+
.. c:function:: void Py_IncRef(PyObject *o)
Increment the reference count for object *o*. A function version of :c:func:`Py_XINCREF`.
@@ -139,3 +144,40 @@ objects.
Decrement the reference count for object *o*. A function version of :c:func:`Py_XDECREF`.
It can be used for runtime dynamic embedding of Python.
+
+
+.. c:macro:: Py_SETREF(dst, src)
+
+ Macro that safely decrements the reference count of ``dst`` and sets ``dst``
+ to ``src``.
+
+ As in the case of :c:func:`Py_CLEAR`, the "obvious" code can be deadly::
+
+ Py_DECREF(dst);
+ dst = src;
+
+ The safe way is::
+
+ Py_SETREF(dst, src);
+
+ That arranges to set ``dst`` to ``src`` *before* decrementing the reference
+ count of the old ``dst`` value, so that any code triggered as a side effect of
+ ``dst`` being torn down no longer believes ``dst`` points to a valid object.
+
+ .. versionadded:: 3.6
+
+ .. versionchanged:: 3.12
+ The macro arguments are now only evaluated once. If an argument has side
+ effects, these are no longer duplicated.
+
+
+.. c:macro:: Py_XSETREF(dst, src)
+
+ Variant of the :c:macro:`Py_SETREF` macro that uses :c:func:`Py_XDECREF`
+ instead of :c:func:`Py_DECREF`.
+
+ .. versionadded:: 3.6
+
+ .. versionchanged:: 3.12
+ The macro arguments are now only evaluated once. If an argument has side
+ effects, these are no longer duplicated.
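Editorial note: the new ``Py_SETREF``/``Py_XSETREF`` documentation above may be easier to
follow with a short usage sketch. The sketch below is not part of the patch; ``demo_object``
and ``demo_set_name`` are hypothetical names, and ``Py_NewRef()`` is assumed to be available
(it is part of the public C API since Python 3.10)::

   #include <Python.h>

   typedef struct {
       PyObject_HEAD
       PyObject *name;   /* owned reference; may still be NULL */
   } demo_object;

   /* Replace self->name with a new owned reference.  Py_XSETREF stores the
      new value before releasing the old one, so any code triggered while the
      old object is torn down never sees a dangling self->name.  The X variant
      is used because self->name may be NULL. */
   static int
   demo_set_name(demo_object *self, PyObject *new_name)
   {
       Py_XSETREF(self->name, Py_NewRef(new_name));
       return 0;
   }

When the field is known to be non-NULL, plain ``Py_SETREF()`` can be used instead; per the
documentation above, the only difference is whether :c:func:`Py_DECREF` or
:c:func:`Py_XDECREF` is applied to the old value.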
diff --git a/Doc/conf.py b/Doc/conf.py
index 6fad5c668dab31..b3da8fa9ec4497 100644
--- a/Doc/conf.py
+++ b/Doc/conf.py
@@ -13,9 +13,25 @@
# General configuration
# ---------------------
-extensions = ['sphinx.ext.coverage', 'sphinx.ext.doctest',
- 'pyspecific', 'c_annotations', 'escape4chm',
- 'asdl_highlight', 'peg_highlight', 'glossary_search']
+extensions = [
+ 'asdl_highlight',
+ 'c_annotations',
+ 'escape4chm',
+ 'glossary_search',
+ 'peg_highlight',
+ 'pyspecific',
+ 'sphinx.ext.coverage',
+ 'sphinx.ext.doctest',
+]
+
+# Skip if downstream redistributors haven't installed it
+try:
+ import sphinxext.opengraph
+except ImportError:
+ pass
+else:
+ extensions.append('sphinxext.opengraph')
+
doctest_global_setup = '''
try:
@@ -89,6 +105,14 @@
# Short title used e.g. for <title> HTML tags.
html_short_title = '%s Documentation' % release
+# Deployment preview information, from Netlify
+# (See netlify.toml and https://docs.netlify.com/configure-builds/environment-variables/#git-metadata)
+html_context = {
+ "is_deployment_preview": os.getenv("IS_DEPLOYMENT_PREVIEW"),
+ "repository_url": os.getenv("REPOSITORY_URL"),
+ "pr_id": os.getenv("REVIEW_ID")
+}
+
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
@@ -114,7 +138,7 @@
html_use_opensearch = 'https://docs.python.org/' + version
# Additional static files.
-html_static_path = ['tools/static']
+html_static_path = ['_static', 'tools/static']
# Output file base name for HTML help builder.
htmlhelp_basename = 'python' + release.replace('.', '')
@@ -238,3 +262,13 @@
# Relative filename of the data files
refcount_file = 'data/refcounts.dat'
stable_abi_file = 'data/stable_abi.dat'
+
+# sphinxext-opengraph config
+ogp_site_url = 'https://docs.python.org/3/'
+ogp_site_name = 'Python documentation'
+ogp_image = '_static/og-image.png'
+ogp_custom_meta_tags = [
+ '',
+ '',
+ '',
+]
diff --git a/Doc/howto/enum.rst b/Doc/howto/enum.rst
index 98d9f4febe2dfa..4525acb04503b3 100644
--- a/Doc/howto/enum.rst
+++ b/Doc/howto/enum.rst
@@ -158,6 +158,7 @@ And a function to display the chores for a given day::
... for chore, days in chores.items():
... if day in days:
... print(chore)
+ ...
>>> show_chores(chores_for_ethan, Weekday.SATURDAY)
answer SO questions
@@ -459,6 +460,31 @@ sense to allow sharing some common behavior between a group of enumerations.
(See `OrderedEnum`_ for an example.)
+.. _enum-dataclass-support:
+
+Dataclass support
+-----------------
+
+When inheriting from a :class:`~dataclasses.dataclass`,
+the :meth:`~Enum.__repr__` omits the inherited class' name. For example::
+
+ >>> @dataclass
+ ... class CreatureDataMixin:
+ ... size: str
+ ... legs: int
+ ... tail: bool = field(repr=False, default=True)
+ ...
+ >>> class Creature(CreatureDataMixin, Enum):
+ ... BEETLE = 'small', 6
+ ... DOG = 'medium', 4
+ ...
+ >>> Creature.DOG
+ <Creature.DOG: size='medium', legs=4>
+
+Use the :func:`!dataclass` argument ``repr=False``
+to use the standard :func:`repr`.
+
+
Pickling
--------
@@ -687,6 +713,7 @@ It is also possible to name the combinations::
... W = 2
... X = 1
... RWX = 7
+ ...
>>> Perm.RWX
>>> ~Perm.RWX
diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst
index f8839d0986d047..e6c96486492572 100644
--- a/Doc/library/argparse.rst
+++ b/Doc/library/argparse.rst
@@ -565,6 +565,7 @@ arguments they contain. For example::
>>> with open('args.txt', 'w', encoding=sys.getfilesystemencoding()) as fp:
... fp.write('-f\nbar')
+ ...
>>> parser = argparse.ArgumentParser(fromfile_prefix_chars='@')
>>> parser.add_argument('-f')
>>> parser.parse_args(['-f', 'foo', '@args.txt'])
diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst
index 0bcaed5477fa91..fd47b0c24d8a16 100644
--- a/Doc/library/asyncio-eventloop.rst
+++ b/Doc/library/asyncio-eventloop.rst
@@ -43,10 +43,12 @@ an event loop:
Get the current event loop.
- If there is no current event loop set in the current OS thread,
- the OS thread is main, and :func:`set_event_loop` has not yet
- been called, asyncio will create a new event loop and set it as the
- current one.
+ When called from a coroutine or a callback (e.g. scheduled with
+ call_soon or similar API), this function will always return the
+ running event loop.
+
+ If there is no running event loop set, the function will return
+ the result of calling ``get_event_loop_policy().get_event_loop()``.
Because this function has rather complex behavior (especially
when custom event loop policies are in use), using the
@@ -57,11 +59,11 @@ an event loop:
instead of using these lower level functions to manually create and close an
event loop.
- .. deprecated:: 3.10
- Emits a deprecation warning if there is no running event loop.
- In future Python releases, this function may become an alias of
- :func:`get_running_loop` and will accordingly raise a
- :exc:`RuntimeError` if there is no running event loop.
+ .. note::
+ In Python versions 3.10.0--3.10.8 and 3.11.0 this function
+ (and other functions which used it implicitly) emitted a
+ :exc:`DeprecationWarning` if there was no running event loop, even if
+ the current loop was set.
.. function:: set_event_loop(loop)
diff --git a/Doc/library/asyncio-llapi-index.rst b/Doc/library/asyncio-llapi-index.rst
index b7ad888a7b67ab..9ce48a24444e66 100644
--- a/Doc/library/asyncio-llapi-index.rst
+++ b/Doc/library/asyncio-llapi-index.rst
@@ -19,7 +19,7 @@ Obtaining the Event Loop
- The **preferred** function to get the running event loop.
* - :func:`asyncio.get_event_loop`
- - Get an event loop instance (current or via the policy).
+ - Get an event loop instance (running or current via the current policy).
* - :func:`asyncio.set_event_loop`
- Set the event loop as current via the current policy.
diff --git a/Doc/library/asyncio-policy.rst b/Doc/library/asyncio-policy.rst
index 98c85015874689..ccd95244947534 100644
--- a/Doc/library/asyncio-policy.rst
+++ b/Doc/library/asyncio-policy.rst
@@ -116,6 +116,10 @@ asyncio ships with the following built-in policies:
On Windows, :class:`ProactorEventLoop` is now used by default.
+ .. versionchanged:: 3.12
+ :meth:`get_event_loop` now raises a :exc:`RuntimeError` if there is no
+ current event loop set.
+
.. class:: WindowsSelectorEventLoopPolicy
diff --git a/Doc/library/bz2.rst b/Doc/library/bz2.rst
index ae5a1598f84b44..32df99869eb530 100644
--- a/Doc/library/bz2.rst
+++ b/Doc/library/bz2.rst
@@ -320,9 +320,11 @@ Writing and reading a bzip2-compressed file in binary mode:
>>> with bz2.open("myfile.bz2", "wb") as f:
... # Write compressed data to file
... unused = f.write(data)
+ ...
>>> with bz2.open("myfile.bz2", "rb") as f:
... # Decompress data from file
... content = f.read()
+ ...
>>> content == data # Check equality to original object after round-trip
True
diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst
index 53b4b69f84b7bf..2cffc2300a2298 100644
--- a/Doc/library/collections.rst
+++ b/Doc/library/collections.rst
@@ -229,6 +229,7 @@ For example::
>>> cnt = Counter()
>>> for word in ['red', 'blue', 'red', 'green', 'blue', 'blue']:
... cnt[word] += 1
+ ...
>>> cnt
Counter({'blue': 3, 'red': 2, 'green': 1})
@@ -818,6 +819,7 @@ zero):
>>> def constant_factory(value):
... return lambda: value
+ ...
>>> d = defaultdict(constant_factory(''))
>>> d.update(name='John', action='ran')
>>> '%(name)s %(action)s to %(object)s' % d
diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst
index f7e2bb3f3c6de3..8bfed19d3fd2c6 100644
--- a/Doc/library/datetime.rst
+++ b/Doc/library/datetime.rst
@@ -765,6 +765,7 @@ Example of counting days to an event::
>>> my_birthday = date(today.year, 6, 24)
>>> if my_birthday < today:
... my_birthday = my_birthday.replace(year=today.year + 1)
+ ...
>>> my_birthday
datetime.date(2008, 6, 24)
>>> time_to_birthday = abs(my_birthday - today)
@@ -2601,7 +2602,7 @@ Notes:
(9)
When used with the :meth:`strptime` method, the leading zero is optional
- for formats ``%d``, ``%m``, ``%H``, ``%I``, ``%M``, ``%S``, ``%J``, ``%U``,
+ for formats ``%d``, ``%m``, ``%H``, ``%I``, ``%M``, ``%S``, ``%j``, ``%U``,
``%W``, and ``%V``. Format ``%y`` does require a leading zero.
.. rubric:: Footnotes
diff --git a/Doc/library/decimal.rst b/Doc/library/decimal.rst
index 260108136df7f1..fec9b86864c578 100644
--- a/Doc/library/decimal.rst
+++ b/Doc/library/decimal.rst
@@ -2057,6 +2057,7 @@ to handle the :meth:`quantize` step:
>>> def mul(x, y, fp=TWOPLACES):
... return (x * y).quantize(fp)
+ ...
>>> def div(x, y, fp=TWOPLACES):
... return (x / y).quantize(fp)
diff --git a/Doc/library/doctest.rst b/Doc/library/doctest.rst
index c106d5a3383a5e..d6e4dca0860671 100644
--- a/Doc/library/doctest.rst
+++ b/Doc/library/doctest.rst
@@ -351,6 +351,7 @@ The fine print:
>>> def f(x):
... r'''Backslashes in a raw docstring: m\n'''
+ ...
>>> print(f.__doc__)
Backslashes in a raw docstring: m\n
@@ -360,6 +361,7 @@ The fine print:
>>> def f(x):
... '''Backslashes in a raw docstring: m\\n'''
+ ...
>>> print(f.__doc__)
Backslashes in a raw docstring: m\n
@@ -1055,7 +1057,7 @@ from text files and modules with doctests:
from a text file using :func:`DocFileSuite`.
-.. function:: DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None, setUp=None, tearDown=None, checker=None)
+.. function:: DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None, setUp=None, tearDown=None, optionflags=0, checker=None)
Convert doctest tests for a module to a :class:`unittest.TestSuite`.
diff --git a/Doc/library/email.policy.rst b/Doc/library/email.policy.rst
index bf53b9520fc723..2439dee676c9b0 100644
--- a/Doc/library/email.policy.rst
+++ b/Doc/library/email.policy.rst
@@ -97,6 +97,7 @@ file on disk and pass it to the system ``sendmail`` program on a Unix system:
>>> from subprocess import Popen, PIPE
>>> with open('mymsg.txt', 'rb') as f:
... msg = message_from_binary_file(f, policy=policy.default)
+ ...
>>> p = Popen(['sendmail', msg['To'].addresses[0]], stdin=PIPE)
>>> g = BytesGenerator(p.stdin, policy=msg.policy.clone(linesep='\r\n'))
>>> g.flatten(msg)
diff --git a/Doc/library/enum.rst b/Doc/library/enum.rst
index 208aecf11c809b..25a6e1f0b61677 100644
--- a/Doc/library/enum.rst
+++ b/Doc/library/enum.rst
@@ -292,6 +292,7 @@ Data Types
... @classmethod
... def today(cls):
... print('today is %s' % cls(date.today().isoweekday()).name)
+ ...
>>> dir(Weekday.SATURDAY)
['__class__', '__doc__', '__eq__', '__hash__', '__module__', 'name', 'today', 'value']
@@ -309,11 +310,12 @@ Data Types
>>> class PowersOfThree(Enum):
... @staticmethod
... def _generate_next_value_(name, start, count, last_values):
- ... return (count + 1) * 3
+ ... return 3 ** (count + 1)
... FIRST = auto()
... SECOND = auto()
+ ...
>>> PowersOfThree.SECOND.value
- 6
+ 9
.. method:: Enum.__init_subclass__(cls, **kwds)
@@ -336,6 +338,7 @@ Data Types
... if member.value == value:
... return member
... return None
+ ...
>>> Build.DEBUG.value
'debug'
>>> Build('deBUG')
@@ -353,6 +356,7 @@ Data Types
... def __repr__(self):
... cls_name = self.__class__.__name__
... return f'{cls_name}.{self.name}'
+ ...
>>> OtherStyle.ALTERNATE, str(OtherStyle.ALTERNATE), f"{OtherStyle.ALTERNATE}"
(OtherStyle.ALTERNATE, 'OtherStyle.ALTERNATE', 'OtherStyle.ALTERNATE')
@@ -367,6 +371,7 @@ Data Types
... SOMETHING_ELSE = auto()
... def __str__(self):
... return f'{self.name}'
+ ...
>>> OtherStyle.ALTERNATE, str(OtherStyle.ALTERNATE), f"{OtherStyle.ALTERNATE}"
(<OtherStyle.ALTERNATE: 1>, 'ALTERNATE', 'ALTERNATE')
@@ -381,6 +386,7 @@ Data Types
... SOMETHING_ELSE = auto()
... def __format__(self, spec):
... return f'{self.name}'
+ ...
>>> OtherStyle.ALTERNATE, str(OtherStyle.ALTERNATE), f"{OtherStyle.ALTERNATE}"
(<OtherStyle.ALTERNATE: 1>, 'OtherStyle.ALTERNATE', 'ALTERNATE')
@@ -389,6 +395,8 @@ Data Types
Using :class:`auto` with :class:`Enum` results in integers of increasing value,
starting with ``1``.
+ .. versionchanged:: 3.12 Added :ref:`enum-dataclass-support`
+
.. class:: IntEnum
@@ -401,6 +409,7 @@ Data Types
... ONE = 1
... TWO = 2
... THREE = 3
+ ...
>>> Numbers.THREE
>>> Numbers.ONE + Numbers.TWO
@@ -461,6 +470,7 @@ Data Types
... RED = auto()
... GREEN = auto()
... BLUE = auto()
+ ...
>>> purple = Color.RED | Color.BLUE
>>> white = Color.RED | Color.GREEN | Color.BLUE
>>> Color.GREEN in purple
@@ -568,6 +578,7 @@ Data Types
... RED = auto()
... GREEN = auto()
... BLUE = auto()
+ ...
>>> Color.RED & 2
>>> Color.RED | 2
@@ -693,6 +704,7 @@ Data Types
... RED = auto()
... GREEN = auto()
... BLUE = auto()
+ ...
>>> StrictFlag(2**2 + 2**4)
Traceback (most recent call last):
...
@@ -710,6 +722,7 @@ Data Types
... RED = auto()
... GREEN = auto()
... BLUE = auto()
+ ...
>>> ConformFlag(2**2 + 2**4)
@@ -723,6 +736,7 @@ Data Types
... RED = auto()
... GREEN = auto()
... BLUE = auto()
+ ...
>>> EjectFlag(2**2 + 2**4)
20
@@ -736,6 +750,7 @@ Data Types
... RED = auto()
... GREEN = auto()
... BLUE = auto()
+ ...
>>> KeepFlag(2**2 + 2**4)
diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst
index 110e7e5d7fb9a7..2110990d188973 100644
--- a/Doc/library/functions.rst
+++ b/Doc/library/functions.rst
@@ -462,6 +462,7 @@ are always available. They are listed here in alphabetical order.
>>> class Shape:
... def __dir__(self):
... return ['area', 'perimeter', 'location']
+ ...
>>> s = Shape()
>>> dir(s)
['area', 'location', 'perimeter']
diff --git a/Doc/library/hashlib.rst b/Doc/library/hashlib.rst
index 8e47312fe77bf5..f8d10c0c295c7a 100644
--- a/Doc/library/hashlib.rst
+++ b/Doc/library/hashlib.rst
@@ -497,6 +497,7 @@ update the hash:
>>> h = blake2b()
>>> for item in items:
... h.update(item)
+ ...
>>> h.hexdigest()
'6ff843ba685842aa82031d3f53c48b66326df7639a63d128974c5c14f31a0f33343a8c65551134ed1ae0f2b0dd2bb495dc81039e3eeb0aa1bb0388bbeac29183'
diff --git a/Doc/library/http.server.rst b/Doc/library/http.server.rst
index 81b6bf5373b495..3290b9beab3ed9 100644
--- a/Doc/library/http.server.rst
+++ b/Doc/library/http.server.rst
@@ -512,3 +512,12 @@ Security Considerations
:class:`SimpleHTTPRequestHandler` will follow symbolic links when handling
requests, this makes it possible for files outside of the specified directory
to be served.
+
+Earlier versions of Python did not scrub control characters from the
+log messages emitted to stderr from ``python -m http.server`` or the
+default :class:`BaseHTTPRequestHandler` ``.log_message``
+implementation. This could allow remote clients connecting to your
+server to send nefarious control codes to your terminal.
+
+.. versionadded:: 3.12
+ Control characters are scrubbed in stderr logs.
diff --git a/Doc/library/inspect.rst b/Doc/library/inspect.rst
index 9cb7a6f94e49cd..6705577551dcc5 100644
--- a/Doc/library/inspect.rst
+++ b/Doc/library/inspect.rst
@@ -715,6 +715,7 @@ function.
>>> def test(a, b):
... pass
+ ...
>>> sig = signature(test)
>>> new_sig = sig.replace(return_annotation="new return anno")
>>> str(new_sig)
@@ -1054,6 +1055,7 @@ Classes and functions
>>> from inspect import getcallargs
>>> def f(a, b=1, *pos, **named):
... pass
+ ...
>>> getcallargs(f, 1, 2, 3) == {'a': 1, 'named': {}, 'b': 2, 'pos': (3,)}
True
>>> getcallargs(f, a=2, x=4) == {'a': 2, 'named': {'x': 4}, 'b': 1, 'pos': ()}
diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst
index 0b5978505a9672..624d2430ac20d7 100644
--- a/Doc/library/itertools.rst
+++ b/Doc/library/itertools.rst
@@ -52,7 +52,7 @@ Iterator Arguments Results
Iterator Arguments Results Example
============================ ============================ ================================================= =============================================================
:func:`accumulate` p [,func] p0, p0+p1, p0+p1+p2, ... ``accumulate([1,2,3,4,5]) --> 1 3 6 10 15``
-:func:`batched` p, n [p0, p1, ..., p_n-1], ... ``batched('ABCDEFG', n=3) --> ABC DEF G``
+:func:`batched` p, n (p0, p1, ..., p_n-1), ... ``batched('ABCDEFG', n=3) --> ABC DEF G``
:func:`chain` p, q, ... p0, p1, ... plast, q0, q1, ... ``chain('ABC', 'DEF') --> A B C D E F``
:func:`chain.from_iterable` iterable p0, p1, ... plast, q0, q1, ... ``chain.from_iterable(['ABC', 'DEF']) --> A B C D E F``
:func:`compress` data, selectors (d[0] if s[0]), (d[1] if s[1]), ... ``compress('ABCDEF', [1,0,1,0,1,1]) --> A C E F``
@@ -166,11 +166,11 @@ loops that truncate the stream.
.. function:: batched(iterable, n)
- Batch data from the *iterable* into lists of length *n*. The last
+ Batch data from the *iterable* into tuples of length *n*. The last
batch may be shorter than *n*.
- Loops over the input iterable and accumulates data into lists up to
- size *n*. The input is consumed lazily, just enough to fill a list.
+ Loops over the input iterable and accumulates data into tuples up to
+ size *n*. The input is consumed lazily, just enough to fill a batch.
The result is yielded as soon as the batch is full or when the input
iterable is exhausted:
@@ -179,14 +179,14 @@ loops that truncate the stream.
>>> flattened_data = ['roses', 'red', 'violets', 'blue', 'sugar', 'sweet']
>>> unflattened = list(batched(flattened_data, 2))
>>> unflattened
- [['roses', 'red'], ['violets', 'blue'], ['sugar', 'sweet']]
+ [('roses', 'red'), ('violets', 'blue'), ('sugar', 'sweet')]
>>> for batch in batched('ABCDEFG', 3):
... print(batch)
...
- ['A', 'B', 'C']
- ['D', 'E', 'F']
- ['G']
+ ('A', 'B', 'C')
+ ('D', 'E', 'F')
+ ('G',)
Roughly equivalent to::
@@ -195,7 +195,7 @@ loops that truncate the stream.
if n < 1:
raise ValueError('n must be at least one')
it = iter(iterable)
- while (batch := list(islice(it, n))):
+ while (batch := tuple(islice(it, n))):
yield batch
.. versionadded:: 3.12
diff --git a/Doc/library/profile.rst b/Doc/library/profile.rst
index 2d95096f4cb83a..c2189e02656c7a 100644
--- a/Doc/library/profile.rst
+++ b/Doc/library/profile.rst
@@ -274,7 +274,7 @@ functions:
with cProfile.Profile() as pr:
# ... do something ...
- pr.print_stats()
+ pr.print_stats()
.. versionchanged:: 3.8
Added context manager support.
diff --git a/Doc/library/re.rst b/Doc/library/re.rst
index e6e242320fd878..f7d46586cf7570 100644
--- a/Doc/library/re.rst
+++ b/Doc/library/re.rst
@@ -973,6 +973,7 @@ Functions
>>> def dashrepl(matchobj):
... if matchobj.group(0) == '-': return ' '
... else: return '-'
+ ...
>>> re.sub('-{1,2}', dashrepl, 'pro----gram-files')
'pro--gram files'
>>> re.sub(r'\sAND\s', ' & ', 'Baked Beans And Spam', flags=re.IGNORECASE)
@@ -1672,6 +1673,7 @@ in each word of a sentence except for the first and last characters::
... inner_word = list(m.group(2))
... random.shuffle(inner_word)
... return m.group(1) + "".join(inner_word) + m.group(3)
+ ...
>>> text = "Professor Abdolmalek, please report your absences promptly."
>>> re.sub(r"(\w)(\w+)(\w)", repl, text)
'Poefsrosr Aealmlobdk, pslaee reorpt your abnseces plmrptoy.'
diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst
index c946407ea1d83f..de2e1aa3868bb3 100644
--- a/Doc/library/socket.rst
+++ b/Doc/library/socket.rst
@@ -428,7 +428,14 @@ Constants
.. versionchanged:: 3.12
Added ``SO_RTABLE`` and ``SO_USER_COOKIE``. On OpenBSD
and FreeBSD respectively those constants can be used in the same way that
- ``SO_MARK`` is used on Linux.
+ ``SO_MARK`` is used on Linux. Also added missing TCP socket options from
+ Linux: ``TCP_MD5SIG``, ``TCP_THIN_LINEAR_TIMEOUTS``, ``TCP_THIN_DUPACK``,
+ ``TCP_REPAIR``, ``TCP_REPAIR_QUEUE``, ``TCP_QUEUE_SEQ``,
+ ``TCP_REPAIR_OPTIONS``, ``TCP_TIMESTAMP``, ``TCP_CC_INFO``,
+ ``TCP_SAVE_SYN``, ``TCP_SAVED_SYN``, ``TCP_REPAIR_WINDOW``,
+ ``TCP_FASTOPEN_CONNECT``, ``TCP_ULP``, ``TCP_MD5SIG_EXT``,
+ ``TCP_FASTOPEN_KEY``, ``TCP_FASTOPEN_NO_COOKIE``,
+ ``TCP_ZEROCOPY_RECEIVE``, ``TCP_INQ``, ``TCP_TX_DELAY``.
.. data:: AF_CAN
PF_CAN
diff --git a/Doc/library/sqlite3.rst b/Doc/library/sqlite3.rst
index 960f2966afe1f2..3622864a4b06f9 100644
--- a/Doc/library/sqlite3.rst
+++ b/Doc/library/sqlite3.rst
@@ -397,6 +397,7 @@ Module functions
>>> con = sqlite3.connect(":memory:")
>>> def evil_trace(stmt):
... 5/0
+ ...
>>> con.set_trace_callback(evil_trace)
>>> def debug(unraisable):
... print(f"{unraisable.exc_value!r} in callback {unraisable.object.__name__}")
@@ -1929,12 +1930,16 @@ How to use placeholders to bind values in SQL queries
SQL operations usually need to use values from Python variables. However,
beware of using Python's string operations to assemble queries, as they
-are vulnerable to `SQL injection attacks`_ (see the `xkcd webcomic
-`_ for a humorous example of what can go wrong)::
-
- # Never do this -- insecure!
- symbol = 'RHAT'
- cur.execute("SELECT * FROM stocks WHERE symbol = '%s'" % symbol)
+are vulnerable to `SQL injection attacks`_. For example, an attacker can simply
+close the single quote and inject ``OR TRUE`` to select all rows::
+
+ >>> # Never do this -- insecure!
+ >>> symbol = input()
+ ' OR TRUE; --
+ >>> sql = "SELECT * FROM stocks WHERE symbol = '%s'" % symbol
+ >>> print(sql)
+ SELECT * FROM stocks WHERE symbol = '' OR TRUE; --'
+ >>> cur.execute(sql)
Instead, use the DB-API's parameter substitution. To insert a variable into a
query string, use a placeholder in the string, and substitute the actual values
diff --git a/Doc/library/statistics.rst b/Doc/library/statistics.rst
index 88a887960edb58..f934b0e0319dca 100644
--- a/Doc/library/statistics.rst
+++ b/Doc/library/statistics.rst
@@ -996,6 +996,7 @@ probability that the Python room will stay within its capacity limits?
>>> seed(8675309)
>>> def trial():
... return choices(('Python', 'Ruby'), (p, q), k=n).count('Python')
+ ...
>>> mean(trial() <= k for i in range(10_000))
0.8398
diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst
index 785b76a11f2f38..c785336944f50a 100644
--- a/Doc/library/stdtypes.rst
+++ b/Doc/library/stdtypes.rst
@@ -4459,6 +4459,7 @@ can be used interchangeably to index the same dictionary entry.
>>> class Counter(dict):
... def __missing__(self, key):
... return 0
+ ...
>>> c = Counter()
>>> c['red']
0
@@ -4716,6 +4717,7 @@ An example of dictionary view usage::
>>> n = 0
>>> for val in values:
... n += val
+ ...
>>> print(n)
504
@@ -4741,7 +4743,7 @@ An example of dictionary view usage::
>>> # get back a read-only proxy for the original dictionary
>>> values.mapping
- mappingproxy({'eggs': 2, 'sausage': 1, 'bacon': 1, 'spam': 500})
+ mappingproxy({'bacon': 1, 'spam': 500})
>>> values.mapping['spam']
500
@@ -5501,7 +5503,7 @@ When an operation would exceed the limit, a :exc:`ValueError` is raised:
>>> _ = int('2' * 5432)
Traceback (most recent call last):
...
- ValueError: Exceeds the limit (4300) for integer string conversion: value has 5432 digits; use sys.set_int_max_str_digits() to increase the limit.
+ ValueError: Exceeds the limit (4300 digits) for integer string conversion: value has 5432 digits; use sys.set_int_max_str_digits() to increase the limit.
>>> i = int('2' * 4300)
>>> len(str(i))
4300
@@ -5509,7 +5511,7 @@ When an operation would exceed the limit, a :exc:`ValueError` is raised:
>>> len(str(i_squared))
Traceback (most recent call last):
...
- ValueError: Exceeds the limit (4300) for integer string conversion: value has 8599 digits; use sys.set_int_max_str_digits() to increase the limit.
+ ValueError: Exceeds the limit (4300 digits) for integer string conversion: value has 8599 digits; use sys.set_int_max_str_digits() to increase the limit.
>>> len(hex(i_squared))
7144
>>> assert int(hex(i_squared), base=16) == i*i # Hexadecimal is unlimited.
diff --git a/Doc/library/sysconfig.rst b/Doc/library/sysconfig.rst
index 024988777030f8..839c2c015b49ae 100644
--- a/Doc/library/sysconfig.rst
+++ b/Doc/library/sysconfig.rst
@@ -121,7 +121,7 @@ identifier. Python currently uses eight paths:
Return the default scheme name for the current platform.
- .. versionchanged:: 3.10
+ .. versionadded:: 3.10
This function was previously named ``_get_default_scheme()`` and
considered an implementation detail.
diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst
index 94c9cb11f02d6d..356f919a1897b2 100644
--- a/Doc/library/typing.rst
+++ b/Doc/library/typing.rst
@@ -2575,6 +2575,10 @@ Functions and decorators
assumed to be True or False if it is omitted by the caller.
* ``kw_only_default`` indicates whether the ``kw_only`` parameter is
assumed to be True or False if it is omitted by the caller.
+ * ``frozen_default`` indicates whether the ``frozen`` parameter is
+ assumed to be True or False if it is omitted by the caller.
+
+ .. versionadded:: 3.12
* ``field_specifiers`` specifies a static list of supported classes
or functions that describe fields, similar to ``dataclasses.field()``.
* Arbitrary other keyword arguments are accepted in order to allow for
diff --git a/Doc/library/unittest.mock.rst b/Doc/library/unittest.mock.rst
index b768557e6075f6..e009f303fef317 100644
--- a/Doc/library/unittest.mock.rst
+++ b/Doc/library/unittest.mock.rst
@@ -1604,6 +1604,7 @@ decorator:
>>> @patch.dict(foo, {'newkey': 'newvalue'})
... def test():
... assert foo == {'newkey': 'newvalue'}
+ ...
>>> test()
>>> assert foo == {}
diff --git a/Doc/library/xml.etree.elementtree.rst b/Doc/library/xml.etree.elementtree.rst
index 2fe0d2e082fb3a..876de29b17ca3c 100644
--- a/Doc/library/xml.etree.elementtree.rst
+++ b/Doc/library/xml.etree.elementtree.rst
@@ -1212,6 +1212,7 @@ Example of changing the attribute "target" of every link in first paragraph::
[, ]
>>> for i in links: # Iterates through all found links
... i.attrib["target"] = "blank"
+ ...
>>> tree.write("output.xhtml")
.. _elementtree-qname-objects:
diff --git a/Doc/library/zipfile.rst b/Doc/library/zipfile.rst
index 4dd9fa961a8d98..82709dbc92496d 100644
--- a/Doc/library/zipfile.rst
+++ b/Doc/library/zipfile.rst
@@ -672,6 +672,7 @@ The :class:`PyZipFile` constructor takes the same parameters as the
>>> def notests(s):
... fn = os.path.basename(s)
... return (not (fn == 'test' or fn.startswith('test_')))
+ ...
>>> zf.writepy('myprog', filterfunc=notests)
The :meth:`writepy` method makes archives with file names like
diff --git a/Doc/requirements.txt b/Doc/requirements.txt
index 958665db69e227..134f39d6d7b3d4 100644
--- a/Doc/requirements.txt
+++ b/Doc/requirements.txt
@@ -8,6 +8,7 @@ sphinx==4.5.0
blurb
sphinx-lint==0.6.7
+sphinxext-opengraph>=0.7.1
# The theme used by the documentation is stored separately, so we need
# to install that as well.
diff --git a/Doc/tools/templates/layout.html b/Doc/tools/templates/layout.html
index 98ccf4224804b2..460161cd320223 100644
--- a/Doc/tools/templates/layout.html
+++ b/Doc/tools/templates/layout.html
@@ -8,6 +8,19 @@
{% trans %} Python documentation for the current stable release{% endtrans %}.
{%- endif %}
+
+{%- if is_deployment_preview %}
+
+{%- endif %}
{% endblock %}
{% block rootrellink %}
diff --git a/Doc/tutorial/classes.rst b/Doc/tutorial/classes.rst
index 9ecbf8b87efbf1..0e5a9402bc50e3 100644
--- a/Doc/tutorial/classes.rst
+++ b/Doc/tutorial/classes.rst
@@ -119,12 +119,12 @@ directly accessible:
* the innermost scope, which is searched first, contains the local names
* the scopes of any enclosing functions, which are searched starting with the
- nearest enclosing scope, contains non-local, but also non-global names
+ nearest enclosing scope, contain non-local, but also non-global names
* the next-to-last scope contains the current module's global names
* the outermost scope (searched last) is the namespace containing built-in names
If a name is declared global, then all references and assignments go directly to
-the middle scope containing the module's global names. To rebind variables
+the next-to-last scope containing the module's global names. To rebind variables
found outside of the innermost scope, the :keyword:`nonlocal` statement can be
used; if not declared nonlocal, those variables are read-only (an attempt to
write to such a variable will simply create a *new* local variable in the
diff --git a/Doc/using/configure.rst b/Doc/using/configure.rst
index 3df6ff4873b72e..db4bf7412292bc 100644
--- a/Doc/using/configure.rst
+++ b/Doc/using/configure.rst
@@ -24,6 +24,7 @@ Features required to build CPython:
.. versionchanged:: 3.11
C11 compiler, IEEE 754 and NaN support are now required.
+ On Windows, Visual Studio 2017 or later is required.
.. versionchanged:: 3.10
OpenSSL 1.1.1 is now required.
diff --git a/Doc/whatsnew/2.7.rst b/Doc/whatsnew/2.7.rst
index 276ab63b97f8a9..810a2cd2537c34 100644
--- a/Doc/whatsnew/2.7.rst
+++ b/Doc/whatsnew/2.7.rst
@@ -1331,6 +1331,7 @@ changes, or look through the Subversion logs for all the details.
>>> from inspect import getcallargs
>>> def f(a, b=1, *pos, **named):
... pass
+ ...
>>> getcallargs(f, 1, 2, 3)
{'a': 1, 'b': 2, 'pos': (3,), 'named': {}}
>>> getcallargs(f, a=2, x=4)
diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst
index 6f5ce818d961a5..73dc462f0b3303 100644
--- a/Doc/whatsnew/3.12.rst
+++ b/Doc/whatsnew/3.12.rst
@@ -686,6 +686,18 @@ Changes in the Python API
around process-global resources, which are best managed from the main interpreter.
(Contributed by Dong-hee Na in :gh:`99127`.)
+* :func:`asyncio.get_event_loop` and many other :mod:`asyncio` functions like
+ :func:`~asyncio.ensure_future`, :func:`~asyncio.shield` or
+ :func:`~asyncio.gather`, and also the
+ :meth:`~asyncio.BaseDefaultEventLoopPolicy.get_event_loop` method of
+ :class:`~asyncio.BaseDefaultEventLoopPolicy` now raise a :exc:`RuntimeError`
+ if called when there is no running event loop and the current event loop was
+ not set.
+ Previously they implicitly created and set a new current event loop.
+ :exc:`DeprecationWarning` is no longer emitted if there is no running
+ event loop but the current event loop is set in the policy.
+ (Contributed by Serhiy Storchaka in :gh:`93453`.)
+
Build Changes
=============
@@ -839,6 +851,11 @@ Porting to Python 3.12
:class:`bytes` type is accepted for bytes strings.
(Contributed by Victor Stinner in :gh:`98393`.)
+* The :c:macro:`Py_CLEAR`, :c:macro:`Py_SETREF` and :c:macro:`Py_XSETREF`
+ macros now only evaluate their arguments once. If an argument has side
+ effects, these side effects are no longer duplicated.
+ (Contributed by Victor Stinner in :gh:`98724`.)
+
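Editorial note: a minimal sketch (not part of the patch) of the single-evaluation guarantee
described in the porting note above; the ``cache`` array and ``demo_single_evaluation()``
function are made up for illustration::

   #include <Python.h>

   static void
   demo_single_evaluation(void)
   {
       PyObject *cache[8] = {NULL};   /* hypothetical array of owned references */
       Py_ssize_t i = 0;

       cache[0] = Py_NewRef(Py_None);

       /* Before 3.12 the Py_CLEAR() expansion repeated its argument, so a side
          effect such as the post-increment below could run more than once.
          As of 3.12 the argument is evaluated exactly once. */
       Py_CLEAR(cache[i++]);

       /* The same guarantee applies to Py_SETREF() and Py_XSETREF(). */
       Py_XSETREF(cache[i], Py_NewRef(Py_None));
       Py_CLEAR(cache[i]);
   }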
Deprecated
----------
diff --git a/Doc/whatsnew/3.2.rst b/Doc/whatsnew/3.2.rst
index 6037db9f954d26..1b1455b72b9291 100644
--- a/Doc/whatsnew/3.2.rst
+++ b/Doc/whatsnew/3.2.rst
@@ -468,6 +468,7 @@ Some smaller changes made to the core Python language are:
>>> class LowerCasedDict(dict):
... def __getitem__(self, key):
... return dict.__getitem__(self, key.lower())
+ ...
>>> lcd = LowerCasedDict(part='widgets', quantity=10)
>>> 'There are {QUANTITY} {Part} in stock'.format_map(lcd)
'There are 10 widgets in stock'
@@ -475,6 +476,7 @@ Some smaller changes made to the core Python language are:
>>> class PlaceholderDict(dict):
... def __missing__(self, key):
... return '<{}>'.format(key)
+ ...
>>> 'Hello {name}, welcome to {location}'.format_map(PlaceholderDict())
'Hello <name>, welcome to <location>'
@@ -1886,6 +1888,7 @@ inspect
>>> from inspect import getgeneratorstate
>>> def gen():
... yield 'demo'
+ ...
>>> g = gen()
>>> getgeneratorstate(g)
'GEN_CREATED'
diff --git a/Doc/whatsnew/3.3.rst b/Doc/whatsnew/3.3.rst
index 96a632577b2c56..9e8d42469b019c 100644
--- a/Doc/whatsnew/3.3.rst
+++ b/Doc/whatsnew/3.3.rst
@@ -560,6 +560,7 @@ Example with (non-bound) methods::
>>> class C:
... def meth(self):
... pass
+ ...
>>> C.meth.__name__
'meth'
>>> C.meth.__qualname__
diff --git a/Include/cpython/code.h b/Include/cpython/code.h
index f11d099e0379ef..fc7c5ed702439f 100644
--- a/Include/cpython/code.h
+++ b/Include/cpython/code.h
@@ -87,6 +87,7 @@ typedef struct {
int co_nplaincellvars; /* number of non-arg cell variables */ \
int co_ncellvars; /* total number of cell variables */ \
int co_nfreevars; /* number of free variables */ \
+ uint32_t co_version; /* version number */ \
\
PyObject *co_localsplusnames; /* tuple mapping offsets to names */ \
PyObject *co_localspluskinds; /* Bytes mapping to local kinds (one byte \
diff --git a/Include/cpython/object.h b/Include/cpython/object.h
index 3abfcb7d44f0fb..4263370861302b 100644
--- a/Include/cpython/object.h
+++ b/Include/cpython/object.h
@@ -305,38 +305,69 @@ _PyObject_GenericSetAttrWithDict(PyObject *, PyObject *,
PyAPI_FUNC(PyObject *) _PyObject_FunctionStr(PyObject *);
-/* Safely decref `op` and set `op` to `op2`.
+/* Safely decref `dst` and set `dst` to `src`.
*
* As in case of Py_CLEAR "the obvious" code can be deadly:
*
- * Py_DECREF(op);
- * op = op2;
+ * Py_DECREF(dst);
+ * dst = src;
*
* The safe way is:
*
- * Py_SETREF(op, op2);
+ * Py_SETREF(dst, src);
*
- * That arranges to set `op` to `op2` _before_ decref'ing, so that any code
- * triggered as a side-effect of `op` getting torn down no longer believes
- * `op` points to a valid object.
+ * That arranges to set `dst` to `src` _before_ decref'ing, so that any code
+ * triggered as a side-effect of `dst` getting torn down no longer believes
+ * `dst` points to a valid object.
*
- * Py_XSETREF is a variant of Py_SETREF that uses Py_XDECREF instead of
- * Py_DECREF.
+ * Temporary variables are used to evaluate macro arguments only once and so
+ * avoid the duplication of side effects. _Py_TYPEOF() or memcpy() is used to
+ * avoid a miscompilation caused by type punning. See the Py_CLEAR() comment
+ * for implementation details about type punning.
+ *
+ * The memcpy() implementation does not emit a compiler warning if 'src' does
+ * not have the same type as 'dst': any pointer type is accepted for 'src'.
*/
-
-#define Py_SETREF(op, op2) \
- do { \
- PyObject *_py_tmp = _PyObject_CAST(op); \
- (op) = (op2); \
- Py_DECREF(_py_tmp); \
+#ifdef _Py_TYPEOF
+#define Py_SETREF(dst, src) \
+ do { \
+ _Py_TYPEOF(dst)* _tmp_dst_ptr = &(dst); \
+ _Py_TYPEOF(dst) _tmp_old_dst = (*_tmp_dst_ptr); \
+ *_tmp_dst_ptr = (src); \
+ Py_DECREF(_tmp_old_dst); \
} while (0)
+#else
+#define Py_SETREF(dst, src) \
+ do { \
+ PyObject **_tmp_dst_ptr = _Py_CAST(PyObject**, &(dst)); \
+ PyObject *_tmp_old_dst = (*_tmp_dst_ptr); \
+ PyObject *_tmp_src = _PyObject_CAST(src); \
+ memcpy(_tmp_dst_ptr, &_tmp_src, sizeof(PyObject*)); \
+ Py_DECREF(_tmp_old_dst); \
+ } while (0)
+#endif
-#define Py_XSETREF(op, op2) \
- do { \
- PyObject *_py_tmp = _PyObject_CAST(op); \
- (op) = (op2); \
- Py_XDECREF(_py_tmp); \
+/* Py_XSETREF() is a variant of Py_SETREF() that uses Py_XDECREF() instead of
+ * Py_DECREF().
+ */
+#ifdef _Py_TYPEOF
+#define Py_XSETREF(dst, src) \
+ do { \
+ _Py_TYPEOF(dst)* _tmp_dst_ptr = &(dst); \
+ _Py_TYPEOF(dst) _tmp_old_dst = (*_tmp_dst_ptr); \
+ *_tmp_dst_ptr = (src); \
+ Py_XDECREF(_tmp_old_dst); \
+ } while (0)
+#else
+#define Py_XSETREF(dst, src) \
+ do { \
+ PyObject **_tmp_dst_ptr = _Py_CAST(PyObject**, &(dst)); \
+ PyObject *_tmp_old_dst = (*_tmp_dst_ptr); \
+ PyObject *_tmp_src = _PyObject_CAST(src); \
+ memcpy(_tmp_dst_ptr, &_tmp_src, sizeof(PyObject*)); \
+ Py_XDECREF(_tmp_old_dst); \
} while (0)
+#endif
PyAPI_DATA(PyTypeObject) _PyNone_Type;
diff --git a/Include/cpython/pystate.h b/Include/cpython/pystate.h
index 0f56b1f2190564..0117c23f518cdb 100644
--- a/Include/cpython/pystate.h
+++ b/Include/cpython/pystate.h
@@ -353,6 +353,9 @@ PyAPI_FUNC(const PyConfig*) _Py_GetConfig(void);
// is necessary to pass safely between interpreters in the same process.
typedef struct _xid _PyCrossInterpreterData;
+typedef PyObject *(*xid_newobjectfunc)(_PyCrossInterpreterData *);
+typedef void (*xid_freefunc)(void *);
+
struct _xid {
// data is the cross-interpreter-safe derivation of a Python object
// (see _PyObject_GetCrossInterpreterData). It will be NULL if the
@@ -379,7 +382,7 @@ struct _xid {
// interpreter given the data. The resulting object (a new
// reference) will be equivalent to the original object. This field
// is required.
- PyObject *(*new_object)(_PyCrossInterpreterData *);
+ xid_newobjectfunc new_object;
// free is called when the data is released. If it is NULL then
// nothing will be done to free the data. For some types this is
// okay (e.g. bytes) and for those types this field should be set
@@ -389,9 +392,20 @@ struct _xid {
// leak. In that case, at the very least this field should be set
// to PyMem_RawFree (the default if not explicitly set to NULL).
// The call will happen with the original interpreter activated.
- void (*free)(void *);
+ xid_freefunc free;
};
+PyAPI_FUNC(void) _PyCrossInterpreterData_Init(
+ _PyCrossInterpreterData *data,
+ PyInterpreterState *interp, void *shared, PyObject *obj,
+ xid_newobjectfunc new_object);
+PyAPI_FUNC(int) _PyCrossInterpreterData_InitWithSize(
+ _PyCrossInterpreterData *,
+ PyInterpreterState *interp, const size_t, PyObject *,
+ xid_newobjectfunc);
+PyAPI_FUNC(void) _PyCrossInterpreterData_Clear(
+ PyInterpreterState *, _PyCrossInterpreterData *);
+
PyAPI_FUNC(int) _PyObject_GetCrossInterpreterData(PyObject *, _PyCrossInterpreterData *);
PyAPI_FUNC(PyObject *) _PyCrossInterpreterData_NewObject(_PyCrossInterpreterData *);
PyAPI_FUNC(int) _PyCrossInterpreterData_Release(_PyCrossInterpreterData *);
@@ -400,7 +414,8 @@ PyAPI_FUNC(int) _PyObject_CheckCrossInterpreterData(PyObject *);
/* cross-interpreter data registry */
-typedef int (*crossinterpdatafunc)(PyObject *, _PyCrossInterpreterData *);
+typedef int (*crossinterpdatafunc)(PyThreadState *tstate, PyObject *,
+ _PyCrossInterpreterData *);
PyAPI_FUNC(int) _PyCrossInterpreterData_RegisterClass(PyTypeObject *, crossinterpdatafunc);
PyAPI_FUNC(int) _PyCrossInterpreterData_UnregisterClass(PyTypeObject *);
diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h
index a75336f590e81b..75a74ffa2f9dff 100644
--- a/Include/cpython/unicodeobject.h
+++ b/Include/cpython/unicodeobject.h
@@ -231,7 +231,7 @@ enum PyUnicode_Kind {
// new compiler warnings on "kind < PyUnicode_KIND(str)" (compare signed and
// unsigned numbers) where kind type is an int or on
// "unsigned int kind = PyUnicode_KIND(str)" (cast signed to unsigned).
-#define PyUnicode_KIND(op) (_PyASCIIObject_CAST(op)->state.kind)
+#define PyUnicode_KIND(op) _Py_RVALUE(_PyASCIIObject_CAST(op)->state.kind)
/* Return a void pointer to the raw unicode buffer. */
static inline void* _PyUnicode_COMPACT_DATA(PyObject *op) {
diff --git a/Include/exports.h b/Include/exports.h
index fc1a5c5ead6276..59373c39ff757c 100644
--- a/Include/exports.h
+++ b/Include/exports.h
@@ -2,9 +2,15 @@
#define Py_EXPORTS_H
#if defined(_WIN32) || defined(__CYGWIN__)
- #define Py_IMPORTED_SYMBOL __declspec(dllimport)
- #define Py_EXPORTED_SYMBOL __declspec(dllexport)
- #define Py_LOCAL_SYMBOL
+ #if defined(Py_ENABLE_SHARED)
+ #define Py_IMPORTED_SYMBOL __declspec(dllimport)
+ #define Py_EXPORTED_SYMBOL __declspec(dllexport)
+ #define Py_LOCAL_SYMBOL
+ #else
+ #define Py_IMPORTED_SYMBOL
+ #define Py_EXPORTED_SYMBOL
+ #define Py_LOCAL_SYMBOL
+ #endif
#else
/*
* If we only ever used gcc >= 5, we could use __has_attribute(visibility)
diff --git a/Include/internal/pycore_ceval_state.h b/Include/internal/pycore_ceval_state.h
new file mode 100644
index 00000000000000..9ba42eb03b2676
--- /dev/null
+++ b/Include/internal/pycore_ceval_state.h
@@ -0,0 +1,100 @@
+#ifndef Py_INTERNAL_CEVAL_STATE_H
+#define Py_INTERNAL_CEVAL_STATE_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
+
+#include "pycore_atomic.h" /* _Py_atomic_address */
+#include "pycore_gil.h" // struct _gil_runtime_state
+
+
+typedef enum {
+ PERF_STATUS_FAILED = -1, // Perf trampoline is in an invalid state
+ PERF_STATUS_NO_INIT = 0, // Perf trampoline is not initialized
+ PERF_STATUS_OK = 1, // Perf trampoline is ready to be executed
+} perf_status_t;
+
+
+#ifdef PY_HAVE_PERF_TRAMPOLINE
+struct code_arena_st;
+
+struct trampoline_api_st {
+ void* (*init_state)(void);
+ void (*write_state)(void* state, const void *code_addr,
+ unsigned int code_size, PyCodeObject* code);
+ int (*free_state)(void* state);
+ void *state;
+};
+#endif
+
+struct _ceval_runtime_state {
+ struct {
+#ifdef PY_HAVE_PERF_TRAMPOLINE
+ perf_status_t status;
+ Py_ssize_t extra_code_index;
+ struct code_arena_st *code_arena;
+ struct trampoline_api_st trampoline_api;
+ FILE *map_file;
+#else
+ int _not_used;
+#endif
+ } perf;
+ /* Request for checking signals. It is shared by all interpreters (see
+ bpo-40513). Any thread of any interpreter can receive a signal, but only
+ the main thread of the main interpreter can handle signals: see
+ _Py_ThreadCanHandleSignals(). */
+ _Py_atomic_int signals_pending;
+ struct _gil_runtime_state gil;
+};
+
+#ifdef PY_HAVE_PERF_TRAMPOLINE
+# define _PyEval_RUNTIME_PERF_INIT \
+ { \
+ .status = PERF_STATUS_NO_INIT, \
+ .extra_code_index = -1, \
+ }
+#else
+# define _PyEval_RUNTIME_PERF_INIT {0}
+#endif
+
+
+struct _pending_calls {
+ int busy;
+ PyThread_type_lock lock;
+ /* Request for running pending calls. */
+ _Py_atomic_int calls_to_do;
+ /* Request for looking at the `async_exc` field of the current
+ thread state.
+ Guarded by the GIL. */
+ int async_exc;
+#define NPENDINGCALLS 32
+ struct {
+ int (*func)(void *);
+ void *arg;
+ } calls[NPENDINGCALLS];
+ int first;
+ int last;
+};
+
+struct _ceval_state {
+ int recursion_limit;
+ /* This single variable consolidates all requests to break out of
+ the fast path in the eval loop. */
+ _Py_atomic_int eval_breaker;
+ /* Request for dropping the GIL */
+ _Py_atomic_int gil_drop_request;
+ /* The GC is ready to be executed */
+ _Py_atomic_int gc_scheduled;
+ struct _pending_calls pending;
+};
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_INTERNAL_CEVAL_STATE_H */
diff --git a/Include/internal/pycore_code.h b/Include/internal/pycore_code.h
index 357fc85a95cf15..f22fd45f8319d5 100644
--- a/Include/internal/pycore_code.h
+++ b/Include/internal/pycore_code.h
@@ -474,6 +474,8 @@ typedef struct _PyShimCodeDef {
extern PyCodeObject *
_Py_MakeShimCode(const _PyShimCodeDef *code);
+extern uint32_t _Py_next_func_version;
+
#ifdef __cplusplus
}
diff --git a/Include/internal/pycore_dtoa.h b/Include/internal/pycore_dtoa.h
index fdc6e74ecd25e3..67189cf0ade665 100644
--- a/Include/internal/pycore_dtoa.h
+++ b/Include/internal/pycore_dtoa.h
@@ -25,7 +25,7 @@ Bigint {
#ifdef Py_USING_MEMORY_DEBUGGER
struct _dtoa_runtime_state {
- int _not_used;
+ int _not_used;
};
#define _dtoa_runtime_state_INIT {0}
@@ -41,9 +41,12 @@ struct _dtoa_runtime_state {
((PRIVATE_MEM+sizeof(double)-1)/sizeof(double))
struct _dtoa_runtime_state {
- struct Bigint *freelist[Bigint_Kmax+1];
- double preallocated[Bigint_PREALLOC_SIZE];
- double *preallocated_next;
+ /* p5s is a linked list of powers of 5 of the form 5**(2**i), i >= 2 */
+ // XXX This should be freed during runtime fini.
+ struct Bigint *p5s;
+ struct Bigint *freelist[Bigint_Kmax+1];
+ double preallocated[Bigint_PREALLOC_SIZE];
+ double *preallocated_next;
};
#define _dtoa_runtime_state_INIT(runtime) \
{ \
diff --git a/Include/internal/pycore_faulthandler.h b/Include/internal/pycore_faulthandler.h
new file mode 100644
index 00000000000000..e6aec7745a6479
--- /dev/null
+++ b/Include/internal/pycore_faulthandler.h
@@ -0,0 +1,99 @@
+#ifndef Py_INTERNAL_FAULTHANDLER_H
+#define Py_INTERNAL_FAULTHANDLER_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
+#ifdef HAVE_SIGACTION
+# include <signal.h>
+#endif
+
+
+#ifndef MS_WINDOWS
+ /* register() is useless on Windows, because only SIGSEGV, SIGABRT and
+ SIGILL can be handled by the process, and these signals can only be used
+ with enable(), not using register() */
+# define FAULTHANDLER_USER
+#endif
+
+
+#ifdef HAVE_SIGACTION
+/* Using an alternative stack requires sigaltstack()
+ and sigaction() SA_ONSTACK */
+# ifdef HAVE_SIGALTSTACK
+# define FAULTHANDLER_USE_ALT_STACK
+# endif
+typedef struct sigaction _Py_sighandler_t;
+#else
+typedef PyOS_sighandler_t _Py_sighandler_t;
+#endif // HAVE_SIGACTION
+
+
+#ifdef FAULTHANDLER_USER
+struct faulthandler_user_signal {
+ int enabled;
+ PyObject *file;
+ int fd;
+ int all_threads;
+ int chain;
+ _Py_sighandler_t previous;
+ PyInterpreterState *interp;
+};
+#endif /* FAULTHANDLER_USER */
+
+
+struct _faulthandler_runtime_state {
+ struct {
+ int enabled;
+ PyObject *file;
+ int fd;
+ int all_threads;
+ PyInterpreterState *interp;
+#ifdef MS_WINDOWS
+ void *exc_handler;
+#endif
+ } fatal_error;
+
+ struct {
+ PyObject *file;
+ int fd;
+ PY_TIMEOUT_T timeout_us; /* timeout in microseconds */
+ int repeat;
+ PyInterpreterState *interp;
+ int exit;
+ char *header;
+ size_t header_len;
+ /* The main thread always holds this lock. It is only released when
+ faulthandler_thread() is interrupted before this thread exits, or at
+ Python exit. */
+ PyThread_type_lock cancel_event;
+ /* released by child thread when joined */
+ PyThread_type_lock running;
+ } thread;
+
+#ifdef FAULTHANDLER_USER
+ struct faulthandler_user_signal *user_signals;
+#endif
+
+#ifdef FAULTHANDLER_USE_ALT_STACK
+ stack_t stack;
+ stack_t old_stack;
+#endif
+};
+
+#define _faulthandler_runtime_state_INIT \
+ { \
+ .fatal_error = { \
+ .fd = -1, \
+ }, \
+ }
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_INTERNAL_FAULTHANDLER_H */
diff --git a/Include/internal/pycore_global_objects_fini_generated.h b/Include/internal/pycore_global_objects_fini_generated.h
index 9951fa9951e67a..6aba2f19ebde4a 100644
--- a/Include/internal/pycore_global_objects_fini_generated.h
+++ b/Include/internal/pycore_global_objects_fini_generated.h
@@ -1051,6 +1051,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) {
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(node_offset));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(ns));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(nstype));
+ _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(nt));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(null));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(number));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(obj));
@@ -1089,6 +1090,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) {
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(pos));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(pos1));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(pos2));
+ _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(posix));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(print_file_and_line));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(priority));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(progress));
diff --git a/Include/internal/pycore_global_strings.h b/Include/internal/pycore_global_strings.h
index 12144b02f45574..acb9a4fbb92dce 100644
--- a/Include/internal/pycore_global_strings.h
+++ b/Include/internal/pycore_global_strings.h
@@ -537,6 +537,7 @@ struct _Py_global_strings {
STRUCT_FOR_ID(node_offset)
STRUCT_FOR_ID(ns)
STRUCT_FOR_ID(nstype)
+ STRUCT_FOR_ID(nt)
STRUCT_FOR_ID(null)
STRUCT_FOR_ID(number)
STRUCT_FOR_ID(obj)
@@ -575,6 +576,7 @@ struct _Py_global_strings {
STRUCT_FOR_ID(pos)
STRUCT_FOR_ID(pos1)
STRUCT_FOR_ID(pos2)
+ STRUCT_FOR_ID(posix)
STRUCT_FOR_ID(print_file_and_line)
STRUCT_FOR_ID(priority)
STRUCT_FOR_ID(progress)
diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h
index ed90f6392019fa..0e3d46852f2e6d 100644
--- a/Include/internal/pycore_interp.h
+++ b/Include/internal/pycore_interp.h
@@ -12,6 +12,7 @@ extern "C" {
#include "pycore_atomic.h" // _Py_atomic_address
#include "pycore_ast_state.h" // struct ast_state
+#include "pycore_ceval_state.h" // struct _ceval_state
#include "pycore_code.h" // struct callable_cache
#include "pycore_context.h" // struct _Py_context_state
#include "pycore_dict_state.h" // struct _Py_dict_state
@@ -28,37 +29,6 @@ extern "C" {
#include "pycore_warnings.h" // struct _warnings_runtime_state
-struct _pending_calls {
- int busy;
- PyThread_type_lock lock;
- /* Request for running pending calls. */
- _Py_atomic_int calls_to_do;
- /* Request for looking at the `async_exc` field of the current
- thread state.
- Guarded by the GIL. */
- int async_exc;
-#define NPENDINGCALLS 32
- struct {
- int (*func)(void *);
- void *arg;
- } calls[NPENDINGCALLS];
- int first;
- int last;
-};
-
-struct _ceval_state {
- int recursion_limit;
- /* This single variable consolidates all requests to break out of
- the fast path in the eval loop. */
- _Py_atomic_int eval_breaker;
- /* Request for dropping the GIL */
- _Py_atomic_int gil_drop_request;
- /* The GC is ready to be executed */
- _Py_atomic_int gc_scheduled;
- struct _pending_calls pending;
-};
-
-
// atexit state
typedef struct {
PyObject *func;
diff --git a/Include/internal/pycore_obmalloc.h b/Include/internal/pycore_obmalloc.h
index 93349d89c6ab52..a5c7f4528f9126 100644
--- a/Include/internal/pycore_obmalloc.h
+++ b/Include/internal/pycore_obmalloc.h
@@ -658,6 +658,7 @@ struct _obmalloc_usage {
struct _obmalloc_state {
+ int dump_debug_stats;
struct _obmalloc_pools pools;
struct _obmalloc_mgmt mgmt;
struct _obmalloc_usage usage;
diff --git a/Include/internal/pycore_obmalloc_init.h b/Include/internal/pycore_obmalloc_init.h
index c0fb057d06652b..c9f197e72de9f5 100644
--- a/Include/internal/pycore_obmalloc_init.h
+++ b/Include/internal/pycore_obmalloc_init.h
@@ -56,6 +56,7 @@ extern "C" {
#define _obmalloc_state_INIT(obmalloc) \
{ \
+ .dump_debug_stats = -1, \
.pools = { \
.used = _obmalloc_pools_INIT(obmalloc.pools), \
}, \
diff --git a/Include/internal/pycore_opcode.h b/Include/internal/pycore_opcode.h
index 0d31ca166a7d2e..da8a272f2fa2d0 100644
--- a/Include/internal/pycore_opcode.h
+++ b/Include/internal/pycore_opcode.h
@@ -125,6 +125,7 @@ const uint8_t _PyOpcode_Deopt[256] = {
[FOR_ITER_GEN] = FOR_ITER,
[FOR_ITER_LIST] = FOR_ITER,
[FOR_ITER_RANGE] = FOR_ITER,
+ [FOR_ITER_TUPLE] = FOR_ITER,
[GET_AITER] = GET_AITER,
[GET_ANEXT] = GET_ANEXT,
[GET_AWAITABLE] = GET_AWAITABLE,
@@ -293,31 +294,31 @@ static const char *const _PyOpcode_OpName[263] = {
[FOR_ITER_LIST] = "FOR_ITER_LIST",
[STORE_SUBSCR] = "STORE_SUBSCR",
[DELETE_SUBSCR] = "DELETE_SUBSCR",
- [FOR_ITER_RANGE] = "FOR_ITER_RANGE",
+ [FOR_ITER_TUPLE] = "FOR_ITER_TUPLE",
[STOPITERATION_ERROR] = "STOPITERATION_ERROR",
+ [FOR_ITER_RANGE] = "FOR_ITER_RANGE",
[FOR_ITER_GEN] = "FOR_ITER_GEN",
[LOAD_ATTR_CLASS] = "LOAD_ATTR_CLASS",
[LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN] = "LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN",
- [LOAD_ATTR_INSTANCE_VALUE] = "LOAD_ATTR_INSTANCE_VALUE",
[GET_ITER] = "GET_ITER",
[GET_YIELD_FROM_ITER] = "GET_YIELD_FROM_ITER",
[PRINT_EXPR] = "PRINT_EXPR",
[LOAD_BUILD_CLASS] = "LOAD_BUILD_CLASS",
+ [LOAD_ATTR_INSTANCE_VALUE] = "LOAD_ATTR_INSTANCE_VALUE",
[LOAD_ATTR_MODULE] = "LOAD_ATTR_MODULE",
- [LOAD_ATTR_PROPERTY] = "LOAD_ATTR_PROPERTY",
[LOAD_ASSERTION_ERROR] = "LOAD_ASSERTION_ERROR",
[RETURN_GENERATOR] = "RETURN_GENERATOR",
+ [LOAD_ATTR_PROPERTY] = "LOAD_ATTR_PROPERTY",
[LOAD_ATTR_SLOT] = "LOAD_ATTR_SLOT",
[LOAD_ATTR_WITH_HINT] = "LOAD_ATTR_WITH_HINT",
[LOAD_ATTR_METHOD_LAZY_DICT] = "LOAD_ATTR_METHOD_LAZY_DICT",
[LOAD_ATTR_METHOD_NO_DICT] = "LOAD_ATTR_METHOD_NO_DICT",
[LOAD_ATTR_METHOD_WITH_DICT] = "LOAD_ATTR_METHOD_WITH_DICT",
- [LOAD_ATTR_METHOD_WITH_VALUES] = "LOAD_ATTR_METHOD_WITH_VALUES",
[LIST_TO_TUPLE] = "LIST_TO_TUPLE",
[RETURN_VALUE] = "RETURN_VALUE",
[IMPORT_STAR] = "IMPORT_STAR",
[SETUP_ANNOTATIONS] = "SETUP_ANNOTATIONS",
- [LOAD_CONST__LOAD_FAST] = "LOAD_CONST__LOAD_FAST",
+ [LOAD_ATTR_METHOD_WITH_VALUES] = "LOAD_ATTR_METHOD_WITH_VALUES",
[ASYNC_GEN_WRAP] = "ASYNC_GEN_WRAP",
[PREP_RERAISE_STAR] = "PREP_RERAISE_STAR",
[POP_EXCEPT] = "POP_EXCEPT",
@@ -344,7 +345,7 @@ static const char *const _PyOpcode_OpName[263] = {
[JUMP_FORWARD] = "JUMP_FORWARD",
[JUMP_IF_FALSE_OR_POP] = "JUMP_IF_FALSE_OR_POP",
[JUMP_IF_TRUE_OR_POP] = "JUMP_IF_TRUE_OR_POP",
- [LOAD_FAST__LOAD_CONST] = "LOAD_FAST__LOAD_CONST",
+ [LOAD_CONST__LOAD_FAST] = "LOAD_CONST__LOAD_FAST",
[POP_JUMP_IF_FALSE] = "POP_JUMP_IF_FALSE",
[POP_JUMP_IF_TRUE] = "POP_JUMP_IF_TRUE",
[LOAD_GLOBAL] = "LOAD_GLOBAL",
@@ -352,7 +353,7 @@ static const char *const _PyOpcode_OpName[263] = {
[CONTAINS_OP] = "CONTAINS_OP",
[RERAISE] = "RERAISE",
[COPY] = "COPY",
- [LOAD_FAST__LOAD_FAST] = "LOAD_FAST__LOAD_FAST",
+ [LOAD_FAST__LOAD_CONST] = "LOAD_FAST__LOAD_CONST",
[BINARY_OP] = "BINARY_OP",
[SEND] = "SEND",
[LOAD_FAST] = "LOAD_FAST",
@@ -372,9 +373,9 @@ static const char *const _PyOpcode_OpName[263] = {
[STORE_DEREF] = "STORE_DEREF",
[DELETE_DEREF] = "DELETE_DEREF",
[JUMP_BACKWARD] = "JUMP_BACKWARD",
- [LOAD_GLOBAL_BUILTIN] = "LOAD_GLOBAL_BUILTIN",
+ [LOAD_FAST__LOAD_FAST] = "LOAD_FAST__LOAD_FAST",
[CALL_FUNCTION_EX] = "CALL_FUNCTION_EX",
- [LOAD_GLOBAL_MODULE] = "LOAD_GLOBAL_MODULE",
+ [LOAD_GLOBAL_BUILTIN] = "LOAD_GLOBAL_BUILTIN",
[EXTENDED_ARG] = "EXTENDED_ARG",
[LIST_APPEND] = "LIST_APPEND",
[SET_ADD] = "SET_ADD",
@@ -384,24 +385,24 @@ static const char *const _PyOpcode_OpName[263] = {
[YIELD_VALUE] = "YIELD_VALUE",
[RESUME] = "RESUME",
[MATCH_CLASS] = "MATCH_CLASS",
+ [LOAD_GLOBAL_MODULE] = "LOAD_GLOBAL_MODULE",
[STORE_ATTR_INSTANCE_VALUE] = "STORE_ATTR_INSTANCE_VALUE",
- [STORE_ATTR_SLOT] = "STORE_ATTR_SLOT",
[FORMAT_VALUE] = "FORMAT_VALUE",
[BUILD_CONST_KEY_MAP] = "BUILD_CONST_KEY_MAP",
[BUILD_STRING] = "BUILD_STRING",
+ [STORE_ATTR_SLOT] = "STORE_ATTR_SLOT",
[STORE_ATTR_WITH_HINT] = "STORE_ATTR_WITH_HINT",
[STORE_FAST__LOAD_FAST] = "STORE_FAST__LOAD_FAST",
[STORE_FAST__STORE_FAST] = "STORE_FAST__STORE_FAST",
- [STORE_SUBSCR_DICT] = "STORE_SUBSCR_DICT",
[LIST_EXTEND] = "LIST_EXTEND",
[SET_UPDATE] = "SET_UPDATE",
[DICT_MERGE] = "DICT_MERGE",
[DICT_UPDATE] = "DICT_UPDATE",
+ [STORE_SUBSCR_DICT] = "STORE_SUBSCR_DICT",
[STORE_SUBSCR_LIST_INT] = "STORE_SUBSCR_LIST_INT",
[UNPACK_SEQUENCE_LIST] = "UNPACK_SEQUENCE_LIST",
[UNPACK_SEQUENCE_TUPLE] = "UNPACK_SEQUENCE_TUPLE",
[UNPACK_SEQUENCE_TWO_TUPLE] = "UNPACK_SEQUENCE_TWO_TUPLE",
- [170] = "<170>",
[CALL] = "CALL",
[KW_NAMES] = "KW_NAMES",
[173] = "<173>",
@@ -498,7 +499,6 @@ static const char *const _PyOpcode_OpName[263] = {
#endif
#define EXTRA_CASES \
- case 170: \
case 173: \
case 174: \
case 175: \
diff --git a/Include/internal/pycore_parser.h b/Include/internal/pycore_parser.h
index e2de24e2ca9734..2d2b56bd824cb4 100644
--- a/Include/internal/pycore_parser.h
+++ b/Include/internal/pycore_parser.h
@@ -8,12 +8,31 @@ extern "C" {
# error "this header requires Py_BUILD_CORE define"
#endif
+
+#include "pycore_pyarena.h" // PyArena
+
+
+#ifdef Py_DEBUG
+#define _PYPEGEN_NSTATISTICS 2000
+#endif
+
+struct _parser_runtime_state {
+#ifdef Py_DEBUG
+ long memo_statistics[_PYPEGEN_NSTATISTICS];
+#else
+ int _not_used;
+#endif
+};
+
+
+
extern struct _mod* _PyParser_ASTFromString(
const char *str,
PyObject* filename,
int mode,
PyCompilerFlags *flags,
PyArena *arena);
+
extern struct _mod* _PyParser_ASTFromFile(
FILE *fp,
PyObject *filename_ob,
@@ -25,6 +44,7 @@ extern struct _mod* _PyParser_ASTFromFile(
int *errcode,
PyArena *arena);
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/internal/pycore_pylifecycle.h b/Include/internal/pycore_pylifecycle.h
index 4c0ffa7a9b1ab7..370e4cbd59f976 100644
--- a/Include/internal/pycore_pylifecycle.h
+++ b/Include/internal/pycore_pylifecycle.h
@@ -44,6 +44,7 @@ extern void _PySys_Fini(PyInterpreterState *interp);
extern int _PyBuiltins_AddExceptions(PyObject * bltinmod);
extern PyStatus _Py_HashRandomization_Init(const PyConfig *);
+extern PyStatus _PyTime_Init(void);
extern PyStatus _PyImportZip_Init(PyThreadState *tstate);
extern PyStatus _PyGC_Init(PyInterpreterState *interp);
extern PyStatus _PyAtExit_Init(PyInterpreterState *interp);
diff --git a/Include/internal/pycore_pymem.h b/Include/internal/pycore_pymem.h
index 5749af7465f6f0..4cc953d8d779c9 100644
--- a/Include/internal/pycore_pymem.h
+++ b/Include/internal/pycore_pymem.h
@@ -90,28 +90,6 @@ PyAPI_FUNC(int) _PyMem_GetAllocatorName(
PYMEM_ALLOCATOR_NOT_SET does nothing. */
PyAPI_FUNC(int) _PyMem_SetupAllocators(PyMemAllocatorName allocator);
-struct _PyTraceMalloc_Config {
- /* Module initialized?
- Variable protected by the GIL */
- enum {
- TRACEMALLOC_NOT_INITIALIZED,
- TRACEMALLOC_INITIALIZED,
- TRACEMALLOC_FINALIZED
- } initialized;
-
- /* Is tracemalloc tracing memory allocations?
- Variable protected by the GIL */
- int tracing;
-
- /* limit of the number of frames in a traceback, 1 by default.
- Variable protected by the GIL. */
- int max_nframe;
-};
-
-#define _PyTraceMalloc_Config_INIT \
- {.initialized = TRACEMALLOC_NOT_INITIALIZED, \
- .tracing = 0, \
- .max_nframe = 1}
#ifdef __cplusplus
}
diff --git a/Include/internal/pycore_pythread.h b/Include/internal/pycore_pythread.h
new file mode 100644
index 00000000000000..f53921494c158f
--- /dev/null
+++ b/Include/internal/pycore_pythread.h
@@ -0,0 +1,81 @@
+#ifndef Py_INTERNAL_PYTHREAD_H
+#define Py_INTERNAL_PYTHREAD_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
+
+#ifndef _POSIX_THREADS
+/* This means pthreads are not implemented in libc headers, hence the macro
+ not present in unistd.h. But they still can be implemented as an external
+ library (e.g. gnu pth in pthread emulation) */
+# ifdef HAVE_PTHREAD_H
+# include <pthread.h> /* _POSIX_THREADS */
+# endif
+# ifndef _POSIX_THREADS
+/* Check if we're running on HP-UX and _SC_THREADS is defined. If so, then
+ enough of the Posix threads package is implemented to support python
+ threads.
+
+ This is valid for HP-UX 11.23 running on an ia64 system. If needed, add
+ a check of __ia64 to verify that we're running on an ia64 system instead
+ of a pa-risc system.
+*/
+# ifdef __hpux
+# ifdef _SC_THREADS
+# define _POSIX_THREADS
+# endif
+# endif
+# endif /* _POSIX_THREADS */
+#endif /* _POSIX_THREADS */
+
+#if defined(_POSIX_THREADS) || defined(HAVE_PTHREAD_STUBS)
+# define _USE_PTHREADS
+#endif
+
+#if defined(_USE_PTHREADS) && defined(HAVE_PTHREAD_CONDATTR_SETCLOCK) && defined(HAVE_CLOCK_GETTIME) && defined(CLOCK_MONOTONIC)
+// monotonic is supported statically. It doesn't mean it works on runtime.
+# define CONDATTR_MONOTONIC
+#endif
+
+
+#if defined(HAVE_PTHREAD_STUBS)
+// pthread_key
+struct py_stub_tls_entry {
+ bool in_use;
+ void *value;
+};
+#endif
+
+struct _pythread_runtime_state {
+ int initialized;
+
+#ifdef _USE_PTHREADS
+ // This matches when thread_pthread.h is used.
+ struct {
+ /* NULL when pthread_condattr_setclock(CLOCK_MONOTONIC) is not supported. */
+ pthread_condattr_t *ptr;
+# ifdef CONDATTR_MONOTONIC
+ /* The value to which condattr_monotonic is set. */
+ pthread_condattr_t val;
+# endif
+ } _condattr_monotonic;
+
+#endif // USE_PTHREADS
+
+#if defined(HAVE_PTHREAD_STUBS)
+ struct {
+ struct py_stub_tls_entry tls_entries[PTHREAD_KEYS_MAX];
+ } stubs;
+#endif
+};
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_INTERNAL_PYTHREAD_H */
diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h
index c1829cb1bdadeb..92ed45956c99b3 100644
--- a/Include/internal/pycore_runtime.h
+++ b/Include/internal/pycore_runtime.h
@@ -9,17 +9,23 @@ extern "C" {
#endif
#include "pycore_atomic.h" /* _Py_atomic_address */
+#include "pycore_ceval_state.h" // struct _ceval_runtime_state
#include "pycore_dict_state.h" // struct _Py_dict_runtime_state
#include "pycore_dtoa.h" // struct _dtoa_runtime_state
#include "pycore_floatobject.h" // struct _Py_float_runtime_state
+#include "pycore_faulthandler.h" // struct _faulthandler_runtime_state
#include "pycore_function.h" // struct _func_runtime_state
-#include "pycore_gil.h" // struct _gil_runtime_state
#include "pycore_global_objects.h" // struct _Py_global_objects
#include "pycore_import.h" // struct _import_runtime_state
#include "pycore_interp.h" // PyInterpreterState
+#include "pycore_parser.h" // struct _parser_runtime_state
#include "pycore_pymem.h" // struct _pymem_allocators
#include "pycore_pyhash.h" // struct pyhash_runtime_state
+#include "pycore_pythread.h" // struct _pythread_runtime_state
#include "pycore_obmalloc.h" // struct obmalloc_state
+#include "pycore_signal.h" // struct _signals_runtime_state
+#include "pycore_time.h" // struct _time_runtime_state
+#include "pycore_tracemalloc.h" // struct _tracemalloc_runtime_state
#include "pycore_unicodeobject.h" // struct _Py_unicode_runtime_ids
struct _getargs_runtime_state {
@@ -29,15 +35,6 @@ struct _getargs_runtime_state {
/* ceval state */
-struct _ceval_runtime_state {
- /* Request for checking signals. It is shared by all interpreters (see
- bpo-40513). Any thread of any interpreter can receive a signal, but only
- the main thread of the main interpreter can handle signals: see
- _Py_ThreadCanHandleSignals(). */
- _Py_atomic_int signals_pending;
- struct _gil_runtime_state gil;
-};
-
/* GIL state */
struct _gilstate_runtime_state {
@@ -97,11 +94,9 @@ typedef struct pyruntimestate {
struct _pymem_allocators allocators;
struct _obmalloc_state obmalloc;
struct pyhash_runtime_state pyhash_state;
- struct {
- /* True if the main interpreter thread exited due to an unhandled
- * KeyboardInterrupt exception, suggesting the user pressed ^C. */
- int unhandled_keyboard_interrupt;
- } signals;
+ struct _time_runtime_state time;
+ struct _pythread_runtime_state threads;
+ struct _signals_runtime_state signals;
struct pyinterpreters {
PyThread_type_lock mutex;
@@ -129,6 +124,10 @@ typedef struct pyruntimestate {
unsigned long main_thread;
+ PyWideStringList orig_argv;
+
+ struct _parser_runtime_state parser;
+
#define NEXITFUNCS 32
void (*exitfuncs[NEXITFUNCS])(void);
int nexitfuncs;
@@ -137,11 +136,10 @@ typedef struct pyruntimestate {
struct _ceval_runtime_state ceval;
struct _gilstate_runtime_state gilstate;
struct _getargs_runtime_state getargs;
- struct {
- struct _PyTraceMalloc_Config config;
- } tracemalloc;
struct _dtoa_runtime_state dtoa;
struct _fileutils_state fileutils;
+ struct _faulthandler_runtime_state faulthandler;
+ struct _tracemalloc_runtime_state tracemalloc;
PyPreConfig preconfig;
diff --git a/Include/internal/pycore_runtime_init.h b/Include/internal/pycore_runtime_init.h
index ab53876e355fd8..1431096e2d24ba 100644
--- a/Include/internal/pycore_runtime_init.h
+++ b/Include/internal/pycore_runtime_init.h
@@ -26,6 +26,7 @@ extern "C" {
}, \
.obmalloc = _obmalloc_state_INIT(runtime.obmalloc), \
.pyhash_state = pyhash_state_INIT, \
+ .signals = _signals_RUNTIME_INIT, \
.interpreters = { \
/* This prevents interpreters from getting created \
until _PyInterpreterState_Enable() is called. */ \
@@ -41,19 +42,21 @@ extern "C" {
.header = 1, \
}, \
}, \
+ .ceval = { \
+ .perf = _PyEval_RUNTIME_PERF_INIT, \
+ }, \
.gilstate = { \
.check_enabled = 1, \
/* A TSS key must be initialized with Py_tss_NEEDS_INIT \
in accordance with the specification. */ \
.autoTSSkey = Py_tss_NEEDS_INIT, \
}, \
- .tracemalloc = { \
- .config = _PyTraceMalloc_Config_INIT, \
- }, \
.dtoa = _dtoa_runtime_state_INIT(runtime), \
.fileutils = { \
.force_ascii = -1, \
}, \
+ .faulthandler = _faulthandler_runtime_state_INIT, \
+ .tracemalloc = _tracemalloc_runtime_state_INIT, \
.float_state = { \
.float_format = _py_float_format_unknown, \
.double_format = _py_float_format_unknown, \
diff --git a/Include/internal/pycore_runtime_init_generated.h b/Include/internal/pycore_runtime_init_generated.h
index 87b0f2ed8dfa8c..6d1b8702c77698 100644
--- a/Include/internal/pycore_runtime_init_generated.h
+++ b/Include/internal/pycore_runtime_init_generated.h
@@ -1043,6 +1043,7 @@ extern "C" {
INIT_ID(node_offset), \
INIT_ID(ns), \
INIT_ID(nstype), \
+ INIT_ID(nt), \
INIT_ID(null), \
INIT_ID(number), \
INIT_ID(obj), \
@@ -1081,6 +1082,7 @@ extern "C" {
INIT_ID(pos), \
INIT_ID(pos1), \
INIT_ID(pos2), \
+ INIT_ID(posix), \
INIT_ID(print_file_and_line), \
INIT_ID(priority), \
INIT_ID(progress), \
diff --git a/Include/internal/pycore_signal.h b/Include/internal/pycore_signal.h
index b921dd170e9f6f..ca3f69d09fc0c1 100644
--- a/Include/internal/pycore_signal.h
+++ b/Include/internal/pycore_signal.h
@@ -10,8 +10,11 @@ extern "C" {
# error "this header requires Py_BUILD_CORE define"
#endif
+#include "pycore_atomic.h" // _Py_atomic_address
+
#include <signal.h> // NSIG
+
#ifdef _SIG_MAXSIG
// gh-91145: On FreeBSD, <signal.h> defines NSIG as 32: it doesn't include
// realtime signals: [SIGRTMIN,SIGRTMAX]. Use _SIG_MAXSIG instead. For
@@ -29,6 +32,66 @@ extern "C" {
# define Py_NSIG 64 // Use a reasonable default value
#endif
+#define INVALID_FD (-1)
+
+struct _signals_runtime_state {
+ volatile struct {
+ _Py_atomic_int tripped;
+ /* func is atomic to ensure that PyErr_SetInterrupt is async-signal-safe
+ * (even though it would probably be otherwise, anyway).
+ */
+ _Py_atomic_address func;
+ } handlers[Py_NSIG];
+
+ volatile struct {
+#ifdef MS_WINDOWS
+ /* This would be "SOCKET fd" if were always included.
+ It isn't so we must cast to SOCKET where appropriate. */
+ volatile int fd;
+#elif defined(__VXWORKS__)
+ int fd;
+#else
+ sig_atomic_t fd;
+#endif
+
+ int warn_on_full_buffer;
+#ifdef MS_WINDOWS
+ int use_send;
+#endif
+ } wakeup;
+
+ /* Speed up sigcheck() when none tripped */
+ _Py_atomic_int is_tripped;
+
+ /* These objects necessarily belong to the main interpreter. */
+ PyObject *default_handler;
+ PyObject *ignore_handler;
+
+#ifdef MS_WINDOWS
+ /* This would be "HANDLE sigint_event" if were always included.
+ It isn't so we must cast to HANDLE everywhere "sigint_event" is used. */
+ void *sigint_event;
+#endif
+
+ /* True if the main interpreter thread exited due to an unhandled
+ * KeyboardInterrupt exception, suggesting the user pressed ^C. */
+ int unhandled_keyboard_interrupt;
+};
+
+#ifdef MS_WINDOWS
+# define _signals_WAKEUP_INIT \
+ {.fd = INVALID_FD, .warn_on_full_buffer = 1, .use_send = 0}
+#else
+# define _signals_WAKEUP_INIT \
+ {.fd = INVALID_FD, .warn_on_full_buffer = 1}
+#endif
+
+#define _signals_RUNTIME_INIT \
+ { \
+ .wakeup = _signals_WAKEUP_INIT, \
+ }
+
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/internal/pycore_time.h b/Include/internal/pycore_time.h
new file mode 100644
index 00000000000000..949170c4493799
--- /dev/null
+++ b/Include/internal/pycore_time.h
@@ -0,0 +1,25 @@
+#ifndef Py_INTERNAL_TIME_H
+#define Py_INTERNAL_TIME_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
+
+struct _time_runtime_state {
+#ifdef HAVE_TIMES
+ int ticks_per_second_initialized;
+ long ticks_per_second;
+#else
+ int _not_used;
+#endif
+};
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_INTERNAL_TIME_H */
diff --git a/Include/internal/pycore_tracemalloc.h b/Include/internal/pycore_tracemalloc.h
new file mode 100644
index 00000000000000..08d7d1096c78ce
--- /dev/null
+++ b/Include/internal/pycore_tracemalloc.h
@@ -0,0 +1,121 @@
+#ifndef Py_INTERNAL_TRACEMALLOC_H
+#define Py_INTERNAL_TRACEMALLOC_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
+#include "pycore_hashtable.h" // _Py_hashtable_t
+
+
+/* Trace memory blocks allocated by PyMem_RawMalloc() */
+#define TRACE_RAW_MALLOC
+
+
+struct _PyTraceMalloc_Config {
+ /* Module initialized?
+ Variable protected by the GIL */
+ enum {
+ TRACEMALLOC_NOT_INITIALIZED,
+ TRACEMALLOC_INITIALIZED,
+ TRACEMALLOC_FINALIZED
+ } initialized;
+
+ /* Is tracemalloc tracing memory allocations?
+ Variable protected by the GIL */
+ int tracing;
+
+ /* limit of the number of frames in a traceback, 1 by default.
+ Variable protected by the GIL. */
+ int max_nframe;
+};
+
+
+/* Pack the frame_t structure to reduce the memory footprint on 64-bit
+ architectures: 12 bytes instead of 16. */
+struct
+#ifdef __GNUC__
+__attribute__((packed))
+#elif defined(_MSC_VER)
+#pragma pack(push, 4)
+#endif
+tracemalloc_frame {
+ /* filename cannot be NULL: "<unknown>" is used if the Python frame
+ filename is NULL */
+ PyObject *filename;
+ unsigned int lineno;
+};
+#ifdef _MSC_VER
+#pragma pack(pop)
+#endif
+
+struct tracemalloc_traceback {
+ Py_uhash_t hash;
+ /* Number of frames stored */
+ uint16_t nframe;
+ /* Total number of frames the traceback had */
+ uint16_t total_nframe;
+ struct tracemalloc_frame frames[1];
+};
+
+
+struct _tracemalloc_runtime_state {
+ struct _PyTraceMalloc_Config config;
+
+ /* Protected by the GIL */
+ struct {
+ PyMemAllocatorEx mem;
+ PyMemAllocatorEx raw;
+ PyMemAllocatorEx obj;
+ } allocators;
+
+#if defined(TRACE_RAW_MALLOC)
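+ /* Guards the fields below that are marked "Protected by TABLES_LOCK()";
+ needed because raw allocations can be traced while the GIL is not held. */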
+ PyThread_type_lock tables_lock;
+#endif
+ /* Size in bytes of currently traced memory.
+ Protected by TABLES_LOCK(). */
+ size_t traced_memory;
+ /* Peak size in bytes of traced memory.
+ Protected by TABLES_LOCK(). */
+ size_t peak_traced_memory;
+ /* Hash table used as a set to intern filenames:
+ PyObject* => PyObject*.
+ Protected by the GIL */
+ _Py_hashtable_t *filenames;
+ /* Buffer to store a new traceback in traceback_new().
+ Protected by the GIL. */
+ struct tracemalloc_traceback *traceback;
+ /* Hash table used as a set to intern tracebacks:
+ traceback_t* => traceback_t*
+ Protected by the GIL */
+ _Py_hashtable_t *tracebacks;
+ /* pointer (void*) => trace (trace_t*).
+ Protected by TABLES_LOCK(). */
+ _Py_hashtable_t *traces;
+ /* domain (unsigned int) => traces (_Py_hashtable_t).
+ Protected by TABLES_LOCK(). */
+ _Py_hashtable_t *domains;
+
+ struct tracemalloc_traceback empty_traceback;
+
+ Py_tss_t reentrant_key;
+};
+
+#define _tracemalloc_runtime_state_INIT \
+ { \
+ .config = { \
+ .initialized = TRACEMALLOC_NOT_INITIALIZED, \
+ .tracing = 0, \
+ .max_nframe = 1, \
+ }, \
+ .reentrant_key = Py_tss_NEEDS_INIT, \
+ }
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif // !Py_INTERNAL_TRACEMALLOC_H
diff --git a/Include/internal/pycore_tuple.h b/Include/internal/pycore_tuple.h
index 504c36338d9e96..edc70843b57531 100644
--- a/Include/internal/pycore_tuple.h
+++ b/Include/internal/pycore_tuple.h
@@ -67,6 +67,13 @@ struct _Py_tuple_state {
extern PyObject *_PyTuple_FromArray(PyObject *const *, Py_ssize_t);
extern PyObject *_PyTuple_FromArraySteal(PyObject *const *, Py_ssize_t);
+
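+/* Layout of the tuple iterator object from Objects/tupleobject.c, made
+ visible here so other core code can reach it_index and it_seq directly. */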
+typedef struct {
+ PyObject_HEAD
+ Py_ssize_t it_index;
+ PyTupleObject *it_seq; /* Set to NULL when iterator is exhausted */
+} _PyTupleIterObject;
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/internal/pycore_unicodeobject.h b/Include/internal/pycore_unicodeobject.h
index b315ca1ae5b64b..19faceebf1d8ee 100644
--- a/Include/internal/pycore_unicodeobject.h
+++ b/Include/internal/pycore_unicodeobject.h
@@ -9,6 +9,7 @@ extern "C" {
#endif
#include "pycore_fileutils.h" // _Py_error_handler
+#include "pycore_ucnhash.h" // _PyUnicode_Name_CAPI
void _PyUnicode_ExactDealloc(PyObject *op);
@@ -52,6 +53,8 @@ struct _Py_unicode_ids {
struct _Py_unicode_state {
struct _Py_unicode_fs_codec fs_codec;
+ _PyUnicode_Name_CAPI *ucnhash_capi;
+
// Unicode identifiers (_Py_Identifier): see _PyUnicode_FromId()
struct _Py_unicode_ids ids;
};
diff --git a/Include/internal/pycore_unicodeobject_generated.h b/Include/internal/pycore_unicodeobject_generated.h
index 80be342b5b3b44..7f407c0141b8a5 100644
--- a/Include/internal/pycore_unicodeobject_generated.h
+++ b/Include/internal/pycore_unicodeobject_generated.h
@@ -980,6 +980,8 @@ _PyUnicode_InitStaticStrings(void) {
PyUnicode_InternInPlace(&string);
string = &_Py_ID(nstype);
PyUnicode_InternInPlace(&string);
+ string = &_Py_ID(nt);
+ PyUnicode_InternInPlace(&string);
string = &_Py_ID(null);
PyUnicode_InternInPlace(&string);
string = &_Py_ID(number);
@@ -1056,6 +1058,8 @@ _PyUnicode_InitStaticStrings(void) {
PyUnicode_InternInPlace(&string);
string = &_Py_ID(pos2);
PyUnicode_InternInPlace(&string);
+ string = &_Py_ID(posix);
+ PyUnicode_InternInPlace(&string);
string = &_Py_ID(print_file_and_line);
PyUnicode_InternInPlace(&string);
string = &_Py_ID(priority);
diff --git a/Include/object.h b/Include/object.h
index 75624fe8c77a51..3774f126730005 100644
--- a/Include/object.h
+++ b/Include/object.h
@@ -598,15 +598,44 @@ static inline void Py_DECREF(PyObject *op)
* one of those can't cause problems -- but in part that relies on that
* Python integers aren't currently weakly referencable. Best practice is
* to use Py_CLEAR() even if you can't think of a reason for why you need to.
+ *
+ * gh-98724: Use a temporary variable to only evaluate the macro argument once,
+ * to avoid the duplication of side effects if the argument has side effects.
+ *
+ * gh-99701: If the PyObject* type is used with casting arguments to PyObject*,
+ * the code can be miscompiled with strict aliasing because of type punning.
+ * With strict aliasing, a compiler considers that two pointers of different
+ * types cannot read or write the same memory which enables optimization
+ * opportunities.
+ *
+ * If available, use _Py_TYPEOF() to use the 'op' type for temporary variables,
+ * and so avoid type punning. Otherwise, use memcpy() which causes type erasure
+ * and so prevents the compiler to reuse an old cached 'op' value after
+ * Py_CLEAR().
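+ *
+ * For example, Py_CLEAR(a[i++]) used to expand 'a[i++]' twice and so could
+ * increment 'i' twice; with the definitions below the argument expression is
+ * evaluated exactly once.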
*/
-#define Py_CLEAR(op) \
- do { \
- PyObject *_py_tmp = _PyObject_CAST(op); \
- if (_py_tmp != NULL) { \
- (op) = NULL; \
- Py_DECREF(_py_tmp); \
- } \
+#ifdef _Py_TYPEOF
+#define Py_CLEAR(op) \
+ do { \
+ _Py_TYPEOF(op)* _tmp_op_ptr = &(op); \
+ _Py_TYPEOF(op) _tmp_old_op = (*_tmp_op_ptr); \
+ if (_tmp_old_op != NULL) { \
+ *_tmp_op_ptr = _Py_NULL; \
+ Py_DECREF(_tmp_old_op); \
+ } \
} while (0)
+#else
+#define Py_CLEAR(op) \
+ do { \
+ PyObject **_tmp_op_ptr = _Py_CAST(PyObject**, &(op)); \
+ PyObject *_tmp_old_op = (*_tmp_op_ptr); \
+ if (_tmp_old_op != NULL) { \
+ PyObject *_null_ptr = _Py_NULL; \
+ memcpy(_tmp_op_ptr, &_null_ptr, sizeof(PyObject*)); \
+ Py_DECREF(_tmp_old_op); \
+ } \
+ } while (0)
+#endif
+
/* Function to use in case the object pointer can be NULL: */
static inline void Py_XINCREF(PyObject *op)
diff --git a/Include/opcode.h b/Include/opcode.h
index f284313d2ed756..888250ed37e8cb 100644
--- a/Include/opcode.h
+++ b/Include/opcode.h
@@ -162,34 +162,35 @@ extern "C" {
#define COMPARE_OP_INT_JUMP 57
#define COMPARE_OP_STR_JUMP 58
#define FOR_ITER_LIST 59
-#define FOR_ITER_RANGE 62
-#define FOR_ITER_GEN 64
-#define LOAD_ATTR_CLASS 65
-#define LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN 66
-#define LOAD_ATTR_INSTANCE_VALUE 67
-#define LOAD_ATTR_MODULE 72
-#define LOAD_ATTR_PROPERTY 73
-#define LOAD_ATTR_SLOT 76
-#define LOAD_ATTR_WITH_HINT 77
-#define LOAD_ATTR_METHOD_LAZY_DICT 78
-#define LOAD_ATTR_METHOD_NO_DICT 79
-#define LOAD_ATTR_METHOD_WITH_DICT 80
-#define LOAD_ATTR_METHOD_WITH_VALUES 81
-#define LOAD_CONST__LOAD_FAST 86
-#define LOAD_FAST__LOAD_CONST 113
-#define LOAD_FAST__LOAD_FAST 121
-#define LOAD_GLOBAL_BUILTIN 141
-#define LOAD_GLOBAL_MODULE 143
-#define STORE_ATTR_INSTANCE_VALUE 153
-#define STORE_ATTR_SLOT 154
-#define STORE_ATTR_WITH_HINT 158
-#define STORE_FAST__LOAD_FAST 159
-#define STORE_FAST__STORE_FAST 160
-#define STORE_SUBSCR_DICT 161
-#define STORE_SUBSCR_LIST_INT 166
-#define UNPACK_SEQUENCE_LIST 167
-#define UNPACK_SEQUENCE_TUPLE 168
-#define UNPACK_SEQUENCE_TWO_TUPLE 169
+#define FOR_ITER_TUPLE 62
+#define FOR_ITER_RANGE 64
+#define FOR_ITER_GEN 65
+#define LOAD_ATTR_CLASS 66
+#define LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN 67
+#define LOAD_ATTR_INSTANCE_VALUE 72
+#define LOAD_ATTR_MODULE 73
+#define LOAD_ATTR_PROPERTY 76
+#define LOAD_ATTR_SLOT 77
+#define LOAD_ATTR_WITH_HINT 78
+#define LOAD_ATTR_METHOD_LAZY_DICT 79
+#define LOAD_ATTR_METHOD_NO_DICT 80
+#define LOAD_ATTR_METHOD_WITH_DICT 81
+#define LOAD_ATTR_METHOD_WITH_VALUES 86
+#define LOAD_CONST__LOAD_FAST 113
+#define LOAD_FAST__LOAD_CONST 121
+#define LOAD_FAST__LOAD_FAST 141
+#define LOAD_GLOBAL_BUILTIN 143
+#define LOAD_GLOBAL_MODULE 153
+#define STORE_ATTR_INSTANCE_VALUE 154
+#define STORE_ATTR_SLOT 158
+#define STORE_ATTR_WITH_HINT 159
+#define STORE_FAST__LOAD_FAST 160
+#define STORE_FAST__STORE_FAST 161
+#define STORE_SUBSCR_DICT 166
+#define STORE_SUBSCR_LIST_INT 167
+#define UNPACK_SEQUENCE_LIST 168
+#define UNPACK_SEQUENCE_TUPLE 169
+#define UNPACK_SEQUENCE_TWO_TUPLE 170
#define DO_TRACING 255
#define HAS_ARG(op) ((((op) >= HAVE_ARGUMENT) && (!IS_PSEUDO_OPCODE(op)))\
diff --git a/Include/patchlevel.h b/Include/patchlevel.h
index a16b8d7104e3b6..3a7e3b47a88501 100644
--- a/Include/patchlevel.h
+++ b/Include/patchlevel.h
@@ -20,10 +20,10 @@
#define PY_MINOR_VERSION 12
#define PY_MICRO_VERSION 0
#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_ALPHA
-#define PY_RELEASE_SERIAL 2
+#define PY_RELEASE_SERIAL 3
/* Version as a string */
-#define PY_VERSION "3.12.0a2+"
+#define PY_VERSION "3.12.0a3+"
/*--end constants--*/
/* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2.
diff --git a/Include/pyport.h b/Include/pyport.h
index b3ff2f4882e90f..b1b2a74779691d 100644
--- a/Include/pyport.h
+++ b/Include/pyport.h
@@ -698,6 +698,15 @@ extern char * _getpty(int *, int, mode_t, int);
# define _Py__has_builtin(x) 0
#endif
+// _Py_TYPEOF(expr) gets the type of an expression.
+//
+// Example: _Py_TYPEOF(x) x_copy = (x);
+//
+// The macro is only defined if GCC or clang compiler is used.
+#if defined(__GNUC__) || defined(__clang__)
+# define _Py_TYPEOF(expr) __typeof__(expr)
+#endif
+
/* A convenient way for code to know if sanitizers are enabled. */
#if defined(__has_feature)
diff --git a/Lib/asyncio/events.py b/Lib/asyncio/events.py
index 2836bbcc463fe5..34a8869dff8def 100644
--- a/Lib/asyncio/events.py
+++ b/Lib/asyncio/events.py
@@ -619,7 +619,7 @@ def get_event_loop(self):
Returns an event loop object implementing the BaseEventLoop interface,
or raises an exception in case no event loop has been set for the
- current context and the current policy does not specify to create one.
+ current context.
It should never return None."""
raise NotImplementedError
@@ -672,11 +672,6 @@ def get_event_loop(self):
Returns an instance of EventLoop or raises an exception.
"""
- if (self._local._loop is None and
- not self._local._set_called and
- threading.current_thread() is threading.main_thread()):
- self.set_event_loop(self.new_event_loop())
-
if self._local._loop is None:
raise RuntimeError('There is no current event loop in thread %r.'
% threading.current_thread().name)
@@ -786,16 +781,9 @@ def get_event_loop():
the result of `get_event_loop_policy().get_event_loop()` call.
"""
# NOTE: this function is implemented in C (see _asynciomodule.c)
- return _py__get_event_loop()
-
-
-def _get_event_loop(stacklevel=3):
current_loop = _get_running_loop()
if current_loop is not None:
return current_loop
- import warnings
- warnings.warn('There is no current event loop',
- DeprecationWarning, stacklevel=stacklevel)
return get_event_loop_policy().get_event_loop()
@@ -825,7 +813,6 @@ def set_child_watcher(watcher):
_py__set_running_loop = _set_running_loop
_py_get_running_loop = get_running_loop
_py_get_event_loop = get_event_loop
-_py__get_event_loop = _get_event_loop
try:
@@ -833,7 +820,7 @@ def set_child_watcher(watcher):
# functions in asyncio. Pure Python implementation is
# about 4 times slower than C-accelerated.
from _asyncio import (_get_running_loop, _set_running_loop,
- get_running_loop, get_event_loop, _get_event_loop)
+ get_running_loop, get_event_loop)
except ImportError:
pass
else:
@@ -842,7 +829,6 @@ def set_child_watcher(watcher):
_c__set_running_loop = _set_running_loop
_c_get_running_loop = get_running_loop
_c_get_event_loop = get_event_loop
- _c__get_event_loop = _get_event_loop
if hasattr(os, 'fork'):
diff --git a/Lib/asyncio/futures.py b/Lib/asyncio/futures.py
index 3a6b44a0910869..97fc4e3fcb60ee 100644
--- a/Lib/asyncio/futures.py
+++ b/Lib/asyncio/futures.py
@@ -77,7 +77,7 @@ def __init__(self, *, loop=None):
the default event loop.
"""
if loop is None:
- self._loop = events._get_event_loop()
+ self._loop = events.get_event_loop()
else:
self._loop = loop
self._callbacks = []
@@ -413,7 +413,7 @@ def wrap_future(future, *, loop=None):
assert isinstance(future, concurrent.futures.Future), \
f'concurrent.futures.Future is expected, got {future!r}'
if loop is None:
- loop = events._get_event_loop()
+ loop = events.get_event_loop()
new_future = loop.create_future()
_chain_future(future, new_future)
return new_future
diff --git a/Lib/asyncio/proactor_events.py b/Lib/asyncio/proactor_events.py
index c6aab408fc7410..1e2a730cf368a9 100644
--- a/Lib/asyncio/proactor_events.py
+++ b/Lib/asyncio/proactor_events.py
@@ -288,7 +288,8 @@ def _loop_reading(self, fut=None):
# we got end-of-file so no need to reschedule a new read
return
- data = self._data[:length]
+ # It's a new slice so make it immutable so protocols upstream don't have problems
+ data = bytes(memoryview(self._data)[:length])
else:
# the future will be replaced by next proactor.recv call
fut.cancel()
diff --git a/Lib/asyncio/streams.py b/Lib/asyncio/streams.py
index c4d837a1170819..0f9098b4195633 100644
--- a/Lib/asyncio/streams.py
+++ b/Lib/asyncio/streams.py
@@ -125,7 +125,7 @@ class FlowControlMixin(protocols.Protocol):
def __init__(self, loop=None):
if loop is None:
- self._loop = events._get_event_loop(stacklevel=4)
+ self._loop = events.get_event_loop()
else:
self._loop = loop
self._paused = False
@@ -404,7 +404,7 @@ def __init__(self, limit=_DEFAULT_LIMIT, loop=None):
self._limit = limit
if loop is None:
- self._loop = events._get_event_loop()
+ self._loop = events.get_event_loop()
else:
self._loop = loop
self._buffer = bytearray()
@@ -688,7 +688,7 @@ async def read(self, n=-1):
await self._wait_for_data('read')
# This will work right even if buffer is less than n bytes
- data = bytes(self._buffer[:n])
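+ # slicing a memoryview does not copy, so the requested bytes are only
+ # copied once, when building the returned bytes object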
+ data = bytes(memoryview(self._buffer)[:n])
del self._buffer[:n]
self._maybe_resume_transport()
@@ -730,7 +730,7 @@ async def readexactly(self, n):
data = bytes(self._buffer)
self._buffer.clear()
else:
- data = bytes(self._buffer[:n])
+ data = bytes(memoryview(self._buffer)[:n])
del self._buffer[:n]
self._maybe_resume_transport()
return data
diff --git a/Lib/asyncio/tasks.py b/Lib/asyncio/tasks.py
index 571013745aa03a..fa853283c0c5e4 100644
--- a/Lib/asyncio/tasks.py
+++ b/Lib/asyncio/tasks.py
@@ -582,7 +582,7 @@ def as_completed(fs, *, timeout=None):
from .queues import Queue # Import here to avoid circular import problem.
done = Queue()
- loop = events._get_event_loop()
+ loop = events.get_event_loop()
todo = {ensure_future(f, loop=loop) for f in set(fs)}
timeout_handle = None
@@ -668,7 +668,7 @@ def _ensure_future(coro_or_future, *, loop=None):
'is required')
if loop is None:
- loop = events._get_event_loop(stacklevel=4)
+ loop = events.get_event_loop()
try:
return loop.create_task(coro_or_future)
except RuntimeError:
@@ -749,7 +749,7 @@ def gather(*coros_or_futures, return_exceptions=False):
gather won't cancel any other awaitables.
"""
if not coros_or_futures:
- loop = events._get_event_loop()
+ loop = events.get_event_loop()
outer = loop.create_future()
outer.set_result([])
return outer
diff --git a/Lib/csv.py b/Lib/csv.py
index 309a8f3f486365..4ef8be45ca9e0a 100644
--- a/Lib/csv.py
+++ b/Lib/csv.py
@@ -139,7 +139,8 @@ def __init__(self, f, fieldnames, restval="", extrasaction="raise",
fieldnames = list(fieldnames)
self.fieldnames = fieldnames # list of keys for the dict
self.restval = restval # for writing short dicts
- if extrasaction.lower() not in ("raise", "ignore"):
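+ # normalize once up front so later comparisons against "raise"/"ignore"
+ # are case-insensitive (bpo-44512)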
+ extrasaction = extrasaction.lower()
+ if extrasaction not in ("raise", "ignore"):
raise ValueError("extrasaction (%s) must be 'raise' or 'ignore'"
% extrasaction)
self.extrasaction = extrasaction
diff --git a/Lib/enum.py b/Lib/enum.py
index 1b683c702d59b4..a0cad066dc23f7 100644
--- a/Lib/enum.py
+++ b/Lib/enum.py
@@ -436,7 +436,9 @@ def __setitem__(self, key, value):
if isinstance(value, auto):
single = True
value = (value, )
- if isinstance(value, tuple):
+ if type(value) is tuple and any(isinstance(v, auto) for v in value):
+ # insist on an actual tuple, no subclasses, in keeping with only supporting
+ # top-level auto() usage (not contained in any other data structure)
auto_valued = []
for v in value:
if isinstance(v, auto):
@@ -955,7 +957,15 @@ def _find_data_repr_(mcls, class_name, bases):
return base._value_repr_
elif '__repr__' in base.__dict__:
# this is our data repr
- return base.__dict__['__repr__']
+ # double-check if a dataclass with a default __repr__
+ if (
+ '__dataclass_fields__' in base.__dict__
+ and '__dataclass_params__' in base.__dict__
+ and base.__dict__['__dataclass_params__'].repr
+ ):
+ return _dataclass_repr
+ else:
+ return base.__dict__['__repr__']
return None
@classmethod
@@ -1046,20 +1056,20 @@ class Enum(metaclass=EnumType):
Access them by:
- - attribute access::
+ - attribute access:
- >>> Color.RED
- <Color.RED: 1>
+ >>> Color.RED
+ <Color.RED: 1>
- value lookup:
- >>> Color(1)
- <Color.RED: 1>
+ >>> Color(1)
+ <Color.RED: 1>
- name lookup:
- >>> Color['RED']
- <Color.RED: 1>
+ >>> Color['RED']
+ <Color.RED: 1>
Enumerations can be iterated over, and know how many members they have:
@@ -1551,6 +1561,14 @@ def _power_of_two(value):
return False
return value == 2 ** _high_bit(value)
+def _dataclass_repr(self):
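+ # e.g. for a dataclass value with fields x=1 and y=2 (both repr=True),
+ # this returns "x=1, y=2", which EnumType then uses as the value repr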
+ dcf = self.__dataclass_fields__
+ return ', '.join(
+ '%s=%r' % (k, getattr(self, k))
+ for k in dcf.keys()
+ if dcf[k].repr
+ )
+
def global_enum_repr(self):
"""
use module.enum_name instead of class.enum_name
diff --git a/Lib/http/server.py b/Lib/http/server.py
index 8aee31bac2752a..8acabff605e795 100644
--- a/Lib/http/server.py
+++ b/Lib/http/server.py
@@ -93,6 +93,7 @@
import html
import http.client
import io
+import itertools
import mimetypes
import os
import posixpath
@@ -562,6 +563,11 @@ def log_error(self, format, *args):
self.log_message(format, *args)
+ # https://en.wikipedia.org/wiki/List_of_Unicode_characters#Control_codes
+ _control_char_table = str.maketrans(
+ {c: fr'\x{c:02x}' for c in itertools.chain(range(0x20), range(0x7f,0xa0))})
+ _control_char_table[ord('\\')] = r'\\'
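+ # e.g. an ESC byte (0x1b) in a request line is logged as the literal
+ # four characters \x1b rather than being written raw to stderr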
+
def log_message(self, format, *args):
"""Log an arbitrary message.
@@ -577,12 +583,16 @@ def log_message(self, format, *args):
The client ip and current date/time are prefixed to
every message.
+ Unicode control characters are replaced with escaped hex
+ before writing the output to stderr.
+
"""
+ message = format % args
sys.stderr.write("%s - - [%s] %s\n" %
(self.address_string(),
self.log_date_time_string(),
- format%args))
+ message.translate(self._control_char_table)))
def version_string(self):
"""Return the server software version string."""
diff --git a/Lib/inspect.py b/Lib/inspect.py
index 31ac888126b57c..e165937e448a95 100644
--- a/Lib/inspect.py
+++ b/Lib/inspect.py
@@ -1160,7 +1160,6 @@ def __init__(self):
self.started = False
self.passline = False
self.indecorator = False
- self.decoratorhasargs = False
self.last = 1
self.body_col0 = None
@@ -1175,13 +1174,6 @@ def tokeneater(self, type, token, srowcol, erowcol, line):
self.islambda = True
self.started = True
self.passline = True # skip to the end of the line
- elif token == "(":
- if self.indecorator:
- self.decoratorhasargs = True
- elif token == ")":
- if self.indecorator:
- self.indecorator = False
- self.decoratorhasargs = False
elif type == tokenize.NEWLINE:
self.passline = False # stop skipping when a NEWLINE is seen
self.last = srowcol[0]
@@ -1189,7 +1181,7 @@ def tokeneater(self, type, token, srowcol, erowcol, line):
raise EndOfBlock
# hitting a NEWLINE when in a decorator without args
# ends the decorator
- if self.indecorator and not self.decoratorhasargs:
+ if self.indecorator:
self.indecorator = False
elif self.passline:
pass
diff --git a/Lib/opcode.py b/Lib/opcode.py
index fa6dbe5d24170c..fc57affbac5814 100644
--- a/Lib/opcode.py
+++ b/Lib/opcode.py
@@ -320,6 +320,7 @@ def pseudo_op(name, op, real_ops):
],
"FOR_ITER": [
"FOR_ITER_LIST",
+ "FOR_ITER_TUPLE",
"FOR_ITER_RANGE",
"FOR_ITER_GEN",
],
diff --git a/Lib/pydoc_data/topics.py b/Lib/pydoc_data/topics.py
index a817dc3547fa93..4ba680cceda93a 100644
--- a/Lib/pydoc_data/topics.py
+++ b/Lib/pydoc_data/topics.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Autogenerated by Sphinx on Mon Nov 14 12:13:19 2022
+# Autogenerated by Sphinx on Tue Dec 6 19:31:49 2022
topics = {'assert': 'The "assert" statement\n'
'**********************\n'
'\n'
@@ -11109,8 +11109,9 @@
'y)" is\n'
'typically invalid without special support in "MyClass". To '
'be able to\n'
- 'use that kind of patterns, the class needs to define a\n'
- '*__match_args__* attribute.\n'
+ 'use that kind of pattern, the class needs to define a '
+ '*__match_args__*\n'
+ 'attribute.\n'
'\n'
'object.__match_args__\n'
'\n'
@@ -11510,7 +11511,7 @@
'property\n'
' being one of “Lm”, “Lt”, “Lu”, “Ll”, or “Lo”. Note '
'that this is\n'
- ' different from the “Alphabetic” property defined in the '
+ ' different from the Alphabetic property defined in the '
'Unicode\n'
' Standard.\n'
'\n'
@@ -11559,9 +11560,9 @@
'according to the\n'
' language definition, section Identifiers and keywords.\n'
'\n'
- ' Call "keyword.iskeyword()" to test whether string "s" '
- 'is a reserved\n'
- ' identifier, such as "def" and "class".\n'
+ ' "keyword.iskeyword()" can be used to test whether '
+ 'string "s" is a\n'
+ ' reserved identifier, such as "def" and "class".\n'
'\n'
' Example:\n'
'\n'
diff --git a/Lib/test/inspect_fodder2.py b/Lib/test/inspect_fodder2.py
index e7d4b53ebefcc6..2dc49817087c44 100644
--- a/Lib/test/inspect_fodder2.py
+++ b/Lib/test/inspect_fodder2.py
@@ -259,3 +259,17 @@ def all_markers_with_args_and_kwargs(a, b, /, c, d, *args, e, f, **kwargs):
#line 259
def all_markers_with_defaults(a, b=1, /, c=2, d=3, *, e=4, f=5):
pass
+
+# line 263
+def deco_factory(**kwargs):
+ def deco(f):
+ @wraps(f)
+ def wrapper(*a, **kwd):
+ kwd.update(kwargs)
+ return f(*a, **kwd)
+ return wrapper
+ return deco
+
+@deco_factory(foo=(1 + 2), bar=lambda: 1)
+def complex_decorated(foo=0, bar=lambda: 0):
+ return foo + bar()
diff --git a/Lib/test/libregrtest/main.py b/Lib/test/libregrtest/main.py
index 3eeef029b22d48..19ccf2db5e7f06 100644
--- a/Lib/test/libregrtest/main.py
+++ b/Lib/test/libregrtest/main.py
@@ -17,7 +17,8 @@
ChildError, DidNotRun)
from test.libregrtest.setup import setup_tests
from test.libregrtest.pgo import setup_pgo_tests
-from test.libregrtest.utils import removepy, count, format_duration, printlist
+from test.libregrtest.utils import (removepy, count, format_duration,
+ printlist, get_build_info)
from test import support
from test.support import os_helper
from test.support import threading_helper
@@ -491,6 +492,7 @@ def display_header(self):
print("==", platform.python_implementation(), *sys.version.split())
print("==", platform.platform(aliased=True),
"%s-endian" % sys.byteorder)
+ print("== Python build:", ' '.join(get_build_info()))
print("== cwd:", os.getcwd())
cpu_count = os.cpu_count()
if cpu_count:
diff --git a/Lib/test/libregrtest/utils.py b/Lib/test/libregrtest/utils.py
index e6909170334e15..fb13fa0e243ba7 100644
--- a/Lib/test/libregrtest/utils.py
+++ b/Lib/test/libregrtest/utils.py
@@ -1,6 +1,7 @@
import math
import os.path
import sys
+import sysconfig
import textwrap
from test import support
@@ -208,3 +209,87 @@ def clear_caches():
pass
else:
fractions._hash_algorithm.cache_clear()
+
+
+def get_build_info():
+ # Get most important configure and build options as a list of strings.
+ # Example: ['debug', 'ASAN+MSAN'] or ['release', 'LTO+PGO'].
+
+ config_args = sysconfig.get_config_var('CONFIG_ARGS') or ''
+ cflags = sysconfig.get_config_var('PY_CFLAGS') or ''
+ cflags_nodist = sysconfig.get_config_var('PY_CFLAGS_NODIST') or ''
+ ldflags_nodist = sysconfig.get_config_var('PY_LDFLAGS_NODIST') or ''
+
+ build = []
+ if hasattr(sys, 'gettotalrefcount'):
+ # --with-pydebug
+ build.append('debug')
+
+ if '-DNDEBUG' in (cflags + cflags_nodist):
+ build.append('without_assert')
+ else:
+ build.append('release')
+
+ if '--with-assertions' in config_args:
+ build.append('with_assert')
+ elif '-DNDEBUG' not in (cflags + cflags_nodist):
+ build.append('with_assert')
+
+ # --enable-framework=name
+ framework = sysconfig.get_config_var('PYTHONFRAMEWORK')
+ if framework:
+ build.append(f'framework={framework}')
+
+ # --enable-shared
+ shared = int(sysconfig.get_config_var('PY_ENABLE_SHARED') or '0')
+ if shared:
+ build.append('shared')
+
+ # --with-lto
+ optimizations = []
+ if '-flto=thin' in ldflags_nodist:
+ optimizations.append('ThinLTO')
+ elif '-flto' in ldflags_nodist:
+ optimizations.append('LTO')
+
+ # --enable-optimizations
+ pgo_options = (
+ # GCC
+ '-fprofile-use',
+ # clang: -fprofile-instr-use=code.profclangd
+ '-fprofile-instr-use',
+ # ICC
+ "-prof-use",
+ )
+ if any(option in cflags_nodist for option in pgo_options):
+ optimizations.append('PGO')
+ if optimizations:
+ build.append('+'.join(optimizations))
+
+ # --with-address-sanitizer
+ sanitizers = []
+ if support.check_sanitizer(address=True):
+ sanitizers.append("ASAN")
+ # --with-memory-sanitizer
+ if support.check_sanitizer(memory=True):
+ sanitizers.append("MSAN")
+ # --with-undefined-behavior-sanitizer
+ if support.check_sanitizer(ub=True):
+ sanitizers.append("UBSAN")
+ if sanitizers:
+ build.append('+'.join(sanitizers))
+
+ # --with-trace-refs
+ if hasattr(sys, 'getobjects'):
+ build.append("TraceRefs")
+ # --enable-pystats
+ if hasattr(sys, '_stats_on'):
+ build.append("pystats")
+ # --with-valgrind
+ if sysconfig.get_config_var('WITH_VALGRIND'):
+ build.append("valgrind")
+ # --with-dtrace
+ if sysconfig.get_config_var('WITH_DTRACE'):
+ build.append("dtrace")
+
+ return build
diff --git a/Lib/test/test__xxsubinterpreters.py b/Lib/test/test__xxsubinterpreters.py
index f274b637d94701..18900bb9f7162c 100644
--- a/Lib/test/test__xxsubinterpreters.py
+++ b/Lib/test/test__xxsubinterpreters.py
@@ -295,8 +295,8 @@ def clean_up_channels():
class TestBase(unittest.TestCase):
def tearDown(self):
- clean_up_interpreters()
clean_up_channels()
+ clean_up_interpreters()
##################################
@@ -411,6 +411,15 @@ def test_non_shareable_int(self):
interpreters.channel_send(self.cid, i)
+class ModuleTests(TestBase):
+
+ def test_import_in_interpreter(self):
+ _run_output(
+ interpreters.create(),
+ 'import _xxsubinterpreters as _interpreters',
+ )
+
+
##################################
# interpreter tests
diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py
index 773fba87632b0a..ab6a63faa59085 100644
--- a/Lib/test/test_ast.py
+++ b/Lib/test/test_ast.py
@@ -837,7 +837,8 @@ def check_limit(prefix, repeated):
details = "Compiling ({!r} + {!r} * {})".format(
prefix, repeated, depth)
with self.assertRaises(RecursionError, msg=details):
- ast.parse(broken)
+ with support.infinite_recursion():
+ ast.parse(broken)
check_limit("a", "()")
check_limit("a", ".b")
diff --git a/Lib/test/test_asyncio/test_base_events.py b/Lib/test/test_asyncio/test_base_events.py
index 7421d18dc636c8..3b4026cb73869a 100644
--- a/Lib/test/test_asyncio/test_base_events.py
+++ b/Lib/test/test_asyncio/test_base_events.py
@@ -746,7 +746,7 @@ async def coro():
def test_env_var_debug(self):
code = '\n'.join((
'import asyncio',
- 'loop = asyncio.get_event_loop()',
+ 'loop = asyncio.new_event_loop()',
'print(loop.get_debug())'))
# Test with -E to not fail if the unit test was run with
@@ -861,20 +861,15 @@ async def raise_keyboard_interrupt():
self.loop._process_events = mock.Mock()
- try:
+ with self.assertRaises(KeyboardInterrupt):
self.loop.run_until_complete(raise_keyboard_interrupt())
- except KeyboardInterrupt:
- pass
def func():
self.loop.stop()
func.called = True
func.called = False
- try:
- self.loop.call_soon(func)
- self.loop.run_forever()
- except KeyboardInterrupt:
- pass
+ self.loop.call_soon(self.loop.call_soon, func)
+ self.loop.run_forever()
self.assertTrue(func.called)
def test_single_selecter_event_callback_after_stopping(self):
diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py
index cabe75f56d9fb0..153b2de8172273 100644
--- a/Lib/test/test_asyncio/test_events.py
+++ b/Lib/test/test_asyncio/test_events.py
@@ -2550,29 +2550,8 @@ def test_event_loop_policy(self):
def test_get_event_loop(self):
policy = asyncio.DefaultEventLoopPolicy()
self.assertIsNone(policy._local._loop)
-
- loop = policy.get_event_loop()
- self.assertIsInstance(loop, asyncio.AbstractEventLoop)
-
- self.assertIs(policy._local._loop, loop)
- self.assertIs(loop, policy.get_event_loop())
- loop.close()
-
- def test_get_event_loop_calls_set_event_loop(self):
- policy = asyncio.DefaultEventLoopPolicy()
-
- with mock.patch.object(
- policy, "set_event_loop",
- wraps=policy.set_event_loop) as m_set_event_loop:
-
- loop = policy.get_event_loop()
-
- # policy._local._loop must be set through .set_event_loop()
- # (the unix DefaultEventLoopPolicy needs this call to attach
- # the child watcher correctly)
- m_set_event_loop.assert_called_with(loop)
-
- loop.close()
+ with self.assertRaisesRegex(RuntimeError, 'no current event loop'):
+ policy.get_event_loop()
def test_get_event_loop_after_set_none(self):
policy = asyncio.DefaultEventLoopPolicy()
@@ -2599,7 +2578,8 @@ def test_new_event_loop(self):
def test_set_event_loop(self):
policy = asyncio.DefaultEventLoopPolicy()
- old_loop = policy.get_event_loop()
+ old_loop = policy.new_event_loop()
+ policy.set_event_loop(old_loop)
self.assertRaises(TypeError, policy.set_event_loop, object())
@@ -2716,15 +2696,11 @@ def get_event_loop(self):
asyncio.set_event_loop_policy(Policy())
loop = asyncio.new_event_loop()
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaises(TestError):
- asyncio.get_event_loop()
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaises(TestError):
+ asyncio.get_event_loop()
asyncio.set_event_loop(None)
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaises(TestError):
- asyncio.get_event_loop()
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaises(TestError):
+ asyncio.get_event_loop()
with self.assertRaisesRegex(RuntimeError, 'no running'):
asyncio.get_running_loop()
@@ -2738,16 +2714,11 @@ async def func():
loop.run_until_complete(func())
asyncio.set_event_loop(loop)
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaises(TestError):
- asyncio.get_event_loop()
- self.assertEqual(cm.filename, __file__)
-
+ with self.assertRaises(TestError):
+ asyncio.get_event_loop()
asyncio.set_event_loop(None)
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaises(TestError):
- asyncio.get_event_loop()
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaises(TestError):
+ asyncio.get_event_loop()
finally:
asyncio.set_event_loop_policy(old_policy)
@@ -2766,15 +2737,11 @@ def test_get_event_loop_returns_running_loop2(self):
loop = asyncio.new_event_loop()
self.addCleanup(loop.close)
- with self.assertWarns(DeprecationWarning) as cm:
- loop2 = asyncio.get_event_loop()
- self.addCleanup(loop2.close)
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaisesRegex(RuntimeError, 'no current'):
+ asyncio.get_event_loop()
asyncio.set_event_loop(None)
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaisesRegex(RuntimeError, 'no current'):
- asyncio.get_event_loop()
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaisesRegex(RuntimeError, 'no current'):
+ asyncio.get_event_loop()
with self.assertRaisesRegex(RuntimeError, 'no running'):
asyncio.get_running_loop()
@@ -2788,15 +2755,11 @@ async def func():
loop.run_until_complete(func())
asyncio.set_event_loop(loop)
- with self.assertWarns(DeprecationWarning) as cm:
- self.assertIs(asyncio.get_event_loop(), loop)
- self.assertEqual(cm.filename, __file__)
+ self.assertIs(asyncio.get_event_loop(), loop)
asyncio.set_event_loop(None)
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaisesRegex(RuntimeError, 'no current'):
- asyncio.get_event_loop()
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaisesRegex(RuntimeError, 'no current'):
+ asyncio.get_event_loop()
finally:
asyncio.set_event_loop_policy(old_policy)
diff --git a/Lib/test/test_asyncio/test_futures.py b/Lib/test/test_asyncio/test_futures.py
index 83ea01c2452521..56b0b864de2ddf 100644
--- a/Lib/test/test_asyncio/test_futures.py
+++ b/Lib/test/test_asyncio/test_futures.py
@@ -146,10 +146,8 @@ def test_initial_state(self):
self.assertTrue(f.cancelled())
def test_constructor_without_loop(self):
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaisesRegex(RuntimeError, 'There is no current event loop'):
- self._new_future()
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaisesRegex(RuntimeError, 'no current event loop'):
+ self._new_future()
def test_constructor_use_running_loop(self):
async def test():
@@ -159,12 +157,10 @@ async def test():
self.assertIs(f.get_loop(), self.loop)
def test_constructor_use_global_loop(self):
- # Deprecated in 3.10
+ # Deprecated in 3.10, undeprecated in 3.12
asyncio.set_event_loop(self.loop)
self.addCleanup(asyncio.set_event_loop, None)
- with self.assertWarns(DeprecationWarning) as cm:
- f = self._new_future()
- self.assertEqual(cm.filename, __file__)
+ f = self._new_future()
self.assertIs(f._loop, self.loop)
self.assertIs(f.get_loop(), self.loop)
@@ -500,10 +496,8 @@ def run(arg):
return (arg, threading.get_ident())
ex = concurrent.futures.ThreadPoolExecutor(1)
f1 = ex.submit(run, 'oi')
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaises(RuntimeError):
- asyncio.wrap_future(f1)
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaisesRegex(RuntimeError, 'no current event loop'):
+ asyncio.wrap_future(f1)
ex.shutdown(wait=True)
def test_wrap_future_use_running_loop(self):
@@ -518,16 +512,14 @@ async def test():
ex.shutdown(wait=True)
def test_wrap_future_use_global_loop(self):
- # Deprecated in 3.10
+ # Deprecated in 3.10, undeprecated in 3.12
asyncio.set_event_loop(self.loop)
self.addCleanup(asyncio.set_event_loop, None)
def run(arg):
return (arg, threading.get_ident())
ex = concurrent.futures.ThreadPoolExecutor(1)
f1 = ex.submit(run, 'oi')
- with self.assertWarns(DeprecationWarning) as cm:
- f2 = asyncio.wrap_future(f1)
- self.assertEqual(cm.filename, __file__)
+ f2 = asyncio.wrap_future(f1)
self.assertIs(self.loop, f2._loop)
ex.shutdown(wait=True)
diff --git a/Lib/test/test_asyncio/test_proactor_events.py b/Lib/test/test_asyncio/test_proactor_events.py
index ae30185cef776a..6cb7dc300c5331 100644
--- a/Lib/test/test_asyncio/test_proactor_events.py
+++ b/Lib/test/test_asyncio/test_proactor_events.py
@@ -75,7 +75,10 @@ def test_loop_reading_data(self):
called_buf = bytearray(self.buffer_size)
called_buf[:len(buf)] = buf
self.loop._proactor.recv_into.assert_called_with(self.sock, called_buf)
- self.protocol.data_received.assert_called_with(bytearray(buf))
+ self.protocol.data_received.assert_called_with(buf)
+ # assert_called_with maps bytearray and bytes to the same thing so check manually
+ # regression test for https://github.com/python/cpython/issues/99941
+ self.assertIsInstance(self.protocol.data_received.call_args.args[0], bytes)
@unittest.skipIf(sys.flags.optimize, "Assertions are disabled in optimized mode")
def test_loop_reading_no_data(self):
diff --git a/Lib/test/test_asyncio/test_streams.py b/Lib/test/test_asyncio/test_streams.py
index 01d5407a497a04..7f9dc621808358 100644
--- a/Lib/test/test_asyncio/test_streams.py
+++ b/Lib/test/test_asyncio/test_streams.py
@@ -816,10 +816,8 @@ def test_read_all_from_pipe_reader(self):
self.assertEqual(data, b'data')
def test_streamreader_constructor_without_loop(self):
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaisesRegex(RuntimeError, 'There is no current event loop'):
- asyncio.StreamReader()
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaisesRegex(RuntimeError, 'no current event loop'):
+ asyncio.StreamReader()
def test_streamreader_constructor_use_running_loop(self):
# asyncio issue #184: Ensure that StreamReaderProtocol constructor
@@ -833,21 +831,17 @@ async def test():
def test_streamreader_constructor_use_global_loop(self):
# asyncio issue #184: Ensure that StreamReaderProtocol constructor
# retrieves the current loop if the loop parameter is not set
- # Deprecated in 3.10
+ # Deprecated in 3.10, undeprecated in 3.12
self.addCleanup(asyncio.set_event_loop, None)
asyncio.set_event_loop(self.loop)
- with self.assertWarns(DeprecationWarning) as cm:
- reader = asyncio.StreamReader()
- self.assertEqual(cm.filename, __file__)
+ reader = asyncio.StreamReader()
self.assertIs(reader._loop, self.loop)
def test_streamreaderprotocol_constructor_without_loop(self):
reader = mock.Mock()
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaisesRegex(RuntimeError, 'There is no current event loop'):
- asyncio.StreamReaderProtocol(reader)
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaisesRegex(RuntimeError, 'no current event loop'):
+ asyncio.StreamReaderProtocol(reader)
def test_streamreaderprotocol_constructor_use_running_loop(self):
# asyncio issue #184: Ensure that StreamReaderProtocol constructor
@@ -861,13 +855,11 @@ async def test():
def test_streamreaderprotocol_constructor_use_global_loop(self):
# asyncio issue #184: Ensure that StreamReaderProtocol constructor
# retrieves the current loop if the loop parameter is not set
- # Deprecated in 3.10
+ # Deprecated in 3.10, undeprecated in 3.12
self.addCleanup(asyncio.set_event_loop, None)
asyncio.set_event_loop(self.loop)
reader = mock.Mock()
- with self.assertWarns(DeprecationWarning) as cm:
- protocol = asyncio.StreamReaderProtocol(reader)
- self.assertEqual(cm.filename, __file__)
+ protocol = asyncio.StreamReaderProtocol(reader)
self.assertIs(protocol._loop, self.loop)
def test_multiple_drain(self):
diff --git a/Lib/test/test_asyncio/test_tasks.py b/Lib/test/test_asyncio/test_tasks.py
index d8ba2f4e2a742a..5168b8250ef0a2 100644
--- a/Lib/test/test_asyncio/test_tasks.py
+++ b/Lib/test/test_asyncio/test_tasks.py
@@ -196,10 +196,8 @@ async def notmuch():
a = notmuch()
self.addCleanup(a.close)
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaisesRegex(RuntimeError, 'There is no current event loop'):
- asyncio.ensure_future(a)
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaisesRegex(RuntimeError, 'no current event loop'):
+ asyncio.ensure_future(a)
async def test():
return asyncio.ensure_future(notmuch())
@@ -209,12 +207,10 @@ async def test():
self.assertTrue(t.done())
self.assertEqual(t.result(), 'ok')
- # Deprecated in 3.10
+ # Deprecated in 3.10, undeprecated in 3.12
asyncio.set_event_loop(self.loop)
self.addCleanup(asyncio.set_event_loop, None)
- with self.assertWarns(DeprecationWarning) as cm:
- t = asyncio.ensure_future(notmuch())
- self.assertEqual(cm.filename, __file__)
+ t = asyncio.ensure_future(notmuch())
self.assertIs(t._loop, self.loop)
self.loop.run_until_complete(t)
self.assertTrue(t.done())
@@ -1532,10 +1528,8 @@ async def coro():
self.addCleanup(a.close)
futs = asyncio.as_completed([a])
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaisesRegex(RuntimeError, 'There is no current event loop'):
- list(futs)
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaisesRegex(RuntimeError, 'no current event loop'):
+ list(futs)
def test_as_completed_coroutine_use_running_loop(self):
loop = self.new_test_loop()
@@ -1965,10 +1959,8 @@ async def coro():
inner = coro()
self.addCleanup(inner.close)
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaisesRegex(RuntimeError, 'There is no current event loop'):
- asyncio.shield(inner)
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaisesRegex(RuntimeError, 'no current event loop'):
+ asyncio.shield(inner)
def test_shield_coroutine_use_running_loop(self):
async def coro():
@@ -1982,15 +1974,13 @@ async def test():
self.assertEqual(res, 42)
def test_shield_coroutine_use_global_loop(self):
- # Deprecated in 3.10
+ # Deprecated in 3.10, undeprecated in 3.12
async def coro():
return 42
asyncio.set_event_loop(self.loop)
self.addCleanup(asyncio.set_event_loop, None)
- with self.assertWarns(DeprecationWarning) as cm:
- outer = asyncio.shield(coro())
- self.assertEqual(cm.filename, __file__)
+ outer = asyncio.shield(coro())
self.assertEqual(outer._loop, self.loop)
res = self.loop.run_until_complete(outer)
self.assertEqual(res, 42)
@@ -2102,8 +2092,8 @@ def test_cancel_gather_1(self):
async def create():
# The indirection fut->child_coro is needed since otherwise the
# gathering task is done at the same time as the child future
- def child_coro():
- return (yield from fut)
+ async def child_coro():
+ return await fut
gather_future = asyncio.gather(child_coro())
return asyncio.ensure_future(gather_future)
gather_task = loop.run_until_complete(create())
@@ -2827,7 +2817,7 @@ def test_current_task_no_running_loop(self):
self.assertIsNone(asyncio.current_task(loop=self.loop))
def test_current_task_no_running_loop_implicit(self):
- with self.assertRaises(RuntimeError):
+ with self.assertRaisesRegex(RuntimeError, 'no running event loop'):
asyncio.current_task()
def test_current_task_with_implicit_loop(self):
@@ -2991,10 +2981,8 @@ def _gather(self, *args, **kwargs):
return asyncio.gather(*args, **kwargs)
def test_constructor_empty_sequence_without_loop(self):
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaises(RuntimeError):
- asyncio.gather()
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaisesRegex(RuntimeError, 'no current event loop'):
+ asyncio.gather()
def test_constructor_empty_sequence_use_running_loop(self):
async def gather():
@@ -3007,12 +2995,10 @@ async def gather():
self.assertEqual(fut.result(), [])
def test_constructor_empty_sequence_use_global_loop(self):
- # Deprecated in 3.10
+ # Deprecated in 3.10, undeprecated in 3.12
asyncio.set_event_loop(self.one_loop)
self.addCleanup(asyncio.set_event_loop, None)
- with self.assertWarns(DeprecationWarning) as cm:
- fut = asyncio.gather()
- self.assertEqual(cm.filename, __file__)
+ fut = asyncio.gather()
self.assertIsInstance(fut, asyncio.Future)
self.assertIs(fut._loop, self.one_loop)
self._run_loop(self.one_loop)
@@ -3100,10 +3086,8 @@ async def coro():
self.addCleanup(gen1.close)
gen2 = coro()
self.addCleanup(gen2.close)
- with self.assertWarns(DeprecationWarning) as cm:
- with self.assertRaises(RuntimeError):
- asyncio.gather(gen1, gen2)
- self.assertEqual(cm.filename, __file__)
+ with self.assertRaisesRegex(RuntimeError, 'no current event loop'):
+ asyncio.gather(gen1, gen2)
def test_constructor_use_running_loop(self):
async def coro():
@@ -3117,16 +3101,14 @@ async def gather():
self.one_loop.run_until_complete(fut)
def test_constructor_use_global_loop(self):
- # Deprecated in 3.10
+ # Deprecated in 3.10, undeprecated in 3.12
async def coro():
return 'abc'
asyncio.set_event_loop(self.other_loop)
self.addCleanup(asyncio.set_event_loop, None)
gen1 = coro()
gen2 = coro()
- with self.assertWarns(DeprecationWarning) as cm:
- fut = asyncio.gather(gen1, gen2)
- self.assertEqual(cm.filename, __file__)
+ fut = asyncio.gather(gen1, gen2)
self.assertIs(fut._loop, self.other_loop)
self.other_loop.run_until_complete(fut)
diff --git a/Lib/test/test_asyncio/test_unix_events.py b/Lib/test/test_asyncio/test_unix_events.py
index 309a1cfdb4aa96..600a5900da088d 100644
--- a/Lib/test/test_asyncio/test_unix_events.py
+++ b/Lib/test/test_asyncio/test_unix_events.py
@@ -1775,7 +1775,8 @@ def f():
def test_child_watcher_replace_mainloop_existing(self):
policy = self.create_policy()
- loop = policy.get_event_loop()
+ loop = policy.new_event_loop()
+ policy.set_event_loop(loop)
# Explicitly setup SafeChildWatcher,
# default ThreadedChildWatcher has no _loop property
@@ -1884,13 +1885,15 @@ async def test_fork_not_share_event_loop(self):
# child
try:
loop = asyncio.get_event_loop_policy().get_event_loop()
- os.write(w, str(id(loop)).encode())
+ except RuntimeError:
+ os.write(w, b'NO LOOP')
+ except:
+ os.write(w, b'ERROR:' + ascii(sys.exc_info()).encode())
finally:
os._exit(0)
else:
# parent
- child_loop = int(os.read(r, 100).decode())
- self.assertNotEqual(child_loop, id(loop))
+ self.assertEqual(os.read(r, 100), b'NO LOOP')
wait_process(pid, exitcode=0)
@hashlib_helper.requires_hashdigest('md5')
diff --git a/Lib/test/test_code.py b/Lib/test/test_code.py
index 4e4d82314a9fb8..02ab8fbcdb0700 100644
--- a/Lib/test/test_code.py
+++ b/Lib/test/test_code.py
@@ -143,7 +143,7 @@
gc_collect)
from test.support.script_helper import assert_python_ok
from test.support import threading_helper
-from opcode import opmap
+from opcode import opmap, opname
COPY_FREE_VARS = opmap['COPY_FREE_VARS']
@@ -339,15 +339,19 @@ def func():
self.assertEqual(list(new_code.co_lines()), [])
def test_invalid_bytecode(self):
- def foo(): pass
- foo.__code__ = co = foo.__code__.replace(co_code=b'\xee\x00d\x00S\x00')
+ def foo():
+ pass
- with self.assertRaises(SystemError) as se:
- foo()
- self.assertEqual(
- f"{co.co_filename}:{co.co_firstlineno}: unknown opcode 238",
- str(se.exception))
+ # assert that opcode 238 is invalid
+ self.assertEqual(opname[238], '<238>')
+ # change first opcode to 0xee (=238)
+ foo.__code__ = foo.__code__.replace(
+ co_code=b'\xee' + foo.__code__.co_code[1:])
+
+ msg = f"unknown opcode 238"
+ with self.assertRaisesRegex(SystemError, msg):
+ foo()
@requires_debug_ranges()
def test_co_positions_artificial_instructions(self):
diff --git a/Lib/test/test_coroutines.py b/Lib/test/test_coroutines.py
index f91c9cc47741b5..43a3ff0536fe28 100644
--- a/Lib/test/test_coroutines.py
+++ b/Lib/test/test_coroutines.py
@@ -2418,7 +2418,8 @@ class UnawaitedWarningDuringShutdownTest(unittest.TestCase):
def test_unawaited_warning_during_shutdown(self):
code = ("import asyncio\n"
"async def f(): pass\n"
- "asyncio.gather(f())\n")
+ "async def t(): asyncio.gather(f())\n"
+ "asyncio.run(t())\n")
assert_python_ok("-c", code)
code = ("import sys\n"
diff --git a/Lib/test/test_csv.py b/Lib/test/test_csv.py
index d64bff13a44e87..8289ddb1c3a54f 100644
--- a/Lib/test/test_csv.py
+++ b/Lib/test/test_csv.py
@@ -762,6 +762,10 @@ def test_write_field_not_in_field_names_raise(self):
dictrow = {'f0': 0, 'f1': 1, 'f2': 2, 'f3': 3}
self.assertRaises(ValueError, csv.DictWriter.writerow, writer, dictrow)
+ # see bpo-44512 (differently cased 'raise' should not result in 'ignore')
+ writer = csv.DictWriter(fileobj, ['f1', 'f2'], extrasaction="RAISE")
+ self.assertRaises(ValueError, csv.DictWriter.writerow, writer, dictrow)
+
def test_write_field_not_in_field_names_ignore(self):
fileobj = StringIO()
writer = csv.DictWriter(fileobj, ['f1', 'f2'], extrasaction="ignore")
@@ -769,6 +773,10 @@ def test_write_field_not_in_field_names_ignore(self):
csv.DictWriter.writerow(writer, dictrow)
self.assertEqual(fileobj.getvalue(), "1,2\r\n")
+ # bpo-44512
+ writer = csv.DictWriter(fileobj, ['f1', 'f2'], extrasaction="IGNORE")
+ csv.DictWriter.writerow(writer, dictrow)
+
def test_dict_reader_fieldnames_accepts_iter(self):
fieldnames = ["a", "b", "c"]
f = StringIO()
diff --git a/Lib/test/test_enum.py b/Lib/test/test_enum.py
index b6082cf02b18d7..d876c8e5fb7798 100644
--- a/Lib/test/test_enum.py
+++ b/Lib/test/test_enum.py
@@ -2717,17 +2717,67 @@ def upper(self):
def test_repr_with_dataclass(self):
"ensure dataclass-mixin has correct repr()"
- from dataclasses import dataclass
- @dataclass
+ #
+ # check overridden dataclass __repr__ is used
+ #
+ from dataclasses import dataclass, field
+ @dataclass(repr=False)
class Foo:
__qualname__ = 'Foo'
a: int
+ def __repr__(self):
+ return 'ha hah!'
class Entries(Foo, Enum):
ENTRY1 = 1
self.assertTrue(isinstance(Entries.ENTRY1, Foo))
self.assertTrue(Entries._member_type_ is Foo, Entries._member_type_)
self.assertTrue(Entries.ENTRY1.value == Foo(1), Entries.ENTRY1.value)
- self.assertEqual(repr(Entries.ENTRY1), '')
+ self.assertEqual(repr(Entries.ENTRY1), '')
+ #
+ # check auto-generated dataclass __repr__ is not used
+ #
+ @dataclass
+ class CreatureDataMixin:
+ __qualname__ = 'CreatureDataMixin'
+ size: str
+ legs: int
+ tail: bool = field(repr=False, default=True)
+ class Creature(CreatureDataMixin, Enum):
+ __qualname__ = 'Creature'
+ BEETLE = ('small', 6)
+ DOG = ('medium', 4)
+ self.assertEqual(repr(Creature.DOG), "")
+ #
+ # check inherited repr used
+ #
+ class Huh:
+ def __repr__(self):
+ return 'inherited'
+ @dataclass(repr=False)
+ class CreatureDataMixin(Huh):
+ __qualname__ = 'CreatureDataMixin'
+ size: str
+ legs: int
+ tail: bool = field(repr=False, default=True)
+ class Creature(CreatureDataMixin, Enum):
+ __qualname__ = 'Creature'
+ BEETLE = ('small', 6)
+ DOG = ('medium', 4)
+ self.assertEqual(repr(Creature.DOG), "")
+ #
+ # check default object.__repr__ used if nothing provided
+ #
+ @dataclass(repr=False)
+ class CreatureDataMixin:
+ __qualname__ = 'CreatureDataMixin'
+ size: str
+ legs: int
+ tail: bool = field(repr=False, default=True)
+ class Creature(CreatureDataMixin, Enum):
+ __qualname__ = 'Creature'
+ BEETLE = ('small', 6)
+ DOG = ('medium', 4)
+ self.assertRegex(repr(Creature.DOG), "")
def test_repr_with_init_data_type_mixin(self):
# non-data_type is a mixin that doesn't define __new__
@@ -2791,6 +2841,19 @@ class MyIntFlag(IntFlag):
self.assertEqual(deep, flags)
self.assertEqual(copied.value, 1 | 2 | 8)
+ def test_namedtuple_as_value(self):
+ from collections import namedtuple
+ TTuple = namedtuple('TTuple', 'id a blist')
+ class NTEnum(Enum):
+ NONE = TTuple(0, 0, [])
+ A = TTuple(1, 2, [4])
+ B = TTuple(2, 4, [0, 1, 2])
+ self.assertEqual(repr(NTEnum.NONE), "")
+ self.assertEqual(NTEnum.NONE.value, TTuple(id=0, a=0, blist=[]))
+ self.assertEqual(
+ [x.value for x in NTEnum],
+ [TTuple(id=0, a=0, blist=[]), TTuple(id=1, a=2, blist=[4]), TTuple(id=2, a=4, blist=[0, 1, 2])],
+ )
class TestOrder(unittest.TestCase):
"test usage of the `_order_` attribute"
@@ -4503,11 +4566,6 @@ class Quadruple(Enum):
COMPLEX_A = 2j
COMPLEX_B = 3j
-class _ModuleWrapper:
- """We use this class as a namespace for swapping modules."""
- def __init__(self, module):
- self.__dict__.update(module.__dict__)
-
class TestConvert(unittest.TestCase):
def tearDown(self):
# Reset the module-level test variables to their original integer
@@ -4547,12 +4605,6 @@ def test_convert_int(self):
self.assertEqual(test_type.CONVERT_TEST_NAME_D, 5)
self.assertEqual(test_type.CONVERT_TEST_NAME_E, 5)
# Ensure that test_type only picked up names matching the filter.
- int_dir = dir(int) + [
- 'CONVERT_TEST_NAME_A', 'CONVERT_TEST_NAME_B', 'CONVERT_TEST_NAME_C',
- 'CONVERT_TEST_NAME_D', 'CONVERT_TEST_NAME_E', 'CONVERT_TEST_NAME_F',
- 'CONVERT_TEST_SIGABRT', 'CONVERT_TEST_SIGIOT',
- 'CONVERT_TEST_EIO', 'CONVERT_TEST_EBUS',
- ]
extra = [name for name in dir(test_type) if name not in enum_dir(test_type)]
missing = [name for name in enum_dir(test_type) if name not in dir(test_type)]
self.assertEqual(
@@ -4594,7 +4646,6 @@ def test_convert_str(self):
self.assertEqual(test_type.CONVERT_STR_TEST_1, 'hello')
self.assertEqual(test_type.CONVERT_STR_TEST_2, 'goodbye')
# Ensure that test_type only picked up names matching the filter.
- str_dir = dir(str) + ['CONVERT_STR_TEST_1', 'CONVERT_STR_TEST_2']
extra = [name for name in dir(test_type) if name not in enum_dir(test_type)]
missing = [name for name in enum_dir(test_type) if name not in dir(test_type)]
self.assertEqual(
@@ -4662,8 +4713,6 @@ def member_dir(member):
allowed.add(name)
return sorted(allowed)
-missing = object()
-
if __name__ == '__main__':
unittest.main()
diff --git a/Lib/test/test_frame.py b/Lib/test/test_frame.py
index a7db22007dedce..ed413f105e5b17 100644
--- a/Lib/test/test_frame.py
+++ b/Lib/test/test_frame.py
@@ -2,6 +2,7 @@
import re
import sys
import textwrap
+import threading
import types
import unittest
import weakref
@@ -11,6 +12,7 @@
_testcapi = None
from test import support
+from test.support import threading_helper
from test.support.script_helper import assert_python_ok
@@ -329,6 +331,46 @@ def f():
if old_enabled:
gc.enable()
+ @support.cpython_only
+ @threading_helper.requires_working_threading()
+ def test_sneaky_frame_object_teardown(self):
+
+ class SneakyDel:
+ def __del__(self):
+ """
+ Stash a reference to the entire stack for walking later.
+
+ It may look crazy, but you'd be surprised how common this is
+ when using a test runner (like pytest). The typical recipe is:
+ ResourceWarning + -Werror + a custom sys.unraisablehook.
+ """
+ nonlocal sneaky_frame_object
+ sneaky_frame_object = sys._getframe()
+
+ class SneakyThread(threading.Thread):
+ """
+ A separate thread isn't needed to make this code crash, but it does
+ make crashes more consistent, since it means sneaky_frame_object is
+ backed by freed memory after the thread completes!
+ """
+
+ def run(self):
+ """Run SneakyDel.__del__ as this frame is popped."""
+ ref = SneakyDel()
+
+ sneaky_frame_object = None
+ t = SneakyThread()
+ t.start()
+ t.join()
+ # sneaky_frame_object can be anything, really, but it's crucial that
+ # SneakyThread.run's frame isn't anywhere on the stack while it's being
+ # torn down:
+ self.assertIsNotNone(sneaky_frame_object)
+ while sneaky_frame_object is not None:
+ self.assertIsNot(
+ sneaky_frame_object.f_code, SneakyThread.run.__code__
+ )
+ sneaky_frame_object = sneaky_frame_object.f_back
@unittest.skipIf(_testcapi is None, 'need _testcapi')
class TestCAPI(unittest.TestCase):
diff --git a/Lib/test/test_httpservers.py b/Lib/test/test_httpservers.py
index a937258069ed89..ca078862cca6b9 100644
--- a/Lib/test/test_httpservers.py
+++ b/Lib/test/test_httpservers.py
@@ -26,7 +26,7 @@
import datetime
import threading
from unittest import mock
-from io import BytesIO
+from io import BytesIO, StringIO
import unittest
from test import support
@@ -990,6 +990,27 @@ def verify_http_server_response(self, response):
match = self.HTTPResponseMatch.search(response)
self.assertIsNotNone(match)
+ def test_unprintable_not_logged(self):
+ # We call the method from the class directly as our Socketless
+ # Handler subclass overrode it... nice for everything BUT this test.
+ self.handler.client_address = ('127.0.0.1', 1337)
+ log_message = BaseHTTPRequestHandler.log_message
+ with mock.patch.object(sys, 'stderr', StringIO()) as fake_stderr:
+ log_message(self.handler, '/foo')
+ log_message(self.handler, '/\033bar\000\033')
+ log_message(self.handler, '/spam %s.', 'a')
+ log_message(self.handler, '/spam %s.', '\033\x7f\x9f\xa0beans')
+ log_message(self.handler, '"GET /foo\\b"ar\007 HTTP/1.0"')
+ stderr = fake_stderr.getvalue()
+ self.assertNotIn('\033', stderr) # non-printable chars are caught.
+ self.assertNotIn('\000', stderr) # non-printable chars are caught.
+ lines = stderr.splitlines()
+ self.assertIn('/foo', lines[0])
+ self.assertIn(r'/\x1bbar\x00\x1b', lines[1])
+ self.assertIn('/spam a.', lines[2])
+ self.assertIn('/spam \\x1b\\x7f\\x9f\xa0beans.', lines[3])
+ self.assertIn(r'"GET /foo\\b"ar\x07 HTTP/1.0"', lines[4])
+
def test_http_1_1(self):
result = self.send_typical_request(b'GET / HTTP/1.1\r\n\r\n')
self.verify_http_server_response(result[0])
diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py
index 3f5c299ce681c5..2b7977b1648f70 100644
--- a/Lib/test/test_inspect.py
+++ b/Lib/test/test_inspect.py
@@ -886,6 +886,12 @@ def test_class(self):
self.assertSourceEqual(self.fodderModule.X, 1, 2)
+class TestComplexDecorator(GetSourceBase):
+ fodderModule = mod2
+
+ def test_parens_in_decorator(self):
+ self.assertSourceEqual(self.fodderModule.complex_decorated, 273, 275)
+
class _BrokenDataDescriptor(object):
"""
A broken data descriptor. See bug #1785.
diff --git a/Lib/test/test_itertools.py b/Lib/test/test_itertools.py
index 5f5bcbc7cfb8d7..b447b6cbab9c22 100644
--- a/Lib/test/test_itertools.py
+++ b/Lib/test/test_itertools.py
@@ -161,11 +161,11 @@ def test_accumulate(self):
def test_batched(self):
self.assertEqual(list(batched('ABCDEFG', 3)),
- [['A', 'B', 'C'], ['D', 'E', 'F'], ['G']])
+ [('A', 'B', 'C'), ('D', 'E', 'F'), ('G',)])
self.assertEqual(list(batched('ABCDEFG', 2)),
- [['A', 'B'], ['C', 'D'], ['E', 'F'], ['G']])
+ [('A', 'B'), ('C', 'D'), ('E', 'F'), ('G',)])
self.assertEqual(list(batched('ABCDEFG', 1)),
- [['A'], ['B'], ['C'], ['D'], ['E'], ['F'], ['G']])
+ [('A',), ('B',), ('C',), ('D',), ('E',), ('F',), ('G',)])
with self.assertRaises(TypeError): # Too few arguments
list(batched('ABCDEFG'))
@@ -188,8 +188,8 @@ def test_batched(self):
with self.subTest(s=s, n=n, batches=batches):
# Order is preserved and no data is lost
self.assertEqual(''.join(chain(*batches)), s)
- # Each batch is an exact list
- self.assertTrue(all(type(batch) is list for batch in batches))
+ # Each batch is an exact tuple
+ self.assertTrue(all(type(batch) is tuple for batch in batches))
# All but the last batch is of size n
if batches:
last_batch = batches.pop()
@@ -1809,12 +1809,12 @@ class TestPurePythonRoughEquivalents(unittest.TestCase):
def test_batched_recipe(self):
def batched_recipe(iterable, n):
- "Batch data into lists of length n. The last batch may be shorter."
+ "Batch data into tuples of length n. The last batch may be shorter."
# batched('ABCDEFG', 3) --> ABC DEF G
if n < 1:
raise ValueError('n must be at least one')
it = iter(iterable)
- while (batch := list(islice(it, n))):
+ while (batch := tuple(islice(it, n))):
yield batch
for iterable, n in product(
@@ -2087,7 +2087,7 @@ def test_accumulate(self):
def test_batched(self):
s = 'abcde'
- r = [['a', 'b'], ['c', 'd'], ['e']]
+ r = [('a', 'b'), ('c', 'd'), ('e',)]
n = 2
for g in (G, I, Ig, L, R):
with self.subTest(g=g):
diff --git a/Lib/test/test_minidom.py b/Lib/test/test_minidom.py
index ef38c362103fc6..2ca3908bd1caac 100644
--- a/Lib/test/test_minidom.py
+++ b/Lib/test/test_minidom.py
@@ -1163,14 +1163,10 @@ def testEncodings(self):
# Verify that character decoding errors raise exceptions instead
# of crashing
- if pyexpat.version_info >= (2, 4, 5):
- self.assertRaises(ExpatError, parseString,
- b'')
- self.assertRaises(ExpatError, parseString,
- b'Comment \xe7a va ? Tr\xe8s bien ?')
- else:
- self.assertRaises(UnicodeDecodeError, parseString,
- b'Comment \xe7a va ? Tr\xe8s bien ?')
+ with self.assertRaises((UnicodeDecodeError, ExpatError)):
+ parseString(
+ b'Comment \xe7a va ? Tr\xe8s bien ?'
+ )
doc.unlink()
@@ -1631,13 +1627,11 @@ def testEmptyXMLNSValue(self):
self.confirm(doc2.namespaceURI == xml.dom.EMPTY_NAMESPACE)
def testExceptionOnSpacesInXMLNSValue(self):
- if pyexpat.version_info >= (2, 4, 5):
- context = self.assertRaisesRegex(ExpatError, 'syntax error')
- else:
- context = self.assertRaisesRegex(ValueError, 'Unsupported syntax')
-
- with context:
- parseString('')
+ with self.assertRaises((ValueError, ExpatError)):
+ parseString(
+ '' +
+ ''
+ )
def testDocRemoveChild(self):
doc = parse(tstfile)
diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py
index 94db8bb7737acd..e0577916428a08 100644
--- a/Lib/test/test_os.py
+++ b/Lib/test/test_os.py
@@ -606,12 +606,13 @@ def test_stat_attributes_bytes(self):
def test_stat_result_pickle(self):
result = os.stat(self.fname)
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
- p = pickle.dumps(result, proto)
- self.assertIn(b'stat_result', p)
- if proto < 4:
- self.assertIn(b'cos\nstat_result\n', p)
- unpickled = pickle.loads(p)
- self.assertEqual(result, unpickled)
+ with self.subTest(f'protocol {proto}'):
+ p = pickle.dumps(result, proto)
+ self.assertIn(b'stat_result', p)
+ if proto < 4:
+ self.assertIn(b'cos\nstat_result\n', p)
+ unpickled = pickle.loads(p)
+ self.assertEqual(result, unpickled)
@unittest.skipUnless(hasattr(os, 'statvfs'), 'test needs os.statvfs()')
def test_statvfs_attributes(self):
diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py
index 78cac231929a61..cb284195d976ff 100644
--- a/Lib/test/test_syntax.py
+++ b/Lib/test/test_syntax.py
@@ -2145,6 +2145,22 @@ def test_error_parenthesis(self):
for paren in ")]}":
self._check_error(paren + "1 + 2", f"unmatched '\\{paren}'")
+ # Some more complex examples:
+ code = """\
+func(
+ a=["unclosed], # Need a quote in this comment: "
+ b=2,
+)
+"""
+ self._check_error(code, "parenthesis '\\)' does not match opening parenthesis '\\['")
+
+ def test_error_string_literal(self):
+
+ self._check_error("'blech", "unterminated string literal")
+ self._check_error('"blech', "unterminated string literal")
+ self._check_error("'''blech", "unterminated triple-quoted string literal")
+ self._check_error('"""blech', "unterminated triple-quoted string literal")
+
def test_invisible_characters(self):
self._check_error('print\x17("Hello")', "invalid non-printable character")
diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py
index da602b0199d52c..1cae1b0de7140f 100644
--- a/Lib/test/test_typing.py
+++ b/Lib/test/test_typing.py
@@ -7719,6 +7719,7 @@ class CustomerModel:
"eq_default": True,
"order_default": False,
"kw_only_default": True,
+ "frozen_default": False,
"field_specifiers": (),
"kwargs": {},
}
@@ -7749,6 +7750,7 @@ class CustomerModel(Decorated, frozen=True):
"eq_default": True,
"order_default": True,
"kw_only_default": False,
+ "frozen_default": False,
"field_specifiers": (),
"kwargs": {"make_everything_awesome": True},
}
@@ -7765,7 +7767,7 @@ def __new__(
return super().__new__(cls, name, bases, namespace)
Decorated = dataclass_transform(
- order_default=True, field_specifiers=(Field,)
+ order_default=True, frozen_default=True, field_specifiers=(Field,)
)(ModelMeta)
class ModelBase(metaclass=Decorated): ...
@@ -7780,6 +7782,7 @@ class CustomerModel(ModelBase, init=False):
"eq_default": True,
"order_default": True,
"kw_only_default": False,
+ "frozen_default": True,
"field_specifiers": (Field,),
"kwargs": {},
}
diff --git a/Lib/test/test_unary.py b/Lib/test/test_unary.py
index c3c17cc9f611dd..a45fbf6bd6bc54 100644
--- a/Lib/test/test_unary.py
+++ b/Lib/test/test_unary.py
@@ -8,7 +8,6 @@ def test_negative(self):
self.assertTrue(-2 == 0 - 2)
self.assertEqual(-0, 0)
self.assertEqual(--2, 2)
- self.assertTrue(-2 == 0 - 2)
self.assertTrue(-2.0 == 0 - 2.0)
self.assertTrue(-2j == 0 - 2j)
@@ -16,15 +15,13 @@ def test_positive(self):
self.assertEqual(+2, 2)
self.assertEqual(+0, 0)
self.assertEqual(++2, 2)
- self.assertEqual(+2, 2)
self.assertEqual(+2.0, 2.0)
self.assertEqual(+2j, 2j)
def test_invert(self):
- self.assertTrue(-2 == 0 - 2)
- self.assertEqual(-0, 0)
- self.assertEqual(--2, 2)
- self.assertTrue(-2 == 0 - 2)
+ self.assertTrue(~2 == -(2+1))
+ self.assertEqual(~0, -1)
+ self.assertEqual(~~2, 2)
def test_no_overflow(self):
nines = "9" * 32
diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py
index f067560ca6caa1..2df74f5e6f99b2 100644
--- a/Lib/test/test_urllib.py
+++ b/Lib/test/test_urllib.py
@@ -1104,6 +1104,8 @@ def test_unquoting(self):
self.assertEqual(result.count('%'), 1,
"using unquote(): not all characters escaped: "
"%s" % result)
+
+ def test_unquote_rejects_none_and_tuple(self):
self.assertRaises((TypeError, AttributeError), urllib.parse.unquote, None)
self.assertRaises((TypeError, AttributeError), urllib.parse.unquote, ())
diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py
index 28f88412fdcaac..498c0382d2137b 100644
--- a/Lib/test/test_urllib2.py
+++ b/Lib/test/test_urllib2.py
@@ -1824,6 +1824,10 @@ def test_HTTPError_interface(self):
expected_errmsg = '' % (err.code, err.msg)
self.assertEqual(repr(err), expected_errmsg)
+ def test_gh_98778(self):
+ x = urllib.error.HTTPError("url", 405, "METHOD NOT ALLOWED", None, None)
+ self.assertEqual(getattr(x, "__notes__", ()), ())
+
def test_parse_proxy(self):
parse_proxy_test_cases = [
('proxy.example.com',
diff --git a/Lib/test/test_winreg.py b/Lib/test/test_winreg.py
index 8157c2da6efaa6..769ab67b0f5611 100644
--- a/Lib/test/test_winreg.py
+++ b/Lib/test/test_winreg.py
@@ -113,7 +113,6 @@ def _write_test_data(self, root_key, subkeystr="sub_key",
"does not close the actual key!")
except OSError:
pass
-
def _read_test_data(self, root_key, subkeystr="sub_key", OpenKey=OpenKey):
# Check we can get default value for this key.
val = QueryValue(root_key, test_key_name)
@@ -340,6 +339,23 @@ def test_setvalueex_value_range(self):
finally:
DeleteKey(HKEY_CURRENT_USER, test_key_name)
+ def test_setvalueex_negative_one_check(self):
+ # Test for Issue #43984: check that -1 was not set by SetValueEx.
+ # Py2Reg, which gets called by SetValueEx, wasn't checking the return
+ # value of PyLong_AsUnsignedLong, thus setting -1 as the value in the
+ # registry. The implementation now checks the PyLong_AsUnsignedLong
+ # return value to ensure the value set was not -1.
+ try:
+ with CreateKey(HKEY_CURRENT_USER, test_key_name) as ck:
+ with self.assertRaises(OverflowError):
+ SetValueEx(ck, "test_name_dword", None, REG_DWORD, -1)
+ SetValueEx(ck, "test_name_qword", None, REG_QWORD, -1)
+ self.assertRaises(FileNotFoundError, QueryValueEx, ck, "test_name_dword")
+ self.assertRaises(FileNotFoundError, QueryValueEx, ck, "test_name_qword")
+
+ finally:
+ DeleteKey(HKEY_CURRENT_USER, test_key_name)
+
def test_queryvalueex_return_value(self):
# Test for Issue #16759, return unsigned int from QueryValueEx.
# Reg2Py, which gets called by QueryValueEx, was returning a value
diff --git a/Lib/typing.py b/Lib/typing.py
index 38e227e3c55d59..d9d6fbcdb8f068 100644
--- a/Lib/typing.py
+++ b/Lib/typing.py
@@ -3363,6 +3363,7 @@ def dataclass_transform(
eq_default: bool = True,
order_default: bool = False,
kw_only_default: bool = False,
+ frozen_default: bool = False,
field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = (),
**kwargs: Any,
) -> Callable[[T], T]:
@@ -3416,6 +3417,8 @@ class CustomerModel(ModelBase):
assumed to be True or False if it is omitted by the caller.
- ``kw_only_default`` indicates whether the ``kw_only`` parameter is
assumed to be True or False if it is omitted by the caller.
+ - ``frozen_default`` indicates whether the ``frozen`` parameter is
+ assumed to be True or False if it is omitted by the caller.
- ``field_specifiers`` specifies a static list of supported classes
or functions that describe fields, similar to ``dataclasses.field()``.
- Arbitrary other keyword arguments are accepted in order to allow for
@@ -3432,6 +3435,7 @@ def decorator(cls_or_fn):
"eq_default": eq_default,
"order_default": order_default,
"kw_only_default": kw_only_default,
+ "frozen_default": frozen_default,
"field_specifiers": field_specifiers,
"kwargs": kwargs,
}
diff --git a/Lib/urllib/error.py b/Lib/urllib/error.py
index 8cd901f13f8e49..feec0e7f848e46 100644
--- a/Lib/urllib/error.py
+++ b/Lib/urllib/error.py
@@ -10,7 +10,7 @@
an application may want to handle an exception like a regular
response.
"""
-
+import io
import urllib.response
__all__ = ['URLError', 'HTTPError', 'ContentTooShortError']
@@ -42,12 +42,9 @@ def __init__(self, url, code, msg, hdrs, fp):
self.hdrs = hdrs
self.fp = fp
self.filename = url
- # The addinfourl classes depend on fp being a valid file
- # object. In some cases, the HTTPError may not have a valid
- # file object. If this happens, the simplest workaround is to
- # not initialize the base classes.
- if fp is not None:
- self.__super_init(fp, hdrs, url, code)
+ if fp is None:
+ fp = io.StringIO()
+ self.__super_init(fp, hdrs, url, code)
def __str__(self):
return 'HTTP Error %s: %s' % (self.code, self.msg)
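A quick sketch of the effect of the :class:`io.StringIO` fallback above: an ``HTTPError`` constructed without a body (as in the new ``test_gh_98778`` test below) still behaves like an empty response object, so reading it no longer requires a real file object::

    import urllib.error

    err = urllib.error.HTTPError("http://example.com/", 404, "Not Found", None, None)
    print(repr(err.read()))   # '' -- an empty in-memory body instead of an error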
diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py
index 4f6867accbc0eb..5f95c5ff7f9c1c 100644
--- a/Lib/urllib/parse.py
+++ b/Lib/urllib/parse.py
@@ -600,6 +600,9 @@ def urldefrag(url):
def unquote_to_bytes(string):
"""unquote_to_bytes('abc%20def') -> b'abc def'."""
+ return bytes(_unquote_impl(string))
+
+def _unquote_impl(string: bytes | bytearray | str) -> bytes | bytearray:
# Note: strings are encoded as UTF-8. This is only an issue if it contains
# unescaped non-ASCII characters, which URIs should not.
if not string:
@@ -611,8 +614,8 @@ def unquote_to_bytes(string):
bits = string.split(b'%')
if len(bits) == 1:
return string
- res = [bits[0]]
- append = res.append
+ res = bytearray(bits[0])
+ append = res.extend
# Delay the initialization of the table to not waste memory
# if the function is never called
global _hextobyte
@@ -626,10 +629,20 @@ def unquote_to_bytes(string):
except KeyError:
append(b'%')
append(item)
- return b''.join(res)
+ return res
_asciire = re.compile('([\x00-\x7f]+)')
+def _generate_unquoted_parts(string, encoding, errors):
+ previous_match_end = 0
+ for ascii_match in _asciire.finditer(string):
+ start, end = ascii_match.span()
+ yield string[previous_match_end:start] # Non-ASCII
+ # The ascii_match[1] group == string[start:end].
+ yield _unquote_impl(ascii_match[1]).decode(encoding, errors)
+ previous_match_end = end
+ yield string[previous_match_end:] # Non-ASCII tail
+
def unquote(string, encoding='utf-8', errors='replace'):
"""Replace %xx escapes by their single-character equivalent. The optional
encoding and errors parameters specify how to decode percent-encoded
@@ -641,21 +654,16 @@ def unquote(string, encoding='utf-8', errors='replace'):
unquote('abc%20def') -> 'abc def'.
"""
if isinstance(string, bytes):
- return unquote_to_bytes(string).decode(encoding, errors)
+ return _unquote_impl(string).decode(encoding, errors)
if '%' not in string:
+ # Is it a string-like object?
string.split
return string
if encoding is None:
encoding = 'utf-8'
if errors is None:
errors = 'replace'
- bits = _asciire.split(string)
- res = [bits[0]]
- append = res.append
- for i in range(1, len(bits), 2):
- append(unquote_to_bytes(bits[i]).decode(encoding, errors))
- append(bits[i + 1])
- return ''.join(res)
+ return ''.join(_generate_unquoted_parts(string, encoding, errors))
def parse_qs(qs, keep_blank_values=False, strict_parsing=False,
diff --git a/Makefile.pre.in b/Makefile.pre.in
index f6df7a620deaed..dd6c3fbd1c6483 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -976,7 +976,8 @@ Programs/_testembed: Programs/_testembed.o $(LINK_PYTHON_DEPS)
BOOTSTRAP_HEADERS = \
Python/frozen_modules/importlib._bootstrap.h \
- Python/frozen_modules/importlib._bootstrap_external.h
+ Python/frozen_modules/importlib._bootstrap_external.h \
+ Python/frozen_modules/zipimport.h
Programs/_bootstrap_python.o: Programs/_bootstrap_python.c $(BOOTSTRAP_HEADERS) $(PYTHON_HEADERS)
@@ -1623,6 +1624,7 @@ PYTHON_HEADERS= \
$(srcdir)/Include/internal/pycore_bytesobject.h \
$(srcdir)/Include/internal/pycore_call.h \
$(srcdir)/Include/internal/pycore_ceval.h \
+ $(srcdir)/Include/internal/pycore_ceval_state.h \
$(srcdir)/Include/internal/pycore_code.h \
$(srcdir)/Include/internal/pycore_compile.h \
$(srcdir)/Include/internal/pycore_condvar.h \
@@ -1632,6 +1634,7 @@ PYTHON_HEADERS= \
$(srcdir)/Include/internal/pycore_descrobject.h \
$(srcdir)/Include/internal/pycore_dtoa.h \
$(srcdir)/Include/internal/pycore_exceptions.h \
+ $(srcdir)/Include/internal/pycore_faulthandler.h \
$(srcdir)/Include/internal/pycore_fileutils.h \
$(srcdir)/Include/internal/pycore_floatobject.h \
$(srcdir)/Include/internal/pycore_format.h \
@@ -1662,6 +1665,7 @@ PYTHON_HEADERS= \
$(srcdir)/Include/internal/pycore_pymem.h \
$(srcdir)/Include/internal/pycore_pymem_init.h \
$(srcdir)/Include/internal/pycore_pystate.h \
+ $(srcdir)/Include/internal/pycore_pythread.h \
$(srcdir)/Include/internal/pycore_range.h \
$(srcdir)/Include/internal/pycore_runtime.h \
$(srcdir)/Include/internal/pycore_runtime_init_generated.h \
@@ -1672,8 +1676,10 @@ PYTHON_HEADERS= \
$(srcdir)/Include/internal/pycore_structseq.h \
$(srcdir)/Include/internal/pycore_symtable.h \
$(srcdir)/Include/internal/pycore_sysmodule.h \
+ $(srcdir)/Include/internal/pycore_time.h \
$(srcdir)/Include/internal/pycore_token.h \
$(srcdir)/Include/internal/pycore_traceback.h \
+ $(srcdir)/Include/internal/pycore_tracemalloc.h \
$(srcdir)/Include/internal/pycore_tuple.h \
$(srcdir)/Include/internal/pycore_typeobject.h \
$(srcdir)/Include/internal/pycore_ucnhash.h \
diff --git a/Misc/NEWS.d/3.12.0a3.rst b/Misc/NEWS.d/3.12.0a3.rst
new file mode 100644
index 00000000000000..3d1e43350d136e
--- /dev/null
+++ b/Misc/NEWS.d/3.12.0a3.rst
@@ -0,0 +1,836 @@
+.. date: 2022-12-05-01-39-10
+.. gh-issue: 100001
+.. nonce: uD05Fc
+.. release date: 2022-12-06
+.. section: Security
+
+``python -m http.server`` no longer allows terminal control characters sent
+within a garbage request to be printed to the stderr server log.
+
+This is done by changing the :mod:`http.server`
+:class:`BaseHTTPRequestHandler` ``.log_message`` method to replace control
+characters with a ``\xHH`` hex escape before printing.
+
+..
+
+.. date: 2022-11-11-12-50-28
+.. gh-issue: 87604
+.. nonce: OtwH5L
+.. section: Security
+
+Avoid publishing the list of active per-interpreter audit hooks via the
+:mod:`gc` module.
+
+..
+
+.. date: 2022-11-30-11-09-40
+.. gh-issue: 99891
+.. nonce: 9VomwB
+.. section: Core and Builtins
+
+Fix a bug in the tokenizer that could cause infinite recursion when showing
+syntax warnings that happen in the first line of the source. Patch by Pablo
+Galindo.
+
+..
+
+.. date: 2022-11-27-13-50-13
+.. gh-issue: 91054
+.. nonce: oox_kW
+.. section: Core and Builtins
+
+Add :c:func:`PyCode_AddWatcher` and :c:func:`PyCode_ClearWatcher` APIs to
+register callbacks to receive notification on creation and destruction of
+code objects.
+
+..
+
+.. date: 2022-11-26-04-00-41
+.. gh-issue: 99729
+.. nonce: A3ovwQ
+.. section: Core and Builtins
+
+Fix an issue that could cause frames to be visible to Python code as they
+are being torn down, possibly leading to memory corruption or hard crashes
+of the interpreter.
+
+..
+
+.. date: 2022-11-23-18-16-18
+.. gh-issue: 99708
+.. nonce: 7MuaiR
+.. section: Core and Builtins
+
+Fix bug where compiler crashes on an if expression with an empty body block.
+
+..
+
+.. date: 2022-11-21-11-27-14
+.. gh-issue: 99578
+.. nonce: DcKoBJ
+.. section: Core and Builtins
+
+Fix a reference bug in :func:`_imp.create_builtin()` after the creation of
+the first sub-interpreter for modules ``builtins`` and ``sys``. Patch by
+Victor Stinner.
+
+..
+
+.. date: 2022-11-19-22-27-52
+.. gh-issue: 99581
+.. nonce: yKYPbf
+.. section: Core and Builtins
+
+Fixed a bug that was causing a buffer overflow if the tokenizer copies a
+line missing the newline character from a file that is as long as the
+available tokenizer buffer. Patch by Pablo Galindo.
+
+..
+
+.. date: 2022-11-18-11-24-25
+.. gh-issue: 99553
+.. nonce: F64h-n
+.. section: Core and Builtins
+
+Fix bug where an :exc:`ExceptionGroup` subclass can wrap a
+:exc:`BaseException`.
+
+..
+
+.. date: 2022-11-16-21-35-30
+.. gh-issue: 99547
+.. nonce: p_c_bp
+.. section: Core and Builtins
+
+Add a function to :mod:`os.path` to check whether a path is a junction:
+``isjunction``. Add similar functionality to :class:`pathlib.Path` as
+``is_junction``.
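A minimal usage sketch of the new junction checks (the path is hypothetical; on non-Windows platforms both calls simply return ``False``)::

    import os.path
    from pathlib import Path

    p = r"C:\some\path"               # hypothetical path, for illustration only
    print(os.path.isjunction(p))      # True only if the path is an NTFS junction
    print(Path(p).is_junction())      # pathlib equivalent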
+
+..
+
+.. date: 2022-11-12-01-39-57
+.. gh-issue: 99370
+.. nonce: _cu32j
+.. section: Core and Builtins
+
+Fix zip path for venv created from a non-installed python on POSIX
+platforms.
+
+..
+
+.. date: 2022-11-11-14-04-01
+.. gh-issue: 99377
+.. nonce: -CJvWn
+.. section: Core and Builtins
+
+Add audit events for thread creation and clear operations.
+
+..
+
+.. date: 2022-11-10-17-09-16
+.. gh-issue: 98686
+.. nonce: bmAKwr
+.. section: Core and Builtins
+
+Remove the ``BINARY_OP_GENERIC`` and ``COMPARE_OP_GENERIC``
+"specializations".
+
+..
+
+.. date: 2022-11-10-16-53-40
+.. gh-issue: 99298
+.. nonce: HqRJES
+.. section: Core and Builtins
+
+Remove the remaining error paths for attribute specializations, and refuse
+to specialize attribute accesses on types that haven't had
+:c:func:`PyType_Ready` called on them yet.
+
+..
+
+.. date: 2022-11-05-22-26-35
+.. gh-issue: 99127
+.. nonce: Btk7ih
+.. section: Core and Builtins
+
+Restrict some features of :mod:`syslog` to the main interpreter. Patch by
+Dong-hee Na.
+
+..
+
+.. date: 2022-10-05-11-44-52
+.. gh-issue: 91053
+.. nonce: f5Bo3p
+.. section: Core and Builtins
+
+Optimizing interpreters and JIT compilers may need to invalidate internal
+metadata when functions are modified. This change adds the ability to
+provide a callback that will be invoked each time a function is created,
+modified, or destroyed.
+
+..
+
+.. date: 2022-09-17-17-08-01
+.. gh-issue: 90994
+.. nonce: f0H2Yd
+.. section: Core and Builtins
+
+Improve error messages when there's a syntax error with call arguments. The
+following three cases are covered:
+
+- No value is assigned to a named argument, e.g. ``foo(a=)``.
+- A value is assigned to a star argument, e.g. ``foo(*args=[0])``.
+- A value is assigned to a double-star keyword argument, e.g.
+  ``foo(**kwarg={'a': 0})``.
+
+..
+
+.. bpo: 45026
+.. date: 2021-08-29-15-55-19
+.. nonce: z7nTA3
+.. section: Core and Builtins
+
+Optimize the :class:`range` object iterator. It is now smaller, and iterates
+faster over ranges containing large numbers. Pickles are smaller and
+unpickle faster.
+
+..
+
+.. bpo: 31718
+.. date: 2020-02-23-23-48-15
+.. nonce: sXko5e
+.. section: Core and Builtins
+
+Raise :exc:`ValueError` instead of :exc:`SystemError` when methods of
+uninitialized :class:`io.IncrementalNewlineDecoder` objects are called.
+Patch by Oren Milman.
+
+..
+
+.. bpo: 38031
+.. date: 2019-09-04-19-09-49
+.. nonce: Yq4L72
+.. section: Core and Builtins
+
+Fix a possible assertion failure in :class:`io.FileIO` when the opener
+returns an invalid file descriptor.
+
+..
+
+.. date: 2022-12-05-13-40-15
+.. gh-issue: 100001
+.. nonce: 78ReYp
+.. section: Library
+
+Also escape the backslash character in the http.server
+BaseHTTPRequestHandler.log_message so that it is technically possible to
+parse the line and reconstruct what the original data was. Without this, a
+``\xHH`` sequence is ambiguous: it could be a hex replacement we inserted, or
+the two characters ``\x`` as they appeared in the original request line.
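A rough sketch of the escaping described in the two ``log_message`` entries above (not necessarily the exact table used by :mod:`http.server`): C0/C1 control characters become ``\xHH`` escapes and the backslash itself is doubled, so the logged line can be parsed back unambiguously::

    import itertools

    # Map C0 and C1 control characters to \xHH escapes; escape "\" as well.
    table = {c: rf"\x{c:02x}" for c in itertools.chain(range(0x20), range(0x7f, 0xa0))}
    table[ord("\\")] = r"\\"
    control_char_table = str.maketrans(table)

    line = 'GET /\x1bfoo\\bar HTTP/1.0'
    print(line.translate(control_char_table))
    # -> GET /\x1bfoo\\bar HTTP/1.0   (ESC spelled out as \x1b, "\" doubled)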
+
+..
+
+.. date: 2022-12-03-05-58-48
+.. gh-issue: 99957
+.. nonce: jLYYgN
+.. section: Library
+
+Add ``frozen_default`` parameter to :func:`typing.dataclass_transform`.
+
+..
+
+.. date: 2022-11-22-19-31-26
+.. gh-issue: 79033
+.. nonce: MW6kHq
+.. section: Library
+
+Fix :func:`asyncio.Server.wait_closed` to actually do what the docs promise
+-- wait for all existing connections to complete, after closing the server.
+
+..
+
+.. date: 2022-11-21-17-56-18
+.. gh-issue: 51524
+.. nonce: nTykx8
+.. section: Library
+
+Fix a bug when calling ``trace.CoverageResults`` with a valid *infile*.
+
+..
+
+.. date: 2022-11-21-13-49-03
+.. gh-issue: 99645
+.. nonce: 9w1QKq
+.. section: Library
+
+Fix a bug in handling class cleanups in :class:`unittest.TestCase`. Now
+``addClassCleanup()`` uses separate lists for different ``TestCase``
+subclasses, and ``doClassCleanups()`` only cleans up the particular class.
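A small sketch of what the fix means in practice, assuming a shared base class that registers a class-level cleanup; each subclass now keeps its own cleanup list instead of sharing one::

    import unittest

    class Base(unittest.TestCase):
        @classmethod
        def setUpClass(cls):
            # One cleanup registered per subclass, run by that subclass only.
            cls.addClassCleanup(print, "cleaning up", cls.__name__)

    class A(Base):
        def test_a(self):
            pass

    class B(Base):
        def test_b(self):
            pass

    if __name__ == "__main__":
        unittest.main()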
+
+..
+
+.. date: 2022-11-21-10-45-54
+.. gh-issue: 99508
+.. nonce: QqVbby
+.. section: Library
+
+Fix ``TypeError`` in ``Lib/importlib/_bootstrap_external.py`` while calling
+``_imp.source_hash()``.
+
+..
+
+.. date: 2022-11-17-10-56-47
+.. gh-issue: 66285
+.. nonce: KvjlaB
+.. section: Library
+
+Fix :mod:`asyncio` to not share event loop and signal wakeupfd in forked
+processes. Patch by Kumar Aditya.
+
+..
+
+.. date: 2022-11-15-10-55-24
+.. gh-issue: 97001
+.. nonce: KeQuVF
+.. section: Library
+
+Release the GIL when calling termios APIs to avoid blocking threads.
+
+..
+
+.. date: 2022-11-15-04-08-25
+.. gh-issue: 92647
+.. nonce: cZcjnJ
+.. section: Library
+
+Use final status of an enum to determine lookup or creation branch of
+functional API.
+
+..
+
+.. date: 2022-11-14-08-21-56
+.. gh-issue: 99388
+.. nonce: UWSlwp
+.. section: Library
+
+Add *loop_factory* parameter to :func:`asyncio.run` to allow specifying a
+custom event loop factory. Patch by Kumar Aditya.
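A minimal usage sketch of the new *loop_factory* parameter, here simply passing :func:`asyncio.new_event_loop` as the factory::

    import asyncio

    async def main():
        return "done"

    # The factory is called once to create the loop that runs the coroutine.
    print(asyncio.run(main(), loop_factory=asyncio.new_event_loop))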
+
+..
+
+.. date: 2022-11-13-02-06-56
+.. gh-issue: 99341
+.. nonce: 8-OlwB
+.. section: Library
+
+Fix :func:`ast.increment_lineno` to also cover :class:`ast.TypeIgnore` when
+changing line numbers.
+
+..
+
+.. date: 2022-11-12-12-15-30
+.. gh-issue: 99382
+.. nonce: dKg_rW
+.. section: Library
+
+Check the number of arguments in substitution in user generics containing a
+:class:`~typing.TypeVarTuple` and one or more :class:`~typing.TypeVar`.
+
+..
+
+.. date: 2022-11-12-12-10-23
+.. gh-issue: 99379
+.. nonce: bcGhxF
+.. section: Library
+
+Fix substitution of :class:`~typing.ParamSpec` followed by
+:class:`~typing.TypeVarTuple` in generic aliases.
+
+..
+
+.. date: 2022-11-12-12-08-34
+.. gh-issue: 99344
+.. nonce: 7M_u8G
+.. section: Library
+
+Fix substitution of :class:`~typing.TypeVarTuple` and
+:class:`~typing.ParamSpec` together in user generics.
+
+..
+
+.. date: 2022-11-09-12-36-12
+.. gh-issue: 99284
+.. nonce: 9p4J2l
+.. section: Library
+
+Remove ``_use_broken_old_ctypes_structure_semantics_`` old untested and
+undocumented hack from :mod:`ctypes`.
+
+..
+
+.. date: 2022-11-09-03-34-29
+.. gh-issue: 99201
+.. nonce: lDJ7xI
+.. section: Library
+
+Fix :exc:`IndexError` when initializing the config variables on Windows if
+``HAVE_DYNAMIC_LOADING`` is not set.
+
+..
+
+.. date: 2022-11-08-15-54-43
+.. gh-issue: 99240
+.. nonce: MhYwcz
+.. section: Library
+
+Fix double-free bug in Argument Clinic ``str_converter`` by extracting
+memory clean up to a new ``post_parsing`` section.
+
+..
+
+.. date: 2022-11-08-11-18-51
+.. gh-issue: 64490
+.. nonce: VcBgrN
+.. section: Library
+
+Fix refcount error when arguments are packed to tuple in Argument Clinic.
+
+..
+
+.. date: 2022-11-02-23-47-07
+.. gh-issue: 99029
+.. nonce: 7uCiIB
+.. section: Library
+
+:meth:`pathlib.PurePath.relative_to()` now treats naked Windows drive paths
+as relative. This brings its behaviour in line with other parts of pathlib.
+
+..
+
+.. date: 2022-10-24-11-01-05
+.. gh-issue: 98253
+.. nonce: HVd5v4
+.. section: Library
+
+The implementation of the typing module is now more resilient to reference
+leaks in binary extension modules.
+
+Previously, a reference leak in a typed C API-based extension module could
+leak internals of the typing module, which could in turn introduce leaks in
+essentially any other package with typed function signatures. Although the
+typing package is not the original source of the problem, such non-local
+dependences exacerbate debugging of large-scale projects, and the
+implementation was therefore changed to reduce harm by providing better
+isolation.
+
+..
+
+.. date: 2022-10-19-18-31-53
+.. gh-issue: 98458
+.. nonce: vwyq7O
+.. section: Library
+
+Fix infinite loop in unittest when a self-referencing chained exception is
+raised
+
+..
+
+.. date: 2022-10-19-13-37-23
+.. gh-issue: 93453
+.. nonce: wTB_sH
+.. section: Library
+
+:func:`asyncio.get_event_loop` and many other :mod:`asyncio` functions like
+:func:`asyncio.ensure_future`, :func:`asyncio.shield` or
+:func:`asyncio.gather`, and also the
+:meth:`~asyncio.BaseDefaultEventLoopPolicy.get_event_loop` method of
+:class:`asyncio.BaseDefaultEventLoopPolicy` now raise a :exc:`RuntimeError`
+if called when there is no running event loop and the current event loop was
+not set. Previously they implicitly created and set a new current event
+loop. :exc:`DeprecationWarning` is no longer emitted if there is no running
+event loop but the current event loop was set.
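An illustration of the behaviour described above, assuming no event loop has been set in the main thread::

    import asyncio

    async def main():
        # Inside a running loop this keeps working as before.
        return asyncio.get_event_loop()

    asyncio.run(main())

    # With no running loop and no current loop set, this now raises
    # RuntimeError instead of implicitly creating a new loop.
    try:
        asyncio.get_event_loop()
    except RuntimeError as exc:
        print(exc)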
+
+..
+
+.. date: 2022-10-16-18-52-00
+.. gh-issue: 97966
+.. nonce: humlhz
+.. section: Library
+
+On ``uname_result``, restored expectation that ``_fields`` and ``_asdict``
+would include all six properties including ``processor``.
+
+..
+
+.. date: 2022-10-13-22-13-54
+.. gh-issue: 98248
+.. nonce: lwyygy
+.. section: Library
+
+Provide informative error messages in :func:`struct.pack` when its integral
+arguments are not in range.
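For example, packing a value that does not fit the requested format now yields a message that spells out the problem::

    import struct

    try:
        struct.pack("B", 300)   # 300 is out of range for an unsigned byte
    except struct.error as exc:
        print(exc)              # the message now indicates the permitted range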
+
+..
+
+.. date: 2022-10-08-19-20-33
+.. gh-issue: 98108
+.. nonce: WUObqM
+.. section: Library
+
+``zipfile.Path`` is now pickleable if its initialization parameters were
+pickleable (e.g. for file system paths).
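A short sketch, using a throw-away archive on the file system (a hypothetical name, for illustration only)::

    import os
    import pickle
    import tempfile
    import zipfile

    with tempfile.TemporaryDirectory() as tmp:
        archive = os.path.join(tmp, "example.zip")
        with zipfile.ZipFile(archive, "w") as zf:
            zf.writestr("data.txt", "hello")

        p = zipfile.Path(archive, "data.txt")
        restored = pickle.loads(pickle.dumps(p))   # works after this change
        print(restored.read_text())                # hello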
+
+..
+
+.. date: 2022-10-08-15-41-00
+.. gh-issue: 98098
+.. nonce: DugpWi
+.. section: Library
+
+Created packages from zipfile and test_zipfile modules, separating
+``zipfile.Path`` functionality.
+
+..
+
+.. date: 2022-10-02-12-38-22
+.. gh-issue: 82836
+.. nonce: OvYLmC
+.. section: Library
+
+Fix :attr:`~ipaddress.IPv4Address.is_private` properties in the
+:mod:`ipaddress` module. Previously non-private networks (0.0.0.0/0) would
+return True from this method; now they correctly return False.
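For example::

    import ipaddress

    # The all-zeros network covers the whole address space and is not private.
    print(ipaddress.ip_network("0.0.0.0/0").is_private)    # now False
    print(ipaddress.ip_network("10.0.0.0/8").is_private)   # still True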
+
+..
+
+.. date: 2022-09-14-21-56-15
+.. gh-issue: 96828
+.. nonce: ZoOY5G
+.. section: Library
+
+Add an :data:`~ssl.OP_ENABLE_KTLS` option for enabling the use of the kernel
+TLS (kTLS). Patch by Illia Volochii.
+
+..
+
+.. date: 2022-08-06-12-18-07
+.. gh-issue: 88863
+.. nonce: NnqsuJ
+.. section: Library
+
+To avoid apparent memory leaks when :func:`asyncio.open_connection` raises,
+break reference cycles generated by local exception and future instances
+(which has exception instance as its member var). Patch by Dong Uk, Kang.
+
+..
+
+.. date: 2022-04-23-03-46-37
+.. gh-issue: 91078
+.. nonce: 87-hkp
+.. section: Library
+
+:meth:`TarFile.next` now returns ``None`` when called on an empty tarfile.
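A quick illustration with an empty in-memory archive::

    import io
    import tarfile

    buf = io.BytesIO()
    tarfile.open(fileobj=buf, mode="w").close()   # write an empty archive
    buf.seek(0)

    with tarfile.open(fileobj=buf) as tf:
        print(tf.next())   # None for an empty tarfile, instead of an error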
+
+..
+
+.. bpo: 47220
+.. date: 2022-04-04-22-54-11
+.. nonce: L9jYu4
+.. section: Library
+
+Document the optional *callback* parameter of :class:`WeakMethod`. Patch by
+Géry Ogam.
+
+..
+
+.. bpo: 44817
+.. date: 2021-08-03-05-31-00
+.. nonce: wOW_Qn
+.. section: Library
+
+Ignore WinError 53 (ERROR_BAD_NETPATH), 65 (ERROR_NETWORK_ACCESS_DENIED) and
+161 (ERROR_BAD_PATHNAME) when using ntpath.realpath().
+
+..
+
+.. bpo: 41260
+.. date: 2020-08-02-23-46-22
+.. nonce: Q2BNzY
+.. section: Library
+
+Rename the *fmt* parameter of the pure Python implementation of
+:meth:`datetime.date.strftime` to *format*.
+
+..
+
+.. bpo: 15999
+.. date: 2019-08-30-10-48-53
+.. nonce: QqsRRi
+.. section: Library
+
+All built-in functions now accept arguments of any type instead of just
+``bool`` and ``int`` for boolean parameters.
+
+..
+
+.. date: 2022-12-02-17-08-08
+.. gh-issue: 99931
+.. nonce: wC46hE
+.. section: Documentation
+
+Use `sphinxext-opengraph `__ to
+generate `OpenGraph metadata `__.
+
+..
+
+.. date: 2022-11-26-21-43-05
+.. gh-issue: 89682
+.. nonce: DhKoTM
+.. section: Documentation
+
+Reworded docstring of the default ``__contains__`` to clarify that it
+returns a :class:`bool`.
+
+..
+
+.. date: 2022-11-26-15-51-23
+.. gh-issue: 88330
+.. nonce: B_wFq8
+.. section: Documentation
+
+Improved the description of what a resource is in importlib.resources docs.
+
+..
+
+.. date: 2022-11-16-12-52-23
+.. gh-issue: 92892
+.. nonce: TS-P0j
+.. section: Documentation
+
+Document that calling variadic functions with ctypes requires special care
+on macOS/arm64 (and possibly other platforms).
+
+..
+
+.. bpo: 41825
+.. date: 2020-09-22-12-32-16
+.. nonce: npcaCb
+.. section: Documentation
+
+Restructured the documentation for the :func:`os.wait* ` family of
+functions, and improved the docs for :func:`os.waitid` with more explanation
+of the possible argument constants.
+
+..
+
+.. date: 2022-12-05-16-12-56
+.. gh-issue: 99892
+.. nonce: sz_eW8
+.. section: Tests
+
+Skip test_normalization() of test_unicodedata if it fails to download the
+NormalizationTest.txt file from pythontest.net. Patch by Victor Stinner.
+
+..
+
+.. date: 2022-12-01-18-55-18
+.. gh-issue: 99934
+.. nonce: Ox3Fqf
+.. section: Tests
+
+Correct ``test_marshal`` on 32-bit x86: the ``test_deterministic_sets`` test
+was failing.
+
+..
+
+.. date: 2022-11-23-18-32-16
+.. gh-issue: 99741
+.. nonce: q4R7NH
+.. section: Tests
+
+We've implemented multi-phase init (PEP 489/630/687) for the internal (for
+testing) _xxsubinterpreters module.
+
+..
+
+.. date: 2022-11-21-19-21-30
+.. gh-issue: 99659
+.. nonce: 4gP0nm
+.. section: Tests
+
+Optional big memory tests in ``test_sqlite3`` now catch the correct
+:exc:`sqlite3.DataError` exception type when too large strings and/or blobs
+are passed.
+
+..
+
+.. date: 2022-11-19-13-34-28
+.. gh-issue: 99593
+.. nonce: 8ZfCkj
+.. section: Tests
+
+Cover the Unicode C API with tests.
+
+..
+
+.. date: 2022-08-22-15-49-14
+.. gh-issue: 96002
+.. nonce: 4UE9UE
+.. section: Tests
+
+Add functional test for Argument Clinic.
+
+..
+
+.. date: 2022-11-24-02-58-10
+.. gh-issue: 99086
+.. nonce: DV_4Br
+.. section: Build
+
+Fix ``-Wimplicit-int``, ``-Wstrict-prototypes``, and
+``-Wimplicit-function-declaration`` compiler warnings in
+:program:`configure` checks.
+
+..
+
+.. date: 2022-11-15-08-40-22
+.. gh-issue: 99337
+.. nonce: 5LoQDE
+.. section: Build
+
+Fix a compilation issue with GCC 12 on macOS.
+
+..
+
+.. date: 2022-11-09-14-42-48
+.. gh-issue: 99289
+.. nonce: X7wFE1
+.. section: Build
+
+Add a ``COMPILEALL_OPTS`` variable in Makefile to override :mod:`compileall`
+options (default: ``-j0``) in ``make install``. Also merged the
+``compileall`` commands into a single command building .pyc files for all
+optimization levels (0, 1, 2) at once. Patch by Victor Stinner.
+
+..
+
+.. date: 2022-11-03-08-10-49
+.. gh-issue: 98872
+.. nonce: gdsR8X
+.. section: Build
+
+Fix a possible fd leak in ``Programs/_freeze_module.c`` introduced in Python
+3.11.
+
+..
+
+.. date: 2022-10-16-12-49-24
+.. gh-issue: 88226
+.. nonce: BsnQ4k
+.. section: Build
+
+Always define ``TARGET_*`` labels in ``Python/ceval.c``, even if
+``USE_COMPUTED_GOTOS`` is disabled. This allows breakpoints to be set at
+those labels in (for instance) ``gdb``.
+
+..
+
+.. date: 2022-11-23-17-17-16
+.. gh-issue: 99345
+.. nonce: jOa3-f
+.. section: Windows
+
+Use faster initialization functions to detect the install location for the
+Windows Store package.
+
+..
+
+.. date: 2022-11-21-19-50-18
+.. gh-issue: 98629
+.. nonce: tMmB_B
+.. section: Windows
+
+Fix initialization of :data:`sys.version` and ``sys._git`` on Windows
+
+..
+
+.. date: 2022-11-16-19-03-21
+.. gh-issue: 99442
+.. nonce: 6Dgk3Q
+.. section: Windows
+
+Fix handling in :ref:`launcher` when ``argv[0]`` does not include a file
+extension.
+
+..
+
+.. bpo: 40882
+.. date: 2020-06-06-15-10-37
+.. nonce: UvNbdj
+.. section: Windows
+
+Fix a memory leak in :class:`multiprocessing.shared_memory.SharedMemory` on
+Windows.
+
+..
+
+.. date: 2022-11-25-09-23-20
+.. gh-issue: 87235
+.. nonce: SifjCD
+.. section: macOS
+
+On macOS ``python3 /dev/fd/9 9` family of functions,
-and improved the docs for :func:`os.waitid` with more explanation of the
-possible argument constants.
diff --git a/Misc/NEWS.d/next/Documentation/2022-11-16-12-52-23.gh-issue-92892.TS-P0j.rst b/Misc/NEWS.d/next/Documentation/2022-11-16-12-52-23.gh-issue-92892.TS-P0j.rst
deleted file mode 100644
index 54e421d19d9da3..00000000000000
--- a/Misc/NEWS.d/next/Documentation/2022-11-16-12-52-23.gh-issue-92892.TS-P0j.rst
+++ /dev/null
@@ -1 +0,0 @@
-Document that calling variadic functions with ctypes requires special care on macOS/arm64 (and possibly other platforms).
diff --git a/Misc/NEWS.d/next/Documentation/2022-11-26-15-51-23.gh-issue-88330.B_wFq8.rst b/Misc/NEWS.d/next/Documentation/2022-11-26-15-51-23.gh-issue-88330.B_wFq8.rst
deleted file mode 100644
index 0f242eecc31258..00000000000000
--- a/Misc/NEWS.d/next/Documentation/2022-11-26-15-51-23.gh-issue-88330.B_wFq8.rst
+++ /dev/null
@@ -1 +0,0 @@
-Improved the description of what a resource is in importlib.resources docs.
diff --git a/Misc/NEWS.d/next/Documentation/2022-11-26-21-43-05.gh-issue-89682.DhKoTM.rst b/Misc/NEWS.d/next/Documentation/2022-11-26-21-43-05.gh-issue-89682.DhKoTM.rst
deleted file mode 100644
index 46be065b653952..00000000000000
--- a/Misc/NEWS.d/next/Documentation/2022-11-26-21-43-05.gh-issue-89682.DhKoTM.rst
+++ /dev/null
@@ -1 +0,0 @@
-Reworded docstring of the default ``__contains__`` to clarify that it returns a :class:`bool`.
diff --git a/Misc/NEWS.d/next/Library/2019-08-30-10-48-53.bpo-15999.QqsRRi.rst b/Misc/NEWS.d/next/Library/2019-08-30-10-48-53.bpo-15999.QqsRRi.rst
deleted file mode 100644
index 9650c694cc674a..00000000000000
--- a/Misc/NEWS.d/next/Library/2019-08-30-10-48-53.bpo-15999.QqsRRi.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-All built-in functions now accept arguments of any type instead of just
-``bool`` and ``int`` for boolean parameters.
diff --git a/Misc/NEWS.d/next/Library/2020-08-02-23-46-22.bpo-41260.Q2BNzY.rst b/Misc/NEWS.d/next/Library/2020-08-02-23-46-22.bpo-41260.Q2BNzY.rst
deleted file mode 100644
index ae2fdd9b84a00e..00000000000000
--- a/Misc/NEWS.d/next/Library/2020-08-02-23-46-22.bpo-41260.Q2BNzY.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Rename the *fmt* parameter of the pure Python implementation of
-:meth:`datetime.date.strftime` to *format*.
diff --git a/Misc/NEWS.d/next/Library/2021-08-03-05-31-00.bpo-44817.wOW_Qn.rst b/Misc/NEWS.d/next/Library/2021-08-03-05-31-00.bpo-44817.wOW_Qn.rst
deleted file mode 100644
index 79f8c506b54f37..00000000000000
--- a/Misc/NEWS.d/next/Library/2021-08-03-05-31-00.bpo-44817.wOW_Qn.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Ignore WinError 53 (ERROR_BAD_NETPATH), 65 (ERROR_NETWORK_ACCESS_DENIED)
-and 161 (ERROR_BAD_PATHNAME) when using ntpath.realpath().
diff --git a/Misc/NEWS.d/next/Library/2022-04-04-22-54-11.bpo-47220.L9jYu4.rst b/Misc/NEWS.d/next/Library/2022-04-04-22-54-11.bpo-47220.L9jYu4.rst
deleted file mode 100644
index 6e2af088640b55..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-04-04-22-54-11.bpo-47220.L9jYu4.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Document the optional *callback* parameter of :class:`WeakMethod`. Patch by
-Géry Ogam.
diff --git a/Misc/NEWS.d/next/Library/2022-04-23-03-46-37.gh-issue-91078.87-hkp.rst b/Misc/NEWS.d/next/Library/2022-04-23-03-46-37.gh-issue-91078.87-hkp.rst
deleted file mode 100644
index e05d5e2a13146c..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-04-23-03-46-37.gh-issue-91078.87-hkp.rst
+++ /dev/null
@@ -1 +0,0 @@
-:meth:`TarFile.next` now returns ``None`` when called on an empty tarfile.
diff --git a/Misc/NEWS.d/next/Library/2022-05-06-01-53-34.gh-issue-92122.96Lf2p.rst b/Misc/NEWS.d/next/Library/2022-05-06-01-53-34.gh-issue-92122.96Lf2p.rst
new file mode 100644
index 00000000000000..d585535ee38d20
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2022-05-06-01-53-34.gh-issue-92122.96Lf2p.rst
@@ -0,0 +1 @@
+Fix reStructuredText syntax errors in docstrings in the :mod:`enum` module.
diff --git a/Misc/NEWS.d/next/Library/2022-08-06-12-18-07.gh-issue-88863.NnqsuJ.rst b/Misc/NEWS.d/next/Library/2022-08-06-12-18-07.gh-issue-88863.NnqsuJ.rst
deleted file mode 100644
index 23f8cb01cf0ac8..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-08-06-12-18-07.gh-issue-88863.NnqsuJ.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-To avoid apparent memory leaks when :func:`asyncio.open_connection` raises,
-break reference cycles generated by local exception and future instances
-(which has exception instance as its member var). Patch by Dong Uk, Kang.
diff --git a/Misc/NEWS.d/next/Library/2022-09-14-21-56-15.gh-issue-96828.ZoOY5G.rst b/Misc/NEWS.d/next/Library/2022-09-14-21-56-15.gh-issue-96828.ZoOY5G.rst
deleted file mode 100644
index d8a448851f4779..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-09-14-21-56-15.gh-issue-96828.ZoOY5G.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Add an :data:`~ssl.OP_ENABLE_KTLS` option for enabling the use of the kernel
-TLS (kTLS). Patch by Illia Volochii.
diff --git a/Misc/NEWS.d/next/Library/2022-09-16-08-21-46.gh-issue-88500.jQ0pCc.rst b/Misc/NEWS.d/next/Library/2022-09-16-08-21-46.gh-issue-88500.jQ0pCc.rst
new file mode 100644
index 00000000000000..ad01f5e16b16a9
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2022-09-16-08-21-46.gh-issue-88500.jQ0pCc.rst
@@ -0,0 +1,2 @@
+Reduced the memory usage of :func:`urllib.parse.unquote` and
+:func:`urllib.parse.unquote_to_bytes` on large values.
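Behaviour is unchanged; only the intermediate allocations shrink. The usual round trips still look like this (examples taken from the docstrings in the patch above)::

    from urllib.parse import unquote, unquote_to_bytes

    print(unquote("abc%20def"))            # 'abc def'
    print(unquote_to_bytes("abc%20def"))   # b'abc def'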
diff --git a/Misc/NEWS.d/next/Library/2022-10-02-12-38-22.gh-issue-82836.OvYLmC.rst b/Misc/NEWS.d/next/Library/2022-10-02-12-38-22.gh-issue-82836.OvYLmC.rst
deleted file mode 100644
index dcbea66d66bf7c..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-10-02-12-38-22.gh-issue-82836.OvYLmC.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix :attr:`~ipaddress.IPv4Address.is_private` properties in the :mod:`ipaddress` module. Previously non-private networks (0.0.0.0/0) would return True from this method; now they correctly return False.
diff --git a/Misc/NEWS.d/next/Library/2022-10-07-18-16-00.gh-issue-98030.2oQCZy.rst b/Misc/NEWS.d/next/Library/2022-10-07-18-16-00.gh-issue-98030.2oQCZy.rst
new file mode 100644
index 00000000000000..7768ed0817e8fa
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2022-10-07-18-16-00.gh-issue-98030.2oQCZy.rst
@@ -0,0 +1,7 @@
+Add missing TCP socket options from Linux: ``TCP_MD5SIG``,
+``TCP_THIN_LINEAR_TIMEOUTS``, ``TCP_THIN_DUPACK``, ``TCP_REPAIR``,
+``TCP_REPAIR_QUEUE``, ``TCP_QUEUE_SEQ``, ``TCP_REPAIR_OPTIONS``,
+``TCP_TIMESTAMP``, ``TCP_CC_INFO``, ``TCP_SAVE_SYN``, ``TCP_SAVED_SYN``,
+``TCP_REPAIR_WINDOW``, ``TCP_FASTOPEN_CONNECT``, ``TCP_ULP``,
+``TCP_MD5SIG_EXT``, ``TCP_FASTOPEN_KEY``, ``TCP_FASTOPEN_NO_COOKIE``,
+``TCP_ZEROCOPY_RECEIVE``, ``TCP_INQ``, ``TCP_TX_DELAY``.
diff --git a/Misc/NEWS.d/next/Library/2022-10-08-15-41-00.gh-issue-98098.DugpWi.rst b/Misc/NEWS.d/next/Library/2022-10-08-15-41-00.gh-issue-98098.DugpWi.rst
deleted file mode 100644
index 202275e16ea081..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-10-08-15-41-00.gh-issue-98098.DugpWi.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Created packages from zipfile and test_zipfile modules, separating
-``zipfile.Path`` functionality.
diff --git a/Misc/NEWS.d/next/Library/2022-10-08-19-20-33.gh-issue-98108.WUObqM.rst b/Misc/NEWS.d/next/Library/2022-10-08-19-20-33.gh-issue-98108.WUObqM.rst
deleted file mode 100644
index 7e962580dda228..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-10-08-19-20-33.gh-issue-98108.WUObqM.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-``zipfile.Path`` is now pickleable if its initialization parameters were
-pickleable (e.g. for file system paths).
diff --git a/Misc/NEWS.d/next/Library/2022-10-13-22-13-54.gh-issue-98248.lwyygy.rst b/Misc/NEWS.d/next/Library/2022-10-13-22-13-54.gh-issue-98248.lwyygy.rst
deleted file mode 100644
index 347f6e160335e2..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-10-13-22-13-54.gh-issue-98248.lwyygy.rst
+++ /dev/null
@@ -1 +0,0 @@
-Provide informative error messages in :func:`struct.pack` when its integral arguments are not in range.
diff --git a/Misc/NEWS.d/next/Library/2022-10-16-18-52-00.gh-issue-97966.humlhz.rst b/Misc/NEWS.d/next/Library/2022-10-16-18-52-00.gh-issue-97966.humlhz.rst
deleted file mode 100644
index b725465ae4f0ef..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-10-16-18-52-00.gh-issue-97966.humlhz.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-On ``uname_result``, restored expectation that ``_fields`` and ``_asdict``
-would include all six properties including ``processor``.
diff --git a/Misc/NEWS.d/next/Library/2022-10-19-18-31-53.gh-issue-98458.vwyq7O.rst b/Misc/NEWS.d/next/Library/2022-10-19-18-31-53.gh-issue-98458.vwyq7O.rst
deleted file mode 100644
index f74195cc8e7dc6..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-10-19-18-31-53.gh-issue-98458.vwyq7O.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix infinite loop in unittest when a self-referencing chained exception is raised
diff --git a/Misc/NEWS.d/next/Library/2022-10-24-11-01-05.gh-issue-98253.HVd5v4.rst b/Misc/NEWS.d/next/Library/2022-10-24-11-01-05.gh-issue-98253.HVd5v4.rst
deleted file mode 100644
index 00df0070f3b9c1..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-10-24-11-01-05.gh-issue-98253.HVd5v4.rst
+++ /dev/null
@@ -1,10 +0,0 @@
-The implementation of the typing module is now more resilient to reference
-leaks in binary extension modules.
-
-Previously, a reference leak in a typed C API-based extension module could leak
-internals of the typing module, which could in turn introduce leaks in
-essentially any other package with typed function signatures. Although the
-typing package is not the original source of the problem, such non-local
-dependences exacerbate debugging of large-scale projects, and the
-implementation was therefore changed to reduce harm by providing better
-isolation.
diff --git a/Misc/NEWS.d/next/Library/2022-11-02-23-47-07.gh-issue-99029.7uCiIB.rst b/Misc/NEWS.d/next/Library/2022-11-02-23-47-07.gh-issue-99029.7uCiIB.rst
deleted file mode 100644
index 0bfba5e1e32662..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-02-23-47-07.gh-issue-99029.7uCiIB.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-:meth:`pathlib.PurePath.relative_to()` now treats naked Windows drive paths
-as relative. This brings its behaviour in line with other parts of pathlib.
diff --git a/Misc/NEWS.d/next/Library/2022-11-08-11-18-51.gh-issue-64490.VcBgrN.rst b/Misc/NEWS.d/next/Library/2022-11-08-11-18-51.gh-issue-64490.VcBgrN.rst
deleted file mode 100644
index f98c181cc9c54b..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-08-11-18-51.gh-issue-64490.VcBgrN.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix refcount error when arguments are packed to tuple in Argument Clinic.
diff --git a/Misc/NEWS.d/next/Library/2022-11-08-15-54-43.gh-issue-99240.MhYwcz.rst b/Misc/NEWS.d/next/Library/2022-11-08-15-54-43.gh-issue-99240.MhYwcz.rst
deleted file mode 100644
index 0a4db052755f87..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-08-15-54-43.gh-issue-99240.MhYwcz.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fix double-free bug in Argument Clinic ``str_converter`` by
-extracting memory clean up to a new ``post_parsing`` section.
diff --git a/Misc/NEWS.d/next/Library/2022-11-09-03-34-29.gh-issue-99201.lDJ7xI.rst b/Misc/NEWS.d/next/Library/2022-11-09-03-34-29.gh-issue-99201.lDJ7xI.rst
deleted file mode 100644
index 6d03574fdaf5bf..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-09-03-34-29.gh-issue-99201.lDJ7xI.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fix :exc:`IndexError` when initializing the config variables on Windows if
-``HAVE_DYNAMIC_LOADING`` is not set.
diff --git a/Misc/NEWS.d/next/Library/2022-11-09-12-36-12.gh-issue-99284.9p4J2l.rst b/Misc/NEWS.d/next/Library/2022-11-09-12-36-12.gh-issue-99284.9p4J2l.rst
deleted file mode 100644
index 48576bd457aa0d..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-09-12-36-12.gh-issue-99284.9p4J2l.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Remove the old, untested, and undocumented
-``_use_broken_old_ctypes_structure_semantics_`` hack from :mod:`ctypes`.
diff --git a/Misc/NEWS.d/next/Library/2022-11-12-12-08-34.gh-issue-99344.7M_u8G.rst b/Misc/NEWS.d/next/Library/2022-11-12-12-08-34.gh-issue-99344.7M_u8G.rst
deleted file mode 100644
index 412c8c793435af..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-12-12-08-34.gh-issue-99344.7M_u8G.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fix substitution of :class:`~typing.TypeVarTuple` and
-:class:`~typing.ParamSpec` together in user generics.
diff --git a/Misc/NEWS.d/next/Library/2022-11-12-12-10-23.gh-issue-99379.bcGhxF.rst b/Misc/NEWS.d/next/Library/2022-11-12-12-10-23.gh-issue-99379.bcGhxF.rst
deleted file mode 100644
index 1950680b1df86c..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-12-12-10-23.gh-issue-99379.bcGhxF.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fix substitution of :class:`~typing.ParamSpec` followed by
-:class:`~typing.TypeVarTuple` in generic aliases.
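A rough sketch of the kind of user generic these two substitution fixes target (the class and variable names are illustrative, and the exact repr of the alias may differ)::

    from typing import Generic, ParamSpec, TypeVarTuple, Unpack

    P = ParamSpec("P")
    Ts = TypeVarTuple("Ts")

    class X(Generic[Unpack[Ts], P]):
        pass

    # Ts absorbs (int, str) and P receives [float, bool]; this substitution
    # previously raised or produced a wrong alias.
    print(X[int, str, [float, bool]])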
diff --git a/Misc/NEWS.d/next/Library/2022-11-12-12-15-30.gh-issue-99382.dKg_rW.rst b/Misc/NEWS.d/next/Library/2022-11-12-12-15-30.gh-issue-99382.dKg_rW.rst
deleted file mode 100644
index f153f2fceac844..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-12-12-15-30.gh-issue-99382.dKg_rW.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Check the number of arguments in substitution in user generics containing a
-:class:`~typing.TypeVarTuple` and one or more :class:`~typing.TypeVar`.
diff --git a/Misc/NEWS.d/next/Library/2022-11-13-02-06-56.gh-issue-99341.8-OlwB.rst b/Misc/NEWS.d/next/Library/2022-11-13-02-06-56.gh-issue-99341.8-OlwB.rst
deleted file mode 100644
index 451561c579daff..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-13-02-06-56.gh-issue-99341.8-OlwB.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fix :func:`ast.increment_lineno` to also cover :class:`ast.TypeIgnore` when
-changing line numbers.
diff --git a/Misc/NEWS.d/next/Library/2022-11-14-08-21-56.gh-issue-99388.UWSlwp.rst b/Misc/NEWS.d/next/Library/2022-11-14-08-21-56.gh-issue-99388.UWSlwp.rst
deleted file mode 100644
index f35799d454573e..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-14-08-21-56.gh-issue-99388.UWSlwp.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Add *loop_factory* parameter to :func:`asyncio.run` to allow specifying a custom event loop factory.
-Patch by Kumar Aditya.
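A brief usage sketch of the parameter described above, assuming the new keyword-only argument of :func:`asyncio.run`::

    import asyncio

    async def main():
        return asyncio.get_running_loop()

    # The factory is called with no arguments to build the loop the runner uses.
    asyncio.run(main(), loop_factory=asyncio.new_event_loop)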
diff --git a/Misc/NEWS.d/next/Library/2022-11-15-04-08-25.gh-issue-92647.cZcjnJ.rst b/Misc/NEWS.d/next/Library/2022-11-15-04-08-25.gh-issue-92647.cZcjnJ.rst
deleted file mode 100644
index c6e2a0ca25ff2a..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-15-04-08-25.gh-issue-92647.cZcjnJ.rst
+++ /dev/null
@@ -1 +0,0 @@
-Use the final status of an enum to determine the lookup or creation branch of the functional API.
diff --git a/Misc/NEWS.d/next/Library/2022-11-15-10-55-24.gh-issue-97001.KeQuVF.rst b/Misc/NEWS.d/next/Library/2022-11-15-10-55-24.gh-issue-97001.KeQuVF.rst
deleted file mode 100644
index 014161cf7b1d44..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-15-10-55-24.gh-issue-97001.KeQuVF.rst
+++ /dev/null
@@ -1 +0,0 @@
-Release the GIL when calling termios APIs to avoid blocking threads.
diff --git a/Misc/NEWS.d/next/Library/2022-11-17-10-56-47.gh-issue-66285.KvjlaB.rst b/Misc/NEWS.d/next/Library/2022-11-17-10-56-47.gh-issue-66285.KvjlaB.rst
deleted file mode 100644
index ebd82173882726..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-17-10-56-47.gh-issue-66285.KvjlaB.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix :mod:`asyncio` to not share event loop and signal wakeupfd in forked processes. Patch by Kumar Aditya.
diff --git a/Misc/NEWS.d/next/Library/2022-11-21-10-45-54.gh-issue-99508.QqVbby.rst b/Misc/NEWS.d/next/Library/2022-11-21-10-45-54.gh-issue-99508.QqVbby.rst
deleted file mode 100644
index 82720d17bcafd3..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-21-10-45-54.gh-issue-99508.QqVbby.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fix ``TypeError`` in ``Lib/importlib/_bootstrap_external.py`` while calling
-``_imp.source_hash()``.
diff --git a/Misc/NEWS.d/next/Library/2022-11-21-13-49-03.gh-issue-99645.9w1QKq.rst b/Misc/NEWS.d/next/Library/2022-11-21-13-49-03.gh-issue-99645.9w1QKq.rst
deleted file mode 100644
index f6ee449891d9f6..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-21-13-49-03.gh-issue-99645.9w1QKq.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Fix a bug in handling class cleanups in :class:`unittest.TestCase`. Now
-``addClassCleanup()`` uses separate lists for different ``TestCase``
-subclasses, and ``doClassCleanups()`` only cleans up the particular class.
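An illustrative sketch of the behaviour change (class and test names are made up)::

    import unittest

    class Base(unittest.TestCase):
        @classmethod
        def setUpClass(cls):
            cls.addClassCleanup(print, "cleanup for", cls.__name__)

    class TestA(Base):
        def test_a(self):
            pass

    class TestB(Base):
        def test_b(self):
            pass

    # With the fix, each subclass keeps its own cleanup list, so the cleanup
    # registered while setting up TestA is no longer mixed into TestB's.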
diff --git a/Misc/NEWS.d/next/Library/2022-11-21-16-24-01.gh-issue-83035.qZIujU.rst b/Misc/NEWS.d/next/Library/2022-11-21-16-24-01.gh-issue-83035.qZIujU.rst
new file mode 100644
index 00000000000000..629d9aefb2d869
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2022-11-21-16-24-01.gh-issue-83035.qZIujU.rst
@@ -0,0 +1 @@
+Fix :func:`inspect.getsource` handling of decorator calls with nested parentheses.
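A minimal example of the decorator shape this fix handles (the decorator and function are made up for illustration)::

    import inspect

    def register(*options):
        def decorator(func):
            return func          # keep the original function object
        return decorator

    @register(("outer", ("nested", "parens")))   # nested parentheses in the call
    def sample():
        return 42

    # getsource() scans the decorator lines to find where the definition
    # starts; nested parentheses in the call previously confused it.
    print(inspect.getsource(sample))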
diff --git a/Misc/NEWS.d/next/Library/2022-11-21-17-56-18.gh-issue-51524.nTykx8.rst b/Misc/NEWS.d/next/Library/2022-11-21-17-56-18.gh-issue-51524.nTykx8.rst
deleted file mode 100644
index 63fe7b8a3a3254..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-21-17-56-18.gh-issue-51524.nTykx8.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix a bug when calling ``trace.CoverageResults`` with a valid *infile* argument.
diff --git a/Misc/NEWS.d/next/Library/2022-11-22-19-31-26.gh-issue-79033.MW6kHq.rst b/Misc/NEWS.d/next/Library/2022-11-22-19-31-26.gh-issue-79033.MW6kHq.rst
deleted file mode 100644
index 4b12fd9c8d798f..00000000000000
--- a/Misc/NEWS.d/next/Library/2022-11-22-19-31-26.gh-issue-79033.MW6kHq.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix :func:`asyncio.Server.wait_closed` to actually do what the docs promise -- wait for all existing connections to complete, after closing the server.
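A small usage sketch (not part of the patch)::

    import asyncio

    async def main():
        server = await asyncio.start_server(lambda r, w: None, "127.0.0.1", 0)
        server.close()
        # wait_closed() now also waits for connections that are still being
        # served, matching what the documentation has always promised.
        await server.wait_closed()

    asyncio.run(main())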
diff --git a/Misc/NEWS.d/next/Library/2022-11-23-23-58-45.gh-issue-94943.Oog0Zo.rst b/Misc/NEWS.d/next/Library/2022-11-23-23-58-45.gh-issue-94943.Oog0Zo.rst
new file mode 100644
index 00000000000000..ed4754e49bd2cf
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2022-11-23-23-58-45.gh-issue-94943.Oog0Zo.rst
@@ -0,0 +1,5 @@
+Add :ref:`enum-dataclass-support` to the
+:class:`~enum.Enum` :meth:`~enum.Enum.__repr__`.
+When inheriting from a :class:`~dataclasses.dataclass`,
+only show the field names in the value section of the member :func:`repr`,
+and not the dataclass' class name.
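A sketch of the documented dataclass/enum mixin pattern this entry refers to (the exact repr text may differ)::

    from dataclasses import dataclass
    from enum import Enum

    @dataclass
    class CreatureMixin:
        legs: int
        flies: bool

    class Creature(CreatureMixin, Enum):
        BEETLE = 6, True
        DOG = 4, False

    # The value section now lists the dataclass fields by name, roughly
    # <Creature.DOG: legs=4, flies=False>, instead of embedding the mixin
    # class name.
    print(repr(Creature.DOG))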
diff --git a/Misc/NEWS.d/next/Library/2022-12-03-20-06-16.gh-issue-98778.t5U9uc.rst b/Misc/NEWS.d/next/Library/2022-12-03-20-06-16.gh-issue-98778.t5U9uc.rst
new file mode 100644
index 00000000000000..b1c170dff3eabc
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2022-12-03-20-06-16.gh-issue-98778.t5U9uc.rst
@@ -0,0 +1,2 @@
+Update :exc:`~urllib.error.HTTPError` to be initialized properly, even if
+``fp`` is ``None``. Patch by Dong-hee Na.
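A minimal sketch of constructing the error without a response body (URL and headers are placeholders)::

    from urllib.error import HTTPError

    # Previously parts of the initialization could be skipped when no file
    # object was available; ``fp=None`` is now handled properly.
    err = HTTPError("https://example.com/", 404, "Not Found", hdrs={}, fp=None)
    print(err.code, err.reason)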
diff --git a/Misc/NEWS.d/next/Library/2022-12-08-06-18-06.gh-issue-100098.uBvPlp.rst b/Misc/NEWS.d/next/Library/2022-12-08-06-18-06.gh-issue-100098.uBvPlp.rst
new file mode 100644
index 00000000000000..256f2bcd39f81d
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2022-12-08-06-18-06.gh-issue-100098.uBvPlp.rst
@@ -0,0 +1 @@
+Fix ``tuple`` subclasses being cast to ``tuple`` when used as enum values.
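An illustrative sketch using a :func:`~collections.namedtuple` value (names are made up)::

    from collections import namedtuple
    from enum import Enum

    Coordinate = namedtuple("Coordinate", "x y")

    class Landmark(Enum):
        ORIGIN = Coordinate(0, 0)

    # With the fix, the member value keeps its original type instead of being
    # collapsed into a plain tuple.
    print(type(Landmark.ORIGIN.value).__name__)   # expected: Coordinate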
diff --git a/Misc/NEWS.d/next/Library/2022-12-09-10-35-36.bpo-44592.z-P3oe.rst b/Misc/NEWS.d/next/Library/2022-12-09-10-35-36.bpo-44592.z-P3oe.rst
new file mode 100644
index 00000000000000..7f290605934d76
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2022-12-09-10-35-36.bpo-44592.z-P3oe.rst
@@ -0,0 +1,2 @@
+Fix inconsistent handling of case sensitivity for the *extrasaction*
+argument of :class:`csv.DictWriter`.
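For context, a minimal sketch of how *extrasaction* is used; the fix concerns treating differently cased spellings of its values consistently::

    import csv
    import io

    buf = io.StringIO()
    writer = csv.DictWriter(buf, fieldnames=["name"], extrasaction="ignore")
    writer.writeheader()
    # Keys outside ``fieldnames`` are dropped with "ignore" and rejected
    # with "raise".
    writer.writerow({"name": "spam", "unexpected": 1})
    print(buf.getvalue())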
diff --git a/Misc/NEWS.d/next/Security/2022-11-11-12-50-28.gh-issue-87604.OtwH5L.rst b/Misc/NEWS.d/next/Security/2022-11-11-12-50-28.gh-issue-87604.OtwH5L.rst
deleted file mode 100644
index c931409b817122..00000000000000
--- a/Misc/NEWS.d/next/Security/2022-11-11-12-50-28.gh-issue-87604.OtwH5L.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Avoid publishing list of active per-interpreter audit hooks via the
-:mod:`gc` module
diff --git a/Misc/NEWS.d/next/Tests/2022-06-16-13-26-31.gh-issue-93018.wvNx76.rst b/Misc/NEWS.d/next/Tests/2022-06-16-13-26-31.gh-issue-93018.wvNx76.rst
new file mode 100644
index 00000000000000..a8fb98048e4023
--- /dev/null
+++ b/Misc/NEWS.d/next/Tests/2022-06-16-13-26-31.gh-issue-93018.wvNx76.rst
@@ -0,0 +1 @@
+Make two tests tolerant of a host system libexpat that has backported security fixes applied.
diff --git a/Misc/NEWS.d/next/Tests/2022-08-22-15-49-14.gh-issue-96002.4UE9UE.rst b/Misc/NEWS.d/next/Tests/2022-08-22-15-49-14.gh-issue-96002.4UE9UE.rst
deleted file mode 100644
index dc86e1d70f1289..00000000000000
--- a/Misc/NEWS.d/next/Tests/2022-08-22-15-49-14.gh-issue-96002.4UE9UE.rst
+++ /dev/null
@@ -1 +0,0 @@
-Add functional test for Argument Clinic.
diff --git a/Misc/NEWS.d/next/Tests/2022-11-19-13-34-28.gh-issue-99593.8ZfCkj.rst b/Misc/NEWS.d/next/Tests/2022-11-19-13-34-28.gh-issue-99593.8ZfCkj.rst
deleted file mode 100644
index ec4cda2080323f..00000000000000
--- a/Misc/NEWS.d/next/Tests/2022-11-19-13-34-28.gh-issue-99593.8ZfCkj.rst
+++ /dev/null
@@ -1 +0,0 @@
-Cover the Unicode C API with tests.
diff --git a/Misc/NEWS.d/next/Tests/2022-11-21-19-21-30.gh-issue-99659.4gP0nm.rst b/Misc/NEWS.d/next/Tests/2022-11-21-19-21-30.gh-issue-99659.4gP0nm.rst
deleted file mode 100644
index 3db1ec12b5202e..00000000000000
--- a/Misc/NEWS.d/next/Tests/2022-11-21-19-21-30.gh-issue-99659.4gP0nm.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Optional big-memory tests in ``test_sqlite3`` now catch the correct
-:exc:`sqlite3.DataError` exception type when overly large strings or blobs
-are passed.
diff --git a/Misc/NEWS.d/next/Tests/2022-12-01-18-55-18.gh-issue-99934.Ox3Fqf.rst b/Misc/NEWS.d/next/Tests/2022-12-01-18-55-18.gh-issue-99934.Ox3Fqf.rst
deleted file mode 100644
index 1b1287c33dbd94..00000000000000
--- a/Misc/NEWS.d/next/Tests/2022-12-01-18-55-18.gh-issue-99934.Ox3Fqf.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix ``test_marshal`` on 32-bit x86: the deterministic sets test was failing.
diff --git a/Misc/NEWS.d/next/Tests/2022-12-05-16-12-56.gh-issue-99892.sz_eW8.rst b/Misc/NEWS.d/next/Tests/2022-12-05-16-12-56.gh-issue-99892.sz_eW8.rst
deleted file mode 100644
index eded0361fbebbb..00000000000000
--- a/Misc/NEWS.d/next/Tests/2022-12-05-16-12-56.gh-issue-99892.sz_eW8.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Skip test_normalization() of test_unicodedata if it fails to download
-NormalizationTest.txt file from pythontest.net. Patch by Victor Stinner.
diff --git a/Misc/NEWS.d/next/Tests/2022-12-08-00-03-37.gh-issue-100086.1zYpto.rst b/Misc/NEWS.d/next/Tests/2022-12-08-00-03-37.gh-issue-100086.1zYpto.rst
new file mode 100644
index 00000000000000..a5f1bb9f5a5e05
--- /dev/null
+++ b/Misc/NEWS.d/next/Tests/2022-12-08-00-03-37.gh-issue-100086.1zYpto.rst
@@ -0,0 +1,3 @@
+The Python test runner (libregrtest) now logs Python build information like
+"debug" vs "release" build, or LTO and PGO optimizations. Patch by Victor
+Stinner.
diff --git a/Misc/NEWS.d/next/Tools-Demos/2022-08-11-09-58-15.gh-issue-64490.PjwhM4.rst b/Misc/NEWS.d/next/Tools-Demos/2022-08-11-09-58-15.gh-issue-64490.PjwhM4.rst
deleted file mode 100644
index 4a308a9306055c..00000000000000
--- a/Misc/NEWS.d/next/Tools-Demos/2022-08-11-09-58-15.gh-issue-64490.PjwhM4.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-Argument Clinic varargs bugfixes
-
-* Fix out-of-bounds error in :c:func:`!_PyArg_UnpackKeywordsWithVararg`.
-* Fix incorrect check which allowed more than one varargs in clinic.py.
-* Fix miscalculation of ``noptargs`` in generated code.
-* Do not generate ``noptargs`` when there is a vararg argument and no optional argument.
-
diff --git a/Misc/NEWS.d/next/Windows/2020-06-06-15-10-37.bpo-40882.UvNbdj.rst b/Misc/NEWS.d/next/Windows/2020-06-06-15-10-37.bpo-40882.UvNbdj.rst
deleted file mode 100644
index 2670aeef9a2525..00000000000000
--- a/Misc/NEWS.d/next/Windows/2020-06-06-15-10-37.bpo-40882.UvNbdj.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fix a memory leak in :class:`multiprocessing.shared_memory.SharedMemory` on
-Windows.
diff --git a/Misc/NEWS.d/next/Windows/2021-05-02-15-29-33.bpo-43984.U92jiv.rst b/Misc/NEWS.d/next/Windows/2021-05-02-15-29-33.bpo-43984.U92jiv.rst
new file mode 100644
index 00000000000000..a5975b2d00c7bf
--- /dev/null
+++ b/Misc/NEWS.d/next/Windows/2021-05-02-15-29-33.bpo-43984.U92jiv.rst
@@ -0,0 +1,3 @@
+:func:`winreg.SetValueEx` now leaves the target value untouched in the case of conversion errors.
+Previously, ``-1`` would be written in case of such errors.
+
diff --git a/Misc/NEWS.d/next/Windows/2022-11-16-19-03-21.gh-issue-99442.6Dgk3Q.rst b/Misc/NEWS.d/next/Windows/2022-11-16-19-03-21.gh-issue-99442.6Dgk3Q.rst
deleted file mode 100644
index 8e19366c429715..00000000000000
--- a/Misc/NEWS.d/next/Windows/2022-11-16-19-03-21.gh-issue-99442.6Dgk3Q.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Fix handling in :ref:`launcher` when ``argv[0]`` does not include a file
-extension.
diff --git a/Misc/NEWS.d/next/Windows/2022-11-21-19-50-18.gh-issue-98629.tMmB_B.rst b/Misc/NEWS.d/next/Windows/2022-11-21-19-50-18.gh-issue-98629.tMmB_B.rst
deleted file mode 100644
index 46cbf998eb2001..00000000000000
--- a/Misc/NEWS.d/next/Windows/2022-11-21-19-50-18.gh-issue-98629.tMmB_B.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix initialization of :data:`sys.version` and ``sys._git`` on Windows.
diff --git a/Misc/NEWS.d/next/Windows/2022-11-23-17-17-16.gh-issue-99345.jOa3-f.rst b/Misc/NEWS.d/next/Windows/2022-11-23-17-17-16.gh-issue-99345.jOa3-f.rst
deleted file mode 100644
index 99db0c55a67eed..00000000000000
--- a/Misc/NEWS.d/next/Windows/2022-11-23-17-17-16.gh-issue-99345.jOa3-f.rst
+++ /dev/null
@@ -1,2 +0,0 @@
-Use faster initialization functions to detect the install location for the
-Windows Store package.
diff --git a/Misc/NEWS.d/next/Windows/2022-12-06-11-16-46.gh-issue-99941.GmUQ6o.rst b/Misc/NEWS.d/next/Windows/2022-12-06-11-16-46.gh-issue-99941.GmUQ6o.rst
new file mode 100644
index 00000000000000..a019d7287207d8
--- /dev/null
+++ b/Misc/NEWS.d/next/Windows/2022-12-06-11-16-46.gh-issue-99941.GmUQ6o.rst
@@ -0,0 +1,2 @@
+Ensure that :func:`asyncio.Protocol.data_received` receives an immutable
+:class:`bytes` object (as documented), instead of :class:`bytearray`.
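A hedged sketch of a protocol relying on the documented type (only relevant on the Windows proactor event loop)::

    import asyncio

    class Echo(asyncio.Protocol):
        def connection_made(self, transport):
            self.transport = transport

        def data_received(self, data):
            # data is now an immutable bytes object, as documented, rather
            # than a bytearray.
            assert isinstance(data, bytes) and not isinstance(data, bytearray)
            self.transport.write(data)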
diff --git a/Misc/NEWS.d/next/Windows/2022-12-09-22-47-42.gh-issue-79218.Yiot2e.rst b/Misc/NEWS.d/next/Windows/2022-12-09-22-47-42.gh-issue-79218.Yiot2e.rst
new file mode 100644
index 00000000000000..e2e6ca3c7796e0
--- /dev/null
+++ b/Misc/NEWS.d/next/Windows/2022-12-09-22-47-42.gh-issue-79218.Yiot2e.rst
@@ -0,0 +1 @@
+Define ``MS_WIN64`` for 64-bit MinGW-w64 builds, fixing a Cython compilation failure.
diff --git a/Misc/NEWS.d/next/macOS/2022-11-01-10-32-23.gh-issue-98940.W3YzC_.rst b/Misc/NEWS.d/next/macOS/2022-11-01-10-32-23.gh-issue-98940.W3YzC_.rst
deleted file mode 100644
index 18ef0b0e252322..00000000000000
--- a/Misc/NEWS.d/next/macOS/2022-11-01-10-32-23.gh-issue-98940.W3YzC_.rst
+++ /dev/null
@@ -1 +0,0 @@
-Fix ``Mac/Extras.install.py`` file filter bug.
diff --git a/Misc/NEWS.d/next/macOS/2022-11-25-09-23-20.gh-issue-87235.SifjCD.rst b/Misc/NEWS.d/next/macOS/2022-11-25-09-23-20.gh-issue-87235.SifjCD.rst
deleted file mode 100644
index 3111e4975e87b3..00000000000000
--- a/Misc/NEWS.d/next/macOS/2022-11-25-09-23-20.gh-issue-87235.SifjCD.rst
+++ /dev/null
@@ -1 +0,0 @@
-On macOS ``python3 /dev/fd/9 9asyncio_get_event_loop_policy);
if (policy == NULL) {
return NULL;
@@ -538,7 +531,7 @@ future_init(FutureObj *fut, PyObject *loop)
if (loop == Py_None) {
asyncio_state *state = get_asyncio_state_by_def((PyObject *)fut);
- loop = get_event_loop(state, 1);
+ loop = get_event_loop(state);
if (loop == NULL) {
return -1;
}
@@ -3229,20 +3222,7 @@ _asyncio_get_event_loop_impl(PyObject *module)
/*[clinic end generated code: output=2a2d8b2f824c648b input=9364bf2916c8655d]*/
{
asyncio_state *state = get_asyncio_state(module);
- return get_event_loop(state, 1);
-}
-
-/*[clinic input]
-_asyncio._get_event_loop
- stacklevel: int = 3
-[clinic start generated code]*/
-
-static PyObject *
-_asyncio__get_event_loop_impl(PyObject *module, int stacklevel)
-/*[clinic end generated code: output=9c1d6d3c802e67c9 input=d17aebbd686f711d]*/
-{
- asyncio_state *state = get_asyncio_state(module);
- return get_event_loop(state, stacklevel-1);
+ return get_event_loop(state);
}
/*[clinic input]
@@ -3620,7 +3600,6 @@ PyDoc_STRVAR(module_doc, "Accelerator module for asyncio");
static PyMethodDef asyncio_methods[] = {
_ASYNCIO_GET_EVENT_LOOP_METHODDEF
- _ASYNCIO__GET_EVENT_LOOP_METHODDEF
_ASYNCIO_GET_RUNNING_LOOP_METHODDEF
_ASYNCIO__GET_RUNNING_LOOP_METHODDEF
_ASYNCIO__SET_RUNNING_LOOP_METHODDEF
diff --git a/Modules/_io/bufferedio.c b/Modules/_io/bufferedio.c
index 6df55b5b8303c2..ba8969f0bcd100 100644
--- a/Modules/_io/bufferedio.c
+++ b/Modules/_io/bufferedio.c
@@ -746,26 +746,26 @@ _buffered_init(buffered *self)
int
_PyIO_trap_eintr(void)
{
- static PyObject *eintr_int = NULL;
PyObject *typ, *val, *tb;
PyOSErrorObject *env_err;
-
- if (eintr_int == NULL) {
- eintr_int = PyLong_FromLong(EINTR);
- assert(eintr_int != NULL);
- }
if (!PyErr_ExceptionMatches(PyExc_OSError))
return 0;
PyErr_Fetch(&typ, &val, &tb);
PyErr_NormalizeException(&typ, &val, &tb);
env_err = (PyOSErrorObject *) val;
assert(env_err != NULL);
- if (env_err->myerrno != NULL &&
- PyObject_RichCompareBool(env_err->myerrno, eintr_int, Py_EQ) > 0) {
- Py_DECREF(typ);
- Py_DECREF(val);
- Py_XDECREF(tb);
- return 1;
+ if (env_err->myerrno != NULL) {
+ assert(EINTR > 0 && EINTR < INT_MAX);
+ assert(PyLong_CheckExact(env_err->myerrno));
+ int overflow;
+ int myerrno = PyLong_AsLongAndOverflow(env_err->myerrno, &overflow);
+ PyErr_Clear();
+ if (myerrno == EINTR) {
+ Py_DECREF(typ);
+ Py_DECREF(val);
+ Py_XDECREF(tb);
+ return 1;
+ }
}
/* This silences any error set by PyObject_RichCompareBool() */
PyErr_Restore(typ, val, tb);
diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c
index 3617fafe9b4fdd..83eef73a875d9d 100644
--- a/Modules/_testcapimodule.c
+++ b/Modules/_testcapimodule.c
@@ -2589,6 +2589,91 @@ test_set_type_size(PyObject *self, PyObject *Py_UNUSED(ignored))
}
+// Test Py_CLEAR() macro
+static PyObject*
+test_py_clear(PyObject *self, PyObject *Py_UNUSED(ignored))
+{
+ // simple case with a variable
+ PyObject *obj = PyList_New(0);
+ if (obj == NULL) {
+ return NULL;
+ }
+ Py_CLEAR(obj);
+ assert(obj == NULL);
+
+ // gh-98724: complex case, Py_CLEAR() argument has a side effect
+ PyObject* array[1];
+ array[0] = PyList_New(0);
+ if (array[0] == NULL) {
+ return NULL;
+ }
+
+ PyObject **p = array;
+ Py_CLEAR(*p++);
+ assert(array[0] == NULL);
+ assert(p == array + 1);
+
+ Py_RETURN_NONE;
+}
+
+
+// Test Py_SETREF() and Py_XSETREF() macros, similar to test_py_clear()
+static PyObject*
+test_py_setref(PyObject *self, PyObject *Py_UNUSED(ignored))
+{
+ // Py_SETREF() simple case with a variable
+ PyObject *obj = PyList_New(0);
+ if (obj == NULL) {
+ return NULL;
+ }
+ Py_SETREF(obj, NULL);
+ assert(obj == NULL);
+
+ // Py_XSETREF() simple case with a variable
+ PyObject *obj2 = PyList_New(0);
+ if (obj2 == NULL) {
+ return NULL;
+ }
+ Py_XSETREF(obj2, NULL);
+ assert(obj2 == NULL);
+ // test Py_XSETREF() when the argument is NULL
+ Py_XSETREF(obj2, NULL);
+ assert(obj2 == NULL);
+
+ // gh-98724: complex case, Py_SETREF() argument has a side effect
+ PyObject* array[1];
+ array[0] = PyList_New(0);
+ if (array[0] == NULL) {
+ return NULL;
+ }
+
+ PyObject **p = array;
+ Py_SETREF(*p++, NULL);
+ assert(array[0] == NULL);
+ assert(p == array + 1);
+
+ // gh-98724: complex case, Py_XSETREF() argument has a side effect
+ PyObject* array2[1];
+ array2[0] = PyList_New(0);
+ if (array2[0] == NULL) {
+ return NULL;
+ }
+
+ PyObject **p2 = array2;
+ Py_XSETREF(*p2++, NULL);
+ assert(array2[0] == NULL);
+ assert(p2 == array2 + 1);
+
+ // test Py_XSETREF() when the argument is NULL
+ p2 = array2;
+ Py_XSETREF(*p2++, NULL);
+ assert(array2[0] == NULL);
+ assert(p2 == array2 + 1);
+
+ Py_RETURN_NONE;
+}
+
+
#define TEST_REFCOUNT() \
do { \
PyObject *obj = PyList_New(0); \
@@ -3252,6 +3337,8 @@ static PyMethodDef TestMethods[] = {
{"pynumber_tobase", pynumber_tobase, METH_VARARGS},
{"without_gc", without_gc, METH_O},
{"test_set_type_size", test_set_type_size, METH_NOARGS},
+ {"test_py_clear", test_py_clear, METH_NOARGS},
+ {"test_py_setref", test_py_setref, METH_NOARGS},
{"test_refcount_macros", test_refcount_macros, METH_NOARGS},
{"test_refcount_funcs", test_refcount_funcs, METH_NOARGS},
{"test_py_is_macros", test_py_is_macros, METH_NOARGS},
diff --git a/Modules/_tracemalloc.c b/Modules/_tracemalloc.c
index 0d70f0cf34c8d6..ac16626f2101ba 100644
--- a/Modules/_tracemalloc.c
+++ b/Modules/_tracemalloc.c
@@ -20,9 +20,6 @@ module _tracemalloc
_Py_DECLARE_STR(anon_unknown, "");
-/* Trace memory blocks allocated by PyMem_RawMalloc() */
-#define TRACE_RAW_MALLOC
-
/* Forward declaration */
static void tracemalloc_stop(void);
static void* raw_malloc(size_t size);
@@ -35,19 +32,14 @@ static void raw_free(void *ptr);
#define TO_PTR(key) ((const void *)(uintptr_t)(key))
#define FROM_PTR(key) ((uintptr_t)(key))
-/* Protected by the GIL */
-static struct {
- PyMemAllocatorEx mem;
- PyMemAllocatorEx raw;
- PyMemAllocatorEx obj;
-} allocators;
+#define allocators _PyRuntime.tracemalloc.allocators
#if defined(TRACE_RAW_MALLOC)
/* This lock is needed because tracemalloc_free() is called without
the GIL held from PyMem_RawFree(). It cannot acquire the lock because it
would introduce a deadlock in _PyThreadState_DeleteCurrent(). */
-static PyThread_type_lock tables_lock;
+# define tables_lock _PyRuntime.tracemalloc.tables_lock
# define TABLES_LOCK() PyThread_acquire_lock(tables_lock, 1)
# define TABLES_UNLOCK() PyThread_release_lock(tables_lock)
#else
@@ -59,33 +51,8 @@ static PyThread_type_lock tables_lock;
#define DEFAULT_DOMAIN 0
-/* Pack the frame_t structure to reduce the memory footprint on 64-bit
- architectures: 12 bytes instead of 16. */
-typedef struct
-#ifdef __GNUC__
-__attribute__((packed))
-#elif defined(_MSC_VER)
-#pragma pack(push, 4)
-#endif
-{
- /* filename cannot be NULL: "" is used if the Python frame
- filename is NULL */
- PyObject *filename;
- unsigned int lineno;
-} frame_t;
-#ifdef _MSC_VER
-#pragma pack(pop)
-#endif
-
-
-typedef struct {
- Py_uhash_t hash;
- /* Number of frames stored */
- uint16_t nframe;
- /* Total number of frames the traceback had */
- uint16_t total_nframe;
- frame_t frames[1];
-} traceback_t;
+typedef struct tracemalloc_frame frame_t;
+typedef struct tracemalloc_traceback traceback_t;
#define TRACEBACK_SIZE(NFRAME) \
(sizeof(traceback_t) + sizeof(frame_t) * (NFRAME - 1))
@@ -96,7 +63,8 @@ typedef struct {
static const unsigned long MAX_NFRAME = Py_MIN(UINT16_MAX, ((SIZE_MAX - sizeof(traceback_t)) / sizeof(frame_t) + 1));
-static traceback_t tracemalloc_empty_traceback;
+#define tracemalloc_empty_traceback _PyRuntime.tracemalloc.empty_traceback
+
/* Trace of a memory block */
typedef struct {
@@ -108,35 +76,13 @@ typedef struct {
} trace_t;
-/* Size in bytes of currently traced memory.
- Protected by TABLES_LOCK(). */
-static size_t tracemalloc_traced_memory = 0;
-
-/* Peak size in bytes of traced memory.
- Protected by TABLES_LOCK(). */
-static size_t tracemalloc_peak_traced_memory = 0;
-
-/* Hash table used as a set to intern filenames:
- PyObject* => PyObject*.
- Protected by the GIL */
-static _Py_hashtable_t *tracemalloc_filenames = NULL;
-
-/* Buffer to store a new traceback in traceback_new().
- Protected by the GIL. */
-static traceback_t *tracemalloc_traceback = NULL;
-
-/* Hash table used as a set to intern tracebacks:
- traceback_t* => traceback_t*
- Protected by the GIL */
-static _Py_hashtable_t *tracemalloc_tracebacks = NULL;
-
-/* pointer (void*) => trace (trace_t*).
- Protected by TABLES_LOCK(). */
-static _Py_hashtable_t *tracemalloc_traces = NULL;
-
-/* domain (unsigned int) => traces (_Py_hashtable_t).
- Protected by TABLES_LOCK(). */
-static _Py_hashtable_t *tracemalloc_domains = NULL;
+#define tracemalloc_traced_memory _PyRuntime.tracemalloc.traced_memory
+#define tracemalloc_peak_traced_memory _PyRuntime.tracemalloc.peak_traced_memory
+#define tracemalloc_filenames _PyRuntime.tracemalloc.filenames
+#define tracemalloc_traceback _PyRuntime.tracemalloc.traceback
+#define tracemalloc_tracebacks _PyRuntime.tracemalloc.tracebacks
+#define tracemalloc_traces _PyRuntime.tracemalloc.traces
+#define tracemalloc_domains _PyRuntime.tracemalloc.domains
#ifdef TRACE_DEBUG
@@ -157,7 +103,7 @@ tracemalloc_error(const char *format, ...)
#if defined(TRACE_RAW_MALLOC)
#define REENTRANT_THREADLOCAL
-static Py_tss_t tracemalloc_reentrant_key = Py_tss_NEEDS_INIT;
+#define tracemalloc_reentrant_key _PyRuntime.tracemalloc.reentrant_key
/* Any non-NULL pointer can be used */
#define REENTRANT Py_True
diff --git a/Modules/_xxsubinterpretersmodule.c b/Modules/_xxsubinterpretersmodule.c
index 3e064ca8c0b319..0892fa3a9595e8 100644
--- a/Modules/_xxsubinterpretersmodule.c
+++ b/Modules/_xxsubinterpretersmodule.c
@@ -96,18 +96,20 @@ add_new_exception(PyObject *mod, const char *name, PyObject *base)
add_new_exception(MOD, MODULE_NAME "." Py_STRINGIFY(NAME), BASE)
static PyTypeObject *
-add_new_type(PyObject *mod, PyTypeObject *cls, crossinterpdatafunc shared)
+add_new_type(PyObject *mod, PyType_Spec *spec, crossinterpdatafunc shared)
{
- if (PyType_Ready(cls) != 0) {
+ PyTypeObject *cls = (PyTypeObject *)PyType_FromMetaclass(
+ NULL, mod, spec, NULL);
+ if (cls == NULL) {
return NULL;
}
- if (PyModule_AddType(mod, cls) != 0) {
- // XXX When this becomes a heap type, we need to decref here.
+ if (PyModule_AddType(mod, cls) < 0) {
+ Py_DECREF(cls);
return NULL;
}
if (shared != NULL) {
if (_PyCrossInterpreterData_RegisterClass(cls, shared)) {
- // XXX When this becomes a heap type, we need to decref here.
+ Py_DECREF(cls);
return NULL;
}
}
@@ -135,12 +137,7 @@ _release_xid_data(_PyCrossInterpreterData *data, int ignoreexc)
* shareable types are all very basic, with no GC.
* That said, it becomes much messier once interpreters
* no longer share a GIL, so this needs to be fixed before then. */
- // We do what _release_xidata() does in pystate.c.
- if (data->free != NULL) {
- data->free(data->data);
- data->data = NULL;
- }
- Py_CLEAR(data->obj);
+ _PyCrossInterpreterData_Clear(NULL, data);
if (ignoreexc) {
// XXX Emit a warning?
PyErr_Clear();
@@ -153,6 +150,69 @@ _release_xid_data(_PyCrossInterpreterData *data, int ignoreexc)
}
+/* module state *************************************************************/
+
+typedef struct {
+ PyTypeObject *ChannelIDType;
+
+ /* interpreter exceptions */
+ PyObject *RunFailedError;
+
+ /* channel exceptions */
+ PyObject *ChannelError;
+ PyObject *ChannelNotFoundError;
+ PyObject *ChannelClosedError;
+ PyObject *ChannelEmptyError;
+ PyObject *ChannelNotEmptyError;
+} module_state;
+
+static inline module_state *
+get_module_state(PyObject *mod)
+{
+ assert(mod != NULL);
+ module_state *state = PyModule_GetState(mod);
+ assert(state != NULL);
+ return state;
+}
+
+static int
+traverse_module_state(module_state *state, visitproc visit, void *arg)
+{
+ /* heap types */
+ Py_VISIT(state->ChannelIDType);
+
+ /* interpreter exceptions */
+ Py_VISIT(state->RunFailedError);
+
+ /* channel exceptions */
+ Py_VISIT(state->ChannelError);
+ Py_VISIT(state->ChannelNotFoundError);
+ Py_VISIT(state->ChannelClosedError);
+ Py_VISIT(state->ChannelEmptyError);
+ Py_VISIT(state->ChannelNotEmptyError);
+ return 0;
+}
+
+static int
+clear_module_state(module_state *state)
+{
+ /* heap types */
+ (void)_PyCrossInterpreterData_UnregisterClass(state->ChannelIDType);
+ Py_CLEAR(state->ChannelIDType);
+
+ /* interpreter exceptions */
+ Py_CLEAR(state->RunFailedError);
+
+ /* channel exceptions */
+ Py_CLEAR(state->ChannelError);
+ Py_CLEAR(state->ChannelNotFoundError);
+ Py_CLEAR(state->ChannelClosedError);
+ Py_CLEAR(state->ChannelEmptyError);
+ Py_CLEAR(state->ChannelNotEmptyError);
+ return 0;
+}
+
+
/* data-sharing-specific code ***********************************************/
struct _sharednsitem {
@@ -420,82 +480,80 @@ _sharedexception_apply(_sharedexception *exc, PyObject *wrapperclass)
#define ERR_CHANNELS_MUTEX_INIT -8
#define ERR_NO_NEXT_CHANNEL_ID -9
-static PyObject *ChannelError;
-static PyObject *ChannelNotFoundError;
-static PyObject *ChannelClosedError;
-static PyObject *ChannelEmptyError;
-static PyObject *ChannelNotEmptyError;
-
static int
channel_exceptions_init(PyObject *mod)
{
- // XXX Move the exceptions into per-module memory?
+ module_state *state = get_module_state(mod);
+ if (state == NULL) {
+ return -1;
+ }
#define ADD(NAME, BASE) \
do { \
- if (NAME == NULL) { \
- NAME = ADD_NEW_EXCEPTION(mod, NAME, BASE); \
- if (NAME == NULL) { \
- return -1; \
- } \
+ assert(state->NAME == NULL); \
+ state->NAME = ADD_NEW_EXCEPTION(mod, NAME, BASE); \
+ if (state->NAME == NULL) { \
+ return -1; \
} \
} while (0)
// A channel-related operation failed.
ADD(ChannelError, PyExc_RuntimeError);
// An operation tried to use a channel that doesn't exist.
- ADD(ChannelNotFoundError, ChannelError);
+ ADD(ChannelNotFoundError, state->ChannelError);
// An operation tried to use a closed channel.
- ADD(ChannelClosedError, ChannelError);
+ ADD(ChannelClosedError, state->ChannelError);
// An operation tried to pop from an empty channel.
- ADD(ChannelEmptyError, ChannelError);
+ ADD(ChannelEmptyError, state->ChannelError);
// An operation tried to close a non-empty channel.
- ADD(ChannelNotEmptyError, ChannelError);
+ ADD(ChannelNotEmptyError, state->ChannelError);
#undef ADD
return 0;
}
static int
-handle_channel_error(int err, PyObject *Py_UNUSED(mod), int64_t cid)
+handle_channel_error(int err, PyObject *mod, int64_t cid)
{
if (err == 0) {
assert(!PyErr_Occurred());
return 0;
}
assert(err < 0);
+ module_state *state = get_module_state(mod);
+ assert(state != NULL);
if (err == ERR_CHANNEL_NOT_FOUND) {
- PyErr_Format(ChannelNotFoundError,
+ PyErr_Format(state->ChannelNotFoundError,
"channel %" PRId64 " not found", cid);
}
else if (err == ERR_CHANNEL_CLOSED) {
- PyErr_Format(ChannelClosedError,
+ PyErr_Format(state->ChannelClosedError,
"channel %" PRId64 " is closed", cid);
}
else if (err == ERR_CHANNEL_INTERP_CLOSED) {
- PyErr_Format(ChannelClosedError,
+ PyErr_Format(state->ChannelClosedError,
"channel %" PRId64 " is already closed", cid);
}
else if (err == ERR_CHANNEL_EMPTY) {
- PyErr_Format(ChannelEmptyError,
+ PyErr_Format(state->ChannelEmptyError,
"channel %" PRId64 " is empty", cid);
}
else if (err == ERR_CHANNEL_NOT_EMPTY) {
- PyErr_Format(ChannelNotEmptyError,
+ PyErr_Format(state->ChannelNotEmptyError,
"channel %" PRId64 " may not be closed "
"if not empty (try force=True)",
cid);
}
else if (err == ERR_CHANNEL_MUTEX_INIT) {
- PyErr_SetString(ChannelError,
+ PyErr_SetString(state->ChannelError,
"can't initialize mutex for new channel");
}
else if (err == ERR_CHANNELS_MUTEX_INIT) {
- PyErr_SetString(ChannelError,
+ PyErr_SetString(state->ChannelError,
"can't initialize mutex for channel management");
}
else if (err == ERR_NO_NEXT_CHANNEL_ID) {
- PyErr_SetString(ChannelError,
+ PyErr_SetString(state->ChannelError,
"failed to get a channel ID");
}
else {
@@ -1604,8 +1662,6 @@ _channel_is_associated(_channels *channels, int64_t cid, int64_t interp,
/* ChannelID class */
-static PyTypeObject ChannelIDType;
-
typedef struct channelid {
PyObject_HEAD
int64_t id;
@@ -1624,7 +1680,9 @@ channel_id_converter(PyObject *arg, void *ptr)
{
int64_t cid;
struct channel_id_converter_data *data = ptr;
- if (PyObject_TypeCheck(arg, &ChannelIDType)) {
+ module_state *state = get_module_state(data->module);
+ assert(state != NULL);
+ if (PyObject_TypeCheck(arg, state->ChannelIDType)) {
cid = ((channelid *)arg)->id;
}
else if (PyIndex_Check(arg)) {
@@ -1731,11 +1789,20 @@ _channelid_new(PyObject *mod, PyTypeObject *cls,
}
static void
-channelid_dealloc(PyObject *v)
+channelid_dealloc(PyObject *self)
{
- int64_t cid = ((channelid *)v)->id;
- _channels *channels = ((channelid *)v)->channels;
- Py_TYPE(v)->tp_free(v);
+ int64_t cid = ((channelid *)self)->id;
+ _channels *channels = ((channelid *)self)->channels;
+
+ PyTypeObject *tp = Py_TYPE(self);
+ tp->tp_free(self);
+ /* "Instances of heap-allocated types hold a reference to their type."
+ * See: https://docs.python.org/3.11/howto/isolating-extensions.html#garbage-collection-protocol
+ * See: https://docs.python.org/3.11/c-api/typeobj.html#c.PyTypeObject.tp_traverse
+ */
+ // XXX Why don't we implement Py_TPFLAGS_HAVE_GC, e.g. Py_tp_traverse,
+ // like we do for _abc._abc_data?
+ Py_DECREF(tp);
_channels_drop_id_object(channels, cid);
}
@@ -1774,11 +1841,6 @@ channelid_int(PyObject *self)
return PyLong_FromLongLong(cid->id);
}
-static PyNumberMethods channelid_as_number = {
- .nb_int = (unaryfunc)channelid_int, /* nb_int */
- .nb_index = (unaryfunc)channelid_int, /* nb_index */
-};
-
static Py_hash_t
channelid_hash(PyObject *self)
{
@@ -1804,15 +1866,19 @@ channelid_richcompare(PyObject *self, PyObject *other, int op)
if (mod == NULL) {
return NULL;
}
+ module_state *state = get_module_state(mod);
+ if (state == NULL) {
+ goto done;
+ }
- if (!PyObject_TypeCheck(self, &ChannelIDType)) {
+ if (!PyObject_TypeCheck(self, state->ChannelIDType)) {
res = Py_NewRef(Py_NotImplemented);
goto done;
}
channelid *cid = (channelid *)self;
int equal;
- if (PyObject_TypeCheck(other, &ChannelIDType)) {
+ if (PyObject_TypeCheck(other, state->ChannelIDType)) {
channelid *othercid = (channelid *)other;
equal = (cid->end == othercid->end) && (cid->id == othercid->id);
}
@@ -1892,10 +1958,14 @@ _channelid_from_xid(_PyCrossInterpreterData *data)
if (mod == NULL) {
return NULL;
}
+ module_state *state = get_module_state(mod);
+ if (state == NULL) {
+ return NULL;
+ }
// Note that we do not preserve the "resolve" flag.
PyObject *cid = NULL;
- int err = newchannelid(&ChannelIDType, xid->id, xid->end,
+ int err = newchannelid(state->ChannelIDType, xid->id, xid->end,
_global_channels(), 0, 0,
(channelid **)&cid);
if (err != 0) {
@@ -1926,20 +1996,20 @@ _channelid_from_xid(_PyCrossInterpreterData *data)
}
static int
-_channelid_shared(PyObject *obj, _PyCrossInterpreterData *data)
-{
- struct _channelid_xid *xid = PyMem_NEW(struct _channelid_xid, 1);
- if (xid == NULL) {
+_channelid_shared(PyThreadState *tstate, PyObject *obj,
+ _PyCrossInterpreterData *data)
+{
+ if (_PyCrossInterpreterData_InitWithSize(
+ data, tstate->interp, sizeof(struct _channelid_xid), obj,
+ _channelid_from_xid
+ ) < 0)
+ {
return -1;
}
+ struct _channelid_xid *xid = (struct _channelid_xid *)data->data;
xid->id = ((channelid *)obj)->id;
xid->end = ((channelid *)obj)->end;
xid->resolve = ((channelid *)obj)->resolve;
-
- data->data = xid;
- data->obj = Py_NewRef(obj);
- data->new_object = _channelid_from_xid;
- data->free = PyMem_Free;
return 0;
}
@@ -1992,61 +2062,45 @@ static PyGetSetDef channelid_getsets[] = {
PyDoc_STRVAR(channelid_doc,
"A channel ID identifies a channel and may be used as an int.");
-static PyTypeObject ChannelIDType = {
- PyVarObject_HEAD_INIT(&PyType_Type, 0)
- "_xxsubinterpreters.ChannelID", /* tp_name */
- sizeof(channelid), /* tp_basicsize */
- 0, /* tp_itemsize */
- (destructor)channelid_dealloc, /* tp_dealloc */
- 0, /* tp_vectorcall_offset */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_as_async */
- (reprfunc)channelid_repr, /* tp_repr */
- &channelid_as_number, /* tp_as_number */
- 0, /* tp_as_sequence */
- 0, /* tp_as_mapping */
- channelid_hash, /* tp_hash */
- 0, /* tp_call */
- (reprfunc)channelid_str, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- // Use Py_TPFLAGS_DISALLOW_INSTANTIATION so the type cannot be instantiated
- // from Python code. We do this because there is a strong relationship
- // between channel IDs and the channel lifecycle, so this limitation avoids
- // related complications. Use the _channel_id() function instead.
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE
- | Py_TPFLAGS_DISALLOW_INSTANTIATION, /* tp_flags */
- channelid_doc, /* tp_doc */
- 0, /* tp_traverse */
- 0, /* tp_clear */
- channelid_richcompare, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
- 0, /* tp_methods */
- 0, /* tp_members */
- channelid_getsets, /* tp_getset */
+static PyType_Slot ChannelIDType_slots[] = {
+ {Py_tp_dealloc, (destructor)channelid_dealloc},
+ {Py_tp_doc, (void *)channelid_doc},
+ {Py_tp_repr, (reprfunc)channelid_repr},
+ {Py_tp_str, (reprfunc)channelid_str},
+ {Py_tp_hash, channelid_hash},
+ {Py_tp_richcompare, channelid_richcompare},
+ {Py_tp_getset, channelid_getsets},
+ // number slots
+ {Py_nb_int, (unaryfunc)channelid_int},
+ {Py_nb_index, (unaryfunc)channelid_int},
+ {0, NULL},
};
+static PyType_Spec ChannelIDType_spec = {
+ .name = "_xxsubinterpreters.ChannelID",
+ .basicsize = sizeof(channelid),
+ .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE |
+ Py_TPFLAGS_DISALLOW_INSTANTIATION | Py_TPFLAGS_IMMUTABLETYPE),
+ .slots = ChannelIDType_slots,
+};
-/* interpreter-specific code ************************************************/
-static PyObject * RunFailedError = NULL;
+/* interpreter-specific code ************************************************/
static int
interp_exceptions_init(PyObject *mod)
{
- // XXX Move the exceptions into per-module memory?
+ module_state *state = get_module_state(mod);
+ if (state == NULL) {
+ return -1;
+ }
#define ADD(NAME, BASE) \
do { \
- if (NAME == NULL) { \
- NAME = ADD_NEW_EXCEPTION(mod, NAME, BASE); \
- if (NAME == NULL) { \
- return -1; \
- } \
+ assert(state->NAME == NULL); \
+ state->NAME = ADD_NEW_EXCEPTION(mod, NAME, BASE); \
+ if (state->NAME == NULL) { \
+ return -1; \
} \
} while (0)
@@ -2167,9 +2221,10 @@ _run_script_in_interpreter(PyObject *mod, PyInterpreterState *interp,
if (_ensure_not_running(interp) < 0) {
return -1;
}
+ module_state *state = get_module_state(mod);
int needs_import = 0;
- _sharedns *shared = _get_shared_ns(shareables, &ChannelIDType,
+ _sharedns *shared = _get_shared_ns(shareables, state->ChannelIDType,
&needs_import);
if (shared == NULL && PyErr_Occurred()) {
return -1;
@@ -2195,7 +2250,8 @@ _run_script_in_interpreter(PyObject *mod, PyInterpreterState *interp,
// Propagate any exception out to the caller.
if (exc != NULL) {
- _sharedexception_apply(exc, RunFailedError);
+ assert(state != NULL);
+ _sharedexception_apply(exc, state->RunFailedError);
_sharedexception_free(exc);
}
else if (result != 0) {
@@ -2527,11 +2583,15 @@ channel_create(PyObject *self, PyObject *Py_UNUSED(ignored))
{
int64_t cid = _channel_create(&_globals.channels);
if (cid < 0) {
- (void)handle_channel_error(cid, self, -1);
+ (void)handle_channel_error(-1, self, cid);
+ return NULL;
+ }
+ module_state *state = get_module_state(self);
+ if (state == NULL) {
return NULL;
}
PyObject *id = NULL;
- int err = newchannelid(&ChannelIDType, cid, 0,
+ int err = newchannelid(state->ChannelIDType, cid, 0,
&_globals.channels, 0, 0,
(channelid **)&id);
if (handle_channel_error(err, self, cid)) {
@@ -2594,10 +2654,16 @@ channel_list_all(PyObject *self, PyObject *Py_UNUSED(ignored))
if (ids == NULL) {
goto finally;
}
+ module_state *state = get_module_state(self);
+ if (state == NULL) {
+ Py_DECREF(ids);
+ ids = NULL;
+ goto finally;
+ }
int64_t *cur = cids;
for (int64_t i=0; i < count; cur++, i++) {
PyObject *id = NULL;
- int err = newchannelid(&ChannelIDType, *cur, 0,
+ int err = newchannelid(state->ChannelIDType, *cur, 0,
&_globals.channels, 0, 0,
(channelid **)&id);
if (handle_channel_error(err, self, *cur)) {
@@ -2850,7 +2916,11 @@ ends are closed. Closing an already closed end is a noop.");
static PyObject *
channel__channel_id(PyObject *self, PyObject *args, PyObject *kwds)
{
- PyTypeObject *cls = &ChannelIDType;
+ module_state *state = get_module_state(self);
+ if (state == NULL) {
+ return NULL;
+ }
+ PyTypeObject *cls = state->ChannelIDType;
PyObject *mod = get_module_from_owned_type(cls);
if (mod == NULL) {
return NULL;
@@ -2915,6 +2985,11 @@ module_exec(PyObject *mod)
return -1;
}
+ module_state *state = get_module_state(mod);
+ if (state == NULL) {
+ goto error;
+ }
+
/* Add exception types */
if (interp_exceptions_init(mod) != 0) {
goto error;
@@ -2924,9 +2999,15 @@ module_exec(PyObject *mod)
}
/* Add other types */
- if (add_new_type(mod, &ChannelIDType, _channelid_shared) == NULL) {
+
+ // ChannelID
+ state->ChannelIDType = add_new_type(
+ mod, &ChannelIDType_spec, _channelid_shared);
+ if (state->ChannelIDType == NULL) {
goto error;
}
+
+ // PyInterpreterID
if (PyModule_AddType(mod, &_PyInterpreterID_Type) < 0) {
goto error;
}
@@ -2934,31 +3015,57 @@ module_exec(PyObject *mod)
return 0;
error:
- (void)_PyCrossInterpreterData_UnregisterClass(&ChannelIDType);
+ (void)_PyCrossInterpreterData_UnregisterClass(state->ChannelIDType);
_globals_fini();
return -1;
}
+static struct PyModuleDef_Slot module_slots[] = {
+ {Py_mod_exec, module_exec},
+ {0, NULL},
+};
+
+static int
+module_traverse(PyObject *mod, visitproc visit, void *arg)
+{
+ module_state *state = get_module_state(mod);
+ assert(state != NULL);
+ traverse_module_state(state, visit, arg);
+ return 0;
+}
+
+static int
+module_clear(PyObject *mod)
+{
+ module_state *state = get_module_state(mod);
+ assert(state != NULL);
+ clear_module_state(state);
+ return 0;
+}
+
+static void
+module_free(void *mod)
+{
+ module_state *state = get_module_state(mod);
+ assert(state != NULL);
+ clear_module_state(state);
+ _globals_fini();
+}
+
static struct PyModuleDef moduledef = {
.m_base = PyModuleDef_HEAD_INIT,
.m_name = MODULE_NAME,
.m_doc = module_doc,
- .m_size = -1,
+ .m_size = sizeof(module_state),
.m_methods = module_functions,
+ .m_slots = module_slots,
+ .m_traverse = module_traverse,
+ .m_clear = module_clear,
+ .m_free = (freefunc)module_free,
};
-
PyMODINIT_FUNC
PyInit__xxsubinterpreters(void)
{
- /* Create the module */
- PyObject *mod = PyModule_Create(&moduledef);
- if (mod == NULL) {
- return NULL;
- }
- if (module_exec(mod) < 0) {
- Py_DECREF(mod);
- return NULL;
- }
- return mod;
+ return PyModuleDef_Init(&moduledef);
}
diff --git a/Modules/clinic/_asynciomodule.c.h b/Modules/clinic/_asynciomodule.c.h
index 11db478a8b4827..f2fbb352c2c69b 100644
--- a/Modules/clinic/_asynciomodule.c.h
+++ b/Modules/clinic/_asynciomodule.c.h
@@ -987,68 +987,6 @@ _asyncio_get_event_loop(PyObject *module, PyObject *Py_UNUSED(ignored))
return _asyncio_get_event_loop_impl(module);
}
-PyDoc_STRVAR(_asyncio__get_event_loop__doc__,
-"_get_event_loop($module, /, stacklevel=3)\n"
-"--\n"
-"\n");
-
-#define _ASYNCIO__GET_EVENT_LOOP_METHODDEF \
- {"_get_event_loop", _PyCFunction_CAST(_asyncio__get_event_loop), METH_FASTCALL|METH_KEYWORDS, _asyncio__get_event_loop__doc__},
-
-static PyObject *
-_asyncio__get_event_loop_impl(PyObject *module, int stacklevel);
-
-static PyObject *
-_asyncio__get_event_loop(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
-{
- PyObject *return_value = NULL;
- #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
-
- #define NUM_KEYWORDS 1
- static struct {
- PyGC_Head _this_is_not_used;
- PyObject_VAR_HEAD
- PyObject *ob_item[NUM_KEYWORDS];
- } _kwtuple = {
- .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
- .ob_item = { &_Py_ID(stacklevel), },
- };
- #undef NUM_KEYWORDS
- #define KWTUPLE (&_kwtuple.ob_base.ob_base)
-
- #else // !Py_BUILD_CORE
- # define KWTUPLE NULL
- #endif // !Py_BUILD_CORE
-
- static const char * const _keywords[] = {"stacklevel", NULL};
- static _PyArg_Parser _parser = {
- .keywords = _keywords,
- .fname = "_get_event_loop",
- .kwtuple = KWTUPLE,
- };
- #undef KWTUPLE
- PyObject *argsbuf[1];
- Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- int stacklevel = 3;
-
- args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf);
- if (!args) {
- goto exit;
- }
- if (!noptargs) {
- goto skip_optional_pos;
- }
- stacklevel = _PyLong_AsInt(args[0]);
- if (stacklevel == -1 && PyErr_Occurred()) {
- goto exit;
- }
-skip_optional_pos:
- return_value = _asyncio__get_event_loop_impl(module, stacklevel);
-
-exit:
- return return_value;
-}
-
PyDoc_STRVAR(_asyncio_get_running_loop__doc__,
"get_running_loop($module, /)\n"
"--\n"
@@ -1304,4 +1242,4 @@ _asyncio__leave_task(PyObject *module, PyObject *const *args, Py_ssize_t nargs,
exit:
return return_value;
}
-/*[clinic end generated code: output=550bc6603df89ed9 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=83580c190031241c input=a9049054013a1b77]*/
diff --git a/Modules/clinic/itertoolsmodule.c.h b/Modules/clinic/itertoolsmodule.c.h
index 17f9ebb249390f..287de524e91307 100644
--- a/Modules/clinic/itertoolsmodule.c.h
+++ b/Modules/clinic/itertoolsmodule.c.h
@@ -12,19 +12,19 @@ PyDoc_STRVAR(batched_new__doc__,
"batched(iterable, n)\n"
"--\n"
"\n"
-"Batch data into lists of length n. The last batch may be shorter than n.\n"
+"Batch data into tuples of length n. The last batch may be shorter than n.\n"
"\n"
-"Loops over the input iterable and accumulates data into lists\n"
+"Loops over the input iterable and accumulates data into tuples\n"
"up to size n. The input is consumed lazily, just enough to\n"
-"fill a list. The result is yielded as soon as a batch is full\n"
+"fill a batch. The result is yielded as soon as a batch is full\n"
"or when the input iterable is exhausted.\n"
"\n"
" >>> for batch in batched(\'ABCDEFG\', 3):\n"
" ... print(batch)\n"
" ...\n"
-" [\'A\', \'B\', \'C\']\n"
-" [\'D\', \'E\', \'F\']\n"
-" [\'G\']");
+" (\'A\', \'B\', \'C\')\n"
+" (\'D\', \'E\', \'F\')\n"
+" (\'G\',)");
static PyObject *
batched_new_impl(PyTypeObject *type, PyObject *iterable, Py_ssize_t n);
@@ -913,4 +913,4 @@ itertools_count(PyTypeObject *type, PyObject *args, PyObject *kwargs)
exit:
return return_value;
}
-/*[clinic end generated code: output=efea8cd1e647bd17 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=0229ebd72962f130 input=a9049054013a1b77]*/
diff --git a/Modules/faulthandler.c b/Modules/faulthandler.c
index 341a03a244cdc5..5309a3728c5e07 100644
--- a/Modules/faulthandler.c
+++ b/Modules/faulthandler.c
@@ -18,12 +18,6 @@
# include
#endif
-/* Using an alternative stack requires sigaltstack()
- and sigaction() SA_ONSTACK */
-#if defined(HAVE_SIGALTSTACK) && defined(HAVE_SIGACTION)
-# define FAULTHANDLER_USE_ALT_STACK
-#endif
-
#if defined(FAULTHANDLER_USE_ALT_STACK) && defined(HAVE_LINUX_AUXVEC_H) && defined(HAVE_SYS_AUXV_H)
#  include <linux/auxvec.h>       // AT_MINSIGSTKSZ
#  include <sys/auxv.h>           // getauxval()
@@ -32,13 +26,6 @@
/* Allocate at maximum 100 MiB of the stack to raise the stack overflow */
#define STACK_OVERFLOW_MAX_SIZE (100 * 1024 * 1024)
-#ifndef MS_WINDOWS
- /* register() is useless on Windows, because only SIGSEGV, SIGABRT and
- SIGILL can be handled by the process, and these signals can only be used
- with enable(), not using register() */
-# define FAULTHANDLER_USER
-#endif
-
#define PUTS(fd, str) _Py_write_noraise(fd, str, strlen(str))
@@ -58,12 +45,6 @@
#endif
-#ifdef HAVE_SIGACTION
-typedef struct sigaction _Py_sighandler_t;
-#else
-typedef PyOS_sighandler_t _Py_sighandler_t;
-#endif
-
typedef struct {
int signum;
int enabled;
@@ -72,47 +53,12 @@ typedef struct {
int all_threads;
} fault_handler_t;
-static struct {
- int enabled;
- PyObject *file;
- int fd;
- int all_threads;
- PyInterpreterState *interp;
-#ifdef MS_WINDOWS
- void *exc_handler;
-#endif
-} fatal_error = {0, NULL, -1, 0};
-
-static struct {
- PyObject *file;
- int fd;
- PY_TIMEOUT_T timeout_us; /* timeout in microseconds */
- int repeat;
- PyInterpreterState *interp;
- int exit;
- char *header;
- size_t header_len;
- /* The main thread always holds this lock. It is only released when
- faulthandler_thread() is interrupted before this thread exits, or at
- Python exit. */
- PyThread_type_lock cancel_event;
- /* released by child thread when joined */
- PyThread_type_lock running;
-} thread;
+#define fatal_error _PyRuntime.faulthandler.fatal_error
+#define thread _PyRuntime.faulthandler.thread
#ifdef FAULTHANDLER_USER
-typedef struct {
- int enabled;
- PyObject *file;
- int fd;
- int all_threads;
- int chain;
- _Py_sighandler_t previous;
- PyInterpreterState *interp;
-} user_signal_t;
-
-static user_signal_t *user_signals;
-
+#define user_signals _PyRuntime.faulthandler.user_signals
+typedef struct faulthandler_user_signal user_signal_t;
static void faulthandler_user(int signum);
#endif /* FAULTHANDLER_USER */
@@ -134,8 +80,8 @@ static const size_t faulthandler_nsignals = \
Py_ARRAY_LENGTH(faulthandler_handlers);
#ifdef FAULTHANDLER_USE_ALT_STACK
-static stack_t stack;
-static stack_t old_stack;
+# define stack _PyRuntime.faulthandler.stack
+# define old_stack _PyRuntime.faulthandler.old_stack
#endif
@@ -1094,7 +1040,7 @@ faulthandler_fatal_error_thread(void *plock)
static PyObject *
faulthandler_fatal_error_c_thread(PyObject *self, PyObject *args)
{
- long thread;
+ long tid;
PyThread_type_lock lock;
faulthandler_suppress_crash_report();
@@ -1105,8 +1051,8 @@ faulthandler_fatal_error_c_thread(PyObject *self, PyObject *args)
PyThread_acquire_lock(lock, WAIT_LOCK);
- thread = PyThread_start_new_thread(faulthandler_fatal_error_thread, lock);
- if (thread == -1) {
+ tid = PyThread_start_new_thread(faulthandler_fatal_error_thread, lock);
+ if (tid == -1) {
PyThread_free_lock(lock);
PyErr_SetString(PyExc_RuntimeError, "unable to start the thread");
return NULL;
diff --git a/Modules/itertoolsmodule.c b/Modules/itertoolsmodule.c
index d45000788c5318..c1f1e7320db719 100644
--- a/Modules/itertoolsmodule.c
+++ b/Modules/itertoolsmodule.c
@@ -56,11 +56,13 @@ static PyTypeObject pairwise_type;
/* batched object ************************************************************/
/* Note: The built-in zip() function includes a "strict" argument
- that is needed because that function can silently truncate data
- and there is no easy way for a user to detect that condition.
- The same reasoning does not apply to batched() which never drops
- data. Instead, it produces a shorter list which can be handled
- as the user sees fit.
+ that was needed because that function would silently truncate data,
+ and there was no easy way for a user to detect the data loss.
+ The same reasoning does not apply to batched() which never drops data.
+ Instead, batched() produces a shorter tuple which can be handled
+ as the user sees fit. If requested, it would be reasonable to add
+ "fillvalue" support which had demonstrated value in zip_longest().
+ For now, the API is kept simple and clean.
*/
typedef struct {
@@ -74,25 +76,25 @@ typedef struct {
itertools.batched.__new__ as batched_new
iterable: object
n: Py_ssize_t
-Batch data into lists of length n. The last batch may be shorter than n.
+Batch data into tuples of length n. The last batch may be shorter than n.
-Loops over the input iterable and accumulates data into lists
+Loops over the input iterable and accumulates data into tuples
up to size n. The input is consumed lazily, just enough to
-fill a list. The result is yielded as soon as a batch is full
+fill a batch. The result is yielded as soon as a batch is full
or when the input iterable is exhausted.
>>> for batch in batched('ABCDEFG', 3):
... print(batch)
...
- ['A', 'B', 'C']
- ['D', 'E', 'F']
- ['G']
+ ('A', 'B', 'C')
+ ('D', 'E', 'F')
+ ('G',)
[clinic start generated code]*/
static PyObject *
batched_new_impl(PyTypeObject *type, PyObject *iterable, Py_ssize_t n)
-/*[clinic end generated code: output=7ebc954d655371b6 input=f28fd12cb52365f0]*/
+/*[clinic end generated code: output=7ebc954d655371b6 input=ffd70726927c5129]*/
{
PyObject *it;
batchedobject *bo;
@@ -150,12 +152,12 @@ batched_next(batchedobject *bo)
if (it == NULL) {
return NULL;
}
- result = PyList_New(n);
+ result = PyTuple_New(n);
if (result == NULL) {
return NULL;
}
iternextfunc iternext = *Py_TYPE(it)->tp_iternext;
- PyObject **items = _PyList_ITEMS(result);
+ PyObject **items = _PyTuple_ITEMS(result);
for (i=0 ; i < n ; i++) {
item = iternext(it);
if (item == NULL) {
@@ -180,8 +182,7 @@ batched_next(batchedobject *bo)
Py_DECREF(result);
return NULL;
}
- /* Elements in result[i:] are still NULL */
- Py_SET_SIZE(result, i);
+ _PyTuple_Resize(&result, i);
return result;
}
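A short usage sketch of the changed output type::

    from itertools import batched

    for batch in batched("ABCDEFG", 3):
        # batched() never drops data; the final batch is simply a shorter
        # tuple, ('G',) here.
        print(batch)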
diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c
index 7fc8aef9b303fc..4817973262f484 100644
--- a/Modules/posixmodule.c
+++ b/Modules/posixmodule.c
@@ -495,9 +495,11 @@ extern char *ctermid_r(char *);
#ifdef MS_WINDOWS
# define INITFUNC PyInit_nt
# define MODNAME "nt"
+# define MODNAME_OBJ &_Py_ID(nt)
#else
# define INITFUNC PyInit_posix
# define MODNAME "posix"
+# define MODNAME_OBJ &_Py_ID(posix)
#endif
#if defined(__sun)
@@ -974,6 +976,7 @@ typedef struct {
#if defined(HAVE_SCHED_SETPARAM) || defined(HAVE_SCHED_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDPARAM)
PyObject *SchedParamType;
#endif
+ newfunc statresult_new_orig;
PyObject *StatResultType;
PyObject *StatVFSResultType;
PyObject *TerminalSizeType;
@@ -2225,7 +2228,6 @@ static PyStructSequence_Desc waitid_result_desc = {
5
};
#endif
-static newfunc structseq_new;
static PyObject *
statresult_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
@@ -2233,6 +2235,19 @@ statresult_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
PyStructSequence *result;
int i;
+ // ht_module doesn't get set in PyStructSequence_NewType(),
+ // so we can't use PyType_GetModule().
+ PyObject *mod = PyImport_GetModule(MODNAME_OBJ);
+ if (mod == NULL) {
+ return NULL;
+ }
+ _posixstate *state = get_posix_state(mod);
+ Py_DECREF(mod);
+ if (state == NULL) {
+ return NULL;
+ }
+#define structseq_new state->statresult_new_orig
+
result = (PyStructSequence*)structseq_new(type, args, kwds);
if (!result)
return NULL;
@@ -9051,11 +9066,6 @@ build_times_result(PyObject *module, double user, double system,
}
-#ifndef MS_WINDOWS
-#define NEED_TICKS_PER_SECOND
-static long ticks_per_second = -1;
-#endif /* MS_WINDOWS */
-
/*[clinic input]
os.times
@@ -9091,20 +9101,22 @@ os_times_impl(PyObject *module)
}
#else /* MS_WINDOWS */
{
-
-
struct tms t;
clock_t c;
errno = 0;
c = times(&t);
- if (c == (clock_t) -1)
+ if (c == (clock_t) -1) {
return posix_error();
+ }
+ assert(_PyRuntime.time.ticks_per_second_initialized);
+#define ticks_per_second _PyRuntime.time.ticks_per_second
return build_times_result(module,
(double)t.tms_utime / ticks_per_second,
(double)t.tms_stime / ticks_per_second,
(double)t.tms_cutime / ticks_per_second,
(double)t.tms_cstime / ticks_per_second,
(double)c / ticks_per_second);
+#undef ticks_per_second
}
#endif /* MS_WINDOWS */
#endif /* HAVE_TIMES */
@@ -15912,7 +15924,7 @@ posixmodule_exec(PyObject *m)
}
PyModule_AddObject(m, "stat_result", Py_NewRef(StatResultType));
state->StatResultType = StatResultType;
- structseq_new = ((PyTypeObject *)StatResultType)->tp_new;
+ state->statresult_new_orig = ((PyTypeObject *)StatResultType)->tp_new;
((PyTypeObject *)StatResultType)->tp_new = statresult_new;
statvfs_result_desc.name = "os.statvfs_result"; /* see issue #19209 */
@@ -15922,15 +15934,6 @@ posixmodule_exec(PyObject *m)
}
PyModule_AddObject(m, "statvfs_result", Py_NewRef(StatVFSResultType));
state->StatVFSResultType = StatVFSResultType;
-#ifdef NEED_TICKS_PER_SECOND
-# if defined(HAVE_SYSCONF) && defined(_SC_CLK_TCK)
- ticks_per_second = sysconf(_SC_CLK_TCK);
-# elif defined(HZ)
- ticks_per_second = HZ;
-# else
- ticks_per_second = 60; /* magic fallback value; may be bogus */
-# endif
-#endif
#if defined(HAVE_SCHED_SETPARAM) || defined(HAVE_SCHED_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDPARAM)
sched_param_desc.name = MODNAME ".sched_param";
diff --git a/Modules/signalmodule.c b/Modules/signalmodule.c
index c539787e5829dd..538a7e85bc950c 100644
--- a/Modules/signalmodule.c
+++ b/Modules/signalmodule.c
@@ -13,7 +13,7 @@
#include "pycore_moduleobject.h" // _PyModule_GetState()
#include "pycore_pyerrors.h" // _PyErr_SetString()
#include "pycore_pystate.h" // _PyThreadState_GET()
-#include "pycore_signal.h" // Py_NSIG
+#include "pycore_signal.h"
#ifndef MS_WINDOWS
# include "posixmodule.h"
@@ -23,12 +23,13 @@
#endif
#ifdef MS_WINDOWS
-# include <windows.h>
# ifdef HAVE_PROCESS_H
#  include <process.h>
# endif
#endif
+#include "pycore_signal.h" // Py_NSIG
+
#ifdef HAVE_SIGNAL_H
# include <signal.h>
#endif
@@ -100,47 +101,13 @@ class sigset_t_converter(CConverter):
may not be the thread that received the signal.
*/
-static volatile struct {
- _Py_atomic_int tripped;
- /* func is atomic to ensure that PyErr_SetInterrupt is async-signal-safe
- * (even though it would probably be otherwise, anyway).
- */
- _Py_atomic_address func;
-} Handlers[Py_NSIG];
-
-#ifdef MS_WINDOWS
-#define INVALID_FD ((SOCKET_T)-1)
-
-static volatile struct {
- SOCKET_T fd;
- int warn_on_full_buffer;
- int use_send;
-} wakeup = {.fd = INVALID_FD, .warn_on_full_buffer = 1, .use_send = 0};
-#else
-#define INVALID_FD (-1)
-static volatile struct {
-#ifdef __VXWORKS__
- int fd;
-#else
- sig_atomic_t fd;
-#endif
- int warn_on_full_buffer;
-} wakeup = {.fd = INVALID_FD, .warn_on_full_buffer = 1};
-#endif
-
-/* Speed up sigcheck() when none tripped */
-static _Py_atomic_int is_tripped;
-
-typedef struct {
- PyObject *default_handler;
- PyObject *ignore_handler;
-#ifdef MS_WINDOWS
- HANDLE sigint_event;
-#endif
-} signal_state_t;
+#define Handlers _PyRuntime.signals.handlers
+#define wakeup _PyRuntime.signals.wakeup
+#define is_tripped _PyRuntime.signals.is_tripped
// State shared by all Python interpreters
-static signal_state_t signal_global_state = {0};
+typedef struct _signals_runtime_state signal_state_t;
+#define signal_global_state _PyRuntime.signals
#if defined(HAVE_GETITIMER) || defined(HAVE_SETITIMER)
# define PYHAVE_ITIMER_ERROR
@@ -331,13 +298,7 @@ trip_signal(int sig_num)
See bpo-30038 for more details.
*/
- int fd;
-#ifdef MS_WINDOWS
- fd = Py_SAFE_DOWNCAST(wakeup.fd, SOCKET_T, int);
-#else
- fd = wakeup.fd;
-#endif
-
+ int fd = wakeup.fd;
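+ // No Py_SAFE_DOWNCAST is needed here anymore: wakeup.fd is stored as an int on
+ // every platform, and the downcast happens where the fd is set.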
if (fd != INVALID_FD) {
unsigned char byte = (unsigned char)sig_num;
#ifdef MS_WINDOWS
@@ -407,7 +368,7 @@ signal_handler(int sig_num)
#ifdef MS_WINDOWS
if (sig_num == SIGINT) {
signal_state_t *state = &signal_global_state;
- SetEvent(state->sigint_event);
+ SetEvent((HANDLE)state->sigint_event);
}
#endif
}
@@ -822,7 +783,7 @@ signal_set_wakeup_fd(PyObject *self, PyObject *args, PyObject *kwds)
}
old_sockfd = wakeup.fd;
- wakeup.fd = sockfd;
+ wakeup.fd = Py_SAFE_DOWNCAST(sockfd, SOCKET_T, int);
wakeup.warn_on_full_buffer = warn_on_full_buffer;
wakeup.use_send = is_socket;
@@ -873,11 +834,7 @@ PySignal_SetWakeupFd(int fd)
fd = -1;
}
-#ifdef MS_WINDOWS
- int old_fd = Py_SAFE_DOWNCAST(wakeup.fd, SOCKET_T, int);
-#else
int old_fd = wakeup.fd;
-#endif
wakeup.fd = fd;
wakeup.warn_on_full_buffer = 1;
return old_fd;
@@ -1654,6 +1611,8 @@ signal_module_exec(PyObject *m)
signal_state_t *state = &signal_global_state;
_signal_module_state *modstate = get_signal_state(m);
+ // XXX For proper isolation, these values must be guaranteed
+ // to be effectively const (e.g. immortal).
modstate->default_handler = state->default_handler; // borrowed ref
modstate->ignore_handler = state->ignore_handler; // borrowed ref
@@ -1783,7 +1742,7 @@ _PySignal_Fini(void)
#ifdef MS_WINDOWS
if (state->sigint_event != NULL) {
- CloseHandle(state->sigint_event);
+ CloseHandle((HANDLE)state->sigint_event);
state->sigint_event = NULL;
}
#endif
@@ -2009,7 +1968,7 @@ _PySignal_Init(int install_signal_handlers)
#ifdef MS_WINDOWS
/* Create manual-reset event, initially unset */
- state->sigint_event = CreateEvent(NULL, TRUE, FALSE, FALSE);
+ state->sigint_event = (void *)CreateEvent(NULL, TRUE, FALSE, FALSE);
if (state->sigint_event == NULL) {
PyErr_SetFromWindowsErr(0);
return -1;
diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c
index 7d438448ef36e7..2c59c2f2c89b25 100644
--- a/Modules/socketmodule.c
+++ b/Modules/socketmodule.c
@@ -8484,18 +8484,78 @@ PyInit__socket(void)
#ifdef TCP_QUICKACK
PyModule_AddIntMacro(m, TCP_QUICKACK);
#endif
-#ifdef TCP_FASTOPEN
- PyModule_AddIntMacro(m, TCP_FASTOPEN);
-#endif
#ifdef TCP_CONGESTION
PyModule_AddIntMacro(m, TCP_CONGESTION);
#endif
+#ifdef TCP_MD5SIG
+ PyModule_AddIntMacro(m, TCP_MD5SIG);
+#endif
+#ifdef TCP_THIN_LINEAR_TIMEOUTS
+ PyModule_AddIntMacro(m, TCP_THIN_LINEAR_TIMEOUTS);
+#endif
+#ifdef TCP_THIN_DUPACK
+ PyModule_AddIntMacro(m, TCP_THIN_DUPACK);
+#endif
#ifdef TCP_USER_TIMEOUT
PyModule_AddIntMacro(m, TCP_USER_TIMEOUT);
#endif
+#ifdef TCP_REPAIR
+ PyModule_AddIntMacro(m, TCP_REPAIR);
+#endif
+#ifdef TCP_REPAIR_QUEUE
+ PyModule_AddIntMacro(m, TCP_REPAIR_QUEUE);
+#endif
+#ifdef TCP_QUEUE_SEQ
+ PyModule_AddIntMacro(m, TCP_QUEUE_SEQ);
+#endif
+#ifdef TCP_REPAIR_OPTIONS
+ PyModule_AddIntMacro(m, TCP_REPAIR_OPTIONS);
+#endif
+#ifdef TCP_FASTOPEN
+ PyModule_AddIntMacro(m, TCP_FASTOPEN);
+#endif
+#ifdef TCP_TIMESTAMP
+ PyModule_AddIntMacro(m, TCP_TIMESTAMP);
+#endif
#ifdef TCP_NOTSENT_LOWAT
PyModule_AddIntMacro(m, TCP_NOTSENT_LOWAT);
#endif
+#ifdef TCP_CC_INFO
+ PyModule_AddIntMacro(m, TCP_CC_INFO);
+#endif
+#ifdef TCP_SAVE_SYN
+ PyModule_AddIntMacro(m, TCP_SAVE_SYN);
+#endif
+#ifdef TCP_SAVED_SYN
+ PyModule_AddIntMacro(m, TCP_SAVED_SYN);
+#endif
+#ifdef TCP_REPAIR_WINDOW
+ PyModule_AddIntMacro(m, TCP_REPAIR_WINDOW);
+#endif
+#ifdef TCP_FASTOPEN_CONNECT
+ PyModule_AddIntMacro(m, TCP_FASTOPEN_CONNECT);
+#endif
+#ifdef TCP_ULP
+ PyModule_AddIntMacro(m, TCP_ULP);
+#endif
+#ifdef TCP_MD5SIG_EXT
+ PyModule_AddIntMacro(m, TCP_MD5SIG_EXT);
+#endif
+#ifdef TCP_FASTOPEN_KEY
+ PyModule_AddIntMacro(m, TCP_FASTOPEN_KEY);
+#endif
+#ifdef TCP_FASTOPEN_NO_COOKIE
+ PyModule_AddIntMacro(m, TCP_FASTOPEN_NO_COOKIE);
+#endif
+#ifdef TCP_ZEROCOPY_RECEIVE
+ PyModule_AddIntMacro(m, TCP_ZEROCOPY_RECEIVE);
+#endif
+#ifdef TCP_INQ
+ PyModule_AddIntMacro(m, TCP_INQ);
+#endif
+#ifdef TCP_TX_DELAY
+ PyModule_AddIntMacro(m, TCP_TX_DELAY);
+#endif
/* IPX options */
#ifdef IPX_TYPE
diff --git a/Modules/timemodule.c b/Modules/timemodule.c
index 11c888af03e82d..c2bacaae0c0339 100644
--- a/Modules/timemodule.c
+++ b/Modules/timemodule.c
@@ -62,6 +62,56 @@
#define SEC_TO_NS (1000 * 1000 * 1000)
+#if defined(HAVE_TIMES) || defined(HAVE_CLOCK)
+static int
+check_ticks_per_second(long tps, const char *context)
+{
+ /* Effectively, check that _PyTime_MulDiv(t, SEC_TO_NS, ticks_per_second)
+ cannot overflow. */
+ if (tps >= 0 && (_PyTime_t)tps > _PyTime_MAX / SEC_TO_NS) {
+ PyErr_Format(PyExc_OverflowError, "%s is too large", context);
+ return -1;
+ }
+ return 0;
+}
+#endif /* HAVE_TIMES || HAVE_CLOCK */
+
+#ifdef HAVE_TIMES
+
+# define ticks_per_second _PyRuntime.time.ticks_per_second
+
+static void
+ensure_ticks_per_second(void)
+{
+ if (_PyRuntime.time.ticks_per_second_initialized) {
+ return;
+ }
+ _PyRuntime.time.ticks_per_second_initialized = 1;
+# if defined(HAVE_SYSCONF) && defined(_SC_CLK_TCK)
+ ticks_per_second = sysconf(_SC_CLK_TCK);
+ if (ticks_per_second < 1) {
+ ticks_per_second = -1;
+ }
+# elif defined(HZ)
+ ticks_per_second = HZ;
+# else
+ ticks_per_second = 60; /* magic fallback value; may be bogus */
+# endif
+}
+
+#endif /* HAVE_TIMES */
+
+
+PyStatus
+_PyTime_Init(void)
+{
+#ifdef HAVE_TIMES
+ ensure_ticks_per_second();
+#endif
+ return PyStatus_Ok();
+}
+
+
/* Forward declarations */
static int pysleep(_PyTime_t timeout);
@@ -140,18 +190,8 @@ Return the current time in nanoseconds since the Epoch.");
static int
_PyTime_GetClockWithInfo(_PyTime_t *tp, _Py_clock_info_t *info)
{
- static int initialized = 0;
-
- if (!initialized) {
- initialized = 1;
-
- /* Make sure that _PyTime_MulDiv(ticks, SEC_TO_NS, CLOCKS_PER_SEC)
- above cannot overflow */
- if ((_PyTime_t)CLOCKS_PER_SEC > _PyTime_MAX / SEC_TO_NS) {
- PyErr_SetString(PyExc_OverflowError,
- "CLOCKS_PER_SEC is too large");
- return -1;
- }
+ if (check_ticks_per_second(CLOCKS_PER_SEC, "CLOCKS_PER_SEC") < 0) {
+ return -1;
}
if (info) {
@@ -1308,36 +1348,10 @@ _PyTime_GetProcessTimeWithInfo(_PyTime_t *tp, _Py_clock_info_t *info)
struct tms t;
if (times(&t) != (clock_t)-1) {
- static long ticks_per_second = -1;
-
- if (ticks_per_second == -1) {
- long freq;
-#if defined(HAVE_SYSCONF) && defined(_SC_CLK_TCK)
- freq = sysconf(_SC_CLK_TCK);
- if (freq < 1) {
- freq = -1;
- }
-#elif defined(HZ)
- freq = HZ;
-#else
- freq = 60; /* magic fallback value; may be bogus */
-#endif
-
- if (freq != -1) {
- /* check that _PyTime_MulDiv(t, SEC_TO_NS, ticks_per_second)
- cannot overflow below */
-#if LONG_MAX > _PyTime_MAX / SEC_TO_NS
- if ((_PyTime_t)freq > _PyTime_MAX / SEC_TO_NS) {
- PyErr_SetString(PyExc_OverflowError,
- "_SC_CLK_TCK is too large");
- return -1;
- }
-#endif
-
- ticks_per_second = freq;
- }
+ assert(_PyRuntime.time.ticks_per_second_initialized);
+ if (check_ticks_per_second(ticks_per_second, "_SC_CLK_TCK") < 0) {
+ return -1;
}
-
if (ticks_per_second != -1) {
if (info) {
info->implementation = "times()";
diff --git a/Objects/codeobject.c b/Objects/codeobject.c
index f54acc3bbc7c40..834a7eb8846bbe 100644
--- a/Objects/codeobject.c
+++ b/Objects/codeobject.c
@@ -11,7 +11,6 @@
#include "pycore_tuple.h" // _PyTuple_ITEMS()
#include "clinic/codeobject.c.h"
-
static void
notify_code_watchers(PyCodeEvent event, PyCodeObject *co)
{
@@ -405,7 +404,10 @@ init_code(PyCodeObject *co, struct _PyCodeConstructor *con)
co->co_nplaincellvars = nplaincellvars;
co->co_ncellvars = ncellvars;
co->co_nfreevars = nfreevars;
-
+ co->co_version = _Py_next_func_version;
+ if (_Py_next_func_version != 0) {
+ _Py_next_func_version++;
+ }
/* not set */
co->co_weakreflist = NULL;
co->co_extra = NULL;
diff --git a/Objects/funcobject.c b/Objects/funcobject.c
index 12f9843e319926..d5cf5b9277b3f1 100644
--- a/Objects/funcobject.c
+++ b/Objects/funcobject.c
@@ -3,7 +3,7 @@
#include "Python.h"
#include "pycore_ceval.h" // _PyEval_BuiltinsFromGlobals()
-#include "pycore_function.h" // FUNC_MAX_WATCHERS
+#include "pycore_code.h" // _Py_next_func_version
#include "pycore_object.h" // _PyObject_GC_UNTRACK()
#include "pycore_pyerrors.h" // _PyErr_Occurred()
#include "structmember.h" // PyMemberDef
@@ -74,7 +74,6 @@ PyFunction_ClearWatcher(int watcher_id)
interp->active_func_watchers &= ~(1 << watcher_id);
return 0;
}
-
PyFunctionObject *
_PyFunction_FromConstructor(PyFrameConstructor *constr)
{
diff --git a/Objects/longobject.c b/Objects/longobject.c
index c84b4d3f316d5d..8596ce9797b5a6 100644
--- a/Objects/longobject.c
+++ b/Objects/longobject.c
@@ -36,8 +36,8 @@ medium_value(PyLongObject *x)
#define IS_SMALL_INT(ival) (-_PY_NSMALLNEGINTS <= (ival) && (ival) < _PY_NSMALLPOSINTS)
#define IS_SMALL_UINT(ival) ((ival) < _PY_NSMALLPOSINTS)
-#define _MAX_STR_DIGITS_ERROR_FMT_TO_INT "Exceeds the limit (%d) for integer string conversion: value has %zd digits; use sys.set_int_max_str_digits() to increase the limit"
-#define _MAX_STR_DIGITS_ERROR_FMT_TO_STR "Exceeds the limit (%d) for integer string conversion; use sys.set_int_max_str_digits() to increase the limit"
+#define _MAX_STR_DIGITS_ERROR_FMT_TO_INT "Exceeds the limit (%d digits) for integer string conversion: value has %zd digits; use sys.set_int_max_str_digits() to increase the limit"
+#define _MAX_STR_DIGITS_ERROR_FMT_TO_STR "Exceeds the limit (%d digits) for integer string conversion; use sys.set_int_max_str_digits() to increase the limit"
/* If defined, use algorithms from the _pylong.py module */
#define WITH_PYLONG_MODULE 1
diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c
index 4c08bc214cd27a..276c5a276c06e6 100644
--- a/Objects/obmalloc.c
+++ b/Objects/obmalloc.c
@@ -908,11 +908,12 @@ new_arena(void)
struct arena_object* arenaobj;
uint excess; /* number of bytes above pool alignment */
void *address;
- static int debug_stats = -1;
+ int debug_stats = _PyRuntime.obmalloc.dump_debug_stats;
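+ // -1 means the PYTHONMALLOCSTATS check has not run yet; its result is cached
+ // in the runtime state below.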
if (debug_stats == -1) {
const char *opt = Py_GETENV("PYTHONMALLOCSTATS");
debug_stats = (opt != NULL && *opt != '\0');
+ _PyRuntime.obmalloc.dump_debug_stats = debug_stats;
}
if (debug_stats) {
_PyObject_DebugMallocStats(stderr);
diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c
index 4405125d45e7cc..e1b9953226c0d7 100644
--- a/Objects/tupleobject.c
+++ b/Objects/tupleobject.c
@@ -995,14 +995,9 @@ _PyTuple_ClearFreeList(PyInterpreterState *interp)
/*********************** Tuple Iterator **************************/
-typedef struct {
- PyObject_HEAD
- Py_ssize_t it_index;
- PyTupleObject *it_seq; /* Set to NULL when iterator is exhausted */
-} tupleiterobject;
static void
-tupleiter_dealloc(tupleiterobject *it)
+tupleiter_dealloc(_PyTupleIterObject *it)
{
_PyObject_GC_UNTRACK(it);
Py_XDECREF(it->it_seq);
@@ -1010,14 +1005,14 @@ tupleiter_dealloc(tupleiterobject *it)
}
static int
-tupleiter_traverse(tupleiterobject *it, visitproc visit, void *arg)
+tupleiter_traverse(_PyTupleIterObject *it, visitproc visit, void *arg)
{
Py_VISIT(it->it_seq);
return 0;
}
static PyObject *
-tupleiter_next(tupleiterobject *it)
+tupleiter_next(_PyTupleIterObject *it)
{
PyTupleObject *seq;
PyObject *item;
@@ -1040,7 +1035,7 @@ tupleiter_next(tupleiterobject *it)
}
static PyObject *
-tupleiter_len(tupleiterobject *it, PyObject *Py_UNUSED(ignored))
+tupleiter_len(_PyTupleIterObject *it, PyObject *Py_UNUSED(ignored))
{
Py_ssize_t len = 0;
if (it->it_seq)
@@ -1051,7 +1046,7 @@ tupleiter_len(tupleiterobject *it, PyObject *Py_UNUSED(ignored))
PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
static PyObject *
-tupleiter_reduce(tupleiterobject *it, PyObject *Py_UNUSED(ignored))
+tupleiter_reduce(_PyTupleIterObject *it, PyObject *Py_UNUSED(ignored))
{
if (it->it_seq)
return Py_BuildValue("N(O)n", _PyEval_GetBuiltin(&_Py_ID(iter)),
@@ -1061,7 +1056,7 @@ tupleiter_reduce(tupleiterobject *it, PyObject *Py_UNUSED(ignored))
}
static PyObject *
-tupleiter_setstate(tupleiterobject *it, PyObject *state)
+tupleiter_setstate(_PyTupleIterObject *it, PyObject *state)
{
Py_ssize_t index = PyLong_AsSsize_t(state);
if (index == -1 && PyErr_Occurred())
@@ -1089,7 +1084,7 @@ static PyMethodDef tupleiter_methods[] = {
PyTypeObject PyTupleIter_Type = {
PyVarObject_HEAD_INIT(&PyType_Type, 0)
"tuple_iterator", /* tp_name */
- sizeof(tupleiterobject), /* tp_basicsize */
+ sizeof(_PyTupleIterObject), /* tp_basicsize */
0, /* tp_itemsize */
/* methods */
(destructor)tupleiter_dealloc, /* tp_dealloc */
@@ -1122,13 +1117,13 @@ PyTypeObject PyTupleIter_Type = {
static PyObject *
tuple_iter(PyObject *seq)
{
- tupleiterobject *it;
+ _PyTupleIterObject *it;
if (!PyTuple_Check(seq)) {
PyErr_BadInternalCall();
return NULL;
}
- it = PyObject_GC_New(tupleiterobject, &PyTupleIter_Type);
+ it = PyObject_GC_New(_PyTupleIterObject, &PyTupleIter_Type);
if (it == NULL)
return NULL;
it->it_index = 0;
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c
index deeca35714b766..b721ccd805edf1 100644
--- a/Objects/unicodeobject.c
+++ b/Objects/unicodeobject.c
@@ -5697,8 +5697,6 @@ PyUnicode_AsUTF16String(PyObject *unicode)
/* --- Unicode Escape Codec ----------------------------------------------- */
-static _PyUnicode_Name_CAPI *ucnhash_capi = NULL;
-
PyObject *
_PyUnicode_DecodeUnicodeEscapeInternal(const char *s,
Py_ssize_t size,
@@ -5711,6 +5709,8 @@ _PyUnicode_DecodeUnicodeEscapeInternal(const char *s,
const char *end;
PyObject *errorHandler = NULL;
PyObject *exc = NULL;
+ _PyUnicode_Name_CAPI *ucnhash_capi;
+ PyInterpreterState *interp = _PyInterpreterState_Get();
// so we can remember if we've seen an invalid escape char or not
*first_invalid_escape = NULL;
@@ -5858,6 +5858,7 @@ _PyUnicode_DecodeUnicodeEscapeInternal(const char *s,
/* \N{name} */
case 'N':
+ ucnhash_capi = interp->unicode.ucnhash_capi;
if (ucnhash_capi == NULL) {
/* load the unicode data module */
ucnhash_capi = (_PyUnicode_Name_CAPI *)PyCapsule_Import(
@@ -5869,6 +5870,7 @@ _PyUnicode_DecodeUnicodeEscapeInternal(const char *s,
);
goto onError;
}
+ interp->unicode.ucnhash_capi = ucnhash_capi;
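+ // The capsule is cached on the interpreter state, so later \N{...} escapes
+ // skip the re-import.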
}
message = "malformed \\N character escape";
@@ -15128,10 +15130,10 @@ _PyUnicode_Fini(PyInterpreterState *interp)
assert(get_interned_dict() == NULL);
// bpo-47182: force a unicodedata CAPI capsule re-import on
// subsequent initialization of main interpreter.
- ucnhash_capi = NULL;
}
_PyUnicode_FiniEncodings(&state->fs_codec);
+ interp->unicode.ucnhash_capi = NULL;
unicode_clear_identifiers(state);
}
diff --git a/PC/pyconfig.h b/PC/pyconfig.h
index 1a33d4c5a1e4fc..1d8408b363a66a 100644
--- a/PC/pyconfig.h
+++ b/PC/pyconfig.h
@@ -209,6 +209,16 @@ typedef int pid_t;
#endif /* _MSC_VER */
+/* ------------------------------------------------------------------------*/
+/* mingw and mingw-w64 define __MINGW32__ */
+#ifdef __MINGW32__
+
+#ifdef _WIN64
+#define MS_WIN64
+#endif
+
+#endif /* __MINGW32__ */
+
/* ------------------------------------------------------------------------*/
/* egcs/gnu-win32 defines __GNUC__ and _WIN32 */
#if defined(__GNUC__) && defined(_WIN32)
diff --git a/PC/winreg.c b/PC/winreg.c
index df34e8cf5a77a9..63b37be526ab80 100644
--- a/PC/winreg.c
+++ b/PC/winreg.c
@@ -561,42 +561,54 @@ Py2Reg(PyObject *value, DWORD typ, BYTE **retDataBuf, DWORD *retDataSize)
{
Py_ssize_t i,j;
switch (typ) {
- case REG_DWORD:
- if (value != Py_None && !PyLong_Check(value))
- return FALSE;
- *retDataBuf = (BYTE *)PyMem_NEW(DWORD, 1);
- if (*retDataBuf == NULL){
- PyErr_NoMemory();
- return FALSE;
- }
- *retDataSize = sizeof(DWORD);
- if (value == Py_None) {
- DWORD zero = 0;
- memcpy(*retDataBuf, &zero, sizeof(DWORD));
- }
- else {
- DWORD d = PyLong_AsUnsignedLong(value);
+ case REG_DWORD:
+ {
+ if (value != Py_None && !PyLong_Check(value)) {
+ return FALSE;
+ }
+ DWORD d;
+ if (value == Py_None) {
+ d = 0;
+ }
+ else if (PyLong_Check(value)) {
+ d = PyLong_AsUnsignedLong(value);
+ if (d == (DWORD)(-1) && PyErr_Occurred()) {
+ return FALSE;
+ }
+ }
+ *retDataBuf = (BYTE *)PyMem_NEW(DWORD, 1);
+ if (*retDataBuf == NULL) {
+ PyErr_NoMemory();
+ return FALSE;
+ }
memcpy(*retDataBuf, &d, sizeof(DWORD));
+ *retDataSize = sizeof(DWORD);
+ break;
}
- break;
- case REG_QWORD:
- if (value != Py_None && !PyLong_Check(value))
- return FALSE;
- *retDataBuf = (BYTE *)PyMem_NEW(DWORD64, 1);
- if (*retDataBuf == NULL){
- PyErr_NoMemory();
- return FALSE;
- }
- *retDataSize = sizeof(DWORD64);
- if (value == Py_None) {
- DWORD64 zero = 0;
- memcpy(*retDataBuf, &zero, sizeof(DWORD64));
- }
- else {
- DWORD64 d = PyLong_AsUnsignedLongLong(value);
+ case REG_QWORD:
+ {
+ if (value != Py_None && !PyLong_Check(value)) {
+ return FALSE;
+ }
+ DWORD64 d;
+ if (value == Py_None) {
+ d = 0;
+ }
+ else if (PyLong_Check(value)) {
+ d = PyLong_AsUnsignedLongLong(value);
+ if (d == (DWORD64)(-1) && PyErr_Occurred()) {
+ return FALSE;
+ }
+ }
+ *retDataBuf = (BYTE *)PyMem_NEW(DWORD64, 1);
+ if (*retDataBuf == NULL) {
+ PyErr_NoMemory();
+ return FALSE;
+ }
memcpy(*retDataBuf, &d, sizeof(DWORD64));
+ *retDataSize = sizeof(DWORD64);
+ break;
}
- break;
case REG_SZ:
case REG_EXPAND_SZ:
{
diff --git a/PCbuild/_freeze_module.vcxproj b/PCbuild/_freeze_module.vcxproj
index 8454bd67b1db1b..fce1f670510001 100644
--- a/PCbuild/_freeze_module.vcxproj
+++ b/PCbuild/_freeze_module.vcxproj
@@ -110,6 +110,7 @@
+
diff --git a/PCbuild/_freeze_module.vcxproj.filters b/PCbuild/_freeze_module.vcxproj.filters
index 6e8498dceb1cfa..dce6278987c5df 100644
--- a/PCbuild/_freeze_module.vcxproj.filters
+++ b/PCbuild/_freeze_module.vcxproj.filters
@@ -367,6 +367,9 @@
Source Files
+
+ Source Files
+
Source Files
diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj
index f62434370cfdf7..bb2aaae3317b02 100644
--- a/PCbuild/pythoncore.vcxproj
+++ b/PCbuild/pythoncore.vcxproj
@@ -204,6 +204,7 @@
+
@@ -213,6 +214,7 @@
+
@@ -244,6 +246,7 @@
+
@@ -254,8 +257,10 @@
+
+
diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters
index f44a1ad8550a38..339e7cc4937a3d 100644
--- a/PCbuild/pythoncore.vcxproj.filters
+++ b/PCbuild/pythoncore.vcxproj.filters
@@ -519,6 +519,9 @@
Include\internal
+
+ Include\internal
+
Include\internal
@@ -546,6 +549,9 @@
Include\internal
+
+ Include\internal
+
Include\internal
@@ -555,6 +561,9 @@
Include\internal
+
+ Include\internal
+
Include\internal
@@ -636,6 +645,9 @@
Include\internal
+
+ Include\internal
+
Include\internal
@@ -663,12 +675,18 @@
Include\internal
+
+ Include\internal
+
Include\internal
Include\internal
+
+ Include\internal
+
Include\internal
diff --git a/Parser/action_helpers.c b/Parser/action_helpers.c
index 27c093332f6725..f12dad095acaa8 100644
--- a/Parser/action_helpers.c
+++ b/Parser/action_helpers.c
@@ -13,6 +13,7 @@ void *
_PyPegen_dummy_name(Parser *p, ...)
{
// XXX This leaks memory from the initial arena.
+ // Use a statically allocated variable instead of a pointer?
static void *cache = NULL;
if (cache != NULL) {
@@ -1287,4 +1288,4 @@ _PyPegen_nonparen_genexp_in_call(Parser *p, expr_ty args, asdl_comprehension_seq
_PyPegen_get_last_comprehension_item(last_comprehension),
"Generator expression must be parenthesized"
);
-}
\ No newline at end of file
+}
diff --git a/Parser/pegen.c b/Parser/pegen.c
index d34a86e9c883de..d84e06861edefc 100644
--- a/Parser/pegen.c
+++ b/Parser/pegen.c
@@ -246,8 +246,8 @@ _PyPegen_fill_token(Parser *p)
// The array counts the number of tokens skipped by memoization,
// indexed by type.
-#define NSTATISTICS 2000
-static long memo_statistics[NSTATISTICS];
+#define NSTATISTICS _PYPEGEN_NSTATISTICS
+#define memo_statistics _PyRuntime.parser.memo_statistics
void
_PyPegen_clear_memo_statistics()
diff --git a/Parser/pegen_errors.c b/Parser/pegen_errors.c
index 7738cbaf9ef39e..6ea7600119b643 100644
--- a/Parser/pegen_errors.c
+++ b/Parser/pegen_errors.c
@@ -169,6 +169,10 @@ _PyPegen_tokenize_full_source_to_check_for_errors(Parser *p) {
for (;;) {
switch (_PyTokenizer_Get(p->tok, &new_token)) {
case ERRORTOKEN:
+ if (PyErr_Occurred()) {
+ ret = -1;
+ goto exit;
+ }
if (p->tok->level != 0) {
int error_lineno = p->tok->parenlinenostack[p->tok->level-1];
if (current_err_line > error_lineno) {
diff --git a/Programs/_bootstrap_python.c b/Programs/_bootstrap_python.c
index bbac0c4e1a8a45..6c388fc7033dd0 100644
--- a/Programs/_bootstrap_python.c
+++ b/Programs/_bootstrap_python.c
@@ -12,8 +12,11 @@
/* Includes for frozen modules: */
#include "Python/frozen_modules/importlib._bootstrap.h"
#include "Python/frozen_modules/importlib._bootstrap_external.h"
+#include "Python/frozen_modules/zipimport.h"
/* End includes */
+uint32_t _Py_next_func_version = 1;
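+// Counter used by init_code() to assign PyCodeObject.co_version
+// (see Objects/codeobject.c).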
+
/* Empty initializer for deepfrozen modules */
int _Py_Deepfreeze_Init(void)
{
@@ -30,6 +33,7 @@ _Py_Deepfreeze_Fini(void)
static const struct _frozen bootstrap_modules[] = {
{"_frozen_importlib", _Py_M__importlib__bootstrap, (int)sizeof(_Py_M__importlib__bootstrap)},
{"_frozen_importlib_external", _Py_M__importlib__bootstrap_external, (int)sizeof(_Py_M__importlib__bootstrap_external)},
+ {"zipimport", _Py_M__zipimport, (int)sizeof(_Py_M__zipimport)},
{0, 0, 0} /* bootstrap sentinel */
};
static const struct _frozen stdlib_modules[] = {
diff --git a/Programs/_freeze_module.c b/Programs/_freeze_module.c
index 9e2169f32e9211..90fc2dc6e87da8 100644
--- a/Programs/_freeze_module.c
+++ b/Programs/_freeze_module.c
@@ -9,6 +9,7 @@
Keep this file in sync with Programs/_freeze_module.py.
*/
+
#include <Python.h>
#include <marshal.h>
#include "pycore_fileutils.h" // _Py_stat_struct
@@ -22,6 +23,8 @@
#include <unistd.h>
#endif
+uint32_t _Py_next_func_version = 1;
+
/* Empty initializer for deepfrozen modules */
int _Py_Deepfreeze_Init(void)
{
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index 41dd1acc937d71..c56f1d3ef9f498 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -81,8 +81,17 @@ do { \
// Dummy variables for stack effects.
static PyObject *value, *value1, *value2, *left, *right, *res, *sum, *prod, *sub;
static PyObject *container, *start, *stop, *v, *lhs, *rhs;
-static PyObject *list, *tuple, *dict;
-static PyObject *exit_func, *lasti, *val;
+static PyObject *list, *tuple, *dict, *owner;
+static PyObject *exit_func, *lasti, *val, *retval, *obj, *iter;
+static size_t jump;
+// Dummy variables for cache effects
+static _Py_CODEUNIT when_to_jump_mask, invert, counter, index, hint;
+static uint32_t type_version;
+// Dummy opcode names for 'op' opcodes
+#define _COMPARE_OP_FLOAT 1003
+#define _COMPARE_OP_INT 1004
+#define _COMPARE_OP_STR 1005
+#define _JUMP_IF 1006
static PyObject *
dummy_func(
@@ -205,7 +214,7 @@ dummy_func(
};
- inst(BINARY_OP_MULTIPLY_INT, (left, right, unused/1 -- prod)) {
+ inst(BINARY_OP_MULTIPLY_INT, (unused/1, left, right -- prod)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP);
DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP);
@@ -216,7 +225,7 @@ dummy_func(
ERROR_IF(prod == NULL, error);
}
- inst(BINARY_OP_MULTIPLY_FLOAT, (left, right, unused/1 -- prod)) {
+ inst(BINARY_OP_MULTIPLY_FLOAT, (unused/1, left, right -- prod)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP);
DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP);
@@ -229,7 +238,7 @@ dummy_func(
ERROR_IF(prod == NULL, error);
}
- inst(BINARY_OP_SUBTRACT_INT, (left, right, unused/1 -- sub)) {
+ inst(BINARY_OP_SUBTRACT_INT, (unused/1, left, right -- sub)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP);
DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP);
@@ -240,7 +249,7 @@ dummy_func(
ERROR_IF(sub == NULL, error);
}
- inst(BINARY_OP_SUBTRACT_FLOAT, (left, right, unused/1 -- sub)) {
+ inst(BINARY_OP_SUBTRACT_FLOAT, (unused/1, left, right -- sub)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP);
DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP);
@@ -252,7 +261,7 @@ dummy_func(
ERROR_IF(sub == NULL, error);
}
- inst(BINARY_OP_ADD_UNICODE, (left, right, unused/1 -- res)) {
+ inst(BINARY_OP_ADD_UNICODE, (unused/1, left, right -- res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP);
DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP);
@@ -299,7 +308,7 @@ dummy_func(
JUMPBY(INLINE_CACHE_ENTRIES_BINARY_OP + 1);
}
- inst(BINARY_OP_ADD_FLOAT, (left, right, unused/1 -- sum)) {
+ inst(BINARY_OP_ADD_FLOAT, (unused/1, left, right -- sum)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP);
DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP);
@@ -312,7 +321,7 @@ dummy_func(
ERROR_IF(sum == NULL, error);
}
- inst(BINARY_OP_ADD_INT, (left, right, unused/1 -- sum)) {
+ inst(BINARY_OP_ADD_INT, (unused/1, left, right -- sum)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP);
DEOPT_IF(Py_TYPE(right) != Py_TYPE(left), BINARY_OP);
@@ -331,7 +340,7 @@ dummy_func(
BINARY_SUBSCR_TUPLE_INT,
};
- inst(BINARY_SUBSCR, (container, sub, unused/4 -- res)) {
+ inst(BINARY_SUBSCR, (unused/4, container, sub -- res)) {
_PyBinarySubscrCache *cache = (_PyBinarySubscrCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
@@ -377,7 +386,7 @@ dummy_func(
ERROR_IF(err, error);
}
- inst(BINARY_SUBSCR_LIST_INT, (list, sub, unused/4 -- res)) {
+ inst(BINARY_SUBSCR_LIST_INT, (unused/4, list, sub -- res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR);
DEOPT_IF(!PyList_CheckExact(list), BINARY_SUBSCR);
@@ -396,7 +405,7 @@ dummy_func(
Py_DECREF(list);
}
- inst(BINARY_SUBSCR_TUPLE_INT, (tuple, sub, unused/4 -- res)) {
+ inst(BINARY_SUBSCR_TUPLE_INT, (unused/4, tuple, sub -- res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR);
DEOPT_IF(!PyTuple_CheckExact(tuple), BINARY_SUBSCR);
@@ -415,7 +424,7 @@ dummy_func(
Py_DECREF(tuple);
}
- inst(BINARY_SUBSCR_DICT, (dict, sub, unused/4 -- res)) {
+ inst(BINARY_SUBSCR_DICT, (unused/4, dict, sub -- res)) {
assert(cframe.use_tracing == 0);
DEOPT_IF(!PyDict_CheckExact(dict), BINARY_SUBSCR);
STAT_INC(BINARY_SUBSCR, hit);
@@ -426,14 +435,14 @@ dummy_func(
}
Py_DECREF(dict);
Py_DECREF(sub);
- ERROR_IF(1, error);
+ ERROR_IF(true, error);
}
Py_INCREF(res); // Do this before DECREF'ing dict, sub
Py_DECREF(dict);
Py_DECREF(sub);
}
- inst(BINARY_SUBSCR_GETITEM, (container, sub, unused/1, type_version/2, func_version/1 -- unused)) {
+ inst(BINARY_SUBSCR_GETITEM, (unused/1, type_version/2, func_version/1, container, sub -- unused)) {
PyTypeObject *tp = Py_TYPE(container);
DEOPT_IF(tp->tp_version_tag != type_version, BINARY_SUBSCR);
assert(tp->tp_flags & Py_TPFLAGS_HEAPTYPE);
@@ -457,52 +466,48 @@ dummy_func(
DISPATCH_INLINED(new_frame);
}
- // stack effect: (__0 -- )
- inst(LIST_APPEND) {
- PyObject *v = POP();
- PyObject *list = PEEK(oparg);
- if (_PyList_AppendTakeRef((PyListObject *)list, v) < 0)
- goto error;
+ // Alternative: (list, unused[oparg], v -- list, unused[oparg])
+ inst(LIST_APPEND, (v --)) {
+ PyObject *list = PEEK(oparg + 1); // +1 to account for v staying on stack
+ ERROR_IF(_PyList_AppendTakeRef((PyListObject *)list, v) < 0, error);
PREDICT(JUMP_BACKWARD);
}
- // stack effect: (__0 -- )
- inst(SET_ADD) {
- PyObject *v = POP();
- PyObject *set = PEEK(oparg);
- int err;
- err = PySet_Add(set, v);
+ // Alternative: (set, unused[oparg], v -- set, unused[oparg])
+ inst(SET_ADD, (v --)) {
+ PyObject *set = PEEK(oparg + 1); // +1 to account for v staying on stack
+ int err = PySet_Add(set, v);
Py_DECREF(v);
- if (err != 0)
- goto error;
+ ERROR_IF(err, error);
PREDICT(JUMP_BACKWARD);
}
- inst(STORE_SUBSCR, (v, container, sub -- )) {
- _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)next_instr;
- if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
+ family(store_subscr) = {
+ STORE_SUBSCR,
+ STORE_SUBSCR_DICT,
+ STORE_SUBSCR_LIST_INT,
+ };
+
+ inst(STORE_SUBSCR, (counter/1, v, container, sub -- )) {
+ if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
assert(cframe.use_tracing == 0);
next_instr--;
_Py_Specialize_StoreSubscr(container, sub, next_instr);
DISPATCH_SAME_OPARG();
}
STAT_INC(STORE_SUBSCR, deferred);
+ _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)next_instr;
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
/* container[sub] = v */
int err = PyObject_SetItem(container, sub, v);
Py_DECREF(v);
Py_DECREF(container);
Py_DECREF(sub);
- ERROR_IF(err != 0, error);
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_SUBSCR);
+ ERROR_IF(err, error);
}
- // stack effect: (__0, __1, __2 -- )
- inst(STORE_SUBSCR_LIST_INT) {
+ inst(STORE_SUBSCR_LIST_INT, (unused/1, value, list, sub -- )) {
assert(cframe.use_tracing == 0);
- PyObject *sub = TOP();
- PyObject *list = SECOND();
- PyObject *value = THIRD();
DEOPT_IF(!PyLong_CheckExact(sub), STORE_SUBSCR);
DEOPT_IF(!PyList_CheckExact(list), STORE_SUBSCR);
@@ -515,60 +520,42 @@ dummy_func(
PyObject *old_value = PyList_GET_ITEM(list, index);
PyList_SET_ITEM(list, index, value);
- STACK_SHRINK(3);
assert(old_value != NULL);
Py_DECREF(old_value);
_Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free);
Py_DECREF(list);
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_SUBSCR);
}
- // stack effect: (__0, __1, __2 -- )
- inst(STORE_SUBSCR_DICT) {
+ inst(STORE_SUBSCR_DICT, (unused/1, value, dict, sub -- )) {
assert(cframe.use_tracing == 0);
- PyObject *sub = TOP();
- PyObject *dict = SECOND();
- PyObject *value = THIRD();
DEOPT_IF(!PyDict_CheckExact(dict), STORE_SUBSCR);
- STACK_SHRINK(3);
STAT_INC(STORE_SUBSCR, hit);
int err = _PyDict_SetItem_Take2((PyDictObject *)dict, sub, value);
Py_DECREF(dict);
- if (err != 0) {
- goto error;
- }
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_SUBSCR);
+ ERROR_IF(err, error);
}
- // stack effect: (__0, __1 -- )
- inst(DELETE_SUBSCR) {
- PyObject *sub = TOP();
- PyObject *container = SECOND();
- int err;
- STACK_SHRINK(2);
+ inst(DELETE_SUBSCR, (container, sub --)) {
/* del container[sub] */
- err = PyObject_DelItem(container, sub);
+ int err = PyObject_DelItem(container, sub);
Py_DECREF(container);
Py_DECREF(sub);
- if (err != 0)
- goto error;
+ ERROR_IF(err, error);
}
- // stack effect: (__0 -- )
- inst(PRINT_EXPR) {
- PyObject *value = POP();
+ inst(PRINT_EXPR, (value --)) {
PyObject *hook = _PySys_GetAttr(tstate, &_Py_ID(displayhook));
PyObject *res;
+ // Can't use ERROR_IF here.
if (hook == NULL) {
_PyErr_SetString(tstate, PyExc_RuntimeError,
"lost sys.displayhook");
Py_DECREF(value);
- goto error;
+ ERROR_IF(true, error);
}
res = PyObject_CallOneArg(hook, value);
Py_DECREF(value);
- if (res == NULL)
- goto error;
+ ERROR_IF(res == NULL, error);
Py_DECREF(res);
}
@@ -595,11 +582,10 @@ dummy_func(
goto error;
}
- // stack effect: (__0 -- )
- inst(INTERPRETER_EXIT) {
+ inst(INTERPRETER_EXIT, (retval --)) {
assert(frame == &entry_frame);
assert(_PyFrame_IsIncomplete(frame));
- PyObject *retval = POP();
+ STACK_SHRINK(1); // Since we're not going to DISPATCH()
assert(EMPTY());
/* Restore previous cframe and return. */
tstate->cframe = cframe.previous;
@@ -610,62 +596,53 @@ dummy_func(
return retval;
}
- // stack effect: (__0 -- )
- inst(RETURN_VALUE) {
- PyObject *retval = POP();
+ inst(RETURN_VALUE, (retval --)) {
+ STACK_SHRINK(1);
assert(EMPTY());
_PyFrame_SetStackPointer(frame, stack_pointer);
TRACE_FUNCTION_EXIT();
DTRACE_FUNCTION_EXIT();
_Py_LeaveRecursiveCallPy(tstate);
assert(frame != &entry_frame);
- frame = cframe.current_frame = pop_frame(tstate, frame);
+ // GH-99729: We need to unlink the frame *before* clearing it:
+ _PyInterpreterFrame *dying = frame;
+ frame = cframe.current_frame = dying->previous;
+ _PyEvalFrameClearAndPop(tstate, dying);
_PyFrame_StackPush(frame, retval);
goto resume_frame;
}
- // stack effect: ( -- )
- inst(GET_AITER) {
+ inst(GET_AITER, (obj -- iter)) {
unaryfunc getter = NULL;
- PyObject *iter = NULL;
- PyObject *obj = TOP();
PyTypeObject *type = Py_TYPE(obj);
if (type->tp_as_async != NULL) {
getter = type->tp_as_async->am_aiter;
}
- if (getter != NULL) {
- iter = (*getter)(obj);
- Py_DECREF(obj);
- if (iter == NULL) {
- SET_TOP(NULL);
- goto error;
- }
- }
- else {
- SET_TOP(NULL);
+ if (getter == NULL) {
_PyErr_Format(tstate, PyExc_TypeError,
"'async for' requires an object with "
"__aiter__ method, got %.100s",
type->tp_name);
Py_DECREF(obj);
- goto error;
+ ERROR_IF(true, error);
}
+ iter = (*getter)(obj);
+ Py_DECREF(obj);
+ ERROR_IF(iter == NULL, error);
+
if (Py_TYPE(iter)->tp_as_async == NULL ||
Py_TYPE(iter)->tp_as_async->am_anext == NULL) {
- SET_TOP(NULL);
_PyErr_Format(tstate, PyExc_TypeError,
"'async for' received an object from __aiter__ "
"that does not implement __anext__: %.100s",
Py_TYPE(iter)->tp_name);
Py_DECREF(iter);
- goto error;
+ ERROR_IF(true, error);
}
-
- SET_TOP(iter);
}
// stack effect: ( -- __0)
@@ -1116,53 +1093,43 @@ dummy_func(
Py_DECREF(seq);
}
- // stack effect: (__0, __1 -- )
- inst(STORE_ATTR) {
- _PyAttrCache *cache = (_PyAttrCache *)next_instr;
- if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
+ family(store_attr) = {
+ STORE_ATTR,
+ STORE_ATTR_INSTANCE_VALUE,
+ STORE_ATTR_SLOT,
+ STORE_ATTR_WITH_HINT,
+ };
+
+ inst(STORE_ATTR, (counter/1, unused/3, v, owner --)) {
+ if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
assert(cframe.use_tracing == 0);
- PyObject *owner = TOP();
PyObject *name = GETITEM(names, oparg);
next_instr--;
_Py_Specialize_StoreAttr(owner, next_instr, name);
DISPATCH_SAME_OPARG();
}
STAT_INC(STORE_ATTR, deferred);
+ _PyAttrCache *cache = (_PyAttrCache *)next_instr;
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
PyObject *name = GETITEM(names, oparg);
- PyObject *owner = TOP();
- PyObject *v = SECOND();
- int err;
- STACK_SHRINK(2);
- err = PyObject_SetAttr(owner, name, v);
+ int err = PyObject_SetAttr(owner, name, v);
Py_DECREF(v);
Py_DECREF(owner);
- if (err != 0) {
- goto error;
- }
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR);
+ ERROR_IF(err, error);
}
- // stack effect: (__0 -- )
- inst(DELETE_ATTR) {
+ inst(DELETE_ATTR, (owner --)) {
PyObject *name = GETITEM(names, oparg);
- PyObject *owner = POP();
- int err;
- err = PyObject_SetAttr(owner, name, (PyObject *)NULL);
+ int err = PyObject_SetAttr(owner, name, (PyObject *)NULL);
Py_DECREF(owner);
- if (err != 0)
- goto error;
+ ERROR_IF(err, error);
}
- // stack effect: (__0 -- )
- inst(STORE_GLOBAL) {
+ inst(STORE_GLOBAL, (v --)) {
PyObject *name = GETITEM(names, oparg);
- PyObject *v = POP();
- int err;
- err = PyDict_SetItem(GLOBALS(), name, v);
+ int err = PyDict_SetItem(GLOBALS(), name, v);
Py_DECREF(v);
- if (err != 0)
- goto error;
+ ERROR_IF(err, error);
}
inst(DELETE_GLOBAL, (--)) {
@@ -1951,22 +1918,15 @@ dummy_func(
DISPATCH_INLINED(new_frame);
}
- // stack effect: (__0, __1 -- )
- inst(STORE_ATTR_INSTANCE_VALUE) {
+ inst(STORE_ATTR_INSTANCE_VALUE, (unused/1, type_version/2, index/1, value, owner --)) {
assert(cframe.use_tracing == 0);
- PyObject *owner = TOP();
PyTypeObject *tp = Py_TYPE(owner);
- _PyAttrCache *cache = (_PyAttrCache *)next_instr;
- uint32_t type_version = read_u32(cache->version);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT);
PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner);
DEOPT_IF(!_PyDictOrValues_IsValues(dorv), STORE_ATTR);
STAT_INC(STORE_ATTR, hit);
- Py_ssize_t index = cache->index;
- STACK_SHRINK(1);
- PyObject *value = POP();
PyDictValues *values = _PyDictOrValues_GetValues(dorv);
PyObject *old_value = values->values[index];
values->values[index] = value;
@@ -1977,16 +1937,11 @@ dummy_func(
Py_DECREF(old_value);
}
Py_DECREF(owner);
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR);
}
- // stack effect: (__0, __1 -- )
- inst(STORE_ATTR_WITH_HINT) {
+ inst(STORE_ATTR_WITH_HINT, (unused/1, type_version/2, hint/1, value, owner --)) {
assert(cframe.use_tracing == 0);
- PyObject *owner = TOP();
PyTypeObject *tp = Py_TYPE(owner);
- _PyAttrCache *cache = (_PyAttrCache *)next_instr;
- uint32_t type_version = read_u32(cache->version);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT);
@@ -1996,17 +1951,14 @@ dummy_func(
DEOPT_IF(dict == NULL, STORE_ATTR);
assert(PyDict_CheckExact((PyObject *)dict));
PyObject *name = GETITEM(names, oparg);
- uint16_t hint = cache->index;
DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries, STORE_ATTR);
- PyObject *value, *old_value;
+ PyObject *old_value;
uint64_t new_version;
if (DK_IS_UNICODE(dict->ma_keys)) {
PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint;
DEOPT_IF(ep->me_key != name, STORE_ATTR);
old_value = ep->me_value;
DEOPT_IF(old_value == NULL, STORE_ATTR);
- STACK_SHRINK(1);
- value = POP();
new_version = _PyDict_NotifyEvent(PyDict_EVENT_MODIFIED, dict, name, value);
ep->me_value = value;
}
@@ -2015,8 +1967,6 @@ dummy_func(
DEOPT_IF(ep->me_key != name, STORE_ATTR);
old_value = ep->me_value;
DEOPT_IF(old_value == NULL, STORE_ATTR);
- STACK_SHRINK(1);
- value = POP();
new_version = _PyDict_NotifyEvent(PyDict_EVENT_MODIFIED, dict, name, value);
ep->me_value = value;
}
@@ -2029,36 +1979,32 @@ dummy_func(
/* PEP 509 */
dict->ma_version_tag = new_version;
Py_DECREF(owner);
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR);
}
- // stack effect: (__0, __1 -- )
- inst(STORE_ATTR_SLOT) {
+ inst(STORE_ATTR_SLOT, (unused/1, type_version/2, index/1, value, owner --)) {
assert(cframe.use_tracing == 0);
- PyObject *owner = TOP();
PyTypeObject *tp = Py_TYPE(owner);
- _PyAttrCache *cache = (_PyAttrCache *)next_instr;
- uint32_t type_version = read_u32(cache->version);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
- char *addr = (char *)owner + cache->index;
+ char *addr = (char *)owner + index;
STAT_INC(STORE_ATTR, hit);
- STACK_SHRINK(1);
- PyObject *value = POP();
PyObject *old_value = *(PyObject **)addr;
*(PyObject **)addr = value;
Py_XDECREF(old_value);
Py_DECREF(owner);
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR);
}
- // stack effect: (__0 -- )
- inst(COMPARE_OP) {
+ family(compare_op) = {
+ COMPARE_OP,
+ _COMPARE_OP_FLOAT,
+ _COMPARE_OP_INT,
+ _COMPARE_OP_STR,
+ };
+
+ inst(COMPARE_OP, (unused/2, left, right -- res)) {
_PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
- PyObject *right = TOP();
- PyObject *left = SECOND();
next_instr--;
_Py_Specialize_CompareOp(left, right, next_instr, oparg);
DISPATCH_SAME_OPARG();
@@ -2066,57 +2012,43 @@ dummy_func(
STAT_INC(COMPARE_OP, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
assert(oparg <= Py_GE);
- PyObject *right = POP();
- PyObject *left = TOP();
- PyObject *res = PyObject_RichCompare(left, right, oparg);
- SET_TOP(res);
+ res = PyObject_RichCompare(left, right, oparg);
Py_DECREF(left);
Py_DECREF(right);
- if (res == NULL) {
- goto error;
- }
- JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP);
+ ERROR_IF(res == NULL, error);
}
- // stack effect: (__0 -- )
- inst(COMPARE_OP_FLOAT_JUMP) {
+ // The result is an int disguised as an object pointer.
+ op(_COMPARE_OP_FLOAT, (unused/1, when_to_jump_mask/1, left, right -- jump: size_t)) {
assert(cframe.use_tracing == 0);
// Combined: COMPARE_OP (float ? float) + POP_JUMP_IF_(true/false)
- _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr;
- int when_to_jump_mask = cache->mask;
- PyObject *right = TOP();
- PyObject *left = SECOND();
DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_OP);
DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_OP);
double dleft = PyFloat_AS_DOUBLE(left);
double dright = PyFloat_AS_DOUBLE(right);
- int sign = (dleft > dright) - (dleft < dright);
+ // 1 if <, 2 if ==, 4 if >; this matches when_to_jump_mask
+ int sign_ish = 2*(dleft > dright) + 2 - (dleft < dright);
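+ // For example: dleft < dright gives 0 + 2 - 1 == 1, equality gives 2,
+ // and dleft > dright gives 2 + 2 - 0 == 4.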
DEOPT_IF(isnan(dleft), COMPARE_OP);
DEOPT_IF(isnan(dright), COMPARE_OP);
STAT_INC(COMPARE_OP, hit);
- JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP);
- NEXTOPARG();
- STACK_SHRINK(2);
_Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc);
_Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc);
+ jump = sign_ish & when_to_jump_mask;
+ }
+ // The input is an int disguised as an object pointer!
+ op(_JUMP_IF, (jump: size_t --)) {
assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE);
- int jump = (1 << (sign + 1)) & when_to_jump_mask;
- if (!jump) {
- next_instr++;
- }
- else {
- JUMPBY(1 + oparg);
+ if (jump) {
+ JUMPBY(oparg);
}
}
+ // We're praying that the compiler optimizes the flags manipulations.
+ super(COMPARE_OP_FLOAT_JUMP) = _COMPARE_OP_FLOAT + _JUMP_IF;
- // stack effect: (__0 -- )
- inst(COMPARE_OP_INT_JUMP) {
+ // Similar to COMPARE_OP_FLOAT
+ op(_COMPARE_OP_INT, (unused/1, when_to_jump_mask/1, left, right -- jump: size_t)) {
assert(cframe.use_tracing == 0);
// Combined: COMPARE_OP (int ? int) + POP_JUMP_IF_(true/false)
- _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr;
- int when_to_jump_mask = cache->mask;
- PyObject *right = TOP();
- PyObject *left = SECOND();
DEOPT_IF(!PyLong_CheckExact(left), COMPARE_OP);
DEOPT_IF(!PyLong_CheckExact(right), COMPARE_OP);
DEOPT_IF((size_t)(Py_SIZE(left) + 1) > 2, COMPARE_OP);
@@ -2125,51 +2057,30 @@ dummy_func(
assert(Py_ABS(Py_SIZE(left)) <= 1 && Py_ABS(Py_SIZE(right)) <= 1);
Py_ssize_t ileft = Py_SIZE(left) * ((PyLongObject *)left)->ob_digit[0];
Py_ssize_t iright = Py_SIZE(right) * ((PyLongObject *)right)->ob_digit[0];
- int sign = (ileft > iright) - (ileft < iright);
- JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP);
- NEXTOPARG();
- STACK_SHRINK(2);
+ // 1 if <, 2 if ==, 4 if >; this matches when_to_jump_mask
+ int sign_ish = 2*(ileft > iright) + 2 - (ileft < iright);
_Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free);
_Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free);
- assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE);
- int jump = (1 << (sign + 1)) & when_to_jump_mask;
- if (!jump) {
- next_instr++;
- }
- else {
- JUMPBY(1 + oparg);
- }
+ jump = sign_ish & when_to_jump_mask;
}
+ super(COMPARE_OP_INT_JUMP) = _COMPARE_OP_INT + _JUMP_IF;
- // stack effect: (__0 -- )
- inst(COMPARE_OP_STR_JUMP) {
+ // Similar to COMPARE_OP_FLOAT, but for ==, != only
+ op(_COMPARE_OP_STR, (unused/1, invert/1, left, right -- jump: size_t)) {
assert(cframe.use_tracing == 0);
// Combined: COMPARE_OP (str == str or str != str) + POP_JUMP_IF_(true/false)
- _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr;
- int invert = cache->mask;
- PyObject *right = TOP();
- PyObject *left = SECOND();
DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_OP);
DEOPT_IF(!PyUnicode_CheckExact(right), COMPARE_OP);
STAT_INC(COMPARE_OP, hit);
int res = _PyUnicode_Equal(left, right);
assert(oparg == Py_EQ || oparg == Py_NE);
- JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP);
- NEXTOPARG();
- assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE);
- STACK_SHRINK(2);
_Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc);
_Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc);
assert(res == 0 || res == 1);
assert(invert == 0 || invert == 1);
- int jump = res ^ invert;
- if (!jump) {
- next_instr++;
- }
- else {
- JUMPBY(1 + oparg);
- }
+ jump = res ^ invert;
}
+ super(COMPARE_OP_STR_JUMP) = _COMPARE_OP_STR + _JUMP_IF;
// stack effect: (__0 -- )
inst(IS_OP) {
@@ -2612,6 +2523,29 @@ dummy_func(
end_for_iter_list:
}
+ // stack effect: ( -- __0)
+ inst(FOR_ITER_TUPLE) {
+ assert(cframe.use_tracing == 0);
+ _PyTupleIterObject *it = (_PyTupleIterObject *)TOP();
+ DEOPT_IF(Py_TYPE(it) != &PyTupleIter_Type, FOR_ITER);
+ STAT_INC(FOR_ITER, hit);
+ PyTupleObject *seq = it->it_seq;
+ if (seq) {
+ if (it->it_index < PyTuple_GET_SIZE(seq)) {
+ PyObject *next = PyTuple_GET_ITEM(seq, it->it_index++);
+ PUSH(Py_NewRef(next));
+ JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER);
+ goto end_for_iter_tuple; // End of this instruction
+ }
+ it->it_seq = NULL;
+ Py_DECREF(seq);
+ }
+ STACK_SHRINK(1);
+ Py_DECREF(it);
+ JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1);
+ end_for_iter_tuple:
+ }
+
// stack effect: ( -- __0)
inst(FOR_ITER_RANGE) {
assert(cframe.use_tracing == 0);
@@ -3518,6 +3452,7 @@ dummy_func(
func->func_defaults = POP();
}
+ func->func_version = ((PyCodeObject *)codeobj)->co_version;
PUSH((PyObject *)func);
}
@@ -3630,7 +3565,7 @@ dummy_func(
PUSH(Py_NewRef(peek));
}
- inst(BINARY_OP, (lhs, rhs, unused/1 -- res)) {
+ inst(BINARY_OP, (unused/1, lhs, rhs -- res)) {
_PyBinaryOpCache *cache = (_PyBinaryOpCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
@@ -3688,9 +3623,6 @@ dummy_func(
// Future families go below this point //
-family(binary_subscr) = {
- BINARY_SUBSCR, BINARY_SUBSCR_DICT,
- BINARY_SUBSCR_GETITEM, BINARY_SUBSCR_LIST_INT, BINARY_SUBSCR_TUPLE_INT };
family(call) = {
CALL, CALL_PY_EXACT_ARGS,
CALL_PY_WITH_DEFAULTS, CALL_BOUND_METHOD_EXACT_ARGS, CALL_BUILTIN_CLASS,
@@ -3699,9 +3631,6 @@ family(call) = {
CALL_NO_KW_LIST_APPEND, CALL_NO_KW_METHOD_DESCRIPTOR_FAST, CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS,
CALL_NO_KW_METHOD_DESCRIPTOR_O, CALL_NO_KW_STR_1, CALL_NO_KW_TUPLE_1,
CALL_NO_KW_TYPE_1 };
-family(compare_op) = {
- COMPARE_OP, COMPARE_OP_FLOAT_JUMP,
- COMPARE_OP_INT_JUMP, COMPARE_OP_STR_JUMP };
family(for_iter) = {
FOR_ITER, FOR_ITER_LIST,
FOR_ITER_RANGE };
@@ -3716,13 +3645,7 @@ family(load_fast) = { LOAD_FAST, LOAD_FAST__LOAD_CONST, LOAD_FAST__LOAD_FAST };
family(load_global) = {
LOAD_GLOBAL, LOAD_GLOBAL_BUILTIN,
LOAD_GLOBAL_MODULE };
-family(store_attr) = {
- STORE_ATTR, STORE_ATTR_INSTANCE_VALUE,
- STORE_ATTR_SLOT, STORE_ATTR_WITH_HINT };
family(store_fast) = { STORE_FAST, STORE_FAST__LOAD_FAST, STORE_FAST__STORE_FAST };
-family(store_subscr) = {
- STORE_SUBSCR, STORE_SUBSCR_DICT,
- STORE_SUBSCR_LIST_INT };
family(unpack_sequence) = {
UNPACK_SEQUENCE, UNPACK_SEQUENCE_LIST,
UNPACK_SEQUENCE_TUPLE, UNPACK_SEQUENCE_TWO_TUPLE };
diff --git a/Python/ceval.c b/Python/ceval.c
index 80bfa21ad0b6f0..9e4179e56071a0 100644
--- a/Python/ceval.c
+++ b/Python/ceval.c
@@ -1009,14 +1009,6 @@ trace_function_exit(PyThreadState *tstate, _PyInterpreterFrame *frame, PyObject
return 0;
}
-static _PyInterpreterFrame *
-pop_frame(PyThreadState *tstate, _PyInterpreterFrame *frame)
-{
- _PyInterpreterFrame *prev_frame = frame->previous;
- _PyEvalFrameClearAndPop(tstate, frame);
- return prev_frame;
-}
-
int _Py_CheckRecursiveCallPy(
PyThreadState *tstate)
@@ -1432,7 +1424,10 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int
assert(_PyErr_Occurred(tstate));
_Py_LeaveRecursiveCallPy(tstate);
assert(frame != &entry_frame);
- frame = cframe.current_frame = pop_frame(tstate, frame);
+ // GH-99729: We need to unlink the frame *before* clearing it:
+ _PyInterpreterFrame *dying = frame;
+ frame = cframe.current_frame = dying->previous;
+ _PyEvalFrameClearAndPop(tstate, dying);
if (frame == &entry_frame) {
/* Restore previous cframe and exit */
tstate->cframe = cframe.previous;
diff --git a/Python/compile.c b/Python/compile.c
index d6ed6941ac1ecd..813e0d5503b4b0 100644
--- a/Python/compile.c
+++ b/Python/compile.c
@@ -55,6 +55,16 @@
*/
#define STACK_USE_GUIDELINE 30
+#undef SUCCESS
+#undef ERROR
+#define SUCCESS 0
+#define ERROR -1
+
+#define RETURN_IF_ERROR(X) \
+ if ((X) == -1) { \
+ return ERROR; \
+ }
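+/* Propagates a -1 failure from a helper out of a function that returns SUCCESS/ERROR. */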
+
/* If we exceed this limit, it should
* be considered a compiler bug.
* Currently it should be impossible
@@ -498,7 +508,7 @@ static int compiler_annassign(struct compiler *, stmt_ty);
static int compiler_subscript(struct compiler *, expr_ty);
static int compiler_slice(struct compiler *, expr_ty);
-static int are_all_items_const(asdl_expr_seq *, Py_ssize_t, Py_ssize_t);
+static bool are_all_items_const(asdl_expr_seq *, Py_ssize_t, Py_ssize_t);
static int compiler_with(struct compiler *, stmt_ty, int);
@@ -610,18 +620,18 @@ compiler_setup(struct compiler *c, mod_ty mod, PyObject *filename,
{
c->c_const_cache = PyDict_New();
if (!c->c_const_cache) {
- return 0;
+ return ERROR;
}
c->c_stack = PyList_New(0);
if (!c->c_stack) {
- return 0;
+ return ERROR;
}
c->c_filename = Py_NewRef(filename);
c->c_arena = arena;
if (!_PyFuture_FromAST(mod, filename, &c->c_future)) {
- return 0;
+ return ERROR;
}
int merged = c->c_future.ff_features | flags.cf_flags;
c->c_future.ff_features = merged;
@@ -635,16 +645,16 @@ compiler_setup(struct compiler *c, mod_ty mod, PyObject *filename,
state.ff_features = merged;
if (!_PyAST_Optimize(mod, arena, &state)) {
- return 0;
+ return ERROR;
}
c->c_st = _PySymtable_Build(mod, filename, &c->c_future);
if (c->c_st == NULL) {
if (!PyErr_Occurred()) {
PyErr_SetString(PyExc_SystemError, "no symtable");
}
- return 0;
+ return ERROR;
}
- return 1;
+ return SUCCESS;
}
static struct compiler*
@@ -656,7 +666,7 @@ new_compiler(mod_ty mod, PyObject *filename, PyCompilerFlags *pflags,
if (c == NULL) {
return NULL;
}
- if (!compiler_setup(c, mod, filename, flags, optimize, arena)) {
+ if (compiler_setup(c, mod, filename, flags, optimize, arena) < 0) {
compiler_free(c);
return NULL;
}
@@ -800,11 +810,12 @@ cfg_builder_init(cfg_builder *g)
{
g->g_block_list = NULL;
basicblock *block = cfg_builder_new_block(g);
- if (block == NULL)
- return 0;
+ if (block == NULL) {
+ return ERROR;
+ }
g->g_curblock = g->g_entryblock = block;
g->g_current_label = NO_LABEL;
- return 1;
+ return SUCCESS;
}
static void
@@ -862,8 +873,10 @@ compiler_set_qualname(struct compiler *c)
|| u->u_scope_type == COMPILER_SCOPE_CLASS) {
assert(u->u_name);
mangled = _Py_Mangle(parent->u_private, u->u_name);
- if (!mangled)
- return 0;
+ if (!mangled) {
+ return ERROR;
+ }
+
scope = _PyST_GetScope(parent->u_ste, mangled);
Py_DECREF(mangled);
assert(scope != GLOBAL_IMPLICIT);
@@ -879,8 +892,9 @@ compiler_set_qualname(struct compiler *c)
_Py_DECLARE_STR(dot_locals, ".");
base = PyUnicode_Concat(parent->u_qualname,
&_Py_STR(dot_locals));
- if (base == NULL)
- return 0;
+ if (base == NULL) {
+ return ERROR;
+ }
}
else {
base = Py_NewRef(parent->u_qualname);
@@ -892,18 +906,20 @@ compiler_set_qualname(struct compiler *c)
_Py_DECLARE_STR(dot, ".");
name = PyUnicode_Concat(base, &_Py_STR(dot));
Py_DECREF(base);
- if (name == NULL)
- return 0;
+ if (name == NULL) {
+ return ERROR;
+ }
PyUnicode_Append(&name, u->u_name);
- if (name == NULL)
- return 0;
+ if (name == NULL) {
+ return ERROR;
+ }
}
else {
name = Py_NewRef(u->u_name);
}
u->u_qualname = name;
- return 1;
+ return SUCCESS;
}
static jump_target_label
@@ -1304,10 +1320,6 @@ PyCompile_OpcodeStackEffect(int opcode, int oparg)
return stack_effect(opcode, oparg, -1);
}
-/* Add an opcode with no argument.
- Returns 0 on failure, 1 on success.
-*/
-
static int
basicblock_addop(basicblock *b, int opcode, int oparg, location loc)
{
@@ -1318,7 +1330,7 @@ basicblock_addop(basicblock *b, int opcode, int oparg, location loc)
int off = basicblock_next_instr(b);
if (off < 0) {
- return 0;
+ return ERROR;
}
struct instr *i = &b->b_instr[off];
i->i_opcode = opcode;
@@ -1326,7 +1338,7 @@ basicblock_addop(basicblock *b, int opcode, int oparg, location loc)
i->i_target = NULL;
i->i_loc = loc;
- return 1;
+ return SUCCESS;
}
static bool
@@ -1522,8 +1534,9 @@ static int
compiler_addop_load_const(struct compiler *c, location loc, PyObject *o)
{
Py_ssize_t arg = compiler_add_const(c, o);
- if (arg < 0)
- return 0;
+ if (arg < 0) {
+ return ERROR;
+ }
return cfg_builder_addop_i(CFG_BUILDER(c), LOAD_CONST, arg, loc);
}
@@ -1532,8 +1545,9 @@ compiler_addop_o(struct compiler *c, location loc,
int opcode, PyObject *dict, PyObject *o)
{
Py_ssize_t arg = dict_add_o(dict, o);
- if (arg < 0)
- return 0;
+ if (arg < 0) {
+ return ERROR;
+ }
return cfg_builder_addop_i(CFG_BUILDER(c), opcode, arg, loc);
}
@@ -1544,12 +1558,14 @@ compiler_addop_name(struct compiler *c, location loc,
Py_ssize_t arg;
PyObject *mangled = _Py_Mangle(c->u->u_private, o);
- if (!mangled)
- return 0;
+ if (!mangled) {
+ return ERROR;
+ }
arg = dict_add_o(dict, mangled);
Py_DECREF(mangled);
- if (arg < 0)
- return 0;
+ if (arg < 0) {
+ return ERROR;
+ }
if (opcode == LOAD_ATTR) {
arg <<= 1;
}
@@ -1561,9 +1577,7 @@ compiler_addop_name(struct compiler *c, location loc,
return cfg_builder_addop_i(CFG_BUILDER(c), opcode, arg, loc);
}
-/* Add an opcode with an integer argument.
- Returns 0 on failure, 1 on success.
-*/
+/* Add an opcode with an integer argument */
static int
cfg_builder_addop_i(cfg_builder *g, int opcode, Py_ssize_t oparg, location loc)
{
@@ -1588,94 +1602,79 @@ cfg_builder_addop_j(cfg_builder *g, location loc,
return cfg_builder_addop(g, opcode, target.id, loc);
}
-
-#define ADDOP(C, LOC, OP) { \
- if (!cfg_builder_addop_noarg(CFG_BUILDER(C), (OP), (LOC))) \
- return 0; \
-}
+#define ADDOP(C, LOC, OP) \
+ RETURN_IF_ERROR(cfg_builder_addop_noarg(CFG_BUILDER(C), (OP), (LOC)))
#define ADDOP_IN_SCOPE(C, LOC, OP) { \
- if (!cfg_builder_addop_noarg(CFG_BUILDER(C), (OP), (LOC))) { \
+ if (cfg_builder_addop_noarg(CFG_BUILDER(C), (OP), (LOC)) < 0) { \
compiler_exit_scope(c); \
- return 0; \
+ return -1; \
} \
}
-#define ADDOP_LOAD_CONST(C, LOC, O) { \
- if (!compiler_addop_load_const((C), (LOC), (O))) \
- return 0; \
-}
+#define ADDOP_LOAD_CONST(C, LOC, O) \
+ RETURN_IF_ERROR(compiler_addop_load_const((C), (LOC), (O)))
/* Same as ADDOP_LOAD_CONST, but steals a reference. */
#define ADDOP_LOAD_CONST_NEW(C, LOC, O) { \
PyObject *__new_const = (O); \
if (__new_const == NULL) { \
- return 0; \
+ return ERROR; \
} \
- if (!compiler_addop_load_const((C), (LOC), __new_const)) { \
+ if (compiler_addop_load_const((C), (LOC), __new_const) < 0) { \
Py_DECREF(__new_const); \
- return 0; \
+ return ERROR; \
} \
Py_DECREF(__new_const); \
}
#define ADDOP_N(C, LOC, OP, O, TYPE) { \
assert(!HAS_CONST(OP)); /* use ADDOP_LOAD_CONST_NEW */ \
- if (!compiler_addop_o((C), (LOC), (OP), (C)->u->u_ ## TYPE, (O))) { \
+ if (compiler_addop_o((C), (LOC), (OP), (C)->u->u_ ## TYPE, (O)) < 0) { \
Py_DECREF((O)); \
- return 0; \
+ return ERROR; \
} \
Py_DECREF((O)); \
}
-#define ADDOP_NAME(C, LOC, OP, O, TYPE) { \
- if (!compiler_addop_name((C), (LOC), (OP), (C)->u->u_ ## TYPE, (O))) \
- return 0; \
-}
+#define ADDOP_NAME(C, LOC, OP, O, TYPE) \
+ RETURN_IF_ERROR(compiler_addop_name((C), (LOC), (OP), (C)->u->u_ ## TYPE, (O)))
-#define ADDOP_I(C, LOC, OP, O) { \
- if (!cfg_builder_addop_i(CFG_BUILDER(C), (OP), (O), (LOC))) \
- return 0; \
-}
+#define ADDOP_I(C, LOC, OP, O) \
+ RETURN_IF_ERROR(cfg_builder_addop_i(CFG_BUILDER(C), (OP), (O), (LOC)))
-#define ADDOP_JUMP(C, LOC, OP, O) { \
- if (!cfg_builder_addop_j(CFG_BUILDER(C), (LOC), (OP), (O))) \
- return 0; \
-}
+#define ADDOP_JUMP(C, LOC, OP, O) \
+ RETURN_IF_ERROR(cfg_builder_addop_j(CFG_BUILDER(C), (LOC), (OP), (O)))
-#define ADDOP_COMPARE(C, LOC, CMP) { \
- if (!compiler_addcompare((C), (LOC), (cmpop_ty)(CMP))) \
- return 0; \
-}
+#define ADDOP_COMPARE(C, LOC, CMP) \
+ RETURN_IF_ERROR(compiler_addcompare((C), (LOC), (cmpop_ty)(CMP)))
#define ADDOP_BINARY(C, LOC, BINOP) \
- RETURN_IF_FALSE(addop_binary((C), (LOC), (BINOP), false))
+ RETURN_IF_ERROR(addop_binary((C), (LOC), (BINOP), false))
#define ADDOP_INPLACE(C, LOC, BINOP) \
- RETURN_IF_FALSE(addop_binary((C), (LOC), (BINOP), true))
+ RETURN_IF_ERROR(addop_binary((C), (LOC), (BINOP), true))
#define ADD_YIELD_FROM(C, LOC, await) \
- RETURN_IF_FALSE(compiler_add_yield_from((C), (LOC), (await)))
+ RETURN_IF_ERROR(compiler_add_yield_from((C), (LOC), (await)))
#define POP_EXCEPT_AND_RERAISE(C, LOC) \
- RETURN_IF_FALSE(compiler_pop_except_and_reraise((C), (LOC)))
+ RETURN_IF_ERROR(compiler_pop_except_and_reraise((C), (LOC)))
#define ADDOP_YIELD(C, LOC) \
- RETURN_IF_FALSE(addop_yield((C), (LOC)))
+ RETURN_IF_ERROR(addop_yield((C), (LOC)))
/* VISIT and VISIT_SEQ takes an ASDL type as their second argument. They use
the ASDL name to synthesize the name of the C type and the visit function.
*/
-#define VISIT(C, TYPE, V) {\
- if (!compiler_visit_ ## TYPE((C), (V))) \
- return 0; \
-}
+#define VISIT(C, TYPE, V) \
+    RETURN_IF_ERROR(compiler_visit_ ## TYPE((C), (V)))
#define VISIT_IN_SCOPE(C, TYPE, V) {\
- if (!compiler_visit_ ## TYPE((C), (V))) { \
+ if (compiler_visit_ ## TYPE((C), (V)) < 0) { \
compiler_exit_scope(c); \
- return 0; \
+ return ERROR; \
} \
}
@@ -1684,8 +1683,8 @@ cfg_builder_addop_j(cfg_builder *g, location loc,
asdl_ ## TYPE ## _seq *seq = (SEQ); /* avoid variable capture */ \
for (_i = 0; _i < asdl_seq_LEN(seq); _i++) { \
TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, _i); \
- if (!compiler_visit_ ## TYPE((C), elt)) \
- return 0; \
+ if (compiler_visit_ ## TYPE((C), elt) < 0) \
+ return ERROR; \
} \
}
@@ -1694,17 +1693,13 @@ cfg_builder_addop_j(cfg_builder *g, location loc,
asdl_ ## TYPE ## _seq *seq = (SEQ); /* avoid variable capture */ \
for (_i = 0; _i < asdl_seq_LEN(seq); _i++) { \
TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, _i); \
- if (!compiler_visit_ ## TYPE((C), elt)) { \
+ if (compiler_visit_ ## TYPE((C), elt) < 0) { \
compiler_exit_scope(c); \
- return 0; \
+ return ERROR; \
} \
} \
}
-#define RETURN_IF_FALSE(X) \
- if (!(X)) { \
- return 0; \
- }
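The converted macros above rely on the compiler's shared status helpers, which are introduced elsewhere in this change and are not visible in these hunks. A minimal sketch of what they are assumed to look like, following the same 0-for-success, negative-for-failure convention the new call sites test against::

    /* Sketch only: the real definitions are not part of the hunks shown here. */
    #define SUCCESS 0
    #define ERROR  -1

    /* Stands in for the removed RETURN_IF_FALSE: failure is now signalled by
       a negative return value rather than by a zero/false result. */
    #define RETURN_IF_ERROR(X)  \
        if ((X) < 0) {          \
            return ERROR;       \
        }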
static int
compiler_enter_scope(struct compiler *c, identifier name,
@@ -1718,7 +1713,7 @@ compiler_enter_scope(struct compiler *c, identifier name,
struct compiler_unit));
if (!u) {
PyErr_NoMemory();
- return 0;
+ return ERROR;
}
u->u_scope_type = scope_type;
u->u_argcount = 0;
@@ -1727,14 +1722,14 @@ compiler_enter_scope(struct compiler *c, identifier name,
u->u_ste = PySymtable_Lookup(c->c_st, key);
if (!u->u_ste) {
compiler_unit_free(u);
- return 0;
+ return ERROR;
}
u->u_name = Py_NewRef(name);
u->u_varnames = list2dict(u->u_ste->ste_varnames);
u->u_cellvars = dictbytype(u->u_ste->ste_symbols, CELL, 0, 0);
if (!u->u_varnames || !u->u_cellvars) {
compiler_unit_free(u);
- return 0;
+ return ERROR;
}
if (u->u_ste->ste_needs_class_closure) {
/* Cook up an implicit __class__ cell. */
@@ -1745,7 +1740,7 @@ compiler_enter_scope(struct compiler *c, identifier name,
_PyLong_GetZero());
if (res < 0) {
compiler_unit_free(u);
- return 0;
+ return ERROR;
}
}
@@ -1753,7 +1748,7 @@ compiler_enter_scope(struct compiler *c, identifier name,
PyDict_GET_SIZE(u->u_cellvars));
if (!u->u_freevars) {
compiler_unit_free(u);
- return 0;
+ return ERROR;
}
u->u_nfblocks = 0;
@@ -1761,12 +1756,12 @@ compiler_enter_scope(struct compiler *c, identifier name,
u->u_consts = PyDict_New();
if (!u->u_consts) {
compiler_unit_free(u);
- return 0;
+ return ERROR;
}
u->u_names = PyDict_New();
if (!u->u_names) {
compiler_unit_free(u);
- return 0;
+ return ERROR;
}
u->u_private = NULL;
@@ -1777,7 +1772,7 @@ compiler_enter_scope(struct compiler *c, identifier name,
if (!capsule || PyList_Append(c->c_stack, capsule) < 0) {
Py_XDECREF(capsule);
compiler_unit_free(u);
- return 0;
+ return ERROR;
}
Py_DECREF(capsule);
u->u_private = Py_XNewRef(c->u->u_private);
@@ -1787,23 +1782,20 @@ compiler_enter_scope(struct compiler *c, identifier name,
c->c_nestlevel++;
cfg_builder *g = CFG_BUILDER(c);
- if (!cfg_builder_init(g)) {
- return 0;
- }
+ RETURN_IF_ERROR(cfg_builder_init(g));
if (u->u_scope_type == COMPILER_SCOPE_MODULE) {
loc.lineno = 0;
}
else {
- if (!compiler_set_qualname(c))
- return 0;
+ RETURN_IF_ERROR(compiler_set_qualname(c));
}
ADDOP_I(c, loc, RESUME, 0);
if (u->u_scope_type == COMPILER_SCOPE_MODULE) {
loc.lineno = -1;
}
- return 1;
+ return SUCCESS;
}
static void
@@ -1837,7 +1829,7 @@ compiler_exit_scope(struct compiler *c)
/* Search if variable annotations are present statically in a block. */
-static int
+static bool
find_ann(asdl_stmt_seq *stmts)
{
int i, j, res = 0;
@@ -1847,7 +1839,7 @@ find_ann(asdl_stmt_seq *stmts)
st = (stmt_ty)asdl_seq_GET(stmts, i);
switch (st->kind) {
case AnnAssign_kind:
- return 1;
+ return true;
case For_kind:
res = find_ann(st->v.For.body) ||
find_ann(st->v.For.orelse);
@@ -1875,7 +1867,7 @@ find_ann(asdl_stmt_seq *stmts)
excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET(
st->v.Try.handlers, j);
if (find_ann(handler->v.ExceptHandler.body)) {
- return 1;
+ return true;
}
}
res = find_ann(st->v.Try.body) ||
@@ -1887,7 +1879,7 @@ find_ann(asdl_stmt_seq *stmts)
excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET(
st->v.TryStar.handlers, j);
if (find_ann(handler->v.ExceptHandler.body)) {
- return 1;
+ return true;
}
}
res = find_ann(st->v.TryStar.body) ||
@@ -1895,7 +1887,7 @@ find_ann(asdl_stmt_seq *stmts)
find_ann(st->v.TryStar.orelse);
break;
default:
- res = 0;
+ res = false;
}
if (res) {
break;
@@ -1922,7 +1914,7 @@ compiler_push_fblock(struct compiler *c, location loc,
f->fb_block = block_label;
f->fb_exit = exit;
f->fb_datum = datum;
- return 1;
+ return SUCCESS;
}
static void
@@ -1942,7 +1934,7 @@ compiler_call_exit_with_nones(struct compiler *c, location loc)
ADDOP_LOAD_CONST(c, loc, Py_None);
ADDOP_LOAD_CONST(c, loc, Py_None);
ADDOP_I(c, loc, CALL, 2);
- return 1;
+ return SUCCESS;
}
static int
@@ -1966,7 +1958,7 @@ compiler_add_yield_from(struct compiler *c, location loc, int await)
ADDOP(c, loc, CLEANUP_THROW);
USE_LABEL(c, exit);
- return 1;
+ return SUCCESS;
}
static int
@@ -1982,7 +1974,7 @@ compiler_pop_except_and_reraise(struct compiler *c, location loc)
ADDOP_I(c, loc, COPY, 3);
ADDOP(c, loc, POP_EXCEPT);
ADDOP_I(c, loc, RERAISE, 1);
- return 1;
+ return SUCCESS;
}
/* Unwind a frame block. If preserve_tos is true, the TOS before
@@ -1999,7 +1991,7 @@ compiler_unwind_fblock(struct compiler *c, location *ploc,
case EXCEPTION_HANDLER:
case EXCEPTION_GROUP_HANDLER:
case ASYNC_COMPREHENSION_GENERATOR:
- return 1;
+ return SUCCESS;
case FOR_LOOP:
/* Pop the iterator */
@@ -2007,19 +1999,18 @@ compiler_unwind_fblock(struct compiler *c, location *ploc,
ADDOP_I(c, *ploc, SWAP, 2);
}
ADDOP(c, *ploc, POP_TOP);
- return 1;
+ return SUCCESS;
case TRY_EXCEPT:
ADDOP(c, *ploc, POP_BLOCK);
- return 1;
+ return SUCCESS;
case FINALLY_TRY:
/* This POP_BLOCK gets the line number of the unwinding statement */
ADDOP(c, *ploc, POP_BLOCK);
if (preserve_tos) {
- if (!compiler_push_fblock(c, *ploc, POP_VALUE, NO_LABEL, NO_LABEL, NULL)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_push_fblock(c, *ploc, POP_VALUE, NO_LABEL, NO_LABEL, NULL));
}
/* Emit the finally block */
VISIT_SEQ(c, stmt, info->fb_datum);
@@ -2030,7 +2021,7 @@ compiler_unwind_fblock(struct compiler *c, location *ploc,
* statement causing the unwinding, so make the unwinding
* instruction artificial */
*ploc = NO_LOCATION;
- return 1;
+ return SUCCESS;
case FINALLY_END:
if (preserve_tos) {
@@ -2042,7 +2033,7 @@ compiler_unwind_fblock(struct compiler *c, location *ploc,
}
ADDOP(c, *ploc, POP_BLOCK);
ADDOP(c, *ploc, POP_EXCEPT);
- return 1;
+ return SUCCESS;
case WITH:
case ASYNC_WITH:
@@ -2051,9 +2042,7 @@ compiler_unwind_fblock(struct compiler *c, location *ploc,
if (preserve_tos) {
ADDOP_I(c, *ploc, SWAP, 2);
}
- if(!compiler_call_exit_with_nones(c, *ploc)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_call_exit_with_nones(c, *ploc));
if (info->fb_type == ASYNC_WITH) {
ADDOP_I(c, *ploc, GET_AWAITABLE, 2);
ADDOP_LOAD_CONST(c, *ploc, Py_None);
@@ -2064,7 +2053,7 @@ compiler_unwind_fblock(struct compiler *c, location *ploc,
* statement causing the unwinding, so make the unwinding
* instruction artificial */
*ploc = NO_LOCATION;
- return 1;
+ return SUCCESS;
case HANDLER_CLEANUP: {
if (info->fb_datum) {
@@ -2077,17 +2066,17 @@ compiler_unwind_fblock(struct compiler *c, location *ploc,
ADDOP(c, *ploc, POP_EXCEPT);
if (info->fb_datum) {
ADDOP_LOAD_CONST(c, *ploc, Py_None);
- compiler_nameop(c, *ploc, info->fb_datum, Store);
- compiler_nameop(c, *ploc, info->fb_datum, Del);
+ RETURN_IF_ERROR(compiler_nameop(c, *ploc, info->fb_datum, Store));
+ RETURN_IF_ERROR(compiler_nameop(c, *ploc, info->fb_datum, Del));
}
- return 1;
+ return SUCCESS;
}
case POP_VALUE: {
if (preserve_tos) {
ADDOP_I(c, *ploc, SWAP, 2);
}
ADDOP(c, *ploc, POP_TOP);
- return 1;
+ return SUCCESS;
}
}
Py_UNREACHABLE();
@@ -2099,7 +2088,7 @@ compiler_unwind_fblock_stack(struct compiler *c, location *ploc,
int preserve_tos, struct fblockinfo **loop)
{
if (c->u->u_nfblocks == 0) {
- return 1;
+ return SUCCESS;
}
struct fblockinfo *top = &c->u->u_fblock[c->u->u_nfblocks-1];
if (top->fb_type == EXCEPTION_GROUP_HANDLER) {
@@ -2108,19 +2097,15 @@ compiler_unwind_fblock_stack(struct compiler *c, location *ploc,
}
if (loop != NULL && (top->fb_type == WHILE_LOOP || top->fb_type == FOR_LOOP)) {
*loop = top;
- return 1;
+ return SUCCESS;
}
struct fblockinfo copy = *top;
c->u->u_nfblocks--;
-    if (!compiler_unwind_fblock(c, ploc, &copy, preserve_tos)) {
- return 0;
- }
- if (!compiler_unwind_fblock_stack(c, ploc, preserve_tos, loop)) {
- return 0;
- }
+    RETURN_IF_ERROR(compiler_unwind_fblock(c, ploc, &copy, preserve_tos));
+ RETURN_IF_ERROR(compiler_unwind_fblock_stack(c, ploc, preserve_tos, loop));
c->u->u_fblock[c->u->u_nfblocks] = copy;
c->u->u_nfblocks++;
- return 1;
+ return SUCCESS;
}
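To make the unwinding logic above concrete, here is an illustration (not part of the patch) of what the recursive unwind does for a break that crosses a try/finally::

    /* Illustration only, based on the FOR_LOOP and FINALLY_TRY cases above:
     *
     *     for x in seq:
     *         try:
     *             break
     *         finally:
     *             cleanup()
     *
     * Compiling the break first unwinds the enclosing FINALLY_TRY block, which
     * emits the bytecode for cleanup() before the jump, then unwinds the
     * FOR_LOOP block, which pops the loop iterator, and only then jumps to the
     * loop's exit label (see compiler_break further down). */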
/* Compile a sequence of statements, checking for a docstring
@@ -2145,8 +2130,9 @@ compiler_body(struct compiler *c, location loc, asdl_stmt_seq *stmts)
if (find_ann(stmts)) {
ADDOP(c, loc, SETUP_ANNOTATIONS);
}
- if (!asdl_seq_LEN(stmts))
- return 1;
+ if (!asdl_seq_LEN(stmts)) {
+ return SUCCESS;
+ }
/* if not -OO mode, set docstring */
if (c->c_optimize < 2) {
docstring = _PyAST_GetDocString(stmts);
@@ -2155,29 +2141,29 @@ compiler_body(struct compiler *c, location loc, asdl_stmt_seq *stmts)
st = (stmt_ty)asdl_seq_GET(stmts, 0);
assert(st->kind == Expr_kind);
VISIT(c, expr, st->v.Expr.value);
- if (!compiler_nameop(c, NO_LOCATION, &_Py_ID(__doc__), Store))
- return 0;
+ RETURN_IF_ERROR(compiler_nameop(c, NO_LOCATION, &_Py_ID(__doc__), Store));
}
}
- for (; i < asdl_seq_LEN(stmts); i++)
+ for (; i < asdl_seq_LEN(stmts); i++) {
VISIT(c, stmt, (stmt_ty)asdl_seq_GET(stmts, i));
- return 1;
+ }
+ return SUCCESS;
}
static int
compiler_codegen(struct compiler *c, mod_ty mod)
{
_Py_DECLARE_STR(anon_module, "");
- if (!compiler_enter_scope(c, &_Py_STR(anon_module), COMPILER_SCOPE_MODULE,
- mod, 1)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_enter_scope(c, &_Py_STR(anon_module), COMPILER_SCOPE_MODULE,
+ mod, 1));
+
location loc = LOCATION(1, 1, 0, 0);
switch (mod->kind) {
case Module_kind:
- if (!compiler_body(c, loc, mod->v.Module.body)) {
+ if (compiler_body(c, loc, mod->v.Module.body) < 0) {
compiler_exit_scope(c);
- return 0;
+ return ERROR;
}
break;
case Interactive_kind:
@@ -2194,16 +2180,16 @@ compiler_codegen(struct compiler *c, mod_ty mod)
PyErr_Format(PyExc_SystemError,
"module kind %d should not be possible",
mod->kind);
- return 0;
+ return ERROR;
}
- return 1;
+ return SUCCESS;
}
static PyCodeObject *
compiler_mod(struct compiler *c, mod_ty mod)
{
int addNone = mod->kind != Expression_kind;
- if (!compiler_codegen(c, mod)) {
+ if (compiler_codegen(c, mod) < 0) {
return NULL;
}
PyCodeObject *co = assemble(c, addNone);
@@ -2270,7 +2256,7 @@ compiler_make_closure(struct compiler *c, location loc,
*/
int reftype = get_ref_type(c, name);
if (reftype == -1) {
- return 0;
+ return ERROR;
}
int arg;
if (reftype == CELL) {
@@ -2293,7 +2279,7 @@ compiler_make_closure(struct compiler *c, location loc,
co->co_name,
freevars);
Py_DECREF(freevars);
- return 0;
+ return ERROR;
}
ADDOP_I(c, loc, LOAD_CLOSURE, arg);
}
@@ -2302,34 +2288,34 @@ compiler_make_closure(struct compiler *c, location loc,
}
ADDOP_LOAD_CONST(c, loc, (PyObject*)co);
ADDOP_I(c, loc, MAKE_FUNCTION, flags);
- return 1;
+ return SUCCESS;
}
static int
compiler_decorators(struct compiler *c, asdl_expr_seq* decos)
{
- int i;
-
- if (!decos)
- return 1;
+ if (!decos) {
+ return SUCCESS;
+ }
- for (i = 0; i < asdl_seq_LEN(decos); i++) {
+ for (Py_ssize_t i = 0; i < asdl_seq_LEN(decos); i++) {
VISIT(c, expr, (expr_ty)asdl_seq_GET(decos, i));
}
- return 1;
+ return SUCCESS;
}
static int
compiler_apply_decorators(struct compiler *c, asdl_expr_seq* decos)
{
- if (!decos)
- return 1;
+ if (!decos) {
+ return SUCCESS;
+ }
for (Py_ssize_t i = asdl_seq_LEN(decos) - 1; i > -1; i--) {
location loc = LOC((expr_ty)asdl_seq_GET(decos, i));
ADDOP_I(c, loc, CALL, 0);
}
- return 1;
+ return SUCCESS;
}
static int
@@ -2338,7 +2324,7 @@ compiler_visit_kwonlydefaults(struct compiler *c, location loc,
{
/* Push a dict of keyword-only default values.
- Return 0 on error, -1 if no dict pushed, 1 if a dict is pushed.
+ Return -1 on error, 0 if no dict pushed, 1 if a dict is pushed.
*/
int i;
PyObject *keys = NULL;
@@ -2355,7 +2341,7 @@ compiler_visit_kwonlydefaults(struct compiler *c, location loc,
keys = PyList_New(1);
if (keys == NULL) {
Py_DECREF(mangled);
- return 0;
+ return ERROR;
}
PyList_SET_ITEM(keys, 0, mangled);
}
@@ -2366,7 +2352,7 @@ compiler_visit_kwonlydefaults(struct compiler *c, location loc,
goto error;
}
}
- if (!compiler_visit_expr(c, default_)) {
+ if (compiler_visit_expr(c, default_) < 0) {
goto error;
}
}
@@ -2381,12 +2367,12 @@ compiler_visit_kwonlydefaults(struct compiler *c, location loc,
return 1;
}
else {
- return -1;
+ return 0;
}
error:
Py_XDECREF(keys);
- return 0;
+ return ERROR;
}
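The three-way result documented above is remapped so that the error case lands on -1; that is what lets callers apply RETURN_IF_ERROR to it. A sketch of the encoding and the resulting caller pattern (compare compiler_default_arguments below)::

    /* Three-way result of compiler_visit_kwonlydefaults():
     *
     *   old:   0 = error    -1 = nothing pushed    1 = dict pushed
     *   new:  -1 = error     0 = nothing pushed    1 = dict pushed
     *
     * so a caller can now write:
     */
    int res = compiler_visit_kwonlydefaults(c, loc, kwonlyargs, kw_defaults);
    RETURN_IF_ERROR(res);      /* res < 0: propagate the error */
    if (res > 0) {
        funcflags |= 0x02;     /* a defaults dict was pushed */
    }                          /* res == 0: nothing pushed, nothing to do */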
static int
@@ -2394,7 +2380,7 @@ compiler_visit_annexpr(struct compiler *c, expr_ty annotation)
{
location loc = LOC(annotation);
ADDOP_LOAD_CONST_NEW(c, loc, _PyAST_ExprAsUnicode(annotation));
- return 1;
+ return SUCCESS;
}
static int
@@ -2402,11 +2388,11 @@ compiler_visit_argannotation(struct compiler *c, identifier id,
expr_ty annotation, Py_ssize_t *annotations_len, location loc)
{
if (!annotation) {
- return 1;
+ return SUCCESS;
}
PyObject *mangled = _Py_Mangle(c->u->u_private, id);
if (!mangled) {
- return 0;
+ return ERROR;
}
ADDOP_LOAD_CONST(c, loc, mangled);
Py_DECREF(mangled);
@@ -2428,7 +2414,7 @@ compiler_visit_argannotation(struct compiler *c, identifier id,
}
}
*annotations_len += 2;
- return 1;
+ return SUCCESS;
}
static int
@@ -2438,15 +2424,15 @@ compiler_visit_argannotations(struct compiler *c, asdl_arg_seq* args,
int i;
for (i = 0; i < asdl_seq_LEN(args); i++) {
arg_ty arg = (arg_ty)asdl_seq_GET(args, i);
- if (!compiler_visit_argannotation(
+ RETURN_IF_ERROR(
+ compiler_visit_argannotation(
c,
arg->arg,
arg->annotation,
annotations_len,
- loc))
- return 0;
+ loc));
}
- return 1;
+ return SUCCESS;
}
static int
@@ -2456,36 +2442,40 @@ compiler_visit_annotations(struct compiler *c, location loc,
/* Push arg annotation names and values.
The expressions are evaluated out-of-order wrt the source code.
- Return 0 on error, -1 if no annotations pushed, 1 if a annotations is pushed.
+       Return -1 on error, 0 if no annotations pushed, 1 if annotations are pushed.
*/
Py_ssize_t annotations_len = 0;
- if (!compiler_visit_argannotations(c, args->args, &annotations_len, loc))
- return 0;
- if (!compiler_visit_argannotations(c, args->posonlyargs, &annotations_len, loc))
- return 0;
- if (args->vararg && args->vararg->annotation &&
- !compiler_visit_argannotation(c, args->vararg->arg,
- args->vararg->annotation, &annotations_len, loc))
- return 0;
- if (!compiler_visit_argannotations(c, args->kwonlyargs, &annotations_len, loc))
- return 0;
- if (args->kwarg && args->kwarg->annotation &&
- !compiler_visit_argannotation(c, args->kwarg->arg,
- args->kwarg->annotation, &annotations_len, loc))
- return 0;
+ RETURN_IF_ERROR(
+ compiler_visit_argannotations(c, args->args, &annotations_len, loc));
- if (!compiler_visit_argannotation(c, &_Py_ID(return), returns,
- &annotations_len, loc)) {
- return 0;
+ RETURN_IF_ERROR(
+ compiler_visit_argannotations(c, args->posonlyargs, &annotations_len, loc));
+
+ if (args->vararg && args->vararg->annotation) {
+ RETURN_IF_ERROR(
+ compiler_visit_argannotation(c, args->vararg->arg,
+ args->vararg->annotation, &annotations_len, loc));
}
+ RETURN_IF_ERROR(
+ compiler_visit_argannotations(c, args->kwonlyargs, &annotations_len, loc));
+
+ if (args->kwarg && args->kwarg->annotation) {
+ RETURN_IF_ERROR(
+ compiler_visit_argannotation(c, args->kwarg->arg,
+ args->kwarg->annotation, &annotations_len, loc));
+ }
+
+ RETURN_IF_ERROR(
+ compiler_visit_argannotation(c, &_Py_ID(return), returns, &annotations_len, loc));
+
if (annotations_len) {
ADDOP_I(c, loc, BUILD_TUPLE, annotations_len);
return 1;
}
- return -1;
+ return 0;
}
static int
@@ -2494,7 +2484,7 @@ compiler_visit_defaults(struct compiler *c, arguments_ty args,
{
VISIT_SEQ(c, expr, args->defaults);
ADDOP_I(c, loc, BUILD_TUPLE, asdl_seq_LEN(args->defaults));
- return 1;
+ return SUCCESS;
}
static Py_ssize_t
@@ -2503,47 +2493,45 @@ compiler_default_arguments(struct compiler *c, location loc,
{
Py_ssize_t funcflags = 0;
if (args->defaults && asdl_seq_LEN(args->defaults) > 0) {
- if (!compiler_visit_defaults(c, args, loc))
- return -1;
+ RETURN_IF_ERROR(compiler_visit_defaults(c, args, loc));
funcflags |= 0x01;
}
if (args->kwonlyargs) {
int res = compiler_visit_kwonlydefaults(c, loc,
args->kwonlyargs,
args->kw_defaults);
- if (res == 0) {
- return -1;
- }
- else if (res > 0) {
+ RETURN_IF_ERROR(res);
+ if (res > 0) {
funcflags |= 0x02;
}
}
return funcflags;
}
-static int
+static bool
forbidden_name(struct compiler *c, location loc, identifier name,
expr_context_ty ctx)
{
if (ctx == Store && _PyUnicode_EqualToASCIIString(name, "__debug__")) {
compiler_error(c, loc, "cannot assign to __debug__");
- return 1;
+ return true;
}
if (ctx == Del && _PyUnicode_EqualToASCIIString(name, "__debug__")) {
compiler_error(c, loc, "cannot delete __debug__");
- return 1;
+ return true;
}
- return 0;
+ return false;
}
static int
compiler_check_debug_one_arg(struct compiler *c, arg_ty arg)
{
if (arg != NULL) {
- if (forbidden_name(c, LOC(arg), arg->arg, Store))
- return 0;
+ if (forbidden_name(c, LOC(arg), arg->arg, Store)) {
+ return ERROR;
+ }
}
- return 1;
+ return SUCCESS;
}
static int
@@ -2551,39 +2539,32 @@ compiler_check_debug_args_seq(struct compiler *c, asdl_arg_seq *args)
{
if (args != NULL) {
for (Py_ssize_t i = 0, n = asdl_seq_LEN(args); i < n; i++) {
- if (!compiler_check_debug_one_arg(c, asdl_seq_GET(args, i)))
- return 0;
+ RETURN_IF_ERROR(
+ compiler_check_debug_one_arg(c, asdl_seq_GET(args, i)));
}
}
- return 1;
+ return SUCCESS;
}
static int
compiler_check_debug_args(struct compiler *c, arguments_ty args)
{
- if (!compiler_check_debug_args_seq(c, args->posonlyargs))
- return 0;
- if (!compiler_check_debug_args_seq(c, args->args))
- return 0;
- if (!compiler_check_debug_one_arg(c, args->vararg))
- return 0;
- if (!compiler_check_debug_args_seq(c, args->kwonlyargs))
- return 0;
- if (!compiler_check_debug_one_arg(c, args->kwarg))
- return 0;
- return 1;
+ RETURN_IF_ERROR(compiler_check_debug_args_seq(c, args->posonlyargs));
+ RETURN_IF_ERROR(compiler_check_debug_args_seq(c, args->args));
+ RETURN_IF_ERROR(compiler_check_debug_one_arg(c, args->vararg));
+ RETURN_IF_ERROR(compiler_check_debug_args_seq(c, args->kwonlyargs));
+ RETURN_IF_ERROR(compiler_check_debug_one_arg(c, args->kwarg));
+ return SUCCESS;
}
static inline int
insert_instruction(basicblock *block, int pos, struct instr *instr) {
- if (basicblock_next_instr(block) < 0) {
- return -1;
- }
+ RETURN_IF_ERROR(basicblock_next_instr(block));
for (int i = block->b_iused - 1; i > pos; i--) {
block->b_instr[i] = block->b_instr[i-1];
}
block->b_instr[pos] = *instr;
- return 0;
+ return SUCCESS;
}
static int
@@ -2598,16 +2579,15 @@ wrap_in_stopiteration_handler(struct compiler *c)
.i_loc = NO_LOCATION,
.i_target = NULL,
};
- if (insert_instruction(c->u->u_cfg_builder.g_entryblock, 0, &setup)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ insert_instruction(c->u->u_cfg_builder.g_entryblock, 0, &setup));
ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None);
ADDOP(c, NO_LOCATION, RETURN_VALUE);
USE_LABEL(c, handler);
ADDOP(c, NO_LOCATION, STOPITERATION_ERROR);
ADDOP_I(c, NO_LOCATION, RERAISE, 1);
- return 1;
+ return SUCCESS;
}
static int
@@ -2647,11 +2627,8 @@ compiler_function(struct compiler *c, stmt_ty s, int is_async)
scope_type = COMPILER_SCOPE_FUNCTION;
}
- if (!compiler_check_debug_args(c, args))
- return 0;
-
- if (!compiler_decorators(c, decos))
- return 0;
+ RETURN_IF_ERROR(compiler_check_debug_args(c, args));
+ RETURN_IF_ERROR(compiler_decorators(c, decos));
firstlineno = s->lineno;
if (asdl_seq_LEN(decos)) {
@@ -2661,19 +2638,16 @@ compiler_function(struct compiler *c, stmt_ty s, int is_async)
location loc = LOC(s);
funcflags = compiler_default_arguments(c, loc, args);
if (funcflags == -1) {
- return 0;
+ return ERROR;
}
annotations = compiler_visit_annotations(c, loc, args, returns);
- if (annotations == 0) {
- return 0;
- }
- else if (annotations > 0) {
+ RETURN_IF_ERROR(annotations);
+ if (annotations > 0) {
funcflags |= 0x04;
}
- if (!compiler_enter_scope(c, name, scope_type, (void *)s, firstlineno)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_enter_scope(c, name, scope_type, (void *)s, firstlineno));
/* if not -OO mode, add docstring */
if (c->c_optimize < 2) {
@@ -2681,7 +2655,7 @@ compiler_function(struct compiler *c, stmt_ty s, int is_async)
}
if (compiler_add_const(c, docstring ? docstring : Py_None) < 0) {
compiler_exit_scope(c);
- return 0;
+ return ERROR;
}
c->u->u_argcount = asdl_seq_LEN(args->args);
@@ -2691,9 +2665,9 @@ compiler_function(struct compiler *c, stmt_ty s, int is_async)
VISIT_IN_SCOPE(c, stmt, (stmt_ty)asdl_seq_GET(body, i));
}
if (c->u->u_ste->ste_coroutine || c->u->u_ste->ste_generator) {
- if (!wrap_in_stopiteration_handler(c)) {
+ if (wrap_in_stopiteration_handler(c) < 0) {
compiler_exit_scope(c);
- return 0;
+ return ERROR;
}
}
co = assemble(c, 1);
@@ -2702,18 +2676,17 @@ compiler_function(struct compiler *c, stmt_ty s, int is_async)
if (co == NULL) {
Py_XDECREF(qualname);
Py_XDECREF(co);
- return 0;
+ return ERROR;
}
- if (!compiler_make_closure(c, loc, co, funcflags, qualname)) {
+ if (compiler_make_closure(c, loc, co, funcflags, qualname) < 0) {
Py_DECREF(qualname);
Py_DECREF(co);
- return 0;
+ return ERROR;
}
Py_DECREF(qualname);
Py_DECREF(co);
- if (!compiler_apply_decorators(c, decos))
- return 0;
+ RETURN_IF_ERROR(compiler_apply_decorators(c, decos));
return compiler_nameop(c, loc, name, Store);
}
@@ -2724,8 +2697,7 @@ compiler_class(struct compiler *c, stmt_ty s)
int i, firstlineno;
asdl_expr_seq *decos = s->v.ClassDef.decorator_list;
- if (!compiler_decorators(c, decos))
- return 0;
+ RETURN_IF_ERROR(compiler_decorators(c, decos));
firstlineno = s->lineno;
if (asdl_seq_LEN(decos)) {
@@ -2743,35 +2715,35 @@ compiler_class(struct compiler *c, stmt_ty s)
This borrows from compiler_call.
*/
/* 1. compile the class body into a code object */
- if (!compiler_enter_scope(c, s->v.ClassDef.name,
- COMPILER_SCOPE_CLASS, (void *)s, firstlineno)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_enter_scope(c, s->v.ClassDef.name,
+ COMPILER_SCOPE_CLASS, (void *)s, firstlineno));
+
/* this block represents what we do in the new scope */
{
location loc = LOCATION(firstlineno, firstlineno, 0, 0);
/* use the class name for name mangling */
Py_XSETREF(c->u->u_private, Py_NewRef(s->v.ClassDef.name));
/* load (global) __name__ ... */
- if (!compiler_nameop(c, loc, &_Py_ID(__name__), Load)) {
+ if (compiler_nameop(c, loc, &_Py_ID(__name__), Load) < 0) {
compiler_exit_scope(c);
- return 0;
+ return ERROR;
}
/* ... and store it as __module__ */
- if (!compiler_nameop(c, loc, &_Py_ID(__module__), Store)) {
+ if (compiler_nameop(c, loc, &_Py_ID(__module__), Store) < 0) {
compiler_exit_scope(c);
- return 0;
+ return ERROR;
}
assert(c->u->u_qualname);
ADDOP_LOAD_CONST(c, loc, c->u->u_qualname);
- if (!compiler_nameop(c, loc, &_Py_ID(__qualname__), Store)) {
+ if (compiler_nameop(c, loc, &_Py_ID(__qualname__), Store) < 0) {
compiler_exit_scope(c);
- return 0;
+ return ERROR;
}
/* compile the body proper */
- if (!compiler_body(c, loc, s->v.ClassDef.body)) {
+ if (compiler_body(c, loc, s->v.ClassDef.body) < 0) {
compiler_exit_scope(c);
- return 0;
+ return ERROR;
}
/* The following code is artificial */
/* Return __classcell__ if it is referenced, otherwise return None */
@@ -2780,14 +2752,14 @@ compiler_class(struct compiler *c, stmt_ty s)
i = compiler_lookup_arg(c->u->u_cellvars, &_Py_ID(__class__));
if (i < 0) {
compiler_exit_scope(c);
- return 0;
+ return ERROR;
}
assert(i == 0);
ADDOP_I(c, NO_LOCATION, LOAD_CLOSURE, i);
ADDOP_I(c, NO_LOCATION, COPY, 1);
- if (!compiler_nameop(c, NO_LOCATION, &_Py_ID(__classcell__), Store)) {
+ if (compiler_nameop(c, NO_LOCATION, &_Py_ID(__classcell__), Store) < 0) {
compiler_exit_scope(c);
- return 0;
+ return ERROR;
}
}
else {
@@ -2801,8 +2773,9 @@ compiler_class(struct compiler *c, stmt_ty s)
}
/* leave the new scope */
compiler_exit_scope(c);
- if (co == NULL)
- return 0;
+ if (co == NULL) {
+ return ERROR;
+ }
location loc = LOC(s);
/* 2. load the 'build_class' function */
@@ -2810,9 +2783,9 @@ compiler_class(struct compiler *c, stmt_ty s)
ADDOP(c, loc, LOAD_BUILD_CLASS);
/* 3. load a function (or closure) made from the code object */
- if (!compiler_make_closure(c, loc, co, 0, NULL)) {
+ if (compiler_make_closure(c, loc, co, 0, NULL) < 0) {
Py_DECREF(co);
- return 0;
+ return ERROR;
}
Py_DECREF(co);
@@ -2820,27 +2793,25 @@ compiler_class(struct compiler *c, stmt_ty s)
ADDOP_LOAD_CONST(c, loc, s->v.ClassDef.name);
/* 5. generate the rest of the code for the call */
- if (!compiler_call_helper(c, loc, 2,
- s->v.ClassDef.bases,
- s->v.ClassDef.keywords))
- return 0;
+ RETURN_IF_ERROR(compiler_call_helper(c, loc, 2,
+ s->v.ClassDef.bases,
+ s->v.ClassDef.keywords));
+
/* 6. apply decorators */
- if (!compiler_apply_decorators(c, decos))
- return 0;
+ RETURN_IF_ERROR(compiler_apply_decorators(c, decos));
/* 7. store into */
- if (!compiler_nameop(c, loc, s->v.ClassDef.name, Store))
- return 0;
- return 1;
+ RETURN_IF_ERROR(compiler_nameop(c, loc, s->v.ClassDef.name, Store));
+ return SUCCESS;
}
-/* Return 0 if the expression is a constant value except named singletons.
- Return 1 otherwise. */
-static int
+/* Return false if the expression is a constant other than a named singleton
+   (None, True, False, Ellipsis). Return true otherwise. */
+static bool
check_is_arg(expr_ty e)
{
if (e->kind != Constant_kind) {
- return 1;
+ return true;
}
PyObject *value = e->v.Constant.value;
return (value == Py_None
@@ -2849,19 +2820,18 @@ check_is_arg(expr_ty e)
|| value == Py_Ellipsis);
}
-/* Check operands of identity chacks ("is" and "is not").
+/* Check operands of identity checks ("is" and "is not").
Emit a warning if any operand is a constant except named singletons.
- Return 0 on error.
*/
static int
check_compare(struct compiler *c, expr_ty e)
{
Py_ssize_t i, n;
- int left = check_is_arg(e->v.Compare.left);
+ bool left = check_is_arg(e->v.Compare.left);
n = asdl_seq_LEN(e->v.Compare.ops);
for (i = 0; i < n; i++) {
cmpop_ty op = (cmpop_ty)asdl_seq_GET(e->v.Compare.ops, i);
- int right = check_is_arg((expr_ty)asdl_seq_GET(e->v.Compare.comparators, i));
+ bool right = check_is_arg((expr_ty)asdl_seq_GET(e->v.Compare.comparators, i));
if (op == Is || op == IsNot) {
if (!right || !left) {
const char *msg = (op == Is)
@@ -2872,7 +2842,7 @@ check_compare(struct compiler *c, expr_ty e)
}
left = right;
}
- return 1;
+ return SUCCESS;
}
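For reference, a sketch of which comparisons the check above ends up warning about (the warning text is paraphrased; it is emitted via compiler_warn, which is not shown in this hunk)::

    /* Illustration only:
     *
     *   x is None        ->  no warning: None is a named singleton
     *   x is True        ->  no warning: True is a named singleton
     *   x is 10          ->  warning: "is" compared with a literal constant
     *   x is not "spam"  ->  warning: "is not" compared with a literal constant
     *   x is y           ->  no warning: neither operand is a constant
     */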
static int compiler_addcompare(struct compiler *c, location loc,
@@ -2900,21 +2870,21 @@ static int compiler_addcompare(struct compiler *c, location loc,
break;
case Is:
ADDOP_I(c, loc, IS_OP, 0);
- return 1;
+ return SUCCESS;
case IsNot:
ADDOP_I(c, loc, IS_OP, 1);
- return 1;
+ return SUCCESS;
case In:
ADDOP_I(c, loc, CONTAINS_OP, 0);
- return 1;
+ return SUCCESS;
case NotIn:
ADDOP_I(c, loc, CONTAINS_OP, 1);
- return 1;
+ return SUCCESS;
default:
Py_UNREACHABLE();
}
ADDOP_I(c, loc, COMPARE_OP, cmp);
- return 1;
+ return SUCCESS;
}
@@ -2941,43 +2911,36 @@ compiler_jump_if(struct compiler *c, location loc,
next2 = new_next2;
}
for (i = 0; i < n; ++i) {
- if (!compiler_jump_if(c, loc, (expr_ty)asdl_seq_GET(s, i), next2, cond2)) {
- return 0;
- }
- }
- if (!compiler_jump_if(c, loc, (expr_ty)asdl_seq_GET(s, n), next, cond)) {
- return 0;
+ RETURN_IF_ERROR(
+ compiler_jump_if(c, loc, (expr_ty)asdl_seq_GET(s, i), next2, cond2));
}
+ RETURN_IF_ERROR(
+ compiler_jump_if(c, loc, (expr_ty)asdl_seq_GET(s, n), next, cond));
if (!SAME_LABEL(next2, next)) {
USE_LABEL(c, next2);
}
- return 1;
+ return SUCCESS;
}
case IfExp_kind: {
NEW_JUMP_TARGET_LABEL(c, end);
NEW_JUMP_TARGET_LABEL(c, next2);
- if (!compiler_jump_if(c, loc, e->v.IfExp.test, next2, 0)) {
- return 0;
- }
- if (!compiler_jump_if(c, loc, e->v.IfExp.body, next, cond)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_jump_if(c, loc, e->v.IfExp.test, next2, 0));
+ RETURN_IF_ERROR(
+ compiler_jump_if(c, loc, e->v.IfExp.body, next, cond));
ADDOP_JUMP(c, NO_LOCATION, JUMP, end);
USE_LABEL(c, next2);
- if (!compiler_jump_if(c, loc, e->v.IfExp.orelse, next, cond)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_jump_if(c, loc, e->v.IfExp.orelse, next, cond));
USE_LABEL(c, end);
- return 1;
+ return SUCCESS;
}
case Compare_kind: {
Py_ssize_t n = asdl_seq_LEN(e->v.Compare.ops) - 1;
if (n > 0) {
- if (!check_compare(c, e)) {
- return 0;
- }
+ RETURN_IF_ERROR(check_compare(c, e));
NEW_JUMP_TARGET_LABEL(c, cleanup);
VISIT(c, expr, e->v.Compare.left);
for (Py_ssize_t i = 0; i < n; i++) {
@@ -3001,7 +2964,7 @@ compiler_jump_if(struct compiler *c, location loc,
}
USE_LABEL(c, end);
- return 1;
+ return SUCCESS;
}
/* fallback to general implementation */
break;
@@ -3014,7 +2977,7 @@ compiler_jump_if(struct compiler *c, location loc,
/* general implementation */
VISIT(c, expr, e);
ADDOP_JUMP(c, LOC(e), cond ? POP_JUMP_IF_TRUE : POP_JUMP_IF_FALSE, next);
- return 1;
+ return SUCCESS;
}
static int
@@ -3024,9 +2987,9 @@ compiler_ifexp(struct compiler *c, expr_ty e)
NEW_JUMP_TARGET_LABEL(c, end);
NEW_JUMP_TARGET_LABEL(c, next);
- if (!compiler_jump_if(c, LOC(e), e->v.IfExp.test, next, 0)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_jump_if(c, LOC(e), e->v.IfExp.test, next, 0));
+
VISIT(c, expr, e->v.IfExp.body);
ADDOP_JUMP(c, NO_LOCATION, JUMP, end);
@@ -3034,7 +2997,7 @@ compiler_ifexp(struct compiler *c, expr_ty e)
VISIT(c, expr, e->v.IfExp.orelse);
USE_LABEL(c, end);
- return 1;
+ return SUCCESS;
}
static int
@@ -3046,8 +3009,7 @@ compiler_lambda(struct compiler *c, expr_ty e)
arguments_ty args = e->v.Lambda.args;
assert(e->kind == Lambda_kind);
- if (!compiler_check_debug_args(c, args))
- return 0;
+ RETURN_IF_ERROR(compiler_check_debug_args(c, args));
location loc = LOC(e);
funcflags = compiler_default_arguments(c, loc, args);
@@ -3056,14 +3018,13 @@ compiler_lambda(struct compiler *c, expr_ty e)
}
_Py_DECLARE_STR(anon_lambda, "");
- if (!compiler_enter_scope(c, &_Py_STR(anon_lambda), COMPILER_SCOPE_LAMBDA,
- (void *)e, e->lineno)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_enter_scope(c, &_Py_STR(anon_lambda), COMPILER_SCOPE_LAMBDA,
+ (void *)e, e->lineno));
+
/* Make None the first constant, so the lambda can't have a
docstring. */
- if (compiler_add_const(c, Py_None) < 0)
- return 0;
+ RETURN_IF_ERROR(compiler_add_const(c, Py_None));
c->u->u_argcount = asdl_seq_LEN(args->args);
c->u->u_posonlyargcount = asdl_seq_LEN(args->posonlyargs);
@@ -3081,18 +3042,18 @@ compiler_lambda(struct compiler *c, expr_ty e)
compiler_exit_scope(c);
if (co == NULL) {
Py_DECREF(qualname);
- return 0;
+ return ERROR;
}
- if (!compiler_make_closure(c, loc, co, funcflags, qualname)) {
+ if (compiler_make_closure(c, loc, co, funcflags, qualname) < 0) {
Py_DECREF(qualname);
Py_DECREF(co);
- return 0;
+ return ERROR;
}
Py_DECREF(qualname);
Py_DECREF(co);
- return 1;
+ return SUCCESS;
}
static int
@@ -3108,9 +3069,9 @@ compiler_if(struct compiler *c, stmt_ty s)
else {
next = end;
}
- if (!compiler_jump_if(c, LOC(s), s->v.If.test, next, 0)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_jump_if(c, LOC(s), s->v.If.test, next, 0));
+
VISIT_SEQ(c, stmt, s->v.If.body);
if (asdl_seq_LEN(s->v.If.orelse)) {
ADDOP_JUMP(c, NO_LOCATION, JUMP, end);
@@ -3120,7 +3081,7 @@ compiler_if(struct compiler *c, stmt_ty s)
}
USE_LABEL(c, end);
- return 1;
+ return SUCCESS;
}
static int
@@ -3132,9 +3093,8 @@ compiler_for(struct compiler *c, stmt_ty s)
NEW_JUMP_TARGET_LABEL(c, cleanup);
NEW_JUMP_TARGET_LABEL(c, end);
- if (!compiler_push_fblock(c, loc, FOR_LOOP, start, end, NULL)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_push_fblock(c, loc, FOR_LOOP, start, end, NULL));
+
VISIT(c, expr, s->v.For.iter);
ADDOP(c, loc, GET_ITER);
@@ -3155,7 +3115,7 @@ compiler_for(struct compiler *c, stmt_ty s)
VISIT_SEQ(c, stmt, s->v.For.orelse);
USE_LABEL(c, end);
- return 1;
+ return SUCCESS;
}
@@ -3177,9 +3137,8 @@ compiler_async_for(struct compiler *c, stmt_ty s)
ADDOP(c, loc, GET_AITER);
USE_LABEL(c, start);
- if (!compiler_push_fblock(c, loc, FOR_LOOP, start, end, NULL)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_push_fblock(c, loc, FOR_LOOP, start, end, NULL));
+
/* SETUP_FINALLY to guard the __anext__ call */
ADDOP_JUMP(c, loc, SETUP_FINALLY, except);
ADDOP(c, loc, GET_ANEXT);
@@ -3207,7 +3166,7 @@ compiler_async_for(struct compiler *c, stmt_ty s)
VISIT_SEQ(c, stmt, s->v.For.orelse);
USE_LABEL(c, end);
- return 1;
+ return SUCCESS;
}
static int
@@ -3219,18 +3178,13 @@ compiler_while(struct compiler *c, stmt_ty s)
NEW_JUMP_TARGET_LABEL(c, anchor);
USE_LABEL(c, loop);
- if (!compiler_push_fblock(c, LOC(s), WHILE_LOOP, loop, end, NULL)) {
- return 0;
- }
- if (!compiler_jump_if(c, LOC(s), s->v.While.test, anchor, 0)) {
- return 0;
- }
+
+ RETURN_IF_ERROR(compiler_push_fblock(c, LOC(s), WHILE_LOOP, loop, end, NULL));
+ RETURN_IF_ERROR(compiler_jump_if(c, LOC(s), s->v.While.test, anchor, 0));
USE_LABEL(c, body);
VISIT_SEQ(c, stmt, s->v.While.body);
- if (!compiler_jump_if(c, LOC(s), s->v.While.test, body, 1)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_jump_if(c, LOC(s), s->v.While.test, body, 1));
compiler_pop_fblock(c, WHILE_LOOP, loop);
@@ -3240,7 +3194,7 @@ compiler_while(struct compiler *c, stmt_ty s)
}
USE_LABEL(c, end);
- return 1;
+ return SUCCESS;
}
static int
@@ -3249,13 +3203,13 @@ compiler_return(struct compiler *c, stmt_ty s)
location loc = LOC(s);
int preserve_tos = ((s->v.Return.value != NULL) &&
(s->v.Return.value->kind != Constant_kind));
- if (c->u->u_ste->ste_type != FunctionBlock)
+ if (c->u->u_ste->ste_type != FunctionBlock) {
return compiler_error(c, loc, "'return' outside function");
+ }
if (s->v.Return.value != NULL &&
c->u->u_ste->ste_coroutine && c->u->u_ste->ste_generator)
{
- return compiler_error(
- c, loc, "'return' with value in async generator");
+ return compiler_error(c, loc, "'return' with value in async generator");
}
if (preserve_tos) {
@@ -3272,8 +3226,7 @@ compiler_return(struct compiler *c, stmt_ty s)
ADDOP(c, loc, NOP);
}
- if (!compiler_unwind_fblock_stack(c, &loc, preserve_tos, NULL))
- return 0;
+ RETURN_IF_ERROR(compiler_unwind_fblock_stack(c, &loc, preserve_tos, NULL));
if (s->v.Return.value == NULL) {
ADDOP_LOAD_CONST(c, loc, Py_None);
}
@@ -3282,7 +3235,7 @@ compiler_return(struct compiler *c, stmt_ty s)
}
ADDOP(c, loc, RETURN_VALUE);
- return 1;
+ return SUCCESS;
}
static int
@@ -3291,17 +3244,13 @@ compiler_break(struct compiler *c, location loc)
struct fblockinfo *loop = NULL;
/* Emit instruction with line number */
ADDOP(c, loc, NOP);
- if (!compiler_unwind_fblock_stack(c, &loc, 0, &loop)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_unwind_fblock_stack(c, &loc, 0, &loop));
if (loop == NULL) {
return compiler_error(c, loc, "'break' outside loop");
}
- if (!compiler_unwind_fblock(c, &loc, loop, 0)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_unwind_fblock(c, &loc, loop, 0));
ADDOP_JUMP(c, loc, JUMP, loop->fb_exit);
- return 1;
+ return SUCCESS;
}
static int
@@ -3310,14 +3259,12 @@ compiler_continue(struct compiler *c, location loc)
struct fblockinfo *loop = NULL;
/* Emit instruction with line number */
ADDOP(c, loc, NOP);
- if (!compiler_unwind_fblock_stack(c, &loc, 0, &loop)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_unwind_fblock_stack(c, &loc, 0, &loop));
if (loop == NULL) {
return compiler_error(c, loc, "'continue' not properly in loop");
}
ADDOP_JUMP(c, loc, JUMP, loop->fb_block);
- return 1;
+ return SUCCESS;
}
@@ -3376,11 +3323,12 @@ compiler_try_finally(struct compiler *c, stmt_ty s)
ADDOP_JUMP(c, loc, SETUP_FINALLY, end);
USE_LABEL(c, body);
- if (!compiler_push_fblock(c, loc, FINALLY_TRY, body, end, s->v.Try.finalbody))
- return 0;
+ RETURN_IF_ERROR(
+ compiler_push_fblock(c, loc, FINALLY_TRY, body, end,
+ s->v.Try.finalbody));
+
if (s->v.Try.handlers && asdl_seq_LEN(s->v.Try.handlers)) {
- if (!compiler_try_except(c, s))
- return 0;
+ RETURN_IF_ERROR(compiler_try_except(c, s));
}
else {
VISIT_SEQ(c, stmt, s->v.Try.body);
@@ -3397,8 +3345,8 @@ compiler_try_finally(struct compiler *c, stmt_ty s)
loc = NO_LOCATION;
ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup);
ADDOP(c, loc, PUSH_EXC_INFO);
- if (!compiler_push_fblock(c, loc, FINALLY_END, end, NO_LABEL, NULL))
- return 0;
+ RETURN_IF_ERROR(
+ compiler_push_fblock(c, loc, FINALLY_END, end, NO_LABEL, NULL));
VISIT_SEQ(c, stmt, s->v.Try.finalbody);
loc = location_of_last_executing_statement(s->v.Try.finalbody);
compiler_pop_fblock(c, FINALLY_END, end);
@@ -3409,7 +3357,7 @@ compiler_try_finally(struct compiler *c, stmt_ty s)
POP_EXCEPT_AND_RERAISE(c, loc);
USE_LABEL(c, exit);
- return 1;
+ return SUCCESS;
}
static int
@@ -3425,13 +3373,12 @@ compiler_try_star_finally(struct compiler *c, stmt_ty s)
ADDOP_JUMP(c, loc, SETUP_FINALLY, end);
USE_LABEL(c, body);
- if (!compiler_push_fblock(c, loc, FINALLY_TRY, body, end, s->v.TryStar.finalbody)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_push_fblock(c, loc, FINALLY_TRY, body, end,
+ s->v.TryStar.finalbody));
+
if (s->v.TryStar.handlers && asdl_seq_LEN(s->v.TryStar.handlers)) {
- if (!compiler_try_star_except(c, s)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_try_star_except(c, s));
}
else {
VISIT_SEQ(c, stmt, s->v.TryStar.body);
@@ -3448,9 +3395,9 @@ compiler_try_star_finally(struct compiler *c, stmt_ty s)
loc = NO_LOCATION;
ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup);
ADDOP(c, loc, PUSH_EXC_INFO);
- if (!compiler_push_fblock(c, loc, FINALLY_END, end, NO_LABEL, NULL)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_push_fblock(c, loc, FINALLY_END, end, NO_LABEL, NULL));
+
VISIT_SEQ(c, stmt, s->v.TryStar.finalbody);
loc = location_of_last_executing_statement(s->v.Try.finalbody);
@@ -3461,7 +3408,7 @@ compiler_try_star_finally(struct compiler *c, stmt_ty s)
POP_EXCEPT_AND_RERAISE(c, loc);
USE_LABEL(c, exit);
- return 1;
+ return SUCCESS;
}
@@ -3507,8 +3454,8 @@ compiler_try_except(struct compiler *c, stmt_ty s)
ADDOP_JUMP(c, loc, SETUP_FINALLY, except);
USE_LABEL(c, body);
- if (!compiler_push_fblock(c, loc, TRY_EXCEPT, body, NO_LABEL, NULL))
- return 0;
+ RETURN_IF_ERROR(
+ compiler_push_fblock(c, loc, TRY_EXCEPT, body, NO_LABEL, NULL));
VISIT_SEQ(c, stmt, s->v.Try.body);
compiler_pop_fblock(c, TRY_EXCEPT, body);
ADDOP(c, NO_LOCATION, POP_BLOCK);
@@ -3522,9 +3469,11 @@ compiler_try_except(struct compiler *c, stmt_ty s)
ADDOP_JUMP(c, NO_LOCATION, SETUP_CLEANUP, cleanup);
ADDOP(c, NO_LOCATION, PUSH_EXC_INFO);
+
/* Runtime will push a block here, so we need to account for that */
- if (!compiler_push_fblock(c, loc, EXCEPTION_HANDLER, NO_LABEL, NO_LABEL, NULL))
- return 0;
+ RETURN_IF_ERROR(
+ compiler_push_fblock(c, loc, EXCEPTION_HANDLER, NO_LABEL, NO_LABEL, NULL));
+
for (i = 0; i < n; i++) {
excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET(
s->v.Try.handlers, i);
@@ -3543,7 +3492,8 @@ compiler_try_except(struct compiler *c, stmt_ty s)
NEW_JUMP_TARGET_LABEL(c, cleanup_end);
NEW_JUMP_TARGET_LABEL(c, cleanup_body);
- compiler_nameop(c, loc, handler->v.ExceptHandler.name, Store);
+ RETURN_IF_ERROR(
+ compiler_nameop(c, loc, handler->v.ExceptHandler.name, Store));
/*
try:
@@ -3560,10 +3510,9 @@ compiler_try_except(struct compiler *c, stmt_ty s)
ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup_end);
USE_LABEL(c, cleanup_body);
- if (!compiler_push_fblock(c, loc, HANDLER_CLEANUP, cleanup_body,
- NO_LABEL, handler->v.ExceptHandler.name)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_push_fblock(c, loc, HANDLER_CLEANUP, cleanup_body,
+ NO_LABEL, handler->v.ExceptHandler.name));
/* second # body */
VISIT_SEQ(c, stmt, handler->v.ExceptHandler.body);
@@ -3573,8 +3522,10 @@ compiler_try_except(struct compiler *c, stmt_ty s)
ADDOP(c, NO_LOCATION, POP_BLOCK);
ADDOP(c, NO_LOCATION, POP_EXCEPT);
ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None);
- compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store);
- compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del);
+ RETURN_IF_ERROR(
+ compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store));
+ RETURN_IF_ERROR(
+ compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del));
ADDOP_JUMP(c, NO_LOCATION, JUMP, end);
/* except: */
@@ -3582,8 +3533,10 @@ compiler_try_except(struct compiler *c, stmt_ty s)
/* name = None; del name; # artificial */
ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None);
- compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store);
- compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del);
+ RETURN_IF_ERROR(
+ compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store));
+ RETURN_IF_ERROR(
+ compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del));
ADDOP_I(c, NO_LOCATION, RERAISE, 1);
}
@@ -3593,8 +3546,10 @@ compiler_try_except(struct compiler *c, stmt_ty s)
ADDOP(c, loc, POP_TOP); /* exc_value */
USE_LABEL(c, cleanup_body);
- if (!compiler_push_fblock(c, loc, HANDLER_CLEANUP, cleanup_body, NO_LABEL, NULL))
- return 0;
+ RETURN_IF_ERROR(
+ compiler_push_fblock(c, loc, HANDLER_CLEANUP, cleanup_body,
+ NO_LABEL, NULL));
+
VISIT_SEQ(c, stmt, handler->v.ExceptHandler.body);
compiler_pop_fblock(c, HANDLER_CLEANUP, cleanup_body);
ADDOP(c, NO_LOCATION, POP_BLOCK);
@@ -3612,7 +3567,7 @@ compiler_try_except(struct compiler *c, stmt_ty s)
POP_EXCEPT_AND_RERAISE(c, NO_LOCATION);
USE_LABEL(c, end);
- return 1;
+ return SUCCESS;
}
/*
@@ -3679,9 +3634,8 @@ compiler_try_star_except(struct compiler *c, stmt_ty s)
ADDOP_JUMP(c, loc, SETUP_FINALLY, except);
USE_LABEL(c, body);
- if (!compiler_push_fblock(c, loc, TRY_EXCEPT, body, NO_LABEL, NULL)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_push_fblock(c, loc, TRY_EXCEPT, body, NO_LABEL, NULL));
VISIT_SEQ(c, stmt, s->v.TryStar.body);
compiler_pop_fblock(c, TRY_EXCEPT, body);
ADDOP(c, NO_LOCATION, POP_BLOCK);
@@ -3692,11 +3646,12 @@ compiler_try_star_except(struct compiler *c, stmt_ty s)
ADDOP_JUMP(c, NO_LOCATION, SETUP_CLEANUP, cleanup);
ADDOP(c, NO_LOCATION, PUSH_EXC_INFO);
+
/* Runtime will push a block here, so we need to account for that */
- if (!compiler_push_fblock(c, loc, EXCEPTION_GROUP_HANDLER,
- NO_LABEL, NO_LABEL, "except handler")) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_push_fblock(c, loc, EXCEPTION_GROUP_HANDLER,
+ NO_LABEL, NO_LABEL, "except handler"));
+
for (Py_ssize_t i = 0; i < n; i++) {
excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET(
s->v.TryStar.handlers, i);
@@ -3736,7 +3691,8 @@ compiler_try_star_except(struct compiler *c, stmt_ty s)
NEW_JUMP_TARGET_LABEL(c, cleanup_body);
if (handler->v.ExceptHandler.name) {
- compiler_nameop(c, loc, handler->v.ExceptHandler.name, Store);
+ RETURN_IF_ERROR(
+ compiler_nameop(c, loc, handler->v.ExceptHandler.name, Store));
}
else {
ADDOP(c, loc, POP_TOP); // match
@@ -3756,9 +3712,9 @@ compiler_try_star_except(struct compiler *c, stmt_ty s)
ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup_end);
USE_LABEL(c, cleanup_body);
- if (!compiler_push_fblock(c, loc, HANDLER_CLEANUP, cleanup_body, NO_LABEL, handler->v.ExceptHandler.name)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_push_fblock(c, loc, HANDLER_CLEANUP, cleanup_body,
+ NO_LABEL, handler->v.ExceptHandler.name));
/* second # body */
VISIT_SEQ(c, stmt, handler->v.ExceptHandler.body);
@@ -3767,8 +3723,10 @@ compiler_try_star_except(struct compiler *c, stmt_ty s)
ADDOP(c, NO_LOCATION, POP_BLOCK);
if (handler->v.ExceptHandler.name) {
ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None);
- compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store);
- compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del);
+ RETURN_IF_ERROR(
+ compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store));
+ RETURN_IF_ERROR(
+ compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del));
}
ADDOP_JUMP(c, NO_LOCATION, JUMP, except);
@@ -3778,8 +3736,10 @@ compiler_try_star_except(struct compiler *c, stmt_ty s)
/* name = None; del name; # artificial */
if (handler->v.ExceptHandler.name) {
ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None);
- compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store);
- compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del);
+ RETURN_IF_ERROR(
+ compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Store));
+ RETURN_IF_ERROR(
+ compiler_nameop(c, NO_LOCATION, handler->v.ExceptHandler.name, Del));
}
/* add exception raised to the res list */
@@ -3823,7 +3783,7 @@ compiler_try_star_except(struct compiler *c, stmt_ty s)
VISIT_SEQ(c, stmt, s->v.TryStar.orelse);
USE_LABEL(c, end);
- return 1;
+ return SUCCESS;
}
static int
@@ -3857,19 +3817,22 @@ compiler_import_as(struct compiler *c, location loc,
*/
Py_ssize_t len = PyUnicode_GET_LENGTH(name);
Py_ssize_t dot = PyUnicode_FindChar(name, '.', 0, len, 1);
- if (dot == -2)
- return 0;
+ if (dot == -2) {
+ return ERROR;
+ }
if (dot != -1) {
/* Consume the base module name to get the first attribute */
while (1) {
Py_ssize_t pos = dot + 1;
PyObject *attr;
dot = PyUnicode_FindChar(name, '.', pos, len, 1);
- if (dot == -2)
- return 0;
+ if (dot == -2) {
+ return ERROR;
+ }
attr = PyUnicode_Substring(name, pos, (dot != -1) ? dot : len);
- if (!attr)
- return 0;
+ if (!attr) {
+ return ERROR;
+ }
ADDOP_N(c, loc, IMPORT_FROM, attr, names);
if (dot == -1) {
break;
@@ -3877,11 +3840,9 @@ compiler_import_as(struct compiler *c, location loc,
ADDOP_I(c, loc, SWAP, 2);
ADDOP(c, loc, POP_TOP);
}
- if (!compiler_nameop(c, loc, asname, Store)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_nameop(c, loc, asname, Store));
ADDOP(c, loc, POP_TOP);
- return 1;
+ return SUCCESS;
}
return compiler_nameop(c, loc, asname, Store);
}
@@ -3910,8 +3871,7 @@ compiler_import(struct compiler *c, stmt_ty s)
if (alias->asname) {
r = compiler_import_as(c, loc, alias->name, alias->asname);
- if (!r)
- return r;
+ RETURN_IF_ERROR(r);
}
else {
identifier tmp = alias->name;
@@ -3919,18 +3879,18 @@ compiler_import(struct compiler *c, stmt_ty s)
alias->name, '.', 0, PyUnicode_GET_LENGTH(alias->name), 1);
if (dot != -1) {
tmp = PyUnicode_Substring(alias->name, 0, dot);
- if (tmp == NULL)
- return 0;
+ if (tmp == NULL) {
+ return ERROR;
+ }
}
r = compiler_nameop(c, loc, tmp, Store);
if (dot != -1) {
Py_DECREF(tmp);
}
- if (!r)
- return r;
+ RETURN_IF_ERROR(r);
}
}
- return 1;
+ return SUCCESS;
}
static int
@@ -3942,7 +3902,7 @@ compiler_from_import(struct compiler *c, stmt_ty s)
PyObject *names = PyTuple_New(n);
if (!names) {
- return 0;
+ return ERROR;
}
/* build up the names */
@@ -3975,7 +3935,7 @@ compiler_from_import(struct compiler *c, stmt_ty s)
if (i == 0 && PyUnicode_READ_CHAR(alias->name, 0) == '*') {
assert(n == 1);
ADDOP(c, LOC(s), IMPORT_STAR);
- return 1;
+ return SUCCESS;
}
ADDOP_NAME(c, LOC(s), IMPORT_FROM, alias->name, names);
@@ -3984,13 +3944,11 @@ compiler_from_import(struct compiler *c, stmt_ty s)
store_name = alias->asname;
}
- if (!compiler_nameop(c, LOC(s), store_name, Store)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_nameop(c, LOC(s), store_name, Store));
}
/* remove imported module */
ADDOP(c, LOC(s), POP_TOP);
- return 1;
+ return SUCCESS;
}
static int
@@ -4003,19 +3961,15 @@ compiler_assert(struct compiler *c, stmt_ty s)
PyTuple_Check(s->v.Assert.test->v.Constant.value) &&
PyTuple_Size(s->v.Assert.test->v.Constant.value) > 0))
{
- if (!compiler_warn(c, LOC(s), "assertion is always true, "
- "perhaps remove parentheses?"))
- {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_warn(c, LOC(s), "assertion is always true, "
+ "perhaps remove parentheses?"));
}
if (c->c_optimize) {
- return 1;
+ return SUCCESS;
}
NEW_JUMP_TARGET_LABEL(c, end);
- if (!compiler_jump_if(c, LOC(s), s->v.Assert.test, end, 1)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_jump_if(c, LOC(s), s->v.Assert.test, end, 1));
ADDOP(c, LOC(s), LOAD_ASSERTION_ERROR);
if (s->v.Assert.msg) {
VISIT(c, expr, s->v.Assert.msg);
@@ -4024,7 +3978,7 @@ compiler_assert(struct compiler *c, stmt_ty s)
ADDOP_I(c, LOC(s), RAISE_VARARGS, 1);
USE_LABEL(c, end);
- return 1;
+ return SUCCESS;
}
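A quick illustration (example code is not from the patch) of the always-true assertion the tuple check above warns about::

    /* Illustration only:
     *
     *   assert (x > 0, "x must be positive")   # non-empty tuple: always true,
     *                                          # triggers the warning above
     *   assert x > 0, "x must be positive"     # the intended form
     *   assert ()                              # empty tuple: no warning, and
     *                                          # the assertion always fails
     */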
static int
@@ -4033,18 +3987,18 @@ compiler_stmt_expr(struct compiler *c, location loc, expr_ty value)
if (c->c_interactive && c->c_nestlevel <= 1) {
VISIT(c, expr, value);
ADDOP(c, loc, PRINT_EXPR);
- return 1;
+ return SUCCESS;
}
if (value->kind == Constant_kind) {
/* ignore constant statement */
ADDOP(c, loc, NOP);
- return 1;
+ return SUCCESS;
}
VISIT(c, expr, value);
ADDOP(c, NO_LOCATION, POP_TOP); /* artificial */
- return 1;
+ return SUCCESS;
}
static int
@@ -4140,7 +4094,7 @@ compiler_visit_stmt(struct compiler *c, stmt_ty s)
return compiler_async_for(c, s);
}
- return 1;
+ return SUCCESS;
}
static int
@@ -4210,10 +4164,10 @@ addop_binary(struct compiler *c, location loc, operator_ty binop,
default:
PyErr_Format(PyExc_SystemError, "%s op %d should not be possible",
inplace ? "inplace" : "binary", binop);
- return 0;
+ return ERROR;
}
ADDOP_I(c, loc, BINARY_OP, oparg);
- return 1;
+ return SUCCESS;
}
@@ -4224,7 +4178,7 @@ addop_yield(struct compiler *c, location loc) {
}
ADDOP_I(c, loc, YIELD_VALUE, 0);
ADDOP_I(c, loc, RESUME, 1);
- return 1;
+ return SUCCESS;
}
static int
@@ -4242,12 +4196,14 @@ compiler_nameop(struct compiler *c, location loc,
!_PyUnicode_EqualToASCIIString(name, "True") &&
!_PyUnicode_EqualToASCIIString(name, "False"));
- if (forbidden_name(c, loc, name, ctx))
- return 0;
+ if (forbidden_name(c, loc, name, ctx)) {
+ return ERROR;
+ }
mangled = _Py_Mangle(c->u->u_private, name);
- if (!mangled)
- return 0;
+ if (!mangled) {
+ return ERROR;
+ }
op = 0;
optype = OP_NAME;
@@ -4297,7 +4253,7 @@ compiler_nameop(struct compiler *c, location loc,
case Del: op = DELETE_FAST; break;
}
ADDOP_N(c, loc, op, mangled, varnames);
- return 1;
+ return SUCCESS;
case OP_GLOBAL:
switch (ctx) {
case Load: op = LOAD_GLOBAL; break;
@@ -4318,7 +4274,7 @@ compiler_nameop(struct compiler *c, location loc,
arg = dict_add_o(dict, mangled);
Py_DECREF(mangled);
if (arg < 0) {
- return 0;
+ return ERROR;
}
if (op == LOAD_GLOBAL) {
arg <<= 1;
@@ -4353,7 +4309,7 @@ compiler_boolop(struct compiler *c, expr_ty e)
VISIT(c, expr, (expr_ty)asdl_seq_GET(s, n));
USE_LABEL(c, end);
- return 1;
+ return SUCCESS;
}
static int
@@ -4365,7 +4321,7 @@ starunpack_helper(struct compiler *c, location loc,
if (n > 2 && are_all_items_const(elts, 0, n)) {
PyObject *folded = PyTuple_New(n);
if (folded == NULL) {
- return 0;
+ return ERROR;
}
PyObject *val;
for (Py_ssize_t i = 0; i < n; i++) {
@@ -4378,7 +4334,7 @@ starunpack_helper(struct compiler *c, location loc,
if (add == SET_ADD) {
Py_SETREF(folded, PyFrozenSet_New(folded));
if (folded == NULL) {
- return 0;
+ return ERROR;
}
}
ADDOP_I(c, loc, build, pushed);
@@ -4388,7 +4344,7 @@ starunpack_helper(struct compiler *c, location loc,
ADDOP(c, loc, LIST_TO_TUPLE);
}
}
- return 1;
+ return SUCCESS;
}
int big = n+pushed > STACK_USE_GUIDELINE;
@@ -4410,7 +4366,7 @@ starunpack_helper(struct compiler *c, location loc,
} else {
ADDOP_I(c, loc, build, n+pushed);
}
- return 1;
+ return SUCCESS;
}
int sequence_built = 0;
if (big) {
@@ -4438,7 +4394,7 @@ starunpack_helper(struct compiler *c, location loc,
if (tuple) {
ADDOP(c, loc, LIST_TO_TUPLE);
}
- return 1;
+ return SUCCESS;
}
static int
@@ -4450,10 +4406,11 @@ unpack_helper(struct compiler *c, location loc, asdl_expr_seq *elts)
expr_ty elt = asdl_seq_GET(elts, i);
if (elt->kind == Starred_kind && !seen_star) {
if ((i >= (1 << 8)) ||
- (n-i-1 >= (INT_MAX >> 8)))
+ (n-i-1 >= (INT_MAX >> 8))) {
return compiler_error(c, loc,
"too many expressions in "
"star-unpacking assignment");
+ }
ADDOP_I(c, loc, UNPACK_EX, (i + ((n-i-1) << 8)));
seen_star = 1;
}
@@ -4465,19 +4422,19 @@ unpack_helper(struct compiler *c, location loc, asdl_expr_seq *elts)
if (!seen_star) {
ADDOP_I(c, loc, UNPACK_SEQUENCE, n);
}
- return 1;
+ return SUCCESS;
}
static int
assignment_helper(struct compiler *c, location loc, asdl_expr_seq *elts)
{
Py_ssize_t n = asdl_seq_LEN(elts);
- RETURN_IF_FALSE(unpack_helper(c, loc, elts));
+ RETURN_IF_ERROR(unpack_helper(c, loc, elts));
for (Py_ssize_t i = 0; i < n; i++) {
expr_ty elt = asdl_seq_GET(elts, i);
VISIT(c, expr, elt->kind != Starred_kind ? elt : elt->v.Starred.value);
}
- return 1;
+ return SUCCESS;
}
static int
@@ -4492,9 +4449,10 @@ compiler_list(struct compiler *c, expr_ty e)
return starunpack_helper(c, loc, elts, 0,
BUILD_LIST, LIST_APPEND, LIST_EXTEND, 0);
}
- else
+ else {
VISIT_SEQ(c, expr, elts);
- return 1;
+ }
+ return SUCCESS;
}
static int
@@ -4509,9 +4467,10 @@ compiler_tuple(struct compiler *c, expr_ty e)
return starunpack_helper(c, loc, elts, 0,
BUILD_LIST, LIST_APPEND, LIST_EXTEND, 1);
}
- else
+ else {
VISIT_SEQ(c, expr, elts);
- return 1;
+ }
+ return SUCCESS;
}
static int
@@ -4522,16 +4481,16 @@ compiler_set(struct compiler *c, expr_ty e)
BUILD_SET, SET_ADD, SET_UPDATE, 0);
}
-static int
+static bool
are_all_items_const(asdl_expr_seq *seq, Py_ssize_t begin, Py_ssize_t end)
{
- Py_ssize_t i;
- for (i = begin; i < end; i++) {
+ for (Py_ssize_t i = begin; i < end; i++) {
expr_ty key = (expr_ty)asdl_seq_GET(seq, i);
- if (key == NULL || key->kind != Constant_kind)
- return 0;
+ if (key == NULL || key->kind != Constant_kind) {
+ return false;
+ }
}
- return 1;
+ return true;
}
static int
@@ -4547,7 +4506,7 @@ compiler_subdict(struct compiler *c, expr_ty e, Py_ssize_t begin, Py_ssize_t end
}
keys = PyTuple_New(n);
if (keys == NULL) {
- return 0;
+            return ERROR;
}
for (i = begin; i < end; i++) {
key = ((expr_ty)asdl_seq_GET(e->v.Dict.keys, i))->v.Constant.value;
@@ -4555,7 +4514,7 @@ compiler_subdict(struct compiler *c, expr_ty e, Py_ssize_t begin, Py_ssize_t end
}
ADDOP_LOAD_CONST_NEW(c, loc, keys);
ADDOP_I(c, loc, BUILD_CONST_KEY_MAP, n);
- return 1;
+ return SUCCESS;
}
if (big) {
ADDOP_I(c, loc, BUILD_MAP, 0);
@@ -4570,7 +4529,7 @@ compiler_subdict(struct compiler *c, expr_ty e, Py_ssize_t begin, Py_ssize_t end
if (!big) {
ADDOP_I(c, loc, BUILD_MAP, n);
}
- return 1;
+ return SUCCESS;
}
static int
@@ -4587,9 +4546,7 @@ compiler_dict(struct compiler *c, expr_ty e)
is_unpacking = (expr_ty)asdl_seq_GET(e->v.Dict.keys, i) == NULL;
if (is_unpacking) {
if (elements) {
- if (!compiler_subdict(c, e, i - elements, i)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_subdict(c, e, i - elements, i));
if (have_dict) {
ADDOP_I(c, loc, DICT_UPDATE, 1);
}
@@ -4605,9 +4562,7 @@ compiler_dict(struct compiler *c, expr_ty e)
}
else {
if (elements*2 > STACK_USE_GUIDELINE) {
- if (!compiler_subdict(c, e, i - elements, i + 1)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_subdict(c, e, i - elements, i + 1));
if (have_dict) {
ADDOP_I(c, loc, DICT_UPDATE, 1);
}
@@ -4620,9 +4575,7 @@ compiler_dict(struct compiler *c, expr_ty e)
}
}
if (elements) {
- if (!compiler_subdict(c, e, n - elements, n)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_subdict(c, e, n - elements, n));
if (have_dict) {
ADDOP_I(c, loc, DICT_UPDATE, 1);
}
@@ -4631,7 +4584,7 @@ compiler_dict(struct compiler *c, expr_ty e)
if (!have_dict) {
ADDOP_I(c, loc, BUILD_MAP, 0);
}
- return 1;
+ return SUCCESS;
}
static int
@@ -4640,9 +4593,7 @@ compiler_compare(struct compiler *c, expr_ty e)
location loc = LOC(e);
Py_ssize_t i, n;
- if (!check_compare(c, e)) {
- return 0;
- }
+ RETURN_IF_ERROR(check_compare(c, e));
VISIT(c, expr, e->v.Compare.left);
assert(asdl_seq_LEN(e->v.Compare.ops) > 0);
n = asdl_seq_LEN(e->v.Compare.ops) - 1;
@@ -4671,7 +4622,7 @@ compiler_compare(struct compiler *c, expr_ty e)
USE_LABEL(c, end);
}
- return 1;
+ return SUCCESS;
}
static PyTypeObject *
@@ -4724,7 +4675,7 @@ check_caller(struct compiler *c, expr_ty e)
infer_type(e)->tp_name);
}
default:
- return 1;
+ return SUCCESS;
}
}
@@ -4740,7 +4691,7 @@ check_subscripter(struct compiler *c, expr_ty e)
PyLong_Check(v) || PyFloat_Check(v) || PyComplex_Check(v) ||
PyAnySet_Check(v)))
{
- return 1;
+ return SUCCESS;
}
/* fall through */
case Set_kind:
@@ -4753,7 +4704,7 @@ check_subscripter(struct compiler *c, expr_ty e)
infer_type(e)->tp_name);
}
default:
- return 1;
+ return SUCCESS;
}
}
@@ -4766,14 +4717,14 @@ check_index(struct compiler *c, expr_ty e, expr_ty s)
if (index_type == NULL
|| PyType_FastSubclass(index_type, Py_TPFLAGS_LONG_SUBCLASS)
|| index_type == &PySlice_Type) {
- return 1;
+ return SUCCESS;
}
switch (e->kind) {
case Constant_kind:
v = e->v.Constant.value;
if (!(PyUnicode_Check(v) || PyBytes_Check(v) || PyTuple_Check(v))) {
- return 1;
+ return SUCCESS;
}
/* fall through */
case Tuple_kind:
@@ -4789,7 +4740,7 @@ check_index(struct compiler *c, expr_ty e, expr_ty s)
index_type->tp_name);
}
default:
- return 1;
+ return SUCCESS;
}
}
@@ -4838,7 +4789,7 @@ update_start_location_to_match_attr(struct compiler *c, location loc,
return loc;
}
-// Return 1 if the method call was optimized, -1 if not, and 0 on error.
+// Return 1 if the method call was optimized, 0 if not, and -1 on error.
static int
maybe_optimize_method_call(struct compiler *c, expr_ty e)
{
@@ -4849,32 +4800,32 @@ maybe_optimize_method_call(struct compiler *c, expr_ty e)
/* Check that the call node is an attribute access */
if (meth->kind != Attribute_kind || meth->v.Attribute.ctx != Load) {
- return -1;
+ return 0;
}
/* Check that the base object is not something that is imported */
if (is_import_originated(c, meth->v.Attribute.value)) {
- return -1;
+ return 0;
}
/* Check that there aren't too many arguments */
argsl = asdl_seq_LEN(args);
kwdsl = asdl_seq_LEN(kwds);
if (argsl + kwdsl + (kwdsl != 0) >= STACK_USE_GUIDELINE) {
- return -1;
+ return 0;
}
/* Check that there are no *varargs types of arguments. */
for (i = 0; i < argsl; i++) {
expr_ty elt = asdl_seq_GET(args, i);
if (elt->kind == Starred_kind) {
- return -1;
+ return 0;
}
}
for (i = 0; i < kwdsl; i++) {
keyword_ty kw = asdl_seq_GET(kwds, i);
if (kw->arg == NULL) {
- return -1;
+ return 0;
}
}
/* Alright, we can optimize the code. */
@@ -4886,9 +4837,8 @@ maybe_optimize_method_call(struct compiler *c, expr_ty e)
if (kwdsl) {
VISIT_SEQ(c, keyword, kwds);
- if (!compiler_call_simple_kw_helper(c, loc, kwds, kwdsl)) {
- return 0;
- };
+ RETURN_IF_ERROR(
+ compiler_call_simple_kw_helper(c, loc, kwds, kwdsl));
}
loc = update_start_location_to_match_attr(c, LOC(e), meth);
ADDOP_I(c, loc, CALL, argsl + kwdsl);
@@ -4906,32 +4856,31 @@ validate_keywords(struct compiler *c, asdl_keyword_seq *keywords)
}
location loc = LOC(key);
if (forbidden_name(c, loc, key->arg, Store)) {
- return -1;
+ return ERROR;
}
for (Py_ssize_t j = i + 1; j < nkeywords; j++) {
keyword_ty other = ((keyword_ty)asdl_seq_GET(keywords, j));
if (other->arg && !PyUnicode_Compare(key->arg, other->arg)) {
compiler_error(c, LOC(other), "keyword argument repeated: %U", key->arg);
- return -1;
+ return ERROR;
}
}
}
- return 0;
+ return SUCCESS;
}
static int
compiler_call(struct compiler *c, expr_ty e)
{
- if (validate_keywords(c, e->v.Call.keywords) == -1) {
- return 0;
- }
+ RETURN_IF_ERROR(validate_keywords(c, e->v.Call.keywords));
int ret = maybe_optimize_method_call(c, e);
- if (ret >= 0) {
- return ret;
+ if (ret < 0) {
+ return ERROR;
}
- if (!check_caller(c, e->v.Call.func)) {
- return 0;
+ if (ret == 1) {
+ return SUCCESS;
}
+ RETURN_IF_ERROR(check_caller(c, e->v.Call.func));
location loc = LOC(e->v.Call.func);
ADDOP(c, loc, PUSH_NULL);
VISIT(c, expr, e->v.Call.func);
@@ -4963,7 +4912,7 @@ compiler_joined_str(struct compiler *c, expr_ty e)
ADDOP_I(c, loc, BUILD_STRING, asdl_seq_LEN(e->v.JoinedStr.values));
}
}
- return 1;
+ return SUCCESS;
}
/* Used to implement f-strings. Format a single value. */
@@ -4998,7 +4947,7 @@ compiler_formatted_value(struct compiler *c, expr_ty e)
default:
PyErr_Format(PyExc_SystemError,
"Unrecognized conversion character %d", conversion);
- return 0;
+ return ERROR;
}
if (e->v.FormattedValue.format_spec) {
/* Evaluate the format spec, and update our opcode arg. */
@@ -5010,7 +4959,7 @@ compiler_formatted_value(struct compiler *c, expr_ty e)
location loc = LOC(e);
ADDOP_I(c, loc, FORMAT_VALUE, oparg);
- return 1;
+ return SUCCESS;
}
static int
@@ -5030,7 +4979,7 @@ compiler_subkwargs(struct compiler *c, location loc,
}
keys = PyTuple_New(n);
if (keys == NULL) {
- return 0;
+ return ERROR;
}
for (i = begin; i < end; i++) {
key = ((keyword_ty) asdl_seq_GET(keywords, i))->arg;
@@ -5038,7 +4987,7 @@ compiler_subkwargs(struct compiler *c, location loc,
}
ADDOP_LOAD_CONST_NEW(c, loc, keys);
ADDOP_I(c, loc, BUILD_CONST_KEY_MAP, n);
- return 1;
+ return SUCCESS;
}
if (big) {
ADDOP_I(c, NO_LOCATION, BUILD_MAP, 0);
@@ -5054,12 +5003,11 @@ compiler_subkwargs(struct compiler *c, location loc,
if (!big) {
ADDOP_I(c, loc, BUILD_MAP, n);
}
- return 1;
+ return SUCCESS;
}
/* Used by compiler_call_helper and maybe_optimize_method_call to emit
* KW_NAMES before CALL.
- * Returns 1 on success, 0 on error.
*/
static int
compiler_call_simple_kw_helper(struct compiler *c, location loc,
@@ -5068,7 +5016,7 @@ compiler_call_simple_kw_helper(struct compiler *c, location loc,
PyObject *names;
names = PyTuple_New(nkwelts);
if (names == NULL) {
- return 0;
+ return ERROR;
}
for (int i = 0; i < nkwelts; i++) {
keyword_ty kw = asdl_seq_GET(keywords, i);
@@ -5076,11 +5024,11 @@ compiler_call_simple_kw_helper(struct compiler *c, location loc,
}
Py_ssize_t arg = compiler_add_const(c, names);
if (arg < 0) {
- return 0;
+ return ERROR;
}
Py_DECREF(names);
ADDOP_I(c, loc, KW_NAMES, arg);
- return 1;
+ return SUCCESS;
}
@@ -5093,9 +5041,7 @@ compiler_call_helper(struct compiler *c, location loc,
{
Py_ssize_t i, nseen, nelts, nkwelts;
- if (validate_keywords(c, keywords) == -1) {
- return 0;
- }
+ RETURN_IF_ERROR(validate_keywords(c, keywords));
nelts = asdl_seq_LEN(args);
nkwelts = asdl_seq_LEN(keywords);
@@ -5124,12 +5070,11 @@ compiler_call_helper(struct compiler *c, location loc,
}
if (nkwelts) {
VISIT_SEQ(c, keyword, keywords);
- if (!compiler_call_simple_kw_helper(c, loc, keywords, nkwelts)) {
- return 0;
- };
+ RETURN_IF_ERROR(
+ compiler_call_simple_kw_helper(c, loc, keywords, nkwelts));
}
ADDOP_I(c, loc, CALL, n + nelts + nkwelts);
- return 1;
+ return SUCCESS;
ex_call:
@@ -5138,8 +5083,8 @@ compiler_call_helper(struct compiler *c, location loc,
VISIT(c, expr, ((expr_ty)asdl_seq_GET(args, 0))->v.Starred.value);
}
else if (starunpack_helper(c, loc, args, n, BUILD_LIST,
- LIST_APPEND, LIST_EXTEND, 1) == 0) {
- return 0;
+ LIST_APPEND, LIST_EXTEND, 1) < 0) {
+ return ERROR;
}
/* Then keyword arguments */
if (nkwelts) {
@@ -5152,9 +5097,7 @@ compiler_call_helper(struct compiler *c, location loc,
if (kw->arg == NULL) {
/* A keyword argument unpacking. */
if (nseen) {
- if (!compiler_subkwargs(c, loc, keywords, i - nseen, i)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_subkwargs(c, loc, keywords, i - nseen, i));
if (have_dict) {
ADDOP_I(c, loc, DICT_MERGE, 1);
}
@@ -5174,9 +5117,7 @@ compiler_call_helper(struct compiler *c, location loc,
}
if (nseen) {
/* Pack up any trailing keyword arguments. */
- if (!compiler_subkwargs(c, loc, keywords, nkwelts - nseen, nkwelts)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_subkwargs(c, loc, keywords, nkwelts - nseen, nkwelts));
if (have_dict) {
ADDOP_I(c, loc, DICT_MERGE, 1);
}
@@ -5185,7 +5126,7 @@ compiler_call_helper(struct compiler *c, location loc,
assert(have_dict);
}
ADDOP_I(c, loc, CALL_FUNCTION_EX, nkwelts > 0);
- return 1;
+ return SUCCESS;
}
@@ -5280,17 +5221,14 @@ compiler_sync_comprehension_generator(struct compiler *c, location loc,
Py_ssize_t n = asdl_seq_LEN(gen->ifs);
for (Py_ssize_t i = 0; i < n; i++) {
expr_ty e = (expr_ty)asdl_seq_GET(gen->ifs, i);
- if (!compiler_jump_if(c, loc, e, if_cleanup, 0)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_jump_if(c, loc, e, if_cleanup, 0));
}
if (++gen_index < asdl_seq_LEN(generators)) {
- if (!compiler_comprehension_generator(c, loc,
- generators, gen_index, depth,
- elt, val, type)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_comprehension_generator(c, loc,
+ generators, gen_index, depth,
+ elt, val, type));
}
location elt_loc = LOC(elt);
@@ -5324,7 +5262,7 @@ compiler_sync_comprehension_generator(struct compiler *c, location loc,
ADDOP_I(c, elt_loc, MAP_ADD, depth + 1);
break;
default:
- return 0;
+ return ERROR;
}
}
@@ -5336,7 +5274,7 @@ compiler_sync_comprehension_generator(struct compiler *c, location loc,
ADDOP(c, NO_LOCATION, END_FOR);
}
- return 1;
+ return SUCCESS;
}
static int
@@ -5365,10 +5303,9 @@ compiler_async_comprehension_generator(struct compiler *c, location loc,
USE_LABEL(c, start);
/* Runtime will push a block here, so we need to account for that */
- if (!compiler_push_fblock(c, loc, ASYNC_COMPREHENSION_GENERATOR,
- start, NO_LABEL, NULL)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_push_fblock(c, loc, ASYNC_COMPREHENSION_GENERATOR,
+ start, NO_LABEL, NULL));
ADDOP_JUMP(c, loc, SETUP_FINALLY, except);
ADDOP(c, loc, GET_ANEXT);
@@ -5380,18 +5317,15 @@ compiler_async_comprehension_generator(struct compiler *c, location loc,
Py_ssize_t n = asdl_seq_LEN(gen->ifs);
for (Py_ssize_t i = 0; i < n; i++) {
expr_ty e = (expr_ty)asdl_seq_GET(gen->ifs, i);
- if (!compiler_jump_if(c, loc, e, if_cleanup, 0)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_jump_if(c, loc, e, if_cleanup, 0));
}
depth++;
if (++gen_index < asdl_seq_LEN(generators)) {
- if (!compiler_comprehension_generator(c, loc,
- generators, gen_index, depth,
- elt, val, type)) {
- return 0;
- }
+ RETURN_IF_ERROR(
+ compiler_comprehension_generator(c, loc,
+ generators, gen_index, depth,
+ elt, val, type));
}
location elt_loc = LOC(elt);
@@ -5424,7 +5358,7 @@ compiler_async_comprehension_generator(struct compiler *c, location loc,
ADDOP_I(c, elt_loc, MAP_ADD, depth + 1);
break;
default:
- return 0;
+ return ERROR;
}
}
@@ -5437,7 +5371,7 @@ compiler_async_comprehension_generator(struct compiler *c, location loc,
ADDOP(c, loc, END_ASYNC_FOR);
- return 1;
+ return SUCCESS;
}
static int
@@ -5453,8 +5387,8 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type,
int is_top_level_await = IS_TOP_LEVEL_AWAIT(c);
outermost = (comprehension_ty) asdl_seq_GET(generators, 0);
- if (!compiler_enter_scope(c, name, COMPILER_SCOPE_COMPREHENSION,
- (void *)e, e->lineno))
+ if (compiler_enter_scope(c, name, COMPILER_SCOPE_COMPREHENSION,
+ (void *)e, e->lineno) < 0)
{
goto error;
}
@@ -5493,8 +5427,8 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type,
ADDOP_I(c, loc, op, 0);
}
- if (!compiler_comprehension_generator(c, loc, generators, 0, 0,
- elt, val, type)) {
+ if (compiler_comprehension_generator(c, loc, generators, 0, 0,
+ elt, val, type) < 0) {
goto error_in_scope;
}
@@ -5502,7 +5436,7 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type,
ADDOP(c, LOC(e), RETURN_VALUE);
}
if (type == COMP_GENEXP) {
- if (!wrap_in_stopiteration_handler(c)) {
+ if (wrap_in_stopiteration_handler(c) < 0) {
goto error_in_scope;
}
}
@@ -5513,11 +5447,12 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type,
if (is_top_level_await && is_async_generator){
c->u->u_ste->ste_coroutine = 1;
}
- if (co == NULL)
+ if (co == NULL) {
goto error;
+ }
loc = LOC(e);
- if (!compiler_make_closure(c, loc, co, 0, qualname)) {
+ if (compiler_make_closure(c, loc, co, 0, qualname) < 0) {
goto error;
}
Py_DECREF(qualname);
@@ -5540,13 +5475,13 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type,
ADD_YIELD_FROM(c, loc, 1);
}
- return 1;
+ return SUCCESS;
error_in_scope:
compiler_exit_scope(c);
error:
Py_XDECREF(qualname);
Py_XDECREF(co);
- return 0;
+ return ERROR;
}
static int
@@ -5595,7 +5530,7 @@ static int
compiler_visit_keyword(struct compiler *c, keyword_ty k)
{
VISIT(c, expr, k->value);
- return 1;
+ return SUCCESS;
}
@@ -5618,7 +5553,7 @@ compiler_with_except_finish(struct compiler *c, jump_target_label cleanup) {
POP_EXCEPT_AND_RERAISE(c, NO_LOCATION);
USE_LABEL(c, exit);
- return 1;
+ return SUCCESS;
}
/*
@@ -5675,15 +5610,13 @@ compiler_async_with(struct compiler *c, stmt_ty s, int pos)
/* SETUP_WITH pushes a finally block. */
USE_LABEL(c, block);
- if (!compiler_push_fblock(c, loc, ASYNC_WITH, block, final, s)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_push_fblock(c, loc, ASYNC_WITH, block, final, s));
if (item->optional_vars) {
VISIT(c, expr, item->optional_vars);
}
else {
- /* Discard result from context.__aenter__() */
+ /* Discard result from context.__aenter__() */
ADDOP(c, loc, POP_TOP);
}
@@ -5692,8 +5625,8 @@ compiler_async_with(struct compiler *c, stmt_ty s, int pos)
/* BLOCK code */
VISIT_SEQ(c, stmt, s->v.AsyncWith.body)
}
- else if (!compiler_async_with(c, s, pos)) {
- return 0;
+ else {
+ RETURN_IF_ERROR(compiler_async_with(c, s, pos));
}
compiler_pop_fblock(c, ASYNC_WITH, block);
@@ -5704,8 +5637,7 @@ compiler_async_with(struct compiler *c, stmt_ty s, int pos)
/* For successful outcome:
* call __exit__(None, None, None)
*/
- if(!compiler_call_exit_with_nones(c, loc))
- return 0;
+ RETURN_IF_ERROR(compiler_call_exit_with_nones(c, loc));
ADDOP_I(c, loc, GET_AWAITABLE, 2);
ADDOP_LOAD_CONST(c, loc, Py_None);
ADD_YIELD_FROM(c, loc, 1);
@@ -5723,10 +5655,10 @@ compiler_async_with(struct compiler *c, stmt_ty s, int pos)
ADDOP_I(c, loc, GET_AWAITABLE, 2);
ADDOP_LOAD_CONST(c, loc, Py_None);
ADD_YIELD_FROM(c, loc, 1);
- compiler_with_except_finish(c, cleanup);
+ RETURN_IF_ERROR(compiler_with_except_finish(c, cleanup));
USE_LABEL(c, exit);
- return 1;
+ return SUCCESS;
}
@@ -5772,9 +5704,7 @@ compiler_with(struct compiler *c, stmt_ty s, int pos)
/* SETUP_WITH pushes a finally block. */
USE_LABEL(c, block);
- if (!compiler_push_fblock(c, loc, WITH, block, final, s)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_push_fblock(c, loc, WITH, block, final, s));
if (item->optional_vars) {
VISIT(c, expr, item->optional_vars);
@@ -5785,11 +5715,13 @@ compiler_with(struct compiler *c, stmt_ty s, int pos)
}
pos++;
- if (pos == asdl_seq_LEN(s->v.With.items))
+ if (pos == asdl_seq_LEN(s->v.With.items)) {
/* BLOCK code */
VISIT_SEQ(c, stmt, s->v.With.body)
- else if (!compiler_with(c, s, pos))
- return 0;
+ }
+ else {
+ RETURN_IF_ERROR(compiler_with(c, s, pos));
+ }
ADDOP(c, NO_LOCATION, POP_BLOCK);
compiler_pop_fblock(c, WITH, block);
@@ -5800,8 +5732,7 @@ compiler_with(struct compiler *c, stmt_ty s, int pos)
* call __exit__(None, None, None)
*/
loc = LOC(s);
- if (!compiler_call_exit_with_nones(c, loc))
- return 0;
+ RETURN_IF_ERROR(compiler_call_exit_with_nones(c, loc));
ADDOP(c, loc, POP_TOP);
ADDOP_JUMP(c, loc, JUMP, exit);
@@ -5811,10 +5742,10 @@ compiler_with(struct compiler *c, stmt_ty s, int pos)
ADDOP_JUMP(c, loc, SETUP_CLEANUP, cleanup);
ADDOP(c, loc, PUSH_EXC_INFO);
ADDOP(c, loc, WITH_EXCEPT_START);
- compiler_with_except_finish(c, cleanup);
+ RETURN_IF_ERROR(compiler_with_except_finish(c, cleanup));
USE_LABEL(c, exit);
- return 1;
+ return SUCCESS;
}
static int
@@ -5855,8 +5786,9 @@ compiler_visit_expr1(struct compiler *c, expr_ty e)
case DictComp_kind:
return compiler_dictcomp(c, e);
case Yield_kind:
- if (c->u->u_ste->ste_type != FunctionBlock)
+ if (c->u->u_ste->ste_type != FunctionBlock) {
return compiler_error(c, loc, "'yield' outside function");
+ }
if (e->v.Yield.value) {
VISIT(c, expr, e->v.Yield.value);
}
@@ -5866,12 +5798,12 @@ compiler_visit_expr1(struct compiler *c, expr_ty e)
ADDOP_YIELD(c, loc);
break;
case YieldFrom_kind:
- if (c->u->u_ste->ste_type != FunctionBlock)
+ if (c->u->u_ste->ste_type != FunctionBlock) {
return compiler_error(c, loc, "'yield' outside function");
-
- if (c->u->u_scope_type == COMPILER_SCOPE_ASYNC_FUNCTION)
+ }
+ if (c->u->u_scope_type == COMPILER_SCOPE_ASYNC_FUNCTION) {
return compiler_error(c, loc, "'yield from' inside async function");
-
+ }
VISIT(c, expr, e->v.YieldFrom.value);
ADDOP(c, loc, GET_YIELD_FROM_ITER);
ADDOP_LOAD_CONST(c, loc, Py_None);
@@ -5884,7 +5816,7 @@ compiler_visit_expr1(struct compiler *c, expr_ty e)
}
if (c->u->u_scope_type != COMPILER_SCOPE_ASYNC_FUNCTION &&
- c->u->u_scope_type != COMPILER_SCOPE_COMPREHENSION){
+ c->u->u_scope_type != COMPILER_SCOPE_COMPREHENSION) {
return compiler_error(c, loc, "'await' outside async function");
}
}
@@ -5916,7 +5848,7 @@ compiler_visit_expr1(struct compiler *c, expr_ty e)
break;
case Store:
if (forbidden_name(c, loc, e->v.Attribute.attr, e->v.Attribute.ctx)) {
- return 0;
+ return ERROR;
}
ADDOP_NAME(c, loc, STORE_ATTR, e->v.Attribute.attr, names);
break;
@@ -5942,9 +5874,7 @@ compiler_visit_expr1(struct compiler *c, expr_ty e)
case Slice_kind:
{
int n = compiler_slice(c, e);
- if (n == 0) {
- return 0;
- }
+ RETURN_IF_ERROR(n);
ADDOP_I(c, loc, BUILD_SLICE, n);
break;
}
@@ -5956,7 +5886,7 @@ compiler_visit_expr1(struct compiler *c, expr_ty e)
case Tuple_kind:
return compiler_tuple(c, e);
}
- return 1;
+ return SUCCESS;
}
static int
@@ -5991,9 +5921,7 @@ compiler_augassign(struct compiler *c, stmt_ty s)
case Subscript_kind:
VISIT(c, expr, e->v.Subscript.value);
if (is_two_element_slice(e->v.Subscript.slice)) {
- if (!compiler_slice(c, e->v.Subscript.slice)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_slice(c, e->v.Subscript.slice));
ADDOP_I(c, loc, COPY, 3);
ADDOP_I(c, loc, COPY, 3);
ADDOP_I(c, loc, COPY, 3);
@@ -6007,14 +5935,13 @@ compiler_augassign(struct compiler *c, stmt_ty s)
}
break;
case Name_kind:
- if (!compiler_nameop(c, loc, e->v.Name.id, Load))
- return 0;
+ RETURN_IF_ERROR(compiler_nameop(c, loc, e->v.Name.id, Load));
break;
default:
PyErr_Format(PyExc_SystemError,
"invalid node type (%d) for augmented assignment",
e->kind);
- return 0;
+ return ERROR;
}
loc = LOC(s);
@@ -6048,7 +5975,7 @@ compiler_augassign(struct compiler *c, stmt_ty s)
default:
Py_UNREACHABLE();
}
- return 1;
+ return SUCCESS;
}
static int
@@ -6056,7 +5983,7 @@ check_ann_expr(struct compiler *c, expr_ty e)
{
VISIT(c, expr, e);
ADDOP(c, LOC(e), POP_TOP);
- return 1;
+ return SUCCESS;
}
static int
@@ -6065,7 +5992,7 @@ check_annotation(struct compiler *c, stmt_ty s)
/* Annotations of complex targets does not produce anything
under annotations future */
if (c->c_future.ff_features & CO_FUTURE_ANNOTATIONS) {
- return 1;
+ return SUCCESS;
}
/* Annotations are only evaluated in a module or class. */
@@ -6073,7 +6000,7 @@ check_annotation(struct compiler *c, stmt_ty s)
c->u->u_scope_type == COMPILER_SCOPE_CLASS) {
return check_ann_expr(c, s->v.AnnAssign.annotation);
}
- return 1;
+ return SUCCESS;
}
static int
@@ -6082,26 +6009,24 @@ check_ann_subscr(struct compiler *c, expr_ty e)
/* We check that everything in a subscript is defined at runtime. */
switch (e->kind) {
case Slice_kind:
- if (e->v.Slice.lower && !check_ann_expr(c, e->v.Slice.lower)) {
- return 0;
+ if (e->v.Slice.lower && check_ann_expr(c, e->v.Slice.lower) < 0) {
+ return ERROR;
}
- if (e->v.Slice.upper && !check_ann_expr(c, e->v.Slice.upper)) {
- return 0;
+ if (e->v.Slice.upper && check_ann_expr(c, e->v.Slice.upper) < 0) {
+ return ERROR;
}
- if (e->v.Slice.step && !check_ann_expr(c, e->v.Slice.step)) {
- return 0;
+ if (e->v.Slice.step && check_ann_expr(c, e->v.Slice.step) < 0) {
+ return ERROR;
}
- return 1;
+ return SUCCESS;
case Tuple_kind: {
/* extended slice */
asdl_expr_seq *elts = e->v.Tuple.elts;
Py_ssize_t i, n = asdl_seq_LEN(elts);
for (i = 0; i < n; i++) {
- if (!check_ann_subscr(c, asdl_seq_GET(elts, i))) {
- return 0;
- }
+ RETURN_IF_ERROR(check_ann_subscr(c, asdl_seq_GET(elts, i)));
}
- return 1;
+ return SUCCESS;
}
default:
return check_ann_expr(c, e);
@@ -6124,8 +6049,9 @@ compiler_annassign(struct compiler *c, stmt_ty s)
}
switch (targ->kind) {
case Name_kind:
- if (forbidden_name(c, loc, targ->v.Name.id, Store))
- return 0;
+ if (forbidden_name(c, loc, targ->v.Name.id, Store)) {
+ return ERROR;
+ }
/* If we have a simple name in a module or class, store annotation. */
if (s->v.AnnAssign.simple &&
(c->u->u_scope_type == COMPILER_SCOPE_MODULE ||
@@ -6143,31 +6069,32 @@ compiler_annassign(struct compiler *c, stmt_ty s)
}
break;
case Attribute_kind:
- if (forbidden_name(c, loc, targ->v.Attribute.attr, Store))
- return 0;
+ if (forbidden_name(c, loc, targ->v.Attribute.attr, Store)) {
+ return ERROR;
+ }
if (!s->v.AnnAssign.value &&
- !check_ann_expr(c, targ->v.Attribute.value)) {
- return 0;
+ check_ann_expr(c, targ->v.Attribute.value) < 0) {
+ return ERROR;
}
break;
case Subscript_kind:
if (!s->v.AnnAssign.value &&
- (!check_ann_expr(c, targ->v.Subscript.value) ||
- !check_ann_subscr(c, targ->v.Subscript.slice))) {
- return 0;
+ (check_ann_expr(c, targ->v.Subscript.value) < 0 ||
+ check_ann_subscr(c, targ->v.Subscript.slice) < 0)) {
+ return ERROR;
}
break;
default:
PyErr_Format(PyExc_SystemError,
"invalid node type (%d) for annotated assignment",
targ->kind);
- return 0;
+ return ERROR;
}
/* Annotation is evaluated last. */
- if (!s->v.AnnAssign.simple && !check_annotation(c, s)) {
- return 0;
+ if (!s->v.AnnAssign.simple && check_annotation(c, s) < 0) {
+ return ERROR;
}
- return 1;
+ return SUCCESS;
}
/* Raises a SyntaxError and returns 0.
@@ -6183,7 +6110,7 @@ compiler_error(struct compiler *c, location loc,
PyObject *msg = PyUnicode_FromFormatV(format, vargs);
va_end(vargs);
if (msg == NULL) {
- return 0;
+ return ERROR;
}
PyObject *loc_obj = PyErr_ProgramTextObject(c->c_filename, loc.lineno);
if (loc_obj == NULL) {
@@ -6200,7 +6127,7 @@ compiler_error(struct compiler *c, location loc,
exit:
Py_DECREF(loc_obj);
Py_XDECREF(args);
- return 0;
+ return ERROR;
}
/* Emits a SyntaxWarning and returns 1 on success.
@@ -6216,7 +6143,7 @@ compiler_warn(struct compiler *c, location loc,
PyObject *msg = PyUnicode_FromFormatV(format, vargs);
va_end(vargs);
if (msg == NULL) {
- return 0;
+ return ERROR;
}
if (PyErr_WarnExplicitObject(PyExc_SyntaxWarning, msg, c->c_filename,
loc.lineno, NULL, NULL) < 0)
@@ -6229,10 +6156,10 @@ compiler_warn(struct compiler *c, location loc,
compiler_error(c, loc, PyUnicode_AsUTF8(msg));
}
Py_DECREF(msg);
- return 0;
+ return ERROR;
}
Py_DECREF(msg);
- return 1;
+ return SUCCESS;
}
static int
@@ -6243,19 +6170,13 @@ compiler_subscript(struct compiler *c, expr_ty e)
int op = 0;
if (ctx == Load) {
- if (!check_subscripter(c, e->v.Subscript.value)) {
- return 0;
- }
- if (!check_index(c, e->v.Subscript.value, e->v.Subscript.slice)) {
- return 0;
- }
+ RETURN_IF_ERROR(check_subscripter(c, e->v.Subscript.value));
+ RETURN_IF_ERROR(check_index(c, e->v.Subscript.value, e->v.Subscript.slice));
}
VISIT(c, expr, e->v.Subscript.value);
if (is_two_element_slice(e->v.Subscript.slice) && ctx != Del) {
- if (!compiler_slice(c, e->v.Subscript.slice)) {
- return 0;
- }
+ RETURN_IF_ERROR(compiler_slice(c, e->v.Subscript.slice));
if (ctx == Load) {
ADDOP(c, loc, BINARY_SLICE);
}
@@ -6274,11 +6195,11 @@ compiler_subscript(struct compiler *c, expr_ty e)
assert(op);
ADDOP(c, loc, op);
}
- return 1;
+ return SUCCESS;
}
/* Returns the number of the values emitted,
- * thus are needed to build the slice, or 0 if there is an error. */
+ * which are needed to build the slice, or -1 if there is an error. */
static int
compiler_slice(struct compiler *c, expr_ty s)
{
@@ -6336,20 +6257,20 @@ ensure_fail_pop(struct compiler *c, pattern_context *pc, Py_ssize_t n)
{
Py_ssize_t size = n + 1;
if (size <= pc->fail_pop_size) {
- return 1;
+ return SUCCESS;
}
Py_ssize_t needed = sizeof(jump_target_label) * size;
jump_target_label *resized = PyObject_Realloc(pc->fail_pop, needed);
if (resized == NULL) {
PyErr_NoMemory();
- return 0;
+ return ERROR;
}
pc->fail_pop = resized;
while (pc->fail_pop_size < size) {
NEW_JUMP_TARGET_LABEL(c, new_block);
pc->fail_pop[pc->fail_pop_size++] = new_block;
}
- return 1;
+ return SUCCESS;
}
// Use op to jump to the correct fail_pop block.
@@ -6360,9 +6281,9 @@ jump_to_fail_pop(struct compiler *c, location loc,
// Pop any items on the top of the stack, plus any objects we were going to
// capture on success:
Py_ssize_t pops = pc->on_top + PyList_GET_SIZE(pc->stores);
- RETURN_IF_FALSE(ensure_fail_pop(c, pc, pops));
+ RETURN_IF_ERROR(ensure_fail_pop(c, pc, pops));
ADDOP_JUMP(c, loc, op, pc->fail_pop[pops]);
- return 1;
+ return SUCCESS;
}
// Build all of the fail_pop blocks and reset fail_pop.
@@ -6372,21 +6293,21 @@ emit_and_reset_fail_pop(struct compiler *c, location loc,
{
if (!pc->fail_pop_size) {
assert(pc->fail_pop == NULL);
- return 1;
+ return SUCCESS;
}
while (--pc->fail_pop_size) {
USE_LABEL(c, pc->fail_pop[pc->fail_pop_size]);
- if (!cfg_builder_addop_noarg(CFG_BUILDER(c), POP_TOP, loc)) {
+ if (cfg_builder_addop_noarg(CFG_BUILDER(c), POP_TOP, loc) < 0) {
pc->fail_pop_size = 0;
PyObject_Free(pc->fail_pop);
pc->fail_pop = NULL;
- return 0;
+ return ERROR;
}
}
USE_LABEL(c, pc->fail_pop[0]);
PyObject_Free(pc->fail_pop);
pc->fail_pop = NULL;
- return 1;
+ return SUCCESS;
}
static int
@@ -6403,7 +6324,7 @@ pattern_helper_rotate(struct compiler *c, location loc, Py_ssize_t count)
while (1 < count) {
ADDOP_I(c, loc, SWAP, count--);
}
- return 1;
+ return SUCCESS;
}
static int
@@ -6412,23 +6333,22 @@ pattern_helper_store_name(struct compiler *c, location loc,
{
if (n == NULL) {
ADDOP(c, loc, POP_TOP);
- return 1;
+ return SUCCESS;
}
if (forbidden_name(c, loc, n, Store)) {
- return 0;
+ return ERROR;
}
// Can't assign to the same name twice:
int duplicate = PySequence_Contains(pc->stores, n);
- if (duplicate < 0) {
- return 0;
- }
+ RETURN_IF_ERROR(duplicate);
if (duplicate) {
return compiler_error_duplicate_store(c, loc, n);
}
// Rotate this object underneath any items we need to preserve:
Py_ssize_t rotations = pc->on_top + PyList_GET_SIZE(pc->stores) + 1;
- RETURN_IF_FALSE(pattern_helper_rotate(c, loc, rotations));
- return !PyList_Append(pc->stores, n);
+ RETURN_IF_ERROR(pattern_helper_rotate(c, loc, rotations));
+ RETURN_IF_ERROR(PyList_Append(pc->stores, n));
+ return SUCCESS;
}
@@ -6442,10 +6362,11 @@ pattern_unpack_helper(struct compiler *c, location loc,
pattern_ty elt = asdl_seq_GET(elts, i);
if (elt->kind == MatchStar_kind && !seen_star) {
if ((i >= (1 << 8)) ||
- (n-i-1 >= (INT_MAX >> 8)))
+ (n-i-1 >= (INT_MAX >> 8))) {
return compiler_error(c, loc,
"too many expressions in "
"star-unpacking sequence pattern");
+ }
ADDOP_I(c, loc, UNPACK_EX, (i + ((n-i-1) << 8)));
seen_star = 1;
}
@@ -6457,7 +6378,7 @@ pattern_unpack_helper(struct compiler *c, location loc,
if (!seen_star) {
ADDOP_I(c, loc, UNPACK_SEQUENCE, n);
}
- return 1;
+ return SUCCESS;
}
static int
@@ -6465,7 +6386,7 @@ pattern_helper_sequence_unpack(struct compiler *c, location loc,
asdl_pattern_seq *patterns, Py_ssize_t star,
pattern_context *pc)
{
- RETURN_IF_FALSE(pattern_unpack_helper(c, loc, patterns));
+ RETURN_IF_ERROR(pattern_unpack_helper(c, loc, patterns));
Py_ssize_t size = asdl_seq_LEN(patterns);
// We've now got a bunch of new subjects on the stack. They need to remain
// there after each subpattern match:
@@ -6474,9 +6395,9 @@ pattern_helper_sequence_unpack(struct compiler *c, location loc,
// One less item to keep track of each time we loop through:
pc->on_top--;
pattern_ty pattern = asdl_seq_GET(patterns, i);
- RETURN_IF_FALSE(compiler_pattern_subpattern(c, pattern, pc));
+ RETURN_IF_ERROR(compiler_pattern_subpattern(c, pattern, pc));
}
- return 1;
+ return SUCCESS;
}
// Like pattern_helper_sequence_unpack, but uses BINARY_SUBSCR instead of
@@ -6511,12 +6432,12 @@ pattern_helper_sequence_subscr(struct compiler *c, location loc,
ADDOP_BINARY(c, loc, Sub);
}
ADDOP(c, loc, BINARY_SUBSCR);
- RETURN_IF_FALSE(compiler_pattern_subpattern(c, pattern, pc));
+ RETURN_IF_ERROR(compiler_pattern_subpattern(c, pattern, pc));
}
// Pop the subject, we're done with it:
pc->on_top--;
ADDOP(c, loc, POP_TOP);
- return 1;
+ return SUCCESS;
}
// Like compiler_pattern, but turn off checks for irrefutability.
@@ -6526,9 +6447,9 @@ compiler_pattern_subpattern(struct compiler *c,
{
int allow_irrefutable = pc->allow_irrefutable;
pc->allow_irrefutable = 1;
- RETURN_IF_FALSE(compiler_pattern(c, p, pc));
+ RETURN_IF_ERROR(compiler_pattern(c, p, pc));
pc->allow_irrefutable = allow_irrefutable;
- return 1;
+ return SUCCESS;
}
static int
@@ -6550,19 +6471,20 @@ compiler_pattern_as(struct compiler *c, pattern_ty p, pattern_context *pc)
// Need to make a copy for (possibly) storing later:
pc->on_top++;
ADDOP_I(c, LOC(p), COPY, 1);
- RETURN_IF_FALSE(compiler_pattern(c, p->v.MatchAs.pattern, pc));
+ RETURN_IF_ERROR(compiler_pattern(c, p->v.MatchAs.pattern, pc));
// Success! Store it:
pc->on_top--;
- RETURN_IF_FALSE(pattern_helper_store_name(c, LOC(p), p->v.MatchAs.name, pc));
- return 1;
+ RETURN_IF_ERROR(pattern_helper_store_name(c, LOC(p), p->v.MatchAs.name, pc));
+ return SUCCESS;
}
static int
compiler_pattern_star(struct compiler *c, pattern_ty p, pattern_context *pc)
{
assert(p->kind == MatchStar_kind);
- RETURN_IF_FALSE(pattern_helper_store_name(c, LOC(p), p->v.MatchStar.name, pc));
- return 1;
+ RETURN_IF_ERROR(
+ pattern_helper_store_name(c, LOC(p), p->v.MatchStar.name, pc));
+ return SUCCESS;
}
static int
@@ -6575,18 +6497,18 @@ validate_kwd_attrs(struct compiler *c, asdl_identifier_seq *attrs, asdl_pattern_
identifier attr = ((identifier)asdl_seq_GET(attrs, i));
location loc = LOC((pattern_ty) asdl_seq_GET(patterns, i));
if (forbidden_name(c, loc, attr, Store)) {
- return -1;
+ return ERROR;
}
for (Py_ssize_t j = i + 1; j < nattrs; j++) {
identifier other = ((identifier)asdl_seq_GET(attrs, j));
if (!PyUnicode_Compare(attr, other)) {
location loc = LOC((pattern_ty) asdl_seq_GET(patterns, j));
compiler_error(c, loc, "attribute name repeated in class pattern: %U", attr);
- return -1;
+ return ERROR;
}
}
}
- return 0;
+ return SUCCESS;
}
static int
@@ -6610,11 +6532,13 @@ compiler_pattern_class(struct compiler *c, pattern_ty p, pattern_context *pc)
return compiler_error(c, LOC(p), e, p->v.MatchClass.cls);
}
if (nattrs) {
- RETURN_IF_FALSE(!validate_kwd_attrs(c, kwd_attrs, kwd_patterns));
+ RETURN_IF_ERROR(validate_kwd_attrs(c, kwd_attrs, kwd_patterns));
}
VISIT(c, expr, p->v.MatchClass.cls);
- PyObject *attr_names;
- RETURN_IF_FALSE(attr_names = PyTuple_New(nattrs));
+ PyObject *attr_names = PyTuple_New(nattrs);
+ if (attr_names == NULL) {
+ return ERROR;
+ }
Py_ssize_t i;
for (i = 0; i < nattrs; i++) {
PyObject *name = asdl_seq_GET(kwd_attrs, i);
@@ -6627,7 +6551,7 @@ compiler_pattern_class(struct compiler *c, pattern_ty p, pattern_context *pc)
ADDOP_I(c, LOC(p), IS_OP, 1);
// TOS is now a tuple of (nargs + nattrs) attributes (or None):
pc->on_top++;
- RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
+ RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
ADDOP_I(c, LOC(p), UNPACK_SEQUENCE, nargs + nattrs);
pc->on_top += nargs + nattrs - 1;
for (i = 0; i < nargs + nattrs; i++) {
@@ -6645,10 +6569,10 @@ compiler_pattern_class(struct compiler *c, pattern_ty p, pattern_context *pc)
ADDOP(c, LOC(p), POP_TOP);
continue;
}
- RETURN_IF_FALSE(compiler_pattern_subpattern(c, pattern, pc));
+ RETURN_IF_ERROR(compiler_pattern_subpattern(c, pattern, pc));
}
// Success! Pop the tuple of attributes:
- return 1;
+ return SUCCESS;
}
static int
@@ -6671,19 +6595,19 @@ compiler_pattern_mapping(struct compiler *c, pattern_ty p,
// We need to keep the subject on top during the mapping and length checks:
pc->on_top++;
ADDOP(c, LOC(p), MATCH_MAPPING);
- RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
+ RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
if (!size && !star_target) {
// If the pattern is just "{}", we're done! Pop the subject:
pc->on_top--;
ADDOP(c, LOC(p), POP_TOP);
- return 1;
+ return SUCCESS;
}
if (size) {
// If the pattern has any keys in it, perform a length check:
ADDOP(c, LOC(p), GET_LEN);
ADDOP_LOAD_CONST_NEW(c, LOC(p), PyLong_FromSsize_t(size));
ADDOP_COMPARE(c, LOC(p), GtE);
- RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
+ RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
}
if (INT_MAX < size - 1) {
return compiler_error(c, LOC(p), "too many sub-patterns in mapping pattern");
@@ -6695,7 +6619,7 @@ compiler_pattern_mapping(struct compiler *c, pattern_ty p,
// SyntaxError in the case of duplicates.
PyObject *seen = PySet_New(NULL);
if (seen == NULL) {
- return 0;
+ return ERROR;
}
// NOTE: goto error on failure in the loop below to avoid leaking `seen`
@@ -6729,7 +6653,7 @@ compiler_pattern_mapping(struct compiler *c, pattern_ty p,
compiler_error(c, LOC(p), e);
goto error;
}
- if (!compiler_visit_expr(c, key)) {
+ if (compiler_visit_expr(c, key) < 0) {
goto error;
}
}
@@ -6744,7 +6668,7 @@ compiler_pattern_mapping(struct compiler *c, pattern_ty p,
ADDOP_I(c, LOC(p), COPY, 1);
ADDOP_LOAD_CONST(c, LOC(p), Py_None);
ADDOP_I(c, LOC(p), IS_OP, 1);
- RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
+ RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
// So far so good. Use that tuple of values on the stack to match
// sub-patterns against:
ADDOP_I(c, LOC(p), UNPACK_SEQUENCE, size);
@@ -6752,7 +6676,7 @@ compiler_pattern_mapping(struct compiler *c, pattern_ty p,
for (Py_ssize_t i = 0; i < size; i++) {
pc->on_top--;
pattern_ty pattern = asdl_seq_GET(patterns, i);
- RETURN_IF_FALSE(compiler_pattern_subpattern(c, pattern, pc));
+ RETURN_IF_ERROR(compiler_pattern_subpattern(c, pattern, pc));
}
// If we get this far, it's a match! Whatever happens next should consume
// the tuple of keys and the subject:
@@ -6773,17 +6697,17 @@ compiler_pattern_mapping(struct compiler *c, pattern_ty p,
ADDOP_I(c, LOC(p), SWAP, 2); // [copy, keys..., copy, key]
ADDOP(c, LOC(p), DELETE_SUBSCR); // [copy, keys...]
}
- RETURN_IF_FALSE(pattern_helper_store_name(c, LOC(p), star_target, pc));
+ RETURN_IF_ERROR(pattern_helper_store_name(c, LOC(p), star_target, pc));
}
else {
ADDOP(c, LOC(p), POP_TOP); // Tuple of keys.
ADDOP(c, LOC(p), POP_TOP); // Subject.
}
- return 1;
+ return SUCCESS;
error:
Py_DECREF(seen);
- return 0;
+ return ERROR;
}
static int
@@ -6813,8 +6737,8 @@ compiler_pattern_or(struct compiler *c, pattern_ty p, pattern_context *pc)
pc->fail_pop = NULL;
pc->fail_pop_size = 0;
pc->on_top = 0;
- if (!cfg_builder_addop_i(CFG_BUILDER(c), COPY, 1, LOC(alt)) ||
- !compiler_pattern(c, alt, pc)) {
+ if (cfg_builder_addop_i(CFG_BUILDER(c), COPY, 1, LOC(alt)) < 0 ||
+ compiler_pattern(c, alt, pc) < 0) {
goto error;
}
// Success!
@@ -6868,7 +6792,7 @@ compiler_pattern_or(struct compiler *c, pattern_ty p, pattern_context *pc)
// Do the same thing to the stack, using several
// rotations:
while (rotations--) {
- if (!pattern_helper_rotate(c, LOC(alt), icontrol + 1)){
+ if (pattern_helper_rotate(c, LOC(alt), icontrol + 1) < 0) {
goto error;
}
}
@@ -6876,8 +6800,8 @@ compiler_pattern_or(struct compiler *c, pattern_ty p, pattern_context *pc)
}
}
assert(control);
- if (!cfg_builder_addop_j(CFG_BUILDER(c), LOC(alt), JUMP, end) ||
- !emit_and_reset_fail_pop(c, LOC(alt), pc))
+ if (cfg_builder_addop_j(CFG_BUILDER(c), LOC(alt), JUMP, end) < 0 ||
+ emit_and_reset_fail_pop(c, LOC(alt), pc) < 0)
{
goto error;
}
@@ -6888,8 +6812,8 @@ compiler_pattern_or(struct compiler *c, pattern_ty p, pattern_context *pc)
// Need to NULL this for the PyObject_Free call in the error block.
old_pc.fail_pop = NULL;
// No match. Pop the remaining copy of the subject and fail:
- if (!cfg_builder_addop_noarg(CFG_BUILDER(c), POP_TOP, LOC(p)) ||
- !jump_to_fail_pop(c, LOC(p), pc, JUMP)) {
+ if (cfg_builder_addop_noarg(CFG_BUILDER(c), POP_TOP, LOC(p)) < 0 ||
+ jump_to_fail_pop(c, LOC(p), pc, JUMP) < 0) {
goto error;
}
@@ -6904,7 +6828,7 @@ compiler_pattern_or(struct compiler *c, pattern_ty p, pattern_context *pc)
Py_ssize_t nrots = nstores + 1 + pc->on_top + PyList_GET_SIZE(pc->stores);
for (Py_ssize_t i = 0; i < nstores; i++) {
// Rotate this capture to its proper place on the stack:
- if (!pattern_helper_rotate(c, LOC(p), nrots)) {
+ if (pattern_helper_rotate(c, LOC(p), nrots) < 0) {
goto error;
}
// Update the list of previous stores with this new name, checking for
@@ -6927,14 +6851,14 @@ compiler_pattern_or(struct compiler *c, pattern_ty p, pattern_context *pc)
// NOTE: Returning macros are safe again.
// Pop the copy of the subject:
ADDOP(c, LOC(p), POP_TOP);
- return 1;
+ return SUCCESS;
diff:
compiler_error(c, LOC(p), "alternative patterns bind different names");
error:
PyObject_Free(old_pc.fail_pop);
Py_DECREF(old_pc.stores);
Py_XDECREF(control);
- return 0;
+ return ERROR;
}
@@ -6966,20 +6890,20 @@ compiler_pattern_sequence(struct compiler *c, pattern_ty p,
// We need to keep the subject on top during the sequence and length checks:
pc->on_top++;
ADDOP(c, LOC(p), MATCH_SEQUENCE);
- RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
+ RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
if (star < 0) {
// No star: len(subject) == size
ADDOP(c, LOC(p), GET_LEN);
ADDOP_LOAD_CONST_NEW(c, LOC(p), PyLong_FromSsize_t(size));
ADDOP_COMPARE(c, LOC(p), Eq);
- RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
+ RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
}
else if (size > 1) {
// Star: len(subject) >= size - 1
ADDOP(c, LOC(p), GET_LEN);
ADDOP_LOAD_CONST_NEW(c, LOC(p), PyLong_FromSsize_t(size - 1));
ADDOP_COMPARE(c, LOC(p), GtE);
- RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
+ RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
}
// Whatever comes next should consume the subject:
pc->on_top--;
@@ -6988,12 +6912,12 @@ compiler_pattern_sequence(struct compiler *c, pattern_ty p,
ADDOP(c, LOC(p), POP_TOP);
}
else if (star_wildcard) {
- RETURN_IF_FALSE(pattern_helper_sequence_subscr(c, LOC(p), patterns, star, pc));
+ RETURN_IF_ERROR(pattern_helper_sequence_subscr(c, LOC(p), patterns, star, pc));
}
else {
- RETURN_IF_FALSE(pattern_helper_sequence_unpack(c, LOC(p), patterns, star, pc));
+ RETURN_IF_ERROR(pattern_helper_sequence_unpack(c, LOC(p), patterns, star, pc));
}
- return 1;
+ return SUCCESS;
}
static int
@@ -7007,8 +6931,8 @@ compiler_pattern_value(struct compiler *c, pattern_ty p, pattern_context *pc)
}
VISIT(c, expr, value);
ADDOP_COMPARE(c, LOC(p), Eq);
- RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
- return 1;
+ RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
+ return SUCCESS;
}
static int
@@ -7017,8 +6941,8 @@ compiler_pattern_singleton(struct compiler *c, pattern_ty p, pattern_context *pc
assert(p->kind == MatchSingleton_kind);
ADDOP_LOAD_CONST(c, LOC(p), p->v.MatchSingleton.value);
ADDOP_COMPARE(c, LOC(p), Is);
- RETURN_IF_FALSE(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
- return 1;
+ RETURN_IF_ERROR(jump_to_fail_pop(c, LOC(p), pc, POP_JUMP_IF_FALSE));
+ return SUCCESS;
}
static int
@@ -7063,32 +6987,35 @@ compiler_match_inner(struct compiler *c, stmt_ty s, pattern_context *pc)
if (i != cases - has_default - 1) {
ADDOP_I(c, LOC(m->pattern), COPY, 1);
}
- RETURN_IF_FALSE(pc->stores = PyList_New(0));
+ pc->stores = PyList_New(0);
+ if (pc->stores == NULL) {
+ return ERROR;
+ }
// Irrefutable cases must be either guarded, last, or both:
pc->allow_irrefutable = m->guard != NULL || i == cases - 1;
pc->fail_pop = NULL;
pc->fail_pop_size = 0;
pc->on_top = 0;
// NOTE: Can't use returning macros here (they'll leak pc->stores)!
- if (!compiler_pattern(c, m->pattern, pc)) {
+ if (compiler_pattern(c, m->pattern, pc) < 0) {
Py_DECREF(pc->stores);
- return 0;
+ return ERROR;
}
assert(!pc->on_top);
// It's a match! Store all of the captured names (they're on the stack).
Py_ssize_t nstores = PyList_GET_SIZE(pc->stores);
for (Py_ssize_t n = 0; n < nstores; n++) {
PyObject *name = PyList_GET_ITEM(pc->stores, n);
- if (!compiler_nameop(c, LOC(m->pattern), name, Store)) {
+ if (compiler_nameop(c, LOC(m->pattern), name, Store) < 0) {
Py_DECREF(pc->stores);
- return 0;
+ return ERROR;
}
}
Py_DECREF(pc->stores);
// NOTE: Returning macros are safe again.
if (m->guard) {
- RETURN_IF_FALSE(ensure_fail_pop(c, pc, 0));
- RETURN_IF_FALSE(compiler_jump_if(c, LOC(m->pattern), m->guard, pc->fail_pop[0], 0));
+ RETURN_IF_ERROR(ensure_fail_pop(c, pc, 0));
+ RETURN_IF_ERROR(compiler_jump_if(c, LOC(m->pattern), m->guard, pc->fail_pop[0], 0));
}
// Success! Pop the subject off, we're done with it:
if (i != cases - has_default - 1) {
@@ -7099,7 +7026,7 @@ compiler_match_inner(struct compiler *c, stmt_ty s, pattern_context *pc)
// If the pattern fails to match, we want the line number of the
// cleanup to be associated with the failed pattern, not the last line
// of the body
- RETURN_IF_FALSE(emit_and_reset_fail_pop(c, LOC(m->pattern), pc));
+ RETURN_IF_ERROR(emit_and_reset_fail_pop(c, LOC(m->pattern), pc));
}
if (has_default) {
// A trailing "case _" is common, and lets us save a bit of redundant
@@ -7114,12 +7041,12 @@ compiler_match_inner(struct compiler *c, stmt_ty s, pattern_context *pc)
ADDOP(c, LOC(m->pattern), NOP);
}
if (m->guard) {
- RETURN_IF_FALSE(compiler_jump_if(c, LOC(m->pattern), m->guard, end, 0));
+ RETURN_IF_ERROR(compiler_jump_if(c, LOC(m->pattern), m->guard, end, 0));
}
VISIT_SEQ(c, stmt, m->body);
}
USE_LABEL(c, end);
- return 1;
+ return SUCCESS;
}
static int
@@ -8792,6 +8719,19 @@ prepare_localsplus(struct compiler* c, int code_flags)
return nlocalsplus;
}
+static int
+add_return_at_end_of_block(struct compiler *c, int addNone)
+{
+ /* Make sure every block that falls off the end returns None. */
+ if (!basicblock_returns(CFG_BUILDER(c)->g_curblock)) {
+ if (addNone) {
+ ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None);
+ }
+ ADDOP(c, NO_LOCATION, RETURN_VALUE);
+ }
+ return SUCCESS;
+}
+
static PyCodeObject *
assemble(struct compiler *c, int addNone)
{
@@ -8805,12 +8745,8 @@ assemble(struct compiler *c, int addNone)
return NULL;
}
- /* Make sure every block that falls off the end returns None. */
- if (!basicblock_returns(CFG_BUILDER(c)->g_curblock)) {
- if (addNone) {
- ADDOP_LOAD_CONST(c, NO_LOCATION, Py_None);
- }
- ADDOP(c, NO_LOCATION, RETURN_VALUE);
+ if (add_return_at_end_of_block(c, addNone) < 0) {
+ return NULL;
}
int nblocks = 0;
@@ -9992,7 +9928,7 @@ instructions_to_cfg(PyObject *instructions, cfg_builder *g)
if (PyErr_Occurred()) {
return -1;
}
- if (!cfg_builder_addop(g, opcode, oparg, loc)) {
+ if (cfg_builder_addop(g, opcode, oparg, loc) < 0) {
return -1;
}
}
@@ -10077,7 +10013,7 @@ _PyCompile_CodeGen(PyObject *ast, PyObject *filename, PyCompilerFlags *pflags,
return NULL;
}
- if (!compiler_codegen(c, mod)) {
+ if (compiler_codegen(c, mod) < 0) {
goto finally;
}
diff --git a/Python/dtoa.c b/Python/dtoa.c
index 1b47d83bf77a24..cff5f1b0658eae 100644
--- a/Python/dtoa.c
+++ b/Python/dtoa.c
@@ -673,10 +673,6 @@ mult(Bigint *a, Bigint *b)
#ifndef Py_USING_MEMORY_DEBUGGER
-/* p5s is a linked list of powers of 5 of the form 5**(2**i), i >= 2 */
-
-static Bigint *p5s;
-
/* multiply the Bigint b by 5**k. Returns a pointer to the result, or NULL on
failure; if the returned pointer is distinct from b then the original
Bigint b will have been Bfree'd. Ignores the sign of b. */
@@ -696,7 +692,7 @@ pow5mult(Bigint *b, int k)
if (!(k >>= 2))
return b;
- p5 = p5s;
+ p5 = _PyRuntime.dtoa.p5s;
if (!p5) {
/* first time */
p5 = i2b(625);
@@ -704,7 +700,7 @@ pow5mult(Bigint *b, int k)
Bfree(b);
return NULL;
}
- p5s = p5;
+ _PyRuntime.dtoa.p5s = p5;
p5->next = 0;
}
for(;;) {
diff --git a/Python/frame.c b/Python/frame.c
index 52f6ef428291c5..b1525cca511224 100644
--- a/Python/frame.c
+++ b/Python/frame.c
@@ -127,6 +127,9 @@ _PyFrame_Clear(_PyInterpreterFrame *frame)
* to have cleared the enclosing generator, if any. */
assert(frame->owner != FRAME_OWNED_BY_GENERATOR ||
_PyFrame_GetGenerator(frame)->gi_frame_state == FRAME_CLEARED);
+ // GH-99729: Clearing this frame can expose the stack (via finalizers). It's
+ // crucial that this frame has been unlinked, and is no longer visible:
+ assert(_PyThreadState_GET()->cframe->current_frame != frame);
if (frame->frame_obj) {
PyFrameObject *f = frame->frame_obj;
frame->frame_obj = NULL;
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h
index 3a403824b49958..45382a466b1ca9 100644
--- a/Python/generated_cases.c.h
+++ b/Python/generated_cases.c.h
@@ -63,6 +63,115 @@
DISPATCH();
}
+ TARGET(LOAD_FAST__LOAD_FAST) {
+ PyObject *_tmp_1;
+ PyObject *_tmp_2;
+ {
+ PyObject *value;
+ value = GETLOCAL(oparg);
+ assert(value != NULL);
+ Py_INCREF(value);
+ _tmp_2 = value;
+ }
+ NEXTOPARG();
+ JUMPBY(1);
+ {
+ PyObject *value;
+ value = GETLOCAL(oparg);
+ assert(value != NULL);
+ Py_INCREF(value);
+ _tmp_1 = value;
+ }
+ STACK_GROW(2);
+ POKE(1, _tmp_1);
+ POKE(2, _tmp_2);
+ DISPATCH();
+ }
+
+ TARGET(LOAD_FAST__LOAD_CONST) {
+ PyObject *_tmp_1;
+ PyObject *_tmp_2;
+ {
+ PyObject *value;
+ value = GETLOCAL(oparg);
+ assert(value != NULL);
+ Py_INCREF(value);
+ _tmp_2 = value;
+ }
+ NEXTOPARG();
+ JUMPBY(1);
+ {
+ PyObject *value;
+ value = GETITEM(consts, oparg);
+ Py_INCREF(value);
+ _tmp_1 = value;
+ }
+ STACK_GROW(2);
+ POKE(1, _tmp_1);
+ POKE(2, _tmp_2);
+ DISPATCH();
+ }
+
+ TARGET(STORE_FAST__LOAD_FAST) {
+ PyObject *_tmp_1 = PEEK(1);
+ {
+ PyObject *value = _tmp_1;
+ SETLOCAL(oparg, value);
+ }
+ NEXTOPARG();
+ JUMPBY(1);
+ {
+ PyObject *value;
+ value = GETLOCAL(oparg);
+ assert(value != NULL);
+ Py_INCREF(value);
+ _tmp_1 = value;
+ }
+ POKE(1, _tmp_1);
+ DISPATCH();
+ }
+
+ TARGET(STORE_FAST__STORE_FAST) {
+ PyObject *_tmp_1 = PEEK(1);
+ PyObject *_tmp_2 = PEEK(2);
+ {
+ PyObject *value = _tmp_1;
+ SETLOCAL(oparg, value);
+ }
+ NEXTOPARG();
+ JUMPBY(1);
+ {
+ PyObject *value = _tmp_2;
+ SETLOCAL(oparg, value);
+ }
+ STACK_SHRINK(2);
+ DISPATCH();
+ }
+
+ TARGET(LOAD_CONST__LOAD_FAST) {
+ PyObject *_tmp_1;
+ PyObject *_tmp_2;
+ {
+ PyObject *value;
+ value = GETITEM(consts, oparg);
+ Py_INCREF(value);
+ _tmp_2 = value;
+ }
+ NEXTOPARG();
+ JUMPBY(1);
+ {
+ PyObject *value;
+ value = GETLOCAL(oparg);
+ assert(value != NULL);
+ Py_INCREF(value);
+ _tmp_1 = value;
+ }
+ STACK_GROW(2);
+ POKE(1, _tmp_1);
+ POKE(2, _tmp_2);
+ DISPATCH();
+ }
+
TARGET(POP_TOP) {
PyObject *value = PEEK(1);
Py_DECREF(value);
@@ -78,6 +187,21 @@
DISPATCH();
}
+ TARGET(END_FOR) {
+ PyObject *_tmp_1 = PEEK(1);
+ PyObject *_tmp_2 = PEEK(2);
+ {
+ PyObject *value = _tmp_1;
+ Py_DECREF(value);
+ }
+ {
+ PyObject *value = _tmp_2;
+ Py_DECREF(value);
+ }
+ STACK_SHRINK(2);
+ DISPATCH();
+ }
+
TARGET(UNARY_POSITIVE) {
PyObject *value = PEEK(1);
PyObject *res;
@@ -139,7 +263,7 @@
if (prod == NULL) goto pop_2_error;
STACK_SHRINK(1);
POKE(1, prod);
- next_instr += 1;
+ JUMPBY(1);
DISPATCH();
}
@@ -159,7 +283,7 @@
if (prod == NULL) goto pop_2_error;
STACK_SHRINK(1);
POKE(1, prod);
- next_instr += 1;
+ JUMPBY(1);
DISPATCH();
}
@@ -177,7 +301,7 @@
if (sub == NULL) goto pop_2_error;
STACK_SHRINK(1);
POKE(1, sub);
- next_instr += 1;
+ JUMPBY(1);
DISPATCH();
}
@@ -196,7 +320,7 @@
if (sub == NULL) goto pop_2_error;
STACK_SHRINK(1);
POKE(1, sub);
- next_instr += 1;
+ JUMPBY(1);
DISPATCH();
}
@@ -214,7 +338,7 @@
if (res == NULL) goto pop_2_error;
STACK_SHRINK(1);
POKE(1, res);
- next_instr += 1;
+ JUMPBY(1);
DISPATCH();
}
@@ -268,7 +392,7 @@
if (sum == NULL) goto pop_2_error;
STACK_SHRINK(1);
POKE(1, sum);
- next_instr += 1;
+ JUMPBY(1);
DISPATCH();
}
@@ -286,7 +410,7 @@
if (sum == NULL) goto pop_2_error;
STACK_SHRINK(1);
POKE(1, sum);
- next_instr += 1;
+ JUMPBY(1);
DISPATCH();
}
@@ -311,7 +435,7 @@
if (res == NULL) goto pop_2_error;
STACK_SHRINK(1);
POKE(1, res);
- next_instr += 4;
+ JUMPBY(4);
DISPATCH();
}
@@ -380,7 +504,7 @@
Py_DECREF(list);
STACK_SHRINK(1);
POKE(1, res);
- next_instr += 4;
+ JUMPBY(4);
DISPATCH();
}
@@ -406,7 +530,7 @@
Py_DECREF(tuple);
STACK_SHRINK(1);
POKE(1, res);
- next_instr += 4;
+ JUMPBY(4);
DISPATCH();
}
@@ -424,14 +548,14 @@
}
Py_DECREF(dict);
Py_DECREF(sub);
- if (1) goto pop_2_error;
+ if (true) goto pop_2_error;
}
Py_INCREF(res); // Do this before DECREF'ing dict, sub
Py_DECREF(dict);
Py_DECREF(sub);
STACK_SHRINK(1);
POKE(1, res);
- next_instr += 4;
+ JUMPBY(4);
DISPATCH();
}
@@ -464,22 +588,21 @@
}
TARGET(LIST_APPEND) {
- PyObject *v = POP();
- PyObject *list = PEEK(oparg);
- if (_PyList_AppendTakeRef((PyListObject *)list, v) < 0)
- goto error;
+ PyObject *v = PEEK(1);
+ PyObject *list = PEEK(oparg + 1); // +1 to account for v staying on stack
+ if (_PyList_AppendTakeRef((PyListObject *)list, v) < 0) goto pop_1_error;
+ STACK_SHRINK(1);
PREDICT(JUMP_BACKWARD);
DISPATCH();
}
TARGET(SET_ADD) {
- PyObject *v = POP();
- PyObject *set = PEEK(oparg);
- int err;
- err = PySet_Add(set, v);
+ PyObject *v = PEEK(1);
+ PyObject *set = PEEK(oparg + 1); // +1 to account for v staying on stack
+ int err = PySet_Add(set, v);
Py_DECREF(v);
- if (err != 0)
- goto error;
+ if (err) goto pop_1_error;
+ STACK_SHRINK(1);
PREDICT(JUMP_BACKWARD);
DISPATCH();
}
@@ -489,31 +612,32 @@
PyObject *sub = PEEK(1);
PyObject *container = PEEK(2);
PyObject *v = PEEK(3);
- _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)next_instr;
- if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
+ uint16_t counter = read_u16(next_instr + 0);
+ if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
assert(cframe.use_tracing == 0);
next_instr--;
_Py_Specialize_StoreSubscr(container, sub, next_instr);
DISPATCH_SAME_OPARG();
}
STAT_INC(STORE_SUBSCR, deferred);
+ _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)next_instr;
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
/* container[sub] = v */
int err = PyObject_SetItem(container, sub, v);
Py_DECREF(v);
Py_DECREF(container);
Py_DECREF(sub);
- if (err != 0) goto pop_3_error;
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_SUBSCR);
+ if (err) goto pop_3_error;
STACK_SHRINK(3);
+ JUMPBY(1);
DISPATCH();
}
TARGET(STORE_SUBSCR_LIST_INT) {
+ PyObject *sub = PEEK(1);
+ PyObject *list = PEEK(2);
+ PyObject *value = PEEK(3);
assert(cframe.use_tracing == 0);
- PyObject *sub = TOP();
- PyObject *list = SECOND();
- PyObject *value = THIRD();
DEOPT_IF(!PyLong_CheckExact(sub), STORE_SUBSCR);
DEOPT_IF(!PyList_CheckExact(list), STORE_SUBSCR);
@@ -526,61 +650,58 @@
PyObject *old_value = PyList_GET_ITEM(list, index);
PyList_SET_ITEM(list, index, value);
- STACK_SHRINK(3);
assert(old_value != NULL);
Py_DECREF(old_value);
_Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free);
Py_DECREF(list);
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_SUBSCR);
+ STACK_SHRINK(3);
+ JUMPBY(1);
DISPATCH();
}
TARGET(STORE_SUBSCR_DICT) {
+ PyObject *sub = PEEK(1);
+ PyObject *dict = PEEK(2);
+ PyObject *value = PEEK(3);
assert(cframe.use_tracing == 0);
- PyObject *sub = TOP();
- PyObject *dict = SECOND();
- PyObject *value = THIRD();
DEOPT_IF(!PyDict_CheckExact(dict), STORE_SUBSCR);
- STACK_SHRINK(3);
STAT_INC(STORE_SUBSCR, hit);
int err = _PyDict_SetItem_Take2((PyDictObject *)dict, sub, value);
Py_DECREF(dict);
- if (err != 0) {
- goto error;
- }
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_SUBSCR);
+ if (err) goto pop_3_error;
+ STACK_SHRINK(3);
+ JUMPBY(1);
DISPATCH();
}
TARGET(DELETE_SUBSCR) {
- PyObject *sub = TOP();
- PyObject *container = SECOND();
- int err;
- STACK_SHRINK(2);
+ PyObject *sub = PEEK(1);
+ PyObject *container = PEEK(2);
/* del container[sub] */
- err = PyObject_DelItem(container, sub);
+ int err = PyObject_DelItem(container, sub);
Py_DECREF(container);
Py_DECREF(sub);
- if (err != 0)
- goto error;
+ if (err) goto pop_2_error;
+ STACK_SHRINK(2);
DISPATCH();
}
TARGET(PRINT_EXPR) {
- PyObject *value = POP();
+ PyObject *value = PEEK(1);
PyObject *hook = _PySys_GetAttr(tstate, &_Py_ID(displayhook));
PyObject *res;
+ // Can't use ERROR_IF here.
if (hook == NULL) {
_PyErr_SetString(tstate, PyExc_RuntimeError,
"lost sys.displayhook");
Py_DECREF(value);
- goto error;
+ if (true) goto pop_1_error;
}
res = PyObject_CallOneArg(hook, value);
Py_DECREF(value);
- if (res == NULL)
- goto error;
+ if (res == NULL) goto pop_1_error;
Py_DECREF(res);
+ STACK_SHRINK(1);
DISPATCH();
}
@@ -607,9 +728,10 @@
}
TARGET(INTERPRETER_EXIT) {
+ PyObject *retval = PEEK(1);
assert(frame == &entry_frame);
assert(_PyFrame_IsIncomplete(frame));
- PyObject *retval = POP();
+ STACK_SHRINK(1); // Since we're not going to DISPATCH()
assert(EMPTY());
/* Restore previous cframe and return. */
tstate->cframe = cframe.previous;
@@ -621,59 +743,56 @@
}
TARGET(RETURN_VALUE) {
- PyObject *retval = POP();
+ PyObject *retval = PEEK(1);
+ STACK_SHRINK(1);
assert(EMPTY());
_PyFrame_SetStackPointer(frame, stack_pointer);
TRACE_FUNCTION_EXIT();
DTRACE_FUNCTION_EXIT();
_Py_LeaveRecursiveCallPy(tstate);
assert(frame != &entry_frame);
- frame = cframe.current_frame = pop_frame(tstate, frame);
+ // GH-99729: We need to unlink the frame *before* clearing it:
+ _PyInterpreterFrame *dying = frame;
+ frame = cframe.current_frame = dying->previous;
+ _PyEvalFrameClearAndPop(tstate, dying);
_PyFrame_StackPush(frame, retval);
goto resume_frame;
}
TARGET(GET_AITER) {
+ PyObject *obj = PEEK(1);
+ PyObject *iter;
unaryfunc getter = NULL;
- PyObject *iter = NULL;
- PyObject *obj = TOP();
PyTypeObject *type = Py_TYPE(obj);
if (type->tp_as_async != NULL) {
getter = type->tp_as_async->am_aiter;
}
- if (getter != NULL) {
- iter = (*getter)(obj);
- Py_DECREF(obj);
- if (iter == NULL) {
- SET_TOP(NULL);
- goto error;
- }
- }
- else {
- SET_TOP(NULL);
+ if (getter == NULL) {
_PyErr_Format(tstate, PyExc_TypeError,
"'async for' requires an object with "
"__aiter__ method, got %.100s",
type->tp_name);
Py_DECREF(obj);
- goto error;
+ if (true) goto pop_1_error;
}
+ iter = (*getter)(obj);
+ Py_DECREF(obj);
+ if (iter == NULL) goto pop_1_error;
+
if (Py_TYPE(iter)->tp_as_async == NULL ||
Py_TYPE(iter)->tp_as_async->am_anext == NULL) {
- SET_TOP(NULL);
_PyErr_Format(tstate, PyExc_TypeError,
"'async for' received an object from __aiter__ "
"that does not implement __anext__: %.100s",
Py_TYPE(iter)->tp_name);
Py_DECREF(iter);
- goto error;
+ if (true) goto pop_1_error;
}
-
- SET_TOP(iter);
+ POKE(1, iter);
DISPATCH();
}
@@ -1128,51 +1247,46 @@
TARGET(STORE_ATTR) {
PREDICTED(STORE_ATTR);
- _PyAttrCache *cache = (_PyAttrCache *)next_instr;
- if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
+ PyObject *owner = PEEK(1);
+ PyObject *v = PEEK(2);
+ uint16_t counter = read_u16(next_instr + 0);
+ if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
assert(cframe.use_tracing == 0);
- PyObject *owner = TOP();
PyObject *name = GETITEM(names, oparg);
next_instr--;
_Py_Specialize_StoreAttr(owner, next_instr, name);
DISPATCH_SAME_OPARG();
}
STAT_INC(STORE_ATTR, deferred);
+ _PyAttrCache *cache = (_PyAttrCache *)next_instr;
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
PyObject *name = GETITEM(names, oparg);
- PyObject *owner = TOP();
- PyObject *v = SECOND();
- int err;
- STACK_SHRINK(2);
- err = PyObject_SetAttr(owner, name, v);
+ int err = PyObject_SetAttr(owner, name, v);
Py_DECREF(v);
Py_DECREF(owner);
- if (err != 0) {
- goto error;
- }
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR);
+ if (err) goto pop_2_error;
+ STACK_SHRINK(2);
+ JUMPBY(4);
DISPATCH();
}
TARGET(DELETE_ATTR) {
+ PyObject *owner = PEEK(1);
PyObject *name = GETITEM(names, oparg);
- PyObject *owner = POP();
- int err;
- err = PyObject_SetAttr(owner, name, (PyObject *)NULL);
+ int err = PyObject_SetAttr(owner, name, (PyObject *)NULL);
Py_DECREF(owner);
- if (err != 0)
- goto error;
+ if (err) goto pop_1_error;
+ STACK_SHRINK(1);
DISPATCH();
}
TARGET(STORE_GLOBAL) {
+ PyObject *v = PEEK(1);
PyObject *name = GETITEM(names, oparg);
- PyObject *v = POP();
- int err;
- err = PyDict_SetItem(GLOBALS(), name, v);
+ int err = PyDict_SetItem(GLOBALS(), name, v);
Py_DECREF(v);
- if (err != 0)
- goto error;
+ if (err) goto pop_1_error;
+ STACK_SHRINK(1);
DISPATCH();
}
@@ -1967,20 +2081,18 @@
}
TARGET(STORE_ATTR_INSTANCE_VALUE) {
+ PyObject *owner = PEEK(1);
+ PyObject *value = PEEK(2);
+ uint32_t type_version = read_u32(next_instr + 1);
+ uint16_t index = read_u16(next_instr + 3);
assert(cframe.use_tracing == 0);
- PyObject *owner = TOP();
PyTypeObject *tp = Py_TYPE(owner);
- _PyAttrCache *cache = (_PyAttrCache *)next_instr;
- uint32_t type_version = read_u32(cache->version);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT);
PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner);
DEOPT_IF(!_PyDictOrValues_IsValues(dorv), STORE_ATTR);
STAT_INC(STORE_ATTR, hit);
- Py_ssize_t index = cache->index;
- STACK_SHRINK(1);
- PyObject *value = POP();
PyDictValues *values = _PyDictOrValues_GetValues(dorv);
PyObject *old_value = values->values[index];
values->values[index] = value;
@@ -1991,16 +2103,18 @@
Py_DECREF(old_value);
}
Py_DECREF(owner);
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR);
+ STACK_SHRINK(2);
+ JUMPBY(4);
DISPATCH();
}
TARGET(STORE_ATTR_WITH_HINT) {
+ PyObject *owner = PEEK(1);
+ PyObject *value = PEEK(2);
+ uint32_t type_version = read_u32(next_instr + 1);
+ uint16_t hint = read_u16(next_instr + 3);
assert(cframe.use_tracing == 0);
- PyObject *owner = TOP();
PyTypeObject *tp = Py_TYPE(owner);
- _PyAttrCache *cache = (_PyAttrCache *)next_instr;
- uint32_t type_version = read_u32(cache->version);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT);
@@ -2010,17 +2124,14 @@
DEOPT_IF(dict == NULL, STORE_ATTR);
assert(PyDict_CheckExact((PyObject *)dict));
PyObject *name = GETITEM(names, oparg);
- uint16_t hint = cache->index;
DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries, STORE_ATTR);
- PyObject *value, *old_value;
+ PyObject *old_value;
uint64_t new_version;
if (DK_IS_UNICODE(dict->ma_keys)) {
PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint;
DEOPT_IF(ep->me_key != name, STORE_ATTR);
old_value = ep->me_value;
DEOPT_IF(old_value == NULL, STORE_ATTR);
- STACK_SHRINK(1);
- value = POP();
new_version = _PyDict_NotifyEvent(PyDict_EVENT_MODIFIED, dict, name, value);
ep->me_value = value;
}
@@ -2029,8 +2140,6 @@
DEOPT_IF(ep->me_key != name, STORE_ATTR);
old_value = ep->me_value;
DEOPT_IF(old_value == NULL, STORE_ATTR);
- STACK_SHRINK(1);
- value = POP();
new_version = _PyDict_NotifyEvent(PyDict_EVENT_MODIFIED, dict, name, value);
ep->me_value = value;
}
@@ -2043,37 +2152,39 @@
/* PEP 509 */
dict->ma_version_tag = new_version;
Py_DECREF(owner);
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR);
+ STACK_SHRINK(2);
+ JUMPBY(4);
DISPATCH();
}
TARGET(STORE_ATTR_SLOT) {
+ PyObject *owner = PEEK(1);
+ PyObject *value = PEEK(2);
+ uint32_t type_version = read_u32(next_instr + 1);
+ uint16_t index = read_u16(next_instr + 3);
assert(cframe.use_tracing == 0);
- PyObject *owner = TOP();
PyTypeObject *tp = Py_TYPE(owner);
- _PyAttrCache *cache = (_PyAttrCache *)next_instr;
- uint32_t type_version = read_u32(cache->version);
assert(type_version != 0);
DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR);
- char *addr = (char *)owner + cache->index;
+ char *addr = (char *)owner + index;
STAT_INC(STORE_ATTR, hit);
- STACK_SHRINK(1);
- PyObject *value = POP();
PyObject *old_value = *(PyObject **)addr;
*(PyObject **)addr = value;
Py_XDECREF(old_value);
Py_DECREF(owner);
- JUMPBY(INLINE_CACHE_ENTRIES_STORE_ATTR);
+ STACK_SHRINK(2);
+ JUMPBY(4);
DISPATCH();
}
TARGET(COMPARE_OP) {
PREDICTED(COMPARE_OP);
+ PyObject *right = PEEK(1);
+ PyObject *left = PEEK(2);
+ PyObject *res;
_PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
assert(cframe.use_tracing == 0);
- PyObject *right = TOP();
- PyObject *left = SECOND();
next_instr--;
_Py_Specialize_CompareOp(left, right, next_instr, oparg);
DISPATCH_SAME_OPARG();
@@ -2081,109 +2192,126 @@
STAT_INC(COMPARE_OP, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
assert(oparg <= Py_GE);
- PyObject *right = POP();
- PyObject *left = TOP();
- PyObject *res = PyObject_RichCompare(left, right, oparg);
- SET_TOP(res);
+ res = PyObject_RichCompare(left, right, oparg);
Py_DECREF(left);
Py_DECREF(right);
- if (res == NULL) {
- goto error;
- }
- JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP);
+ if (res == NULL) goto pop_2_error;
+ STACK_SHRINK(1);
+ POKE(1, res);
+ JUMPBY(2);
DISPATCH();
}
TARGET(COMPARE_OP_FLOAT_JUMP) {
- assert(cframe.use_tracing == 0);
- // Combined: COMPARE_OP (float ? float) + POP_JUMP_IF_(true/false)
- _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr;
- int when_to_jump_mask = cache->mask;
- PyObject *right = TOP();
- PyObject *left = SECOND();
- DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_OP);
- DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_OP);
- double dleft = PyFloat_AS_DOUBLE(left);
- double dright = PyFloat_AS_DOUBLE(right);
- int sign = (dleft > dright) - (dleft < dright);
- DEOPT_IF(isnan(dleft), COMPARE_OP);
- DEOPT_IF(isnan(dright), COMPARE_OP);
- STAT_INC(COMPARE_OP, hit);
- JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP);
+ PyObject *_tmp_1 = PEEK(1);
+ PyObject *_tmp_2 = PEEK(2);
+ {
+ PyObject *right = _tmp_1;
+ PyObject *left = _tmp_2;
+ size_t jump;
+ uint16_t when_to_jump_mask = read_u16(next_instr + 1);
+ assert(cframe.use_tracing == 0);
+ // Combined: COMPARE_OP (float ? float) + POP_JUMP_IF_(true/false)
+ DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_OP);
+ DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_OP);
+ double dleft = PyFloat_AS_DOUBLE(left);
+ double dright = PyFloat_AS_DOUBLE(right);
+ // 1 if <, 2 if ==, 4 if >; this matches when_to_jump_mask
+ int sign_ish = 2*(dleft > dright) + 2 - (dleft < dright);
+ DEOPT_IF(isnan(dleft), COMPARE_OP);
+ DEOPT_IF(isnan(dright), COMPARE_OP);
+ STAT_INC(COMPARE_OP, hit);
+ _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc);
+ _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc);
+ jump = sign_ish & when_to_jump_mask;
+ _tmp_2 = (PyObject *)jump;
+ }
+ JUMPBY(2);
NEXTOPARG();
- STACK_SHRINK(2);
- _Py_DECREF_SPECIALIZED(left, _PyFloat_ExactDealloc);
- _Py_DECREF_SPECIALIZED(right, _PyFloat_ExactDealloc);
- assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE);
- int jump = (1 << (sign + 1)) & when_to_jump_mask;
- if (!jump) {
- next_instr++;
- }
- else {
- JUMPBY(1 + oparg);
+ JUMPBY(1);
+ {
+ size_t jump = (size_t)_tmp_2;
+ assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE);
+ if (jump) {
+ JUMPBY(oparg);
+ }
}
+ STACK_SHRINK(2);
DISPATCH();
}
TARGET(COMPARE_OP_INT_JUMP) {
- assert(cframe.use_tracing == 0);
- // Combined: COMPARE_OP (int ? int) + POP_JUMP_IF_(true/false)
- _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr;
- int when_to_jump_mask = cache->mask;
- PyObject *right = TOP();
- PyObject *left = SECOND();
- DEOPT_IF(!PyLong_CheckExact(left), COMPARE_OP);
- DEOPT_IF(!PyLong_CheckExact(right), COMPARE_OP);
- DEOPT_IF((size_t)(Py_SIZE(left) + 1) > 2, COMPARE_OP);
- DEOPT_IF((size_t)(Py_SIZE(right) + 1) > 2, COMPARE_OP);
- STAT_INC(COMPARE_OP, hit);
- assert(Py_ABS(Py_SIZE(left)) <= 1 && Py_ABS(Py_SIZE(right)) <= 1);
- Py_ssize_t ileft = Py_SIZE(left) * ((PyLongObject *)left)->ob_digit[0];
- Py_ssize_t iright = Py_SIZE(right) * ((PyLongObject *)right)->ob_digit[0];
- int sign = (ileft > iright) - (ileft < iright);
- JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP);
+ PyObject *_tmp_1 = PEEK(1);
+ PyObject *_tmp_2 = PEEK(2);
+ {
+ PyObject *right = _tmp_1;
+ PyObject *left = _tmp_2;
+ size_t jump;
+ uint16_t when_to_jump_mask = read_u16(next_instr + 1);
+ assert(cframe.use_tracing == 0);
+ // Combined: COMPARE_OP (int ? int) + POP_JUMP_IF_(true/false)
+ DEOPT_IF(!PyLong_CheckExact(left), COMPARE_OP);
+ DEOPT_IF(!PyLong_CheckExact(right), COMPARE_OP);
+ DEOPT_IF((size_t)(Py_SIZE(left) + 1) > 2, COMPARE_OP);
+ DEOPT_IF((size_t)(Py_SIZE(right) + 1) > 2, COMPARE_OP);
+ STAT_INC(COMPARE_OP, hit);
+ assert(Py_ABS(Py_SIZE(left)) <= 1 && Py_ABS(Py_SIZE(right)) <= 1);
+ Py_ssize_t ileft = Py_SIZE(left) * ((PyLongObject *)left)->ob_digit[0];
+ Py_ssize_t iright = Py_SIZE(right) * ((PyLongObject *)right)->ob_digit[0];
+ // 1 if <, 2 if ==, 4 if >; this matches when_to_jump_mask
+ int sign_ish = 2*(ileft > iright) + 2 - (ileft < iright);
+ _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free);
+ _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free);
+ jump = sign_ish & when_to_jump_mask;
+ _tmp_2 = (PyObject *)jump;
+ }
+ JUMPBY(2);
NEXTOPARG();
- STACK_SHRINK(2);
- _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free);
- _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free);
- assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE);
- int jump = (1 << (sign + 1)) & when_to_jump_mask;
- if (!jump) {
- next_instr++;
- }
- else {
- JUMPBY(1 + oparg);
+ JUMPBY(1);
+ {
+ size_t jump = (size_t)_tmp_2;
+ assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE);
+ if (jump) {
+ JUMPBY(oparg);
+ }
}
+ STACK_SHRINK(2);
DISPATCH();
}
TARGET(COMPARE_OP_STR_JUMP) {
- assert(cframe.use_tracing == 0);
- // Combined: COMPARE_OP (str == str or str != str) + POP_JUMP_IF_(true/false)
- _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr;
- int invert = cache->mask;
- PyObject *right = TOP();
- PyObject *left = SECOND();
- DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_OP);
- DEOPT_IF(!PyUnicode_CheckExact(right), COMPARE_OP);
- STAT_INC(COMPARE_OP, hit);
- int res = _PyUnicode_Equal(left, right);
- assert(oparg == Py_EQ || oparg == Py_NE);
- JUMPBY(INLINE_CACHE_ENTRIES_COMPARE_OP);
+ PyObject *_tmp_1 = PEEK(1);
+ PyObject *_tmp_2 = PEEK(2);
+ {
+ PyObject *right = _tmp_1;
+ PyObject *left = _tmp_2;
+ size_t jump;
+ uint16_t invert = read_u16(next_instr + 1);
+ assert(cframe.use_tracing == 0);
+ // Combined: COMPARE_OP (str == str or str != str) + POP_JUMP_IF_(true/false)
+ DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_OP);
+ DEOPT_IF(!PyUnicode_CheckExact(right), COMPARE_OP);
+ STAT_INC(COMPARE_OP, hit);
+ int res = _PyUnicode_Equal(left, right);
+ assert(oparg == Py_EQ || oparg == Py_NE);
+ _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc);
+ _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc);
+ assert(res == 0 || res == 1);
+ assert(invert == 0 || invert == 1);
+ jump = res ^ invert;
+ _tmp_2 = (PyObject *)jump;
+ }
+ JUMPBY(2);
NEXTOPARG();
- assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE);
- STACK_SHRINK(2);
- _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc);
- _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc);
- assert(res == 0 || res == 1);
- assert(invert == 0 || invert == 1);
- int jump = res ^ invert;
- if (!jump) {
- next_instr++;
- }
- else {
- JUMPBY(1 + oparg);
+ JUMPBY(1);
+ {
+ size_t jump = (size_t)_tmp_2;
+ assert(opcode == POP_JUMP_IF_FALSE || opcode == POP_JUMP_IF_TRUE);
+ if (jump) {
+ JUMPBY(oparg);
+ }
}
+ STACK_SHRINK(2);
DISPATCH();
}
@@ -2631,6 +2759,29 @@
DISPATCH();
}
+ TARGET(FOR_ITER_TUPLE) {
+ assert(cframe.use_tracing == 0);
+ _PyTupleIterObject *it = (_PyTupleIterObject *)TOP();
+ DEOPT_IF(Py_TYPE(it) != &PyTupleIter_Type, FOR_ITER);
+ STAT_INC(FOR_ITER, hit);
+ PyTupleObject *seq = it->it_seq;
+ if (seq) {
+ if (it->it_index < PyTuple_GET_SIZE(seq)) {
+ PyObject *next = PyTuple_GET_ITEM(seq, it->it_index++);
+ PUSH(Py_NewRef(next));
+ JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER);
+ goto end_for_iter_tuple; // End of this instruction
+ }
+ it->it_seq = NULL;
+ Py_DECREF(seq);
+ }
+ STACK_SHRINK(1);
+ Py_DECREF(it);
+ JUMPBY(INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1);
+ end_for_iter_tuple:
+ DISPATCH();
+ }
+
TARGET(FOR_ITER_RANGE) {
assert(cframe.use_tracing == 0);
_PyRangeIterObject *r = (_PyRangeIterObject *)TOP();
@@ -3542,6 +3693,7 @@
func->func_defaults = POP();
}
+ func->func_version = ((PyCodeObject *)codeobj)->co_version;
PUSH((PyObject *)func);
DISPATCH();
}
@@ -3678,7 +3830,7 @@
if (res == NULL) goto pop_2_error;
STACK_SHRINK(1);
POKE(1, res);
- next_instr += 1;
+ JUMPBY(1);
DISPATCH();
}
@@ -3702,127 +3854,3 @@
TARGET(CACHE) {
Py_UNREACHABLE();
}
-
- TARGET(LOAD_FAST__LOAD_FAST) {
- PyObject *_tmp_1;
- PyObject *_tmp_2;
- {
- PyObject *value;
- value = GETLOCAL(oparg);
- assert(value != NULL);
- Py_INCREF(value);
- _tmp_1 = value;
- }
- NEXTOPARG();
- next_instr++;
- {
- PyObject *value;
- value = GETLOCAL(oparg);
- assert(value != NULL);
- Py_INCREF(value);
- _tmp_2 = value;
- }
- STACK_GROW(2);
- POKE(1, _tmp_2);
- POKE(2, _tmp_1);
- DISPATCH();
- }
-
- TARGET(LOAD_FAST__LOAD_CONST) {
- PyObject *_tmp_1;
- PyObject *_tmp_2;
- {
- PyObject *value;
- value = GETLOCAL(oparg);
- assert(value != NULL);
- Py_INCREF(value);
- _tmp_1 = value;
- }
- NEXTOPARG();
- next_instr++;
- {
- PyObject *value;
- value = GETITEM(consts, oparg);
- Py_INCREF(value);
- _tmp_2 = value;
- }
- STACK_GROW(2);
- POKE(1, _tmp_2);
- POKE(2, _tmp_1);
- DISPATCH();
- }
-
- TARGET(STORE_FAST__LOAD_FAST) {
- PyObject *_tmp_1 = PEEK(1);
- {
- PyObject *value = _tmp_1;
- SETLOCAL(oparg, value);
- }
- NEXTOPARG();
- next_instr++;
- {
- PyObject *value;
- value = GETLOCAL(oparg);
- assert(value != NULL);
- Py_INCREF(value);
- _tmp_1 = value;
- }
- POKE(1, _tmp_1);
- DISPATCH();
- }
-
- TARGET(STORE_FAST__STORE_FAST) {
- PyObject *_tmp_1 = PEEK(2);
- PyObject *_tmp_2 = PEEK(1);
- {
- PyObject *value = _tmp_2;
- SETLOCAL(oparg, value);
- }
- NEXTOPARG();
- next_instr++;
- {
- PyObject *value = _tmp_1;
- SETLOCAL(oparg, value);
- }
- STACK_SHRINK(2);
- DISPATCH();
- }
-
- TARGET(LOAD_CONST__LOAD_FAST) {
- PyObject *_tmp_1;
- PyObject *_tmp_2;
- {
- PyObject *value;
- value = GETITEM(consts, oparg);
- Py_INCREF(value);
- _tmp_1 = value;
- }
- NEXTOPARG();
- next_instr++;
- {
- PyObject *value;
- value = GETLOCAL(oparg);
- assert(value != NULL);
- Py_INCREF(value);
- _tmp_2 = value;
- }
- STACK_GROW(2);
- POKE(1, _tmp_2);
- POKE(2, _tmp_1);
- DISPATCH();
- }
-
- TARGET(END_FOR) {
- PyObject *_tmp_1 = PEEK(2);
- PyObject *_tmp_2 = PEEK(1);
- {
- PyObject *value = _tmp_2;
- Py_DECREF(value);
- }
- {
- PyObject *value = _tmp_1;
- Py_DECREF(value);
- }
- STACK_SHRINK(2);
- DISPATCH();
- }
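The specialized COMPARE_OP_{FLOAT,INT}_JUMP handlers above now encode the comparison outcome as a small bitmask: sign_ish is 1 for "less than", 2 for "equal", and 4 for "greater than", and the cached when_to_jump_mask selects which outcomes take the jump. A minimal standalone sketch of that encoding (illustrative code, not part of ceval.c):

    #include <stdio.h>

    /* Same arithmetic as the specialized handlers: yields 1 for "<",
     * 2 for "==", and 4 for ">". */
    static int sign_ish(double left, double right)
    {
        return 2 * (left > right) + 2 - (left < right);
    }

    int main(void)
    {
        int when_to_jump_mask = 1 | 2;   /* jump when left <= right */
        printf("%d\n", sign_ish(1.0, 2.0) & when_to_jump_mask);  /* 1: jump */
        printf("%d\n", sign_ish(2.0, 2.0) & when_to_jump_mask);  /* 2: jump */
        printf("%d\n", sign_ish(3.0, 2.0) & when_to_jump_mask);  /* 0: fall through */
        return 0;
    }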
diff --git a/Python/initconfig.c b/Python/initconfig.c
index 67f6777d3b1d9e..d05099cd997790 100644
--- a/Python/initconfig.c
+++ b/Python/initconfig.c
@@ -129,7 +129,14 @@ The following implementation-specific options are available:\n\
\n\
-X int_max_str_digits=number: limit the size of int<->str conversions.\n\
This helps avoid denial of service attacks when parsing untrusted data.\n\
- The default is sys.int_info.default_max_str_digits. 0 disables.";
+ The default is sys.int_info.default_max_str_digits. 0 disables."
+
+#ifdef Py_STATS
+"\n\
+\n\
+-X pystats: Enable pystats collection at startup."
+#endif
+;
/* Envvars that don't have equivalent command-line options are listed first */
static const char usage_envvars[] =
@@ -595,17 +602,13 @@ _Py_ClearStandardStreamEncoding(void)
/* --- Py_GetArgcArgv() ------------------------------------------- */
-/* For Py_GetArgcArgv(); set by _Py_SetArgcArgv() */
-static PyWideStringList orig_argv = {.length = 0, .items = NULL};
-
-
void
_Py_ClearArgcArgv(void)
{
PyMemAllocatorEx old_alloc;
_PyMem_SetDefaultAllocator(PYMEM_DOMAIN_RAW, &old_alloc);
- _PyWideStringList_Clear(&orig_argv);
+ _PyWideStringList_Clear(&_PyRuntime.orig_argv);
PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &old_alloc);
}
@@ -620,7 +623,9 @@ _Py_SetArgcArgv(Py_ssize_t argc, wchar_t * const *argv)
PyMemAllocatorEx old_alloc;
_PyMem_SetDefaultAllocator(PYMEM_DOMAIN_RAW, &old_alloc);
- res = _PyWideStringList_Copy(&orig_argv, &argv_list);
+ // XXX _PyRuntime.orig_argv only gets cleared by Py_Main(),
+ // so it currently leaks for embedders.
+ res = _PyWideStringList_Copy(&_PyRuntime.orig_argv, &argv_list);
PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &old_alloc);
return res;
@@ -631,8 +636,8 @@ _Py_SetArgcArgv(Py_ssize_t argc, wchar_t * const *argv)
void
Py_GetArgcArgv(int *argc, wchar_t ***argv)
{
- *argc = (int)orig_argv.length;
- *argv = orig_argv.items;
+ *argc = (int)_PyRuntime.orig_argv.length;
+ *argv = _PyRuntime.orig_argv.items;
}
@@ -2188,6 +2193,12 @@ config_read(PyConfig *config, int compute_path_config)
config->show_ref_count = 1;
}
+#ifdef Py_STATS
+ if (config_get_xoption(config, L"pystats")) {
+ _py_stats = &_py_stats_struct;
+ }
+#endif
+
status = config_read_complex_options(config);
if (_PyStatus_EXCEPTION(status)) {
return status;
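The usage-string change above leans on C's compile-time concatenation of adjacent string literals, so the -X pystats entry is only spliced into the help text when Py_STATS is defined. A small self-contained sketch of the same idiom (names are illustrative):

    #include <stdio.h>

    /* Adjacent string literals merge into one constant at compile time,
     * so an #ifdef block can add or drop a line of the help text. */
    static const char usage[] =
        "-X always=1: documented unconditionally.\n"
    #ifdef WITH_EXTRA_OPTION
        "-X extra: documented only when WITH_EXTRA_OPTION is defined.\n"
    #endif
        ;

    int main(void)
    {
        fputs(usage, stdout);
        return 0;
    }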
diff --git a/Python/opcode_targets.h b/Python/opcode_targets.h
index 3aba4e7556a65f..be3ad01c151c04 100644
--- a/Python/opcode_targets.h
+++ b/Python/opcode_targets.h
@@ -61,31 +61,31 @@ static void *opcode_targets[256] = {
&&TARGET_FOR_ITER_LIST,
&&TARGET_STORE_SUBSCR,
&&TARGET_DELETE_SUBSCR,
- &&TARGET_FOR_ITER_RANGE,
+ &&TARGET_FOR_ITER_TUPLE,
&&TARGET_STOPITERATION_ERROR,
+ &&TARGET_FOR_ITER_RANGE,
&&TARGET_FOR_ITER_GEN,
&&TARGET_LOAD_ATTR_CLASS,
&&TARGET_LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN,
- &&TARGET_LOAD_ATTR_INSTANCE_VALUE,
&&TARGET_GET_ITER,
&&TARGET_GET_YIELD_FROM_ITER,
&&TARGET_PRINT_EXPR,
&&TARGET_LOAD_BUILD_CLASS,
+ &&TARGET_LOAD_ATTR_INSTANCE_VALUE,
&&TARGET_LOAD_ATTR_MODULE,
- &&TARGET_LOAD_ATTR_PROPERTY,
&&TARGET_LOAD_ASSERTION_ERROR,
&&TARGET_RETURN_GENERATOR,
+ &&TARGET_LOAD_ATTR_PROPERTY,
&&TARGET_LOAD_ATTR_SLOT,
&&TARGET_LOAD_ATTR_WITH_HINT,
&&TARGET_LOAD_ATTR_METHOD_LAZY_DICT,
&&TARGET_LOAD_ATTR_METHOD_NO_DICT,
&&TARGET_LOAD_ATTR_METHOD_WITH_DICT,
- &&TARGET_LOAD_ATTR_METHOD_WITH_VALUES,
&&TARGET_LIST_TO_TUPLE,
&&TARGET_RETURN_VALUE,
&&TARGET_IMPORT_STAR,
&&TARGET_SETUP_ANNOTATIONS,
- &&TARGET_LOAD_CONST__LOAD_FAST,
+ &&TARGET_LOAD_ATTR_METHOD_WITH_VALUES,
&&TARGET_ASYNC_GEN_WRAP,
&&TARGET_PREP_RERAISE_STAR,
&&TARGET_POP_EXCEPT,
@@ -112,7 +112,7 @@ static void *opcode_targets[256] = {
&&TARGET_JUMP_FORWARD,
&&TARGET_JUMP_IF_FALSE_OR_POP,
&&TARGET_JUMP_IF_TRUE_OR_POP,
- &&TARGET_LOAD_FAST__LOAD_CONST,
+ &&TARGET_LOAD_CONST__LOAD_FAST,
&&TARGET_POP_JUMP_IF_FALSE,
&&TARGET_POP_JUMP_IF_TRUE,
&&TARGET_LOAD_GLOBAL,
@@ -120,7 +120,7 @@ static void *opcode_targets[256] = {
&&TARGET_CONTAINS_OP,
&&TARGET_RERAISE,
&&TARGET_COPY,
- &&TARGET_LOAD_FAST__LOAD_FAST,
+ &&TARGET_LOAD_FAST__LOAD_CONST,
&&TARGET_BINARY_OP,
&&TARGET_SEND,
&&TARGET_LOAD_FAST,
@@ -140,9 +140,9 @@ static void *opcode_targets[256] = {
&&TARGET_STORE_DEREF,
&&TARGET_DELETE_DEREF,
&&TARGET_JUMP_BACKWARD,
- &&TARGET_LOAD_GLOBAL_BUILTIN,
+ &&TARGET_LOAD_FAST__LOAD_FAST,
&&TARGET_CALL_FUNCTION_EX,
- &&TARGET_LOAD_GLOBAL_MODULE,
+ &&TARGET_LOAD_GLOBAL_BUILTIN,
&&TARGET_EXTENDED_ARG,
&&TARGET_LIST_APPEND,
&&TARGET_SET_ADD,
@@ -152,24 +152,24 @@ static void *opcode_targets[256] = {
&&TARGET_YIELD_VALUE,
&&TARGET_RESUME,
&&TARGET_MATCH_CLASS,
+ &&TARGET_LOAD_GLOBAL_MODULE,
&&TARGET_STORE_ATTR_INSTANCE_VALUE,
- &&TARGET_STORE_ATTR_SLOT,
&&TARGET_FORMAT_VALUE,
&&TARGET_BUILD_CONST_KEY_MAP,
&&TARGET_BUILD_STRING,
+ &&TARGET_STORE_ATTR_SLOT,
&&TARGET_STORE_ATTR_WITH_HINT,
&&TARGET_STORE_FAST__LOAD_FAST,
&&TARGET_STORE_FAST__STORE_FAST,
- &&TARGET_STORE_SUBSCR_DICT,
&&TARGET_LIST_EXTEND,
&&TARGET_SET_UPDATE,
&&TARGET_DICT_MERGE,
&&TARGET_DICT_UPDATE,
+ &&TARGET_STORE_SUBSCR_DICT,
&&TARGET_STORE_SUBSCR_LIST_INT,
&&TARGET_UNPACK_SEQUENCE_LIST,
&&TARGET_UNPACK_SEQUENCE_TUPLE,
&&TARGET_UNPACK_SEQUENCE_TWO_TUPLE,
- &&_unknown_opcode,
&&TARGET_CALL,
&&TARGET_KW_NAMES,
&&_unknown_opcode,
diff --git a/Python/perf_trampoline.c b/Python/perf_trampoline.c
index 161e0ef74cf1da..1957ab82c33951 100644
--- a/Python/perf_trampoline.c
+++ b/Python/perf_trampoline.c
@@ -134,11 +134,6 @@ any DWARF information available for them).
#include "pycore_frame.h"
#include "pycore_interp.h"
-typedef enum {
- PERF_STATUS_FAILED = -1, // Perf trampoline is in an invalid state
- PERF_STATUS_NO_INIT = 0, // Perf trampoline is not initialized
- PERF_STATUS_OK = 1, // Perf trampoline is ready to be executed
-} perf_status_t;
#ifdef PY_HAVE_PERF_TRAMPOLINE
@@ -190,24 +185,13 @@ struct code_arena_st {
};
typedef struct code_arena_st code_arena_t;
-
-struct trampoline_api_st {
- void* (*init_state)(void);
- void (*write_state)(void* state, const void *code_addr,
- unsigned int code_size, PyCodeObject* code);
- int (*free_state)(void* state);
- void *state;
-};
-
typedef struct trampoline_api_st trampoline_api_t;
-
-static perf_status_t perf_status = PERF_STATUS_NO_INIT;
-static Py_ssize_t extra_code_index = -1;
-static code_arena_t *code_arena;
-static trampoline_api_t trampoline_api;
-
-static FILE *perf_map_file;
+#define perf_status _PyRuntime.ceval.perf.status
+#define extra_code_index _PyRuntime.ceval.perf.extra_code_index
+#define perf_code_arena _PyRuntime.ceval.perf.code_arena
+#define trampoline_api _PyRuntime.ceval.perf.trampoline_api
+#define perf_map_file _PyRuntime.ceval.perf.map_file
static void *
perf_map_get_file(void)
@@ -344,17 +328,17 @@ new_code_arena(void)
new_arena->size = mem_size;
new_arena->size_left = mem_size;
new_arena->code_size = code_size;
- new_arena->prev = code_arena;
- code_arena = new_arena;
+ new_arena->prev = perf_code_arena;
+ perf_code_arena = new_arena;
return 0;
}
static void
free_code_arenas(void)
{
- code_arena_t *cur = code_arena;
+ code_arena_t *cur = perf_code_arena;
code_arena_t *prev;
- code_arena = NULL; // invalid static pointer
+ perf_code_arena = NULL; // invalid static pointer
while (cur) {
munmap(cur->start_addr, cur->size);
prev = cur->prev;
@@ -375,14 +359,14 @@ code_arena_new_code(code_arena_t *code_arena)
static inline py_trampoline
compile_trampoline(void)
{
- if ((code_arena == NULL) ||
- (code_arena->size_left <= code_arena->code_size)) {
+ if ((perf_code_arena == NULL) ||
+ (perf_code_arena->size_left <= perf_code_arena->code_size)) {
if (new_code_arena() < 0) {
return NULL;
}
}
- assert(code_arena->size_left <= code_arena->size);
- return code_arena_new_code(code_arena);
+ assert(perf_code_arena->size_left <= perf_code_arena->size);
+ return code_arena_new_code(perf_code_arena);
}
static PyObject *
@@ -405,7 +389,7 @@ py_trampoline_evaluator(PyThreadState *ts, _PyInterpreterFrame *frame,
goto default_eval;
}
trampoline_api.write_state(trampoline_api.state, new_trampoline,
- code_arena->code_size, co);
+ perf_code_arena->code_size, co);
_PyCode_SetExtra((PyObject *)co, extra_code_index,
(void *)new_trampoline);
f = new_trampoline;
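perf_trampoline.c now keeps its former file-static globals in _PyRuntime and reaches them through #define aliases, so the body of the file reads as before while the storage is consolidated. A rough sketch of that pattern with made-up names (not the actual _PyRuntime layout):

    /* Sketch only: state consolidated into one struct, with macro aliases
     * so existing code can keep using the old global names. */
    struct example_perf_state {
        int status;
        long extra_code_index;
    };

    static struct example_perf_state example_runtime_perf;

    #define example_status      example_runtime_perf.status
    #define example_code_index  example_runtime_perf.extra_code_index

    static void example_reset(void)
    {
        example_status = 0;       /* expands to example_runtime_perf.status = 0 */
        example_code_index = -1;
    }

    int main(void)
    {
        example_reset();
        return 0;
    }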
diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c
index 8209132ebc6c27..1cb0e4d747e10a 100644
--- a/Python/pylifecycle.c
+++ b/Python/pylifecycle.c
@@ -54,7 +54,6 @@ extern void _PyIO_Fini(void);
#ifdef MS_WINDOWS
# undef BYTE
-# include "windows.h"
extern PyTypeObject PyWindowsConsoleIO_Type;
# define PyWindowsConsoleIO_Check(op) \
@@ -606,6 +605,11 @@ pycore_init_runtime(_PyRuntimeState *runtime,
return status;
}
+ status = _PyTime_Init();
+ if (_PyStatus_EXCEPTION(status)) {
+ return status;
+ }
+
status = _PyImport_Init();
if (_PyStatus_EXCEPTION(status)) {
return status;
diff --git a/Python/pystate.c b/Python/pystate.c
index e006bf2517cfff..f52fc38b358689 100644
--- a/Python/pystate.c
+++ b/Python/pystate.c
@@ -1793,30 +1793,78 @@ PyGILState_Release(PyGILState_STATE oldstate)
/* cross-interpreter data */
-crossinterpdatafunc _PyCrossInterpreterData_Lookup(PyObject *);
+static inline void
+_xidata_init(_PyCrossInterpreterData *data)
+{
+ // If the value is being reused
+ // then _xidata_clear() should have been called already.
+ assert(data->data == NULL);
+ assert(data->obj == NULL);
+ *data = (_PyCrossInterpreterData){0};
+ data->interp = -1;
+}
-/* This is a separate func from _PyCrossInterpreterData_Lookup in order
- to keep the registry code separate. */
-static crossinterpdatafunc
-_lookup_getdata(PyObject *obj)
+static inline void
+_xidata_clear(_PyCrossInterpreterData *data)
{
- crossinterpdatafunc getdata = _PyCrossInterpreterData_Lookup(obj);
- if (getdata == NULL && PyErr_Occurred() == 0)
- PyErr_Format(PyExc_ValueError,
- "%S does not support cross-interpreter data", obj);
- return getdata;
+ if (data->free != NULL) {
+ data->free(data->data);
+ }
+ data->data = NULL;
+ Py_CLEAR(data->obj);
+}
+
+void
+_PyCrossInterpreterData_Init(_PyCrossInterpreterData *data,
+ PyInterpreterState *interp,
+ void *shared, PyObject *obj,
+ xid_newobjectfunc new_object)
+{
+ assert(data != NULL);
+ assert(new_object != NULL);
+ _xidata_init(data);
+ data->data = shared;
+ if (obj != NULL) {
+ assert(interp != NULL);
+ // released in _PyCrossInterpreterData_Clear()
+ data->obj = Py_NewRef(obj);
+ }
+ // Ideally every object would know its owning interpreter.
+ // Until then, we have to rely on the caller to identify it
+ // (but we don't need it in all cases).
+ data->interp = (interp != NULL) ? interp->id : -1;
+ data->new_object = new_object;
}
int
-_PyObject_CheckCrossInterpreterData(PyObject *obj)
-{
- crossinterpdatafunc getdata = _lookup_getdata(obj);
- if (getdata == NULL) {
+_PyCrossInterpreterData_InitWithSize(_PyCrossInterpreterData *data,
+ PyInterpreterState *interp,
+ const size_t size, PyObject *obj,
+ xid_newobjectfunc new_object)
+{
+ assert(size > 0);
+ // For now we always free the shared data in the same interpreter
+ // where it was allocated, so the interpreter is required.
+ assert(interp != NULL);
+ _PyCrossInterpreterData_Init(data, interp, NULL, obj, new_object);
+ data->data = PyMem_Malloc(size);
+ if (data->data == NULL) {
return -1;
}
+ data->free = PyMem_Free;
return 0;
}
+void
+_PyCrossInterpreterData_Clear(PyInterpreterState *interp,
+ _PyCrossInterpreterData *data)
+{
+ assert(data != NULL);
+ // This must be called in the owning interpreter.
+ assert(interp == NULL || data->interp == interp->id);
+ _xidata_clear(data);
+}
+
static int
_check_xidata(PyThreadState *tstate, _PyCrossInterpreterData *data)
{
@@ -1839,6 +1887,30 @@ _check_xidata(PyThreadState *tstate, _PyCrossInterpreterData *data)
return 0;
}
+crossinterpdatafunc _PyCrossInterpreterData_Lookup(PyObject *);
+
+/* This is a separate func from _PyCrossInterpreterData_Lookup in order
+ to keep the registry code separate. */
+static crossinterpdatafunc
+_lookup_getdata(PyObject *obj)
+{
+ crossinterpdatafunc getdata = _PyCrossInterpreterData_Lookup(obj);
+ if (getdata == NULL && PyErr_Occurred() == 0)
+ PyErr_Format(PyExc_ValueError,
+ "%S does not support cross-interpreter data", obj);
+ return getdata;
+}
+
+int
+_PyObject_CheckCrossInterpreterData(PyObject *obj)
+{
+ crossinterpdatafunc getdata = _lookup_getdata(obj);
+ if (getdata == NULL) {
+ return -1;
+ }
+ return 0;
+}
+
int
_PyObject_GetCrossInterpreterData(PyObject *obj, _PyCrossInterpreterData *data)
{
@@ -1851,7 +1923,7 @@ _PyObject_GetCrossInterpreterData(PyObject *obj, _PyCrossInterpreterData *data)
// Reset data before re-populating.
*data = (_PyCrossInterpreterData){0};
- data->free = PyMem_RawFree; // Set a default that may be overridden.
+ data->interp = -1;
// Call the "getdata" func for the object.
Py_INCREF(obj);
@@ -1860,7 +1932,7 @@ _PyObject_GetCrossInterpreterData(PyObject *obj, _PyCrossInterpreterData *data)
Py_DECREF(obj);
return -1;
}
- int res = getdata(obj, data);
+ int res = getdata(tstate, obj, data);
Py_DECREF(obj);
if (res != 0) {
return -1;
@@ -1876,21 +1948,17 @@ _PyObject_GetCrossInterpreterData(PyObject *obj, _PyCrossInterpreterData *data)
return 0;
}
-static void
-_release_xidata(void *arg)
+PyObject *
+_PyCrossInterpreterData_NewObject(_PyCrossInterpreterData *data)
{
- _PyCrossInterpreterData *data = (_PyCrossInterpreterData *)arg;
- if (data->free != NULL) {
- data->free(data->data);
- }
- data->data = NULL;
- Py_CLEAR(data->obj);
+ return data->new_object(data);
}
+typedef void (*releasefunc)(PyInterpreterState *, void *);
+
static void
_call_in_interpreter(struct _gilstate_runtime_state *gilstate,
- PyInterpreterState *interp,
- void (*func)(void *), void *arg)
+ PyInterpreterState *interp, releasefunc func, void *arg)
{
/* We would use Py_AddPendingCall() if it weren't specific to the
* main interpreter (see bpo-33608). In the meantime we take a
@@ -1906,7 +1974,7 @@ _call_in_interpreter(struct _gilstate_runtime_state *gilstate,
// XXX Once the GIL is per-interpreter, this should be called with the
// calling interpreter's GIL released and the target interpreter's held.
- func(arg);
+ func(interp, arg);
// Switch back.
if (save_tstate != NULL) {
@@ -1935,16 +2003,11 @@ _PyCrossInterpreterData_Release(_PyCrossInterpreterData *data)
// "Release" the data and/or the object.
struct _gilstate_runtime_state *gilstate = &_PyRuntime.gilstate;
- _call_in_interpreter(gilstate, interp, _release_xidata, data);
+ _call_in_interpreter(gilstate, interp,
+ (releasefunc)_PyCrossInterpreterData_Clear, data);
return 0;
}
-PyObject *
-_PyCrossInterpreterData_NewObject(_PyCrossInterpreterData *data)
-{
- return data->new_object(data);
-}
-
/* registry of {type -> crossinterpdatafunc} */
/* For now we use a global registry of shareable classes. An
@@ -2095,16 +2158,21 @@ _new_bytes_object(_PyCrossInterpreterData *data)
}
static int
-_bytes_shared(PyObject *obj, _PyCrossInterpreterData *data)
+_bytes_shared(PyThreadState *tstate, PyObject *obj,
+ _PyCrossInterpreterData *data)
{
- struct _shared_bytes_data *shared = PyMem_NEW(struct _shared_bytes_data, 1);
+ if (_PyCrossInterpreterData_InitWithSize(
+ data, tstate->interp, sizeof(struct _shared_bytes_data), obj,
+ _new_bytes_object
+ ) < 0)
+ {
+ return -1;
+ }
+ struct _shared_bytes_data *shared = (struct _shared_bytes_data *)data->data;
if (PyBytes_AsStringAndSize(obj, &shared->bytes, &shared->len) < 0) {
+ _PyCrossInterpreterData_Clear(tstate->interp, data);
return -1;
}
- data->data = (void *)shared;
- data->obj = Py_NewRef(obj); // Will be "released" (decref'ed) when data released.
- data->new_object = _new_bytes_object;
- data->free = PyMem_Free;
return 0;
}
@@ -2122,16 +2190,20 @@ _new_str_object(_PyCrossInterpreterData *data)
}
static int
-_str_shared(PyObject *obj, _PyCrossInterpreterData *data)
+_str_shared(PyThreadState *tstate, PyObject *obj,
+ _PyCrossInterpreterData *data)
{
- struct _shared_str_data *shared = PyMem_NEW(struct _shared_str_data, 1);
+ if (_PyCrossInterpreterData_InitWithSize(
+ data, tstate->interp, sizeof(struct _shared_str_data), obj,
+ _new_str_object
+ ) < 0)
+ {
+ return -1;
+ }
+ struct _shared_str_data *shared = (struct _shared_str_data *)data->data;
shared->kind = PyUnicode_KIND(obj);
shared->buffer = PyUnicode_DATA(obj);
shared->len = PyUnicode_GET_LENGTH(obj);
- data->data = (void *)shared;
- data->obj = Py_NewRef(obj); // Will be "released" (decref'ed) when data released.
- data->new_object = _new_str_object;
- data->free = PyMem_Free;
return 0;
}
@@ -2142,7 +2214,8 @@ _new_long_object(_PyCrossInterpreterData *data)
}
static int
-_long_shared(PyObject *obj, _PyCrossInterpreterData *data)
+_long_shared(PyThreadState *tstate, PyObject *obj,
+ _PyCrossInterpreterData *data)
{
/* Note that this means the size of shareable ints is bounded by
* sys.maxsize. Hence on 32-bit architectures that is half the
@@ -2155,10 +2228,9 @@ _long_shared(PyObject *obj, _PyCrossInterpreterData *data)
}
return -1;
}
- data->data = (void *)value;
- data->obj = NULL;
- data->new_object = _new_long_object;
- data->free = NULL;
+ _PyCrossInterpreterData_Init(data, tstate->interp, (void *)value, NULL,
+ _new_long_object);
+ // data->obj and data->free remain NULL
return 0;
}
@@ -2170,12 +2242,12 @@ _new_none_object(_PyCrossInterpreterData *data)
}
static int
-_none_shared(PyObject *obj, _PyCrossInterpreterData *data)
+_none_shared(PyThreadState *tstate, PyObject *obj,
+ _PyCrossInterpreterData *data)
{
- data->data = NULL;
- // data->obj remains NULL
- data->new_object = _new_none_object;
- data->free = NULL; // There is nothing to free.
+ _PyCrossInterpreterData_Init(data, tstate->interp, NULL, NULL,
+ _new_none_object);
+ // data->data, data->obj and data->free remain NULL
return 0;
}
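Under the new API, a type's "shared" hook receives the calling PyThreadState and fills in the _PyCrossInterpreterData through _PyCrossInterpreterData_Init() or _PyCrossInterpreterData_InitWithSize() instead of assigning the fields by hand. As a hedged illustration, here is a hypothetical hook for float values, modeled on _str_shared()/_long_shared() above; the _float_shared/_new_float_object names are invented and not part of this patch:

    struct _shared_float_data {
        double value;
    };

    static PyObject *
    _new_float_object(_PyCrossInterpreterData *data)
    {
        struct _shared_float_data *shared = (struct _shared_float_data *)data->data;
        return PyFloat_FromDouble(shared->value);
    }

    static int
    _float_shared(PyThreadState *tstate, PyObject *obj,
                  _PyCrossInterpreterData *data)
    {
        if (_PyCrossInterpreterData_InitWithSize(
                data, tstate->interp, sizeof(struct _shared_float_data), obj,
                _new_float_object) < 0)
        {
            return -1;
        }
        struct _shared_float_data *shared = (struct _shared_float_data *)data->data;
        shared->value = PyFloat_AS_DOUBLE(obj);
        return 0;
    }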
diff --git a/Python/specialize.c b/Python/specialize.c
index cd09b188b7fa97..785088eac8c528 100644
--- a/Python/specialize.c
+++ b/Python/specialize.c
@@ -18,7 +18,7 @@
#ifdef Py_STATS
PyStats _py_stats_struct = { 0 };
-PyStats *_py_stats = &_py_stats_struct;
+PyStats *_py_stats = NULL;
#define ADD_STAT_TO_DICT(res, field) \
do { \
@@ -205,9 +205,6 @@ _Py_StatsClear(void)
void
_Py_PrintSpecializationStats(int to_file)
{
- if (_py_stats == NULL) {
- return;
- }
FILE *out = stderr;
if (to_file) {
/* Write to a file instead of stderr. */
@@ -238,7 +235,7 @@ _Py_PrintSpecializationStats(int to_file)
else {
fprintf(out, "Specialization stats:\n");
}
- print_stats(out, _py_stats);
+ print_stats(out, &_py_stats_struct);
if (out != stderr) {
fclose(out);
}
@@ -2132,6 +2129,10 @@ _Py_Specialize_ForIter(PyObject *iter, _Py_CODEUNIT *instr, int oparg)
_Py_SET_OPCODE(*instr, FOR_ITER_LIST);
goto success;
}
+ else if (tp == &PyTupleIter_Type) {
+ _Py_SET_OPCODE(*instr, FOR_ITER_TUPLE);
+ goto success;
+ }
else if (tp == &PyRangeIter_Type && next_op == STORE_FAST) {
_Py_SET_OPCODE(*instr, FOR_ITER_RANGE);
goto success;
diff --git a/Python/sysmodule.c b/Python/sysmodule.c
index 88f806e616f27e..91f5c487c98fe3 100644
--- a/Python/sysmodule.c
+++ b/Python/sysmodule.c
@@ -950,10 +950,6 @@ static int
profile_trampoline(PyObject *self, PyFrameObject *frame,
int what, PyObject *arg)
{
- if (arg == NULL) {
- arg = Py_None;
- }
-
PyThreadState *tstate = _PyThreadState_GET();
PyObject *result = call_trampoline(tstate, self, frame, what, arg);
if (result == NULL) {
diff --git a/Python/thread.c b/Python/thread.c
index 3c1e78ed1bca83..4581f1af043a37 100644
--- a/Python/thread.c
+++ b/Python/thread.c
@@ -8,15 +8,7 @@
#include "Python.h"
#include "pycore_pystate.h" // _PyInterpreterState_GET()
#include "pycore_structseq.h" // _PyStructSequence_FiniType()
-
-#ifndef _POSIX_THREADS
-/* This means pthreads are not implemented in libc headers, hence the macro
- not present in unistd.h. But they still can be implemented as an external
- library (e.g. gnu pth in pthread emulation) */
-# ifdef HAVE_PTHREAD_H
-# include <pthread.h> /* _POSIX_THREADS */
-# endif
-#endif
+#include "pycore_pythread.h"
#ifndef DONT_HAVE_STDIO_H
#include <stdio.h>
@@ -24,33 +16,17 @@
#include <stdlib.h>
-#ifndef _POSIX_THREADS
-
-/* Check if we're running on HP-UX and _SC_THREADS is defined. If so, then
- enough of the Posix threads package is implemented to support python
- threads.
-
- This is valid for HP-UX 11.23 running on an ia64 system. If needed, add
- a check of __ia64 to verify that we're running on an ia64 system instead
- of a pa-risc system.
-*/
-#ifdef __hpux
-#ifdef _SC_THREADS
-#define _POSIX_THREADS
-#endif
-#endif
-
-#endif /* _POSIX_THREADS */
-
-static int initialized;
static void PyThread__init_thread(void); /* Forward */
+#define initialized _PyRuntime.threads.initialized
+
void
PyThread_init_thread(void)
{
- if (initialized)
+ if (initialized) {
return;
+ }
initialized = 1;
PyThread__init_thread();
}
@@ -58,7 +34,7 @@ PyThread_init_thread(void)
#if defined(HAVE_PTHREAD_STUBS)
# define PYTHREAD_NAME "pthread-stubs"
# include "thread_pthread_stubs.h"
-#elif defined(_POSIX_THREADS)
+#elif defined(_USE_PTHREADS) /* AKA _PTHREADS */
# if defined(__EMSCRIPTEN__) && !defined(__EMSCRIPTEN_PTHREADS__)
# define PYTHREAD_NAME "pthread-stubs"
# else
diff --git a/Python/thread_nt.h b/Python/thread_nt.h
index d1f1323948a6c6..26f441bd6d3c56 100644
--- a/Python/thread_nt.h
+++ b/Python/thread_nt.h
@@ -152,11 +152,12 @@ unsigned long PyThread_get_thread_native_id(void);
#endif
/*
- * Initialization of the C package, should not be needed.
+ * Initialization for the current runtime.
*/
static void
PyThread__init_thread(void)
{
+ // Initialization of the C package should not be needed.
}
/*
diff --git a/Python/thread_pthread.h b/Python/thread_pthread.h
index 1c5b320813af83..76d6f3bcdf9c40 100644
--- a/Python/thread_pthread.h
+++ b/Python/thread_pthread.h
@@ -119,24 +119,21 @@
* pthread_cond support
*/
-#if defined(HAVE_PTHREAD_CONDATTR_SETCLOCK) && defined(HAVE_CLOCK_GETTIME) && defined(CLOCK_MONOTONIC)
-// monotonic is supported statically. It doesn't mean it works on runtime.
-#define CONDATTR_MONOTONIC
-#endif
-
-// NULL when pthread_condattr_setclock(CLOCK_MONOTONIC) is not supported.
-static pthread_condattr_t *condattr_monotonic = NULL;
+#define condattr_monotonic _PyRuntime.threads._condattr_monotonic.ptr
static void
init_condattr(void)
{
#ifdef CONDATTR_MONOTONIC
- static pthread_condattr_t ca;
+# define ca _PyRuntime.threads._condattr_monotonic.val
+ // XXX We need to check the return code?
pthread_condattr_init(&ca);
+ // XXX We need to run pthread_condattr_destroy() during runtime fini.
if (pthread_condattr_setclock(&ca, CLOCK_MONOTONIC) == 0) {
condattr_monotonic = &ca; // Use monotonic clock
}
-#endif
+# undef ca
+#endif // CONDATTR_MONOTONIC
}
int
@@ -192,15 +189,21 @@ typedef struct {
"%s: %s\n", name, strerror(status)); error = 1; }
/*
- * Initialization.
+ * Initialization for the current runtime.
*/
static void
PyThread__init_thread(void)
{
+ // The library is only initialized once in the process,
+ // regardless of how many times the Python runtime is initialized.
+ static int lib_initialized = 0;
+ if (!lib_initialized) {
+ lib_initialized = 1;
#if defined(_AIX) && defined(__GNUC__)
- extern void pthread_init(void);
- pthread_init();
+ extern void pthread_init(void);
+ pthread_init();
#endif
+ }
init_condattr();
}
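PyThread__init_thread() now separates one-time, process-wide library setup (guarded by a function-local static) from the per-runtime setup in init_condattr(), which runs on every runtime initialization. A minimal sketch of that split with illustrative names:

    static void example_init_thread(void)
    {
        /* Process-wide setup runs once, no matter how many times the
         * runtime is initialized and finalized. */
        static int lib_initialized = 0;
        if (!lib_initialized) {
            lib_initialized = 1;
            /* one-time library setup would go here */
        }
        /* Per-runtime setup (safe to repeat) runs on every call. */
    }

    int main(void)
    {
        example_init_thread();
        example_init_thread();   /* library setup is not repeated */
        return 0;
    }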
diff --git a/Python/thread_pthread_stubs.h b/Python/thread_pthread_stubs.h
index 8b80c0f87e2509..56e5b6141924b4 100644
--- a/Python/thread_pthread_stubs.h
+++ b/Python/thread_pthread_stubs.h
@@ -124,13 +124,10 @@ pthread_attr_destroy(pthread_attr_t *attr)
return 0;
}
-// pthread_key
-typedef struct {
- bool in_use;
- void *value;
-} py_tls_entry;
-static py_tls_entry py_tls_entries[PTHREAD_KEYS_MAX] = {0};
+typedef struct py_stub_tls_entry py_tls_entry;
+
+#define py_tls_entries (_PyRuntime.threads.stubs.tls_entries)
int
pthread_key_create(pthread_key_t *key, void (*destr_function)(void *))
diff --git a/README.rst b/README.rst
index 1fa019473643d9..ab9d3a6ea71cf6 100644
--- a/README.rst
+++ b/README.rst
@@ -1,4 +1,4 @@
-This is Python version 3.12.0 alpha 2
+This is Python version 3.12.0 alpha 3
=====================================
.. image:: https://github.com/python/cpython/workflows/Tests/badge.svg
diff --git a/Tools/build/deepfreeze.py b/Tools/build/deepfreeze.py
index 2eef649437a680..7f4e24280133f2 100644
--- a/Tools/build/deepfreeze.py
+++ b/Tools/build/deepfreeze.py
@@ -44,6 +44,7 @@ def make_string_literal(b: bytes) -> str:
CO_FAST_CELL = 0x40
CO_FAST_FREE = 0x80
+next_code_version = 1
def get_localsplus(code: types.CodeType):
a = collections.defaultdict(int)
@@ -227,6 +228,7 @@ def generate_unicode(self, name: str, s: str) -> str:
def generate_code(self, name: str, code: types.CodeType) -> str:
+ global next_code_version
# The ordering here matches PyCode_NewWithPosOnlyArgs()
# (but see below).
co_consts = self.generate(name + "_consts", code.co_consts)
@@ -268,6 +270,8 @@ def generate_code(self, name: str, code: types.CodeType) -> str:
self.write(f".co_nplaincellvars = {nplaincellvars},")
self.write(f".co_ncellvars = {ncellvars},")
self.write(f".co_nfreevars = {nfreevars},")
+ self.write(f".co_version = {next_code_version},")
+ next_code_version += 1
self.write(f".co_localsplusnames = {co_localsplusnames},")
self.write(f".co_localspluskinds = {co_localspluskinds},")
self.write(f".co_filename = {co_filename},")
@@ -461,6 +465,7 @@ def generate(args: list[str], output: TextIO) -> None:
with printer.block(f"if ({p} < 0)"):
printer.write("return -1;")
printer.write("return 0;")
+ printer.write(f"\nuint32_t _Py_next_func_version = {next_code_version};\n")
if verbose:
print(f"Cache hits: {printer.hits}, misses: {printer.misses}")
diff --git a/Tools/c-analyzer/cpython/globals-to-fix.tsv b/Tools/c-analyzer/cpython/globals-to-fix.tsv
index cc465134a9e065..479221cbd4b682 100644
--- a/Tools/c-analyzer/cpython/globals-to-fix.tsv
+++ b/Tools/c-analyzer/cpython/globals-to-fix.tsv
@@ -4,10 +4,10 @@ filename funcname name reason
# These are all variables that we will be making non-global.
##################################
-# global objects to fix in core code
+## global objects to fix in core code
-#-----------------------
-# exported builtin types (C-API)
+##-----------------------
+## exported builtin types (C-API)
Objects/boolobject.c - PyBool_Type -
Objects/bytearrayobject.c - PyByteArrayIter_Type -
@@ -102,8 +102,8 @@ Python/context.c - PyContextVar_Type -
Python/context.c - PyContext_Type -
Python/traceback.c - PyTraceBack_Type -
-#-----------------------
-# other exported builtin types
+##-----------------------
+## other exported builtin types
# Not in a .h file:
Objects/codeobject.c - _PyLineIterator -
@@ -126,8 +126,8 @@ Python/hamt.c - _PyHamt_CollisionNode_Type -
Python/hamt.c - _PyHamt_Type -
Python/symtable.c - PySTEntry_Type -
-#-----------------------
-# private static builtin types
+##-----------------------
+## private static builtin types
Objects/setobject.c - _PySetDummy_Type -
Objects/stringlib/unicode_format.h - PyFormatterIter_Type -
@@ -136,8 +136,8 @@ Objects/unicodeobject.c - EncodingMapType -
#Objects/unicodeobject.c - PyFieldNameIter_Type -
#Objects/unicodeobject.c - PyFormatterIter_Type -
-#-----------------------
-# static builtin structseq
+##-----------------------
+## static builtin structseq
Objects/floatobject.c - FloatInfoType -
Objects/longobject.c - Int_InfoType -
@@ -148,8 +148,8 @@ Python/sysmodule.c - Hash_InfoType -
Python/sysmodule.c - VersionInfoType -
Python/thread.c - ThreadInfoType -
-#-----------------------
-# builtin exception types
+##-----------------------
+## builtin exception types
Objects/exceptions.c - _PyExc_BaseException -
Objects/exceptions.c - _PyExc_BaseExceptionGroup -
@@ -286,8 +286,8 @@ Objects/exceptions.c - PyExc_BytesWarning -
Objects/exceptions.c - PyExc_ResourceWarning -
Objects/exceptions.c - PyExc_EncodingWarning -
-#-----------------------
-# singletons
+##-----------------------
+## singletons
Objects/boolobject.c - _Py_FalseStruct -
Objects/boolobject.c - _Py_TrueStruct -
@@ -300,39 +300,16 @@ Objects/sliceobject.c - _Py_EllipsisObject -
##################################
-# global non-objects to fix in core code
+## global non-objects to fix in core code
-#-----------------------
-# effectively-const but initialized lazily
-
-# idempotent
-Python/dtoa.c - p5s -
-Objects/obmalloc.c new_arena debug_stats -
-
-# others
-Python/perf_trampoline.c - perf_map_file -
-Objects/unicodeobject.c - ucnhash_capi -
-
-#-----------------------
-# state
-
-# local buffer
-Python/suggestions.c levenshtein_distance buffer -
-
-# other
-Objects/object.c - _Py_RefTotal -
-Python/perf_trampoline.c - perf_status -
-Python/perf_trampoline.c - extra_code_index -
-Python/perf_trampoline.c - code_arena -
-Python/perf_trampoline.c - trampoline_api -
-Python/thread_pthread_stubs.h - py_tls_entries -
+#
##################################
-# global objects to fix in builtin modules
+## global objects to fix in builtin modules
-#-----------------------
-# static types
+##-----------------------
+## static types
Modules/_collectionsmodule.c - defdict_type -
Modules/_collectionsmodule.c - deque_type -
@@ -381,57 +358,18 @@ Modules/itertoolsmodule.c - tee_type -
Modules/itertoolsmodule.c - teedataobject_type -
Modules/itertoolsmodule.c - ziplongest_type -
-#-----------------------
-# other
-
-# state
-Modules/faulthandler.c - fatal_error -
-Modules/faulthandler.c - thread -
-Modules/faulthandler.c - user_signals -
-Modules/faulthandler.c - stack -
-Modules/faulthandler.c - old_stack -
-
##################################
-# global non-objects to fix in builtin modules
-
-#-----------------------
-# initialized once
-
-Modules/_io/bufferedio.c _PyIO_trap_eintr eintr_int -
-Modules/posixmodule.c os_dup2_impl dup3_works -
-Modules/posixmodule.c - structseq_new -
-Modules/posixmodule.c - ticks_per_second -
-Modules/timemodule.c _PyTime_GetClockWithInfo initialized -
-Modules/timemodule.c _PyTime_GetProcessTimeWithInfo ticks_per_second -
-
-#-----------------------
-# state
-
-Modules/_tracemalloc.c - allocators -
-Modules/_tracemalloc.c - tables_lock -
-Modules/_tracemalloc.c - tracemalloc_empty_traceback -
-Modules/_tracemalloc.c - tracemalloc_traced_memory -
-Modules/_tracemalloc.c - tracemalloc_peak_traced_memory -
-Modules/_tracemalloc.c - tracemalloc_filenames -
-Modules/_tracemalloc.c - tracemalloc_traceback -
-Modules/_tracemalloc.c - tracemalloc_tracebacks -
-Modules/_tracemalloc.c - tracemalloc_traces -
-Modules/_tracemalloc.c - tracemalloc_domains -
-Modules/_tracemalloc.c - tracemalloc_reentrant_key -
-Modules/faulthandler.c faulthandler_dump_traceback reentrant -
-Modules/posixmodule.c - environ -
-Modules/signalmodule.c - is_tripped -
-Modules/signalmodule.c - signal_global_state -
-Modules/signalmodule.c - wakeup -
-Modules/signalmodule.c - Handlers -
+## global non-objects to fix in builtin modules
+
+#
##################################
-# global objects to fix in extension modules
+## global objects to fix in extension modules
-#-----------------------
-# static types
+##-----------------------
+## static types
Modules/_asynciomodule.c - FutureIterType -
Modules/_asynciomodule.c - FutureType -
@@ -496,7 +434,6 @@ Modules/_pickle.c - PicklerMemoProxyType -
Modules/_pickle.c - Pickler_Type -
Modules/_pickle.c - UnpicklerMemoProxyType -
Modules/_pickle.c - Unpickler_Type -
-Modules/_xxsubinterpretersmodule.c - ChannelIDtype -
Modules/_zoneinfo.c - PyZoneInfo_ZoneInfoType -
Modules/ossaudiodev.c - OSSAudioType -
Modules/ossaudiodev.c - OSSMixerType -
@@ -507,10 +444,10 @@ Modules/xxmodule.c - Xxo_Type -
Modules/xxsubtype.c - spamdict_type -
Modules/xxsubtype.c - spamlist_type -
-#-----------------------
-# non-static types - initialized once
+##-----------------------
+## non-static types - initialized once
-# heap types
+## heap types
Modules/_decimal/_decimal.c - DecimalTuple -
Modules/_decimal/_decimal.c - PyDecSignalDict_Type -
Modules/_tkinter.c - PyTclObject_Type -
@@ -518,32 +455,26 @@ Modules/_tkinter.c - Tkapp_Type -
Modules/_tkinter.c - Tktt_Type -
Modules/xxlimited_35.c - Xxo_Type -
-# exception types
+## exception types
Modules/_ctypes/_ctypes.c - PyExc_ArgError -
Modules/_cursesmodule.c - PyCursesError -
Modules/_decimal/_decimal.c - DecimalException -
Modules/_tkinter.c - Tkinter_TclError -
-Modules/_xxsubinterpretersmodule.c - ChannelError -
-Modules/_xxsubinterpretersmodule.c - ChannelNotFoundError -
-Modules/_xxsubinterpretersmodule.c - ChannelClosedError -
-Modules/_xxsubinterpretersmodule.c - ChannelEmptyError -
-Modules/_xxsubinterpretersmodule.c - ChannelNotEmptyError -
-Modules/_xxsubinterpretersmodule.c - RunFailedError -
Modules/ossaudiodev.c - OSSAudioError -
Modules/socketmodule.c - socket_herror -
Modules/socketmodule.c - socket_gaierror -
Modules/xxlimited_35.c - ErrorObject -
Modules/xxmodule.c - ErrorObject -
-#-----------------------
-# cached - initialized once
+##-----------------------
+## cached - initialized once
-# manually cached PyUnicodeOjbect
+## manually cached PyUnicodeObject
Modules/_asynciomodule.c - context_kwname -
Modules/_ctypes/callproc.c _ctypes_get_errobj error_object_name -
Modules/_ctypes/_ctypes.c CreateSwappedType suffix -
-# other - during module init
+## other - during module init
Modules/_asynciomodule.c - asyncio_mod -
Modules/_asynciomodule.c - traceback_extract_stack -
Modules/_asynciomodule.c - asyncio_future_repr_func -
@@ -558,10 +489,10 @@ Modules/_zoneinfo.c - io_open -
Modules/_zoneinfo.c - _tzpath_find_tzfile -
Modules/_zoneinfo.c - _common_mod -
-#-----------------------
-# other
+##-----------------------
+## other
-# initialized once
+## initialized once
Modules/_ctypes/_ctypes.c - _unpickle -
Modules/_ctypes/_ctypes.c PyCArrayType_from_ctype cache -
Modules/_cursesmodule.c - ModDict -
@@ -584,7 +515,7 @@ Modules/_decimal/_decimal.c - Rational -
Modules/_decimal/_decimal.c - SignalTuple -
Modules/arraymodule.c array_array___reduce_ex___impl array_reconstructor -
-# state
+## state
Modules/_asynciomodule.c - cached_running_holder -
Modules/_asynciomodule.c - fi_freelist -
Modules/_asynciomodule.c - fi_freelist_len -
@@ -599,20 +530,19 @@ Modules/_tkinter.c - valInCmd -
Modules/_tkinter.c - trbInCmd -
Modules/_zoneinfo.c - TIMEDELTA_CACHE -
Modules/_zoneinfo.c - ZONEINFO_WEAK_CACHE -
-Modules/syslogmodule.c - S_ident_o -
##################################
-# global non-objects to fix in extension modules
+## global non-objects to fix in extension modules
-#-----------------------
-# initialized once
+##-----------------------
+## initialized once
-# pre-allocated buffer
+## pre-allocated buffer
Modules/nismodule.c nisproc_maplist_2 res -
Modules/pyexpat.c PyUnknownEncodingHandler template_buffer -
-# other
+## other
Include/datetime.h - PyDateTimeAPI -
Modules/_asynciomodule.c - module_initialized -
Modules/_ctypes/cfield.c _ctypes_get_fielddesc initialized -
@@ -657,8 +587,8 @@ Modules/readline.c - libedit_history_start -
Modules/socketmodule.c - accept4_works -
Modules/socketmodule.c - sock_cloexec_works -
-#-----------------------
-# state
+##-----------------------
+## state
Modules/_asynciomodule.c - cached_running_holder_tsid -
Modules/_asynciomodule.c - task_name_counter -
@@ -688,4 +618,3 @@ Modules/readline.c - completed_input_string -
Modules/rotatingtree.c - random_stream -
Modules/rotatingtree.c - random_value -
Modules/socketmodule.c - defaulttimeout -
-Modules/syslogmodule.c - S_log_open -
diff --git a/Tools/c-analyzer/cpython/ignored.tsv b/Tools/c-analyzer/cpython/ignored.tsv
index 242deace8c945d..c71fc0d958216c 100644
--- a/Tools/c-analyzer/cpython/ignored.tsv
+++ b/Tools/c-analyzer/cpython/ignored.tsv
@@ -5,52 +5,62 @@ filename funcname name reason
##################################
-# process-global resources
+## process-global values - set once
-# Initialization for these should be idempotent.
+# These are never re-initialized (though doing so would be idempotent).
+# These are effectively const.
-#-----------------------
-# effectively const, set once before/during first init
+##-----------------------
+## process-global resources
-Modules/getbuildinfo.c - buildinfo -
-Modules/getbuildinfo.c - initialized -
-Python/getversion.c - initialized -
-Python/getversion.c - version -
-
-#-----------------------
-# effectively const, set once during first init
-
-Python/bootstrap_hash.c - _Py_HashSecret_Initialized -
-Python/pyhash.c - _Py_HashSecret -
-Python/thread.c - initialized -
-Python/thread_pthread.h - condattr_monotonic -
-
-# safe static buffer used during one-time initialization
-Python/thread_pthread.h init_condattr ca -
-
-# indicators for process-global resource availability/capability
+## indicators for resource availability/capability
+# (set during first init)
Python/bootstrap_hash.c py_getrandom getrandom_works -
Python/fileutils.c - _Py_open_cloexec_works -
Python/fileutils.c set_inheritable ioctl_works -
+# (set lazily, *after* first init)
+# XXX Is this thread-safe?
+Modules/posixmodule.c os_dup2_impl dup3_works -
-#-----------------------
-# effectively const but set once lazily (*after* first init)
+## guards around resource init
+Python/thread_pthread.h PyThread__init_thread lib_initialized -
+##-----------------------
+## other values (not Python-specific)
+
+## cached computed data - set lazily (*after* first init)
+# XXX Are these safe relative to write races?
Objects/longobject.c long_from_non_binary_base log_base_BASE -
Objects/longobject.c long_from_non_binary_base convwidth_base -
Objects/longobject.c long_from_non_binary_base convmultmax_base -
Objects/unicodeobject.c - bloom_linebreak -
+# This is safe:
Objects/unicodeobject.c _init_global_state initialized -
-# XXX Move to _PyRuntimeState?
+##-----------------------
+## other values (Python-specific)
+
+## internal state - set before/during first init
+Modules/getbuildinfo.c - buildinfo -
+Modules/getbuildinfo.c - initialized -
+Python/getversion.c - initialized -
+Python/getversion.c - version -
+
+## public C-API - set during first init
+Python/bootstrap_hash.c - _Py_HashSecret_Initialized -
+Python/pyhash.c - _Py_HashSecret -
+
+## internal state - set lazily (*after* first init)
+# XXX Move to _PyRuntimeState (i.e. tie to init/fini cycle)?
Parser/action_helpers.c _PyPegen_dummy_name cache -
##################################
-# state tied to C main() (only in main thread)
+## state tied to Py_Main()
+# (only in main thread)
-#-----------------------
-# handling C argv
+##-----------------------
+## handling C argv
Python/getopt.c - _PyOS_optarg -
Python/getopt.c - _PyOS_opterr -
@@ -58,8 +68,8 @@ Python/getopt.c - _PyOS_optind -
Python/getopt.c - opt_ptr -
Python/pathconfig.c - _Py_path_config -
-#-----------------------
-# REPL
+##-----------------------
+## REPL
Parser/myreadline.c - _PyOS_ReadlineLock -
Parser/myreadline.c - _PyOS_ReadlineTState -
@@ -68,28 +78,27 @@ Parser/myreadline.c - PyOS_ReadlineFunctionPointer -
##################################
-# state tied to each runtime init/fini cycle
+## runtime-global values - set once with each init
-Python/pylifecycle.c - _PyRuntime -
-Python/pylifecycle.c - runtime_initialized -
+# These are effectively const.
-# All uses of _PyArg_Parser are handled in c-analyzr/cpython/_analyzer.py.
+##-----------------------
+## set by embedders before init
+# (whether directly or through a call)
-#-----------------------
-# effectively const once init finishes
-
-# set by embedders before init (whether directly or through a call)
Python/initconfig.c - _Py_StandardStreamEncoding -
Python/initconfig.c - _Py_StandardStreamErrors -
-Python/initconfig.c - orig_argv -
-# deprecated
+##-----------------------
+## public C-API
+
+## deprecated
Python/preconfig.c - Py_FileSystemDefaultEncoding -
Python/preconfig.c - Py_HasFileSystemDefaultEncoding -
Python/preconfig.c - Py_FileSystemDefaultEncodeErrors -
Python/preconfig.c - _Py_HasFileSystemDefaultEncodeErrors -
-# legacy config flags
+## legacy config flags
Python/initconfig.c - Py_UTF8Mode -
Python/initconfig.c - Py_DebugFlag -
Python/initconfig.c - Py_VerboseFlag -
@@ -109,373 +118,68 @@ Python/initconfig.c - Py_IsolatedFlag -
Python/initconfig.c - Py_LegacyWindowsFSEncodingFlag -
Python/initconfig.c - Py_LegacyWindowsStdioFlag -
-# initialized statically, customized by embedders
+##-----------------------
+## initialized statically, may be customized by embedders
+
Python/frozen.c - PyImport_FrozenModules -
Python/import.c - inittab_copy -
Python/import.c - PyImport_Inittab -
-# used temporarily during init
-Python/sysmodule.c - _preinit_warnoptions -
-Python/sysmodule.c - _preinit_xoptions -
-
##################################
-# special-use diagnistic state
+## runtime-global state
-Parser/pegen.c - memo_statistics -
+##-----------------------
+## tied to each init/fini cycle
+## the consolidated runtime state
+Python/pylifecycle.c - _PyRuntime -
+Python/pylifecycle.c - runtime_initialized -
-##################################
-# one-off temporary state
+# All cases of _PyArg_Parser are handled in c-analyzer/cpython/_analyzer.py.
-# This is safe enough.
-Python/pylifecycle.c _Py_FatalErrorFormat reentrant -
-Python/pylifecycle.c fatal_error reentrant -
+## main interp state in stdlib modules
+Modules/syslogmodule.c - S_ident_o -
+Modules/syslogmodule.c - S_log_open -
+##-----------------------
+## kept for stable ABI compatibility
-##################################
-# not used (kept for compatibility)
+# XXX should be per-interpreter, without impacting stable ABI extensions
+Objects/object.c - _Py_RefTotal -
-Python/pyfpe.c - PyFPE_counter -
+##-----------------------
+## one-off temporary state
+
+# used during runtime init
+Python/sysmodule.c - _preinit_warnoptions -
+Python/sysmodule.c - _preinit_xoptions -
+
+# thread-safety
+# XXX need race protection?
+Modules/faulthandler.c faulthandler_dump_traceback reentrant -
+Python/pylifecycle.c _Py_FatalErrorFormat reentrant -
+Python/pylifecycle.c fatal_error reentrant -
##################################
-# The analyzer should have ignored these.
-# XXX Fix the analyzer.
+## not significant
-Modules/_io/_iomodule.c - _PyIO_Module -
-Modules/_sqlite/module.c - _sqlite3module -
+##-----------------------
+## not used (kept for compatibility)
-# forward/extern references
+Python/pyfpe.c - PyFPE_counter -
-Include/py_curses.h - PyCurses_API -
-Include/pydecimal.h - _decimal_api -
-Modules/_blake2/blake2module.c - blake2b_type_spec -
-Modules/_blake2/blake2module.c - blake2s_type_spec -
-Modules/_io/fileio.c - _Py_open_cloexec_works -
-Modules/_io/_iomodule.h - PyIOBase_Type -
-Modules/_io/_iomodule.h - PyRawIOBase_Type -
-Modules/_io/_iomodule.h - PyBufferedIOBase_Type -
-Modules/_io/_iomodule.h - PyTextIOBase_Type -
-Modules/_io/_iomodule.h - PyFileIO_Type -
-Modules/_io/_iomodule.h - PyBytesIO_Type -
-Modules/_io/_iomodule.h - PyStringIO_Type -
-Modules/_io/_iomodule.h - PyBufferedReader_Type -
-Modules/_io/_iomodule.h - PyBufferedWriter_Type -
-Modules/_io/_iomodule.h - PyBufferedRWPair_Type -
-Modules/_io/_iomodule.h - PyBufferedRandom_Type -
-Modules/_io/_iomodule.h - PyTextIOWrapper_Type -
-Modules/_io/_iomodule.h - PyIncrementalNewlineDecoder_Type -
-Modules/_io/_iomodule.h - _PyBytesIOBuffer_Type -
-Modules/_io/_iomodule.h - _PyIO_Module -
-Modules/_io/_iomodule.h - _PyIO_str_close -
-Modules/_io/_iomodule.h - _PyIO_str_closed -
-Modules/_io/_iomodule.h - _PyIO_str_decode -
-Modules/_io/_iomodule.h - _PyIO_str_encode -
-Modules/_io/_iomodule.h - _PyIO_str_fileno -
-Modules/_io/_iomodule.h - _PyIO_str_flush -
-Modules/_io/_iomodule.h - _PyIO_str_getstate -
-Modules/_io/_iomodule.h - _PyIO_str_isatty -
-Modules/_io/_iomodule.h - _PyIO_str_newlines -
-Modules/_io/_iomodule.h - _PyIO_str_nl -
-Modules/_io/_iomodule.h - _PyIO_str_peek -
-Modules/_io/_iomodule.h - _PyIO_str_read -
-Modules/_io/_iomodule.h - _PyIO_str_read1 -
-Modules/_io/_iomodule.h - _PyIO_str_readable -
-Modules/_io/_iomodule.h - _PyIO_str_readall -
-Modules/_io/_iomodule.h - _PyIO_str_readinto -
-Modules/_io/_iomodule.h - _PyIO_str_readline -
-Modules/_io/_iomodule.h - _PyIO_str_reset -
-Modules/_io/_iomodule.h - _PyIO_str_seek -
-Modules/_io/_iomodule.h - _PyIO_str_seekable -
-Modules/_io/_iomodule.h - _PyIO_str_setstate -
-Modules/_io/_iomodule.h - _PyIO_str_tell -
-Modules/_io/_iomodule.h - _PyIO_str_truncate -
-Modules/_io/_iomodule.h - _PyIO_str_writable -
-Modules/_io/_iomodule.h - _PyIO_str_write -
-Modules/_io/_iomodule.h - _PyIO_empty_str -
-Modules/_io/_iomodule.h - _PyIO_empty_bytes -
-Modules/_multiprocessing/multiprocessing.h - _PyMp_SemLockType -
-Modules/_sqlite/module.c - _pysqlite_converters -
-Modules/_sqlite/module.c - _pysqlite_enable_callback_tracebacks -
-Modules/_sqlite/module.c - pysqlite_BaseTypeAdapted -
-Modules/_sqlite/module.h - pysqlite_global_state -
-Modules/_testcapimodule.c - _PyBytesIOBuffer_Type -
-Modules/posixmodule.c - _Py_open_cloexec_works -
-Objects/object.c - _Py_GenericAliasIterType -
-Objects/object.c - _PyMemoryIter_Type -
-Objects/object.c - _PyLineIterator -
-Objects/object.c - _PyPositionsIterator -
-Python/perf_trampoline.c - _Py_trampoline_func_start -
-Python/perf_trampoline.c - _Py_trampoline_func_end -
-Python/importdl.h - _PyImport_DynLoadFiletab -
+##-----------------------
+## should be const
+# XXX Make them const.
-Modules/expat/xmlrole.c - prolog0 -
-Modules/expat/xmlrole.c - prolog1 -
-Modules/expat/xmlrole.c - prolog2 -
-Modules/expat/xmlrole.c - doctype0 -
-Modules/expat/xmlrole.c - doctype1 -
-Modules/expat/xmlrole.c - doctype2 -
-Modules/expat/xmlrole.c - doctype3 -
-Modules/expat/xmlrole.c - doctype4 -
-Modules/expat/xmlrole.c - doctype5 -
-Modules/expat/xmlrole.c - internalSubset -
-Modules/expat/xmlrole.c - entity0 -
-Modules/expat/xmlrole.c - entity1 -
-Modules/expat/xmlrole.c - entity2 -
-Modules/expat/xmlrole.c - entity3 -
-Modules/expat/xmlrole.c - entity4 -
-Modules/expat/xmlrole.c - entity5 -
-Modules/expat/xmlrole.c - entity6 -
-Modules/expat/xmlrole.c - entity7 -
-Modules/expat/xmlrole.c - entity8 -
-Modules/expat/xmlrole.c - entity9 -
-Modules/expat/xmlrole.c - entity10 -
-Modules/expat/xmlrole.c - notation0 -
-Modules/expat/xmlrole.c - notation1 -
-Modules/expat/xmlrole.c - notation2 -
-Modules/expat/xmlrole.c - notation3 -
-Modules/expat/xmlrole.c - notation4 -
-Modules/expat/xmlrole.c - attlist0 -
-Modules/expat/xmlrole.c - attlist1 -
-Modules/expat/xmlrole.c - attlist2 -
-Modules/expat/xmlrole.c - attlist3 -
-Modules/expat/xmlrole.c - attlist4 -
-Modules/expat/xmlrole.c - attlist5 -
-Modules/expat/xmlrole.c - attlist6 -
-Modules/expat/xmlrole.c - attlist7 -
-Modules/expat/xmlrole.c - attlist8 -
-Modules/expat/xmlrole.c - attlist9 -
-Modules/expat/xmlrole.c - element0 -
-Modules/expat/xmlrole.c - element1 -
-Modules/expat/xmlrole.c - element2 -
-Modules/expat/xmlrole.c - element3 -
-Modules/expat/xmlrole.c - element4 -
-Modules/expat/xmlrole.c - element5 -
-Modules/expat/xmlrole.c - element6 -
-Modules/expat/xmlrole.c - element7 -
-Modules/expat/xmlrole.c - externalSubset0 -
-Modules/expat/xmlrole.c - externalSubset1 -
-Modules/expat/xmlrole.c - condSect0 -
-Modules/expat/xmlrole.c - condSect1 -
-Modules/expat/xmlrole.c - condSect2 -
-Modules/expat/xmlrole.c - declClose -
-Modules/expat/xmlrole.c - error -
+# These are all variables that we will be leaving global.
+# All module defs, type defs, etc. are handled in c-analyzer/cpython/_analyzer.py.
+# All kwlist arrays are handled in c-analyzer/cpython/_analyzer.py.
-##################################
-# test code
-
-Modules/_ctypes/_ctypes_test.c - _ctypes_test_slots -
-Modules/_ctypes/_ctypes_test.c - _ctypes_testmodule -
-Modules/_ctypes/_ctypes_test.c - _xxx_lib -
-Modules/_ctypes/_ctypes_test.c - an_integer -
-Modules/_ctypes/_ctypes_test.c - bottom -
-Modules/_ctypes/_ctypes_test.c - last_tf_arg_s -
-Modules/_ctypes/_ctypes_test.c - last_tf_arg_u -
-Modules/_ctypes/_ctypes_test.c - last_tfrsuv_arg -
-Modules/_ctypes/_ctypes_test.c - left -
-Modules/_ctypes/_ctypes_test.c - module_methods -
-Modules/_ctypes/_ctypes_test.c - my_eggs -
-Modules/_ctypes/_ctypes_test.c - my_spams -
-Modules/_ctypes/_ctypes_test.c - right -
-Modules/_ctypes/_ctypes_test.c - top -
-Modules/_testbuffer.c - NDArray_Type -
-Modules/_testbuffer.c - StaticArray_Type -
-Modules/_testbuffer.c - Struct -
-Modules/_testbuffer.c - _testbuffer_functions -
-Modules/_testbuffer.c - _testbuffermodule -
-Modules/_testbuffer.c - calcsize -
-Modules/_testbuffer.c - infobuf -
-Modules/_testbuffer.c - ndarray_as_buffer -
-Modules/_testbuffer.c - ndarray_as_mapping -
-Modules/_testbuffer.c - ndarray_as_sequence -
-Modules/_testbuffer.c - ndarray_getset -
-Modules/_testbuffer.c - ndarray_methods -
-Modules/_testbuffer.c - simple_fmt -
-Modules/_testbuffer.c - simple_format -
-Modules/_testbuffer.c - static_buffer -
-Modules/_testbuffer.c - static_mem -
-Modules/_testbuffer.c - static_shape -
-Modules/_testbuffer.c - static_strides -
-Modules/_testbuffer.c - staticarray_as_buffer -
-Modules/_testbuffer.c - structmodule -
-Modules/_testbuffer.c ndarray_init kwlist -
-Modules/_testbuffer.c ndarray_memoryview_from_buffer format -
-Modules/_testbuffer.c ndarray_memoryview_from_buffer info -
-Modules/_testbuffer.c ndarray_memoryview_from_buffer shape -
-Modules/_testbuffer.c ndarray_memoryview_from_buffer strides -
-Modules/_testbuffer.c ndarray_memoryview_from_buffer suboffsets -
-Modules/_testbuffer.c ndarray_push kwlist -
-Modules/_testbuffer.c staticarray_init kwlist -
-Modules/_testcapi/heaptype.c - _testcapimodule -
-Modules/_testcapi/unicode.c - _testcapimodule -
-Modules/_testcapimodule.c - ContainerNoGC_members -
-Modules/_testcapimodule.c - ContainerNoGC_type -
-Modules/_testcapimodule.c - FmData -
-Modules/_testcapimodule.c - FmHook -
-Modules/_testcapimodule.c - GenericAlias_Type -
-Modules/_testcapimodule.c - Generic_Type -
-Modules/_testcapimodule.c - HeapCTypeSetattr_slots -
-Modules/_testcapimodule.c - HeapCTypeSetattr_spec -
-Modules/_testcapimodule.c - HeapCTypeSubclassWithFinalizer_slots -
-Modules/_testcapimodule.c - HeapCTypeSubclassWithFinalizer_spec -
-Modules/_testcapimodule.c - HeapCTypeSubclass_slots -
-Modules/_testcapimodule.c - HeapCTypeSubclass_spec -
-Modules/_testcapimodule.c - HeapCTypeWithBuffer_slots -
-Modules/_testcapimodule.c - HeapCTypeWithBuffer_spec -
-Modules/_testcapimodule.c - HeapCTypeWithDict_slots -
-Modules/_testcapimodule.c - HeapCTypeWithDict_spec -
-Modules/_testcapimodule.c - HeapCTypeWithNegativeDict_slots -
-Modules/_testcapimodule.c - HeapCTypeWithNegativeDict_spec -
-Modules/_testcapimodule.c - HeapCTypeWithWeakref_slots -
-Modules/_testcapimodule.c - HeapCTypeWithWeakref_spec -
-Modules/_testcapimodule.c - HeapCType_slots -
-Modules/_testcapimodule.c - HeapCType_spec -
-Modules/_testcapimodule.c - HeapDocCType_slots -
-Modules/_testcapimodule.c - HeapDocCType_spec -
-Modules/_testcapimodule.c - HeapGcCType_slots -
-Modules/_testcapimodule.c - HeapGcCType_spec -
-Modules/_testcapimodule.c - MethClass_Type -
-Modules/_testcapimodule.c - MethInstance_Type -
-Modules/_testcapimodule.c - MethStatic_Type -
-Modules/_testcapimodule.c - MethodDescriptor2_Type -
-Modules/_testcapimodule.c - MethodDescriptorBase_Type -
-Modules/_testcapimodule.c - MethodDescriptorDerived_Type -
-Modules/_testcapimodule.c - MethodDescriptorNopGet_Type -
-Modules/_testcapimodule.c - MyList_Type -
-Modules/_testcapimodule.c - PyRecursingInfinitelyError_Type -
-Modules/_testcapimodule.c - TestError -
-Modules/_testcapimodule.c - TestMethods -
-Modules/_testcapimodule.c - _HashInheritanceTester_Type -
-Modules/_testcapimodule.c - _testcapimodule -
-Modules/_testcapimodule.c - awaitType -
-Modules/_testcapimodule.c - awaitType_as_async -
-Modules/_testcapimodule.c - capsule_context -
-Modules/_testcapimodule.c - capsule_destructor_call_count -
-Modules/_testcapimodule.c - capsule_error -
-Modules/_testcapimodule.c - capsule_name -
-Modules/_testcapimodule.c - capsule_pointer -
-Modules/_testcapimodule.c - decimal_initialized -
-Modules/_testcapimodule.c - generic_alias_methods -
-Modules/_testcapimodule.c - generic_methods -
-Modules/_testcapimodule.c - heapctype_members -
-Modules/_testcapimodule.c - heapctypesetattr_members -
-Modules/_testcapimodule.c - heapctypesubclass_members -
-Modules/_testcapimodule.c - heapctypewithdict_getsetlist -
-Modules/_testcapimodule.c - heapctypewithdict_members -
-Modules/_testcapimodule.c - heapctypewithnegativedict_members -
-Modules/_testcapimodule.c - heapctypewithweakref_members -
-Modules/_testcapimodule.c - ipowType -
-Modules/_testcapimodule.c - ipowType_as_number -
-Modules/_testcapimodule.c - matmulType -
-Modules/_testcapimodule.c - matmulType_as_number -
-Modules/_testcapimodule.c - meth_class_methods -
-Modules/_testcapimodule.c - meth_instance_methods -
-Modules/_testcapimodule.c - meth_static_methods -
-Modules/_testcapimodule.c - ml -
-Modules/_testcapimodule.c - str1 -
-Modules/_testcapimodule.c - str2 -
-Modules/_testcapimodule.c - test_members -
-Modules/_testcapimodule.c - test_run_counter -
-Modules/_testcapimodule.c - test_structmembersType -
-Modules/_testcapimodule.c - thread_done -
-Modules/_testcapimodule.c - x -
-Modules/_testcapimodule.c getargs_keyword_only keywords -
-Modules/_testcapimodule.c getargs_keywords keywords -
-Modules/_testcapimodule.c getargs_positional_only_and_keywords keywords -
-Modules/_testcapimodule.c getargs_s_hash_int2 keywords static char*[]
-Modules/_testcapimodule.c make_exception_with_doc kwlist -
-Modules/_testcapimodule.c raise_SIGINT_then_send_None PyId_send -
-Modules/_testcapimodule.c slot_tp_del PyId___tp_del__ -
-Modules/_testcapimodule.c test_capsule buffer -
-Modules/_testcapimodule.c test_empty_argparse kwlist -
-Modules/_testcapimodule.c test_structmembers_new keywords -
-Modules/_testcapimodule.c getargs_s_hash_int keywords -
-Modules/_testcapimodule.c - g_dict_watch_events -
-Modules/_testcapimodule.c - g_dict_watchers_installed -
-Modules/_testcapimodule.c - g_type_modified_events -
-Modules/_testcapimodule.c - g_type_watchers_installed -
-Modules/_testimportmultiple.c - _barmodule -
-Modules/_testimportmultiple.c - _foomodule -
-Modules/_testimportmultiple.c - _testimportmultiple -
-Modules/_testinternalcapi.c - TestMethods -
-Modules/_testinternalcapi.c - _testcapimodule -
-Modules/_testmultiphase.c - Example_Type_slots -
-Modules/_testmultiphase.c - Example_Type_spec -
-Modules/_testmultiphase.c - Example_methods -
-Modules/_testmultiphase.c - StateAccessType_Type_slots -
-Modules/_testmultiphase.c - StateAccessType_methods -
-Modules/_testmultiphase.c - StateAccessType_spec -
-Modules/_testmultiphase.c - Str_Type_slots -
-Modules/_testmultiphase.c - Str_Type_spec -
-Modules/_testmultiphase.c - def_bad_large -
-Modules/_testmultiphase.c - def_bad_negative -
-Modules/_testmultiphase.c - def_create_int_with_state -
-Modules/_testmultiphase.c - def_create_null -
-Modules/_testmultiphase.c - def_create_raise -
-Modules/_testmultiphase.c - def_create_unreported_exception -
-Modules/_testmultiphase.c - def_exec_err -
-Modules/_testmultiphase.c - def_exec_raise -
-Modules/_testmultiphase.c - def_exec_unreported_exception -
-Modules/_testmultiphase.c - def_meth_state_access -
-Modules/_testmultiphase.c - def_negative_size -
-Modules/_testmultiphase.c - def_nonascii_kana -
-Modules/_testmultiphase.c - def_nonascii_latin -
-Modules/_testmultiphase.c - def_nonmodule -
-Modules/_testmultiphase.c - def_nonmodule_with_exec_slots -
-Modules/_testmultiphase.c - def_nonmodule_with_methods -
-Modules/_testmultiphase.c - imp_dummy_def -
-Modules/_testmultiphase.c - main_def -
-Modules/_testmultiphase.c - main_slots -
-Modules/_testmultiphase.c - meth_state_access_slots -
-Modules/_testmultiphase.c - nonmodule_methods -
-Modules/_testmultiphase.c - null_slots_def -
-Modules/_testmultiphase.c - slots_bad_large -
-Modules/_testmultiphase.c - slots_bad_negative -
-Modules/_testmultiphase.c - slots_create_nonmodule -
-Modules/_testmultiphase.c - slots_create_nonmodule -
-Modules/_testmultiphase.c - slots_create_null -
-Modules/_testmultiphase.c - slots_create_raise -
-Modules/_testmultiphase.c - slots_create_unreported_exception -
-Modules/_testmultiphase.c - slots_exec_err -
-Modules/_testmultiphase.c - slots_exec_raise -
-Modules/_testmultiphase.c - slots_exec_unreported_exception -
-Modules/_testmultiphase.c - slots_nonmodule_with_exec_slots -
-Modules/_testmultiphase.c - testexport_methods -
-Modules/_testmultiphase.c - uninitialized_def -
-Modules/_xxtestfuzz/_xxtestfuzz.c - _fuzzmodule -
-Modules/_xxtestfuzz/_xxtestfuzz.c - module_methods -
-Modules/_xxtestfuzz/fuzzer.c - SRE_FLAG_DEBUG -
-Modules/_xxtestfuzz/fuzzer.c - ast_literal_eval_method -
-Modules/_xxtestfuzz/fuzzer.c - compiled_patterns -
-Modules/_xxtestfuzz/fuzzer.c - csv_error -
-Modules/_xxtestfuzz/fuzzer.c - csv_module -
-Modules/_xxtestfuzz/fuzzer.c - json_loads_method -
-Modules/_xxtestfuzz/fuzzer.c - regex_patterns -
-Modules/_xxtestfuzz/fuzzer.c - sre_compile_method -
-Modules/_xxtestfuzz/fuzzer.c - sre_error_exception -
-Modules/_xxtestfuzz/fuzzer.c - struct_error -
-Modules/_xxtestfuzz/fuzzer.c - struct_unpack_method -
-Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput CSV_READER_INITIALIZED -
-Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput JSON_LOADS_INITIALIZED -
-Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput SRE_COMPILE_INITIALIZED -
-Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput SRE_MATCH_INITIALIZED -
-Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput STRUCT_UNPACK_INITIALIZED -
-Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput AST_LITERAL_EVAL_INITIALIZED -
-
-
-##################################
-# should be const
-# XXX Make them const.
-
-# These are all variables that we will be leaving global.
-
-# All module defs, type defs, etc. are handled in c-analyzr/cpython/_analyzer.py.
-# All kwlist arrays are handled in c-analyzr/cpython/_analyzer.py.
-
-#-----------------------
-# other vars that are actually constant
+# other vars that are actually constant
Include/internal/pycore_blocks_output_buffer.h - BUFFER_BLOCK_SIZE -
Modules/_csv.c - quote_styles -
@@ -646,3 +350,327 @@ Python/stdlib_module_names.h - _Py_stdlib_module_names -
Python/sysmodule.c - _PySys_ImplCacheTag -
Python/sysmodule.c - _PySys_ImplName -
Python/sysmodule.c - whatstrings -
+
+##-----------------------
+## test code
+
+Modules/_ctypes/_ctypes_test.c - _ctypes_test_slots -
+Modules/_ctypes/_ctypes_test.c - _ctypes_testmodule -
+Modules/_ctypes/_ctypes_test.c - _xxx_lib -
+Modules/_ctypes/_ctypes_test.c - an_integer -
+Modules/_ctypes/_ctypes_test.c - bottom -
+Modules/_ctypes/_ctypes_test.c - last_tf_arg_s -
+Modules/_ctypes/_ctypes_test.c - last_tf_arg_u -
+Modules/_ctypes/_ctypes_test.c - last_tfrsuv_arg -
+Modules/_ctypes/_ctypes_test.c - left -
+Modules/_ctypes/_ctypes_test.c - module_methods -
+Modules/_ctypes/_ctypes_test.c - my_eggs -
+Modules/_ctypes/_ctypes_test.c - my_spams -
+Modules/_ctypes/_ctypes_test.c - right -
+Modules/_ctypes/_ctypes_test.c - top -
+Modules/_testbuffer.c - NDArray_Type -
+Modules/_testbuffer.c - StaticArray_Type -
+Modules/_testbuffer.c - Struct -
+Modules/_testbuffer.c - _testbuffer_functions -
+Modules/_testbuffer.c - _testbuffermodule -
+Modules/_testbuffer.c - calcsize -
+Modules/_testbuffer.c - infobuf -
+Modules/_testbuffer.c - ndarray_as_buffer -
+Modules/_testbuffer.c - ndarray_as_mapping -
+Modules/_testbuffer.c - ndarray_as_sequence -
+Modules/_testbuffer.c - ndarray_getset -
+Modules/_testbuffer.c - ndarray_methods -
+Modules/_testbuffer.c - simple_fmt -
+Modules/_testbuffer.c - simple_format -
+Modules/_testbuffer.c - static_buffer -
+Modules/_testbuffer.c - static_mem -
+Modules/_testbuffer.c - static_shape -
+Modules/_testbuffer.c - static_strides -
+Modules/_testbuffer.c - staticarray_as_buffer -
+Modules/_testbuffer.c - structmodule -
+Modules/_testbuffer.c ndarray_init kwlist -
+Modules/_testbuffer.c ndarray_memoryview_from_buffer format -
+Modules/_testbuffer.c ndarray_memoryview_from_buffer info -
+Modules/_testbuffer.c ndarray_memoryview_from_buffer shape -
+Modules/_testbuffer.c ndarray_memoryview_from_buffer strides -
+Modules/_testbuffer.c ndarray_memoryview_from_buffer suboffsets -
+Modules/_testbuffer.c ndarray_push kwlist -
+Modules/_testbuffer.c staticarray_init kwlist -
+Modules/_testcapi/heaptype.c - _testcapimodule -
+Modules/_testcapi/unicode.c - _testcapimodule -
+Modules/_testcapimodule.c - ContainerNoGC_members -
+Modules/_testcapimodule.c - ContainerNoGC_type -
+Modules/_testcapimodule.c - FmData -
+Modules/_testcapimodule.c - FmHook -
+Modules/_testcapimodule.c - GenericAlias_Type -
+Modules/_testcapimodule.c - Generic_Type -
+Modules/_testcapimodule.c - HeapCTypeSetattr_slots -
+Modules/_testcapimodule.c - HeapCTypeSetattr_spec -
+Modules/_testcapimodule.c - HeapCTypeSubclassWithFinalizer_slots -
+Modules/_testcapimodule.c - HeapCTypeSubclassWithFinalizer_spec -
+Modules/_testcapimodule.c - HeapCTypeSubclass_slots -
+Modules/_testcapimodule.c - HeapCTypeSubclass_spec -
+Modules/_testcapimodule.c - HeapCTypeWithBuffer_slots -
+Modules/_testcapimodule.c - HeapCTypeWithBuffer_spec -
+Modules/_testcapimodule.c - HeapCTypeWithDict_slots -
+Modules/_testcapimodule.c - HeapCTypeWithDict_spec -
+Modules/_testcapimodule.c - HeapCTypeWithNegativeDict_slots -
+Modules/_testcapimodule.c - HeapCTypeWithNegativeDict_spec -
+Modules/_testcapimodule.c - HeapCTypeWithWeakref_slots -
+Modules/_testcapimodule.c - HeapCTypeWithWeakref_spec -
+Modules/_testcapimodule.c - HeapCType_slots -
+Modules/_testcapimodule.c - HeapCType_spec -
+Modules/_testcapimodule.c - HeapDocCType_slots -
+Modules/_testcapimodule.c - HeapDocCType_spec -
+Modules/_testcapimodule.c - HeapGcCType_slots -
+Modules/_testcapimodule.c - HeapGcCType_spec -
+Modules/_testcapimodule.c - MethClass_Type -
+Modules/_testcapimodule.c - MethInstance_Type -
+Modules/_testcapimodule.c - MethStatic_Type -
+Modules/_testcapimodule.c - MethodDescriptor2_Type -
+Modules/_testcapimodule.c - MethodDescriptorBase_Type -
+Modules/_testcapimodule.c - MethodDescriptorDerived_Type -
+Modules/_testcapimodule.c - MethodDescriptorNopGet_Type -
+Modules/_testcapimodule.c - MyList_Type -
+Modules/_testcapimodule.c - PyRecursingInfinitelyError_Type -
+Modules/_testcapimodule.c - TestError -
+Modules/_testcapimodule.c - TestMethods -
+Modules/_testcapimodule.c - _HashInheritanceTester_Type -
+Modules/_testcapimodule.c - _testcapimodule -
+Modules/_testcapimodule.c - awaitType -
+Modules/_testcapimodule.c - awaitType_as_async -
+Modules/_testcapimodule.c - capsule_context -
+Modules/_testcapimodule.c - capsule_destructor_call_count -
+Modules/_testcapimodule.c - capsule_error -
+Modules/_testcapimodule.c - capsule_name -
+Modules/_testcapimodule.c - capsule_pointer -
+Modules/_testcapimodule.c - decimal_initialized -
+Modules/_testcapimodule.c - generic_alias_methods -
+Modules/_testcapimodule.c - generic_methods -
+Modules/_testcapimodule.c - heapctype_members -
+Modules/_testcapimodule.c - heapctypesetattr_members -
+Modules/_testcapimodule.c - heapctypesubclass_members -
+Modules/_testcapimodule.c - heapctypewithdict_getsetlist -
+Modules/_testcapimodule.c - heapctypewithdict_members -
+Modules/_testcapimodule.c - heapctypewithnegativedict_members -
+Modules/_testcapimodule.c - heapctypewithweakref_members -
+Modules/_testcapimodule.c - ipowType -
+Modules/_testcapimodule.c - ipowType_as_number -
+Modules/_testcapimodule.c - matmulType -
+Modules/_testcapimodule.c - matmulType_as_number -
+Modules/_testcapimodule.c - meth_class_methods -
+Modules/_testcapimodule.c - meth_instance_methods -
+Modules/_testcapimodule.c - meth_static_methods -
+Modules/_testcapimodule.c - ml -
+Modules/_testcapimodule.c - str1 -
+Modules/_testcapimodule.c - str2 -
+Modules/_testcapimodule.c - test_members -
+Modules/_testcapimodule.c - test_run_counter -
+Modules/_testcapimodule.c - test_structmembersType -
+Modules/_testcapimodule.c - thread_done -
+Modules/_testcapimodule.c - x -
+Modules/_testcapimodule.c getargs_keyword_only keywords -
+Modules/_testcapimodule.c getargs_keywords keywords -
+Modules/_testcapimodule.c getargs_positional_only_and_keywords keywords -
+Modules/_testcapimodule.c getargs_s_hash_int2 keywords static char*[]
+Modules/_testcapimodule.c make_exception_with_doc kwlist -
+Modules/_testcapimodule.c raise_SIGINT_then_send_None PyId_send -
+Modules/_testcapimodule.c slot_tp_del PyId___tp_del__ -
+Modules/_testcapimodule.c test_capsule buffer -
+Modules/_testcapimodule.c test_empty_argparse kwlist -
+Modules/_testcapimodule.c test_structmembers_new keywords -
+Modules/_testcapimodule.c getargs_s_hash_int keywords -
+Modules/_testcapimodule.c - g_dict_watch_events -
+Modules/_testcapimodule.c - g_dict_watchers_installed -
+Modules/_testcapimodule.c - g_type_modified_events -
+Modules/_testcapimodule.c - g_type_watchers_installed -
+Modules/_testimportmultiple.c - _barmodule -
+Modules/_testimportmultiple.c - _foomodule -
+Modules/_testimportmultiple.c - _testimportmultiple -
+Modules/_testinternalcapi.c - TestMethods -
+Modules/_testinternalcapi.c - _testcapimodule -
+Modules/_testmultiphase.c - Example_Type_slots -
+Modules/_testmultiphase.c - Example_Type_spec -
+Modules/_testmultiphase.c - Example_methods -
+Modules/_testmultiphase.c - StateAccessType_Type_slots -
+Modules/_testmultiphase.c - StateAccessType_methods -
+Modules/_testmultiphase.c - StateAccessType_spec -
+Modules/_testmultiphase.c - Str_Type_slots -
+Modules/_testmultiphase.c - Str_Type_spec -
+Modules/_testmultiphase.c - def_bad_large -
+Modules/_testmultiphase.c - def_bad_negative -
+Modules/_testmultiphase.c - def_create_int_with_state -
+Modules/_testmultiphase.c - def_create_null -
+Modules/_testmultiphase.c - def_create_raise -
+Modules/_testmultiphase.c - def_create_unreported_exception -
+Modules/_testmultiphase.c - def_exec_err -
+Modules/_testmultiphase.c - def_exec_raise -
+Modules/_testmultiphase.c - def_exec_unreported_exception -
+Modules/_testmultiphase.c - def_meth_state_access -
+Modules/_testmultiphase.c - def_negative_size -
+Modules/_testmultiphase.c - def_nonascii_kana -
+Modules/_testmultiphase.c - def_nonascii_latin -
+Modules/_testmultiphase.c - def_nonmodule -
+Modules/_testmultiphase.c - def_nonmodule_with_exec_slots -
+Modules/_testmultiphase.c - def_nonmodule_with_methods -
+Modules/_testmultiphase.c - imp_dummy_def -
+Modules/_testmultiphase.c - main_def -
+Modules/_testmultiphase.c - main_slots -
+Modules/_testmultiphase.c - meth_state_access_slots -
+Modules/_testmultiphase.c - nonmodule_methods -
+Modules/_testmultiphase.c - null_slots_def -
+Modules/_testmultiphase.c - slots_bad_large -
+Modules/_testmultiphase.c - slots_bad_negative -
+Modules/_testmultiphase.c - slots_create_nonmodule -
+Modules/_testmultiphase.c - slots_create_nonmodule -
+Modules/_testmultiphase.c - slots_create_null -
+Modules/_testmultiphase.c - slots_create_raise -
+Modules/_testmultiphase.c - slots_create_unreported_exception -
+Modules/_testmultiphase.c - slots_exec_err -
+Modules/_testmultiphase.c - slots_exec_raise -
+Modules/_testmultiphase.c - slots_exec_unreported_exception -
+Modules/_testmultiphase.c - slots_nonmodule_with_exec_slots -
+Modules/_testmultiphase.c - testexport_methods -
+Modules/_testmultiphase.c - uninitialized_def -
+Modules/_xxtestfuzz/_xxtestfuzz.c - _fuzzmodule -
+Modules/_xxtestfuzz/_xxtestfuzz.c - module_methods -
+Modules/_xxtestfuzz/fuzzer.c - SRE_FLAG_DEBUG -
+Modules/_xxtestfuzz/fuzzer.c - ast_literal_eval_method -
+Modules/_xxtestfuzz/fuzzer.c - compiled_patterns -
+Modules/_xxtestfuzz/fuzzer.c - csv_error -
+Modules/_xxtestfuzz/fuzzer.c - csv_module -
+Modules/_xxtestfuzz/fuzzer.c - json_loads_method -
+Modules/_xxtestfuzz/fuzzer.c - regex_patterns -
+Modules/_xxtestfuzz/fuzzer.c - sre_compile_method -
+Modules/_xxtestfuzz/fuzzer.c - sre_error_exception -
+Modules/_xxtestfuzz/fuzzer.c - struct_error -
+Modules/_xxtestfuzz/fuzzer.c - struct_unpack_method -
+Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput CSV_READER_INITIALIZED -
+Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput JSON_LOADS_INITIALIZED -
+Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput SRE_COMPILE_INITIALIZED -
+Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput SRE_MATCH_INITIALIZED -
+Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput STRUCT_UNPACK_INITIALIZED -
+Modules/_xxtestfuzz/fuzzer.c LLVMFuzzerTestOneInput AST_LITERAL_EVAL_INITIALIZED -
+
+##-----------------------
+## the analyzer should have ignored these
+# XXX Fix the analyzer.
+
+## forward/extern references
+Include/py_curses.h - PyCurses_API -
+Include/pydecimal.h - _decimal_api -
+Modules/_blake2/blake2module.c - blake2b_type_spec -
+Modules/_blake2/blake2module.c - blake2s_type_spec -
+Modules/_io/fileio.c - _Py_open_cloexec_works -
+Modules/_io/_iomodule.h - PyIOBase_Type -
+Modules/_io/_iomodule.h - PyRawIOBase_Type -
+Modules/_io/_iomodule.h - PyBufferedIOBase_Type -
+Modules/_io/_iomodule.h - PyTextIOBase_Type -
+Modules/_io/_iomodule.h - PyFileIO_Type -
+Modules/_io/_iomodule.h - PyBytesIO_Type -
+Modules/_io/_iomodule.h - PyStringIO_Type -
+Modules/_io/_iomodule.h - PyBufferedReader_Type -
+Modules/_io/_iomodule.h - PyBufferedWriter_Type -
+Modules/_io/_iomodule.h - PyBufferedRWPair_Type -
+Modules/_io/_iomodule.h - PyBufferedRandom_Type -
+Modules/_io/_iomodule.h - PyTextIOWrapper_Type -
+Modules/_io/_iomodule.h - PyIncrementalNewlineDecoder_Type -
+Modules/_io/_iomodule.h - _PyBytesIOBuffer_Type -
+Modules/_io/_iomodule.h - _PyIO_Module -
+Modules/_io/_iomodule.h - _PyIO_str_close -
+Modules/_io/_iomodule.h - _PyIO_str_closed -
+Modules/_io/_iomodule.h - _PyIO_str_decode -
+Modules/_io/_iomodule.h - _PyIO_str_encode -
+Modules/_io/_iomodule.h - _PyIO_str_fileno -
+Modules/_io/_iomodule.h - _PyIO_str_flush -
+Modules/_io/_iomodule.h - _PyIO_str_getstate -
+Modules/_io/_iomodule.h - _PyIO_str_isatty -
+Modules/_io/_iomodule.h - _PyIO_str_newlines -
+Modules/_io/_iomodule.h - _PyIO_str_nl -
+Modules/_io/_iomodule.h - _PyIO_str_peek -
+Modules/_io/_iomodule.h - _PyIO_str_read -
+Modules/_io/_iomodule.h - _PyIO_str_read1 -
+Modules/_io/_iomodule.h - _PyIO_str_readable -
+Modules/_io/_iomodule.h - _PyIO_str_readall -
+Modules/_io/_iomodule.h - _PyIO_str_readinto -
+Modules/_io/_iomodule.h - _PyIO_str_readline -
+Modules/_io/_iomodule.h - _PyIO_str_reset -
+Modules/_io/_iomodule.h - _PyIO_str_seek -
+Modules/_io/_iomodule.h - _PyIO_str_seekable -
+Modules/_io/_iomodule.h - _PyIO_str_setstate -
+Modules/_io/_iomodule.h - _PyIO_str_tell -
+Modules/_io/_iomodule.h - _PyIO_str_truncate -
+Modules/_io/_iomodule.h - _PyIO_str_writable -
+Modules/_io/_iomodule.h - _PyIO_str_write -
+Modules/_io/_iomodule.h - _PyIO_empty_str -
+Modules/_io/_iomodule.h - _PyIO_empty_bytes -
+Modules/_multiprocessing/multiprocessing.h - _PyMp_SemLockType -
+Modules/_sqlite/module.c - _pysqlite_converters -
+Modules/_sqlite/module.c - _pysqlite_enable_callback_tracebacks -
+Modules/_sqlite/module.c - pysqlite_BaseTypeAdapted -
+Modules/_sqlite/module.h - pysqlite_global_state -
+Modules/_testcapimodule.c - _PyBytesIOBuffer_Type -
+Modules/posixmodule.c - _Py_open_cloexec_works -
+Modules/posixmodule.c - environ -
+Objects/object.c - _Py_GenericAliasIterType -
+Objects/object.c - _PyMemoryIter_Type -
+Objects/object.c - _PyLineIterator -
+Objects/object.c - _PyPositionsIterator -
+Python/perf_trampoline.c - _Py_trampoline_func_start -
+Python/perf_trampoline.c - _Py_trampoline_func_end -
+Python/importdl.h - _PyImport_DynLoadFiletab -
+Modules/expat/xmlrole.c - prolog0 -
+Modules/expat/xmlrole.c - prolog1 -
+Modules/expat/xmlrole.c - prolog2 -
+Modules/expat/xmlrole.c - doctype0 -
+Modules/expat/xmlrole.c - doctype1 -
+Modules/expat/xmlrole.c - doctype2 -
+Modules/expat/xmlrole.c - doctype3 -
+Modules/expat/xmlrole.c - doctype4 -
+Modules/expat/xmlrole.c - doctype5 -
+Modules/expat/xmlrole.c - internalSubset -
+Modules/expat/xmlrole.c - entity0 -
+Modules/expat/xmlrole.c - entity1 -
+Modules/expat/xmlrole.c - entity2 -
+Modules/expat/xmlrole.c - entity3 -
+Modules/expat/xmlrole.c - entity4 -
+Modules/expat/xmlrole.c - entity5 -
+Modules/expat/xmlrole.c - entity6 -
+Modules/expat/xmlrole.c - entity7 -
+Modules/expat/xmlrole.c - entity8 -
+Modules/expat/xmlrole.c - entity9 -
+Modules/expat/xmlrole.c - entity10 -
+Modules/expat/xmlrole.c - notation0 -
+Modules/expat/xmlrole.c - notation1 -
+Modules/expat/xmlrole.c - notation2 -
+Modules/expat/xmlrole.c - notation3 -
+Modules/expat/xmlrole.c - notation4 -
+Modules/expat/xmlrole.c - attlist0 -
+Modules/expat/xmlrole.c - attlist1 -
+Modules/expat/xmlrole.c - attlist2 -
+Modules/expat/xmlrole.c - attlist3 -
+Modules/expat/xmlrole.c - attlist4 -
+Modules/expat/xmlrole.c - attlist5 -
+Modules/expat/xmlrole.c - attlist6 -
+Modules/expat/xmlrole.c - attlist7 -
+Modules/expat/xmlrole.c - attlist8 -
+Modules/expat/xmlrole.c - attlist9 -
+Modules/expat/xmlrole.c - element0 -
+Modules/expat/xmlrole.c - element1 -
+Modules/expat/xmlrole.c - element2 -
+Modules/expat/xmlrole.c - element3 -
+Modules/expat/xmlrole.c - element4 -
+Modules/expat/xmlrole.c - element5 -
+Modules/expat/xmlrole.c - element6 -
+Modules/expat/xmlrole.c - element7 -
+Modules/expat/xmlrole.c - externalSubset0 -
+Modules/expat/xmlrole.c - externalSubset1 -
+Modules/expat/xmlrole.c - condSect0 -
+Modules/expat/xmlrole.c - condSect1 -
+Modules/expat/xmlrole.c - condSect2 -
+Modules/expat/xmlrole.c - declClose -
+Modules/expat/xmlrole.c - error -
+
+## other
+Modules/_io/_iomodule.c - _PyIO_Module -
+Modules/_sqlite/module.c - _sqlite3module -
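
The restructured globals table above adopts a small header convention that the table-file.py changes below are built to parse: a long run of '#' opens a major section, a '##---' rule opens a minor section, a '## ' line carries a title (either for the preceding rule or as a standalone group), and single-'#' comments are ignored. The sketch below is a simplified, hypothetical illustration of that classification only (the sample rows are made up); the real parser additionally pairs each '#####'/'##---' marker with the '## ' title line that follows it.

    # Simplified sketch of the header conventions; hypothetical sample lines,
    # not the real parser from Tools/c-analyzer/table-file.py.
    SAMPLE = """\
    ##-----------------------
    ## test code
    # an ordinary comment (ignored)
    Modules/example.c - some_global -
    """

    def classify(line):
        if line.startswith('#####'):
            return 'section-major'    # a long run of '#'
        if line.startswith('##---'):
            return 'section-minor'    # '##' followed by dashes
        if line.startswith('## '):
            return 'section-group'    # '## <title>' (also titles major/minor sections)
        if line.startswith('#'):
            return None               # plain comment, skipped
        return 'row' if line else None

    for raw in SAMPLE.splitlines():
        kind = classify(raw.strip())
        if kind:
            print(f"{kind:15} {raw.strip()}")
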
diff --git a/Tools/c-analyzer/table-file.py b/Tools/c-analyzer/table-file.py
index 3cc05cc9de7779..d36f814415c8e7 100644
--- a/Tools/c-analyzer/table-file.py
+++ b/Tools/c-analyzer/table-file.py
@@ -1,43 +1,59 @@
+KINDS = [
+ 'section-major',
+ 'section-minor',
+ 'section-group',
+ 'row',
+]
+
+
def iter_clean_lines(lines):
lines = iter(lines)
- for line in lines:
- line = line.strip()
- if line.startswith('# XXX'):
+ for rawline in lines:
+ line = rawline.strip()
+ if line.startswith('#') and not rawline.startswith('##'):
continue
- yield line
+ yield line, rawline
def parse_table_lines(lines):
lines = iter_clean_lines(lines)
- for line in lines:
- if line.startswith(('####', '#----')):
- kind = 0 if line[1] == '#' else 1
- try:
- line = next(lines).strip()
- except StopIteration:
- line = ''
- if not line.startswith('# '):
- raise NotImplementedError(line)
- yield kind, line[2:].lstrip()
- continue
-
- maybe = None
- while line.startswith('#'):
- if line != '#' and line[1] == ' ':
- maybe = line[2:].lstrip()
- try:
- line = next(lines).strip()
- except StopIteration:
- return
- if not line:
- break
- else:
- if line:
- if maybe:
- yield 2, maybe
- yield 'row', line
+ group = None
+ prev = ''
+ for line, rawline in lines:
+ if line.startswith('## '):
+ assert not rawline.startswith(' '), (line, rawline)
+ if group:
+ assert prev, (line, rawline)
+ kind, after, _ = group
+ assert kind and kind != 'section-group', (group, line, rawline)
+ assert after is not None, (group, line, rawline)
+ else:
+ assert not prev, (prev, line, rawline)
+ kind, after = group = ('section-group', None)
+ title = line[3:].lstrip()
+ assert title, (line, rawline)
+ if after is not None:
+ try:
+ line, rawline = next(lines)
+ except StopIteration:
+ line = None
+ if line != after:
+ raise NotImplementedError((group, line, rawline))
+ yield kind, title
+ group = None
+ elif group:
+ raise NotImplementedError((group, line, rawline))
+ elif line.startswith('##---'):
+ assert line.rstrip('-') == '##', (line, rawline)
+ group = ('section-minor', '', line)
+ elif line.startswith('#####'):
+ assert not line.strip('#'), (line, rawline)
+ group = ('section-major', '', line)
+ elif line:
+ yield 'row', line
+ prev = line
def iter_sections(lines):
@@ -49,12 +65,13 @@ def iter_sections(lines):
if header is None:
header = value
continue
- raise NotImplementedError(value)
+ raise NotImplementedError(repr(value))
yield tuple(section), value
else:
if header is None:
header = False
- section[kind:] = [value]
+ start = KINDS.index(kind)
+ section[start:] = [value]
def collect_sections(lines):
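
A note on the KINDS-based change just above: iter_sections() keeps the current section path in a list, and `section[start:] = [value]` with `start = KINDS.index(kind)` truncates every level at or below the incoming heading before setting it, so a new minor section automatically discards the previous group. A minimal sketch of that behaviour, with a hypothetical major title and titles borrowed from the table above:

    # Minimal sketch of the truncation trick used by iter_sections(); rows are
    # then yielded against tuple(section) (not shown here).
    KINDS = ['section-major', 'section-minor', 'section-group', 'row']

    def set_heading(section, kind, title):
        start = KINDS.index(kind)
        section[start:] = [title]   # drop deeper levels, set this one

    section = []
    set_heading(section, 'section-major', 'globals to fix')   # hypothetical major title
    set_heading(section, 'section-minor', 'the analyzer should have ignored these')
    set_heading(section, 'section-group', 'forward/extern references')
    print(section)  # ['globals to fix', 'the analyzer should have ignored these', 'forward/extern references']
    set_heading(section, 'section-minor', 'test code')
    print(section)  # ['globals to fix', 'test code']
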
diff --git a/Tools/cases_generator/generate_cases.py b/Tools/cases_generator/generate_cases.py
index 2952634a3cda68..5930c797b8a4d3 100644
--- a/Tools/cases_generator/generate_cases.py
+++ b/Tools/cases_generator/generate_cases.py
@@ -13,6 +13,7 @@
import typing
import parser
+from parser import StackEffect
DEFAULT_INPUT = os.path.relpath(
os.path.join(os.path.dirname(__file__), "../../Python/bytecodes.c")
@@ -22,7 +23,7 @@
)
BEGIN_MARKER = "// BEGIN BYTECODES //"
END_MARKER = "// END BYTECODES //"
-RE_PREDICTED = r"(?s)(?:PREDICT\(|GO_TO_INSTRUCTION\(|DEOPT_IF\(.*?,\s*)(\w+)\);"
+RE_PREDICTED = r"^\s*(?:PREDICT\(|GO_TO_INSTRUCTION\(|DEOPT_IF\(.*?,\s*)(\w+)\);\s*$"
UNUSED = "unused"
BITS_PER_CODE_UNIT = 16
@@ -73,6 +74,34 @@ def block(self, head: str):
yield
self.emit("}")
+ def stack_adjust(self, diff: int):
+ if diff > 0:
+ self.emit(f"STACK_GROW({diff});")
+ elif diff < 0:
+ self.emit(f"STACK_SHRINK({-diff});")
+
+ def declare(self, dst: StackEffect, src: StackEffect | None):
+ if dst.name == UNUSED:
+ return
+ typ = f"{dst.type} " if dst.type else "PyObject *"
+ init = ""
+ if src:
+ cast = self.cast(dst, src)
+ init = f" = {cast}{src.name}"
+ self.emit(f"{typ}{dst.name}{init};")
+
+ def assign(self, dst: StackEffect, src: StackEffect):
+ if src.name == UNUSED:
+ return
+ cast = self.cast(dst, src)
+ if m := re.match(r"^PEEK\((\d+)\)$", dst.name):
+ self.emit(f"POKE({m.group(1)}, {cast}{src.name});")
+ else:
+ self.emit(f"{dst.name} = {cast}{src.name};")
+
+ def cast(self, dst: StackEffect, src: StackEffect) -> str:
+ return f"({dst.type or 'PyObject *'})" if src.type != dst.type else ""
+
@dataclasses.dataclass
class Instruction:
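
The new declare()/assign()/cast() helpers above drive the typed stack effects introduced in this change: a StackEffect may now carry a C type, and the formatter inserts a cast only when the source and destination types differ. A stand-alone sketch of what they produce (effect names are illustrative, not taken from bytecodes.c):

    from dataclasses import dataclass

    # Simplified re-statement of Formatter.declare()/assign()/cast(); the real
    # helpers also skip UNUSED effects and rewrite PEEK(n) targets into POKE().
    @dataclass
    class StackEffect:
        name: str
        type: str = ""

    def cast(dst, src):
        return f"({dst.type or 'PyObject *'})" if src.type != dst.type else ""

    def declare(dst, src=None):
        typ = f"{dst.type} " if dst.type else "PyObject *"
        init = f" = {cast(dst, src)}{src.name}" if src else ""
        return f"{typ}{dst.name}{init};"

    def assign(dst, src):
        return f"{dst.name} = {cast(dst, src)}{src.name};"

    print(declare(StackEffect("value"), StackEffect("PEEK(1)")))
    # PyObject *value = PEEK(1);
    print(declare(StackEffect("jump", "bool"), StackEffect("PEEK(1)")))
    # bool jump = (bool)PEEK(1);
    print(assign(StackEffect("PEEK(1)"), StackEffect("res", "bool")))
    # PEEK(1) = (PyObject *)res;
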
@@ -83,13 +112,15 @@ class Instruction:
kind: typing.Literal["inst", "op"]
name: str
block: parser.Block
+ block_text: list[str] # Block.text, less curlies, less PREDICT() calls
+ predictions: list[str] # Prediction targets (instruction names)
# Computed by constructor
always_exits: bool
cache_offset: int
cache_effects: list[parser.CacheEffect]
- input_effects: list[parser.StackEffect]
- output_effects: list[parser.StackEffect]
+ input_effects: list[StackEffect]
+ output_effects: list[StackEffect]
# Set later
family: parser.Family | None = None
@@ -100,13 +131,14 @@ def __init__(self, inst: parser.InstDef):
self.kind = inst.kind
self.name = inst.name
self.block = inst.block
- self.always_exits = always_exits(self.block)
+ self.block_text, self.predictions = extract_block_text(self.block)
+ self.always_exits = always_exits(self.block_text)
self.cache_effects = [
effect for effect in inst.inputs if isinstance(effect, parser.CacheEffect)
]
self.cache_offset = sum(c.size for c in self.cache_effects)
self.input_effects = [
- effect for effect in inst.inputs if isinstance(effect, parser.StackEffect)
+ effect for effect in inst.inputs if isinstance(effect, StackEffect)
]
self.output_effects = inst.outputs # For consistency/completeness
@@ -122,42 +154,39 @@ def write(self, out: Formatter) -> None:
)
# Write input stack effect variable declarations and initializations
- for i, seffect in enumerate(reversed(self.input_effects), 1):
- if seffect.name != UNUSED:
- out.emit(f"PyObject *{seffect.name} = PEEK({i});")
+ for i, ieffect in enumerate(reversed(self.input_effects), 1):
+ src = StackEffect(f"PEEK({i})", "")
+ out.declare(ieffect, src)
# Write output stack effect variable declarations
- input_names = {seffect.name for seffect in self.input_effects}
- input_names.add(UNUSED)
- for seffect in self.output_effects:
- if seffect.name not in input_names:
- out.emit(f"PyObject *{seffect.name};")
+ input_names = {ieffect.name for ieffect in self.input_effects}
+ for oeffect in self.output_effects:
+ if oeffect.name not in input_names:
+ out.declare(oeffect, None)
self.write_body(out, 0)
# Skip the rest if the block always exits
- if always_exits(self.block):
+ if self.always_exits:
return
# Write net stack growth/shrinkage
diff = len(self.output_effects) - len(self.input_effects)
- if diff > 0:
- out.emit(f"STACK_GROW({diff});")
- elif diff < 0:
- out.emit(f"STACK_SHRINK({-diff});")
+ out.stack_adjust(diff)
# Write output stack effect assignments
- unmoved_names = {UNUSED}
+ unmoved_names: set[str] = set()
for ieffect, oeffect in zip(self.input_effects, self.output_effects):
if ieffect.name == oeffect.name:
unmoved_names.add(ieffect.name)
- for i, seffect in enumerate(reversed(self.output_effects)):
- if seffect.name not in unmoved_names:
- out.emit(f"POKE({i+1}, {seffect.name});")
+ for i, oeffect in enumerate(reversed(self.output_effects), 1):
+ if oeffect.name not in unmoved_names:
+ dst = StackEffect(f"PEEK({i})", "")
+ out.assign(dst, oeffect)
# Write cache effect
if self.cache_offset:
- out.emit(f"next_instr += {self.cache_offset};")
+ out.emit(f"JUMPBY({self.cache_offset});")
def write_body(self, out: Formatter, dedent: int, cache_adjust: int = 0) -> None:
"""Write the instruction body."""
@@ -171,36 +200,19 @@ def write_body(self, out: Formatter, dedent: int, cache_adjust: int = 0) -> None
# is always an object pointer.
# If this becomes false, we need a way to specify
# syntactically what type the cache data is.
- type = "PyObject *"
+ typ = "PyObject *"
func = "read_obj"
else:
- type = f"uint{bits}_t "
+ typ = f"uint{bits}_t "
func = f"read_u{bits}"
- out.emit(f"{type}{ceffect.name} = {func}(next_instr + {cache_offset});")
+ out.emit(f"{typ}{ceffect.name} = {func}(next_instr + {cache_offset});")
cache_offset += ceffect.size
assert cache_offset == self.cache_offset + cache_adjust
- # Get lines of text with proper dedent
- blocklines = self.block.to_text(dedent=dedent).splitlines(True)
-
- # Remove blank lines from both ends
- while blocklines and not blocklines[0].strip():
- blocklines.pop(0)
- while blocklines and not blocklines[-1].strip():
- blocklines.pop()
-
- # Remove leading and trailing braces
- assert blocklines and blocklines[0].strip() == "{"
- assert blocklines and blocklines[-1].strip() == "}"
- blocklines.pop()
- blocklines.pop(0)
-
- # Remove trailing blank lines
- while blocklines and not blocklines[-1].strip():
- blocklines.pop()
-
# Write the body, substituting a goto for ERROR_IF()
- for line in blocklines:
+ assert dedent <= 0
+ extra = " " * -dedent
+ for line in self.block_text:
if m := re.match(r"(\s*)ERROR_IF\((.+), (\w+)\);\s*$", line):
space, cond, label = m.groups()
# ERROR_IF() must pop the inputs from the stack.
@@ -215,34 +227,36 @@ def write_body(self, out: Formatter, dedent: int, cache_adjust: int = 0) -> None
else:
break
if ninputs:
- out.write_raw(f"{space}if ({cond}) goto pop_{ninputs}_{label};\n")
+ out.write_raw(
+ f"{extra}{space}if ({cond}) goto pop_{ninputs}_{label};\n"
+ )
else:
- out.write_raw(f"{space}if ({cond}) goto {label};\n")
+ out.write_raw(f"{extra}{space}if ({cond}) goto {label};\n")
else:
- out.write_raw(line)
+ out.write_raw(extra + line)
InstructionOrCacheEffect = Instruction | parser.CacheEffect
+StackEffectMapping = list[tuple[StackEffect, StackEffect]]
@dataclasses.dataclass
class Component:
instr: Instruction
- input_mapping: dict[str, parser.StackEffect]
- output_mapping: dict[str, parser.StackEffect]
+ input_mapping: StackEffectMapping
+ output_mapping: StackEffectMapping
def write_body(self, out: Formatter, cache_adjust: int) -> None:
with out.block(""):
- for var, ieffect in self.input_mapping.items():
- out.emit(f"PyObject *{ieffect.name} = {var};")
- for oeffect in self.output_mapping.values():
- out.emit(f"PyObject *{oeffect.name};")
- self.instr.write_body(out, dedent=-4, cache_adjust=cache_adjust)
- for var, oeffect in self.output_mapping.items():
- out.emit(f"{var} = {oeffect.name};")
+ for var, ieffect in self.input_mapping:
+ out.declare(ieffect, var)
+ for _, oeffect in self.output_mapping:
+ out.declare(oeffect, None)
+ self.instr.write_body(out, dedent=-4, cache_adjust=cache_adjust)
-# TODO: Use a common base class for {Super,Macro}Instruction
+ for var, oeffect in self.output_mapping:
+ out.assign(var, oeffect)
@dataclasses.dataclass
@@ -250,7 +264,7 @@ class SuperOrMacroInstruction:
"""Common fields for super- and macro instructions."""
name: str
- stack: list[str]
+ stack: list[StackEffect]
initial_sp: int
final_sp: int
@@ -297,6 +311,7 @@ def error(self, msg: str, node: parser.Node) -> None:
print(f"{self.filename}:{lineno}: {msg}", file=sys.stderr)
self.errors += 1
+ everything: list[parser.InstDef | parser.Super | parser.Macro]
instrs: dict[str, Instruction] # Includes ops
supers: dict[str, parser.Super]
super_instrs: dict[str, SuperInstruction]
@@ -330,6 +345,7 @@ def parse(self) -> None:
# Parse from start
psr.setpos(start)
+ self.everything = []
self.instrs = {}
self.supers = {}
self.macros = {}
@@ -338,10 +354,13 @@ def parse(self) -> None:
match thing:
case parser.InstDef(name=name):
self.instrs[name] = Instruction(thing)
+ self.everything.append(thing)
case parser.Super(name):
self.supers[name] = thing
+ self.everything.append(thing)
case parser.Macro(name):
self.macros[name] = thing
+ self.everything.append(thing)
case parser.Family(name):
self.families[name] = thing
case _:
@@ -369,7 +388,11 @@ def analyze(self) -> None:
def find_predictions(self) -> None:
"""Find the instructions that need PREDICTED() labels."""
for instr in self.instrs.values():
- for target in re.findall(RE_PREDICTED, instr.block.text):
+ targets = set(instr.predictions)
+ for line in instr.block_text:
+ if m := re.match(RE_PREDICTED, line):
+ targets.add(m.group(1))
+ for target in targets:
if target_instr := self.instrs.get(target):
target_instr.predicted = True
else:
@@ -440,24 +463,9 @@ def analyze_super(self, super: parser.Super) -> SuperInstruction:
stack, initial_sp = self.stack_analysis(components)
sp = initial_sp
parts: list[Component] = []
- for component in components:
- match component:
- case parser.CacheEffect() as ceffect:
- parts.append(ceffect)
- case Instruction() as instr:
- input_mapping = {}
- for ieffect in reversed(instr.input_effects):
- sp -= 1
- if ieffect.name != UNUSED:
- input_mapping[stack[sp]] = ieffect
- output_mapping = {}
- for oeffect in instr.output_effects:
- if oeffect.name != UNUSED:
- output_mapping[stack[sp]] = oeffect
- sp += 1
- parts.append(Component(instr, input_mapping, output_mapping))
- case _:
- typing.assert_never(component)
+ for instr in components:
+ part, sp = self.analyze_instruction(instr, stack, sp)
+ parts.append(part)
final_sp = sp
return SuperInstruction(super.name, stack, initial_sp, final_sp, super, parts)
@@ -471,22 +479,26 @@ def analyze_macro(self, macro: parser.Macro) -> MacroInstruction:
case parser.CacheEffect() as ceffect:
parts.append(ceffect)
case Instruction() as instr:
- input_mapping = {}
- for ieffect in reversed(instr.input_effects):
- sp -= 1
- if ieffect.name != UNUSED:
- input_mapping[stack[sp]] = ieffect
- output_mapping = {}
- for oeffect in instr.output_effects:
- if oeffect.name != UNUSED:
- output_mapping[stack[sp]] = oeffect
- sp += 1
- parts.append(Component(instr, input_mapping, output_mapping))
+ part, sp = self.analyze_instruction(instr, stack, sp)
+ parts.append(part)
case _:
typing.assert_never(component)
final_sp = sp
return MacroInstruction(macro.name, stack, initial_sp, final_sp, macro, parts)
+ def analyze_instruction(
+ self, instr: Instruction, stack: list[StackEffect], sp: int
+ ) -> tuple[Component, int]:
+ input_mapping: StackEffectMapping = []
+ for ieffect in reversed(instr.input_effects):
+ sp -= 1
+ input_mapping.append((stack[sp], ieffect))
+ output_mapping: StackEffectMapping = []
+ for oeffect in instr.output_effects:
+ output_mapping.append((stack[sp], oeffect))
+ sp += 1
+ return Component(instr, input_mapping, output_mapping), sp
+
def check_super_components(self, super: parser.Super) -> list[Instruction]:
components: list[Instruction] = []
for op in super.ops:
@@ -514,7 +526,7 @@ def check_macro_components(
def stack_analysis(
self, components: typing.Iterable[InstructionOrCacheEffect]
- ) -> tuple[list[str], int]:
+ ) -> tuple[list[StackEffect], int]:
"""Analyze a super-instruction or macro.
Print an error if there's a cache effect (which we don't support yet).
@@ -536,7 +548,10 @@ def stack_analysis(
# At this point, 'current' is the net stack effect,
# and 'lowest' and 'highest' are the extremes.
# Note that 'lowest' may be negative.
- stack = [f"_tmp_{i+1}" for i in range(highest - lowest)]
+ # TODO: Reverse the numbering.
+ stack = [
+ StackEffect(f"_tmp_{i+1}", "") for i in reversed(range(highest - lowest))
+ ]
return stack, -lowest
def write_instructions(self) -> None:
@@ -550,31 +565,24 @@ def write_instructions(self) -> None:
# Create formatter; the rest of the code uses this.
self.out = Formatter(f, 8)
- # Write and count regular instructions
+ # Write and count instructions of all kinds
n_instrs = 0
- for name, instr in self.instrs.items():
- if instr.kind != "inst":
- continue # ops are not real instructions
- n_instrs += 1
- self.out.emit("")
- with self.out.block(f"TARGET({name})"):
- if instr.predicted:
- self.out.emit(f"PREDICTED({name});")
- instr.write(self.out)
- if not always_exits(instr.block):
- self.out.emit(f"DISPATCH();")
-
- # Write and count super-instructions
n_supers = 0
- for sup in self.super_instrs.values():
- n_supers += 1
- self.write_super(sup)
-
- # Write and count macro instructions
n_macros = 0
- for macro in self.macro_instrs.values():
- n_macros += 1
- self.write_macro(macro)
+ for thing in self.everything:
+ match thing:
+ case parser.InstDef():
+ if thing.kind == "inst":
+ n_instrs += 1
+ self.write_instr(self.instrs[thing.name])
+ case parser.Super():
+ n_supers += 1
+ self.write_super(self.super_instrs[thing.name])
+ case parser.Macro():
+ n_macros += 1
+ self.write_macro(self.macro_instrs[thing.name])
+ case _:
+ typing.assert_never(thing)
print(
f"Wrote {n_instrs} instructions, {n_supers} supers, "
@@ -582,6 +590,18 @@ def write_instructions(self) -> None:
file=sys.stderr,
)
+ def write_instr(self, instr: Instruction) -> None:
+ name = instr.name
+ self.out.emit("")
+ with self.out.block(f"TARGET({name})"):
+ if instr.predicted:
+ self.out.emit(f"PREDICTED({name});")
+ instr.write(self.out)
+ if not instr.always_exits:
+ for prediction in instr.predictions:
+ self.out.emit(f"PREDICT({prediction});")
+ self.out.emit(f"DISPATCH();")
+
def write_super(self, sup: SuperInstruction) -> None:
"""Write code for a super-instruction."""
with self.wrap_super_or_macro(sup):
@@ -589,11 +609,11 @@ def write_super(self, sup: SuperInstruction) -> None:
for comp in sup.parts:
if not first:
self.out.emit("NEXTOPARG();")
- self.out.emit("next_instr++;")
+ self.out.emit("JUMPBY(1);")
first = False
comp.write_body(self.out, 0)
if comp.instr.cache_offset:
- self.out.emit(f"next_instr += {comp.instr.cache_offset};")
+ self.out.emit(f"JUMPBY({comp.instr.cache_offset});")
def write_macro(self, mac: MacroInstruction) -> None:
"""Write code for a macro instruction."""
@@ -608,43 +628,68 @@ def write_macro(self, mac: MacroInstruction) -> None:
cache_adjust += comp.instr.cache_offset
if cache_adjust:
- self.out.emit(f"next_instr += {cache_adjust};")
+ self.out.emit(f"JUMPBY({cache_adjust});")
@contextlib.contextmanager
def wrap_super_or_macro(self, up: SuperOrMacroInstruction):
"""Shared boilerplate for super- and macro instructions."""
+ # TODO: Somewhere (where?) make it so that if one instruction
+ # has an output that is input to another, and the variable names
+ # and types match and don't conflict with other instructions,
+ # that variable is declared with the right name and type in the
+ # outer block, rather than trusting the compiler to optimize it.
self.out.emit("")
with self.out.block(f"TARGET({up.name})"):
- for i, var in enumerate(up.stack):
+ for i, var in reversed(list(enumerate(up.stack))):
+ src = None
if i < up.initial_sp:
- self.out.emit(f"PyObject *{var} = PEEK({up.initial_sp - i});")
- else:
- self.out.emit(f"PyObject *{var};")
+ src = StackEffect(f"PEEK({up.initial_sp - i})", "")
+ self.out.declare(var, src)
yield
- if up.final_sp > up.initial_sp:
- self.out.emit(f"STACK_GROW({up.final_sp - up.initial_sp});")
- elif up.final_sp < up.initial_sp:
- self.out.emit(f"STACK_SHRINK({up.initial_sp - up.final_sp});")
+ self.out.stack_adjust(up.final_sp - up.initial_sp)
for i, var in enumerate(reversed(up.stack[: up.final_sp]), 1):
- self.out.emit(f"POKE({i}, {var});")
+ dst = StackEffect(f"PEEK({i})", "")
+ self.out.assign(dst, var)
self.out.emit(f"DISPATCH();")
-def always_exits(block: parser.Block) -> bool:
+def extract_block_text(block: parser.Block) -> tuple[list[str], list[str]]:
+ # Get lines of text with proper dedent
+ blocklines = block.text.splitlines(True)
+
+ # Remove blank lines from both ends
+ while blocklines and not blocklines[0].strip():
+ blocklines.pop(0)
+ while blocklines and not blocklines[-1].strip():
+ blocklines.pop()
+
+ # Remove leading and trailing braces
+ assert blocklines and blocklines[0].strip() == "{"
+ assert blocklines and blocklines[-1].strip() == "}"
+ blocklines.pop()
+ blocklines.pop(0)
+
+ # Remove trailing blank lines
+ while blocklines and not blocklines[-1].strip():
+ blocklines.pop()
+
+ # Separate PREDICT(...) macros from end
+ predictions: list[str] = []
+ while blocklines and (m := re.match(r"^\s*PREDICT\((\w+)\);\s*$", blocklines[-1])):
+ predictions.insert(0, m.group(1))
+ blocklines.pop()
+
+ return blocklines, predictions
+
+
+def always_exits(lines: list[str]) -> bool:
"""Determine whether a block always ends in a return/goto/etc."""
- text = block.text
- lines = text.splitlines()
- while lines and not lines[-1].strip():
- lines.pop()
- if not lines or lines[-1].strip() != "}":
- return False
- lines.pop()
if not lines:
return False
- line = lines.pop().rstrip()
+ line = lines[-1].rstrip()
# Indent must match exactly (TODO: Do something better)
if line[:12] != " " * 12:
return False
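
The extract_block_text()/always_exits() split above also peels trailing PREDICT(...) macros off the instruction body so that write_instr() can re-emit them just before DISPATCH(). A small illustrative run of that extraction (the body below is made up, not from bytecodes.c):

    import re

    # Made-up instruction body; the trailing PREDICT() call is moved into a
    # separate list, mirroring extract_block_text().
    body_lines = [
        "            res = do_something(arg);\n",
        "            PREDICT(LOAD_FAST);\n",
    ]

    predictions = []
    while body_lines and (m := re.match(r"^\s*PREDICT\((\w+)\);\s*$", body_lines[-1])):
        predictions.insert(0, m.group(1))
        body_lines.pop()

    print(predictions)   # ['LOAD_FAST']
    print(body_lines)    # ['            res = do_something(arg);\n']
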
diff --git a/Tools/cases_generator/parser.py b/Tools/cases_generator/parser.py
index 02a7834d221596..d802c733dfd10c 100644
--- a/Tools/cases_generator/parser.py
+++ b/Tools/cases_generator/parser.py
@@ -62,7 +62,8 @@ class Block(Node):
@dataclass
class StackEffect(Node):
name: str
- # TODO: type, condition
+ type: str = ""
+ # TODO: array, condition
@dataclass
@@ -147,7 +148,7 @@ def inst_header(self) -> InstHeader | None:
if self.expect(lx.LPAREN) and (tkn := self.expect(lx.IDENTIFIER)):
name = tkn.text
if self.expect(lx.COMMA):
- inp, outp = self.stack_effect()
+ inp, outp = self.io_effect()
if self.expect(lx.RPAREN):
if (tkn := self.peek()) and tkn.kind == lx.LBRACE:
return InstHeader(kind, name, inp, outp)
@@ -156,7 +157,7 @@ def inst_header(self) -> InstHeader | None:
return InstHeader(kind, name, [], [])
return None
- def stack_effect(self) -> tuple[list[InputEffect], list[OutputEffect]]:
+ def io_effect(self) -> tuple[list[InputEffect], list[OutputEffect]]:
# '(' [inputs] '--' [outputs] ')'
if self.expect(lx.LPAREN):
inputs = self.inputs() or []
@@ -181,23 +182,7 @@ def inputs(self) -> list[InputEffect] | None:
@contextual
def input(self) -> InputEffect | None:
- # IDENTIFIER '/' INTEGER (CacheEffect)
- # IDENTIFIER (StackEffect)
- if tkn := self.expect(lx.IDENTIFIER):
- if self.expect(lx.DIVIDE):
- if num := self.expect(lx.NUMBER):
- try:
- size = int(num.text)
- except ValueError:
- raise self.make_syntax_error(
- f"Expected integer, got {num.text!r}"
- )
- else:
- return CacheEffect(tkn.text, size)
- raise self.make_syntax_error("Expected integer")
- else:
- # TODO: Arrays, conditions
- return StackEffect(tkn.text)
+ return self.cache_effect() or self.stack_effect()
def outputs(self) -> list[OutputEffect] | None:
# output (, output)*
@@ -214,8 +199,30 @@ def outputs(self) -> list[OutputEffect] | None:
@contextual
def output(self) -> OutputEffect | None:
+ return self.stack_effect()
+
+ @contextual
+ def cache_effect(self) -> CacheEffect | None:
+ # IDENTIFIER '/' NUMBER
+ if tkn := self.expect(lx.IDENTIFIER):
+ if self.expect(lx.DIVIDE):
+ num = self.require(lx.NUMBER).text
+ try:
+ size = int(num)
+ except ValueError:
+ raise self.make_syntax_error(f"Expected integer, got {num!r}")
+ else:
+ return CacheEffect(tkn.text, size)
+
+ @contextual
+ def stack_effect(self) -> StackEffect | None:
+ # IDENTIFIER [':' IDENTIFIER]
+ # TODO: Arrays, conditions
if tkn := self.expect(lx.IDENTIFIER):
- return StackEffect(tkn.text)
+ type = ""
+ if self.expect(lx.COLON):
+ type = self.require(lx.IDENTIFIER).text
+ return StackEffect(tkn.text, type)
@contextual
def super_def(self) -> Super | None:
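
For context on the grammar split above: cache_effect() now handles the `NAME/SIZE` form and stack_effect() accepts an optional single-identifier type after ':'. A rough sketch of the objects such a header would produce, using a hypothetical `inst(EXAMPLE_OP, (counter/1, left, right -- res: bool))` (EXAMPLE_OP is not a real instruction, and the dataclasses below only mirror the fields shown in this diff):

    from dataclasses import dataclass

    # Cut-down stand-ins for the parser's node classes, showing only the
    # fields relevant here.
    @dataclass
    class CacheEffect:
        name: str
        size: int

    @dataclass
    class StackEffect:
        name: str
        type: str = ""

    # What io_effect() would roughly build for the hypothetical header
    #   inst(EXAMPLE_OP, (counter/1, left, right -- res: bool))
    inputs = [CacheEffect("counter", 1), StackEffect("left"), StackEffect("right")]
    outputs = [StackEffect("res", "bool")]
    print(inputs, outputs)
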
diff --git a/Tools/clinic/clinic.py b/Tools/clinic/clinic.py
index 0ece814e8f1883..fdf8041e14bbc1 100755
--- a/Tools/clinic/clinic.py
+++ b/Tools/clinic/clinic.py
@@ -5212,10 +5212,6 @@ def state_terminal(self, line):
def main(argv):
import sys
-
- if sys.version_info.major < 3 or sys.version_info.minor < 3:
- sys.exit("Error: clinic.py requires Python 3.3 or greater.")
-
import argparse
cmdline = argparse.ArgumentParser(
description="""Preprocessor for CPython C files.
diff --git a/Tools/scripts/summarize_stats.py b/Tools/scripts/summarize_stats.py
index 9c098064fe5403..c30a60e9514bda 100644
--- a/Tools/scripts/summarize_stats.py
+++ b/Tools/scripts/summarize_stats.py
@@ -34,6 +34,16 @@
TOTAL = "specialization.deferred", "specialization.hit", "specialization.miss", "execution_count"
+def format_ratio(num, den):
+ """
+ Format a ratio as a percentage. When the denominator is 0, returns the empty
+ string.
+ """
+ if den == 0:
+ return ""
+ else:
+ return f"{num/den:.01%}"
+
def join_rows(a_rows, b_rows):
"""
Joins two tables together, side-by-side, where the first column in each is a
@@ -87,7 +97,7 @@ def calculate_specialization_stats(family_stats, total):
continue
else:
label = key
- rows.append((f"{label:>12}", f"{family_stats[key]:>12}", f"{100*family_stats[key]/total:0.1f}%"))
+ rows.append((f"{label:>12}", f"{family_stats[key]:>12}", format_ratio(family_stats[key], total)))
return rows
def calculate_specialization_success_failure(family_stats):
@@ -100,7 +110,7 @@ def calculate_specialization_success_failure(family_stats):
label = key[len("specialization."):]
label = label[0].upper() + label[1:]
val = family_stats.get(key, 0)
- rows.append((label, val, f"{100*val/total_attempts:0.1f}%"))
+ rows.append((label, val, format_ratio(val, total_attempts)))
return rows
def calculate_specialization_failure_kinds(name, family_stats, defines):
@@ -118,7 +128,7 @@ def calculate_specialization_failure_kinds(name, family_stats, defines):
for value, index in failures:
if not value:
continue
- rows.append((kind_to_text(index, defines, name), value, f"{100*value/total_failures:0.1f}%"))
+ rows.append((kind_to_text(index, defines, name), value, format_ratio(value, total_failures)))
return rows
def print_specialization_stats(name, family_stats, defines):
@@ -184,6 +194,7 @@ def gather_stats(input):
key = key.strip()
value = int(value)
stats[key] += value
+ stats['__nfiles__'] += 1
return stats
else:
raise ValueError(f"{input:r} is not a file or directory path")
@@ -317,11 +328,11 @@ def calculate_execution_counts(opcode_stats, total):
for (count, name, miss) in counts:
cumulative += count
if miss:
- miss = f"{100*miss/count:0.1f}%"
+ miss = format_ratio(miss, count)
else:
miss = ""
- rows.append((name, count, f"{100*count/total:0.1f}%",
- f"{100*cumulative/total:0.1f}%", miss))
+ rows.append((name, count, format_ratio(count, total),
+ format_ratio(cumulative, total), miss))
return rows
def emit_execution_counts(opcode_stats, total):
@@ -385,9 +396,9 @@ def emit_comparative_specialization_stats(base_opcode_stats, head_opcode_stats):
def calculate_specialization_effectiveness(opcode_stats, total):
basic, not_specialized, specialized = categorized_counts(opcode_stats)
return [
- ("Basic", basic, f"{basic*100/total:0.1f}%"),
- ("Not specialized", not_specialized, f"{not_specialized*100/total:0.1f}%"),
- ("Specialized", specialized, f"{specialized*100/total:0.1f}%"),
+ ("Basic", basic, format_ratio(basic, total)),
+ ("Not specialized", not_specialized, format_ratio(not_specialized, total)),
+ ("Specialized", specialized, format_ratio(specialized, total)),
]
def emit_specialization_overview(opcode_stats, total):
@@ -404,7 +415,7 @@ def emit_specialization_overview(opcode_stats, total):
counts.sort(reverse=True)
if total:
with Section(f"{title} by instruction", 3):
- rows = [ (name, count, f"{100*count/total:0.1f}%") for (count, name) in counts[:10] ]
+ rows = [ (name, count, format_ratio(count, total)) for (count, name) in counts[:10] ]
emit_table(("Name", "Count:", "Ratio:"), rows)
def emit_comparative_specialization_overview(base_opcode_stats, base_total, head_opcode_stats, head_total):
@@ -431,15 +442,15 @@ def calculate_call_stats(stats):
rows = []
for key, value in stats.items():
if "Calls to" in key:
- rows.append((key, value, f"{100*value/total:0.1f}%"))
+ rows.append((key, value, format_ratio(value, total)))
elif key.startswith("Calls "):
name, index = key[:-1].split("[")
index = int(index)
label = name + " (" + pretty(defines[index][0]) + ")"
- rows.append((label, value, f"{100*value/total:0.1f}%"))
+ rows.append((label, value, format_ratio(value, total)))
for key, value in stats.items():
if key.startswith("Frame"):
- rows.append((key, value, f"{100*value/total:0.1f}%"))
+ rows.append((key, value, format_ratio(value, total)))
return rows
def emit_call_stats(stats):
@@ -467,13 +478,13 @@ def calculate_object_stats(stats):
for key, value in stats.items():
if key.startswith("Object"):
if "materialize" in key:
- ratio = f"{100*value/total_materializations:0.1f}%"
+ ratio = format_ratio(value, total_materializations)
elif "allocations" in key:
- ratio = f"{100*value/total_allocations:0.1f}%"
+ ratio = format_ratio(value, total_allocations)
elif "increfs" in key:
- ratio = f"{100*value/total_increfs:0.1f}%"
+ ratio = format_ratio(value, total_increfs)
elif "decrefs" in key:
- ratio = f"{100*value/total_decrefs:0.1f}%"
+ ratio = format_ratio(value, total_decrefs)
else:
ratio = ""
label = key[6:].strip()
@@ -516,8 +527,8 @@ def emit_pair_counts(opcode_stats, total):
for (count, pair) in itertools.islice(pair_counts, 100):
i, j = pair
cumulative += count
- rows.append((opname[i] + " " + opname[j], count, f"{100*count/total:0.1f}%",
- f"{100*cumulative/total:0.1f}%"))
+ rows.append((opname[i] + " " + opname[j], count, format_ratio(count, total),
+ format_ratio(cumulative, total)))
emit_table(("Pair", "Count:", "Self:", "Cumulative:"),
rows
)
@@ -561,6 +572,9 @@ def output_single_stats(stats):
emit_specialization_overview(opcode_stats, total)
emit_call_stats(stats)
emit_object_stats(stats)
+ with Section("Meta stats", summary="Meta statistics"):
+ emit_table(("", "Count:"), [('Number of data files', stats['__nfiles__'])])
+
def output_comparative_stats(base_stats, head_stats):
base_opcode_stats = extract_opcode_stats(base_stats)
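
The format_ratio() helper introduced above centralizes the percentage formatting and returns an empty cell when the denominator is zero; the call sites it replaces divided directly and would raise ZeroDivisionError whenever a total was zero. For example:

    # Same logic as the new summarize_stats.format_ratio(), shown with a
    # couple of illustrative values.
    def format_ratio(num, den):
        if den == 0:
            return ""
        return f"{num/den:.01%}"

    print(repr(format_ratio(3, 8)))   # '37.5%'
    print(repr(format_ratio(5, 0)))   # ''  (empty cell instead of a crash)
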
diff --git a/netlify.toml b/netlify.toml
new file mode 100644
index 00000000000000..f5790fc5fec74f
--- /dev/null
+++ b/netlify.toml
@@ -0,0 +1,11 @@
+[build]
+ base = "Doc/"
+ command = "make html"
+ publish = "build/html"
+ # Do not trigger netlify builds if docs were not changed.
+ # Changed files should be in sync with `.github/workflows/doc.yml`
+ ignore = "git diff --quiet $CACHED_COMMIT_REF $COMMIT_REF . ../netlify.toml"
+
+[build.environment]
+ PYTHON_VERSION = "3.8"
+ IS_DEPLOYMENT_PREVIEW = "true"