From 4606eff0aa01d6ce30d25b05ed347567ea59b00b Mon Sep 17 00:00:00 2001
From: Serhiy Storchaka
Date: Tue, 23 Jul 2024 20:45:21 +0300
Subject: [PATCH 001/139] gh-122129: Improve support of method descriptors and
wrappers in the help title (GH-122157)
---
Lib/pydoc.py | 7 +++++++
Lib/test/test_pydoc/test_pydoc.py | 13 ++++++++++---
.../2024-07-23-13-07-12.gh-issue-122129.PwbC8q.rst | 1 +
3 files changed, 18 insertions(+), 3 deletions(-)
create mode 100644 Misc/NEWS.d/next/Library/2024-07-23-13-07-12.gh-issue-122129.PwbC8q.rst
diff --git a/Lib/pydoc.py b/Lib/pydoc.py
index 768c3dcb11ec59..d376592d69d40d 100644
--- a/Lib/pydoc.py
+++ b/Lib/pydoc.py
@@ -1682,6 +1682,13 @@ def describe(thing):
return 'function ' + thing.__name__
if inspect.ismethod(thing):
return 'method ' + thing.__name__
+ if inspect.ismethodwrapper(thing):
+ return 'method wrapper ' + thing.__name__
+ if inspect.ismethoddescriptor(thing):
+ try:
+ return 'method descriptor ' + thing.__name__
+ except AttributeError:
+ pass
return type(thing).__name__
def locate(path, forceload=0):
diff --git a/Lib/test/test_pydoc/test_pydoc.py b/Lib/test/test_pydoc/test_pydoc.py
index 40b3aca25e0da8..2dba077cdea6a7 100644
--- a/Lib/test/test_pydoc/test_pydoc.py
+++ b/Lib/test/test_pydoc/test_pydoc.py
@@ -776,9 +776,16 @@ def run_pydoc_pager(request, what, expected_first_line):
'Help on function help in module pydoc:')
run_pydoc_pager('str', 'str', 'Help on class str in module builtins:')
run_pydoc_pager(str, 'str', 'Help on class str in module builtins:')
- run_pydoc_pager('str.upper', 'str.upper', 'Help on method_descriptor in str:')
- run_pydoc_pager(str.upper, 'str.upper', 'Help on method_descriptor:')
- run_pydoc_pager(str.__add__, 'str.__add__', 'Help on wrapper_descriptor:')
+ run_pydoc_pager('str.upper', 'str.upper',
+ 'Help on method descriptor upper in str:')
+ run_pydoc_pager(str.upper, 'str.upper',
+ 'Help on method descriptor upper:')
+ run_pydoc_pager(''.upper, 'str.upper',
+ 'Help on built-in function upper:')
+ run_pydoc_pager(str.__add__,
+ 'str.__add__', 'Help on method descriptor __add__:')
+ run_pydoc_pager(''.__add__,
+ 'str.__add__', 'Help on method wrapper __add__:')
run_pydoc_pager(int.numerator, 'int.numerator',
'Help on getset descriptor builtins.int.numerator:')
run_pydoc_pager(list[int], 'list',
diff --git a/Misc/NEWS.d/next/Library/2024-07-23-13-07-12.gh-issue-122129.PwbC8q.rst b/Misc/NEWS.d/next/Library/2024-07-23-13-07-12.gh-issue-122129.PwbC8q.rst
new file mode 100644
index 00000000000000..08beb45653d24b
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-07-23-13-07-12.gh-issue-122129.PwbC8q.rst
@@ -0,0 +1 @@
+Improve support of method descriptors and wrappers in the help title.
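The effect of the new describe() branches can be checked interactively; a minimal sketch (not part of the patch), with the expected strings taken from the test updates above:

    import pydoc

    # Previously these objects fell through to type(thing).__name__, giving
    # titles such as "Help on method_descriptor:"; the object's own name is
    # now included in the title.
    print(pydoc.describe(str.upper))    # "method descriptor upper"
    print(pydoc.describe(str.__add__))  # "method descriptor __add__"
    print(pydoc.describe(''.__add__))   # "method wrapper __add__"
    print(pydoc.describe(''.upper))     # "built-in function upper" (unchanged path)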
From 33d32faa580fb776cb660e9cc8aa7e45c6c68c08 Mon Sep 17 00:00:00 2001
From: Eric Snow
Date: Tue, 23 Jul 2024 13:57:26 -0600
Subject: [PATCH 002/139] gh-117482: Expand Tests for Slot Wrappers of
Inherited Slots of Static Builtin Types (gh-122192)
---
Lib/test/test_embed.py | 27 +++++++++++++++++++++++
Lib/test/test_types.py | 49 +++++++++++++++++++++++++++++++++---------
2 files changed, 66 insertions(+), 10 deletions(-)
diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py
index 30dab1fbaa48b2..fb7995e05152d2 100644
--- a/Lib/test/test_embed.py
+++ b/Lib/test/test_embed.py
@@ -5,6 +5,7 @@
from collections import namedtuple
import contextlib
+import io
import json
import os
import os.path
@@ -415,6 +416,32 @@ def test_datetime_reset_strptime(self):
out, err = self.run_embedded_interpreter("test_repeated_init_exec", code)
self.assertEqual(out, '20000101\n' * INIT_LOOPS)
+ def test_static_types_inherited_slots(self):
+ slots = []
+ script = ['import sys']
+ from test.test_types import iter_builtin_types, iter_own_slot_wrappers
+ for cls in iter_builtin_types():
+ for slot in iter_own_slot_wrappers(cls):
+ slots.append((cls, slot))
+ attr = f'{cls.__name__}.{slot}'
+ script.append(f'print("{attr}:", {attr}, file=sys.stderr)')
+ script.append('')
+ script = os.linesep.join(script)
+
+ with contextlib.redirect_stderr(io.StringIO()) as stderr:
+ exec(script)
+ expected = stderr.getvalue().splitlines()
+
+ out, err = self.run_embedded_interpreter("test_repeated_init_exec", script)
+ results = err.split('--- Loop #')[1:]
+ results = [res.rpartition(' ---\n')[-1] for res in results]
+
+ self.maxDiff = None
+ for i, result in enumerate(results, start=1):
+ with self.subTest(loop=i):
+ self.assertEqual(result.splitlines(), expected)
+ self.assertEqual(out, '')
+
@unittest.skipIf(_testinternalcapi is None, "requires _testinternalcapi")
class InitConfigTests(EmbeddingTestsMixin, unittest.TestCase):
diff --git a/Lib/test/test_types.py b/Lib/test/test_types.py
index b89380da57884e..fb88daf9742fa9 100644
--- a/Lib/test/test_types.py
+++ b/Lib/test/test_types.py
@@ -29,6 +29,26 @@ def clear_typing_caches():
f()
+def iter_builtin_types():
+ for obj in __builtins__.values():
+ if not isinstance(obj, type):
+ continue
+ cls = obj
+ if cls.__module__ != 'builtins':
+ continue
+ yield cls
+
+
+@cpython_only
+def iter_own_slot_wrappers(cls):
+ for name, value in vars(cls).items():
+ if not name.startswith('__') or not name.endswith('__'):
+ continue
+ if 'slot wrapper' not in str(value):
+ continue
+ yield name
+
+
class TypesTests(unittest.TestCase):
def test_truth_values(self):
@@ -2362,24 +2382,33 @@ def setUpClass(cls):
def test_slot_wrappers(self):
rch, sch = interpreters.channels.create()
- # For now it's sufficient to check int.__str__.
- # See https://github.com/python/cpython/issues/117482
- # and https://github.com/python/cpython/pull/117660.
- script = textwrap.dedent('''
- text = repr(int.__str__)
- sch.send_nowait(text)
- ''')
+ slots = []
+ script = ''
+ for cls in iter_builtin_types():
+ for slot in iter_own_slot_wrappers(cls):
+ slots.append((cls, slot))
+ script += textwrap.dedent(f"""
+ text = repr({cls.__name__}.{slot})
+ sch.send_nowait(({cls.__name__!r}, {slot!r}, text))
+ """)
exec(script)
- expected = rch.recv()
+ all_expected = []
+ for cls, slot in slots:
+ result = rch.recv()
+ assert result == (cls.__name__, slot, result[2]), (cls, slot, result)
+ all_expected.append(result)
interp = interpreters.create()
interp.exec('from test.support import interpreters')
interp.prepare_main(sch=sch)
interp.exec(script)
- results = rch.recv()
- self.assertEqual(results, expected)
+        for i, (cls, slot) in enumerate(slots):
+            with self.subTest(cls=cls, slot=slot):
+ expected = all_expected[i]
+ result = rch.recv()
+ self.assertEqual(result, expected)
if __name__ == '__main__':
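The two module-level helpers added to test_types.py feed both the embedding test and the subinterpreter test: iter_builtin_types() yields the classes exposed in builtins, and iter_own_slot_wrappers() yields the dunder names whose values are slot wrappers defined directly on that class. A minimal sketch of what they enumerate (CPython-only, since the check relies on the 'slot wrapper' repr):

    from test.test_types import iter_builtin_types, iter_own_slot_wrappers

    for cls in iter_builtin_types():
        for slot in iter_own_slot_wrappers(cls):
            # e.g. int __repr__, str __add__, ...
            print(cls.__name__, slot)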
From 64e221d7ada8f6c20189035c7e81503f4c914f04 Mon Sep 17 00:00:00 2001
From: Sam Gross
Date: Tue, 23 Jul 2024 16:30:49 -0400
Subject: [PATCH 003/139] gh-117657: Skip tests that use threads after fork
(#122194)
These tests fail when run under thread sanitizer due to the use of fork
and threads.
---
Lib/test/test_asyncio/test_unix_events.py | 3 +++
1 file changed, 3 insertions(+)
diff --git a/Lib/test/test_asyncio/test_unix_events.py b/Lib/test/test_asyncio/test_unix_events.py
index 4966775acac7be..9ae54b6887010b 100644
--- a/Lib/test/test_asyncio/test_unix_events.py
+++ b/Lib/test/test_asyncio/test_unix_events.py
@@ -1212,6 +1212,7 @@ async def test_fork_not_share_event_loop(self):
wait_process(pid, exitcode=0)
@hashlib_helper.requires_hashdigest('md5')
+ @support.skip_if_sanitizer("TSAN doesn't support threads after fork", thread=True)
def test_fork_signal_handling(self):
self.addCleanup(multiprocessing_cleanup_tests)
@@ -1258,6 +1259,7 @@ async def func():
self.assertTrue(child_handled.is_set())
@hashlib_helper.requires_hashdigest('md5')
+ @support.skip_if_sanitizer("TSAN doesn't support threads after fork", thread=True)
def test_fork_asyncio_run(self):
self.addCleanup(multiprocessing_cleanup_tests)
@@ -1277,6 +1279,7 @@ async def child_main():
self.assertEqual(result.value, 42)
@hashlib_helper.requires_hashdigest('md5')
+ @support.skip_if_sanitizer("TSAN doesn't support threads after fork", thread=True)
def test_fork_asyncio_subprocess(self):
self.addCleanup(multiprocessing_cleanup_tests)
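The decorator used above is the standard test.support helper; a hedged sketch of the same pattern applied to any other fork-then-thread test (the test case and method names here are hypothetical):

    import unittest
    from test import support

    class ForkThreadTests(unittest.TestCase):
        @support.skip_if_sanitizer("TSAN doesn't support threads after fork",
                                   thread=True)
        def test_fork_then_spawn_thread(self):
            ...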
From 7b7b90d1ce5116f29ad6c8120c0490824baa54e0 Mon Sep 17 00:00:00 2001
From: Jelle Zijlstra
Date: Tue, 23 Jul 2024 14:16:50 -0700
Subject: [PATCH 004/139] gh-119180: Add `annotationlib` module to support PEP
649 (#119891)
Co-authored-by: Alex Waygood
---
Doc/howto/descriptor.rst | 8 +-
Lib/annotationlib.py | 655 +++++++++++++++
Lib/dataclasses.py | 4 +-
Lib/functools.py | 14 +-
Lib/inspect.py | 116 +--
Lib/test/test_annotationlib.py | 771 ++++++++++++++++++
Lib/test/test_dataclasses/__init__.py | 10 +
Lib/test/test_functools.py | 41 +
Lib/test/test_grammar.py | 3 +-
Lib/test/test_inspect/test_inspect.py | 215 +----
Lib/test/test_type_annotations.py | 13 +-
Lib/test/test_typing.py | 138 +++-
Lib/typing.py | 332 ++++----
...-06-11-07-17-25.gh-issue-119180.iH-2zy.rst | 4 +
Python/stdlib_module_names.h | 1 +
15 files changed, 1815 insertions(+), 510 deletions(-)
create mode 100644 Lib/annotationlib.py
create mode 100644 Lib/test/test_annotationlib.py
create mode 100644 Misc/NEWS.d/next/Library/2024-06-11-07-17-25.gh-issue-119180.iH-2zy.rst
diff --git a/Doc/howto/descriptor.rst b/Doc/howto/descriptor.rst
index b29488be39a0a3..67e981f9c57abe 100644
--- a/Doc/howto/descriptor.rst
+++ b/Doc/howto/descriptor.rst
@@ -1366,11 +1366,15 @@ Using the non-data descriptor protocol, a pure Python version of
def __call__(self, *args, **kwds):
return self.f(*args, **kwds)
+ @property
+ def __annotations__(self):
+ return self.f.__annotations__
+
The :func:`functools.update_wrapper` call adds a ``__wrapped__`` attribute
that refers to the underlying function. Also it carries forward
the attributes necessary to make the wrapper look like the wrapped
-function: :attr:`~function.__name__`, :attr:`~function.__qualname__`,
-:attr:`~function.__doc__`, and :attr:`~function.__annotations__`.
+function, including :attr:`~function.__name__`, :attr:`~function.__qualname__`,
+and :attr:`~function.__doc__`.
.. testcode::
:hide:
diff --git a/Lib/annotationlib.py b/Lib/annotationlib.py
new file mode 100644
index 00000000000000..b4036ffb189c2d
--- /dev/null
+++ b/Lib/annotationlib.py
@@ -0,0 +1,655 @@
+"""Helpers for introspecting and wrapping annotations."""
+
+import ast
+import enum
+import functools
+import sys
+import types
+
+__all__ = ["Format", "ForwardRef", "call_annotate_function", "get_annotations"]
+
+
+class Format(enum.IntEnum):
+ VALUE = 1
+ FORWARDREF = 2
+ SOURCE = 3
+
+
+_Union = None
+_sentinel = object()
+
+# Slots shared by ForwardRef and _Stringifier. The __forward__ names must be
+# preserved for compatibility with the old typing.ForwardRef class. The remaining
+# names are private.
+_SLOTS = (
+ "__forward_evaluated__",
+ "__forward_value__",
+ "__forward_is_argument__",
+ "__forward_is_class__",
+ "__forward_module__",
+ "__weakref__",
+ "__arg__",
+ "__ast_node__",
+ "__code__",
+ "__globals__",
+ "__owner__",
+ "__cell__",
+)
+
+
+class ForwardRef:
+ """Wrapper that holds a forward reference."""
+
+ __slots__ = _SLOTS
+
+ def __init__(
+ self,
+ arg,
+ *,
+ module=None,
+ owner=None,
+ is_argument=True,
+ is_class=False,
+ _globals=None,
+ _cell=None,
+ ):
+ if not isinstance(arg, str):
+ raise TypeError(f"Forward reference must be a string -- got {arg!r}")
+
+ self.__arg__ = arg
+ self.__forward_evaluated__ = False
+ self.__forward_value__ = None
+ self.__forward_is_argument__ = is_argument
+ self.__forward_is_class__ = is_class
+ self.__forward_module__ = module
+ self.__code__ = None
+ self.__ast_node__ = None
+ self.__globals__ = _globals
+ self.__cell__ = _cell
+ self.__owner__ = owner
+
+ def __init_subclass__(cls, /, *args, **kwds):
+ raise TypeError("Cannot subclass ForwardRef")
+
+ def evaluate(self, *, globals=None, locals=None, type_params=None, owner=None):
+ """Evaluate the forward reference and return the value.
+
+ If the forward reference is not evaluatable, raise an exception.
+ """
+ if self.__forward_evaluated__:
+ return self.__forward_value__
+ if self.__cell__ is not None:
+ try:
+ value = self.__cell__.cell_contents
+ except ValueError:
+ pass
+ else:
+ self.__forward_evaluated__ = True
+ self.__forward_value__ = value
+ return value
+ if owner is None:
+ owner = self.__owner__
+ if type_params is None and owner is None:
+ raise TypeError("Either 'type_params' or 'owner' must be provided")
+
+ if self.__forward_module__ is not None:
+ globals = getattr(
+ sys.modules.get(self.__forward_module__, None), "__dict__", globals
+ )
+ if globals is None:
+ globals = self.__globals__
+ if globals is None:
+ if isinstance(owner, type):
+ module_name = getattr(owner, "__module__", None)
+ if module_name:
+ module = sys.modules.get(module_name, None)
+ if module:
+ globals = getattr(module, "__dict__", None)
+ elif isinstance(owner, types.ModuleType):
+ globals = getattr(owner, "__dict__", None)
+ elif callable(owner):
+ globals = getattr(owner, "__globals__", None)
+
+ if locals is None:
+ locals = {}
+ if isinstance(self.__owner__, type):
+ locals.update(vars(self.__owner__))
+
+ if type_params is None and self.__owner__ is not None:
+ # "Inject" type parameters into the local namespace
+ # (unless they are shadowed by assignments *in* the local namespace),
+ # as a way of emulating annotation scopes when calling `eval()`
+ type_params = getattr(self.__owner__, "__type_params__", None)
+
+ # type parameters require some special handling,
+ # as they exist in their own scope
+ # but `eval()` does not have a dedicated parameter for that scope.
+ # For classes, names in type parameter scopes should override
+ # names in the global scope (which here are called `localns`!),
+ # but should in turn be overridden by names in the class scope
+ # (which here are called `globalns`!)
+ if type_params is not None:
+ globals, locals = dict(globals), dict(locals)
+ for param in type_params:
+ param_name = param.__name__
+ if not self.__forward_is_class__ or param_name not in globals:
+ globals[param_name] = param
+ locals.pop(param_name, None)
+
+ code = self.__forward_code__
+ value = eval(code, globals=globals, locals=locals)
+ self.__forward_evaluated__ = True
+ self.__forward_value__ = value
+ return value
+
+ def _evaluate(self, globalns, localns, type_params=_sentinel, *, recursive_guard):
+ import typing
+ import warnings
+
+ if type_params is _sentinel:
+ typing._deprecation_warning_for_no_type_params_passed(
+ "typing.ForwardRef._evaluate"
+ )
+ type_params = ()
+ warnings._deprecated(
+ "ForwardRef._evaluate",
+ "{name} is a private API and is retained for compatibility, but will be removed"
+ " in Python 3.16. Use ForwardRef.evaluate() or typing.evaluate_forward_ref() instead.",
+ remove=(3, 16),
+ )
+ return typing.evaluate_forward_ref(
+ self,
+ globals=globalns,
+ locals=localns,
+ type_params=type_params,
+ _recursive_guard=recursive_guard,
+ )
+
+ @property
+ def __forward_arg__(self):
+ if self.__arg__ is not None:
+ return self.__arg__
+ if self.__ast_node__ is not None:
+ self.__arg__ = ast.unparse(self.__ast_node__)
+ return self.__arg__
+ raise AssertionError(
+ "Attempted to access '__forward_arg__' on an uninitialized ForwardRef"
+ )
+
+ @property
+ def __forward_code__(self):
+ if self.__code__ is not None:
+ return self.__code__
+ arg = self.__forward_arg__
+ # If we do `def f(*args: *Ts)`, then we'll have `arg = '*Ts'`.
+ # Unfortunately, this isn't a valid expression on its own, so we
+ # do the unpacking manually.
+ if arg.startswith("*"):
+ arg_to_compile = f"({arg},)[0]" # E.g. (*Ts,)[0] or (*tuple[int, int],)[0]
+ else:
+ arg_to_compile = arg
+ try:
+ self.__code__ = compile(arg_to_compile, "", "eval")
+ except SyntaxError:
+ raise SyntaxError(f"Forward reference must be an expression -- got {arg!r}")
+ return self.__code__
+
+ def __eq__(self, other):
+ if not isinstance(other, ForwardRef):
+ return NotImplemented
+ if self.__forward_evaluated__ and other.__forward_evaluated__:
+ return (
+ self.__forward_arg__ == other.__forward_arg__
+ and self.__forward_value__ == other.__forward_value__
+ )
+ return (
+ self.__forward_arg__ == other.__forward_arg__
+ and self.__forward_module__ == other.__forward_module__
+ )
+
+ def __hash__(self):
+ return hash((self.__forward_arg__, self.__forward_module__))
+
+ def __or__(self, other):
+ global _Union
+ if _Union is None:
+ from typing import Union as _Union
+ return _Union[self, other]
+
+ def __ror__(self, other):
+ global _Union
+ if _Union is None:
+ from typing import Union as _Union
+ return _Union[other, self]
+
+ def __repr__(self):
+ if self.__forward_module__ is None:
+ module_repr = ""
+ else:
+ module_repr = f", module={self.__forward_module__!r}"
+ return f"ForwardRef({self.__forward_arg__!r}{module_repr})"
+
+
+class _Stringifier:
+ # Must match the slots on ForwardRef, so we can turn an instance of one into an
+ # instance of the other in place.
+ __slots__ = _SLOTS
+
+ def __init__(self, node, globals=None, owner=None, is_class=False, cell=None):
+ assert isinstance(node, ast.AST)
+ self.__arg__ = None
+ self.__forward_evaluated__ = False
+ self.__forward_value__ = None
+ self.__forward_is_argument__ = False
+ self.__forward_is_class__ = is_class
+ self.__forward_module__ = None
+ self.__code__ = None
+ self.__ast_node__ = node
+ self.__globals__ = globals
+ self.__cell__ = cell
+ self.__owner__ = owner
+
+ def __convert(self, other):
+ if isinstance(other, _Stringifier):
+ return other.__ast_node__
+ elif isinstance(other, slice):
+ return ast.Slice(
+ lower=self.__convert(other.start) if other.start is not None else None,
+ upper=self.__convert(other.stop) if other.stop is not None else None,
+ step=self.__convert(other.step) if other.step is not None else None,
+ )
+ else:
+ return ast.Constant(value=other)
+
+ def __make_new(self, node):
+ return _Stringifier(
+ node, self.__globals__, self.__owner__, self.__forward_is_class__
+ )
+
+ # Must implement this since we set __eq__. We hash by identity so that
+ # stringifiers in dict keys are kept separate.
+ def __hash__(self):
+ return id(self)
+
+ def __getitem__(self, other):
+ # Special case, to avoid stringifying references to class-scoped variables
+ # as '__classdict__["x"]'.
+ if (
+ isinstance(self.__ast_node__, ast.Name)
+ and self.__ast_node__.id == "__classdict__"
+ ):
+ raise KeyError
+ if isinstance(other, tuple):
+ elts = [self.__convert(elt) for elt in other]
+ other = ast.Tuple(elts)
+ else:
+ other = self.__convert(other)
+ assert isinstance(other, ast.AST), repr(other)
+ return self.__make_new(ast.Subscript(self.__ast_node__, other))
+
+ def __getattr__(self, attr):
+ return self.__make_new(ast.Attribute(self.__ast_node__, attr))
+
+ def __call__(self, *args, **kwargs):
+ return self.__make_new(
+ ast.Call(
+ self.__ast_node__,
+ [self.__convert(arg) for arg in args],
+ [
+ ast.keyword(key, self.__convert(value))
+ for key, value in kwargs.items()
+ ],
+ )
+ )
+
+ def __iter__(self):
+ yield self.__make_new(ast.Starred(self.__ast_node__))
+
+ def __repr__(self):
+ return ast.unparse(self.__ast_node__)
+
+ def __format__(self, format_spec):
+ raise TypeError("Cannot stringify annotation containing string formatting")
+
+ def _make_binop(op: ast.AST):
+ def binop(self, other):
+ return self.__make_new(
+ ast.BinOp(self.__ast_node__, op, self.__convert(other))
+ )
+
+ return binop
+
+ __add__ = _make_binop(ast.Add())
+ __sub__ = _make_binop(ast.Sub())
+ __mul__ = _make_binop(ast.Mult())
+ __matmul__ = _make_binop(ast.MatMult())
+ __truediv__ = _make_binop(ast.Div())
+ __mod__ = _make_binop(ast.Mod())
+ __lshift__ = _make_binop(ast.LShift())
+ __rshift__ = _make_binop(ast.RShift())
+ __or__ = _make_binop(ast.BitOr())
+ __xor__ = _make_binop(ast.BitXor())
+ __and__ = _make_binop(ast.BitAnd())
+ __floordiv__ = _make_binop(ast.FloorDiv())
+ __pow__ = _make_binop(ast.Pow())
+
+ del _make_binop
+
+ def _make_rbinop(op: ast.AST):
+ def rbinop(self, other):
+ return self.__make_new(
+ ast.BinOp(self.__convert(other), op, self.__ast_node__)
+ )
+
+ return rbinop
+
+ __radd__ = _make_rbinop(ast.Add())
+ __rsub__ = _make_rbinop(ast.Sub())
+ __rmul__ = _make_rbinop(ast.Mult())
+ __rmatmul__ = _make_rbinop(ast.MatMult())
+ __rtruediv__ = _make_rbinop(ast.Div())
+ __rmod__ = _make_rbinop(ast.Mod())
+ __rlshift__ = _make_rbinop(ast.LShift())
+ __rrshift__ = _make_rbinop(ast.RShift())
+ __ror__ = _make_rbinop(ast.BitOr())
+ __rxor__ = _make_rbinop(ast.BitXor())
+ __rand__ = _make_rbinop(ast.BitAnd())
+ __rfloordiv__ = _make_rbinop(ast.FloorDiv())
+ __rpow__ = _make_rbinop(ast.Pow())
+
+ del _make_rbinop
+
+ def _make_compare(op):
+ def compare(self, other):
+ return self.__make_new(
+ ast.Compare(
+ left=self.__ast_node__,
+ ops=[op],
+ comparators=[self.__convert(other)],
+ )
+ )
+
+ return compare
+
+ __lt__ = _make_compare(ast.Lt())
+ __le__ = _make_compare(ast.LtE())
+ __eq__ = _make_compare(ast.Eq())
+ __ne__ = _make_compare(ast.NotEq())
+ __gt__ = _make_compare(ast.Gt())
+ __ge__ = _make_compare(ast.GtE())
+
+ del _make_compare
+
+ def _make_unary_op(op):
+ def unary_op(self):
+ return self.__make_new(ast.UnaryOp(op, self.__ast_node__))
+
+ return unary_op
+
+ __invert__ = _make_unary_op(ast.Invert())
+ __pos__ = _make_unary_op(ast.UAdd())
+ __neg__ = _make_unary_op(ast.USub())
+
+ del _make_unary_op
+
+
+class _StringifierDict(dict):
+ def __init__(self, namespace, globals=None, owner=None, is_class=False):
+ super().__init__(namespace)
+ self.namespace = namespace
+ self.globals = globals
+ self.owner = owner
+ self.is_class = is_class
+ self.stringifiers = []
+
+ def __missing__(self, key):
+ fwdref = _Stringifier(
+ ast.Name(id=key),
+ globals=self.globals,
+ owner=self.owner,
+ is_class=self.is_class,
+ )
+ self.stringifiers.append(fwdref)
+ return fwdref
+
+
+def call_annotate_function(annotate, format, owner=None):
+ """Call an __annotate__ function. __annotate__ functions are normally
+ generated by the compiler to defer the evaluation of annotations. They
+ can be called with any of the format arguments in the Format enum, but
+ compiler-generated __annotate__ functions only support the VALUE format.
+ This function provides additional functionality to call __annotate__
+ functions with the FORWARDREF and SOURCE formats.
+
+ *annotate* must be an __annotate__ function, which takes a single argument
+ and returns a dict of annotations.
+
+ *format* must be a member of the Format enum or one of the corresponding
+ integer values.
+
+ *owner* can be the object that owns the annotations (i.e., the module,
+ class, or function that the __annotate__ function derives from). With the
+ FORWARDREF format, it is used to provide better evaluation capabilities
+ on the generated ForwardRef objects.
+
+ """
+ try:
+ return annotate(format)
+ except NotImplementedError:
+ pass
+ if format == Format.SOURCE:
+ # SOURCE is implemented by calling the annotate function in a special
+ # environment where every name lookup results in an instance of _Stringifier.
+ # _Stringifier supports every dunder operation and returns a new _Stringifier.
+ # At the end, we get a dictionary that mostly contains _Stringifier objects (or
+ # possibly constants if the annotate function uses them directly). We then
+ # convert each of those into a string to get an approximation of the
+ # original source.
+ globals = _StringifierDict({})
+ if annotate.__closure__:
+ freevars = annotate.__code__.co_freevars
+ new_closure = []
+ for i, cell in enumerate(annotate.__closure__):
+ if i < len(freevars):
+ name = freevars[i]
+ else:
+ name = "__cell__"
+ fwdref = _Stringifier(ast.Name(id=name))
+ new_closure.append(types.CellType(fwdref))
+ closure = tuple(new_closure)
+ else:
+ closure = None
+ func = types.FunctionType(annotate.__code__, globals, closure=closure)
+ annos = func(Format.VALUE)
+ return {
+ key: val if isinstance(val, str) else repr(val)
+ for key, val in annos.items()
+ }
+ elif format == Format.FORWARDREF:
+ # FORWARDREF is implemented similarly to SOURCE, but there are two changes,
+ # at the beginning and the end of the process.
+ # First, while SOURCE uses an empty dictionary as the namespace, so that all
+ # name lookups result in _Stringifier objects, FORWARDREF uses the globals
+ # and builtins, so that defined names map to their real values.
+ # Second, instead of returning strings, we want to return either real values
+ # or ForwardRef objects. To do this, we keep track of all _Stringifier objects
+ # created while the annotation is being evaluated, and at the end we convert
+ # them all to ForwardRef objects by assigning to __class__. To make this
+ # technique work, we have to ensure that the _Stringifier and ForwardRef
+ # classes share the same attributes.
+ # We use this technique because while the annotations are being evaluated,
+ # we want to support all operations that the language allows, including even
+ # __getattr__ and __eq__, and return new _Stringifier objects so we can accurately
+ # reconstruct the source. But in the dictionary that we eventually return, we
+ # want to return objects with more user-friendly behavior, such as an __eq__
+        # that returns a bool and a defined set of attributes.
+ namespace = {**annotate.__builtins__, **annotate.__globals__}
+ is_class = isinstance(owner, type)
+ globals = _StringifierDict(namespace, annotate.__globals__, owner, is_class)
+ if annotate.__closure__:
+ freevars = annotate.__code__.co_freevars
+ new_closure = []
+ for i, cell in enumerate(annotate.__closure__):
+ try:
+ cell.cell_contents
+ except ValueError:
+ if i < len(freevars):
+ name = freevars[i]
+ else:
+ name = "__cell__"
+ fwdref = _Stringifier(
+ ast.Name(id=name),
+ cell=cell,
+ owner=owner,
+ globals=annotate.__globals__,
+ is_class=is_class,
+ )
+ globals.stringifiers.append(fwdref)
+ new_closure.append(types.CellType(fwdref))
+ else:
+ new_closure.append(cell)
+ closure = tuple(new_closure)
+ else:
+ closure = None
+ func = types.FunctionType(annotate.__code__, globals, closure=closure)
+ result = func(Format.VALUE)
+ for obj in globals.stringifiers:
+ obj.__class__ = ForwardRef
+ return result
+ elif format == Format.VALUE:
+ # Should be impossible because __annotate__ functions must not raise
+ # NotImplementedError for this format.
+ raise RuntimeError("annotate function does not support VALUE format")
+ else:
+ raise ValueError(f"Invalid format: {format!r}")
+
+
+def get_annotations(
+ obj, *, globals=None, locals=None, eval_str=False, format=Format.VALUE
+):
+ """Compute the annotations dict for an object.
+
+ obj may be a callable, class, or module.
+ Passing in an object of any other type raises TypeError.
+
+ Returns a dict. get_annotations() returns a new dict every time
+ it's called; calling it twice on the same object will return two
+ different but equivalent dicts.
+
+ This function handles several details for you:
+
+ * If eval_str is true, values of type str will
+ be un-stringized using eval(). This is intended
+ for use with stringized annotations
+ ("from __future__ import annotations").
+ * If obj doesn't have an annotations dict, returns an
+ empty dict. (Functions and methods always have an
+ annotations dict; classes, modules, and other types of
+ callables may not.)
+ * Ignores inherited annotations on classes. If a class
+ doesn't have its own annotations dict, returns an empty dict.
+ * All accesses to object members and dict values are done
+ using getattr() and dict.get() for safety.
+ * Always, always, always returns a freshly-created dict.
+
+ eval_str controls whether or not values of type str are replaced
+ with the result of calling eval() on those values:
+
+ * If eval_str is true, eval() is called on values of type str.
+ * If eval_str is false (the default), values of type str are unchanged.
+
+ globals and locals are passed in to eval(); see the documentation
+ for eval() for more information. If either globals or locals is
+ None, this function may replace that value with a context-specific
+ default, contingent on type(obj):
+
+ * If obj is a module, globals defaults to obj.__dict__.
+ * If obj is a class, globals defaults to
+ sys.modules[obj.__module__].__dict__ and locals
+ defaults to the obj class namespace.
+ * If obj is a callable, globals defaults to obj.__globals__,
+ although if obj is a wrapped function (using
+ functools.update_wrapper()) it is first unwrapped.
+ """
+ if eval_str and format != Format.VALUE:
+ raise ValueError("eval_str=True is only supported with format=Format.VALUE")
+
+ # For VALUE format, we look at __annotations__ directly.
+ if format != Format.VALUE:
+ annotate = getattr(obj, "__annotate__", None)
+ if annotate is not None:
+ ann = call_annotate_function(annotate, format, owner=obj)
+ if not isinstance(ann, dict):
+ raise ValueError(f"{obj!r}.__annotate__ returned a non-dict")
+ return dict(ann)
+
+ ann = getattr(obj, "__annotations__", None)
+ if ann is None:
+ return {}
+
+ if not isinstance(ann, dict):
+ raise ValueError(f"{obj!r}.__annotations__ is neither a dict nor None")
+
+ if not ann:
+ return {}
+
+ if not eval_str:
+ return dict(ann)
+
+ if isinstance(obj, type):
+ # class
+ obj_globals = None
+ module_name = getattr(obj, "__module__", None)
+ if module_name:
+ module = sys.modules.get(module_name, None)
+ if module:
+ obj_globals = getattr(module, "__dict__", None)
+ obj_locals = dict(vars(obj))
+ unwrap = obj
+ elif isinstance(obj, types.ModuleType):
+ # module
+ obj_globals = getattr(obj, "__dict__")
+ obj_locals = None
+ unwrap = None
+ elif callable(obj):
+ # this includes types.Function, types.BuiltinFunctionType,
+ # types.BuiltinMethodType, functools.partial, functools.singledispatch,
+ # "class funclike" from Lib/test/test_inspect... on and on it goes.
+ obj_globals = getattr(obj, "__globals__", None)
+ obj_locals = None
+ unwrap = obj
+ elif ann is not None:
+ obj_globals = obj_locals = unwrap = None
+ else:
+ raise TypeError(f"{obj!r} is not a module, class, or callable.")
+
+ if unwrap is not None:
+ while True:
+ if hasattr(unwrap, "__wrapped__"):
+ unwrap = unwrap.__wrapped__
+ continue
+ if isinstance(unwrap, functools.partial):
+ unwrap = unwrap.func
+ continue
+ break
+ if hasattr(unwrap, "__globals__"):
+ obj_globals = unwrap.__globals__
+
+ if globals is None:
+ globals = obj_globals
+ if locals is None:
+ locals = obj_locals
+
+ # "Inject" type parameters into the local namespace
+ # (unless they are shadowed by assignments *in* the local namespace),
+ # as a way of emulating annotation scopes when calling `eval()`
+ if type_params := getattr(obj, "__type_params__", ()):
+ if locals is None:
+ locals = {}
+ locals = {param.__name__: param for param in type_params} | locals
+
+ return_value = {
+ key: value if not isinstance(value, str) else eval(value, globals, locals)
+ for key, value in ann.items()
+ }
+ return return_value
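For a function whose annotation names an undefined symbol, the three formats behave roughly as follows (a minimal sketch assuming deferred annotations per PEP 649; Undefined is a deliberately unresolved, hypothetical name):

    import annotationlib
    from annotationlib import Format

    def f(x: int, y: Undefined):
        pass

    # VALUE would raise NameError while evaluating `Undefined`; FORWARDREF
    # returns a ForwardRef proxy instead, and SOURCE returns the annotation
    # text unevaluated.
    print(annotationlib.get_annotations(f, format=Format.FORWARDREF))
    # {'x': <class 'int'>, 'y': ForwardRef('Undefined')}
    print(annotationlib.get_annotations(f, format=Format.SOURCE))
    # {'x': 'int', 'y': 'Undefined'}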
diff --git a/Lib/dataclasses.py b/Lib/dataclasses.py
index 74011b7e28b9f3..4cba606dd8dd4d 100644
--- a/Lib/dataclasses.py
+++ b/Lib/dataclasses.py
@@ -5,6 +5,7 @@
import inspect
import keyword
import itertools
+import annotationlib
import abc
from reprlib import recursive_repr
@@ -981,7 +982,8 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen,
# actual default value. Pseudo-fields ClassVars and InitVars are
# included, despite the fact that they're not real fields. That's
# dealt with later.
- cls_annotations = inspect.get_annotations(cls)
+ cls_annotations = annotationlib.get_annotations(
+ cls, format=annotationlib.Format.FORWARDREF)
# Now find fields in our class. While doing so, validate some
# things, and set the default values (as class attributes) where
diff --git a/Lib/functools.py b/Lib/functools.py
index a10493f0e25360..49ea9a2f6999f5 100644
--- a/Lib/functools.py
+++ b/Lib/functools.py
@@ -32,7 +32,7 @@
# wrapper functions that can handle naive introspection
WRAPPER_ASSIGNMENTS = ('__module__', '__name__', '__qualname__', '__doc__',
- '__annotations__', '__type_params__')
+ '__annotate__', '__type_params__')
WRAPPER_UPDATES = ('__dict__',)
def update_wrapper(wrapper,
wrapped,
@@ -882,8 +882,8 @@ def register(cls, func=None):
f"Invalid first argument to `register()`. "
f"{cls!r} is not a class or union type."
)
- ann = getattr(cls, '__annotations__', {})
- if not ann:
+ ann = getattr(cls, '__annotate__', None)
+ if ann is None:
raise TypeError(
f"Invalid first argument to `register()`: {cls!r}. "
f"Use either `@register(some_class)` or plain `@register` "
@@ -893,13 +893,19 @@ def register(cls, func=None):
# only import typing if annotation parsing is necessary
from typing import get_type_hints
- argname, cls = next(iter(get_type_hints(func).items()))
+ from annotationlib import Format, ForwardRef
+ argname, cls = next(iter(get_type_hints(func, format=Format.FORWARDREF).items()))
if not _is_valid_dispatch_type(cls):
if _is_union_type(cls):
raise TypeError(
f"Invalid annotation for {argname!r}. "
f"{cls!r} not all arguments are classes."
)
+ elif isinstance(cls, ForwardRef):
+ raise TypeError(
+ f"Invalid annotation for {argname!r}. "
+ f"{cls!r} is an unresolved forward reference."
+ )
else:
raise TypeError(
f"Invalid annotation for {argname!r}. "
diff --git a/Lib/inspect.py b/Lib/inspect.py
index 0e7b40eb39bce8..ba3ecbb87c7026 100644
--- a/Lib/inspect.py
+++ b/Lib/inspect.py
@@ -142,6 +142,7 @@
import abc
+from annotationlib import get_annotations
import ast
import dis
import collections.abc
@@ -173,121 +174,6 @@
TPFLAGS_IS_ABSTRACT = 1 << 20
-def get_annotations(obj, *, globals=None, locals=None, eval_str=False):
- """Compute the annotations dict for an object.
-
- obj may be a callable, class, or module.
- Passing in an object of any other type raises TypeError.
-
- Returns a dict. get_annotations() returns a new dict every time
- it's called; calling it twice on the same object will return two
- different but equivalent dicts.
-
- This function handles several details for you:
-
- * If eval_str is true, values of type str will
- be un-stringized using eval(). This is intended
- for use with stringized annotations
- ("from __future__ import annotations").
- * If obj doesn't have an annotations dict, returns an
- empty dict. (Functions and methods always have an
- annotations dict; classes, modules, and other types of
- callables may not.)
- * Ignores inherited annotations on classes. If a class
- doesn't have its own annotations dict, returns an empty dict.
- * All accesses to object members and dict values are done
- using getattr() and dict.get() for safety.
- * Always, always, always returns a freshly-created dict.
-
- eval_str controls whether or not values of type str are replaced
- with the result of calling eval() on those values:
-
- * If eval_str is true, eval() is called on values of type str.
- * If eval_str is false (the default), values of type str are unchanged.
-
- globals and locals are passed in to eval(); see the documentation
- for eval() for more information. If either globals or locals is
- None, this function may replace that value with a context-specific
- default, contingent on type(obj):
-
- * If obj is a module, globals defaults to obj.__dict__.
- * If obj is a class, globals defaults to
- sys.modules[obj.__module__].__dict__ and locals
- defaults to the obj class namespace.
- * If obj is a callable, globals defaults to obj.__globals__,
- although if obj is a wrapped function (using
- functools.update_wrapper()) it is first unwrapped.
- """
- if isinstance(obj, type):
- # class
- ann = obj.__annotations__
-
- obj_globals = None
- module_name = getattr(obj, '__module__', None)
- if module_name:
- module = sys.modules.get(module_name, None)
- if module:
- obj_globals = getattr(module, '__dict__', None)
- obj_locals = dict(vars(obj))
- unwrap = obj
- elif isinstance(obj, types.ModuleType):
- # module
- ann = getattr(obj, '__annotations__', None)
- obj_globals = getattr(obj, '__dict__')
- obj_locals = None
- unwrap = None
- elif callable(obj):
- # this includes types.Function, types.BuiltinFunctionType,
- # types.BuiltinMethodType, functools.partial, functools.singledispatch,
- # "class funclike" from Lib/test/test_inspect... on and on it goes.
- ann = getattr(obj, '__annotations__', None)
- obj_globals = getattr(obj, '__globals__', None)
- obj_locals = None
- unwrap = obj
- else:
- raise TypeError(f"{obj!r} is not a module, class, or callable.")
-
- if ann is None:
- return {}
-
- if not isinstance(ann, dict):
- raise ValueError(f"{obj!r}.__annotations__ is neither a dict nor None")
-
- if not ann:
- return {}
-
- if not eval_str:
- return dict(ann)
-
- if unwrap is not None:
- while True:
- if hasattr(unwrap, '__wrapped__'):
- unwrap = unwrap.__wrapped__
- continue
- if isinstance(unwrap, functools.partial):
- unwrap = unwrap.func
- continue
- break
- if hasattr(unwrap, "__globals__"):
- obj_globals = unwrap.__globals__
-
- if globals is None:
- globals = obj_globals
- if locals is None:
- locals = obj_locals or {}
-
- # "Inject" type parameters into the local namespace
- # (unless they are shadowed by assignments *in* the local namespace),
- # as a way of emulating annotation scopes when calling `eval()`
- if type_params := getattr(obj, "__type_params__", ()):
- locals = {param.__name__: param for param in type_params} | locals
-
- return_value = {key:
- value if not isinstance(value, str) else eval(value, globals, locals)
- for key, value in ann.items() }
- return return_value
-
-
# ----------------------------------------------------------- type-checking
def ismodule(object):
"""Return true if the object is a module."""
diff --git a/Lib/test/test_annotationlib.py b/Lib/test/test_annotationlib.py
new file mode 100644
index 00000000000000..e68d63c91d1a73
--- /dev/null
+++ b/Lib/test/test_annotationlib.py
@@ -0,0 +1,771 @@
+"""Tests for the annotations module."""
+
+import annotationlib
+import functools
+import pickle
+import unittest
+from typing import Unpack
+
+from test.test_inspect import inspect_stock_annotations
+from test.test_inspect import inspect_stringized_annotations
+from test.test_inspect import inspect_stringized_annotations_2
+from test.test_inspect import inspect_stringized_annotations_pep695
+
+
+def times_three(fn):
+ @functools.wraps(fn)
+ def wrapper(a, b):
+ return fn(a * 3, b * 3)
+
+ return wrapper
+
+
+class TestFormat(unittest.TestCase):
+ def test_enum(self):
+ self.assertEqual(annotationlib.Format.VALUE.value, 1)
+ self.assertEqual(annotationlib.Format.VALUE, 1)
+
+ self.assertEqual(annotationlib.Format.FORWARDREF.value, 2)
+ self.assertEqual(annotationlib.Format.FORWARDREF, 2)
+
+ self.assertEqual(annotationlib.Format.SOURCE.value, 3)
+ self.assertEqual(annotationlib.Format.SOURCE, 3)
+
+
+class TestForwardRefFormat(unittest.TestCase):
+ def test_closure(self):
+ def inner(arg: x):
+ pass
+
+ anno = annotationlib.get_annotations(
+ inner, format=annotationlib.Format.FORWARDREF
+ )
+ fwdref = anno["arg"]
+ self.assertIsInstance(fwdref, annotationlib.ForwardRef)
+ self.assertEqual(fwdref.__forward_arg__, "x")
+ with self.assertRaises(NameError):
+ fwdref.evaluate()
+
+ x = 1
+ self.assertEqual(fwdref.evaluate(), x)
+
+ anno = annotationlib.get_annotations(
+ inner, format=annotationlib.Format.FORWARDREF
+ )
+ self.assertEqual(anno["arg"], x)
+
+ def test_function(self):
+ def f(x: int, y: doesntexist):
+ pass
+
+ anno = annotationlib.get_annotations(f, format=annotationlib.Format.FORWARDREF)
+ self.assertIs(anno["x"], int)
+ fwdref = anno["y"]
+ self.assertIsInstance(fwdref, annotationlib.ForwardRef)
+ self.assertEqual(fwdref.__forward_arg__, "doesntexist")
+ with self.assertRaises(NameError):
+ fwdref.evaluate()
+ self.assertEqual(fwdref.evaluate(globals={"doesntexist": 1}), 1)
+
+
+class TestSourceFormat(unittest.TestCase):
+ def test_closure(self):
+ x = 0
+
+ def inner(arg: x):
+ pass
+
+ anno = annotationlib.get_annotations(inner, format=annotationlib.Format.SOURCE)
+ self.assertEqual(anno, {"arg": "x"})
+
+ def test_function(self):
+ def f(x: int, y: doesntexist):
+ pass
+
+ anno = annotationlib.get_annotations(f, format=annotationlib.Format.SOURCE)
+ self.assertEqual(anno, {"x": "int", "y": "doesntexist"})
+
+ def test_expressions(self):
+ def f(
+ add: a + b,
+ sub: a - b,
+ mul: a * b,
+ matmul: a @ b,
+ truediv: a / b,
+ mod: a % b,
+ lshift: a << b,
+ rshift: a >> b,
+ or_: a | b,
+ xor: a ^ b,
+ and_: a & b,
+ floordiv: a // b,
+ pow_: a**b,
+ lt: a < b,
+ le: a <= b,
+ eq: a == b,
+ ne: a != b,
+ gt: a > b,
+ ge: a >= b,
+ invert: ~a,
+ neg: -a,
+ pos: +a,
+ getitem: a[b],
+ getattr: a.b,
+ call: a(b, *c, d=e), # **kwargs are not supported
+ *args: *a,
+ ):
+ pass
+
+ anno = annotationlib.get_annotations(f, format=annotationlib.Format.SOURCE)
+ self.assertEqual(
+ anno,
+ {
+ "add": "a + b",
+ "sub": "a - b",
+ "mul": "a * b",
+ "matmul": "a @ b",
+ "truediv": "a / b",
+ "mod": "a % b",
+ "lshift": "a << b",
+ "rshift": "a >> b",
+ "or_": "a | b",
+ "xor": "a ^ b",
+ "and_": "a & b",
+ "floordiv": "a // b",
+ "pow_": "a ** b",
+ "lt": "a < b",
+ "le": "a <= b",
+ "eq": "a == b",
+ "ne": "a != b",
+ "gt": "a > b",
+ "ge": "a >= b",
+ "invert": "~a",
+ "neg": "-a",
+ "pos": "+a",
+ "getitem": "a[b]",
+ "getattr": "a.b",
+ "call": "a(b, *c, d=e)",
+ "args": "*a",
+ },
+ )
+
+ def test_reverse_ops(self):
+ def f(
+ radd: 1 + a,
+ rsub: 1 - a,
+ rmul: 1 * a,
+ rmatmul: 1 @ a,
+ rtruediv: 1 / a,
+ rmod: 1 % a,
+ rlshift: 1 << a,
+ rrshift: 1 >> a,
+ ror: 1 | a,
+ rxor: 1 ^ a,
+ rand: 1 & a,
+ rfloordiv: 1 // a,
+ rpow: 1**a,
+ ):
+ pass
+
+ anno = annotationlib.get_annotations(f, format=annotationlib.Format.SOURCE)
+ self.assertEqual(
+ anno,
+ {
+ "radd": "1 + a",
+ "rsub": "1 - a",
+ "rmul": "1 * a",
+ "rmatmul": "1 @ a",
+ "rtruediv": "1 / a",
+ "rmod": "1 % a",
+ "rlshift": "1 << a",
+ "rrshift": "1 >> a",
+ "ror": "1 | a",
+ "rxor": "1 ^ a",
+ "rand": "1 & a",
+ "rfloordiv": "1 // a",
+ "rpow": "1 ** a",
+ },
+ )
+
+ def test_nested_expressions(self):
+ def f(
+ nested: list[Annotated[set[int], "set of ints", 4j]],
+ set: {a + b}, # single element because order is not guaranteed
+ dict: {a + b: c + d, "key": e + g},
+ list: [a, b, c],
+ tuple: (a, b, c),
+ slice: (a[b:c], a[b:c:d], a[:c], a[b:], a[:], a[::d], a[b::d]),
+ extended_slice: a[:, :, c:d],
+ unpack1: [*a],
+ unpack2: [*a, b, c],
+ ):
+ pass
+
+ anno = annotationlib.get_annotations(f, format=annotationlib.Format.SOURCE)
+ self.assertEqual(
+ anno,
+ {
+ "nested": "list[Annotated[set[int], 'set of ints', 4j]]",
+ "set": "{a + b}",
+ "dict": "{a + b: c + d, 'key': e + g}",
+ "list": "[a, b, c]",
+ "tuple": "(a, b, c)",
+ "slice": "(a[b:c], a[b:c:d], a[:c], a[b:], a[:], a[::d], a[b::d])",
+ "extended_slice": "a[:, :, c:d]",
+ "unpack1": "[*a]",
+ "unpack2": "[*a, b, c]",
+ },
+ )
+
+ def test_unsupported_operations(self):
+ format_msg = "Cannot stringify annotation containing string formatting"
+
+ def f(fstring: f"{a}"):
+ pass
+
+ with self.assertRaisesRegex(TypeError, format_msg):
+ annotationlib.get_annotations(f, format=annotationlib.Format.SOURCE)
+
+ def f(fstring_format: f"{a:02d}"):
+ pass
+
+ with self.assertRaisesRegex(TypeError, format_msg):
+ annotationlib.get_annotations(f, format=annotationlib.Format.SOURCE)
+
+
+class TestForwardRefClass(unittest.TestCase):
+ def test_special_attrs(self):
+ # Forward refs provide a different introspection API. __name__ and
+ # __qualname__ make little sense for forward refs as they can store
+ # complex typing expressions.
+ fr = annotationlib.ForwardRef("set[Any]")
+ self.assertFalse(hasattr(fr, "__name__"))
+ self.assertFalse(hasattr(fr, "__qualname__"))
+ self.assertEqual(fr.__module__, "annotationlib")
+ # Forward refs are currently unpicklable once they contain a code object.
+ fr.__forward_code__ # fill the cache
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ with self.assertRaises(TypeError):
+ pickle.dumps(fr, proto)
+
+
+class TestGetAnnotations(unittest.TestCase):
+ def test_builtin_type(self):
+ self.assertEqual(annotationlib.get_annotations(int), {})
+ self.assertEqual(annotationlib.get_annotations(object), {})
+
+ def test_custom_metaclass(self):
+ class Meta(type):
+ pass
+
+ class C(metaclass=Meta):
+ x: int
+
+ self.assertEqual(annotationlib.get_annotations(C), {"x": int})
+
+ def test_missing_dunder_dict(self):
+ class NoDict(type):
+ @property
+ def __dict__(cls):
+ raise AttributeError
+
+ b: str
+
+ class C1(metaclass=NoDict):
+ a: int
+
+ self.assertEqual(annotationlib.get_annotations(C1), {"a": int})
+ self.assertEqual(
+ annotationlib.get_annotations(C1, format=annotationlib.Format.FORWARDREF),
+ {"a": int},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(C1, format=annotationlib.Format.SOURCE),
+ {"a": "int"},
+ )
+ self.assertEqual(annotationlib.get_annotations(NoDict), {"b": str})
+ self.assertEqual(
+ annotationlib.get_annotations(NoDict, format=annotationlib.Format.FORWARDREF),
+ {"b": str},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(NoDict, format=annotationlib.Format.SOURCE),
+ {"b": "str"},
+ )
+
+ def test_format(self):
+ def f1(a: int):
+ pass
+
+ def f2(a: undefined):
+ pass
+
+ self.assertEqual(
+ annotationlib.get_annotations(f1, format=annotationlib.Format.VALUE),
+ {"a": int},
+ )
+ self.assertEqual(annotationlib.get_annotations(f1, format=1), {"a": int})
+
+ fwd = annotationlib.ForwardRef("undefined")
+ self.assertEqual(
+ annotationlib.get_annotations(f2, format=annotationlib.Format.FORWARDREF),
+ {"a": fwd},
+ )
+ self.assertEqual(annotationlib.get_annotations(f2, format=2), {"a": fwd})
+
+ self.assertEqual(
+ annotationlib.get_annotations(f1, format=annotationlib.Format.SOURCE),
+ {"a": "int"},
+ )
+ self.assertEqual(annotationlib.get_annotations(f1, format=3), {"a": "int"})
+
+ with self.assertRaises(ValueError):
+ annotationlib.get_annotations(f1, format=0)
+
+ with self.assertRaises(ValueError):
+ annotationlib.get_annotations(f1, format=4)
+
+ def test_custom_object_with_annotations(self):
+ class C:
+ def __init__(self):
+ self.__annotations__ = {"x": int, "y": str}
+
+ self.assertEqual(annotationlib.get_annotations(C()), {"x": int, "y": str})
+
+ def test_custom_format_eval_str(self):
+ def foo():
+ pass
+
+ with self.assertRaises(ValueError):
+ annotationlib.get_annotations(
+ foo, format=annotationlib.Format.FORWARDREF, eval_str=True
+ )
+ annotationlib.get_annotations(
+ foo, format=annotationlib.Format.SOURCE, eval_str=True
+ )
+
+ def test_stock_annotations(self):
+ def foo(a: int, b: str):
+ pass
+
+ for format in (annotationlib.Format.VALUE, annotationlib.Format.FORWARDREF):
+ with self.subTest(format=format):
+ self.assertEqual(
+ annotationlib.get_annotations(foo, format=format),
+ {"a": int, "b": str},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(foo, format=annotationlib.Format.SOURCE),
+ {"a": "int", "b": "str"},
+ )
+
+ foo.__annotations__ = {"a": "foo", "b": "str"}
+ for format in annotationlib.Format:
+ with self.subTest(format=format):
+ self.assertEqual(
+ annotationlib.get_annotations(foo, format=format),
+ {"a": "foo", "b": "str"},
+ )
+
+ self.assertEqual(
+ annotationlib.get_annotations(foo, eval_str=True, locals=locals()),
+ {"a": foo, "b": str},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(foo, eval_str=True, globals=locals()),
+ {"a": foo, "b": str},
+ )
+
+ def test_stock_annotations_in_module(self):
+ isa = inspect_stock_annotations
+
+ for kwargs in [
+ {},
+ {"eval_str": False},
+ {"format": annotationlib.Format.VALUE},
+ {"format": annotationlib.Format.FORWARDREF},
+ {"format": annotationlib.Format.VALUE, "eval_str": False},
+ {"format": annotationlib.Format.FORWARDREF, "eval_str": False},
+ ]:
+ with self.subTest(**kwargs):
+ self.assertEqual(
+ annotationlib.get_annotations(isa, **kwargs), {"a": int, "b": str}
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(isa.MyClass, **kwargs),
+ {"a": int, "b": str},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(isa.function, **kwargs),
+ {"a": int, "b": str, "return": isa.MyClass},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(isa.function2, **kwargs),
+ {"a": int, "b": "str", "c": isa.MyClass, "return": isa.MyClass},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(isa.function3, **kwargs),
+ {"a": "int", "b": "str", "c": "MyClass"},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(annotationlib, **kwargs), {}
+ ) # annotations module has no annotations
+ self.assertEqual(
+ annotationlib.get_annotations(isa.UnannotatedClass, **kwargs), {}
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(isa.unannotated_function, **kwargs),
+ {},
+ )
+
+ for kwargs in [
+ {"eval_str": True},
+ {"format": annotationlib.Format.VALUE, "eval_str": True},
+ ]:
+ with self.subTest(**kwargs):
+ self.assertEqual(
+ annotationlib.get_annotations(isa, **kwargs), {"a": int, "b": str}
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(isa.MyClass, **kwargs),
+ {"a": int, "b": str},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(isa.function, **kwargs),
+ {"a": int, "b": str, "return": isa.MyClass},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(isa.function2, **kwargs),
+ {"a": int, "b": str, "c": isa.MyClass, "return": isa.MyClass},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(isa.function3, **kwargs),
+ {"a": int, "b": str, "c": isa.MyClass},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(annotationlib, **kwargs), {}
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(isa.UnannotatedClass, **kwargs), {}
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(isa.unannotated_function, **kwargs),
+ {},
+ )
+
+ self.assertEqual(
+ annotationlib.get_annotations(isa, format=annotationlib.Format.SOURCE),
+ {"a": "int", "b": "str"},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(
+ isa.MyClass, format=annotationlib.Format.SOURCE
+ ),
+ {"a": "int", "b": "str"},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(
+ isa.function, format=annotationlib.Format.SOURCE
+ ),
+ {"a": "int", "b": "str", "return": "MyClass"},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(
+ isa.function2, format=annotationlib.Format.SOURCE
+ ),
+ {"a": "int", "b": "str", "c": "MyClass", "return": "MyClass"},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(
+ isa.function3, format=annotationlib.Format.SOURCE
+ ),
+ {"a": "int", "b": "str", "c": "MyClass"},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(
+ annotationlib, format=annotationlib.Format.SOURCE
+ ),
+ {},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(
+ isa.UnannotatedClass, format=annotationlib.Format.SOURCE
+ ),
+ {},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(
+ isa.unannotated_function, format=annotationlib.Format.SOURCE
+ ),
+ {},
+ )
+
+ def test_stock_annotations_on_wrapper(self):
+ isa = inspect_stock_annotations
+
+ wrapped = times_three(isa.function)
+ self.assertEqual(wrapped(1, "x"), isa.MyClass(3, "xxx"))
+ self.assertIsNot(wrapped.__globals__, isa.function.__globals__)
+ self.assertEqual(
+ annotationlib.get_annotations(wrapped),
+ {"a": int, "b": str, "return": isa.MyClass},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(
+ wrapped, format=annotationlib.Format.FORWARDREF
+ ),
+ {"a": int, "b": str, "return": isa.MyClass},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(wrapped, format=annotationlib.Format.SOURCE),
+ {"a": "int", "b": "str", "return": "MyClass"},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(wrapped, eval_str=True),
+ {"a": int, "b": str, "return": isa.MyClass},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(wrapped, eval_str=False),
+ {"a": int, "b": str, "return": isa.MyClass},
+ )
+
+ def test_stringized_annotations_in_module(self):
+ isa = inspect_stringized_annotations
+ for kwargs in [
+ {},
+ {"eval_str": False},
+ {"format": annotationlib.Format.VALUE},
+ {"format": annotationlib.Format.FORWARDREF},
+ {"format": annotationlib.Format.SOURCE},
+ {"format": annotationlib.Format.VALUE, "eval_str": False},
+ {"format": annotationlib.Format.FORWARDREF, "eval_str": False},
+ {"format": annotationlib.Format.SOURCE, "eval_str": False},
+ ]:
+ with self.subTest(**kwargs):
+ self.assertEqual(
+ annotationlib.get_annotations(isa, **kwargs),
+ {"a": "int", "b": "str"},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(isa.MyClass, **kwargs),
+ {"a": "int", "b": "str"},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(isa.function, **kwargs),
+ {"a": "int", "b": "str", "return": "MyClass"},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(isa.function2, **kwargs),
+ {"a": "int", "b": "'str'", "c": "MyClass", "return": "MyClass"},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(isa.function3, **kwargs),
+ {"a": "'int'", "b": "'str'", "c": "'MyClass'"},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(isa.UnannotatedClass, **kwargs), {}
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(isa.unannotated_function, **kwargs),
+ {},
+ )
+
+ for kwargs in [
+ {"eval_str": True},
+ {"format": annotationlib.Format.VALUE, "eval_str": True},
+ ]:
+ with self.subTest(**kwargs):
+ self.assertEqual(
+ annotationlib.get_annotations(isa, **kwargs), {"a": int, "b": str}
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(isa.MyClass, **kwargs),
+ {"a": int, "b": str},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(isa.function, **kwargs),
+ {"a": int, "b": str, "return": isa.MyClass},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(isa.function2, **kwargs),
+ {"a": int, "b": "str", "c": isa.MyClass, "return": isa.MyClass},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(isa.function3, **kwargs),
+ {"a": "int", "b": "str", "c": "MyClass"},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(isa.UnannotatedClass, **kwargs), {}
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(isa.unannotated_function, **kwargs),
+ {},
+ )
+
+ def test_stringized_annotations_in_empty_module(self):
+ isa2 = inspect_stringized_annotations_2
+ self.assertEqual(annotationlib.get_annotations(isa2), {})
+ self.assertEqual(annotationlib.get_annotations(isa2, eval_str=True), {})
+ self.assertEqual(annotationlib.get_annotations(isa2, eval_str=False), {})
+
+ def test_stringized_annotations_on_wrapper(self):
+ isa = inspect_stringized_annotations
+ wrapped = times_three(isa.function)
+ self.assertEqual(wrapped(1, "x"), isa.MyClass(3, "xxx"))
+ self.assertIsNot(wrapped.__globals__, isa.function.__globals__)
+ self.assertEqual(
+ annotationlib.get_annotations(wrapped),
+ {"a": "int", "b": "str", "return": "MyClass"},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(wrapped, eval_str=True),
+ {"a": int, "b": str, "return": isa.MyClass},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(wrapped, eval_str=False),
+ {"a": "int", "b": "str", "return": "MyClass"},
+ )
+
+ def test_stringized_annotations_on_class(self):
+ isa = inspect_stringized_annotations
+ # test that local namespace lookups work
+ self.assertEqual(
+ annotationlib.get_annotations(isa.MyClassWithLocalAnnotations),
+ {"x": "mytype"},
+ )
+ self.assertEqual(
+ annotationlib.get_annotations(
+ isa.MyClassWithLocalAnnotations, eval_str=True
+ ),
+ {"x": int},
+ )
+
+ def test_modify_annotations(self):
+ def f(x: int):
+ pass
+
+ self.assertEqual(annotationlib.get_annotations(f), {"x": int})
+ self.assertEqual(
+ annotationlib.get_annotations(f, format=annotationlib.Format.FORWARDREF),
+ {"x": int},
+ )
+
+ f.__annotations__["x"] = str
+ # The modification is reflected in VALUE (the default)
+ self.assertEqual(annotationlib.get_annotations(f), {"x": str})
+ # ... but not in FORWARDREF, which uses __annotate__
+ self.assertEqual(
+ annotationlib.get_annotations(f, format=annotationlib.Format.FORWARDREF),
+ {"x": int},
+ )
+
+ def test_pep695_generic_class_with_future_annotations(self):
+ ann_module695 = inspect_stringized_annotations_pep695
+ A_annotations = annotationlib.get_annotations(ann_module695.A, eval_str=True)
+ A_type_params = ann_module695.A.__type_params__
+ self.assertIs(A_annotations["x"], A_type_params[0])
+ self.assertEqual(A_annotations["y"].__args__[0], Unpack[A_type_params[1]])
+ self.assertIs(A_annotations["z"].__args__[0], A_type_params[2])
+
+ def test_pep695_generic_class_with_future_annotations_and_local_shadowing(self):
+ B_annotations = annotationlib.get_annotations(
+ inspect_stringized_annotations_pep695.B, eval_str=True
+ )
+ self.assertEqual(B_annotations, {"x": int, "y": str, "z": bytes})
+
+ def test_pep695_generic_class_with_future_annotations_name_clash_with_global_vars(self):
+ ann_module695 = inspect_stringized_annotations_pep695
+ C_annotations = annotationlib.get_annotations(ann_module695.C, eval_str=True)
+ self.assertEqual(
+ set(C_annotations.values()),
+ set(ann_module695.C.__type_params__)
+ )
+
+ def test_pep_695_generic_function_with_future_annotations(self):
+ ann_module695 = inspect_stringized_annotations_pep695
+ generic_func_annotations = annotationlib.get_annotations(
+ ann_module695.generic_function, eval_str=True
+ )
+ func_t_params = ann_module695.generic_function.__type_params__
+ self.assertEqual(
+ generic_func_annotations.keys(), {"x", "y", "z", "zz", "return"}
+ )
+ self.assertIs(generic_func_annotations["x"], func_t_params[0])
+ self.assertEqual(generic_func_annotations["y"], Unpack[func_t_params[1]])
+ self.assertIs(generic_func_annotations["z"].__origin__, func_t_params[2])
+ self.assertIs(generic_func_annotations["zz"].__origin__, func_t_params[2])
+
+ def test_pep_695_generic_function_with_future_annotations_name_clash_with_global_vars(self):
+ self.assertEqual(
+ set(
+ annotationlib.get_annotations(
+ inspect_stringized_annotations_pep695.generic_function_2,
+ eval_str=True
+ ).values()
+ ),
+ set(
+ inspect_stringized_annotations_pep695.generic_function_2.__type_params__
+ )
+ )
+
+ def test_pep_695_generic_method_with_future_annotations(self):
+ ann_module695 = inspect_stringized_annotations_pep695
+ generic_method_annotations = annotationlib.get_annotations(
+ ann_module695.D.generic_method, eval_str=True
+ )
+ params = {
+ param.__name__: param
+ for param in ann_module695.D.generic_method.__type_params__
+ }
+ self.assertEqual(
+ generic_method_annotations,
+ {"x": params["Foo"], "y": params["Bar"], "return": None}
+ )
+
+ def test_pep_695_generic_method_with_future_annotations_name_clash_with_global_vars(self):
+ self.assertEqual(
+ set(
+ annotationlib.get_annotations(
+ inspect_stringized_annotations_pep695.D.generic_method_2,
+ eval_str=True
+ ).values()
+ ),
+ set(
+ inspect_stringized_annotations_pep695.D.generic_method_2.__type_params__
+ )
+ )
+
+ def test_pep_695_generic_method_with_future_annotations_name_clash_with_global_and_local_vars(self):
+ self.assertEqual(
+ annotationlib.get_annotations(
+ inspect_stringized_annotations_pep695.E, eval_str=True
+ ),
+ {"x": str},
+ )
+
+ def test_pep_695_generics_with_future_annotations_nested_in_function(self):
+ results = inspect_stringized_annotations_pep695.nested()
+
+ self.assertEqual(
+ set(results.F_annotations.values()),
+ set(results.F.__type_params__)
+ )
+ self.assertEqual(
+ set(results.F_meth_annotations.values()),
+ set(results.F.generic_method.__type_params__)
+ )
+ self.assertNotEqual(
+ set(results.F_meth_annotations.values()),
+ set(results.F.__type_params__)
+ )
+ self.assertEqual(
+ set(results.F_meth_annotations.values()).intersection(results.F.__type_params__),
+ set()
+ )
+
+ self.assertEqual(results.G_annotations, {"x": str})
+
+ self.assertEqual(
+ set(results.generic_func_annotations.values()),
+ set(results.generic_func.__type_params__)
+ )
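For illustration, a condensed, self-contained sketch of the VALUE vs. FORWARDREF
behavior exercised by test_modify_annotations above (not part of the patch; it
assumes the new annotationlib module and a hypothetical function f):

    import annotationlib
    from annotationlib import Format

    def f(x: int):
        pass

    assert annotationlib.get_annotations(f) == {"x": int}

    f.__annotations__["x"] = str
    # VALUE (the default) reads the cached __annotations__ dict, so it sees the edit...
    assert annotationlib.get_annotations(f) == {"x": str}
    # ...while FORWARDREF re-runs __annotate__ and still reports the original annotation.
    assert annotationlib.get_annotations(f, format=Format.FORWARDREF) == {"x": int}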
diff --git a/Lib/test/test_dataclasses/__init__.py b/Lib/test/test_dataclasses/__init__.py
index ffb8bbe75c504f..b93c99d8c90bf3 100644
--- a/Lib/test/test_dataclasses/__init__.py
+++ b/Lib/test/test_dataclasses/__init__.py
@@ -4807,6 +4807,16 @@ def test_make_dataclass(self):
self.assertTrue(fields(B)[0].kw_only)
self.assertFalse(fields(B)[1].kw_only)
+ def test_deferred_annotations(self):
+ @dataclass
+ class A:
+ x: undefined
+ y: ClassVar[undefined]
+
+ fs = fields(A)
+ self.assertEqual(len(fs), 1)
+ self.assertEqual(fs[0].name, 'x')
+
if __name__ == '__main__':
unittest.main()
diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py
index 492a16a8c7ff45..837f3795f0842d 100644
--- a/Lib/test/test_functools.py
+++ b/Lib/test/test_functools.py
@@ -741,6 +741,26 @@ def wrapper(*args): pass
self.assertEqual(wrapper.__annotations__, {})
self.assertEqual(wrapper.__type_params__, ())
+ def test_update_wrapper_annotations(self):
+ def inner(x: int): pass
+ def wrapper(*args): pass
+
+ functools.update_wrapper(wrapper, inner)
+ self.assertEqual(wrapper.__annotations__, {'x': int})
+ self.assertIs(wrapper.__annotate__, inner.__annotate__)
+
+ def with_forward_ref(x: undefined): pass
+ def wrapper(*args): pass
+
+ functools.update_wrapper(wrapper, with_forward_ref)
+
+ self.assertIs(wrapper.__annotate__, with_forward_ref.__annotate__)
+ with self.assertRaises(NameError):
+ wrapper.__annotations__
+
+ undefined = str
+ self.assertEqual(wrapper.__annotations__, {'x': undefined})
+
class TestWraps(TestUpdateWrapper):
@@ -3059,6 +3079,27 @@ def _(arg: typing.List[float] | bytes):
self.assertEqual(f(""), "default")
self.assertEqual(f(b""), "default")
+ def test_forward_reference(self):
+ @functools.singledispatch
+ def f(arg, arg2=None):
+ return "default"
+
+ @f.register
+ def _(arg: str, arg2: undefined = None):
+ return "forward reference"
+
+ self.assertEqual(f(1), "default")
+ self.assertEqual(f(""), "forward reference")
+
+ def test_unresolved_forward_reference(self):
+ @functools.singledispatch
+ def f(arg):
+ return "default"
+
+ with self.assertRaisesRegex(TypeError, "is an unresolved forward reference"):
+ @f.register
+ def _(arg: undefined):
+ return "forward reference"
class CachedCostItem:
_cost = 1
diff --git a/Lib/test/test_grammar.py b/Lib/test/test_grammar.py
index 5b7a639c025a0f..6a841587f49166 100644
--- a/Lib/test/test_grammar.py
+++ b/Lib/test/test_grammar.py
@@ -3,6 +3,7 @@
from test.support import check_syntax_error
from test.support import import_helper
+import annotationlib
import inspect
import unittest
import sys
@@ -459,7 +460,7 @@ def test_var_annot_simple_exec(self):
gns = {}; lns = {}
exec("'docstring'\n"
"x: int = 5\n", gns, lns)
- self.assertEqual(lns["__annotate__"](1), {'x': int})
+ self.assertEqual(lns["__annotate__"](annotationlib.Format.VALUE), {'x': int})
with self.assertRaises(KeyError):
gns['__annotate__']
diff --git a/Lib/test/test_inspect/test_inspect.py b/Lib/test/test_inspect/test_inspect.py
index d39c3ccdc847bd..5521528a524762 100644
--- a/Lib/test/test_inspect/test_inspect.py
+++ b/Lib/test/test_inspect/test_inspect.py
@@ -45,10 +45,7 @@
from test.test_inspect import inspect_fodder as mod
from test.test_inspect import inspect_fodder2 as mod2
-from test.test_inspect import inspect_stock_annotations
from test.test_inspect import inspect_stringized_annotations
-from test.test_inspect import inspect_stringized_annotations_2
-from test.test_inspect import inspect_stringized_annotations_pep695
# Functions tested in this suite:
@@ -126,7 +123,7 @@ def istest(self, predicate, exp):
self.assertFalse(other(obj), 'not %s(%s)' % (other.__name__, exp))
def test__all__(self):
- support.check__all__(self, inspect, not_exported=("modulesbyfile",))
+ support.check__all__(self, inspect, not_exported=("modulesbyfile",), extra=("get_annotations",))
def generator_function_example(self):
for i in range(2):
@@ -1595,216 +1592,6 @@ class C(metaclass=M):
attrs = [a[0] for a in inspect.getmembers(C)]
self.assertNotIn('missing', attrs)
- def test_get_annotations_with_stock_annotations(self):
- def foo(a:int, b:str): pass
- self.assertEqual(inspect.get_annotations(foo), {'a': int, 'b': str})
-
- foo.__annotations__ = {'a': 'foo', 'b':'str'}
- self.assertEqual(inspect.get_annotations(foo), {'a': 'foo', 'b': 'str'})
-
- self.assertEqual(inspect.get_annotations(foo, eval_str=True, locals=locals()), {'a': foo, 'b': str})
- self.assertEqual(inspect.get_annotations(foo, eval_str=True, globals=locals()), {'a': foo, 'b': str})
-
- isa = inspect_stock_annotations
- self.assertEqual(inspect.get_annotations(isa), {'a': int, 'b': str})
- self.assertEqual(inspect.get_annotations(isa.MyClass), {'a': int, 'b': str})
- self.assertEqual(inspect.get_annotations(isa.function), {'a': int, 'b': str, 'return': isa.MyClass})
- self.assertEqual(inspect.get_annotations(isa.function2), {'a': int, 'b': 'str', 'c': isa.MyClass, 'return': isa.MyClass})
- self.assertEqual(inspect.get_annotations(isa.function3), {'a': 'int', 'b': 'str', 'c': 'MyClass'})
- self.assertEqual(inspect.get_annotations(inspect), {}) # inspect module has no annotations
- self.assertEqual(inspect.get_annotations(isa.UnannotatedClass), {})
- self.assertEqual(inspect.get_annotations(isa.unannotated_function), {})
-
- self.assertEqual(inspect.get_annotations(isa, eval_str=True), {'a': int, 'b': str})
- self.assertEqual(inspect.get_annotations(isa.MyClass, eval_str=True), {'a': int, 'b': str})
- self.assertEqual(inspect.get_annotations(isa.function, eval_str=True), {'a': int, 'b': str, 'return': isa.MyClass})
- self.assertEqual(inspect.get_annotations(isa.function2, eval_str=True), {'a': int, 'b': str, 'c': isa.MyClass, 'return': isa.MyClass})
- self.assertEqual(inspect.get_annotations(isa.function3, eval_str=True), {'a': int, 'b': str, 'c': isa.MyClass})
- self.assertEqual(inspect.get_annotations(inspect, eval_str=True), {})
- self.assertEqual(inspect.get_annotations(isa.UnannotatedClass, eval_str=True), {})
- self.assertEqual(inspect.get_annotations(isa.unannotated_function, eval_str=True), {})
-
- self.assertEqual(inspect.get_annotations(isa, eval_str=False), {'a': int, 'b': str})
- self.assertEqual(inspect.get_annotations(isa.MyClass, eval_str=False), {'a': int, 'b': str})
- self.assertEqual(inspect.get_annotations(isa.function, eval_str=False), {'a': int, 'b': str, 'return': isa.MyClass})
- self.assertEqual(inspect.get_annotations(isa.function2, eval_str=False), {'a': int, 'b': 'str', 'c': isa.MyClass, 'return': isa.MyClass})
- self.assertEqual(inspect.get_annotations(isa.function3, eval_str=False), {'a': 'int', 'b': 'str', 'c': 'MyClass'})
- self.assertEqual(inspect.get_annotations(inspect, eval_str=False), {})
- self.assertEqual(inspect.get_annotations(isa.UnannotatedClass, eval_str=False), {})
- self.assertEqual(inspect.get_annotations(isa.unannotated_function, eval_str=False), {})
-
- def times_three(fn):
- @functools.wraps(fn)
- def wrapper(a, b):
- return fn(a*3, b*3)
- return wrapper
-
- wrapped = times_three(isa.function)
- self.assertEqual(wrapped(1, 'x'), isa.MyClass(3, 'xxx'))
- self.assertIsNot(wrapped.__globals__, isa.function.__globals__)
- self.assertEqual(inspect.get_annotations(wrapped), {'a': int, 'b': str, 'return': isa.MyClass})
- self.assertEqual(inspect.get_annotations(wrapped, eval_str=True), {'a': int, 'b': str, 'return': isa.MyClass})
- self.assertEqual(inspect.get_annotations(wrapped, eval_str=False), {'a': int, 'b': str, 'return': isa.MyClass})
-
- def test_get_annotations_with_stringized_annotations(self):
- isa = inspect_stringized_annotations
- self.assertEqual(inspect.get_annotations(isa), {'a': 'int', 'b': 'str'})
- self.assertEqual(inspect.get_annotations(isa.MyClass), {'a': 'int', 'b': 'str'})
- self.assertEqual(inspect.get_annotations(isa.function), {'a': 'int', 'b': 'str', 'return': 'MyClass'})
- self.assertEqual(inspect.get_annotations(isa.function2), {'a': 'int', 'b': "'str'", 'c': 'MyClass', 'return': 'MyClass'})
- self.assertEqual(inspect.get_annotations(isa.function3), {'a': "'int'", 'b': "'str'", 'c': "'MyClass'"})
- self.assertEqual(inspect.get_annotations(isa.UnannotatedClass), {})
- self.assertEqual(inspect.get_annotations(isa.unannotated_function), {})
-
- self.assertEqual(inspect.get_annotations(isa, eval_str=True), {'a': int, 'b': str})
- self.assertEqual(inspect.get_annotations(isa.MyClass, eval_str=True), {'a': int, 'b': str})
- self.assertEqual(inspect.get_annotations(isa.function, eval_str=True), {'a': int, 'b': str, 'return': isa.MyClass})
- self.assertEqual(inspect.get_annotations(isa.function2, eval_str=True), {'a': int, 'b': 'str', 'c': isa.MyClass, 'return': isa.MyClass})
- self.assertEqual(inspect.get_annotations(isa.function3, eval_str=True), {'a': 'int', 'b': 'str', 'c': 'MyClass'})
- self.assertEqual(inspect.get_annotations(isa.UnannotatedClass, eval_str=True), {})
- self.assertEqual(inspect.get_annotations(isa.unannotated_function, eval_str=True), {})
-
- self.assertEqual(inspect.get_annotations(isa, eval_str=False), {'a': 'int', 'b': 'str'})
- self.assertEqual(inspect.get_annotations(isa.MyClass, eval_str=False), {'a': 'int', 'b': 'str'})
- self.assertEqual(inspect.get_annotations(isa.function, eval_str=False), {'a': 'int', 'b': 'str', 'return': 'MyClass'})
- self.assertEqual(inspect.get_annotations(isa.function2, eval_str=False), {'a': 'int', 'b': "'str'", 'c': 'MyClass', 'return': 'MyClass'})
- self.assertEqual(inspect.get_annotations(isa.function3, eval_str=False), {'a': "'int'", 'b': "'str'", 'c': "'MyClass'"})
- self.assertEqual(inspect.get_annotations(isa.UnannotatedClass, eval_str=False), {})
- self.assertEqual(inspect.get_annotations(isa.unannotated_function, eval_str=False), {})
-
- isa2 = inspect_stringized_annotations_2
- self.assertEqual(inspect.get_annotations(isa2), {})
- self.assertEqual(inspect.get_annotations(isa2, eval_str=True), {})
- self.assertEqual(inspect.get_annotations(isa2, eval_str=False), {})
-
- def times_three(fn):
- @functools.wraps(fn)
- def wrapper(a, b):
- return fn(a*3, b*3)
- return wrapper
-
- wrapped = times_three(isa.function)
- self.assertEqual(wrapped(1, 'x'), isa.MyClass(3, 'xxx'))
- self.assertIsNot(wrapped.__globals__, isa.function.__globals__)
- self.assertEqual(inspect.get_annotations(wrapped), {'a': 'int', 'b': 'str', 'return': 'MyClass'})
- self.assertEqual(inspect.get_annotations(wrapped, eval_str=True), {'a': int, 'b': str, 'return': isa.MyClass})
- self.assertEqual(inspect.get_annotations(wrapped, eval_str=False), {'a': 'int', 'b': 'str', 'return': 'MyClass'})
-
- # test that local namespace lookups work
- self.assertEqual(inspect.get_annotations(isa.MyClassWithLocalAnnotations), {'x': 'mytype'})
- self.assertEqual(inspect.get_annotations(isa.MyClassWithLocalAnnotations, eval_str=True), {'x': int})
-
- def test_pep695_generic_class_with_future_annotations(self):
- ann_module695 = inspect_stringized_annotations_pep695
- A_annotations = inspect.get_annotations(ann_module695.A, eval_str=True)
- A_type_params = ann_module695.A.__type_params__
- self.assertIs(A_annotations["x"], A_type_params[0])
- self.assertEqual(A_annotations["y"].__args__[0], Unpack[A_type_params[1]])
- self.assertIs(A_annotations["z"].__args__[0], A_type_params[2])
-
- def test_pep695_generic_class_with_future_annotations_and_local_shadowing(self):
- B_annotations = inspect.get_annotations(
- inspect_stringized_annotations_pep695.B, eval_str=True
- )
- self.assertEqual(B_annotations, {"x": int, "y": str, "z": bytes})
-
- def test_pep695_generic_class_with_future_annotations_name_clash_with_global_vars(self):
- ann_module695 = inspect_stringized_annotations_pep695
- C_annotations = inspect.get_annotations(ann_module695.C, eval_str=True)
- self.assertEqual(
- set(C_annotations.values()),
- set(ann_module695.C.__type_params__)
- )
-
- def test_pep_695_generic_function_with_future_annotations(self):
- ann_module695 = inspect_stringized_annotations_pep695
- generic_func_annotations = inspect.get_annotations(
- ann_module695.generic_function, eval_str=True
- )
- func_t_params = ann_module695.generic_function.__type_params__
- self.assertEqual(
- generic_func_annotations.keys(), {"x", "y", "z", "zz", "return"}
- )
- self.assertIs(generic_func_annotations["x"], func_t_params[0])
- self.assertEqual(generic_func_annotations["y"], Unpack[func_t_params[1]])
- self.assertIs(generic_func_annotations["z"].__origin__, func_t_params[2])
- self.assertIs(generic_func_annotations["zz"].__origin__, func_t_params[2])
-
- def test_pep_695_generic_function_with_future_annotations_name_clash_with_global_vars(self):
- self.assertEqual(
- set(
- inspect.get_annotations(
- inspect_stringized_annotations_pep695.generic_function_2,
- eval_str=True
- ).values()
- ),
- set(
- inspect_stringized_annotations_pep695.generic_function_2.__type_params__
- )
- )
-
- def test_pep_695_generic_method_with_future_annotations(self):
- ann_module695 = inspect_stringized_annotations_pep695
- generic_method_annotations = inspect.get_annotations(
- ann_module695.D.generic_method, eval_str=True
- )
- params = {
- param.__name__: param
- for param in ann_module695.D.generic_method.__type_params__
- }
- self.assertEqual(
- generic_method_annotations,
- {"x": params["Foo"], "y": params["Bar"], "return": None}
- )
-
- def test_pep_695_generic_method_with_future_annotations_name_clash_with_global_vars(self):
- self.assertEqual(
- set(
- inspect.get_annotations(
- inspect_stringized_annotations_pep695.D.generic_method_2,
- eval_str=True
- ).values()
- ),
- set(
- inspect_stringized_annotations_pep695.D.generic_method_2.__type_params__
- )
- )
-
- def test_pep_695_generic_method_with_future_annotations_name_clash_with_global_and_local_vars(self):
- self.assertEqual(
- inspect.get_annotations(
- inspect_stringized_annotations_pep695.E, eval_str=True
- ),
- {"x": str},
- )
-
- def test_pep_695_generics_with_future_annotations_nested_in_function(self):
- results = inspect_stringized_annotations_pep695.nested()
-
- self.assertEqual(
- set(results.F_annotations.values()),
- set(results.F.__type_params__)
- )
- self.assertEqual(
- set(results.F_meth_annotations.values()),
- set(results.F.generic_method.__type_params__)
- )
- self.assertNotEqual(
- set(results.F_meth_annotations.values()),
- set(results.F.__type_params__)
- )
- self.assertEqual(
- set(results.F_meth_annotations.values()).intersection(results.F.__type_params__),
- set()
- )
-
- self.assertEqual(results.G_annotations, {"x": str})
-
- self.assertEqual(
- set(results.generic_func_annotations.values()),
- set(results.generic_func.__type_params__)
- )
-
class TestFormatAnnotation(unittest.TestCase):
def test_typing_replacement(self):
diff --git a/Lib/test/test_type_annotations.py b/Lib/test/test_type_annotations.py
index a9be1f5aa84681..91082e6b23c04b 100644
--- a/Lib/test/test_type_annotations.py
+++ b/Lib/test/test_type_annotations.py
@@ -1,12 +1,9 @@
+import annotationlib
import textwrap
import types
import unittest
from test.support import run_code, check_syntax_error
-VALUE = 1
-FORWARDREF = 2
-SOURCE = 3
-
class TypeAnnotationTests(unittest.TestCase):
@@ -376,12 +373,12 @@ class X:
self.assertIsInstance(annotate, types.FunctionType)
self.assertEqual(annotate.__name__, "__annotate__")
with self.assertRaises(NotImplementedError):
- annotate(FORWARDREF)
+ annotate(annotationlib.Format.FORWARDREF)
with self.assertRaises(NotImplementedError):
- annotate(SOURCE)
+ annotate(annotationlib.Format.SOURCE)
with self.assertRaises(NotImplementedError):
annotate(None)
- self.assertEqual(annotate(VALUE), {"x": int})
+ self.assertEqual(annotate(annotationlib.Format.VALUE), {"x": int})
def test_comprehension_in_annotation(self):
# This crashed in an earlier version of the code
@@ -398,7 +395,7 @@ def f(x: int) -> int: pass
f = ns["f"]
self.assertIsInstance(f.__annotate__, types.FunctionType)
annos = {"x": "int", "return": "int"}
- self.assertEqual(f.__annotate__(VALUE), annos)
+ self.assertEqual(f.__annotate__(annotationlib.Format.VALUE), annos)
self.assertEqual(f.__annotations__, annos)
def test_name_clash_with_format(self):
diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py
index a931da55908236..290b3c63a762e9 100644
--- a/Lib/test/test_typing.py
+++ b/Lib/test/test_typing.py
@@ -1,3 +1,4 @@
+import annotationlib
import contextlib
import collections
import collections.abc
@@ -45,7 +46,7 @@
import weakref
import types
-from test.support import captured_stderr, cpython_only, infinite_recursion, requires_docstrings, import_helper
+from test.support import captured_stderr, cpython_only, infinite_recursion, requires_docstrings, import_helper, run_code
from test.typinganndata import ann_module695, mod_generics_cache, _typed_dict_helper
@@ -7812,6 +7813,48 @@ class XMethBad2(NamedTuple):
def _source(self):
return 'no chance for this as well'
+ def test_annotation_type_check(self):
+ # These are rejected by _type_check
+ with self.assertRaises(TypeError):
+ class X(NamedTuple):
+ a: Final
+ with self.assertRaises(TypeError):
+ class Y(NamedTuple):
+ a: (1, 2)
+
+ # Conversion by _type_convert
+ class Z(NamedTuple):
+ a: None
+ b: "str"
+ annos = {'a': type(None), 'b': ForwardRef("str")}
+ self.assertEqual(Z.__annotations__, annos)
+ self.assertEqual(Z.__annotate__(annotationlib.Format.VALUE), annos)
+ self.assertEqual(Z.__annotate__(annotationlib.Format.FORWARDREF), annos)
+ self.assertEqual(Z.__annotate__(annotationlib.Format.SOURCE), {"a": "None", "b": "str"})
+
+ def test_future_annotations(self):
+ code = """
+ from __future__ import annotations
+ from typing import NamedTuple
+ class X(NamedTuple):
+ a: int
+ b: None
+ """
+ ns = run_code(textwrap.dedent(code))
+ X = ns['X']
+ self.assertEqual(X.__annotations__, {'a': ForwardRef("int"), 'b': ForwardRef("None")})
+
+ def test_deferred_annotations(self):
+ class X(NamedTuple):
+ y: undefined
+
+ self.assertEqual(X._fields, ('y',))
+ with self.assertRaises(NameError):
+ X.__annotations__
+
+ undefined = int
+ self.assertEqual(X.__annotations__, {'y': int})
+
def test_multiple_inheritance(self):
class A:
pass
@@ -8126,7 +8169,11 @@ def test_basics_functional_syntax(self):
self.assertEqual(Emp.__name__, 'Emp')
self.assertEqual(Emp.__module__, __name__)
self.assertEqual(Emp.__bases__, (dict,))
- self.assertEqual(Emp.__annotations__, {'name': str, 'id': int})
+ annos = {'name': str, 'id': int}
+ self.assertEqual(Emp.__annotations__, annos)
+ self.assertEqual(Emp.__annotate__(annotationlib.Format.VALUE), annos)
+ self.assertEqual(Emp.__annotate__(annotationlib.Format.FORWARDREF), annos)
+ self.assertEqual(Emp.__annotate__(annotationlib.Format.SOURCE), {'name': 'str', 'id': 'int'})
self.assertEqual(Emp.__total__, True)
self.assertEqual(Emp.__required_keys__, {'name', 'id'})
self.assertIsInstance(Emp.__required_keys__, frozenset)
@@ -8487,6 +8534,8 @@ class A[T](TypedDict):
self.assertEqual(A.__bases__, (Generic, dict))
self.assertEqual(A.__orig_bases__, (TypedDict, Generic[T]))
self.assertEqual(A.__mro__, (A, Generic, dict, object))
+ self.assertEqual(A.__annotations__, {'a': T})
+ self.assertEqual(A.__annotate__(annotationlib.Format.SOURCE), {'a': 'T'})
self.assertEqual(A.__parameters__, (T,))
self.assertEqual(A[str].__parameters__, ())
self.assertEqual(A[str].__args__, (str,))
@@ -8498,6 +8547,8 @@ class A(TypedDict, Generic[T]):
self.assertEqual(A.__bases__, (Generic, dict))
self.assertEqual(A.__orig_bases__, (TypedDict, Generic[T]))
self.assertEqual(A.__mro__, (A, Generic, dict, object))
+ self.assertEqual(A.__annotations__, {'a': T})
+ self.assertEqual(A.__annotate__(annotationlib.Format.SOURCE), {'a': 'T'})
self.assertEqual(A.__parameters__, (T,))
self.assertEqual(A[str].__parameters__, ())
self.assertEqual(A[str].__args__, (str,))
@@ -8508,6 +8559,8 @@ class A2(Generic[T], TypedDict):
self.assertEqual(A2.__bases__, (Generic, dict))
self.assertEqual(A2.__orig_bases__, (Generic[T], TypedDict))
self.assertEqual(A2.__mro__, (A2, Generic, dict, object))
+ self.assertEqual(A2.__annotations__, {'a': T})
+ self.assertEqual(A2.__annotate__(annotationlib.Format.SOURCE), {'a': 'T'})
self.assertEqual(A2.__parameters__, (T,))
self.assertEqual(A2[str].__parameters__, ())
self.assertEqual(A2[str].__args__, (str,))
@@ -8518,6 +8571,8 @@ class B(A[KT], total=False):
self.assertEqual(B.__bases__, (Generic, dict))
self.assertEqual(B.__orig_bases__, (A[KT],))
self.assertEqual(B.__mro__, (B, Generic, dict, object))
+ self.assertEqual(B.__annotations__, {'a': T, 'b': KT})
+ self.assertEqual(B.__annotate__(annotationlib.Format.SOURCE), {'a': 'T', 'b': 'KT'})
self.assertEqual(B.__parameters__, (KT,))
self.assertEqual(B.__total__, False)
self.assertEqual(B.__optional_keys__, frozenset(['b']))
@@ -8542,6 +8597,11 @@ class C(B[int]):
'b': KT,
'c': int,
})
+ self.assertEqual(C.__annotate__(annotationlib.Format.SOURCE), {
+ 'a': 'T',
+ 'b': 'KT',
+ 'c': 'int',
+ })
with self.assertRaises(TypeError):
C[str]
@@ -8561,6 +8621,11 @@ class Point3D(Point2DGeneric[T], Generic[T, KT]):
'b': T,
'c': KT,
})
+ self.assertEqual(Point3D.__annotate__(annotationlib.Format.SOURCE), {
+ 'a': 'T',
+ 'b': 'T',
+ 'c': 'KT',
+ })
self.assertEqual(Point3D[int, str].__origin__, Point3D)
with self.assertRaises(TypeError):
@@ -8592,6 +8657,11 @@ class WithImplicitAny(B):
'b': KT,
'c': int,
})
+ self.assertEqual(WithImplicitAny.__annotate__(annotationlib.Format.SOURCE), {
+ 'a': 'T',
+ 'b': 'KT',
+ 'c': 'int',
+ })
with self.assertRaises(TypeError):
WithImplicitAny[str]
@@ -8748,6 +8818,54 @@ class AllTheThings(TypedDict):
},
)
+ def test_annotations(self):
+ # _type_check is applied
+ with self.assertRaisesRegex(TypeError, "Plain typing.Final is not valid as type argument"):
+ class X(TypedDict):
+ a: Final
+
+ # _type_convert is applied
+ class Y(TypedDict):
+ a: None
+ b: "int"
+ fwdref = ForwardRef('int', module='test.test_typing')
+ self.assertEqual(Y.__annotations__, {'a': type(None), 'b': fwdref})
+ self.assertEqual(Y.__annotate__(annotationlib.Format.FORWARDREF), {'a': type(None), 'b': fwdref})
+
+ # _type_check is also applied later
+ class Z(TypedDict):
+ a: undefined
+
+ with self.assertRaises(NameError):
+ Z.__annotations__
+
+ undefined = Final
+ with self.assertRaisesRegex(TypeError, "Plain typing.Final is not valid as type argument"):
+ Z.__annotations__
+
+ undefined = None
+ self.assertEqual(Z.__annotations__, {'a': type(None)})
+
+ def test_deferred_evaluation(self):
+ class A(TypedDict):
+ x: NotRequired[undefined]
+ y: ReadOnly[undefined]
+ z: Required[undefined]
+
+ self.assertEqual(A.__required_keys__, frozenset({'y', 'z'}))
+ self.assertEqual(A.__optional_keys__, frozenset({'x'}))
+ self.assertEqual(A.__readonly_keys__, frozenset({'y'}))
+ self.assertEqual(A.__mutable_keys__, frozenset({'x', 'z'}))
+
+ with self.assertRaises(NameError):
+ A.__annotations__
+
+ self.assertEqual(
+ A.__annotate__(annotationlib.Format.SOURCE),
+ {'x': 'NotRequired[undefined]', 'y': 'ReadOnly[undefined]',
+ 'z': 'Required[undefined]'},
+ )
+
class RequiredTests(BaseTestCase):
@@ -10075,7 +10193,6 @@ def test_special_attrs(self):
typing.ClassVar: 'ClassVar',
typing.Concatenate: 'Concatenate',
typing.Final: 'Final',
- typing.ForwardRef: 'ForwardRef',
typing.Literal: 'Literal',
typing.NewType: 'NewType',
typing.NoReturn: 'NoReturn',
@@ -10087,7 +10204,7 @@ def test_special_attrs(self):
typing.TypeVar: 'TypeVar',
typing.Union: 'Union',
typing.Self: 'Self',
- # Subscribed special forms
+ # Subscripted special forms
typing.Annotated[Any, "Annotation"]: 'Annotated',
typing.Annotated[int, 'Annotation']: 'Annotated',
typing.ClassVar[Any]: 'ClassVar',
@@ -10102,7 +10219,6 @@ def test_special_attrs(self):
typing.Union[Any]: 'Any',
typing.Union[int, float]: 'Union',
# Incompatible special forms (tested in test_special_attrs2)
- # - typing.ForwardRef('set[Any]')
# - typing.NewType('TypeName', Any)
# - typing.ParamSpec('SpecialAttrsP')
# - typing.TypeVar('T')
@@ -10121,18 +10237,6 @@ def test_special_attrs(self):
TypeName = typing.NewType('SpecialAttrsTests.TypeName', Any)
def test_special_attrs2(self):
- # Forward refs provide a different introspection API. __name__ and
- # __qualname__ make little sense for forward refs as they can store
- # complex typing expressions.
- fr = typing.ForwardRef('set[Any]')
- self.assertFalse(hasattr(fr, '__name__'))
- self.assertFalse(hasattr(fr, '__qualname__'))
- self.assertEqual(fr.__module__, 'typing')
- # Forward refs are currently unpicklable.
- for proto in range(pickle.HIGHEST_PROTOCOL + 1):
- with self.assertRaises(TypeError):
- pickle.dumps(fr, proto)
-
self.assertEqual(SpecialAttrsTests.TypeName.__name__, 'TypeName')
self.assertEqual(
SpecialAttrsTests.TypeName.__qualname__,
diff --git a/Lib/typing.py b/Lib/typing.py
index bc17d136082891..626053d8166160 100644
--- a/Lib/typing.py
+++ b/Lib/typing.py
@@ -19,6 +19,8 @@
"""
from abc import abstractmethod, ABCMeta
+import annotationlib
+from annotationlib import ForwardRef
import collections
from collections import defaultdict
import collections.abc
@@ -125,6 +127,7 @@
'cast',
'clear_overloads',
'dataclass_transform',
+ 'evaluate_forward_ref',
'final',
'get_args',
'get_origin',
@@ -165,7 +168,7 @@ def _type_convert(arg, module=None, *, allow_special_forms=False):
if arg is None:
return type(None)
if isinstance(arg, str):
- return ForwardRef(arg, module=module, is_class=allow_special_forms)
+ return _make_forward_ref(arg, module=module, is_class=allow_special_forms)
return arg
@@ -459,7 +462,8 @@ def __repr__(self):
_sentinel = _Sentinel()
-def _eval_type(t, globalns, localns, type_params=_sentinel, *, recursive_guard=frozenset()):
+def _eval_type(t, globalns, localns, type_params=_sentinel, *, recursive_guard=frozenset(),
+ format=annotationlib.Format.VALUE, owner=None):
"""Evaluate all forward references in the given type t.
For use of globalns and localns see the docstring for get_type_hints().
@@ -470,11 +474,13 @@ def _eval_type(t, globalns, localns, type_params=_sentinel, *, recursive_guard=f
_deprecation_warning_for_no_type_params_passed("typing._eval_type")
type_params = ()
if isinstance(t, ForwardRef):
- return t._evaluate(globalns, localns, type_params, recursive_guard=recursive_guard)
+ return evaluate_forward_ref(t, globals=globalns, locals=localns,
+ type_params=type_params, owner=owner,
+ _recursive_guard=recursive_guard, format=format)
if isinstance(t, (_GenericAlias, GenericAlias, types.UnionType)):
if isinstance(t, GenericAlias):
args = tuple(
- ForwardRef(arg) if isinstance(arg, str) else arg
+ _make_forward_ref(arg) if isinstance(arg, str) else arg
for arg in t.__args__
)
is_unpacked = t.__unpacked__
@@ -487,7 +493,8 @@ def _eval_type(t, globalns, localns, type_params=_sentinel, *, recursive_guard=f
ev_args = tuple(
_eval_type(
- a, globalns, localns, type_params, recursive_guard=recursive_guard
+ a, globalns, localns, type_params, recursive_guard=recursive_guard,
+ format=format, owner=owner,
)
for a in t.__args__
)
@@ -1011,111 +1018,77 @@ def run(arg: Child | Unrelated):
return _GenericAlias(self, (item,))
-class ForwardRef(_Final, _root=True):
- """Internal wrapper to hold a forward reference."""
+def _make_forward_ref(code, **kwargs):
+ forward_ref = ForwardRef(code, **kwargs)
+ # For compatibility, eagerly compile the forwardref's code.
+ forward_ref.__forward_code__
+ return forward_ref
- __slots__ = ('__forward_arg__', '__forward_code__',
- '__forward_evaluated__', '__forward_value__',
- '__forward_is_argument__', '__forward_is_class__',
- '__forward_module__')
- def __init__(self, arg, is_argument=True, module=None, *, is_class=False):
- if not isinstance(arg, str):
- raise TypeError(f"Forward reference must be a string -- got {arg!r}")
-
- # If we do `def f(*args: *Ts)`, then we'll have `arg = '*Ts'`.
- # Unfortunately, this isn't a valid expression on its own, so we
- # do the unpacking manually.
- if arg.startswith('*'):
- arg_to_compile = f'({arg},)[0]' # E.g. (*Ts,)[0] or (*tuple[int, int],)[0]
- else:
- arg_to_compile = arg
- try:
- code = compile(arg_to_compile, '', 'eval')
- except SyntaxError:
- raise SyntaxError(f"Forward reference must be an expression -- got {arg!r}")
-
- self.__forward_arg__ = arg
- self.__forward_code__ = code
- self.__forward_evaluated__ = False
- self.__forward_value__ = None
- self.__forward_is_argument__ = is_argument
- self.__forward_is_class__ = is_class
- self.__forward_module__ = module
-
- def _evaluate(self, globalns, localns, type_params=_sentinel, *, recursive_guard):
- if type_params is _sentinel:
- _deprecation_warning_for_no_type_params_passed("typing.ForwardRef._evaluate")
- type_params = ()
- if self.__forward_arg__ in recursive_guard:
- return self
- if not self.__forward_evaluated__ or localns is not globalns:
- if globalns is None and localns is None:
- globalns = localns = {}
- elif globalns is None:
- globalns = localns
- elif localns is None:
- localns = globalns
- if self.__forward_module__ is not None:
- globalns = getattr(
- sys.modules.get(self.__forward_module__, None), '__dict__', globalns
- )
-
- # type parameters require some special handling,
- # as they exist in their own scope
- # but `eval()` does not have a dedicated parameter for that scope.
- # For classes, names in type parameter scopes should override
- # names in the global scope (which here are called `localns`!),
- # but should in turn be overridden by names in the class scope
- # (which here are called `globalns`!)
- if type_params:
- globalns, localns = dict(globalns), dict(localns)
- for param in type_params:
- param_name = param.__name__
- if not self.__forward_is_class__ or param_name not in globalns:
- globalns[param_name] = param
- localns.pop(param_name, None)
-
- type_ = _type_check(
- eval(self.__forward_code__, globalns, localns),
- "Forward references must evaluate to types.",
- is_argument=self.__forward_is_argument__,
- allow_special_forms=self.__forward_is_class__,
- )
- self.__forward_value__ = _eval_type(
- type_,
- globalns,
- localns,
- type_params,
- recursive_guard=(recursive_guard | {self.__forward_arg__}),
- )
- self.__forward_evaluated__ = True
- return self.__forward_value__
-
- def __eq__(self, other):
- if not isinstance(other, ForwardRef):
- return NotImplemented
- if self.__forward_evaluated__ and other.__forward_evaluated__:
- return (self.__forward_arg__ == other.__forward_arg__ and
- self.__forward_value__ == other.__forward_value__)
- return (self.__forward_arg__ == other.__forward_arg__ and
- self.__forward_module__ == other.__forward_module__)
-
- def __hash__(self):
- return hash((self.__forward_arg__, self.__forward_module__))
-
- def __or__(self, other):
- return Union[self, other]
+def evaluate_forward_ref(
+ forward_ref,
+ *,
+ owner=None,
+ globals=None,
+ locals=None,
+ type_params=None,
+ format=annotationlib.Format.VALUE,
+ _recursive_guard=frozenset(),
+):
+ """Evaluate a forward reference as a type hint.
+
+ This is similar to calling the ForwardRef.evaluate() method,
+ but unlike that method, evaluate_forward_ref() also:
+
+ * Recursively evaluates forward references nested within the type hint.
+ * Rejects certain objects that are not valid type hints.
+ * Replaces type hints that evaluate to None with types.NoneType.
+ * Supports the *FORWARDREF* and *SOURCE* formats.
+
+ *forward_ref* must be an instance of ForwardRef. *owner*, if given,
+ should be the object that holds the annotations that the forward reference
+ is derived from, such as a module, class object, or function. It is used to
+ infer the namespaces to use for looking up names. *globals* and *locals*
+ can also be explicitly given to provide the global and local namespaces.
+ *type_params* is a tuple of type parameters that are in scope when
+ evaluating the forward reference. This parameter must be provided (though
+ it may be an empty tuple) if *owner* is not given and the forward reference
+ does not already have an owner set. *format* specifies the format of the
+ annotation and is a member of the annotationlib.Format enum.
- def __ror__(self, other):
- return Union[other, self]
+ """
+ if type_params is _sentinel:
+ _deprecation_warning_for_no_type_params_passed("typing.evaluate_forward_ref")
+ type_params = ()
+ if format == annotationlib.Format.SOURCE:
+ return forward_ref.__forward_arg__
+ if forward_ref.__forward_arg__ in _recursive_guard:
+ return forward_ref
- def __repr__(self):
- if self.__forward_module__ is None:
- module_repr = ''
+ try:
+ value = forward_ref.evaluate(globals=globals, locals=locals,
+ type_params=type_params, owner=owner)
+ except NameError:
+ if format == annotationlib.Format.FORWARDREF:
+ return forward_ref
else:
- module_repr = f', module={self.__forward_module__!r}'
- return f'ForwardRef({self.__forward_arg__!r}{module_repr})'
+ raise
+
+ type_ = _type_check(
+ value,
+ "Forward references must evaluate to types.",
+ is_argument=forward_ref.__forward_is_argument__,
+ allow_special_forms=forward_ref.__forward_is_class__,
+ )
+ return _eval_type(
+ type_,
+ globals,
+ locals,
+ type_params,
+ recursive_guard=_recursive_guard | {forward_ref.__forward_arg__},
+ format=format,
+ owner=owner,
+ )
def _is_unpacked_typevartuple(x: Any) -> bool:
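For illustration, a minimal sketch of how the new typing.evaluate_forward_ref()
above might be used (not part of the patch; it assumes annotationlib.ForwardRef
and a deliberately undefined name):

    import typing
    from annotationlib import ForwardRef, Format

    # A reference to a resolvable name evaluates to the actual type.
    assert typing.evaluate_forward_ref(ForwardRef("int")) is int

    # SOURCE simply hands back the original source string.
    assert typing.evaluate_forward_ref(ForwardRef("int"), format=Format.SOURCE) == "int"

    # With FORWARDREF, an unresolved name is returned as a ForwardRef
    # instead of raising NameError.
    ref = ForwardRef("Undefined")
    result = typing.evaluate_forward_ref(ref, format=Format.FORWARDREF)
    assert isinstance(result, ForwardRef)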
@@ -2196,7 +2169,7 @@ class _AnnotatedAlias(_NotIterable, _GenericAlias, _root=True):
"""Runtime representation of an annotated type.
At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
- with extra annotations. The alias behaves like a normal typing alias.
+ with extra metadata. The alias behaves like a normal typing alias.
Instantiating is the same as instantiating the underlying type; binding
it to types is also the same.
@@ -2380,7 +2353,8 @@ def greet(name: str) -> None:
WrapperDescriptorType, MethodWrapperType, MethodDescriptorType)
-def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
+def get_type_hints(obj, globalns=None, localns=None, include_extras=False,
+ *, format=annotationlib.Format.VALUE):
"""Return type hints for an object.
This is often the same as obj.__annotations__, but it handles
@@ -2417,13 +2391,14 @@ def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
if isinstance(obj, type):
hints = {}
for base in reversed(obj.__mro__):
+ ann = annotationlib.get_annotations(base, format=format)
+ if format is annotationlib.Format.SOURCE:
+ hints.update(ann)
+ continue
if globalns is None:
base_globals = getattr(sys.modules.get(base.__module__, None), '__dict__', {})
else:
base_globals = globalns
- ann = getattr(base, '__annotations__', {})
- if isinstance(ann, types.GetSetDescriptorType):
- ann = {}
base_locals = dict(vars(base)) if localns is None else localns
if localns is None and globalns is None:
# This is surprising, but required. Before Python 3.10,
@@ -2437,10 +2412,26 @@ def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
if value is None:
value = type(None)
if isinstance(value, str):
- value = ForwardRef(value, is_argument=False, is_class=True)
- value = _eval_type(value, base_globals, base_locals, base.__type_params__)
+ value = _make_forward_ref(value, is_argument=False, is_class=True)
+ value = _eval_type(value, base_globals, base_locals, base.__type_params__,
+ format=format, owner=obj)
hints[name] = value
- return hints if include_extras else {k: _strip_annotations(t) for k, t in hints.items()}
+ if include_extras or format is annotationlib.Format.SOURCE:
+ return hints
+ else:
+ return {k: _strip_annotations(t) for k, t in hints.items()}
+
+ hints = annotationlib.get_annotations(obj, format=format)
+ if (
+ not hints
+ and not isinstance(obj, types.ModuleType)
+ and not callable(obj)
+ and not hasattr(obj, '__annotations__')
+ and not hasattr(obj, '__annotate__')
+ ):
+ raise TypeError(f"{obj!r} is not a module, class, or callable.")
+ if format is annotationlib.Format.SOURCE:
+ return hints
if globalns is None:
if isinstance(obj, types.ModuleType):
@@ -2455,15 +2446,6 @@ def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
localns = globalns
elif localns is None:
localns = globalns
- hints = getattr(obj, '__annotations__', None)
- if hints is None:
- # Return empty annotations for something that _could_ have them.
- if isinstance(obj, _allowed_types):
- return {}
- else:
- raise TypeError('{!r} is not a module, class, method, '
- 'or function.'.format(obj))
- hints = dict(hints)
type_params = getattr(obj, "__type_params__", ())
for name, value in hints.items():
if value is None:
@@ -2471,12 +2453,12 @@ def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
if isinstance(value, str):
# class-level forward refs were handled above, this must be either
# a module-level annotation or a function argument annotation
- value = ForwardRef(
+ value = _make_forward_ref(
value,
is_argument=not isinstance(obj, types.ModuleType),
is_class=False,
)
- hints[name] = _eval_type(value, globalns, localns, type_params)
+ hints[name] = _eval_type(value, globalns, localns, type_params, format=format, owner=obj)
return hints if include_extras else {k: _strip_annotations(t) for k, t in hints.items()}
@@ -2953,22 +2935,34 @@ def __round__(self, ndigits: int = 0) -> T:
pass
-def _make_nmtuple(name, types, module, defaults = ()):
- fields = [n for n, t in types]
- types = {n: _type_check(t, f"field {n} annotation must be a type")
- for n, t in types}
+def _make_nmtuple(name, fields, annotate_func, module, defaults = ()):
nm_tpl = collections.namedtuple(name, fields,
defaults=defaults, module=module)
- nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = types
+ nm_tpl.__annotate__ = nm_tpl.__new__.__annotate__ = annotate_func
return nm_tpl
+def _make_eager_annotate(types):
+ checked_types = {key: _type_check(val, f"field {key} annotation must be a type")
+ for key, val in types.items()}
+ def annotate(format):
+ if format in (annotationlib.Format.VALUE, annotationlib.Format.FORWARDREF):
+ return checked_types
+ else:
+ return _convert_to_source(types)
+ return annotate
+
+
+def _convert_to_source(types):
+ return {n: t if isinstance(t, str) else _type_repr(t) for n, t in types.items()}
+
+
# attributes prohibited to set in NamedTuple class syntax
_prohibited = frozenset({'__new__', '__init__', '__slots__', '__getnewargs__',
'_fields', '_field_defaults',
'_make', '_replace', '_asdict', '_source'})
-_special = frozenset({'__module__', '__name__', '__annotations__'})
+_special = frozenset({'__module__', '__name__', '__annotations__', '__annotate__'})
class NamedTupleMeta(type):
@@ -2981,12 +2975,29 @@ def __new__(cls, typename, bases, ns):
bases = tuple(tuple if base is _NamedTuple else base for base in bases)
if "__annotations__" in ns:
types = ns["__annotations__"]
+ field_names = list(types)
+ annotate = _make_eager_annotate(types)
elif "__annotate__" in ns:
- types = ns["__annotate__"](1) # VALUE
+ original_annotate = ns["__annotate__"]
+ types = annotationlib.call_annotate_function(original_annotate, annotationlib.Format.FORWARDREF)
+ field_names = list(types)
+
+ # For backward compatibility, type-check all the types at creation time
+ for typ in types.values():
+ _type_check(typ, "field annotation must be a type")
+
+ def annotate(format):
+ annos = annotationlib.call_annotate_function(original_annotate, format)
+ if format != annotationlib.Format.SOURCE:
+ return {key: _type_check(val, f"field {key} annotation must be a type")
+ for key, val in annos.items()}
+ return annos
else:
- types = {}
+ # Empty NamedTuple
+ field_names = []
+ annotate = lambda format: {}
default_names = []
- for field_name in types:
+ for field_name in field_names:
if field_name in ns:
default_names.append(field_name)
elif default_names:
@@ -2994,7 +3005,7 @@ def __new__(cls, typename, bases, ns):
f"cannot follow default field"
f"{'s' if len(default_names) > 1 else ''} "
f"{', '.join(default_names)}")
- nm_tpl = _make_nmtuple(typename, types.items(),
+ nm_tpl = _make_nmtuple(typename, field_names, annotate,
defaults=[ns[n] for n in default_names],
module=ns['__module__'])
nm_tpl.__bases__ = bases
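The net effect of the new annotate machinery on a NamedTuple can be seen in a
short sketch that mirrors the new typing tests (illustrative only, not part of
the patch):

    import annotationlib
    from typing import NamedTuple, ForwardRef

    class Z(NamedTuple):
        a: None
        b: "str"

    # _type_convert turns None into NoneType and strings into ForwardRef.
    assert Z.__annotations__ == {"a": type(None), "b": ForwardRef("str")}
    # The SOURCE format returns the annotations as source strings instead.
    assert Z.__annotate__(annotationlib.Format.SOURCE) == {"a": "None", "b": "str"}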
@@ -3085,7 +3096,11 @@ class Employee(NamedTuple):
import warnings
warnings._deprecated(deprecated_thing, message=deprecation_msg, remove=(3, 15))
fields = kwargs.items()
- nt = _make_nmtuple(typename, fields, module=_caller())
+ types = {n: _type_check(t, f"field {n} annotation must be a type")
+ for n, t in fields}
+ field_names = [n for n, _ in fields]
+
+ nt = _make_nmtuple(typename, field_names, _make_eager_annotate(types), module=_caller())
nt.__orig_bases__ = (NamedTuple,)
return nt
@@ -3144,15 +3159,19 @@ def __new__(cls, name, bases, ns, total=True):
if not hasattr(tp_dict, '__orig_bases__'):
tp_dict.__orig_bases__ = bases
- annotations = {}
if "__annotations__" in ns:
+ own_annotate = None
own_annotations = ns["__annotations__"]
elif "__annotate__" in ns:
- own_annotations = ns["__annotate__"](1) # VALUE
+ own_annotate = ns["__annotate__"]
+ own_annotations = annotationlib.call_annotate_function(
+ own_annotate, annotationlib.Format.FORWARDREF, owner=tp_dict
+ )
else:
+ own_annotate = None
own_annotations = {}
msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
- own_annotations = {
+ own_checked_annotations = {
n: _type_check(tp, msg, module=tp_dict.__module__)
for n, tp in own_annotations.items()
}
@@ -3162,13 +3181,6 @@ def __new__(cls, name, bases, ns, total=True):
mutable_keys = set()
for base in bases:
- # TODO: Avoid eagerly evaluating annotations in VALUE format.
- # Instead, evaluate in FORWARDREF format to figure out which
- # keys have Required/NotRequired/ReadOnly qualifiers, and create
- # a new __annotate__ function for the resulting TypedDict that
- # combines the annotations from this class and its parents.
- annotations.update(base.__annotations__)
-
base_required = base.__dict__.get('__required_keys__', set())
required_keys |= base_required
optional_keys -= base_required
@@ -3180,8 +3192,7 @@ def __new__(cls, name, bases, ns, total=True):
readonly_keys.update(base.__dict__.get('__readonly_keys__', ()))
mutable_keys.update(base.__dict__.get('__mutable_keys__', ()))
- annotations.update(own_annotations)
- for annotation_key, annotation_type in own_annotations.items():
+ for annotation_key, annotation_type in own_checked_annotations.items():
qualifiers = set(_get_typeddict_qualifiers(annotation_type))
if Required in qualifiers:
is_required = True
@@ -3212,7 +3223,32 @@ def __new__(cls, name, bases, ns, total=True):
f"Required keys overlap with optional keys in {name}:"
f" {required_keys=}, {optional_keys=}"
)
- tp_dict.__annotations__ = annotations
+
+ def __annotate__(format):
+ annos = {}
+ for base in bases:
+ if base is Generic:
+ continue
+ base_annotate = base.__annotate__
+ if base_annotate is None:
+ continue
+ base_annos = annotationlib.call_annotate_function(base.__annotate__, format, owner=base)
+ annos.update(base_annos)
+ if own_annotate is not None:
+ own = annotationlib.call_annotate_function(own_annotate, format, owner=tp_dict)
+ if format != annotationlib.Format.SOURCE:
+ own = {
+ n: _type_check(tp, msg, module=tp_dict.__module__)
+ for n, tp in own.items()
+ }
+ elif format == annotationlib.Format.SOURCE:
+ own = _convert_to_source(own_annotations)
+ else:
+ own = own_checked_annotations
+ annos.update(own)
+ return annos
+
+ tp_dict.__annotate__ = __annotate__
tp_dict.__required_keys__ = frozenset(required_keys)
tp_dict.__optional_keys__ = frozenset(optional_keys)
tp_dict.__readonly_keys__ = frozenset(readonly_keys)
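A sketch of what the per-class __annotate__ constructed above produces for a
TypedDict with a not-yet-defined annotation (mirrors the new tests; the Movie
class and the undefined name are illustrative, not part of the patch):

    import annotationlib
    from typing import TypedDict, NotRequired

    class Movie(TypedDict):
        title: str
        year: NotRequired[undefined]   # 'undefined' does not need to exist yet

    # Key categories are computed without fully resolving the annotations...
    assert Movie.__required_keys__ == frozenset({"title"})
    assert Movie.__optional_keys__ == frozenset({"year"})
    # ...SOURCE hands back the annotations as source strings...
    assert Movie.__annotate__(annotationlib.Format.SOURCE) == {
        "title": "str", "year": "NotRequired[undefined]"}
    # ...and __annotations__ only fails if a name is still unresolved when asked for.
    try:
        Movie.__annotations__
    except NameError:
        pass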
diff --git a/Misc/NEWS.d/next/Library/2024-06-11-07-17-25.gh-issue-119180.iH-2zy.rst b/Misc/NEWS.d/next/Library/2024-06-11-07-17-25.gh-issue-119180.iH-2zy.rst
new file mode 100644
index 00000000000000..f24d7bd6b9d26c
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-06-11-07-17-25.gh-issue-119180.iH-2zy.rst
@@ -0,0 +1,4 @@
+As part of implementing :pep:`649` and :pep:`749`, add a new module
+``annotationlib``. Add support for unresolved forward references in
+annotations to :mod:`dataclasses`, :class:`typing.TypedDict`, and
+:class:`typing.NamedTuple`.
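For illustration, a minimal sketch of the dataclasses side of this change,
mirroring the new test_deferred_annotations test (the Point class and the
undefined name are illustrative, not part of the patch):

    from dataclasses import dataclass, fields
    from typing import ClassVar

    @dataclass
    class Point:
        x: undefined              # forward reference; the name may be defined later
        y: ClassVar[undefined]    # still recognized as a ClassVar without evaluating it

    # Only the real field survives; the ClassVar pseudo-field is excluded as before.
    assert [f.name for f in fields(Point)] == ["x"]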
diff --git a/Python/stdlib_module_names.h b/Python/stdlib_module_names.h
index 9686d10563aa4d..4d595d98445a05 100644
--- a/Python/stdlib_module_names.h
+++ b/Python/stdlib_module_names.h
@@ -99,6 +99,7 @@ static const char* _Py_stdlib_module_names[] = {
"_winapi",
"_zoneinfo",
"abc",
+"annotationlib",
"antigravity",
"argparse",
"array",
From 41a91bd67f86c922f350894a797738038536e1c5 Mon Sep 17 00:00:00 2001
From: Eric Snow
Date: Tue, 23 Jul 2024 15:19:17 -0600
Subject: [PATCH 005/139] gh-122199: Skip test_slot_wrappers When Checking For
Refleaks (gh-122200)
---
Lib/test/test_types.py | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/Lib/test/test_types.py b/Lib/test/test_types.py
index fb88daf9742fa9..a87bb275d296a0 100644
--- a/Lib/test/test_types.py
+++ b/Lib/test/test_types.py
@@ -1,6 +1,9 @@
# Python test set -- part 6, built-in types
-from test.support import run_with_locale, is_apple_mobile, cpython_only, MISSING_C_DOCSTRINGS
+from test.support import (
+ run_with_locale, is_apple_mobile, cpython_only, no_rerun,
+ MISSING_C_DOCSTRINGS,
+)
import collections.abc
from collections import namedtuple, UserDict
import copy
@@ -2378,6 +2381,7 @@ def setUpClass(cls):
import test.support.interpreters.channels
@cpython_only
+ @no_rerun('channels (and queues) might have a refleak; see gh-122199')
@unittest.skipIf(is_apple_mobile, "Fails on iOS due to test ordering; see #121832.")
def test_slot_wrappers(self):
rch, sch = interpreters.channels.create()
From e91ef13861e88c27aed51a24e58d1dcc855a01dc Mon Sep 17 00:00:00 2001
From: Tian Gao
Date: Tue, 23 Jul 2024 15:25:26 -0700
Subject: [PATCH 006/139] gh-122029: Log call events in sys.setprofile when
it's a method with a C function (GH-122072)
Log call events in sys.setprofile when it is a method with a C function.
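For illustration, a minimal sketch of the behavior this commit fixes, mirroring
the new test below (not part of the patch):

    import sys

    class A:
        # A PyMethodObject whose underlying function is the C function repr().
        f = classmethod(repr)

    events = []
    sys.setprofile(lambda frame, event, arg: events.append(event))
    A().f()
    sys.setprofile(None)

    # The call into the C function is now reported; the trailing 'c_call'
    # comes from the sys.setprofile(None) call itself.
    assert events == ['c_call', 'c_return', 'c_call']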
---
Lib/test/test_sys_setprofile.py | 14 ++++++++++++++
.../2024-07-21-01-23-54.gh-issue-122029.gKv-e2.rst | 1 +
Python/legacy_tracing.c | 13 +++++++++++++
3 files changed, 28 insertions(+)
create mode 100644 Misc/NEWS.d/next/Core and Builtins/2024-07-21-01-23-54.gh-issue-122029.gKv-e2.rst
diff --git a/Lib/test/test_sys_setprofile.py b/Lib/test/test_sys_setprofile.py
index 32e03d7cd25dbe..b2e8e8a15b67ea 100644
--- a/Lib/test/test_sys_setprofile.py
+++ b/Lib/test/test_sys_setprofile.py
@@ -479,6 +479,20 @@ def f():
sys.setprofile(lambda *args: None)
f()
+ def test_method_with_c_function(self):
+ # gh-122029
+ # When we have a PyMethodObject whose im_func is a C function, we
+ # should record both the call and the return. f = classmethod(repr)
+ # is just a way to create a PyMethodObject with a C function.
+ class A:
+ f = classmethod(repr)
+ events = []
+ sys.setprofile(lambda frame, event, args: events.append(event))
+ A().f()
+ sys.setprofile(None)
+ # The last c_call is the call to sys.setprofile
+ self.assertEqual(events, ['c_call', 'c_return', 'c_call'])
+
if __name__ == "__main__":
unittest.main()
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-07-21-01-23-54.gh-issue-122029.gKv-e2.rst b/Misc/NEWS.d/next/Core and Builtins/2024-07-21-01-23-54.gh-issue-122029.gKv-e2.rst
new file mode 100644
index 00000000000000..bddee3a57fba80
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-07-21-01-23-54.gh-issue-122029.gKv-e2.rst
@@ -0,0 +1 @@
+Emit ``c_call`` events in :func:`sys.setprofile` when a ``PyMethodObject`` pointing to a ``PyCFunction`` is called.
diff --git a/Python/legacy_tracing.c b/Python/legacy_tracing.c
index 1103d999dfaea5..9cc3af1f5e162c 100644
--- a/Python/legacy_tracing.c
+++ b/Python/legacy_tracing.c
@@ -121,6 +121,19 @@ sys_profile_call_or_return(
Py_DECREF(meth);
return res;
}
+ else if (Py_TYPE(callable) == &PyMethod_Type) {
+ // The CALL instruction will grab the function from the method, so if
+ // the function is a C function, the return event will be emitted.
+ // However, the CALL event happens before the CALL instruction executes,
+ // so we need to handle this case here.
+ PyObject* func = PyMethod_GET_FUNCTION(callable);
+ if (func == NULL) {
+ return NULL;
+ }
+ if (PyCFunction_Check(func)) {
+ return call_profile_func(self, func);
+ }
+ }
Py_RETURN_NONE;
}
From 9eb734111be90399fb6ae2f717d736abb8e518cb Mon Sep 17 00:00:00 2001
From: Cody Maloney
Date: Tue, 23 Jul 2024 23:14:35 -0700
Subject: [PATCH 007/139] GH-120754: Add more tests around seek + readall
(#122103)
In the process of speeding up readall, a number of related tests
(e.g. the large file tests in test_zipfile) found problems with the
change I was making. This adds I/O tests that specifically cover those
cases, to help ensure they don't regress and hopefully make debugging
easier.
This is part of the improvements from
https://github.com/python/cpython/pull/121593#issuecomment-2222261986
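For illustration, a small stand-in for the pattern the new tests exercise,
using a 100-byte file instead of the large-file fixture ("example.bin" is a
placeholder path, not part of the patch):

    import os

    with open("example.bin", "wb") as f:
        f.write(b"x" * 100)

    with open("example.bin", "rb") as f:
        assert f.seek(0, os.SEEK_CUR) == 0      # a seek that does not move
        assert len(f.read()) == 100             # readall still sees everything

    with open("example.bin", "rb") as f:
        assert f.seek(20, os.SEEK_SET) == 20
        assert len(f.read()) == 80              # readall after an absolute seek

    with open("example.bin", "rb") as f:
        assert f.seek(-3, os.SEEK_END) == 97
        assert len(f.read()) == 3               # readall near the end of the file

    os.remove("example.bin")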
---
Lib/test/test_largefile.py | 19 +++++++++++++++++++
1 file changed, 19 insertions(+)
diff --git a/Lib/test/test_largefile.py b/Lib/test/test_largefile.py
index 849b6cb3e50a19..41f7b70e5cfe81 100644
--- a/Lib/test/test_largefile.py
+++ b/Lib/test/test_largefile.py
@@ -141,6 +141,9 @@ def test_truncate(self):
f.truncate(1)
self.assertEqual(f.tell(), 0) # else pointer moved
f.seek(0)
+ # Verify that readall on a truncated file is well behaved. read()
+ # without a size can be unbounded; this should read just the byte
+ # that remains.
self.assertEqual(len(f.read()), 1) # else wasn't truncated
def test_seekable(self):
@@ -151,6 +154,22 @@ def test_seekable(self):
f.seek(pos)
self.assertTrue(f.seekable())
+ @bigmemtest(size=size, memuse=2, dry_run=False)
+ def test_seek_readall(self, _size):
+ # A seek which doesn't change the position should still allow readall to succeed.
+ with self.open(TESTFN, 'rb') as f:
+ self.assertEqual(f.seek(0, os.SEEK_CUR), 0)
+ self.assertEqual(len(f.read()), size + 1)
+
+ # A seek which changes (or might change) the position should also allow
+ # readall to succeed.
+ with self.open(TESTFN, 'rb') as f:
+ self.assertEqual(f.seek(20, os.SEEK_SET), 20)
+ self.assertEqual(len(f.read()), size - 19)
+
+ with self.open(TESTFN, 'rb') as f:
+ self.assertEqual(f.seek(-3, os.SEEK_END), size - 2)
+ self.assertEqual(len(f.read()), 3)
def skip_no_disk_space(path, required):
def decorator(fun):
From f067efa64306cfe582bf8d3ceb8c709dec6530f7 Mon Sep 17 00:00:00 2001
From: Xie Yanbo
Date: Wed, 24 Jul 2024 14:49:58 +0800
Subject: [PATCH 008/139] Fix typo in news document (GH-122209)
---
.../2024-06-13-12-17-52.gh-issue-120384.w1UBGl.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-06-13-12-17-52.gh-issue-120384.w1UBGl.rst b/Misc/NEWS.d/next/Core and Builtins/2024-06-13-12-17-52.gh-issue-120384.w1UBGl.rst
index 4a4db821ce29b8..65959ca2d28075 100644
--- a/Misc/NEWS.d/next/Core and Builtins/2024-06-13-12-17-52.gh-issue-120384.w1UBGl.rst
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-06-13-12-17-52.gh-issue-120384.w1UBGl.rst
@@ -1,3 +1,3 @@
Fix an array out of bounds crash in ``list_ass_subscript``, which could be
-invoked via some specificly tailored input: including concurrent modification
+invoked via some specifically tailored input: including concurrent modification
of a list object, where one thread assigns a slice and another clears it.
From b3b7b7d46a4549f818aa8f99ff577678b2fdd967 Mon Sep 17 00:00:00 2001
From: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com>
Date: Wed, 24 Jul 2024 11:29:35 +0300
Subject: [PATCH 009/139] gh-122085: Use include files for `whatsnew/3.12.rst`
deprecations (#122093)
---
Doc/deprecations/pending-removal-in-3.13.rst | 52 ++++++
Doc/deprecations/pending-removal-in-3.14.rst | 19 +++
.../pending-removal-in-future.rst | 2 +
Doc/whatsnew/3.12.rst | 150 +-----------------
4 files changed, 77 insertions(+), 146 deletions(-)
create mode 100644 Doc/deprecations/pending-removal-in-3.13.rst
diff --git a/Doc/deprecations/pending-removal-in-3.13.rst b/Doc/deprecations/pending-removal-in-3.13.rst
new file mode 100644
index 00000000000000..03c785bb4b69ac
--- /dev/null
+++ b/Doc/deprecations/pending-removal-in-3.13.rst
@@ -0,0 +1,52 @@
+Pending Removal in Python 3.13
+------------------------------
+
+Modules (see :pep:`594`):
+
+* :mod:`!aifc`
+* :mod:`!audioop`
+* :mod:`!cgi`
+* :mod:`!cgitb`
+* :mod:`!chunk`
+* :mod:`!crypt`
+* :mod:`!imghdr`
+* :mod:`!mailcap`
+* :mod:`!msilib`
+* :mod:`!nis`
+* :mod:`!nntplib`
+* :mod:`!ossaudiodev`
+* :mod:`!pipes`
+* :mod:`!sndhdr`
+* :mod:`!spwd`
+* :mod:`!sunau`
+* :mod:`!telnetlib`
+* :mod:`!uu`
+* :mod:`!xdrlib`
+
+Other modules:
+
+* :mod:`!lib2to3`, and the :program:`2to3` program (:gh:`84540`)
+
+APIs:
+
+* :class:`!configparser.LegacyInterpolation` (:gh:`90765`)
+* ``locale.resetlocale()`` (:gh:`90817`)
+* :meth:`!turtle.RawTurtle.settiltangle` (:gh:`50096`)
+* :func:`!unittest.findTestCases` (:gh:`50096`)
+* :func:`!unittest.getTestCaseNames` (:gh:`50096`)
+* :func:`!unittest.makeSuite` (:gh:`50096`)
+* :meth:`!unittest.TestProgram.usageExit` (:gh:`67048`)
+* :class:`!webbrowser.MacOSX` (:gh:`86421`)
+* :class:`classmethod` descriptor chaining (:gh:`89519`)
+* :mod:`importlib.resources` deprecated methods:
+
+ * ``contents()``
+ * ``is_resource()``
+ * ``open_binary()``
+ * ``open_text()``
+ * ``path()``
+ * ``read_binary()``
+ * ``read_text()``
+
+ Use :func:`importlib.resources.files()` instead. Refer to `importlib-resources: Migrating from Legacy
+ `_ (:gh:`106531`)
diff --git a/Doc/deprecations/pending-removal-in-3.14.rst b/Doc/deprecations/pending-removal-in-3.14.rst
index 48b0fb503cf397..6c831ae366ced5 100644
--- a/Doc/deprecations/pending-removal-in-3.14.rst
+++ b/Doc/deprecations/pending-removal-in-3.14.rst
@@ -19,6 +19,25 @@ Pending Removal in Python 3.14
Use :class:`ast.Constant` instead.
(Contributed by Serhiy Storchaka in :gh:`90953`.)
+* :mod:`asyncio`:
+
+ * The child watcher classes :class:`!asyncio.MultiLoopChildWatcher`,
+ :class:`!asyncio.FastChildWatcher`, :class:`!asyncio.AbstractChildWatcher`
+ and :class:`!asyncio.SafeChildWatcher` are deprecated and
+ will be removed in Python 3.14.
+ (Contributed by Kumar Aditya in :gh:`94597`.)
+
+ * :func:`!asyncio.set_child_watcher`, :func:`!asyncio.get_child_watcher`,
+ :meth:`!asyncio.AbstractEventLoopPolicy.set_child_watcher` and
+ :meth:`!asyncio.AbstractEventLoopPolicy.get_child_watcher` are deprecated
+ and will be removed in Python 3.14.
+ (Contributed by Kumar Aditya in :gh:`94597`.)
+
+ * The :meth:`~asyncio.get_event_loop` method of the
+ default event loop policy now emits a :exc:`DeprecationWarning` if there
+ is no current event loop set and it decides to create one.
+ (Contributed by Serhiy Storchaka and Guido van Rossum in :gh:`100160`.)
+
* :mod:`collections.abc`: Deprecated :class:`!collections.abc.ByteString`.
Prefer :class:`!Sequence` or :class:`~collections.abc.Buffer`.
For use in typing, prefer a union, like ``bytes | bytearray``,
diff --git a/Doc/deprecations/pending-removal-in-future.rst b/Doc/deprecations/pending-removal-in-future.rst
index f2b95e420e8972..db6a41fe8880f6 100644
--- a/Doc/deprecations/pending-removal-in-future.rst
+++ b/Doc/deprecations/pending-removal-in-future.rst
@@ -7,6 +7,8 @@ although there is currently no date scheduled for their removal.
* :mod:`argparse`: Nesting argument groups and nesting mutually exclusive
groups are deprecated.
+* :mod:`array`'s ``'u'`` format code (:gh:`57281`)
+
* :mod:`builtins`:
* ``~bool``, bitwise inversion on bool.
diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst
index cfc87cb9089c66..b4cd4aa6e83b91 100644
--- a/Doc/whatsnew/3.12.rst
+++ b/Doc/whatsnew/3.12.rst
@@ -1330,155 +1330,13 @@ Deprecated
therefore it will be removed in 3.14.
(Contributed by Nikita Sobolev in :gh:`101866`.)
-Pending Removal in Python 3.13
-------------------------------
-
-The following modules and APIs have been deprecated in earlier Python releases,
-and will be removed in Python 3.13.
-
-Modules (see :pep:`594`):
-
-* :mod:`!aifc`
-* :mod:`!audioop`
-* :mod:`!cgi`
-* :mod:`!cgitb`
-* :mod:`!chunk`
-* :mod:`!crypt`
-* :mod:`!imghdr`
-* :mod:`!mailcap`
-* :mod:`!msilib`
-* :mod:`!nis`
-* :mod:`!nntplib`
-* :mod:`!ossaudiodev`
-* :mod:`!pipes`
-* :mod:`!sndhdr`
-* :mod:`!spwd`
-* :mod:`!sunau`
-* :mod:`!telnetlib`
-* :mod:`!uu`
-* :mod:`!xdrlib`
-
-Other modules:
-
-* :mod:`!lib2to3`, and the :program:`2to3` program (:gh:`84540`)
-
-APIs:
-
-* :class:`!configparser.LegacyInterpolation` (:gh:`90765`)
-* ``locale.resetlocale()`` (:gh:`90817`)
-* :meth:`!turtle.RawTurtle.settiltangle` (:gh:`50096`)
-* :func:`!unittest.findTestCases` (:gh:`50096`)
-* :func:`!unittest.getTestCaseNames` (:gh:`50096`)
-* :func:`!unittest.makeSuite` (:gh:`50096`)
-* :meth:`!unittest.TestProgram.usageExit` (:gh:`67048`)
-* :class:`!webbrowser.MacOSX` (:gh:`86421`)
-* :class:`classmethod` descriptor chaining (:gh:`89519`)
-* :mod:`importlib.resources` deprecated methods:
-
- * ``contents()``
- * ``is_resource()``
- * ``open_binary()``
- * ``open_text()``
- * ``path()``
- * ``read_binary()``
- * ``read_text()``
-
- Use :func:`importlib.resources.files()` instead. Refer to `importlib-resources: Migrating from Legacy
- `_ (:gh:`106531`)
-
-Pending Removal in Python 3.14
-------------------------------
-
-The following APIs have been deprecated
-and will be removed in Python 3.14.
-
-* :mod:`argparse`: The *type*, *choices*, and *metavar* parameters
- of :class:`!argparse.BooleanOptionalAction`
-
-* :mod:`ast`:
-
- * :class:`!ast.Num`
- * :class:`!ast.Str`
- * :class:`!ast.Bytes`
- * :class:`!ast.NameConstant`
- * :class:`!ast.Ellipsis`
-
-* :mod:`asyncio`:
-
- * :class:`!asyncio.MultiLoopChildWatcher`
- * :class:`!asyncio.FastChildWatcher`
- * :class:`!asyncio.AbstractChildWatcher`
- * :class:`!asyncio.SafeChildWatcher`
- * :func:`!asyncio.set_child_watcher`
- * :func:`!asyncio.get_child_watcher`,
- * :meth:`!asyncio.AbstractEventLoopPolicy.set_child_watcher`
- * :meth:`!asyncio.AbstractEventLoopPolicy.get_child_watcher`
-
-* :mod:`collections.abc`: :class:`!collections.abc.ByteString`.
-
-* :mod:`email`: the *isdst* parameter in :func:`email.utils.localtime`.
-
-* :mod:`importlib.abc`:
-
- * :class:`!importlib.abc.ResourceReader`
- * :class:`!importlib.abc.Traversable`
- * :class:`!importlib.abc.TraversableResources`
-
-* :mod:`itertools`: Support for copy, deepcopy, and pickle operations.
-
-* :mod:`pkgutil`:
-
- * :func:`!pkgutil.find_loader`
- * :func:`!pkgutil.get_loader`.
-
-* :mod:`pty`:
-
- * :func:`!pty.master_open`
- * :func:`!pty.slave_open`
-
-* :mod:`shutil`: The *onerror* argument of :func:`shutil.rmtree`
-
-* :mod:`typing`: :class:`!typing.ByteString`
-
-* The ``__package__`` and ``__cached__`` attributes on module objects.
-
-* The :attr:`~codeobject.co_lnotab` attribute of code objects.
-
-Pending Removal in Python 3.15
-------------------------------
-
-The following APIs have been deprecated
-and will be removed in Python 3.15.
-
-APIs:
-
-* :func:`locale.getdefaultlocale` (:gh:`90817`)
-
-
-Pending Removal in Future Versions
-----------------------------------
-
-The following APIs were deprecated in earlier Python versions and will be removed,
-although there is currently no date scheduled for their removal.
-
-* :mod:`array`'s ``'u'`` format code (:gh:`57281`)
-
-* :class:`typing.Text` (:gh:`92332`)
+.. include:: ../deprecations/pending-removal-in-3.13.rst
-* :mod:`xml.etree.ElementTree`: Testing the truth value of an
- :class:`xml.etree.ElementTree.Element` is deprecated. In a future release it
- will always return True. Prefer explicit ``len(elem)`` or
- ``elem is not None`` tests instead.
+.. include:: ../deprecations/pending-removal-in-3.14.rst
-* Currently Python accepts numeric literals immediately followed by keywords,
- for example ``0in x``, ``1or x``, ``0if 1else 2``. It allows confusing
- and ambiguous expressions like ``[0x1for x in y]`` (which can be
- interpreted as ``[0x1 for x in y]`` or ``[0x1f or x in y]``).
- A syntax warning is raised if the numeric literal is
- immediately followed by one of keywords :keyword:`and`, :keyword:`else`,
- :keyword:`for`, :keyword:`if`, :keyword:`in`, :keyword:`is` and :keyword:`or`.
- In a future release it will be changed to a syntax error. (:gh:`87999`)
+.. include:: ../deprecations/pending-removal-in-3.15.rst
+.. include:: ../deprecations/pending-removal-in-future.rst
Removed
=======
From e55b05f29ee62cd92b6b9990fd699b78f19432ba Mon Sep 17 00:00:00 2001
From: Mark Shannon
Date: Wed, 24 Jul 2024 10:22:51 +0100
Subject: [PATCH 010/139] GH-121832: Assert that the version number of static
builtin types is not changed by PyType_Modified. (GH-122182)
Update datetime module and test_type_cache.py to not call PyType_Modified.
---
Lib/test/test_type_cache.py | 23 +---------
Modules/_datetimemodule.c | 88 ++++++++++++++++++-------------------
Objects/typeobject.c | 2 +
3 files changed, 46 insertions(+), 67 deletions(-)
diff --git a/Lib/test/test_type_cache.py b/Lib/test/test_type_cache.py
index 89632a3abebfb5..66abe73f8d766d 100644
--- a/Lib/test/test_type_cache.py
+++ b/Lib/test/test_type_cache.py
@@ -161,8 +161,8 @@ def load_foo_2(type_):
self._check_specialization(load_foo_2, A, "LOAD_ATTR", should_specialize=False)
def test_class_load_attr_specialization_static_type(self):
- self._assign_valid_version_or_skip(str)
- self._assign_valid_version_or_skip(bytes)
+ self.assertNotEqual(type_get_version(str), 0)
+ self.assertNotEqual(type_get_version(bytes), 0)
def get_capitalize_1(type_):
return type_.capitalize
@@ -170,25 +170,6 @@ def get_capitalize_1(type_):
self._check_specialization(get_capitalize_1, str, "LOAD_ATTR", should_specialize=True)
self.assertEqual(get_capitalize_1(str)('hello'), 'Hello')
self.assertEqual(get_capitalize_1(bytes)(b'hello'), b'Hello')
- del get_capitalize_1
-
- # Permanently overflow the static type version counter, and force str and bytes
- # to have tp_version_tag == 0
- for _ in range(2**16):
- type_modified(str)
- type_assign_version(str)
- type_modified(bytes)
- type_assign_version(bytes)
-
- self.assertEqual(type_get_version(str), 0)
- self.assertEqual(type_get_version(bytes), 0)
-
- def get_capitalize_2(type_):
- return type_.capitalize
-
- self._check_specialization(get_capitalize_2, str, "LOAD_ATTR", should_specialize=False)
- self.assertEqual(get_capitalize_2(str)('hello'), 'Hello')
- self.assertEqual(get_capitalize_2(bytes)(b'hello'), b'Hello')
def test_property_load_attr_specialization_user_type(self):
class G:
diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c
index f20efd3d36d275..67b49aa6ac2301 100644
--- a/Modules/_datetimemodule.c
+++ b/Modules/_datetimemodule.c
@@ -7256,49 +7256,51 @@ _datetime_exec(PyObject *module)
Py_DECREF(value); \
} while(0)
- /* timedelta values */
- PyObject *d = _PyType_GetDict(&PyDateTime_DeltaType);
- DATETIME_ADD_MACRO(d, "resolution", new_delta(0, 0, 1, 0));
- DATETIME_ADD_MACRO(d, "min", new_delta(-MAX_DELTA_DAYS, 0, 0, 0));
- DATETIME_ADD_MACRO(d, "max",
- new_delta(MAX_DELTA_DAYS, 24*3600-1, 1000000-1, 0));
-
- /* date values */
- d = _PyType_GetDict(&PyDateTime_DateType);
- DATETIME_ADD_MACRO(d, "min", new_date(1, 1, 1));
- DATETIME_ADD_MACRO(d, "max", new_date(MAXYEAR, 12, 31));
- DATETIME_ADD_MACRO(d, "resolution", new_delta(1, 0, 0, 0));
-
- /* time values */
- d = _PyType_GetDict(&PyDateTime_TimeType);
- DATETIME_ADD_MACRO(d, "min", new_time(0, 0, 0, 0, Py_None, 0));
- DATETIME_ADD_MACRO(d, "max", new_time(23, 59, 59, 999999, Py_None, 0));
- DATETIME_ADD_MACRO(d, "resolution", new_delta(0, 0, 1, 0));
-
- /* datetime values */
- d = _PyType_GetDict(&PyDateTime_DateTimeType);
- DATETIME_ADD_MACRO(d, "min",
- new_datetime(1, 1, 1, 0, 0, 0, 0, Py_None, 0));
- DATETIME_ADD_MACRO(d, "max", new_datetime(MAXYEAR, 12, 31, 23, 59, 59,
- 999999, Py_None, 0));
- DATETIME_ADD_MACRO(d, "resolution", new_delta(0, 0, 1, 0));
-
- /* timezone values */
- d = _PyType_GetDict(&PyDateTime_TimeZoneType);
- if (PyDict_SetItemString(d, "utc", (PyObject *)&utc_timezone) < 0) {
- goto error;
- }
+ if (!reloading) {
+ /* timedelta values */
+ PyObject *d = _PyType_GetDict(&PyDateTime_DeltaType);
+ DATETIME_ADD_MACRO(d, "resolution", new_delta(0, 0, 1, 0));
+ DATETIME_ADD_MACRO(d, "min", new_delta(-MAX_DELTA_DAYS, 0, 0, 0));
+ DATETIME_ADD_MACRO(d, "max",
+ new_delta(MAX_DELTA_DAYS, 24*3600-1, 1000000-1, 0));
+
+ /* date values */
+ d = _PyType_GetDict(&PyDateTime_DateType);
+ DATETIME_ADD_MACRO(d, "min", new_date(1, 1, 1));
+ DATETIME_ADD_MACRO(d, "max", new_date(MAXYEAR, 12, 31));
+ DATETIME_ADD_MACRO(d, "resolution", new_delta(1, 0, 0, 0));
+
+ /* time values */
+ d = _PyType_GetDict(&PyDateTime_TimeType);
+ DATETIME_ADD_MACRO(d, "min", new_time(0, 0, 0, 0, Py_None, 0));
+ DATETIME_ADD_MACRO(d, "max", new_time(23, 59, 59, 999999, Py_None, 0));
+ DATETIME_ADD_MACRO(d, "resolution", new_delta(0, 0, 1, 0));
+
+ /* datetime values */
+ d = _PyType_GetDict(&PyDateTime_DateTimeType);
+ DATETIME_ADD_MACRO(d, "min",
+ new_datetime(1, 1, 1, 0, 0, 0, 0, Py_None, 0));
+ DATETIME_ADD_MACRO(d, "max", new_datetime(MAXYEAR, 12, 31, 23, 59, 59,
+ 999999, Py_None, 0));
+ DATETIME_ADD_MACRO(d, "resolution", new_delta(0, 0, 1, 0));
+
+ /* timezone values */
+ d = _PyType_GetDict(&PyDateTime_TimeZoneType);
+ if (PyDict_SetItemString(d, "utc", (PyObject *)&utc_timezone) < 0) {
+ goto error;
+ }
- /* bpo-37642: These attributes are rounded to the nearest minute for backwards
- * compatibility, even though the constructor will accept a wider range of
- * values. This may change in the future.*/
+ /* bpo-37642: These attributes are rounded to the nearest minute for backwards
+ * compatibility, even though the constructor will accept a wider range of
+ * values. This may change in the future.*/
- /* -23:59 */
- DATETIME_ADD_MACRO(d, "min", create_timezone_from_delta(-1, 60, 0, 1));
+ /* -23:59 */
+ DATETIME_ADD_MACRO(d, "min", create_timezone_from_delta(-1, 60, 0, 1));
- /* +23:59 */
- DATETIME_ADD_MACRO(
- d, "max", create_timezone_from_delta(0, (23 * 60 + 59) * 60, 0, 0));
+ /* +23:59 */
+ DATETIME_ADD_MACRO(
+ d, "max", create_timezone_from_delta(0, (23 * 60 + 59) * 60, 0, 0));
+ }
#undef DATETIME_ADD_MACRO
@@ -7342,12 +7344,6 @@ _datetime_exec(PyObject *module)
static_assert(DI100Y == 25 * DI4Y - 1, "DI100Y");
assert(DI100Y == days_before_year(100+1));
- if (reloading) {
- for (size_t i = 0; i < Py_ARRAY_LENGTH(capi_types); i++) {
- PyType_Modified(capi_types[i]);
- }
- }
-
if (set_current_module(interp, module) < 0) {
goto error;
}
diff --git a/Objects/typeobject.c b/Objects/typeobject.c
index 7d01b680605a38..5b0a466f913495 100644
--- a/Objects/typeobject.c
+++ b/Objects/typeobject.c
@@ -1026,6 +1026,8 @@ type_modified_unlocked(PyTypeObject *type)
if (type->tp_version_tag == 0) {
return;
}
+ // Cannot modify static builtin types.
+ assert((type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) == 0);
PyObject *subclasses = lookup_tp_subclasses(type);
if (subclasses != NULL) {
From af4329e7b1a25d58bb92f79480f5059c3683517b Mon Sep 17 00:00:00 2001
From: Sviatoslav Sydorenko (Святослав Сидоренко)
Date: Wed, 24 Jul 2024 11:46:39 +0200
Subject: [PATCH 011/139] Integrate `build_msi` into main CI workflow (#121778)
Co-authored-by: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com>
Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
---
.github/workflows/build.yml | 32 +++++++++++++++++
.github/workflows/build_msi.yml | 40 ----------------------
.github/workflows/reusable-windows-msi.yml | 24 +++++++++++++
3 files changed, 56 insertions(+), 40 deletions(-)
delete mode 100644 .github/workflows/build_msi.yml
create mode 100644 .github/workflows/reusable-windows-msi.yml
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index fc5b98f0220626..5c894abda71a87 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -48,6 +48,7 @@ jobs:
# }}
#
run-docs: ${{ steps.docs-changes.outputs.run-docs || false }}
+ run-win-msi: ${{ steps.win-msi-changes.outputs.run-win-msi || false }}
run_tests: ${{ steps.check.outputs.run_tests || false }}
run_hypothesis: ${{ steps.check.outputs.run_hypothesis || false }}
run_cifuzz: ${{ steps.check.outputs.run_cifuzz || false }}
@@ -123,6 +124,20 @@ jobs:
id: docs-changes
run: |
echo "run-docs=true" >> "${GITHUB_OUTPUT}"
+ - name: Get a list of the MSI installer-related files
+ id: changed-win-msi-files
+ uses: Ana06/get-changed-files@v2.3.0
+ with:
+ filter: |
+ Tools/msi/**
+ .github/workflows/reusable-windows-msi.yml
+ format: csv # works for paths with spaces
+ - name: Check for changes in MSI installer-related files
+ if: >-
+ steps.changed-win-msi-files.outputs.added_modified_renamed != ''
+ id: win-msi-changes
+ run: |
+ echo "run-win-msi=true" >> "${GITHUB_OUTPUT}"
check-docs:
name: Docs
@@ -218,6 +233,21 @@ jobs:
arch: ${{ matrix.arch }}
free-threading: ${{ matrix.free-threading }}
+ build_windows_msi:
+ name: >- # ${{ '' } is a hack to nest jobs under the same sidebar category
+ Windows MSI${{ '' }}
+ needs: check_source
+ if: fromJSON(needs.check_source.outputs.run-win-msi)
+ strategy:
+ matrix:
+ arch:
+ - x86
+ - x64
+ - arm64
+ uses: ./.github/workflows/reusable-windows-msi.yml
+ with:
+ arch: ${{ matrix.arch }}
+
build_macos:
name: 'macOS'
needs: check_source
@@ -571,6 +601,7 @@ jobs:
- build_ubuntu_ssltests
- build_wasi
- build_windows
+ - build_windows_msi
- test_hypothesis
- build_asan
- build_tsan
@@ -585,6 +616,7 @@ jobs:
with:
allowed-failures: >-
build_ubuntu_ssltests,
+ build_windows_msi,
cifuzz,
test_hypothesis,
allowed-skips: >-
diff --git a/.github/workflows/build_msi.yml b/.github/workflows/build_msi.yml
deleted file mode 100644
index 65d32c734e7745..00000000000000
--- a/.github/workflows/build_msi.yml
+++ /dev/null
@@ -1,40 +0,0 @@
-name: TestsMSI
-
-on:
- workflow_dispatch:
- push:
- branches:
- - 'main'
- - '3.*'
- paths:
- - 'Tools/msi/**'
- - '.github/workflows/build_msi.yml'
- pull_request:
- branches:
- - 'main'
- - '3.*'
- paths:
- - 'Tools/msi/**'
- - '.github/workflows/build_msi.yml'
-
-permissions:
- contents: read
-
-concurrency:
- group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
- cancel-in-progress: true
-
-jobs:
- build:
- name: Windows Installer
- runs-on: windows-latest
- timeout-minutes: 60
- strategy:
- matrix:
- type: [x86, x64, arm64]
- env:
- IncludeFreethreaded: true
- steps:
- - uses: actions/checkout@v4
- - name: Build CPython installer
- run: .\Tools\msi\build.bat --doc -${{ matrix.type }}
diff --git a/.github/workflows/reusable-windows-msi.yml b/.github/workflows/reusable-windows-msi.yml
new file mode 100644
index 00000000000000..fc34ab7c3eb1f2
--- /dev/null
+++ b/.github/workflows/reusable-windows-msi.yml
@@ -0,0 +1,24 @@
+name: TestsMSI
+
+on:
+ workflow_call:
+ inputs:
+ arch:
+ description: CPU architecture
+ required: true
+ type: string
+
+permissions:
+ contents: read
+
+jobs:
+ build:
+ name: installer for ${{ inputs.arch }}
+ runs-on: windows-latest
+ timeout-minutes: 60
+ env:
+ IncludeFreethreaded: true
+ steps:
+ - uses: actions/checkout@v4
+ - name: Build CPython installer
+ run: .\Tools\msi\build.bat --doc -${{ inputs.arch }}
From e9681211b9ad11d1c1f471c43bc57cac46814779 Mon Sep 17 00:00:00 2001
From: sobolevn
Date: Wed, 24 Jul 2024 15:47:52 +0300
Subject: [PATCH 012/139] gh-122229: Add missing `Py_DECREF` in
`func_get_annotation_dict` (#122230)
---
Objects/funcobject.c | 1 +
1 file changed, 1 insertion(+)
diff --git a/Objects/funcobject.c b/Objects/funcobject.c
index 40211297be20c0..8df0da800980a9 100644
--- a/Objects/funcobject.c
+++ b/Objects/funcobject.c
@@ -547,6 +547,7 @@ func_get_annotation_dict(PyFunctionObject *op)
PyTuple_GET_ITEM(ann_tuple, i + 1));
if (err < 0) {
+ Py_DECREF(ann_dict);
return NULL;
}
}
From 794546fd53dffa1903a2d0fbe8d745889978f5dc Mon Sep 17 00:00:00 2001
From: Brandt Bucher
Date: Wed, 24 Jul 2024 09:16:30 -0700
Subject: [PATCH 013/139] GH-118093: Remove invalidated executors from side
exits (GH-121885)
---
Include/internal/pycore_uop_metadata.h | 2 +-
Python/bytecodes.c | 6 +++-
Python/executor_cases.c.h | 9 +++---
Python/optimizer.c | 38 +++++++++++---------------
4 files changed, 27 insertions(+), 28 deletions(-)
diff --git a/Include/internal/pycore_uop_metadata.h b/Include/internal/pycore_uop_metadata.h
index ea48f9d20607bd..4c18f66d7420af 100644
--- a/Include/internal/pycore_uop_metadata.h
+++ b/Include/internal/pycore_uop_metadata.h
@@ -260,7 +260,7 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = {
[_CHECK_FUNCTION] = HAS_DEOPT_FLAG,
[_INTERNAL_INCREMENT_OPT_COUNTER] = 0,
[_DYNAMIC_EXIT] = HAS_ARG_FLAG | HAS_ESCAPES_FLAG,
- [_START_EXECUTOR] = HAS_DEOPT_FLAG,
+ [_START_EXECUTOR] = 0,
[_FATAL_ERROR] = 0,
[_CHECK_VALIDITY_AND_SET_IP] = HAS_DEOPT_FLAG,
[_DEOPT] = 0,
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index 480045069c2942..9dd7cf37beecf0 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -4624,6 +4624,10 @@ dummy_func(
_PyOpcode_OpName[target->op.code]);
}
#endif
+ if (exit->executor && !exit->executor->vm_data.valid) {
+ exit->temperature = initial_temperature_backoff_counter();
+ Py_CLEAR(exit->executor);
+ }
if (exit->executor == NULL) {
_Py_BackoffCounter temperature = exit->temperature;
if (!backoff_counter_triggers(temperature)) {
@@ -4743,7 +4747,7 @@ dummy_func(
#ifndef _Py_JIT
current_executor = (_PyExecutorObject*)executor;
#endif
- DEOPT_IF(!((_PyExecutorObject *)executor)->vm_data.valid);
+ assert(((_PyExecutorObject *)executor)->vm_data.valid);
}
tier2 op(_FATAL_ERROR, (--)) {
diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h
index e9f73f032bf2a4..2a4428e4a52cf0 100644
--- a/Python/executor_cases.c.h
+++ b/Python/executor_cases.c.h
@@ -4986,6 +4986,10 @@
_PyOpcode_OpName[target->op.code]);
}
#endif
+ if (exit->executor && !exit->executor->vm_data.valid) {
+ exit->temperature = initial_temperature_backoff_counter();
+ Py_CLEAR(exit->executor);
+ }
if (exit->executor == NULL) {
_Py_BackoffCounter temperature = exit->temperature;
if (!backoff_counter_triggers(temperature)) {
@@ -5156,10 +5160,7 @@
#ifndef _Py_JIT
current_executor = (_PyExecutorObject*)executor;
#endif
- if (!((_PyExecutorObject *)executor)->vm_data.valid) {
- UOP_STAT_INC(uopcode, miss);
- JUMP_TO_JUMP_TARGET();
- }
+ assert(((_PyExecutorObject *)executor)->vm_data.valid);
break;
}
diff --git a/Python/optimizer.c b/Python/optimizer.c
index a43eed45f097e7..73316b3587f221 100644
--- a/Python/optimizer.c
+++ b/Python/optimizer.c
@@ -1587,42 +1587,36 @@ _Py_Executors_InvalidateDependency(PyInterpreterState *interp, void *obj, int is
_Py_BloomFilter_Add(&obj_filter, obj);
/* Walk the list of executors */
/* TO DO -- Use a tree to avoid traversing as many objects */
- bool no_memory = false;
PyObject *invalidate = PyList_New(0);
if (invalidate == NULL) {
- PyErr_Clear();
- no_memory = true;
+ goto error;
}
/* Clearing an executor can deallocate others, so we need to make a list of
* executors to invalidate first */
for (_PyExecutorObject *exec = interp->executor_list_head; exec != NULL;) {
assert(exec->vm_data.valid);
_PyExecutorObject *next = exec->vm_data.links.next;
- if (bloom_filter_may_contain(&exec->vm_data.bloom, &obj_filter)) {
- unlink_executor(exec);
- if (no_memory) {
- exec->vm_data.valid = 0;
- } else {
- if (PyList_Append(invalidate, (PyObject *)exec) < 0) {
- PyErr_Clear();
- no_memory = true;
- exec->vm_data.valid = 0;
- }
- }
- if (is_invalidation) {
- OPT_STAT_INC(executors_invalidated);
- }
+ if (bloom_filter_may_contain(&exec->vm_data.bloom, &obj_filter) &&
+ PyList_Append(invalidate, (PyObject *)exec))
+ {
+ goto error;
}
exec = next;
}
- if (invalidate != NULL) {
- for (Py_ssize_t i = 0; i < PyList_GET_SIZE(invalidate); i++) {
- _PyExecutorObject *exec = (_PyExecutorObject *)PyList_GET_ITEM(invalidate, i);
- executor_clear(exec);
+ for (Py_ssize_t i = 0; i < PyList_GET_SIZE(invalidate); i++) {
+ _PyExecutorObject *exec = (_PyExecutorObject *)PyList_GET_ITEM(invalidate, i);
+ executor_clear(exec);
+ if (is_invalidation) {
+ OPT_STAT_INC(executors_invalidated);
}
- Py_DECREF(invalidate);
}
+ Py_DECREF(invalidate);
return;
+error:
+ PyErr_Clear();
+ Py_XDECREF(invalidate);
+ // If we're truly out of memory, wiping out everything is a fine fallback:
+ _Py_Executors_InvalidateAll(interp, is_invalidation);
}
/* Invalidate all executors */
From 9ac606080a0074cdf7589d9b7c9413a73e0ddf37 Mon Sep 17 00:00:00 2001
From: Irit Katriel <1055913+iritkatriel@users.noreply.github.com>
Date: Wed, 24 Jul 2024 17:22:18 +0100
Subject: [PATCH 014/139] gh-121404: extract compiler_lookup_arg out of
compiler_make_closure (#122181)
---
Python/compile.c | 90 ++++++++++++++++++++++++++----------------------
1 file changed, 49 insertions(+), 41 deletions(-)
diff --git a/Python/compile.c b/Python/compile.c
index c55e64fa863d03..87a75487a9aaa5 100644
--- a/Python/compile.c
+++ b/Python/compile.c
@@ -1611,29 +1611,30 @@ compiler_mod(struct compiler *c, mod_ty mod)
static int
compiler_get_ref_type(struct compiler *c, PyObject *name)
{
- int scope;
if (c->u->u_scope_type == COMPILER_SCOPE_CLASS &&
(_PyUnicode_EqualToASCIIString(name, "__class__") ||
_PyUnicode_EqualToASCIIString(name, "__classdict__"))) {
return CELL;
}
PySTEntryObject *ste = SYMTABLE_ENTRY(c);
- scope = _PyST_GetScope(ste, name);
+ int scope = _PyST_GetScope(ste, name);
if (scope == 0) {
PyErr_Format(PyExc_SystemError,
"_PyST_GetScope(name=%R) failed: "
"unknown scope in unit %S (%R); "
- "symbols: %R; locals: %R; globals: %R",
+ "symbols: %R; locals: %R; "
+ "globals: %R",
name,
c->u->u_metadata.u_name, ste->ste_id,
- ste->ste_symbols, c->u->u_metadata.u_varnames, c->u->u_metadata.u_names);
+ ste->ste_symbols, c->u->u_metadata.u_varnames,
+ c->u->u_metadata.u_names);
return ERROR;
}
return scope;
}
static int
-compiler_lookup_arg(PyObject *dict, PyObject *name)
+dict_lookup_arg(PyObject *dict, PyObject *name)
{
PyObject *v = PyDict_GetItemWithError(dict, name);
if (v == NULL) {
@@ -1642,6 +1643,45 @@ compiler_lookup_arg(PyObject *dict, PyObject *name)
return PyLong_AS_LONG(v);
}
+static int
+compiler_lookup_arg(struct compiler *c, PyCodeObject *co, PyObject *name)
+{
+ /* Special case: If a class contains a method with a
+ * free variable that has the same name as a method,
+ * the name will be considered free *and* local in the
+ * class. It should be handled by the closure, as
+ * well as by the normal name lookup logic.
+ */
+ int reftype = compiler_get_ref_type(c, name);
+ if (reftype == -1) {
+ return ERROR;
+ }
+ int arg;
+ if (reftype == CELL) {
+ arg = dict_lookup_arg(c->u->u_metadata.u_cellvars, name);
+ }
+ else {
+ arg = dict_lookup_arg(c->u->u_metadata.u_freevars, name);
+ }
+ if (arg == -1) {
+ PyObject *freevars = _PyCode_GetFreevars(co);
+ if (freevars == NULL) {
+ PyErr_Clear();
+ }
+ PyErr_Format(PyExc_SystemError,
+ "compiler_lookup_arg(name=%R) with reftype=%d failed in %S; "
+ "freevars of code %S: %R",
+ name,
+ reftype,
+ c->u->u_metadata.u_name,
+ co->co_name,
+ freevars);
+ Py_DECREF(freevars);
+ return ERROR;
+ }
+ return arg;
+}
+
static int
compiler_make_closure(struct compiler *c, location loc,
PyCodeObject *co, Py_ssize_t flags)
@@ -1653,40 +1693,8 @@ compiler_make_closure(struct compiler *c, location loc,
LOAD_DEREF but LOAD_CLOSURE is needed.
*/
PyObject *name = PyTuple_GET_ITEM(co->co_localsplusnames, i);
-
- /* Special case: If a class contains a method with a
- free variable that has the same name as a method,
- the name will be considered free *and* local in the
- class. It should be handled by the closure, as
- well as by the normal name lookup logic.
- */
- int reftype = compiler_get_ref_type(c, name);
- if (reftype == -1) {
- return ERROR;
- }
- int arg;
- if (reftype == CELL) {
- arg = compiler_lookup_arg(c->u->u_metadata.u_cellvars, name);
- }
- else {
- arg = compiler_lookup_arg(c->u->u_metadata.u_freevars, name);
- }
- if (arg == -1) {
- PyObject *freevars = _PyCode_GetFreevars(co);
- if (freevars == NULL) {
- PyErr_Clear();
- }
- PyErr_Format(PyExc_SystemError,
- "compiler_lookup_arg(name=%R) with reftype=%d failed in %S; "
- "freevars of code %S: %R",
- name,
- reftype,
- c->u->u_metadata.u_name,
- co->co_name,
- freevars);
- Py_DECREF(freevars);
- return ERROR;
- }
+ int arg = compiler_lookup_arg(c, co, name);
+ RETURN_IF_ERROR(arg);
ADDOP_I(c, loc, LOAD_CLOSURE, arg);
}
flags |= MAKE_FUNCTION_CLOSURE;
@@ -2460,7 +2468,7 @@ compiler_class_body(struct compiler *c, stmt_ty s, int firstlineno)
/* Set __classdictcell__ if necessary */
if (SYMTABLE_ENTRY(c)->ste_needs_classdict) {
/* Store __classdictcell__ into class namespace */
- int i = compiler_lookup_arg(c->u->u_metadata.u_cellvars, &_Py_ID(__classdict__));
+ int i = dict_lookup_arg(c->u->u_metadata.u_cellvars, &_Py_ID(__classdict__));
if (i < 0) {
compiler_exit_scope(c);
return ERROR;
@@ -2474,7 +2482,7 @@ compiler_class_body(struct compiler *c, stmt_ty s, int firstlineno)
/* Return __classcell__ if it is referenced, otherwise return None */
if (SYMTABLE_ENTRY(c)->ste_needs_class_closure) {
/* Store __classcell__ into class namespace & return it */
- int i = compiler_lookup_arg(c->u->u_metadata.u_cellvars, &_Py_ID(__class__));
+ int i = dict_lookup_arg(c->u->u_metadata.u_cellvars, &_Py_ID(__class__));
if (i < 0) {
compiler_exit_scope(c);
return ERROR;
From 5592399313c963c110280a7c98de974889e1d353 Mon Sep 17 00:00:00 2001
From: Dino Viehland
Date: Wed, 24 Jul 2024 10:58:28 -0700
Subject: [PATCH 015/139] gh-122208: Don't deliver PyDict_EVENT_ADDED until it
can't fail (#122207)
Don't deliver PyDict_EVENT_ADDED until it can't fail
---
...-07-23-23-59-04.gh-issue-122208.z8KHsY.rst | 1 +
Objects/dictobject.c | 19 ++++++++-----------
2 files changed, 9 insertions(+), 11 deletions(-)
create mode 100644 Misc/NEWS.d/next/Core and Builtins/2024-07-23-23-59-04.gh-issue-122208.z8KHsY.rst
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-07-23-23-59-04.gh-issue-122208.z8KHsY.rst b/Misc/NEWS.d/next/Core and Builtins/2024-07-23-23-59-04.gh-issue-122208.z8KHsY.rst
new file mode 100644
index 00000000000000..e4a89d137ede0e
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-07-23-23-59-04.gh-issue-122208.z8KHsY.rst
@@ -0,0 +1 @@
+Dictionary watchers now only deliver the PyDict_EVENT_ADDED event when the insertion is in a known good state to succeed.
diff --git a/Objects/dictobject.c b/Objects/dictobject.c
index ee88576cc77dec..6a16a04102a6c0 100644
--- a/Objects/dictobject.c
+++ b/Objects/dictobject.c
@@ -1627,6 +1627,10 @@ insert_combined_dict(PyInterpreterState *interp, PyDictObject *mp,
}
}
+ uint64_t new_version = _PyDict_NotifyEvent(
+ interp, PyDict_EVENT_ADDED, mp, key, value);
+ mp->ma_keys->dk_version = 0;
+
Py_ssize_t hashpos = find_empty_slot(mp->ma_keys, hash);
dictkeys_set_index(mp->ma_keys, hashpos, mp->ma_keys->dk_nentries);
@@ -1643,6 +1647,7 @@ insert_combined_dict(PyInterpreterState *interp, PyDictObject *mp,
STORE_VALUE(ep, value);
STORE_HASH(ep, hash);
}
+ mp->ma_version_tag = new_version;
STORE_KEYS_USABLE(mp->ma_keys, mp->ma_keys->dk_usable - 1);
STORE_KEYS_NENTRIES(mp->ma_keys, mp->ma_keys->dk_nentries + 1);
assert(mp->ma_keys->dk_usable >= 0);
@@ -1746,15 +1751,11 @@ insertdict(PyInterpreterState *interp, PyDictObject *mp,
if (ix == DKIX_EMPTY) {
assert(!_PyDict_HasSplitTable(mp));
- uint64_t new_version = _PyDict_NotifyEvent(
- interp, PyDict_EVENT_ADDED, mp, key, value);
/* Insert into new slot. */
- mp->ma_keys->dk_version = 0;
assert(old_value == NULL);
if (insert_combined_dict(interp, mp, hash, key, value) < 0) {
goto Fail;
}
- mp->ma_version_tag = new_version;
STORE_USED(mp, mp->ma_used + 1);
ASSERT_CONSISTENT(mp);
return 0;
@@ -1795,9 +1796,6 @@ insert_to_emptydict(PyInterpreterState *interp, PyDictObject *mp,
assert(mp->ma_keys == Py_EMPTY_KEYS);
ASSERT_DICT_LOCKED(mp);
- uint64_t new_version = _PyDict_NotifyEvent(
- interp, PyDict_EVENT_ADDED, mp, key, value);
-
int unicode = PyUnicode_CheckExact(key);
PyDictKeysObject *newkeys = new_keys_object(
interp, PyDict_LOG_MINSIZE, unicode);
@@ -1806,6 +1804,9 @@ insert_to_emptydict(PyInterpreterState *interp, PyDictObject *mp,
Py_DECREF(value);
return -1;
}
+ uint64_t new_version = _PyDict_NotifyEvent(
+ interp, PyDict_EVENT_ADDED, mp, key, value);
+
/* We don't decref Py_EMPTY_KEYS here because it is immortal. */
assert(mp->ma_values == NULL);
@@ -4199,9 +4200,6 @@ dict_setdefault_ref_lock_held(PyObject *d, PyObject *key, PyObject *default_valu
if (ix == DKIX_EMPTY) {
assert(!_PyDict_HasSplitTable(mp));
- uint64_t new_version = _PyDict_NotifyEvent(
- interp, PyDict_EVENT_ADDED, mp, key, default_value);
- mp->ma_keys->dk_version = 0;
value = default_value;
if (insert_combined_dict(interp, mp, hash, Py_NewRef(key), Py_NewRef(value)) < 0) {
@@ -4214,7 +4212,6 @@ dict_setdefault_ref_lock_held(PyObject *d, PyObject *key, PyObject *default_valu
MAINTAIN_TRACKING(mp, key, value);
STORE_USED(mp, mp->ma_used + 1);
- mp->ma_version_tag = new_version;
assert(mp->ma_keys->dk_usable >= 0);
ASSERT_CONSISTENT(mp);
if (result) {
From 9b4fe9b718f27352ba0c1cf1184f5b90d77d7df4 Mon Sep 17 00:00:00 2001
From: Serhiy Storchaka
Date: Thu, 25 Jul 2024 09:57:22 +0300
Subject: [PATCH 016/139] gh-122191: Fix test_warnings failure if run with
-Werror (GH-122222)
__spec__.loader is now required in the module globals (see gh-86298).
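As a rough illustration of that requirement, here is a minimal module_globals shape that satisfies warn_explicit(); the module name 'foobar' and the DemoLoader class are invented for the example:

    # Sketch: warn_explicit() now expects __spec__ (with a loader) in
    # module_globals, not just __loader__.
    import importlib.machinery
    import warnings

    class DemoLoader:
        def get_source(self, fullname):
            return "x = 1\n"   # source text used to render the warning line

    loader = DemoLoader()
    spec = importlib.machinery.ModuleSpec("foobar", loader)
    warnings.warn_explicit(
        "demo warning", UserWarning, "bar", 1,
        module_globals={"__loader__": loader,
                        "__spec__": spec,
                        "__name__": "foobar"})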
---
Lib/test/test_warnings/__init__.py | 36 +++++++++++++++++++-----------
1 file changed, 23 insertions(+), 13 deletions(-)
diff --git a/Lib/test/test_warnings/__init__.py b/Lib/test/test_warnings/__init__.py
index 7d04371c94abda..95515a4b694315 100644
--- a/Lib/test/test_warnings/__init__.py
+++ b/Lib/test/test_warnings/__init__.py
@@ -1,6 +1,7 @@
from contextlib import contextmanager
import linecache
import os
+import importlib
import inspect
from io import StringIO
import re
@@ -887,37 +888,46 @@ def test_issue31285(self):
# warn_explicit() should neither raise a SystemError nor cause an
# assertion failure, in case the return value of get_source() has a
# bad splitlines() method.
- def get_bad_loader(splitlines_ret_val):
+ get_source_called = []
+ def get_module_globals(*, splitlines_ret_val):
+ class BadSource(str):
+ def splitlines(self):
+ return splitlines_ret_val
+
class BadLoader:
def get_source(self, fullname):
- class BadSource(str):
- def splitlines(self):
- return splitlines_ret_val
+ get_source_called.append(splitlines_ret_val)
return BadSource('spam')
- return BadLoader()
+
+ loader = BadLoader()
+ spec = importlib.machinery.ModuleSpec('foobar', loader)
+ return {'__loader__': loader,
+ '__spec__': spec,
+ '__name__': 'foobar'}
+
wmod = self.module
with original_warnings.catch_warnings(module=wmod):
wmod.filterwarnings('default', category=UserWarning)
+ linecache.clearcache()
with support.captured_stderr() as stderr:
wmod.warn_explicit(
'foo', UserWarning, 'bar', 1,
- module_globals={'__loader__': get_bad_loader(42),
- '__name__': 'foobar'})
+ module_globals=get_module_globals(splitlines_ret_val=42))
self.assertIn('UserWarning: foo', stderr.getvalue())
+ self.assertEqual(get_source_called, [42])
- show = wmod._showwarnmsg
- try:
+ linecache.clearcache()
+ with support.swap_attr(wmod, '_showwarnmsg', None):
del wmod._showwarnmsg
with support.captured_stderr() as stderr:
wmod.warn_explicit(
'eggs', UserWarning, 'bar', 1,
- module_globals={'__loader__': get_bad_loader([42]),
- '__name__': 'foobar'})
+ module_globals=get_module_globals(splitlines_ret_val=[42]))
self.assertIn('UserWarning: eggs', stderr.getvalue())
- finally:
- wmod._showwarnmsg = show
+ self.assertEqual(get_source_called, [42, [42]])
+ linecache.clearcache()
@support.cpython_only
def test_issue31411(self):
From a3327dbfd4db9e5ad1ca514963d503abbbbfede7 Mon Sep 17 00:00:00 2001
From: Mikołaj Kuranowski
Date: Thu, 25 Jul 2024 09:04:47 +0200
Subject: [PATCH 017/139] gh-113785: csv: fields starting with escapechar are
not quoted (GH-122110)
---
Lib/test/test_csv.py | 4 ++++
.../Library/2024-07-22-08-14-04.gh-issue-113785.6B_KNB.rst | 1 +
Modules/_csv.c | 1 -
3 files changed, 5 insertions(+), 1 deletion(-)
create mode 100644 Misc/NEWS.d/next/Library/2024-07-22-08-14-04.gh-issue-113785.6B_KNB.rst
diff --git a/Lib/test/test_csv.py b/Lib/test/test_csv.py
index d74ab7e016f78c..c718ee1203cbe0 100644
--- a/Lib/test/test_csv.py
+++ b/Lib/test/test_csv.py
@@ -454,6 +454,10 @@ def test_read_quoting(self):
quoting=csv.QUOTE_STRINGS)
self._read_test(['1,@,3,@,5'], [['1', ',3,', '5']], quotechar='@')
self._read_test(['1,\0,3,\0,5'], [['1', ',3,', '5']], quotechar='\0')
+ self._read_test(['1\\.5,\\.5,.5'], [[1.5, 0.5, 0.5]],
+ quoting=csv.QUOTE_NONNUMERIC, escapechar='\\')
+ self._read_test(['1\\.5,\\.5,"\\.5"'], [[1.5, 0.5, ".5"]],
+ quoting=csv.QUOTE_STRINGS, escapechar='\\')
def test_read_skipinitialspace(self):
self._read_test(['no space, space, spaces,\ttab'],
diff --git a/Misc/NEWS.d/next/Library/2024-07-22-08-14-04.gh-issue-113785.6B_KNB.rst b/Misc/NEWS.d/next/Library/2024-07-22-08-14-04.gh-issue-113785.6B_KNB.rst
new file mode 100644
index 00000000000000..89d44a3f79c390
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-07-22-08-14-04.gh-issue-113785.6B_KNB.rst
@@ -0,0 +1 @@
+:mod:`csv` now correctly parses numeric fields (when used with :const:`csv.QUOTE_NONNUMERIC` or :const:`csv.QUOTE_STRINGS`) which start with an escape character.
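An illustrative round trip of the behaviour being fixed, with values mirroring the new test cases:

    # A numeric field whose first character is escaped should still be
    # converted by QUOTE_NONNUMERIC instead of being treated as quoted text.
    import csv
    import io

    reader = csv.reader(io.StringIO('1\\.5,\\.5,.5\n'),
                        quoting=csv.QUOTE_NONNUMERIC, escapechar='\\')
    print(list(reader))   # [[1.5, 0.5, 0.5]] with the fix applied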
diff --git a/Modules/_csv.c b/Modules/_csv.c
index 9964c383b8ad09..737b2c7468e13c 100644
--- a/Modules/_csv.c
+++ b/Modules/_csv.c
@@ -749,7 +749,6 @@ parse_process_char(ReaderObj *self, _csvstate *module_state, Py_UCS4 c)
}
else if (c == dialect->escapechar) {
/* possible escaped character */
- self->unquoted_field = false;
self->state = ESCAPED_CHAR;
}
else if (c == ' ' && dialect->skipinitialspace)
From bb108580dec5d8655ccdfb6c8737b5f64e3366d0 Mon Sep 17 00:00:00 2001
From: Serhiy Storchaka
Date: Thu, 25 Jul 2024 10:12:26 +0300
Subject: [PATCH 018/139] gh-122087: Add tests for ismethoddescriptor() and
isroutine() with partial objects (GH-122219)
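A quick interactive illustration of what the new assertions check; the function name is arbitrary:

    # functools.partial objects now count as routines and as method
    # descriptors, matching the assertions added to test_inspect.
    import functools
    import inspect

    def spam(x):
        return x

    p = functools.partial(spam)
    print(inspect.isroutine(p))            # True
    print(inspect.ismethoddescriptor(p))   # True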
---
Lib/test/test_inspect/test_inspect.py | 3 +++
1 file changed, 3 insertions(+)
diff --git a/Lib/test/test_inspect/test_inspect.py b/Lib/test/test_inspect/test_inspect.py
index 5521528a524762..730c192d1aa260 100644
--- a/Lib/test/test_inspect/test_inspect.py
+++ b/Lib/test/test_inspect/test_inspect.py
@@ -402,6 +402,8 @@ def test_isroutine(self):
self.assertFalse(inspect.isroutine(type))
self.assertFalse(inspect.isroutine(int))
self.assertFalse(inspect.isroutine(type('some_class', (), {})))
+ # partial
+ self.assertTrue(inspect.isroutine(functools.partial(mod.spam)))
def test_isclass(self):
self.istest(inspect.isclass, 'mod.StupidGit')
@@ -1693,6 +1695,7 @@ def function():
self.assertFalse(inspect.ismethoddescriptor(Owner.static_method))
self.assertFalse(inspect.ismethoddescriptor(function))
self.assertFalse(inspect.ismethoddescriptor(a_lambda))
+ self.assertTrue(inspect.ismethoddescriptor(functools.partial(function)))
def test_descriptor_being_a_class(self):
class MethodDescriptorMeta(type):
From 2f74b709b637cad7a9c18a2d90b0747823f2ff51 Mon Sep 17 00:00:00 2001
From: Sam Gross
Date: Thu, 25 Jul 2024 04:16:53 -0400
Subject: [PATCH 019/139] gh-122187: Avoid TSan reported race in
`run_udp_echo_server` (#122189)
TSan doesn't fully recognize the synchronization via I/O, so retrieve the
socket name earlier and use a different socket for sending the "STOP"
message.
---
Lib/test/test_asyncio/utils.py | 9 +++++++--
1 file changed, 7 insertions(+), 2 deletions(-)
diff --git a/Lib/test/test_asyncio/utils.py b/Lib/test/test_asyncio/utils.py
index dbb8d27c176950..35893ab3118e1e 100644
--- a/Lib/test/test_asyncio/utils.py
+++ b/Lib/test/test_asyncio/utils.py
@@ -301,12 +301,17 @@ def run_udp_echo_server(*, host='127.0.0.1', port=0):
family, type, proto, _, sockaddr = addr_info[0]
sock = socket.socket(family, type, proto)
sock.bind((host, port))
+ sockname = sock.getsockname()
thread = threading.Thread(target=lambda: echo_datagrams(sock))
thread.start()
try:
- yield sock.getsockname()
+ yield sockname
finally:
- sock.sendto(b'STOP', sock.getsockname())
+ # gh-122187: use a separate socket to send the stop message to avoid
+ # TSan reported race on the same socket.
+ sock2 = socket.socket(family, type, proto)
+ sock2.sendto(b'STOP', sockname)
+ sock2.close()
thread.join()
From ca0f7c447c83503bd760dc2eb6d1ea4b3558f8e9 Mon Sep 17 00:00:00 2001
From: Nate Ohlson
Date: Thu, 25 Jul 2024 04:35:23 -0400
Subject: [PATCH 020/139] gh-112301: Make the fortify source option check use
 -Werror (gh-122141)
---
configure | 14 +++++++-------
configure.ac | 2 +-
2 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/configure b/configure
index 7b3dfa71a2a192..52988f77f6d926 100755
--- a/configure
+++ b/configure
@@ -9788,13 +9788,13 @@ if test "$enable_slower_safety" = "yes"
then
{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether C compiler accepts -D_FORTIFY_SOURCE=3" >&5
printf %s "checking whether C compiler accepts -D_FORTIFY_SOURCE=3... " >&6; }
-if test ${ax_cv_check_cflags___D_FORTIFY_SOURCE_3+y}
+if test ${ax_cv_check_cflags__Werror__D_FORTIFY_SOURCE_3+y}
then :
printf %s "(cached) " >&6
else $as_nop
ax_check_save_flags=$CFLAGS
- CFLAGS="$CFLAGS -D_FORTIFY_SOURCE=3"
+ CFLAGS="$CFLAGS -Werror -D_FORTIFY_SOURCE=3"
cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h. */
@@ -9808,16 +9808,16 @@ main (void)
_ACEOF
if ac_fn_c_try_compile "$LINENO"
then :
- ax_cv_check_cflags___D_FORTIFY_SOURCE_3=yes
+ ax_cv_check_cflags__Werror__D_FORTIFY_SOURCE_3=yes
else $as_nop
- ax_cv_check_cflags___D_FORTIFY_SOURCE_3=no
+ ax_cv_check_cflags__Werror__D_FORTIFY_SOURCE_3=no
fi
rm -f core conftest.err conftest.$ac_objext conftest.beam conftest.$ac_ext
CFLAGS=$ax_check_save_flags
fi
-{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ax_cv_check_cflags___D_FORTIFY_SOURCE_3" >&5
-printf "%s\n" "$ax_cv_check_cflags___D_FORTIFY_SOURCE_3" >&6; }
-if test "x$ax_cv_check_cflags___D_FORTIFY_SOURCE_3" = xyes
+{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $ax_cv_check_cflags__Werror__D_FORTIFY_SOURCE_3" >&5
+printf "%s\n" "$ax_cv_check_cflags__Werror__D_FORTIFY_SOURCE_3" >&6; }
+if test "x$ax_cv_check_cflags__Werror__D_FORTIFY_SOURCE_3" = xyes
then :
BASECFLAGS="$BASECFLAGS -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=3"
else $as_nop
diff --git a/configure.ac b/configure.ac
index 1275c199a7cf1c..5bde6803cd5a7b 100644
--- a/configure.ac
+++ b/configure.ac
@@ -2519,7 +2519,7 @@ AC_MSG_RESULT([$enable_slower_safety])
if test "$enable_slower_safety" = "yes"
then
- AX_CHECK_COMPILE_FLAG([-D_FORTIFY_SOURCE=3], [BASECFLAGS="$BASECFLAGS -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=3"], [AC_MSG_WARN([-D_FORTIFY_SOURCE=3 not supported])])
+ AX_CHECK_COMPILE_FLAG([-D_FORTIFY_SOURCE=3], [BASECFLAGS="$BASECFLAGS -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=3"], [AC_MSG_WARN([-D_FORTIFY_SOURCE=3 not supported])], [-Werror])
fi
case $GCC in
From dc07f65a53baf60d9857186294d3d7ba92d5606d Mon Sep 17 00:00:00 2001
From: Serhiy Storchaka
Date: Thu, 25 Jul 2024 11:45:19 +0300
Subject: [PATCH 021/139] gh-82951: Fix serializing by name in pickle protocols
< 4 (GH-122149)
Serializing objects with complex __qualname__ (such as unbound methods and
nested classes) by name no longer involves serializing parent objects by value
in pickle protocols < 4.
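A small sketch of what this means for callers, assuming Outer is importable from its module; exact opcode streams vary by protocol:

    # A nested class round-trips by name at every protocol; below protocol 4
    # the by-name form is now a chain of getattr() calls rather than a
    # pickled copy of the enclosing object.
    import pickle
    import pickletools

    class Outer:
        class Inner:
            pass

    for proto in range(pickle.HIGHEST_PROTOCOL + 1):
        data = pickle.dumps(Outer.Inner, proto)
        assert pickle.loads(data) is Outer.Inner

    pickletools.dis(pickle.dumps(Outer.Inner, 2))   # inspect the opcodes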
---
Lib/pickle.py | 40 ++++++++++----
Lib/test/pickletester.py | 12 +++++
...4-07-23-09-14-44.gh-issue-82951.-F5p5A.rst | 3 ++
Modules/_pickle.c | 53 +++++++++++++------
4 files changed, 82 insertions(+), 26 deletions(-)
create mode 100644 Misc/NEWS.d/next/Library/2024-07-23-09-14-44.gh-issue-82951.-F5p5A.rst
diff --git a/Lib/pickle.py b/Lib/pickle.py
index 115bd893ca1a38..2d764980cdf7b2 100644
--- a/Lib/pickle.py
+++ b/Lib/pickle.py
@@ -1110,11 +1110,35 @@ def save_global(self, obj, name=None):
self.save(module_name)
self.save(name)
write(STACK_GLOBAL)
- elif parent is not module:
- self.save_reduce(getattr, (parent, lastname))
- elif self.proto >= 3:
- write(GLOBAL + bytes(module_name, "utf-8") + b'\n' +
- bytes(name, "utf-8") + b'\n')
+ elif '.' in name:
+ # In protocol < 4, objects with multi-part __qualname__
+ # are represented as
+ # getattr(getattr(..., attrname1), attrname2).
+ dotted_path = name.split('.')
+ name = dotted_path.pop(0)
+ save = self.save
+ for attrname in dotted_path:
+ save(getattr)
+ if self.proto < 2:
+ write(MARK)
+ self._save_toplevel_by_name(module_name, name)
+ for attrname in dotted_path:
+ save(attrname)
+ if self.proto < 2:
+ write(TUPLE)
+ else:
+ write(TUPLE2)
+ write(REDUCE)
+ else:
+ self._save_toplevel_by_name(module_name, name)
+
+ self.memoize(obj)
+
+ def _save_toplevel_by_name(self, module_name, name):
+ if self.proto >= 3:
+ # Non-ASCII identifiers are supported only with protocols >= 3.
+ self.write(GLOBAL + bytes(module_name, "utf-8") + b'\n' +
+ bytes(name, "utf-8") + b'\n')
else:
if self.fix_imports:
r_name_mapping = _compat_pickle.REVERSE_NAME_MAPPING
@@ -1124,15 +1148,13 @@ def save_global(self, obj, name=None):
elif module_name in r_import_mapping:
module_name = r_import_mapping[module_name]
try:
- write(GLOBAL + bytes(module_name, "ascii") + b'\n' +
- bytes(name, "ascii") + b'\n')
+ self.write(GLOBAL + bytes(module_name, "ascii") + b'\n' +
+ bytes(name, "ascii") + b'\n')
except UnicodeEncodeError:
raise PicklingError(
"can't pickle global identifier '%s.%s' using "
"pickle protocol %i" % (module, name, self.proto)) from None
- self.memoize(obj)
-
def save_type(self, obj):
if obj is type(None):
return self.save_reduce(type, (None,), obj=obj)
diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py
index 9922591ce7114a..13663220fc77ea 100644
--- a/Lib/test/pickletester.py
+++ b/Lib/test/pickletester.py
@@ -2818,6 +2818,18 @@ class Recursive:
self.assertIs(unpickled, Recursive)
del Recursive.mod # break reference loop
+ def test_recursive_nested_names2(self):
+ global Recursive
+ class Recursive:
+ pass
+ Recursive.ref = Recursive
+ Recursive.__qualname__ = 'Recursive.ref'
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ with self.subTest(proto=proto):
+ unpickled = self.loads(self.dumps(Recursive, proto))
+ self.assertIs(unpickled, Recursive)
+ del Recursive.ref # break reference loop
+
def test_py_methods(self):
global PyMethodsTest
class PyMethodsTest:
diff --git a/Misc/NEWS.d/next/Library/2024-07-23-09-14-44.gh-issue-82951.-F5p5A.rst b/Misc/NEWS.d/next/Library/2024-07-23-09-14-44.gh-issue-82951.-F5p5A.rst
new file mode 100644
index 00000000000000..b3f07889119c9f
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-07-23-09-14-44.gh-issue-82951.-F5p5A.rst
@@ -0,0 +1,3 @@
+Serializing objects with complex ``__qualname__`` (such as unbound methods
+and nested classes) by name no longer involves serializing parent objects by
+value in pickle protocols < 4.
diff --git a/Modules/_pickle.c b/Modules/_pickle.c
index 7eebe922c93ca1..861363b68c20c5 100644
--- a/Modules/_pickle.c
+++ b/Modules/_pickle.c
@@ -3592,7 +3592,6 @@ save_global(PickleState *st, PicklerObject *self, PyObject *obj,
PyObject *module = NULL;
PyObject *parent = NULL;
PyObject *dotted_path = NULL;
- PyObject *lastname = NULL;
PyObject *cls;
int status = 0;
@@ -3633,10 +3632,7 @@ save_global(PickleState *st, PicklerObject *self, PyObject *obj,
obj, module_name);
goto error;
}
- lastname = Py_NewRef(PyList_GET_ITEM(dotted_path,
- PyList_GET_SIZE(dotted_path) - 1));
cls = get_deep_attribute(module, dotted_path, &parent);
- Py_CLEAR(dotted_path);
if (cls == NULL) {
PyErr_Format(st->PicklingError,
"Can't pickle %R: attribute lookup %S on %S failed",
@@ -3724,7 +3720,10 @@ save_global(PickleState *st, PicklerObject *self, PyObject *obj,
else {
gen_global:
if (parent == module) {
- Py_SETREF(global_name, Py_NewRef(lastname));
+ Py_SETREF(global_name,
+ Py_NewRef(PyList_GET_ITEM(dotted_path,
+ PyList_GET_SIZE(dotted_path) - 1)));
+ Py_CLEAR(dotted_path);
}
if (self->proto >= 4) {
const char stack_global_op = STACK_GLOBAL;
@@ -3737,20 +3736,30 @@ save_global(PickleState *st, PicklerObject *self, PyObject *obj,
if (_Pickler_Write(self, &stack_global_op, 1) < 0)
goto error;
}
- else if (parent != module) {
- PyObject *reduce_value = Py_BuildValue("(O(OO))",
- st->getattr, parent, lastname);
- if (reduce_value == NULL)
- goto error;
- status = save_reduce(st, self, reduce_value, NULL);
- Py_DECREF(reduce_value);
- if (status < 0)
- goto error;
- }
else {
/* Generate a normal global opcode if we are using a pickle
protocol < 4, or if the object is not registered in the
- extension registry. */
+ extension registry.
+
+ Objects with multi-part __qualname__ are represented as
+ getattr(getattr(..., attrname1), attrname2). */
+ const char mark_op = MARK;
+ const char tupletwo_op = (self->proto < 2) ? TUPLE : TUPLE2;
+ const char reduce_op = REDUCE;
+ Py_ssize_t i;
+ if (dotted_path) {
+ if (PyList_GET_SIZE(dotted_path) > 1) {
+ Py_SETREF(global_name, Py_NewRef(PyList_GET_ITEM(dotted_path, 0)));
+ }
+ for (i = 1; i < PyList_GET_SIZE(dotted_path); i++) {
+ if (save(st, self, st->getattr, 0) < 0 ||
+ (self->proto < 2 && _Pickler_Write(self, &mark_op, 1) < 0))
+ {
+ goto error;
+ }
+ }
+ }
+
PyObject *encoded;
PyObject *(*unicode_encoder)(PyObject *);
@@ -3812,6 +3821,17 @@ save_global(PickleState *st, PicklerObject *self, PyObject *obj,
Py_DECREF(encoded);
if (_Pickler_Write(self, "\n", 1) < 0)
goto error;
+
+ if (dotted_path) {
+ for (i = 1; i < PyList_GET_SIZE(dotted_path); i++) {
+ if (save(st, self, PyList_GET_ITEM(dotted_path, i), 0) < 0 ||
+ _Pickler_Write(self, &tupletwo_op, 1) < 0 ||
+ _Pickler_Write(self, &reduce_op, 1) < 0)
+ {
+ goto error;
+ }
+ }
+ }
}
/* Memoize the object. */
if (memo_put(st, self, obj) < 0)
@@ -3827,7 +3847,6 @@ save_global(PickleState *st, PicklerObject *self, PyObject *obj,
Py_XDECREF(module);
Py_XDECREF(parent);
Py_XDECREF(dotted_path);
- Py_XDECREF(lastname);
return status;
}
From 6c09b8de5c67406113e8d082e05c9587e35a852a Mon Sep 17 00:00:00 2001
From: Serhiy Storchaka
Date: Thu, 25 Jul 2024 14:04:22 +0300
Subject: [PATCH 022/139] gh-122270: Fix typos in the Py_DEBUG macro name
(GH-122271)
---
Parser/pegen.c | 2 +-
Tools/peg_generator/peg_extension/peg_extension.c | 6 +++---
2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/Parser/pegen.c b/Parser/pegen.c
index 6efb5477c7b80f..ac428be0958bdf 100644
--- a/Parser/pegen.c
+++ b/Parser/pegen.c
@@ -341,7 +341,7 @@ _PyPegen_is_memoized(Parser *p, int type, void *pres)
for (Memo *m = t->memo; m != NULL; m = m->next) {
if (m->type == type) {
-#if defined(PY_DEBUG)
+#if defined(Py_DEBUG)
if (0 <= type && type < NSTATISTICS) {
long count = m->mark - p->mark;
// A memoized negative result counts for one.
diff --git a/Tools/peg_generator/peg_extension/peg_extension.c b/Tools/peg_generator/peg_extension/peg_extension.c
index b081240ffff017..1587d53d59472e 100644
--- a/Tools/peg_generator/peg_extension/peg_extension.c
+++ b/Tools/peg_generator/peg_extension/peg_extension.c
@@ -108,7 +108,7 @@ parse_string(PyObject *self, PyObject *args, PyObject *kwds)
static PyObject *
clear_memo_stats(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(ignored))
{
-#if defined(PY_DEBUG)
+#if defined(Py_DEBUG)
_PyPegen_clear_memo_statistics();
#endif
Py_RETURN_NONE;
@@ -117,7 +117,7 @@ clear_memo_stats(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(ignored))
static PyObject *
get_memo_stats(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(ignored))
{
-#if defined(PY_DEBUG)
+#if defined(Py_DEBUG)
return _PyPegen_get_memo_statistics();
#else
Py_RETURN_NONE;
@@ -128,7 +128,7 @@ get_memo_stats(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(ignored))
static PyObject *
dump_memo_stats(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(ignored))
{
-#if defined(PY_DEBUG)
+#if defined(Py_DEBUG)
PyObject *list = _PyPegen_get_memo_statistics();
if (list == NULL) {
return NULL;
From 070f1e2e5b9b31ee3e7a1af2e30d7e3a66040b17 Mon Sep 17 00:00:00 2001
From: AN Long
Date: Thu, 25 Jul 2024 19:56:04 +0800
Subject: [PATCH 023/139] gh-121913: Use str(exc) instead of exc.strerror in
`asyncio.base_events` (#122269)
---
Lib/asyncio/base_events.py | 5 ++---
1 file changed, 2 insertions(+), 3 deletions(-)
diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py
index f0e690b61a73dd..e4a39f4d345c79 100644
--- a/Lib/asyncio/base_events.py
+++ b/Lib/asyncio/base_events.py
@@ -1028,8 +1028,7 @@ async def _connect_sock(self, exceptions, addr_info, local_addr_infos=None):
except OSError as exc:
msg = (
f'error while attempting to bind on '
- f'address {laddr!r}: '
- f'{exc.strerror.lower()}'
+ f'address {laddr!r}: {str(exc).lower()}'
)
exc = OSError(exc.errno, msg)
my_exceptions.append(exc)
@@ -1599,7 +1598,7 @@ async def create_server(
except OSError as err:
msg = ('error while attempting '
'to bind on address %r: %s'
- % (sa, err.strerror.lower()))
+ % (sa, str(err).lower()))
if err.errno == errno.EADDRNOTAVAIL:
# Assume the family is not enabled (bpo-30945)
sockets.pop()
From 3998554bb05f5ce18e8a66492d23d094a2299442 Mon Sep 17 00:00:00 2001
From: AN Long
Date: Thu, 25 Jul 2024 20:03:39 +0800
Subject: [PATCH 024/139] gh-121275: Fix test_logging and test_smtplib with
 Python build without IPv6 support (#121276)
Fix test_logging and test_smtplib with Python build without IPv6 support
---
Lib/test/support/smtpd.py | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/Lib/test/support/smtpd.py b/Lib/test/support/smtpd.py
index c2e17cad422861..6537679db9ad24 100755
--- a/Lib/test/support/smtpd.py
+++ b/Lib/test/support/smtpd.py
@@ -633,7 +633,8 @@ def __init__(self, localaddr, remoteaddr,
" be set to True at the same time")
asyncore.dispatcher.__init__(self, map=map)
try:
- gai_results = socket.getaddrinfo(*localaddr,
+ family = 0 if socket.has_ipv6 else socket.AF_INET
+ gai_results = socket.getaddrinfo(*localaddr, family=family,
type=socket.SOCK_STREAM)
self.create_socket(gai_results[0][0], gai_results[0][1])
# try to re-use a server port if possible
From 9bb2e4623f504c44655436eae181d802f544fff9 Mon Sep 17 00:00:00 2001
From: Serhiy Storchaka
Date: Thu, 25 Jul 2024 17:31:57 +0300
Subject: [PATCH 025/139] gh-116322: Fix typo in the #ifdef check (#122268)
---
Objects/moduleobject.c | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Objects/moduleobject.c b/Objects/moduleobject.c
index 73ad9711b6b0fc..efc74dafb5fc73 100644
--- a/Objects/moduleobject.c
+++ b/Objects/moduleobject.c
@@ -251,7 +251,7 @@ _PyModule_CreateInitialized(PyModuleDef* module, int module_api_version)
}
}
m->md_def = module;
-#ifdef Py_GIL_DISABLE
+#ifdef Py_GIL_DISABLED
m->md_gil = Py_MOD_GIL_USED;
#endif
return (PyObject*)m;
From 2e14a52cced9834ed5f7e0665a08055de554360f Mon Sep 17 00:00:00 2001
From: Mark Shannon
Date: Thu, 25 Jul 2024 16:24:29 +0100
Subject: [PATCH 026/139] GH-122160: Remove BUILD_CONST_KEY_MAP opcode.
(GH-122164)
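For reference, a quick way to see how constant-key dict displays compile after the removal; the exact instruction stream depends on the interpreter version:

    # With BUILD_CONST_KEY_MAP gone, a dict display with constant keys is
    # assembled with BUILD_MAP like any other dict display.
    import dis

    dis.dis("{'a': 1, 'b': 2, 'c': 3}")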
---
Doc/library/dis.rst | 9 -
Include/internal/pycore_opcode_metadata.h | 9 +-
Include/internal/pycore_uop_ids.h | 1 -
Include/internal/pycore_uop_metadata.h | 4 -
Include/opcode_ids.h | 149 +++++-----
Lib/_opcode_metadata.py | 149 +++++-----
Lib/importlib/_bootstrap_external.py | 3 +-
Lib/test/test_dis.py | 274 +++++++++---------
...-07-23-11-57-36.gh-issue-122160.HSnrAP.rst | 1 +
Programs/test_frozenmain.h | 22 +-
Python/bytecodes.c | 19 --
Python/compile.c | 63 +---
Python/executor_cases.c.h | 34 ---
Python/generated_cases.c.h | 36 ---
Python/opcode_targets.h | 2 +-
Python/optimizer_cases.c.h | 9 -
16 files changed, 306 insertions(+), 478 deletions(-)
create mode 100644 Misc/NEWS.d/next/Core and Builtins/2024-07-23-11-57-36.gh-issue-122160.HSnrAP.rst
diff --git a/Doc/library/dis.rst b/Doc/library/dis.rst
index 56712e294bbe54..26b13c87181000 100644
--- a/Doc/library/dis.rst
+++ b/Doc/library/dis.rst
@@ -1109,15 +1109,6 @@ iterations of the loop.
empty dictionary pre-sized to hold *count* items.
-.. opcode:: BUILD_CONST_KEY_MAP (count)
-
- The version of :opcode:`BUILD_MAP` specialized for constant keys. Pops the
- top element on the stack which contains a tuple of keys, then starting from
- ``STACK[-2]``, pops *count* values to form values in the built dictionary.
-
- .. versionadded:: 3.6
-
-
.. opcode:: BUILD_STRING (count)
Concatenates *count* strings from the stack and pushes the resulting string
diff --git a/Include/internal/pycore_opcode_metadata.h b/Include/internal/pycore_opcode_metadata.h
index 40e582a5e94c3b..2b6e9bca51c9df 100644
--- a/Include/internal/pycore_opcode_metadata.h
+++ b/Include/internal/pycore_opcode_metadata.h
@@ -65,8 +65,6 @@ int _PyOpcode_num_popped(int opcode, int oparg) {
return 2;
case BINARY_SUBSCR_TUPLE_INT:
return 2;
- case BUILD_CONST_KEY_MAP:
- return 1 + oparg;
case BUILD_LIST:
return oparg;
case BUILD_MAP:
@@ -512,8 +510,6 @@ int _PyOpcode_num_pushed(int opcode, int oparg) {
return 1;
case BINARY_SUBSCR_TUPLE_INT:
return 1;
- case BUILD_CONST_KEY_MAP:
- return 1;
case BUILD_LIST:
return 1;
case BUILD_MAP:
@@ -1004,7 +1000,6 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[264] = {
[BINARY_SUBSCR_LIST_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG },
[BINARY_SUBSCR_STR_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG },
[BINARY_SUBSCR_TUPLE_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG },
- [BUILD_CONST_KEY_MAP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[BUILD_LIST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG },
[BUILD_MAP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[BUILD_SET] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
@@ -1233,7 +1228,6 @@ _PyOpcode_macro_expansion[256] = {
[BINARY_SUBSCR_LIST_INT] = { .nuops = 1, .uops = { { _BINARY_SUBSCR_LIST_INT, 0, 0 } } },
[BINARY_SUBSCR_STR_INT] = { .nuops = 1, .uops = { { _BINARY_SUBSCR_STR_INT, 0, 0 } } },
[BINARY_SUBSCR_TUPLE_INT] = { .nuops = 1, .uops = { { _BINARY_SUBSCR_TUPLE_INT, 0, 0 } } },
- [BUILD_CONST_KEY_MAP] = { .nuops = 1, .uops = { { _BUILD_CONST_KEY_MAP, 0, 0 } } },
[BUILD_LIST] = { .nuops = 1, .uops = { { _BUILD_LIST, 0, 0 } } },
[BUILD_MAP] = { .nuops = 1, .uops = { { _BUILD_MAP, 0, 0 } } },
[BUILD_SET] = { .nuops = 1, .uops = { { _BUILD_SET, 0, 0 } } },
@@ -1409,7 +1403,6 @@ const char *_PyOpcode_OpName[264] = {
[BINARY_SUBSCR_LIST_INT] = "BINARY_SUBSCR_LIST_INT",
[BINARY_SUBSCR_STR_INT] = "BINARY_SUBSCR_STR_INT",
[BINARY_SUBSCR_TUPLE_INT] = "BINARY_SUBSCR_TUPLE_INT",
- [BUILD_CONST_KEY_MAP] = "BUILD_CONST_KEY_MAP",
[BUILD_LIST] = "BUILD_LIST",
[BUILD_MAP] = "BUILD_MAP",
[BUILD_SET] = "BUILD_SET",
@@ -1659,7 +1652,6 @@ const uint8_t _PyOpcode_Deopt[256] = {
[BINARY_SUBSCR_LIST_INT] = BINARY_SUBSCR,
[BINARY_SUBSCR_STR_INT] = BINARY_SUBSCR,
[BINARY_SUBSCR_TUPLE_INT] = BINARY_SUBSCR,
- [BUILD_CONST_KEY_MAP] = BUILD_CONST_KEY_MAP,
[BUILD_LIST] = BUILD_LIST,
[BUILD_MAP] = BUILD_MAP,
[BUILD_SET] = BUILD_SET,
@@ -1859,6 +1851,7 @@ const uint8_t _PyOpcode_Deopt[256] = {
#endif // NEED_OPCODE_METADATA
#define EXTRA_CASES \
+ case 117: \
case 118: \
case 119: \
case 120: \
diff --git a/Include/internal/pycore_uop_ids.h b/Include/internal/pycore_uop_ids.h
index aa7ee7775faeba..fc67da697cb06c 100644
--- a/Include/internal/pycore_uop_ids.h
+++ b/Include/internal/pycore_uop_ids.h
@@ -26,7 +26,6 @@ extern "C" {
#define _BINARY_SUBSCR_LIST_INT BINARY_SUBSCR_LIST_INT
#define _BINARY_SUBSCR_STR_INT BINARY_SUBSCR_STR_INT
#define _BINARY_SUBSCR_TUPLE_INT BINARY_SUBSCR_TUPLE_INT
-#define _BUILD_CONST_KEY_MAP BUILD_CONST_KEY_MAP
#define _BUILD_LIST BUILD_LIST
#define _BUILD_MAP BUILD_MAP
#define _BUILD_SET BUILD_SET
diff --git a/Include/internal/pycore_uop_metadata.h b/Include/internal/pycore_uop_metadata.h
index 4c18f66d7420af..e86bae1d72d1a9 100644
--- a/Include/internal/pycore_uop_metadata.h
+++ b/Include/internal/pycore_uop_metadata.h
@@ -129,7 +129,6 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = {
[_BUILD_SET] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_BUILD_MAP] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_SETUP_ANNOTATIONS] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
- [_BUILD_CONST_KEY_MAP] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_DICT_UPDATE] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_DICT_MERGE] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_MAP_ADD] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
@@ -289,7 +288,6 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = {
[_BINARY_SUBSCR_LIST_INT] = "_BINARY_SUBSCR_LIST_INT",
[_BINARY_SUBSCR_STR_INT] = "_BINARY_SUBSCR_STR_INT",
[_BINARY_SUBSCR_TUPLE_INT] = "_BINARY_SUBSCR_TUPLE_INT",
- [_BUILD_CONST_KEY_MAP] = "_BUILD_CONST_KEY_MAP",
[_BUILD_LIST] = "_BUILD_LIST",
[_BUILD_MAP] = "_BUILD_MAP",
[_BUILD_SET] = "_BUILD_SET",
@@ -746,8 +744,6 @@ int _PyUop_num_popped(int opcode, int oparg)
return oparg*2;
case _SETUP_ANNOTATIONS:
return 0;
- case _BUILD_CONST_KEY_MAP:
- return 1 + oparg;
case _DICT_UPDATE:
return 2 + (oparg - 1);
case _DICT_MERGE:
diff --git a/Include/opcode_ids.h b/Include/opcode_ids.h
index 2ae6e5c1ba51ec..dd9b1ec1674949 100644
--- a/Include/opcode_ids.h
+++ b/Include/opcode_ids.h
@@ -53,81 +53,80 @@ extern "C" {
#define UNARY_NOT 40
#define WITH_EXCEPT_START 41
#define BINARY_OP 42
-#define BUILD_CONST_KEY_MAP 43
-#define BUILD_LIST 44
-#define BUILD_MAP 45
-#define BUILD_SET 46
-#define BUILD_SLICE 47
-#define BUILD_STRING 48
-#define BUILD_TUPLE 49
-#define CALL 50
-#define CALL_FUNCTION_EX 51
-#define CALL_INTRINSIC_1 52
-#define CALL_INTRINSIC_2 53
-#define CALL_KW 54
-#define COMPARE_OP 55
-#define CONTAINS_OP 56
-#define CONVERT_VALUE 57
-#define COPY 58
-#define COPY_FREE_VARS 59
-#define DELETE_ATTR 60
-#define DELETE_DEREF 61
-#define DELETE_FAST 62
-#define DELETE_GLOBAL 63
-#define DELETE_NAME 64
-#define DICT_MERGE 65
-#define DICT_UPDATE 66
-#define ENTER_EXECUTOR 67
-#define EXTENDED_ARG 68
-#define FOR_ITER 69
-#define GET_AWAITABLE 70
-#define IMPORT_FROM 71
-#define IMPORT_NAME 72
-#define IS_OP 73
-#define JUMP_BACKWARD 74
-#define JUMP_BACKWARD_NO_INTERRUPT 75
-#define JUMP_FORWARD 76
-#define LIST_APPEND 77
-#define LIST_EXTEND 78
-#define LOAD_ATTR 79
-#define LOAD_COMMON_CONSTANT 80
-#define LOAD_CONST 81
-#define LOAD_DEREF 82
-#define LOAD_FAST 83
-#define LOAD_FAST_AND_CLEAR 84
-#define LOAD_FAST_CHECK 85
-#define LOAD_FAST_LOAD_FAST 86
-#define LOAD_FROM_DICT_OR_DEREF 87
-#define LOAD_FROM_DICT_OR_GLOBALS 88
-#define LOAD_GLOBAL 89
-#define LOAD_NAME 90
-#define LOAD_SPECIAL 91
-#define LOAD_SUPER_ATTR 92
-#define MAKE_CELL 93
-#define MAP_ADD 94
-#define MATCH_CLASS 95
-#define POP_JUMP_IF_FALSE 96
-#define POP_JUMP_IF_NONE 97
-#define POP_JUMP_IF_NOT_NONE 98
-#define POP_JUMP_IF_TRUE 99
-#define RAISE_VARARGS 100
-#define RERAISE 101
-#define RETURN_CONST 102
-#define SEND 103
-#define SET_ADD 104
-#define SET_FUNCTION_ATTRIBUTE 105
-#define SET_UPDATE 106
-#define STORE_ATTR 107
-#define STORE_DEREF 108
-#define STORE_FAST 109
-#define STORE_FAST_LOAD_FAST 110
-#define STORE_FAST_STORE_FAST 111
-#define STORE_GLOBAL 112
-#define STORE_NAME 113
-#define SWAP 114
-#define UNPACK_EX 115
-#define UNPACK_SEQUENCE 116
-#define YIELD_VALUE 117
+#define BUILD_LIST 43
+#define BUILD_MAP 44
+#define BUILD_SET 45
+#define BUILD_SLICE 46
+#define BUILD_STRING 47
+#define BUILD_TUPLE 48
+#define CALL 49
+#define CALL_FUNCTION_EX 50
+#define CALL_INTRINSIC_1 51
+#define CALL_INTRINSIC_2 52
+#define CALL_KW 53
+#define COMPARE_OP 54
+#define CONTAINS_OP 55
+#define CONVERT_VALUE 56
+#define COPY 57
+#define COPY_FREE_VARS 58
+#define DELETE_ATTR 59
+#define DELETE_DEREF 60
+#define DELETE_FAST 61
+#define DELETE_GLOBAL 62
+#define DELETE_NAME 63
+#define DICT_MERGE 64
+#define DICT_UPDATE 65
+#define ENTER_EXECUTOR 66
+#define EXTENDED_ARG 67
+#define FOR_ITER 68
+#define GET_AWAITABLE 69
+#define IMPORT_FROM 70
+#define IMPORT_NAME 71
+#define IS_OP 72
+#define JUMP_BACKWARD 73
+#define JUMP_BACKWARD_NO_INTERRUPT 74
+#define JUMP_FORWARD 75
+#define LIST_APPEND 76
+#define LIST_EXTEND 77
+#define LOAD_ATTR 78
+#define LOAD_COMMON_CONSTANT 79
+#define LOAD_CONST 80
+#define LOAD_DEREF 81
+#define LOAD_FAST 82
+#define LOAD_FAST_AND_CLEAR 83
+#define LOAD_FAST_CHECK 84
+#define LOAD_FAST_LOAD_FAST 85
+#define LOAD_FROM_DICT_OR_DEREF 86
+#define LOAD_FROM_DICT_OR_GLOBALS 87
+#define LOAD_GLOBAL 88
+#define LOAD_NAME 89
+#define LOAD_SPECIAL 90
+#define LOAD_SUPER_ATTR 91
+#define MAKE_CELL 92
+#define MAP_ADD 93
+#define MATCH_CLASS 94
+#define POP_JUMP_IF_FALSE 95
+#define POP_JUMP_IF_NONE 96
+#define POP_JUMP_IF_NOT_NONE 97
+#define POP_JUMP_IF_TRUE 98
+#define RAISE_VARARGS 99
+#define RERAISE 100
+#define RETURN_CONST 101
+#define SEND 102
+#define SET_ADD 103
+#define SET_FUNCTION_ATTRIBUTE 104
+#define SET_UPDATE 105
+#define STORE_ATTR 106
+#define STORE_DEREF 107
+#define STORE_FAST 108
+#define STORE_FAST_LOAD_FAST 109
+#define STORE_FAST_STORE_FAST 110
+#define STORE_GLOBAL 111
+#define STORE_NAME 112
+#define SWAP 113
+#define UNPACK_EX 114
+#define UNPACK_SEQUENCE 115
+#define YIELD_VALUE 116
#define RESUME 149
#define BINARY_OP_ADD_FLOAT 150
#define BINARY_OP_ADD_INT 151
diff --git a/Lib/_opcode_metadata.py b/Lib/_opcode_metadata.py
index 2d0a5ba46b3e10..4b6057f4119421 100644
--- a/Lib/_opcode_metadata.py
+++ b/Lib/_opcode_metadata.py
@@ -231,81 +231,80 @@
'UNARY_NOT': 40,
'WITH_EXCEPT_START': 41,
'BINARY_OP': 42,
- 'BUILD_CONST_KEY_MAP': 43,
- 'BUILD_LIST': 44,
- 'BUILD_MAP': 45,
- 'BUILD_SET': 46,
- 'BUILD_SLICE': 47,
- 'BUILD_STRING': 48,
- 'BUILD_TUPLE': 49,
- 'CALL': 50,
- 'CALL_FUNCTION_EX': 51,
- 'CALL_INTRINSIC_1': 52,
- 'CALL_INTRINSIC_2': 53,
- 'CALL_KW': 54,
- 'COMPARE_OP': 55,
- 'CONTAINS_OP': 56,
- 'CONVERT_VALUE': 57,
- 'COPY': 58,
- 'COPY_FREE_VARS': 59,
- 'DELETE_ATTR': 60,
- 'DELETE_DEREF': 61,
- 'DELETE_FAST': 62,
- 'DELETE_GLOBAL': 63,
- 'DELETE_NAME': 64,
- 'DICT_MERGE': 65,
- 'DICT_UPDATE': 66,
- 'ENTER_EXECUTOR': 67,
- 'EXTENDED_ARG': 68,
- 'FOR_ITER': 69,
- 'GET_AWAITABLE': 70,
- 'IMPORT_FROM': 71,
- 'IMPORT_NAME': 72,
- 'IS_OP': 73,
- 'JUMP_BACKWARD': 74,
- 'JUMP_BACKWARD_NO_INTERRUPT': 75,
- 'JUMP_FORWARD': 76,
- 'LIST_APPEND': 77,
- 'LIST_EXTEND': 78,
- 'LOAD_ATTR': 79,
- 'LOAD_COMMON_CONSTANT': 80,
- 'LOAD_CONST': 81,
- 'LOAD_DEREF': 82,
- 'LOAD_FAST': 83,
- 'LOAD_FAST_AND_CLEAR': 84,
- 'LOAD_FAST_CHECK': 85,
- 'LOAD_FAST_LOAD_FAST': 86,
- 'LOAD_FROM_DICT_OR_DEREF': 87,
- 'LOAD_FROM_DICT_OR_GLOBALS': 88,
- 'LOAD_GLOBAL': 89,
- 'LOAD_NAME': 90,
- 'LOAD_SPECIAL': 91,
- 'LOAD_SUPER_ATTR': 92,
- 'MAKE_CELL': 93,
- 'MAP_ADD': 94,
- 'MATCH_CLASS': 95,
- 'POP_JUMP_IF_FALSE': 96,
- 'POP_JUMP_IF_NONE': 97,
- 'POP_JUMP_IF_NOT_NONE': 98,
- 'POP_JUMP_IF_TRUE': 99,
- 'RAISE_VARARGS': 100,
- 'RERAISE': 101,
- 'RETURN_CONST': 102,
- 'SEND': 103,
- 'SET_ADD': 104,
- 'SET_FUNCTION_ATTRIBUTE': 105,
- 'SET_UPDATE': 106,
- 'STORE_ATTR': 107,
- 'STORE_DEREF': 108,
- 'STORE_FAST': 109,
- 'STORE_FAST_LOAD_FAST': 110,
- 'STORE_FAST_STORE_FAST': 111,
- 'STORE_GLOBAL': 112,
- 'STORE_NAME': 113,
- 'SWAP': 114,
- 'UNPACK_EX': 115,
- 'UNPACK_SEQUENCE': 116,
- 'YIELD_VALUE': 117,
+ 'BUILD_LIST': 43,
+ 'BUILD_MAP': 44,
+ 'BUILD_SET': 45,
+ 'BUILD_SLICE': 46,
+ 'BUILD_STRING': 47,
+ 'BUILD_TUPLE': 48,
+ 'CALL': 49,
+ 'CALL_FUNCTION_EX': 50,
+ 'CALL_INTRINSIC_1': 51,
+ 'CALL_INTRINSIC_2': 52,
+ 'CALL_KW': 53,
+ 'COMPARE_OP': 54,
+ 'CONTAINS_OP': 55,
+ 'CONVERT_VALUE': 56,
+ 'COPY': 57,
+ 'COPY_FREE_VARS': 58,
+ 'DELETE_ATTR': 59,
+ 'DELETE_DEREF': 60,
+ 'DELETE_FAST': 61,
+ 'DELETE_GLOBAL': 62,
+ 'DELETE_NAME': 63,
+ 'DICT_MERGE': 64,
+ 'DICT_UPDATE': 65,
+ 'ENTER_EXECUTOR': 66,
+ 'EXTENDED_ARG': 67,
+ 'FOR_ITER': 68,
+ 'GET_AWAITABLE': 69,
+ 'IMPORT_FROM': 70,
+ 'IMPORT_NAME': 71,
+ 'IS_OP': 72,
+ 'JUMP_BACKWARD': 73,
+ 'JUMP_BACKWARD_NO_INTERRUPT': 74,
+ 'JUMP_FORWARD': 75,
+ 'LIST_APPEND': 76,
+ 'LIST_EXTEND': 77,
+ 'LOAD_ATTR': 78,
+ 'LOAD_COMMON_CONSTANT': 79,
+ 'LOAD_CONST': 80,
+ 'LOAD_DEREF': 81,
+ 'LOAD_FAST': 82,
+ 'LOAD_FAST_AND_CLEAR': 83,
+ 'LOAD_FAST_CHECK': 84,
+ 'LOAD_FAST_LOAD_FAST': 85,
+ 'LOAD_FROM_DICT_OR_DEREF': 86,
+ 'LOAD_FROM_DICT_OR_GLOBALS': 87,
+ 'LOAD_GLOBAL': 88,
+ 'LOAD_NAME': 89,
+ 'LOAD_SPECIAL': 90,
+ 'LOAD_SUPER_ATTR': 91,
+ 'MAKE_CELL': 92,
+ 'MAP_ADD': 93,
+ 'MATCH_CLASS': 94,
+ 'POP_JUMP_IF_FALSE': 95,
+ 'POP_JUMP_IF_NONE': 96,
+ 'POP_JUMP_IF_NOT_NONE': 97,
+ 'POP_JUMP_IF_TRUE': 98,
+ 'RAISE_VARARGS': 99,
+ 'RERAISE': 100,
+ 'RETURN_CONST': 101,
+ 'SEND': 102,
+ 'SET_ADD': 103,
+ 'SET_FUNCTION_ATTRIBUTE': 104,
+ 'SET_UPDATE': 105,
+ 'STORE_ATTR': 106,
+ 'STORE_DEREF': 107,
+ 'STORE_FAST': 108,
+ 'STORE_FAST_LOAD_FAST': 109,
+ 'STORE_FAST_STORE_FAST': 110,
+ 'STORE_GLOBAL': 111,
+ 'STORE_NAME': 112,
+ 'SWAP': 113,
+ 'UNPACK_EX': 114,
+ 'UNPACK_SEQUENCE': 115,
+ 'YIELD_VALUE': 116,
'INSTRUMENTED_RESUME': 236,
'INSTRUMENTED_END_FOR': 237,
'INSTRUMENTED_END_SEND': 238,
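The Python-level table mirrors the C header: every opcode above the removed one shifts down by one so the numbering stays contiguous. A small sketch, assuming an interpreter built with this patch (the concrete numbers differ on other versions), using the stdlib ``opcode`` module:

    import opcode

    assert "BUILD_CONST_KEY_MAP" not in opcode.opmap
    print(opcode.opmap["BUILD_LIST"])   # expected: 43 after the renumbering
    print(opcode.opname[43])            # reverse lookup: 'BUILD_LIST'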
diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py
index bf14d57b2503ea..2bb44b290e4a84 100644
--- a/Lib/importlib/_bootstrap_external.py
+++ b/Lib/importlib/_bootstrap_external.py
@@ -475,6 +475,7 @@ def _write_atomic(path, data, mode=0o666):
# Python 3.14a1 3600 (Add LOAD_COMMON_CONSTANT)
# Python 3.14a1 3601 (Fix miscompilation of private names in generic classes)
# Python 3.14a1 3602 (Add LOAD_SPECIAL. Remove BEFORE_WITH and BEFORE_ASYNC_WITH)
+# Python 3.14a1 3603 (Remove BUILD_CONST_KEY_MAP)
# Python 3.15 will start with 3650
@@ -491,7 +492,7 @@ def _write_atomic(path, data, mode=0o666):
# Whenever MAGIC_NUMBER is changed, the ranges in the magic_values array
# in PC/launcher.c must also be updated.
-MAGIC_NUMBER = (3602).to_bytes(2, 'little') + b'\r\n'
+MAGIC_NUMBER = (3603).to_bytes(2, 'little') + b'\r\n'
_RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little') # For import.c
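Because the bytecode set changed, the ``.pyc`` magic number is bumped to 3603 so caches compiled with the old opcode are recompiled rather than loaded. A hedged illustration of that staleness check (``importlib.util.MAGIC_NUMBER`` is the running interpreter's value; the cache path below is hypothetical):

    import importlib.util

    def pyc_magic(path):
        # First four bytes of a .pyc file: the magic number written at compile time.
        with open(path, "rb") as f:
            return f.read(4)

    print(importlib.util.MAGIC_NUMBER)                                # b'\x13\x0e\r\n' on a 3603 build
    print(int.from_bytes(importlib.util.MAGIC_NUMBER[:2], "little"))  # 3603 on such a build

    # Mirrors the comparison _bootstrap_external performs before trusting a cache:
    # pyc_magic("__pycache__/mod.cpython-314.pyc") == importlib.util.MAGIC_NUMBER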
diff --git a/Lib/test/test_dis.py b/Lib/test/test_dis.py
index ab1c48f9b25361..c8defde7b99ec7 100644
--- a/Lib/test/test_dis.py
+++ b/Lib/test/test_dis.py
@@ -1605,204 +1605,204 @@ def _prepare_test_cases():
Instruction = dis.Instruction
expected_opinfo_outer = [
- Instruction(opname='MAKE_CELL', opcode=93, arg=0, argval='a', argrepr='a', offset=0, start_offset=0, starts_line=True, line_number=None, label=None, positions=None, cache_info=None),
- Instruction(opname='MAKE_CELL', opcode=93, arg=1, argval='b', argrepr='b', offset=2, start_offset=2, starts_line=False, line_number=None, label=None, positions=None, cache_info=None),
+ Instruction(opname='MAKE_CELL', opcode=92, arg=0, argval='a', argrepr='a', offset=0, start_offset=0, starts_line=True, line_number=None, label=None, positions=None, cache_info=None),
+ Instruction(opname='MAKE_CELL', opcode=92, arg=1, argval='b', argrepr='b', offset=2, start_offset=2, starts_line=False, line_number=None, label=None, positions=None, cache_info=None),
Instruction(opname='RESUME', opcode=149, arg=0, argval=0, argrepr='', offset=4, start_offset=4, starts_line=True, line_number=1, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_CONST', opcode=81, arg=5, argval=(3, 4), argrepr='(3, 4)', offset=6, start_offset=6, starts_line=True, line_number=2, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='a', argrepr='a', offset=8, start_offset=8, starts_line=False, line_number=2, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_FAST', opcode=83, arg=1, argval='b', argrepr='b', offset=10, start_offset=10, starts_line=False, line_number=2, label=None, positions=None, cache_info=None),
- Instruction(opname='BUILD_TUPLE', opcode=49, arg=2, argval=2, argrepr='', offset=12, start_offset=12, starts_line=False, line_number=2, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_CONST', opcode=81, arg=1, argval=code_object_f, argrepr=repr(code_object_f), offset=14, start_offset=14, starts_line=False, line_number=2, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_CONST', opcode=80, arg=5, argval=(3, 4), argrepr='(3, 4)', offset=6, start_offset=6, starts_line=True, line_number=2, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_FAST', opcode=82, arg=0, argval='a', argrepr='a', offset=8, start_offset=8, starts_line=False, line_number=2, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_FAST', opcode=82, arg=1, argval='b', argrepr='b', offset=10, start_offset=10, starts_line=False, line_number=2, label=None, positions=None, cache_info=None),
+ Instruction(opname='BUILD_TUPLE', opcode=48, arg=2, argval=2, argrepr='', offset=12, start_offset=12, starts_line=False, line_number=2, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_CONST', opcode=80, arg=1, argval=code_object_f, argrepr=repr(code_object_f), offset=14, start_offset=14, starts_line=False, line_number=2, label=None, positions=None, cache_info=None),
Instruction(opname='MAKE_FUNCTION', opcode=23, arg=None, argval=None, argrepr='', offset=16, start_offset=16, starts_line=False, line_number=2, label=None, positions=None, cache_info=None),
- Instruction(opname='SET_FUNCTION_ATTRIBUTE', opcode=105, arg=8, argval=8, argrepr='closure', offset=18, start_offset=18, starts_line=False, line_number=2, label=None, positions=None, cache_info=None),
- Instruction(opname='SET_FUNCTION_ATTRIBUTE', opcode=105, arg=1, argval=1, argrepr='defaults', offset=20, start_offset=20, starts_line=False, line_number=2, label=None, positions=None, cache_info=None),
- Instruction(opname='STORE_FAST', opcode=109, arg=2, argval='f', argrepr='f', offset=22, start_offset=22, starts_line=False, line_number=2, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_GLOBAL', opcode=89, arg=1, argval='print', argrepr='print + NULL', offset=24, start_offset=24, starts_line=True, line_number=7, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
- Instruction(opname='LOAD_DEREF', opcode=82, arg=0, argval='a', argrepr='a', offset=34, start_offset=34, starts_line=False, line_number=7, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_DEREF', opcode=82, arg=1, argval='b', argrepr='b', offset=36, start_offset=36, starts_line=False, line_number=7, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_CONST', opcode=81, arg=2, argval='', argrepr="''", offset=38, start_offset=38, starts_line=False, line_number=7, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_CONST', opcode=81, arg=3, argval=1, argrepr='1', offset=40, start_offset=40, starts_line=False, line_number=7, label=None, positions=None, cache_info=None),
- Instruction(opname='BUILD_LIST', opcode=44, arg=0, argval=0, argrepr='', offset=42, start_offset=42, starts_line=False, line_number=7, label=None, positions=None, cache_info=None),
- Instruction(opname='BUILD_MAP', opcode=45, arg=0, argval=0, argrepr='', offset=44, start_offset=44, starts_line=False, line_number=7, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_CONST', opcode=81, arg=4, argval='Hello world!', argrepr="'Hello world!'", offset=46, start_offset=46, starts_line=False, line_number=7, label=None, positions=None, cache_info=None),
- Instruction(opname='CALL', opcode=50, arg=7, argval=7, argrepr='', offset=48, start_offset=48, starts_line=False, line_number=7, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
+ Instruction(opname='SET_FUNCTION_ATTRIBUTE', opcode=104, arg=8, argval=8, argrepr='closure', offset=18, start_offset=18, starts_line=False, line_number=2, label=None, positions=None, cache_info=None),
+ Instruction(opname='SET_FUNCTION_ATTRIBUTE', opcode=104, arg=1, argval=1, argrepr='defaults', offset=20, start_offset=20, starts_line=False, line_number=2, label=None, positions=None, cache_info=None),
+ Instruction(opname='STORE_FAST', opcode=108, arg=2, argval='f', argrepr='f', offset=22, start_offset=22, starts_line=False, line_number=2, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_GLOBAL', opcode=88, arg=1, argval='print', argrepr='print + NULL', offset=24, start_offset=24, starts_line=True, line_number=7, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
+ Instruction(opname='LOAD_DEREF', opcode=81, arg=0, argval='a', argrepr='a', offset=34, start_offset=34, starts_line=False, line_number=7, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_DEREF', opcode=81, arg=1, argval='b', argrepr='b', offset=36, start_offset=36, starts_line=False, line_number=7, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_CONST', opcode=80, arg=2, argval='', argrepr="''", offset=38, start_offset=38, starts_line=False, line_number=7, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_CONST', opcode=80, arg=3, argval=1, argrepr='1', offset=40, start_offset=40, starts_line=False, line_number=7, label=None, positions=None, cache_info=None),
+ Instruction(opname='BUILD_LIST', opcode=43, arg=0, argval=0, argrepr='', offset=42, start_offset=42, starts_line=False, line_number=7, label=None, positions=None, cache_info=None),
+ Instruction(opname='BUILD_MAP', opcode=44, arg=0, argval=0, argrepr='', offset=44, start_offset=44, starts_line=False, line_number=7, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_CONST', opcode=80, arg=4, argval='Hello world!', argrepr="'Hello world!'", offset=46, start_offset=46, starts_line=False, line_number=7, label=None, positions=None, cache_info=None),
+ Instruction(opname='CALL', opcode=49, arg=7, argval=7, argrepr='', offset=48, start_offset=48, starts_line=False, line_number=7, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
Instruction(opname='POP_TOP', opcode=29, arg=None, argval=None, argrepr='', offset=56, start_offset=56, starts_line=False, line_number=7, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_FAST', opcode=83, arg=2, argval='f', argrepr='f', offset=58, start_offset=58, starts_line=True, line_number=8, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_FAST', opcode=82, arg=2, argval='f', argrepr='f', offset=58, start_offset=58, starts_line=True, line_number=8, label=None, positions=None, cache_info=None),
Instruction(opname='RETURN_VALUE', opcode=33, arg=None, argval=None, argrepr='', offset=60, start_offset=60, starts_line=False, line_number=8, label=None, positions=None, cache_info=None),
]
expected_opinfo_f = [
- Instruction(opname='COPY_FREE_VARS', opcode=59, arg=2, argval=2, argrepr='', offset=0, start_offset=0, starts_line=True, line_number=None, label=None, positions=None, cache_info=None),
- Instruction(opname='MAKE_CELL', opcode=93, arg=0, argval='c', argrepr='c', offset=2, start_offset=2, starts_line=False, line_number=None, label=None, positions=None, cache_info=None),
- Instruction(opname='MAKE_CELL', opcode=93, arg=1, argval='d', argrepr='d', offset=4, start_offset=4, starts_line=False, line_number=None, label=None, positions=None, cache_info=None),
+ Instruction(opname='COPY_FREE_VARS', opcode=58, arg=2, argval=2, argrepr='', offset=0, start_offset=0, starts_line=True, line_number=None, label=None, positions=None, cache_info=None),
+ Instruction(opname='MAKE_CELL', opcode=92, arg=0, argval='c', argrepr='c', offset=2, start_offset=2, starts_line=False, line_number=None, label=None, positions=None, cache_info=None),
+ Instruction(opname='MAKE_CELL', opcode=92, arg=1, argval='d', argrepr='d', offset=4, start_offset=4, starts_line=False, line_number=None, label=None, positions=None, cache_info=None),
Instruction(opname='RESUME', opcode=149, arg=0, argval=0, argrepr='', offset=6, start_offset=6, starts_line=True, line_number=2, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_CONST', opcode=81, arg=2, argval=(5, 6), argrepr='(5, 6)', offset=8, start_offset=8, starts_line=True, line_number=3, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_FAST', opcode=83, arg=3, argval='a', argrepr='a', offset=10, start_offset=10, starts_line=False, line_number=3, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_FAST', opcode=83, arg=4, argval='b', argrepr='b', offset=12, start_offset=12, starts_line=False, line_number=3, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='c', argrepr='c', offset=14, start_offset=14, starts_line=False, line_number=3, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_FAST', opcode=83, arg=1, argval='d', argrepr='d', offset=16, start_offset=16, starts_line=False, line_number=3, label=None, positions=None, cache_info=None),
- Instruction(opname='BUILD_TUPLE', opcode=49, arg=4, argval=4, argrepr='', offset=18, start_offset=18, starts_line=False, line_number=3, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_CONST', opcode=81, arg=1, argval=code_object_inner, argrepr=repr(code_object_inner), offset=20, start_offset=20, starts_line=False, line_number=3, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_CONST', opcode=80, arg=2, argval=(5, 6), argrepr='(5, 6)', offset=8, start_offset=8, starts_line=True, line_number=3, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_FAST', opcode=82, arg=3, argval='a', argrepr='a', offset=10, start_offset=10, starts_line=False, line_number=3, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_FAST', opcode=82, arg=4, argval='b', argrepr='b', offset=12, start_offset=12, starts_line=False, line_number=3, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_FAST', opcode=82, arg=0, argval='c', argrepr='c', offset=14, start_offset=14, starts_line=False, line_number=3, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_FAST', opcode=82, arg=1, argval='d', argrepr='d', offset=16, start_offset=16, starts_line=False, line_number=3, label=None, positions=None, cache_info=None),
+ Instruction(opname='BUILD_TUPLE', opcode=48, arg=4, argval=4, argrepr='', offset=18, start_offset=18, starts_line=False, line_number=3, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_CONST', opcode=80, arg=1, argval=code_object_inner, argrepr=repr(code_object_inner), offset=20, start_offset=20, starts_line=False, line_number=3, label=None, positions=None, cache_info=None),
Instruction(opname='MAKE_FUNCTION', opcode=23, arg=None, argval=None, argrepr='', offset=22, start_offset=22, starts_line=False, line_number=3, label=None, positions=None, cache_info=None),
- Instruction(opname='SET_FUNCTION_ATTRIBUTE', opcode=105, arg=8, argval=8, argrepr='closure', offset=24, start_offset=24, starts_line=False, line_number=3, label=None, positions=None, cache_info=None),
- Instruction(opname='SET_FUNCTION_ATTRIBUTE', opcode=105, arg=1, argval=1, argrepr='defaults', offset=26, start_offset=26, starts_line=False, line_number=3, label=None, positions=None, cache_info=None),
- Instruction(opname='STORE_FAST', opcode=109, arg=2, argval='inner', argrepr='inner', offset=28, start_offset=28, starts_line=False, line_number=3, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_GLOBAL', opcode=89, arg=1, argval='print', argrepr='print + NULL', offset=30, start_offset=30, starts_line=True, line_number=5, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
- Instruction(opname='LOAD_DEREF', opcode=82, arg=3, argval='a', argrepr='a', offset=40, start_offset=40, starts_line=False, line_number=5, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_DEREF', opcode=82, arg=4, argval='b', argrepr='b', offset=42, start_offset=42, starts_line=False, line_number=5, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_DEREF', opcode=82, arg=0, argval='c', argrepr='c', offset=44, start_offset=44, starts_line=False, line_number=5, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_DEREF', opcode=82, arg=1, argval='d', argrepr='d', offset=46, start_offset=46, starts_line=False, line_number=5, label=None, positions=None, cache_info=None),
- Instruction(opname='CALL', opcode=50, arg=4, argval=4, argrepr='', offset=48, start_offset=48, starts_line=False, line_number=5, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
+ Instruction(opname='SET_FUNCTION_ATTRIBUTE', opcode=104, arg=8, argval=8, argrepr='closure', offset=24, start_offset=24, starts_line=False, line_number=3, label=None, positions=None, cache_info=None),
+ Instruction(opname='SET_FUNCTION_ATTRIBUTE', opcode=104, arg=1, argval=1, argrepr='defaults', offset=26, start_offset=26, starts_line=False, line_number=3, label=None, positions=None, cache_info=None),
+ Instruction(opname='STORE_FAST', opcode=108, arg=2, argval='inner', argrepr='inner', offset=28, start_offset=28, starts_line=False, line_number=3, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_GLOBAL', opcode=88, arg=1, argval='print', argrepr='print + NULL', offset=30, start_offset=30, starts_line=True, line_number=5, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
+ Instruction(opname='LOAD_DEREF', opcode=81, arg=3, argval='a', argrepr='a', offset=40, start_offset=40, starts_line=False, line_number=5, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_DEREF', opcode=81, arg=4, argval='b', argrepr='b', offset=42, start_offset=42, starts_line=False, line_number=5, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_DEREF', opcode=81, arg=0, argval='c', argrepr='c', offset=44, start_offset=44, starts_line=False, line_number=5, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_DEREF', opcode=81, arg=1, argval='d', argrepr='d', offset=46, start_offset=46, starts_line=False, line_number=5, label=None, positions=None, cache_info=None),
+ Instruction(opname='CALL', opcode=49, arg=4, argval=4, argrepr='', offset=48, start_offset=48, starts_line=False, line_number=5, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
Instruction(opname='POP_TOP', opcode=29, arg=None, argval=None, argrepr='', offset=56, start_offset=56, starts_line=False, line_number=5, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_FAST', opcode=83, arg=2, argval='inner', argrepr='inner', offset=58, start_offset=58, starts_line=True, line_number=6, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_FAST', opcode=82, arg=2, argval='inner', argrepr='inner', offset=58, start_offset=58, starts_line=True, line_number=6, label=None, positions=None, cache_info=None),
Instruction(opname='RETURN_VALUE', opcode=33, arg=None, argval=None, argrepr='', offset=60, start_offset=60, starts_line=False, line_number=6, label=None, positions=None, cache_info=None),
]
expected_opinfo_inner = [
- Instruction(opname='COPY_FREE_VARS', opcode=59, arg=4, argval=4, argrepr='', offset=0, start_offset=0, starts_line=True, line_number=None, label=None, positions=None, cache_info=None),
+ Instruction(opname='COPY_FREE_VARS', opcode=58, arg=4, argval=4, argrepr='', offset=0, start_offset=0, starts_line=True, line_number=None, label=None, positions=None, cache_info=None),
Instruction(opname='RESUME', opcode=149, arg=0, argval=0, argrepr='', offset=2, start_offset=2, starts_line=True, line_number=3, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_GLOBAL', opcode=89, arg=1, argval='print', argrepr='print + NULL', offset=4, start_offset=4, starts_line=True, line_number=4, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
- Instruction(opname='LOAD_DEREF', opcode=82, arg=2, argval='a', argrepr='a', offset=14, start_offset=14, starts_line=False, line_number=4, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_DEREF', opcode=82, arg=3, argval='b', argrepr='b', offset=16, start_offset=16, starts_line=False, line_number=4, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_DEREF', opcode=82, arg=4, argval='c', argrepr='c', offset=18, start_offset=18, starts_line=False, line_number=4, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_DEREF', opcode=82, arg=5, argval='d', argrepr='d', offset=20, start_offset=20, starts_line=False, line_number=4, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_FAST_LOAD_FAST', opcode=86, arg=1, argval=('e', 'f'), argrepr='e, f', offset=22, start_offset=22, starts_line=False, line_number=4, label=None, positions=None, cache_info=None),
- Instruction(opname='CALL', opcode=50, arg=6, argval=6, argrepr='', offset=24, start_offset=24, starts_line=False, line_number=4, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
+ Instruction(opname='LOAD_GLOBAL', opcode=88, arg=1, argval='print', argrepr='print + NULL', offset=4, start_offset=4, starts_line=True, line_number=4, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
+ Instruction(opname='LOAD_DEREF', opcode=81, arg=2, argval='a', argrepr='a', offset=14, start_offset=14, starts_line=False, line_number=4, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_DEREF', opcode=81, arg=3, argval='b', argrepr='b', offset=16, start_offset=16, starts_line=False, line_number=4, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_DEREF', opcode=81, arg=4, argval='c', argrepr='c', offset=18, start_offset=18, starts_line=False, line_number=4, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_DEREF', opcode=81, arg=5, argval='d', argrepr='d', offset=20, start_offset=20, starts_line=False, line_number=4, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_FAST_LOAD_FAST', opcode=85, arg=1, argval=('e', 'f'), argrepr='e, f', offset=22, start_offset=22, starts_line=False, line_number=4, label=None, positions=None, cache_info=None),
+ Instruction(opname='CALL', opcode=49, arg=6, argval=6, argrepr='', offset=24, start_offset=24, starts_line=False, line_number=4, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
Instruction(opname='POP_TOP', opcode=29, arg=None, argval=None, argrepr='', offset=32, start_offset=32, starts_line=False, line_number=4, label=None, positions=None, cache_info=None),
- Instruction(opname='RETURN_CONST', opcode=102, arg=0, argval=None, argrepr='None', offset=34, start_offset=34, starts_line=False, line_number=4, label=None, positions=None, cache_info=None),
+ Instruction(opname='RETURN_CONST', opcode=101, arg=0, argval=None, argrepr='None', offset=34, start_offset=34, starts_line=False, line_number=4, label=None, positions=None, cache_info=None),
]
expected_opinfo_jumpy = [
Instruction(opname='RESUME', opcode=149, arg=0, argval=0, argrepr='', offset=0, start_offset=0, starts_line=True, line_number=1, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_GLOBAL', opcode=89, arg=1, argval='range', argrepr='range + NULL', offset=2, start_offset=2, starts_line=True, line_number=3, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
- Instruction(opname='LOAD_CONST', opcode=81, arg=1, argval=10, argrepr='10', offset=12, start_offset=12, starts_line=False, line_number=3, label=None, positions=None, cache_info=None),
- Instruction(opname='CALL', opcode=50, arg=1, argval=1, argrepr='', offset=14, start_offset=14, starts_line=False, line_number=3, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
+ Instruction(opname='LOAD_GLOBAL', opcode=88, arg=1, argval='range', argrepr='range + NULL', offset=2, start_offset=2, starts_line=True, line_number=3, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
+ Instruction(opname='LOAD_CONST', opcode=80, arg=1, argval=10, argrepr='10', offset=12, start_offset=12, starts_line=False, line_number=3, label=None, positions=None, cache_info=None),
+ Instruction(opname='CALL', opcode=49, arg=1, argval=1, argrepr='', offset=14, start_offset=14, starts_line=False, line_number=3, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
Instruction(opname='GET_ITER', opcode=16, arg=None, argval=None, argrepr='', offset=22, start_offset=22, starts_line=False, line_number=3, label=None, positions=None, cache_info=None),
- Instruction(opname='FOR_ITER', opcode=69, arg=30, argval=88, argrepr='to L4', offset=24, start_offset=24, starts_line=False, line_number=3, label=1, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
- Instruction(opname='STORE_FAST', opcode=109, arg=0, argval='i', argrepr='i', offset=28, start_offset=28, starts_line=False, line_number=3, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=30, start_offset=30, starts_line=True, line_number=4, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
- Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='i', argrepr='i', offset=40, start_offset=40, starts_line=False, line_number=4, label=None, positions=None, cache_info=None),
- Instruction(opname='CALL', opcode=50, arg=1, argval=1, argrepr='', offset=42, start_offset=42, starts_line=False, line_number=4, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
+ Instruction(opname='FOR_ITER', opcode=68, arg=30, argval=88, argrepr='to L4', offset=24, start_offset=24, starts_line=False, line_number=3, label=1, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
+ Instruction(opname='STORE_FAST', opcode=108, arg=0, argval='i', argrepr='i', offset=28, start_offset=28, starts_line=False, line_number=3, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_GLOBAL', opcode=88, arg=3, argval='print', argrepr='print + NULL', offset=30, start_offset=30, starts_line=True, line_number=4, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
+ Instruction(opname='LOAD_FAST', opcode=82, arg=0, argval='i', argrepr='i', offset=40, start_offset=40, starts_line=False, line_number=4, label=None, positions=None, cache_info=None),
+ Instruction(opname='CALL', opcode=49, arg=1, argval=1, argrepr='', offset=42, start_offset=42, starts_line=False, line_number=4, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
Instruction(opname='POP_TOP', opcode=29, arg=None, argval=None, argrepr='', offset=50, start_offset=50, starts_line=False, line_number=4, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='i', argrepr='i', offset=52, start_offset=52, starts_line=True, line_number=5, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_CONST', opcode=81, arg=2, argval=4, argrepr='4', offset=54, start_offset=54, starts_line=False, line_number=5, label=None, positions=None, cache_info=None),
- Instruction(opname='COMPARE_OP', opcode=55, arg=18, argval='<', argrepr='bool(<)', offset=56, start_offset=56, starts_line=False, line_number=5, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
- Instruction(opname='POP_JUMP_IF_FALSE', opcode=96, arg=2, argval=68, argrepr='to L2', offset=60, start_offset=60, starts_line=False, line_number=5, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
- Instruction(opname='JUMP_BACKWARD', opcode=74, arg=22, argval=24, argrepr='to L1', offset=64, start_offset=64, starts_line=True, line_number=6, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
- Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='i', argrepr='i', offset=68, start_offset=68, starts_line=True, line_number=7, label=2, positions=None, cache_info=None),
- Instruction(opname='LOAD_CONST', opcode=81, arg=3, argval=6, argrepr='6', offset=70, start_offset=70, starts_line=False, line_number=7, label=None, positions=None, cache_info=None),
- Instruction(opname='COMPARE_OP', opcode=55, arg=148, argval='>', argrepr='bool(>)', offset=72, start_offset=72, starts_line=False, line_number=7, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
- Instruction(opname='POP_JUMP_IF_TRUE', opcode=99, arg=2, argval=84, argrepr='to L3', offset=76, start_offset=76, starts_line=False, line_number=7, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
- Instruction(opname='JUMP_BACKWARD', opcode=74, arg=30, argval=24, argrepr='to L1', offset=80, start_offset=80, starts_line=False, line_number=7, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
+ Instruction(opname='LOAD_FAST', opcode=82, arg=0, argval='i', argrepr='i', offset=52, start_offset=52, starts_line=True, line_number=5, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_CONST', opcode=80, arg=2, argval=4, argrepr='4', offset=54, start_offset=54, starts_line=False, line_number=5, label=None, positions=None, cache_info=None),
+ Instruction(opname='COMPARE_OP', opcode=54, arg=18, argval='<', argrepr='bool(<)', offset=56, start_offset=56, starts_line=False, line_number=5, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
+ Instruction(opname='POP_JUMP_IF_FALSE', opcode=95, arg=2, argval=68, argrepr='to L2', offset=60, start_offset=60, starts_line=False, line_number=5, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
+ Instruction(opname='JUMP_BACKWARD', opcode=73, arg=22, argval=24, argrepr='to L1', offset=64, start_offset=64, starts_line=True, line_number=6, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
+ Instruction(opname='LOAD_FAST', opcode=82, arg=0, argval='i', argrepr='i', offset=68, start_offset=68, starts_line=True, line_number=7, label=2, positions=None, cache_info=None),
+ Instruction(opname='LOAD_CONST', opcode=80, arg=3, argval=6, argrepr='6', offset=70, start_offset=70, starts_line=False, line_number=7, label=None, positions=None, cache_info=None),
+ Instruction(opname='COMPARE_OP', opcode=54, arg=148, argval='>', argrepr='bool(>)', offset=72, start_offset=72, starts_line=False, line_number=7, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
+ Instruction(opname='POP_JUMP_IF_TRUE', opcode=98, arg=2, argval=84, argrepr='to L3', offset=76, start_offset=76, starts_line=False, line_number=7, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
+ Instruction(opname='JUMP_BACKWARD', opcode=73, arg=30, argval=24, argrepr='to L1', offset=80, start_offset=80, starts_line=False, line_number=7, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
Instruction(opname='POP_TOP', opcode=29, arg=None, argval=None, argrepr='', offset=84, start_offset=84, starts_line=True, line_number=8, label=3, positions=None, cache_info=None),
- Instruction(opname='JUMP_FORWARD', opcode=76, arg=13, argval=114, argrepr='to L5', offset=86, start_offset=86, starts_line=False, line_number=8, label=None, positions=None, cache_info=None),
+ Instruction(opname='JUMP_FORWARD', opcode=75, arg=13, argval=114, argrepr='to L5', offset=86, start_offset=86, starts_line=False, line_number=8, label=None, positions=None, cache_info=None),
Instruction(opname='END_FOR', opcode=9, arg=None, argval=None, argrepr='', offset=88, start_offset=88, starts_line=True, line_number=3, label=4, positions=None, cache_info=None),
Instruction(opname='POP_TOP', opcode=29, arg=None, argval=None, argrepr='', offset=90, start_offset=90, starts_line=False, line_number=3, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=92, start_offset=92, starts_line=True, line_number=10, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
- Instruction(opname='LOAD_CONST', opcode=81, arg=4, argval='I can haz else clause?', argrepr="'I can haz else clause?'", offset=102, start_offset=102, starts_line=False, line_number=10, label=None, positions=None, cache_info=None),
- Instruction(opname='CALL', opcode=50, arg=1, argval=1, argrepr='', offset=104, start_offset=104, starts_line=False, line_number=10, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
+ Instruction(opname='LOAD_GLOBAL', opcode=88, arg=3, argval='print', argrepr='print + NULL', offset=92, start_offset=92, starts_line=True, line_number=10, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
+ Instruction(opname='LOAD_CONST', opcode=80, arg=4, argval='I can haz else clause?', argrepr="'I can haz else clause?'", offset=102, start_offset=102, starts_line=False, line_number=10, label=None, positions=None, cache_info=None),
+ Instruction(opname='CALL', opcode=49, arg=1, argval=1, argrepr='', offset=104, start_offset=104, starts_line=False, line_number=10, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
Instruction(opname='POP_TOP', opcode=29, arg=None, argval=None, argrepr='', offset=112, start_offset=112, starts_line=False, line_number=10, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_FAST_CHECK', opcode=85, arg=0, argval='i', argrepr='i', offset=114, start_offset=114, starts_line=True, line_number=11, label=5, positions=None, cache_info=None),
+ Instruction(opname='LOAD_FAST_CHECK', opcode=84, arg=0, argval='i', argrepr='i', offset=114, start_offset=114, starts_line=True, line_number=11, label=5, positions=None, cache_info=None),
Instruction(opname='TO_BOOL', opcode=37, arg=None, argval=None, argrepr='', offset=116, start_offset=116, starts_line=False, line_number=11, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('version', 2, b'\x00\x00\x00\x00')]),
- Instruction(opname='POP_JUMP_IF_FALSE', opcode=96, arg=40, argval=208, argrepr='to L9', offset=124, start_offset=124, starts_line=False, line_number=11, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
- Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=128, start_offset=128, starts_line=True, line_number=12, label=6, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
- Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='i', argrepr='i', offset=138, start_offset=138, starts_line=False, line_number=12, label=None, positions=None, cache_info=None),
- Instruction(opname='CALL', opcode=50, arg=1, argval=1, argrepr='', offset=140, start_offset=140, starts_line=False, line_number=12, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
+ Instruction(opname='POP_JUMP_IF_FALSE', opcode=95, arg=40, argval=208, argrepr='to L9', offset=124, start_offset=124, starts_line=False, line_number=11, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
+ Instruction(opname='LOAD_GLOBAL', opcode=88, arg=3, argval='print', argrepr='print + NULL', offset=128, start_offset=128, starts_line=True, line_number=12, label=6, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
+ Instruction(opname='LOAD_FAST', opcode=82, arg=0, argval='i', argrepr='i', offset=138, start_offset=138, starts_line=False, line_number=12, label=None, positions=None, cache_info=None),
+ Instruction(opname='CALL', opcode=49, arg=1, argval=1, argrepr='', offset=140, start_offset=140, starts_line=False, line_number=12, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
Instruction(opname='POP_TOP', opcode=29, arg=None, argval=None, argrepr='', offset=148, start_offset=148, starts_line=False, line_number=12, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='i', argrepr='i', offset=150, start_offset=150, starts_line=True, line_number=13, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_CONST', opcode=81, arg=5, argval=1, argrepr='1', offset=152, start_offset=152, starts_line=False, line_number=13, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_FAST', opcode=82, arg=0, argval='i', argrepr='i', offset=150, start_offset=150, starts_line=True, line_number=13, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_CONST', opcode=80, arg=5, argval=1, argrepr='1', offset=152, start_offset=152, starts_line=False, line_number=13, label=None, positions=None, cache_info=None),
Instruction(opname='BINARY_OP', opcode=42, arg=23, argval=23, argrepr='-=', offset=154, start_offset=154, starts_line=False, line_number=13, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
- Instruction(opname='STORE_FAST', opcode=109, arg=0, argval='i', argrepr='i', offset=158, start_offset=158, starts_line=False, line_number=13, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='i', argrepr='i', offset=160, start_offset=160, starts_line=True, line_number=14, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_CONST', opcode=81, arg=3, argval=6, argrepr='6', offset=162, start_offset=162, starts_line=False, line_number=14, label=None, positions=None, cache_info=None),
- Instruction(opname='COMPARE_OP', opcode=55, arg=148, argval='>', argrepr='bool(>)', offset=164, start_offset=164, starts_line=False, line_number=14, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
- Instruction(opname='POP_JUMP_IF_FALSE', opcode=96, arg=2, argval=176, argrepr='to L7', offset=168, start_offset=168, starts_line=False, line_number=14, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
- Instruction(opname='JUMP_BACKWARD', opcode=74, arg=31, argval=114, argrepr='to L5', offset=172, start_offset=172, starts_line=True, line_number=15, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
- Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='i', argrepr='i', offset=176, start_offset=176, starts_line=True, line_number=16, label=7, positions=None, cache_info=None),
- Instruction(opname='LOAD_CONST', opcode=81, arg=2, argval=4, argrepr='4', offset=178, start_offset=178, starts_line=False, line_number=16, label=None, positions=None, cache_info=None),
- Instruction(opname='COMPARE_OP', opcode=55, arg=18, argval='<', argrepr='bool(<)', offset=180, start_offset=180, starts_line=False, line_number=16, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
- Instruction(opname='POP_JUMP_IF_FALSE', opcode=96, arg=1, argval=190, argrepr='to L8', offset=184, start_offset=184, starts_line=False, line_number=16, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
- Instruction(opname='JUMP_FORWARD', opcode=76, arg=20, argval=230, argrepr='to L10', offset=188, start_offset=188, starts_line=True, line_number=17, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='i', argrepr='i', offset=190, start_offset=190, starts_line=True, line_number=11, label=8, positions=None, cache_info=None),
+ Instruction(opname='STORE_FAST', opcode=108, arg=0, argval='i', argrepr='i', offset=158, start_offset=158, starts_line=False, line_number=13, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_FAST', opcode=82, arg=0, argval='i', argrepr='i', offset=160, start_offset=160, starts_line=True, line_number=14, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_CONST', opcode=80, arg=3, argval=6, argrepr='6', offset=162, start_offset=162, starts_line=False, line_number=14, label=None, positions=None, cache_info=None),
+ Instruction(opname='COMPARE_OP', opcode=54, arg=148, argval='>', argrepr='bool(>)', offset=164, start_offset=164, starts_line=False, line_number=14, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
+ Instruction(opname='POP_JUMP_IF_FALSE', opcode=95, arg=2, argval=176, argrepr='to L7', offset=168, start_offset=168, starts_line=False, line_number=14, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
+ Instruction(opname='JUMP_BACKWARD', opcode=73, arg=31, argval=114, argrepr='to L5', offset=172, start_offset=172, starts_line=True, line_number=15, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
+ Instruction(opname='LOAD_FAST', opcode=82, arg=0, argval='i', argrepr='i', offset=176, start_offset=176, starts_line=True, line_number=16, label=7, positions=None, cache_info=None),
+ Instruction(opname='LOAD_CONST', opcode=80, arg=2, argval=4, argrepr='4', offset=178, start_offset=178, starts_line=False, line_number=16, label=None, positions=None, cache_info=None),
+ Instruction(opname='COMPARE_OP', opcode=54, arg=18, argval='<', argrepr='bool(<)', offset=180, start_offset=180, starts_line=False, line_number=16, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
+ Instruction(opname='POP_JUMP_IF_FALSE', opcode=95, arg=1, argval=190, argrepr='to L8', offset=184, start_offset=184, starts_line=False, line_number=16, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
+ Instruction(opname='JUMP_FORWARD', opcode=75, arg=20, argval=230, argrepr='to L10', offset=188, start_offset=188, starts_line=True, line_number=17, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_FAST', opcode=82, arg=0, argval='i', argrepr='i', offset=190, start_offset=190, starts_line=True, line_number=11, label=8, positions=None, cache_info=None),
Instruction(opname='TO_BOOL', opcode=37, arg=None, argval=None, argrepr='', offset=192, start_offset=192, starts_line=False, line_number=11, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('version', 2, b'\x00\x00\x00\x00')]),
- Instruction(opname='POP_JUMP_IF_FALSE', opcode=96, arg=2, argval=208, argrepr='to L9', offset=200, start_offset=200, starts_line=False, line_number=11, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
- Instruction(opname='JUMP_BACKWARD', opcode=74, arg=40, argval=128, argrepr='to L6', offset=204, start_offset=204, starts_line=False, line_number=11, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
- Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=208, start_offset=208, starts_line=True, line_number=19, label=9, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
- Instruction(opname='LOAD_CONST', opcode=81, arg=6, argval='Who let lolcatz into this test suite?', argrepr="'Who let lolcatz into this test suite?'", offset=218, start_offset=218, starts_line=False, line_number=19, label=None, positions=None, cache_info=None),
- Instruction(opname='CALL', opcode=50, arg=1, argval=1, argrepr='', offset=220, start_offset=220, starts_line=False, line_number=19, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
+ Instruction(opname='POP_JUMP_IF_FALSE', opcode=95, arg=2, argval=208, argrepr='to L9', offset=200, start_offset=200, starts_line=False, line_number=11, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
+ Instruction(opname='JUMP_BACKWARD', opcode=73, arg=40, argval=128, argrepr='to L6', offset=204, start_offset=204, starts_line=False, line_number=11, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
+ Instruction(opname='LOAD_GLOBAL', opcode=88, arg=3, argval='print', argrepr='print + NULL', offset=208, start_offset=208, starts_line=True, line_number=19, label=9, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
+ Instruction(opname='LOAD_CONST', opcode=80, arg=6, argval='Who let lolcatz into this test suite?', argrepr="'Who let lolcatz into this test suite?'", offset=218, start_offset=218, starts_line=False, line_number=19, label=None, positions=None, cache_info=None),
+ Instruction(opname='CALL', opcode=49, arg=1, argval=1, argrepr='', offset=220, start_offset=220, starts_line=False, line_number=19, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
Instruction(opname='POP_TOP', opcode=29, arg=None, argval=None, argrepr='', offset=228, start_offset=228, starts_line=False, line_number=19, label=None, positions=None, cache_info=None),
Instruction(opname='NOP', opcode=27, arg=None, argval=None, argrepr='', offset=230, start_offset=230, starts_line=True, line_number=20, label=10, positions=None, cache_info=None),
- Instruction(opname='LOAD_CONST', opcode=81, arg=5, argval=1, argrepr='1', offset=232, start_offset=232, starts_line=True, line_number=21, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_CONST', opcode=81, arg=7, argval=0, argrepr='0', offset=234, start_offset=234, starts_line=False, line_number=21, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_CONST', opcode=80, arg=5, argval=1, argrepr='1', offset=232, start_offset=232, starts_line=True, line_number=21, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_CONST', opcode=80, arg=7, argval=0, argrepr='0', offset=234, start_offset=234, starts_line=False, line_number=21, label=None, positions=None, cache_info=None),
Instruction(opname='BINARY_OP', opcode=42, arg=11, argval=11, argrepr='/', offset=236, start_offset=236, starts_line=False, line_number=21, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
Instruction(opname='POP_TOP', opcode=29, arg=None, argval=None, argrepr='', offset=240, start_offset=240, starts_line=False, line_number=21, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='i', argrepr='i', offset=242, start_offset=242, starts_line=True, line_number=25, label=None, positions=None, cache_info=None),
- Instruction(opname='COPY', opcode=58, arg=1, argval=1, argrepr='', offset=244, start_offset=244, starts_line=False, line_number=25, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_SPECIAL', opcode=91, arg=1, argval=1, argrepr='__exit__', offset=246, start_offset=246, starts_line=False, line_number=25, label=None, positions=None, cache_info=None),
- Instruction(opname='SWAP', opcode=114, arg=2, argval=2, argrepr='', offset=248, start_offset=248, starts_line=False, line_number=25, label=None, positions=None, cache_info=None),
- Instruction(opname='SWAP', opcode=114, arg=3, argval=3, argrepr='', offset=250, start_offset=250, starts_line=False, line_number=25, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_SPECIAL', opcode=91, arg=0, argval=0, argrepr='__enter__', offset=252, start_offset=252, starts_line=False, line_number=25, label=None, positions=None, cache_info=None),
- Instruction(opname='CALL', opcode=50, arg=0, argval=0, argrepr='', offset=254, start_offset=254, starts_line=False, line_number=25, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
- Instruction(opname='STORE_FAST', opcode=109, arg=1, argval='dodgy', argrepr='dodgy', offset=262, start_offset=262, starts_line=False, line_number=25, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=264, start_offset=264, starts_line=True, line_number=26, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
- Instruction(opname='LOAD_CONST', opcode=81, arg=8, argval='Never reach this', argrepr="'Never reach this'", offset=274, start_offset=274, starts_line=False, line_number=26, label=None, positions=None, cache_info=None),
- Instruction(opname='CALL', opcode=50, arg=1, argval=1, argrepr='', offset=276, start_offset=276, starts_line=False, line_number=26, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
+ Instruction(opname='LOAD_FAST', opcode=82, arg=0, argval='i', argrepr='i', offset=242, start_offset=242, starts_line=True, line_number=25, label=None, positions=None, cache_info=None),
+ Instruction(opname='COPY', opcode=57, arg=1, argval=1, argrepr='', offset=244, start_offset=244, starts_line=False, line_number=25, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_SPECIAL', opcode=90, arg=1, argval=1, argrepr='__exit__', offset=246, start_offset=246, starts_line=False, line_number=25, label=None, positions=None, cache_info=None),
+ Instruction(opname='SWAP', opcode=113, arg=2, argval=2, argrepr='', offset=248, start_offset=248, starts_line=False, line_number=25, label=None, positions=None, cache_info=None),
+ Instruction(opname='SWAP', opcode=113, arg=3, argval=3, argrepr='', offset=250, start_offset=250, starts_line=False, line_number=25, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_SPECIAL', opcode=90, arg=0, argval=0, argrepr='__enter__', offset=252, start_offset=252, starts_line=False, line_number=25, label=None, positions=None, cache_info=None),
+ Instruction(opname='CALL', opcode=49, arg=0, argval=0, argrepr='', offset=254, start_offset=254, starts_line=False, line_number=25, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
+ Instruction(opname='STORE_FAST', opcode=108, arg=1, argval='dodgy', argrepr='dodgy', offset=262, start_offset=262, starts_line=False, line_number=25, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_GLOBAL', opcode=88, arg=3, argval='print', argrepr='print + NULL', offset=264, start_offset=264, starts_line=True, line_number=26, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
+ Instruction(opname='LOAD_CONST', opcode=80, arg=8, argval='Never reach this', argrepr="'Never reach this'", offset=274, start_offset=274, starts_line=False, line_number=26, label=None, positions=None, cache_info=None),
+ Instruction(opname='CALL', opcode=49, arg=1, argval=1, argrepr='', offset=276, start_offset=276, starts_line=False, line_number=26, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
Instruction(opname='POP_TOP', opcode=29, arg=None, argval=None, argrepr='', offset=284, start_offset=284, starts_line=False, line_number=26, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_CONST', opcode=81, arg=0, argval=None, argrepr='None', offset=286, start_offset=286, starts_line=True, line_number=25, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_CONST', opcode=81, arg=0, argval=None, argrepr='None', offset=288, start_offset=288, starts_line=False, line_number=25, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_CONST', opcode=81, arg=0, argval=None, argrepr='None', offset=290, start_offset=290, starts_line=False, line_number=25, label=None, positions=None, cache_info=None),
- Instruction(opname='CALL', opcode=50, arg=3, argval=3, argrepr='', offset=292, start_offset=292, starts_line=False, line_number=25, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
+ Instruction(opname='LOAD_CONST', opcode=80, arg=0, argval=None, argrepr='None', offset=286, start_offset=286, starts_line=True, line_number=25, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_CONST', opcode=80, arg=0, argval=None, argrepr='None', offset=288, start_offset=288, starts_line=False, line_number=25, label=None, positions=None, cache_info=None),
+ Instruction(opname='LOAD_CONST', opcode=80, arg=0, argval=None, argrepr='None', offset=290, start_offset=290, starts_line=False, line_number=25, label=None, positions=None, cache_info=None),
+ Instruction(opname='CALL', opcode=49, arg=3, argval=3, argrepr='', offset=292, start_offset=292, starts_line=False, line_number=25, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
Instruction(opname='POP_TOP', opcode=29, arg=None, argval=None, argrepr='', offset=300, start_offset=300, starts_line=False, line_number=25, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=302, start_offset=302, starts_line=True, line_number=28, label=11, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
- Instruction(opname='LOAD_CONST', opcode=81, arg=10, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=312, start_offset=312, starts_line=False, line_number=28, label=None, positions=None, cache_info=None),
- Instruction(opname='CALL', opcode=50, arg=1, argval=1, argrepr='', offset=314, start_offset=314, starts_line=False, line_number=28, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
+ Instruction(opname='LOAD_GLOBAL', opcode=88, arg=3, argval='print', argrepr='print + NULL', offset=302, start_offset=302, starts_line=True, line_number=28, label=11, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
+ Instruction(opname='LOAD_CONST', opcode=80, arg=10, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=312, start_offset=312, starts_line=False, line_number=28, label=None, positions=None, cache_info=None),
+ Instruction(opname='CALL', opcode=49, arg=1, argval=1, argrepr='', offset=314, start_offset=314, starts_line=False, line_number=28, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
Instruction(opname='POP_TOP', opcode=29, arg=None, argval=None, argrepr='', offset=322, start_offset=322, starts_line=False, line_number=28, label=None, positions=None, cache_info=None),
- Instruction(opname='RETURN_CONST', opcode=102, arg=0, argval=None, argrepr='None', offset=324, start_offset=324, starts_line=False, line_number=28, label=None, positions=None, cache_info=None),
+ Instruction(opname='RETURN_CONST', opcode=101, arg=0, argval=None, argrepr='None', offset=324, start_offset=324, starts_line=False, line_number=28, label=None, positions=None, cache_info=None),
Instruction(opname='PUSH_EXC_INFO', opcode=30, arg=None, argval=None, argrepr='', offset=326, start_offset=326, starts_line=True, line_number=25, label=None, positions=None, cache_info=None),
Instruction(opname='WITH_EXCEPT_START', opcode=41, arg=None, argval=None, argrepr='', offset=328, start_offset=328, starts_line=False, line_number=25, label=None, positions=None, cache_info=None),
Instruction(opname='TO_BOOL', opcode=37, arg=None, argval=None, argrepr='', offset=330, start_offset=330, starts_line=False, line_number=25, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('version', 2, b'\x00\x00\x00\x00')]),
- Instruction(opname='POP_JUMP_IF_TRUE', opcode=99, arg=1, argval=344, argrepr='to L12', offset=338, start_offset=338, starts_line=False, line_number=25, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
- Instruction(opname='RERAISE', opcode=101, arg=2, argval=2, argrepr='', offset=342, start_offset=342, starts_line=False, line_number=25, label=None, positions=None, cache_info=None),
+ Instruction(opname='POP_JUMP_IF_TRUE', opcode=98, arg=1, argval=344, argrepr='to L12', offset=338, start_offset=338, starts_line=False, line_number=25, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
+ Instruction(opname='RERAISE', opcode=100, arg=2, argval=2, argrepr='', offset=342, start_offset=342, starts_line=False, line_number=25, label=None, positions=None, cache_info=None),
Instruction(opname='POP_TOP', opcode=29, arg=None, argval=None, argrepr='', offset=344, start_offset=344, starts_line=False, line_number=25, label=12, positions=None, cache_info=None),
Instruction(opname='POP_EXCEPT', opcode=28, arg=None, argval=None, argrepr='', offset=346, start_offset=346, starts_line=False, line_number=25, label=None, positions=None, cache_info=None),
Instruction(opname='POP_TOP', opcode=29, arg=None, argval=None, argrepr='', offset=348, start_offset=348, starts_line=False, line_number=25, label=None, positions=None, cache_info=None),
Instruction(opname='POP_TOP', opcode=29, arg=None, argval=None, argrepr='', offset=350, start_offset=350, starts_line=False, line_number=25, label=None, positions=None, cache_info=None),
Instruction(opname='POP_TOP', opcode=29, arg=None, argval=None, argrepr='', offset=352, start_offset=352, starts_line=False, line_number=25, label=None, positions=None, cache_info=None),
- Instruction(opname='JUMP_BACKWARD_NO_INTERRUPT', opcode=75, arg=27, argval=302, argrepr='to L11', offset=354, start_offset=354, starts_line=False, line_number=25, label=None, positions=None, cache_info=None),
- Instruction(opname='COPY', opcode=58, arg=3, argval=3, argrepr='', offset=356, start_offset=356, starts_line=True, line_number=None, label=None, positions=None, cache_info=None),
+ Instruction(opname='JUMP_BACKWARD_NO_INTERRUPT', opcode=74, arg=27, argval=302, argrepr='to L11', offset=354, start_offset=354, starts_line=False, line_number=25, label=None, positions=None, cache_info=None),
+ Instruction(opname='COPY', opcode=57, arg=3, argval=3, argrepr='', offset=356, start_offset=356, starts_line=True, line_number=None, label=None, positions=None, cache_info=None),
Instruction(opname='POP_EXCEPT', opcode=28, arg=None, argval=None, argrepr='', offset=358, start_offset=358, starts_line=False, line_number=None, label=None, positions=None, cache_info=None),
- Instruction(opname='RERAISE', opcode=101, arg=1, argval=1, argrepr='', offset=360, start_offset=360, starts_line=False, line_number=None, label=None, positions=None, cache_info=None),
+ Instruction(opname='RERAISE', opcode=100, arg=1, argval=1, argrepr='', offset=360, start_offset=360, starts_line=False, line_number=None, label=None, positions=None, cache_info=None),
Instruction(opname='PUSH_EXC_INFO', opcode=30, arg=None, argval=None, argrepr='', offset=362, start_offset=362, starts_line=False, line_number=None, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_GLOBAL', opcode=89, arg=4, argval='ZeroDivisionError', argrepr='ZeroDivisionError', offset=364, start_offset=364, starts_line=True, line_number=22, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
+ Instruction(opname='LOAD_GLOBAL', opcode=88, arg=4, argval='ZeroDivisionError', argrepr='ZeroDivisionError', offset=364, start_offset=364, starts_line=True, line_number=22, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
Instruction(opname='CHECK_EXC_MATCH', opcode=5, arg=None, argval=None, argrepr='', offset=374, start_offset=374, starts_line=False, line_number=22, label=None, positions=None, cache_info=None),
- Instruction(opname='POP_JUMP_IF_FALSE', opcode=96, arg=14, argval=408, argrepr='to L13', offset=376, start_offset=376, starts_line=False, line_number=22, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
+ Instruction(opname='POP_JUMP_IF_FALSE', opcode=95, arg=14, argval=408, argrepr='to L13', offset=376, start_offset=376, starts_line=False, line_number=22, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]),
Instruction(opname='POP_TOP', opcode=29, arg=None, argval=None, argrepr='', offset=380, start_offset=380, starts_line=False, line_number=22, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=382, start_offset=382, starts_line=True, line_number=23, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
- Instruction(opname='LOAD_CONST', opcode=81, arg=9, argval='Here we go, here we go, here we go...', argrepr="'Here we go, here we go, here we go...'", offset=392, start_offset=392, starts_line=False, line_number=23, label=None, positions=None, cache_info=None),
- Instruction(opname='CALL', opcode=50, arg=1, argval=1, argrepr='', offset=394, start_offset=394, starts_line=False, line_number=23, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
+ Instruction(opname='LOAD_GLOBAL', opcode=88, arg=3, argval='print', argrepr='print + NULL', offset=382, start_offset=382, starts_line=True, line_number=23, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
+ Instruction(opname='LOAD_CONST', opcode=80, arg=9, argval='Here we go, here we go, here we go...', argrepr="'Here we go, here we go, here we go...'", offset=392, start_offset=392, starts_line=False, line_number=23, label=None, positions=None, cache_info=None),
+ Instruction(opname='CALL', opcode=49, arg=1, argval=1, argrepr='', offset=394, start_offset=394, starts_line=False, line_number=23, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
Instruction(opname='POP_TOP', opcode=29, arg=None, argval=None, argrepr='', offset=402, start_offset=402, starts_line=False, line_number=23, label=None, positions=None, cache_info=None),
Instruction(opname='POP_EXCEPT', opcode=28, arg=None, argval=None, argrepr='', offset=404, start_offset=404, starts_line=False, line_number=23, label=None, positions=None, cache_info=None),
- Instruction(opname='JUMP_BACKWARD_NO_INTERRUPT', opcode=75, arg=53, argval=302, argrepr='to L11', offset=406, start_offset=406, starts_line=False, line_number=23, label=None, positions=None, cache_info=None),
- Instruction(opname='RERAISE', opcode=101, arg=0, argval=0, argrepr='', offset=408, start_offset=408, starts_line=True, line_number=22, label=13, positions=None, cache_info=None),
- Instruction(opname='COPY', opcode=58, arg=3, argval=3, argrepr='', offset=410, start_offset=410, starts_line=True, line_number=None, label=None, positions=None, cache_info=None),
+ Instruction(opname='JUMP_BACKWARD_NO_INTERRUPT', opcode=74, arg=53, argval=302, argrepr='to L11', offset=406, start_offset=406, starts_line=False, line_number=23, label=None, positions=None, cache_info=None),
+ Instruction(opname='RERAISE', opcode=100, arg=0, argval=0, argrepr='', offset=408, start_offset=408, starts_line=True, line_number=22, label=13, positions=None, cache_info=None),
+ Instruction(opname='COPY', opcode=57, arg=3, argval=3, argrepr='', offset=410, start_offset=410, starts_line=True, line_number=None, label=None, positions=None, cache_info=None),
Instruction(opname='POP_EXCEPT', opcode=28, arg=None, argval=None, argrepr='', offset=412, start_offset=412, starts_line=False, line_number=None, label=None, positions=None, cache_info=None),
- Instruction(opname='RERAISE', opcode=101, arg=1, argval=1, argrepr='', offset=414, start_offset=414, starts_line=False, line_number=None, label=None, positions=None, cache_info=None),
+ Instruction(opname='RERAISE', opcode=100, arg=1, argval=1, argrepr='', offset=414, start_offset=414, starts_line=False, line_number=None, label=None, positions=None, cache_info=None),
Instruction(opname='PUSH_EXC_INFO', opcode=30, arg=None, argval=None, argrepr='', offset=416, start_offset=416, starts_line=False, line_number=None, label=None, positions=None, cache_info=None),
- Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=418, start_offset=418, starts_line=True, line_number=28, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
- Instruction(opname='LOAD_CONST', opcode=81, arg=10, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=428, start_offset=428, starts_line=False, line_number=28, label=None, positions=None, cache_info=None),
- Instruction(opname='CALL', opcode=50, arg=1, argval=1, argrepr='', offset=430, start_offset=430, starts_line=False, line_number=28, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
+ Instruction(opname='LOAD_GLOBAL', opcode=88, arg=3, argval='print', argrepr='print + NULL', offset=418, start_offset=418, starts_line=True, line_number=28, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
+ Instruction(opname='LOAD_CONST', opcode=80, arg=10, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=428, start_offset=428, starts_line=False, line_number=28, label=None, positions=None, cache_info=None),
+ Instruction(opname='CALL', opcode=49, arg=1, argval=1, argrepr='', offset=430, start_offset=430, starts_line=False, line_number=28, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
Instruction(opname='POP_TOP', opcode=29, arg=None, argval=None, argrepr='', offset=438, start_offset=438, starts_line=False, line_number=28, label=None, positions=None, cache_info=None),
- Instruction(opname='RERAISE', opcode=101, arg=0, argval=0, argrepr='', offset=440, start_offset=440, starts_line=False, line_number=28, label=None, positions=None, cache_info=None),
- Instruction(opname='COPY', opcode=58, arg=3, argval=3, argrepr='', offset=442, start_offset=442, starts_line=True, line_number=None, label=None, positions=None, cache_info=None),
+ Instruction(opname='RERAISE', opcode=100, arg=0, argval=0, argrepr='', offset=440, start_offset=440, starts_line=False, line_number=28, label=None, positions=None, cache_info=None),
+ Instruction(opname='COPY', opcode=57, arg=3, argval=3, argrepr='', offset=442, start_offset=442, starts_line=True, line_number=None, label=None, positions=None, cache_info=None),
Instruction(opname='POP_EXCEPT', opcode=28, arg=None, argval=None, argrepr='', offset=444, start_offset=444, starts_line=False, line_number=None, label=None, positions=None, cache_info=None),
- Instruction(opname='RERAISE', opcode=101, arg=1, argval=1, argrepr='', offset=446, start_offset=446, starts_line=False, line_number=None, label=None, positions=None, cache_info=None),
+ Instruction(opname='RERAISE', opcode=100, arg=1, argval=1, argrepr='', offset=446, start_offset=446, starts_line=False, line_number=None, label=None, positions=None, cache_info=None),
]
# One last piece of inspect fodder to check the default line number handling
def simple(): pass
expected_opinfo_simple = [
Instruction(opname='RESUME', opcode=149, arg=0, argval=0, argrepr='', offset=0, start_offset=0, starts_line=True, line_number=simple.__code__.co_firstlineno, label=None, positions=None),
- Instruction(opname='RETURN_CONST', opcode=102, arg=0, argval=None, argrepr='None', offset=2, start_offset=2, starts_line=False, line_number=simple.__code__.co_firstlineno, label=None),
+ Instruction(opname='RETURN_CONST', opcode=101, arg=0, argval=None, argrepr='None', offset=2, start_offset=2, starts_line=False, line_number=simple.__code__.co_firstlineno, label=None),
]
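The opcode renumbering that dominates the expected-instruction updates above is mechanical: removing BUILD_CONST_KEY_MAP shifts every later opcode down by one, which is why CALL drops from 50 to 49, LOAD_CONST from 81 to 80, LOAD_GLOBAL from 89 to 88, and so on. The numeric values are an implementation detail; a minimal sketch (assuming a current CPython with the dis module) of how such expectation lists can be regenerated rather than edited by hand:

    import dis

    def simple(): pass

    # Print the fields the test's expected_opinfo_* lists are built from.
    # The opcode numbers are implementation details and shift whenever an
    # opcode (here BUILD_CONST_KEY_MAP) is added to or removed from the set.
    for instr in dis.get_instructions(simple):
        print(instr.opname, instr.opcode, instr.arg, instr.offset)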
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-07-23-11-57-36.gh-issue-122160.HSnrAP.rst b/Misc/NEWS.d/next/Core and Builtins/2024-07-23-11-57-36.gh-issue-122160.HSnrAP.rst
new file mode 100644
index 00000000000000..78153fc1abdaeb
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-07-23-11-57-36.gh-issue-122160.HSnrAP.rst
@@ -0,0 +1 @@
+Remove the ``BUILD_CONST_KEY_MAP`` opcode. Use :opcode:`BUILD_MAP` instead.
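Before this change the compiler emitted BUILD_CONST_KEY_MAP for dict displays whose keys are all constants, loading the keys as a single tuple; it now loads each key next to its value and finishes with BUILD_MAP. A minimal sketch for observing the difference from Python (the exact disassembly depends on the interpreter version):

    import dis

    def make():
        return {"a": 1, "b": 2}

    # On interpreters predating this change the output contains
    # BUILD_CONST_KEY_MAP; with this change it contains BUILD_MAP instead.
    dis.dis(make)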
diff --git a/Programs/test_frozenmain.h b/Programs/test_frozenmain.h
index b2a7196bd6081c..22354c9bbf8a35 100644
--- a/Programs/test_frozenmain.h
+++ b/Programs/test_frozenmain.h
@@ -1,17 +1,17 @@
// Auto-generated by Programs/freeze_test_frozenmain.py
unsigned char M_test_frozenmain[] = {
227,0,0,0,0,0,0,0,0,0,0,0,0,8,0,0,
- 0,0,0,0,0,243,166,0,0,0,149,0,81,0,81,1,
- 72,0,113,0,81,0,81,1,72,1,113,1,90,2,31,0,
- 81,2,50,1,0,0,0,0,0,0,29,0,90,2,31,0,
- 81,3,90,0,79,6,0,0,0,0,0,0,0,0,0,0,
- 0,0,0,0,0,0,0,0,50,2,0,0,0,0,0,0,
- 29,0,90,1,79,8,0,0,0,0,0,0,0,0,0,0,
- 0,0,0,0,0,0,0,0,31,0,50,0,0,0,0,0,
- 0,0,81,4,2,0,0,0,113,5,81,5,16,0,69,20,
- 0,0,113,6,90,2,31,0,81,6,90,6,12,0,81,7,
- 90,5,90,6,2,0,0,0,12,0,48,4,50,1,0,0,
- 0,0,0,0,29,0,74,22,0,0,9,0,29,0,102,1,
+ 0,0,0,0,0,243,166,0,0,0,149,0,80,0,80,1,
+ 71,0,112,0,80,0,80,1,71,1,112,1,89,2,31,0,
+ 80,2,49,1,0,0,0,0,0,0,29,0,89,2,31,0,
+ 80,3,89,0,78,6,0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,49,2,0,0,0,0,0,0,
+ 29,0,89,1,78,8,0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,31,0,49,0,0,0,0,0,
+ 0,0,80,4,2,0,0,0,112,5,80,5,16,0,68,20,
+ 0,0,112,6,89,2,31,0,80,6,89,6,12,0,80,7,
+ 89,5,89,6,2,0,0,0,12,0,47,4,49,1,0,0,
+ 0,0,0,0,29,0,73,22,0,0,9,0,29,0,101,1,
41,8,233,0,0,0,0,78,122,18,70,114,111,122,101,110,
32,72,101,108,108,111,32,87,111,114,108,100,122,8,115,121,
115,46,97,114,103,118,218,6,99,111,110,102,105,103,41,5,
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index 9dd7cf37beecf0..be6b4436694774 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -1882,25 +1882,6 @@ dummy_func(
}
}
- inst(BUILD_CONST_KEY_MAP, (values[oparg], keys -- map)) {
- PyObject *keys_o = PyStackRef_AsPyObjectBorrow(keys);
-
- assert(PyTuple_CheckExact(keys_o));
- assert(PyTuple_GET_SIZE(keys_o) == (Py_ssize_t)oparg);
- STACKREFS_TO_PYOBJECTS(values, oparg, values_o);
- if (CONVERSION_FAILED(values_o)) {
- DECREF_INPUTS();
- ERROR_IF(true, error);
- }
- PyObject *map_o = _PyDict_FromItems(
- &PyTuple_GET_ITEM(keys_o, 0), 1,
- values_o, 1, oparg);
- STACKREFS_TO_PYOBJECTS_CLEANUP(values_o);
- DECREF_INPUTS();
- ERROR_IF(map_o == NULL, error);
- map = PyStackRef_FromPyObjectSteal(map_o);
- }
-
inst(DICT_UPDATE, (dict, unused[oparg - 1], update -- dict, unused[oparg - 1])) {
PyObject *dict_o = PyStackRef_AsPyObjectBorrow(dict);
PyObject *update_o = PyStackRef_AsPyObjectBorrow(update);
diff --git a/Python/compile.c b/Python/compile.c
index 87a75487a9aaa5..9707759c99c943 100644
--- a/Python/compile.c
+++ b/Python/compile.c
@@ -1759,42 +1759,24 @@ compiler_kwonlydefaults(struct compiler *c, location loc,
*/
int i;
PyObject *keys = NULL;
-
+ int default_count = 0;
for (i = 0; i < asdl_seq_LEN(kwonlyargs); i++) {
arg_ty arg = asdl_seq_GET(kwonlyargs, i);
expr_ty default_ = asdl_seq_GET(kw_defaults, i);
if (default_) {
+ default_count++;
PyObject *mangled = compiler_maybe_mangle(c, arg->arg);
if (!mangled) {
goto error;
}
- if (keys == NULL) {
- keys = PyList_New(1);
- if (keys == NULL) {
- Py_DECREF(mangled);
- return ERROR;
- }
- PyList_SET_ITEM(keys, 0, mangled);
- }
- else {
- int res = PyList_Append(keys, mangled);
- Py_DECREF(mangled);
- if (res == -1) {
- goto error;
- }
- }
+ ADDOP_LOAD_CONST_NEW(c, loc, mangled);
if (compiler_visit_expr(c, default_) < 0) {
goto error;
}
}
}
- if (keys != NULL) {
- Py_ssize_t default_count = PyList_GET_SIZE(keys);
- PyObject *keys_tuple = PyList_AsTuple(keys);
- Py_DECREF(keys);
- ADDOP_LOAD_CONST_NEW(c, loc, keys_tuple);
- ADDOP_I(c, loc, BUILD_CONST_KEY_MAP, default_count);
- assert(default_count > 0);
+ if (default_count) {
+ ADDOP_I(c, loc, BUILD_MAP, default_count);
return 1;
}
else {
@@ -4454,25 +4436,8 @@ static int
compiler_subdict(struct compiler *c, expr_ty e, Py_ssize_t begin, Py_ssize_t end)
{
Py_ssize_t i, n = end - begin;
- PyObject *keys, *key;
int big = n*2 > STACK_USE_GUIDELINE;
location loc = LOC(e);
- if (n > 1 && !big && are_all_items_const(e->v.Dict.keys, begin, end)) {
- for (i = begin; i < end; i++) {
- VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Dict.values, i));
- }
- keys = PyTuple_New(n);
- if (keys == NULL) {
- return SUCCESS;
- }
- for (i = begin; i < end; i++) {
- key = ((expr_ty)asdl_seq_GET(e->v.Dict.keys, i))->v.Constant.value;
- PyTuple_SET_ITEM(keys, i - begin, Py_NewRef(key));
- }
- ADDOP_LOAD_CONST_NEW(c, loc, keys);
- ADDOP_I(c, loc, BUILD_CONST_KEY_MAP, n);
- return SUCCESS;
- }
if (big) {
ADDOP_I(c, loc, BUILD_MAP, 0);
}
@@ -5032,26 +4997,8 @@ compiler_subkwargs(struct compiler *c, location loc,
{
Py_ssize_t i, n = end - begin;
keyword_ty kw;
- PyObject *keys, *key;
assert(n > 0);
int big = n*2 > STACK_USE_GUIDELINE;
- if (n > 1 && !big) {
- for (i = begin; i < end; i++) {
- kw = asdl_seq_GET(keywords, i);
- VISIT(c, expr, kw->value);
- }
- keys = PyTuple_New(n);
- if (keys == NULL) {
- return ERROR;
- }
- for (i = begin; i < end; i++) {
- key = ((keyword_ty) asdl_seq_GET(keywords, i))->arg;
- PyTuple_SET_ITEM(keys, i - begin, Py_NewRef(key));
- }
- ADDOP_LOAD_CONST_NEW(c, loc, keys);
- ADDOP_I(c, loc, BUILD_CONST_KEY_MAP, n);
- return SUCCESS;
- }
if (big) {
ADDOP_I(c, NO_LOCATION, BUILD_MAP, 0);
}
diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h
index 2a4428e4a52cf0..abcc7cf4d69473 100644
--- a/Python/executor_cases.c.h
+++ b/Python/executor_cases.c.h
@@ -2028,40 +2028,6 @@
break;
}
- case _BUILD_CONST_KEY_MAP: {
- _PyStackRef keys;
- _PyStackRef *values;
- _PyStackRef map;
- oparg = CURRENT_OPARG();
- keys = stack_pointer[-1];
- values = &stack_pointer[-1 - oparg];
- PyObject *keys_o = PyStackRef_AsPyObjectBorrow(keys);
- assert(PyTuple_CheckExact(keys_o));
- assert(PyTuple_GET_SIZE(keys_o) == (Py_ssize_t)oparg);
- STACKREFS_TO_PYOBJECTS(values, oparg, values_o);
- if (CONVERSION_FAILED(values_o)) {
- for (int _i = oparg; --_i >= 0;) {
- PyStackRef_CLOSE(values[_i]);
- }
- PyStackRef_CLOSE(keys);
- if (true) JUMP_TO_ERROR();
- }
- PyObject *map_o = _PyDict_FromItems(
- &PyTuple_GET_ITEM(keys_o, 0), 1,
- values_o, 1, oparg);
- STACKREFS_TO_PYOBJECTS_CLEANUP(values_o);
- for (int _i = oparg; --_i >= 0;) {
- PyStackRef_CLOSE(values[_i]);
- }
- PyStackRef_CLOSE(keys);
- if (map_o == NULL) JUMP_TO_ERROR();
- map = PyStackRef_FromPyObjectSteal(map_o);
- stack_pointer[-1 - oparg] = map;
- stack_pointer += -oparg;
- assert(WITHIN_STACK_BOUNDS());
- break;
- }
-
case _DICT_UPDATE: {
_PyStackRef update;
_PyStackRef dict;
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h
index 585e6825a346d8..195fe4c5a04798 100644
--- a/Python/generated_cases.c.h
+++ b/Python/generated_cases.c.h
@@ -589,42 +589,6 @@
DISPATCH();
}
- TARGET(BUILD_CONST_KEY_MAP) {
- frame->instr_ptr = next_instr;
- next_instr += 1;
- INSTRUCTION_STATS(BUILD_CONST_KEY_MAP);
- _PyStackRef *values;
- _PyStackRef keys;
- _PyStackRef map;
- keys = stack_pointer[-1];
- values = &stack_pointer[-1 - oparg];
- PyObject *keys_o = PyStackRef_AsPyObjectBorrow(keys);
- assert(PyTuple_CheckExact(keys_o));
- assert(PyTuple_GET_SIZE(keys_o) == (Py_ssize_t)oparg);
- STACKREFS_TO_PYOBJECTS(values, oparg, values_o);
- if (CONVERSION_FAILED(values_o)) {
- for (int _i = oparg; --_i >= 0;) {
- PyStackRef_CLOSE(values[_i]);
- }
- PyStackRef_CLOSE(keys);
- if (true) { stack_pointer += -1 - oparg; goto error; }
- }
- PyObject *map_o = _PyDict_FromItems(
- &PyTuple_GET_ITEM(keys_o, 0), 1,
- values_o, 1, oparg);
- STACKREFS_TO_PYOBJECTS_CLEANUP(values_o);
- for (int _i = oparg; --_i >= 0;) {
- PyStackRef_CLOSE(values[_i]);
- }
- PyStackRef_CLOSE(keys);
- if (map_o == NULL) { stack_pointer += -1 - oparg; goto error; }
- map = PyStackRef_FromPyObjectSteal(map_o);
- stack_pointer[-1 - oparg] = map;
- stack_pointer += -oparg;
- assert(WITHIN_STACK_BOUNDS());
- DISPATCH();
- }
-
TARGET(BUILD_LIST) {
frame->instr_ptr = next_instr;
next_instr += 1;
diff --git a/Python/opcode_targets.h b/Python/opcode_targets.h
index 6097b249c0ad0b..74544a1dff25c6 100644
--- a/Python/opcode_targets.h
+++ b/Python/opcode_targets.h
@@ -42,7 +42,6 @@ static void *opcode_targets[256] = {
&&TARGET_UNARY_NOT,
&&TARGET_WITH_EXCEPT_START,
&&TARGET_BINARY_OP,
- &&TARGET_BUILD_CONST_KEY_MAP,
&&TARGET_BUILD_LIST,
&&TARGET_BUILD_MAP,
&&TARGET_BUILD_SET,
@@ -148,6 +147,7 @@ static void *opcode_targets[256] = {
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
+ &&_unknown_opcode,
&&TARGET_RESUME,
&&TARGET_BINARY_OP_ADD_FLOAT,
&&TARGET_BINARY_OP_ADD_INT,
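Because the computed-goto table in opcode_targets.h must stay 256 entries long, the removed target is compensated by an extra &&_unknown_opcode slot further down rather than by shrinking the array. From Python, the net effect of the whole patch is simply that the opcode disappears from the opcode tables (assuming an interpreter that includes this change):

    import opcode

    # BUILD_CONST_KEY_MAP is gone from the opcode map; BUILD_MAP remains.
    print("BUILD_CONST_KEY_MAP" in opcode.opmap)   # expected: False
    print("BUILD_MAP" in opcode.opmap)             # expected: True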
diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h
index 60cfb214835bdd..8077badce78a39 100644
--- a/Python/optimizer_cases.c.h
+++ b/Python/optimizer_cases.c.h
@@ -943,15 +943,6 @@
break;
}
- case _BUILD_CONST_KEY_MAP: {
- _Py_UopsSymbol *map;
- map = sym_new_not_null(ctx);
- stack_pointer[-1 - oparg] = map;
- stack_pointer += -oparg;
- assert(WITHIN_STACK_BOUNDS());
- break;
- }
-
case _DICT_UPDATE: {
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
From aef95eb107fef9355c66461612aedd31265f8c21 Mon Sep 17 00:00:00 2001
From: Marc Mueller <30130371+cdce8p@users.noreply.github.com>
Date: Thu, 25 Jul 2024 17:31:30 +0200
Subject: [PATCH 027/139] gh-121489: Export private _PyBytes_Join() again
(#122267)
---
Include/cpython/bytesobject.h | 4 ++++
Include/internal/pycore_bytesobject.h | 4 ----
.../next/C API/2024-07-21-17-40-07.gh-issue-121489.SUMFCr.rst | 1 +
Modules/_io/bufferedio.c | 1 -
4 files changed, 5 insertions(+), 5 deletions(-)
create mode 100644 Misc/NEWS.d/next/C API/2024-07-21-17-40-07.gh-issue-121489.SUMFCr.rst
diff --git a/Include/cpython/bytesobject.h b/Include/cpython/bytesobject.h
index 816823716e9a6f..41537210b748a1 100644
--- a/Include/cpython/bytesobject.h
+++ b/Include/cpython/bytesobject.h
@@ -31,3 +31,7 @@ static inline Py_ssize_t PyBytes_GET_SIZE(PyObject *op) {
return Py_SIZE(self);
}
#define PyBytes_GET_SIZE(self) PyBytes_GET_SIZE(_PyObject_CAST(self))
+
+/* _PyBytes_Join(sep, x) is like sep.join(x). sep must be PyBytesObject*,
+ x must be an iterable object. */
+PyAPI_FUNC(PyObject*) _PyBytes_Join(PyObject *sep, PyObject *x);
diff --git a/Include/internal/pycore_bytesobject.h b/Include/internal/pycore_bytesobject.h
index 94d421a9eb742a..300e7f4896a39e 100644
--- a/Include/internal/pycore_bytesobject.h
+++ b/Include/internal/pycore_bytesobject.h
@@ -23,10 +23,6 @@ extern PyObject* _PyBytes_FromHex(
PyAPI_FUNC(PyObject*) _PyBytes_DecodeEscape(const char *, Py_ssize_t,
const char *, const char **);
-/* _PyBytes_Join(sep, x) is like sep.join(x). sep must be PyBytesObject*,
- x must be an iterable object. */
-extern PyObject* _PyBytes_Join(PyObject *sep, PyObject *x);
-
// Substring Search.
//
diff --git a/Misc/NEWS.d/next/C API/2024-07-21-17-40-07.gh-issue-121489.SUMFCr.rst b/Misc/NEWS.d/next/C API/2024-07-21-17-40-07.gh-issue-121489.SUMFCr.rst
new file mode 100644
index 00000000000000..8c18a49c05d547
--- /dev/null
+++ b/Misc/NEWS.d/next/C API/2024-07-21-17-40-07.gh-issue-121489.SUMFCr.rst
@@ -0,0 +1 @@
+Export private :c:func:`!_PyBytes_Join` again.
diff --git a/Modules/_io/bufferedio.c b/Modules/_io/bufferedio.c
index aa52711941d374..e45323c93a17ef 100644
--- a/Modules/_io/bufferedio.c
+++ b/Modules/_io/bufferedio.c
@@ -8,7 +8,6 @@
*/
#include "Python.h"
-#include "pycore_bytesobject.h" // _PyBytes_Join()
#include "pycore_call.h" // _PyObject_CallNoArgs()
#include "pycore_object.h" // _PyObject_GC_UNTRACK()
#include "pycore_pyerrors.h" // _Py_FatalErrorFormat()
From 5e686ff57d6bc2fd8c675bd2c59a064be6da2839 Mon Sep 17 00:00:00 2001
From: Mark Shannon
Date: Thu, 25 Jul 2024 18:32:43 +0100
Subject: [PATCH 028/139] GH-122034: Add StackRef variants of type checks to
reduce the number of PyStackRef_AsPyObjectBorrow calls (GH-122037)
---
Include/internal/pycore_stackref.h | 33 ++++++++++++++++++++++++++++++
Python/bytecodes.c | 20 +++++++++---------
Python/executor_cases.c.h | 10 ++++-----
Python/generated_cases.c.h | 24 +++++++++++-----------
4 files changed, 60 insertions(+), 27 deletions(-)
diff --git a/Include/internal/pycore_stackref.h b/Include/internal/pycore_stackref.h
index 8d3d559814bfd9..1b35a3e3269257 100644
--- a/Include/internal/pycore_stackref.h
+++ b/Include/internal/pycore_stackref.h
@@ -11,6 +11,7 @@ extern "C" {
#include "pycore_object_deferred.h"
#include <stddef.h>
+#include <stdbool.h>
/*
This file introduces a new API for handling references on the stack, called
@@ -237,6 +238,38 @@ _PyObjectStack_FromStackRefStack(PyObject **dst, const _PyStackRef *src, size_t
}
}
+// StackRef type checks
+
+static inline bool
+PyStackRef_GenCheck(_PyStackRef stackref)
+{
+ return PyGen_Check(PyStackRef_AsPyObjectBorrow(stackref));
+}
+
+static inline bool
+PyStackRef_BoolCheck(_PyStackRef stackref)
+{
+ return PyBool_Check(PyStackRef_AsPyObjectBorrow(stackref));
+}
+
+static inline bool
+PyStackRef_LongCheck(_PyStackRef stackref)
+{
+ return PyLong_Check(PyStackRef_AsPyObjectBorrow(stackref));
+}
+
+static inline bool
+PyStackRef_ExceptionInstanceCheck(_PyStackRef stackref)
+{
+ return PyExceptionInstance_Check(PyStackRef_AsPyObjectBorrow(stackref));
+}
+
+
+static inline bool
+PyStackRef_FunctionCheck(_PyStackRef stackref)
+{
+ return PyFunction_Check(PyStackRef_AsPyObjectBorrow(stackref));
+}
#ifdef __cplusplus
}
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index be6b4436694774..b161fc0ede1dff 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -286,7 +286,7 @@ dummy_func(
tier1 inst(INSTRUMENTED_END_FOR, (receiver, value -- receiver)) {
/* Need to create a fake StopIteration error here,
* to conform to PEP 380 */
- if (PyGen_Check(PyStackRef_AsPyObjectBorrow(receiver))) {
+ if (PyStackRef_GenCheck(receiver)) {
if (monitor_stop_iteration(tstate, frame, this_instr, PyStackRef_AsPyObjectBorrow(value))) {
ERROR_NO_POP();
}
@@ -317,7 +317,7 @@ dummy_func(
}
pure inst(UNARY_NOT, (value -- res)) {
- assert(PyBool_Check(PyStackRef_AsPyObjectBorrow(value)));
+ assert(PyStackRef_BoolCheck(value));
res = PyStackRef_Is(value, PyStackRef_False)
? PyStackRef_True : PyStackRef_False;
}
@@ -353,7 +353,7 @@ dummy_func(
macro(TO_BOOL) = _SPECIALIZE_TO_BOOL + unused/2 + _TO_BOOL;
inst(TO_BOOL_BOOL, (unused/1, unused/2, value -- value)) {
- EXIT_IF(!PyBool_Check(PyStackRef_AsPyObjectBorrow(value)));
+ EXIT_IF(!PyStackRef_BoolCheck(value));
STAT_INC(TO_BOOL, hit);
}
@@ -2688,7 +2688,7 @@ dummy_func(
}
replaced op(_POP_JUMP_IF_FALSE, (cond -- )) {
- assert(PyBool_Check(PyStackRef_AsPyObjectBorrow(cond)));
+ assert(PyStackRef_BoolCheck(cond));
int flag = PyStackRef_Is(cond, PyStackRef_False);
#if ENABLE_SPECIALIZATION
this_instr[1].cache = (this_instr[1].cache << 1) | flag;
@@ -2697,7 +2697,7 @@ dummy_func(
}
replaced op(_POP_JUMP_IF_TRUE, (cond -- )) {
- assert(PyBool_Check(PyStackRef_AsPyObjectBorrow(cond)));
+ assert(PyStackRef_BoolCheck(cond));
int flag = PyStackRef_Is(cond, PyStackRef_True);
#if ENABLE_SPECIALIZATION
this_instr[1].cache = (this_instr[1].cache << 1) | flag;
@@ -3121,7 +3121,7 @@ dummy_func(
else {
Py_DECREF(tb);
}
- assert(PyLong_Check(PyStackRef_AsPyObjectBorrow(lasti)));
+ assert(PyStackRef_LongCheck(lasti));
(void)lasti; // Shut up compiler warning if asserts are off
PyObject *stack[5] = {NULL, PyStackRef_AsPyObjectBorrow(exit_self), exc, val_o, tb};
int has_self = !PyStackRef_IsNull(exit_self);
@@ -3163,7 +3163,7 @@ dummy_func(
else {
prev_exc = PyStackRef_None;
}
- assert(PyExceptionInstance_Check(PyStackRef_AsPyObjectBorrow(new_exc)));
+ assert(PyStackRef_ExceptionInstanceCheck(new_exc));
exc_info->exc_value = PyStackRef_AsPyObjectNew(new_exc);
}
@@ -3459,7 +3459,7 @@ dummy_func(
assert(Py_TYPE(callable_o) == &PyMethod_Type);
self = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self);
method = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func);
- assert(PyFunction_Check(PyStackRef_AsPyObjectBorrow(method)));
+ assert(PyStackRef_FunctionCheck(method));
PyStackRef_CLOSE(callable);
}
@@ -4467,7 +4467,7 @@ dummy_func(
inst(INSTRUMENTED_POP_JUMP_IF_TRUE, (unused/1 -- )) {
_PyStackRef cond = POP();
- assert(PyBool_Check(PyStackRef_AsPyObjectBorrow(cond)));
+ assert(PyStackRef_BoolCheck(cond));
int flag = PyStackRef_Is(cond, PyStackRef_True);
int offset = flag * oparg;
#if ENABLE_SPECIALIZATION
@@ -4478,7 +4478,7 @@ dummy_func(
inst(INSTRUMENTED_POP_JUMP_IF_FALSE, (unused/1 -- )) {
_PyStackRef cond = POP();
- assert(PyBool_Check(PyStackRef_AsPyObjectBorrow(cond)));
+ assert(PyStackRef_BoolCheck(cond));
int flag = PyStackRef_Is(cond, PyStackRef_False);
int offset = flag * oparg;
#if ENABLE_SPECIALIZATION
diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h
index abcc7cf4d69473..87c9255ef7997d 100644
--- a/Python/executor_cases.c.h
+++ b/Python/executor_cases.c.h
@@ -324,7 +324,7 @@
_PyStackRef value;
_PyStackRef res;
value = stack_pointer[-1];
- assert(PyBool_Check(PyStackRef_AsPyObjectBorrow(value)));
+ assert(PyStackRef_BoolCheck(value));
res = PyStackRef_Is(value, PyStackRef_False)
? PyStackRef_True : PyStackRef_False;
stack_pointer[-1] = res;
@@ -346,7 +346,7 @@
case _TO_BOOL_BOOL: {
_PyStackRef value;
value = stack_pointer[-1];
- if (!PyBool_Check(PyStackRef_AsPyObjectBorrow(value))) {
+ if (!PyStackRef_BoolCheck(value)) {
UOP_STAT_INC(uopcode, miss);
JUMP_TO_JUMP_TARGET();
}
@@ -3344,7 +3344,7 @@
else {
Py_DECREF(tb);
}
- assert(PyLong_Check(PyStackRef_AsPyObjectBorrow(lasti)));
+ assert(PyStackRef_LongCheck(lasti));
(void)lasti; // Shut up compiler warning if asserts are off
PyObject *stack[5] = {NULL, PyStackRef_AsPyObjectBorrow(exit_self), exc, val_o, tb};
int has_self = !PyStackRef_IsNull(exit_self);
@@ -3368,7 +3368,7 @@
else {
prev_exc = PyStackRef_None;
}
- assert(PyExceptionInstance_Check(PyStackRef_AsPyObjectBorrow(new_exc)));
+ assert(PyStackRef_ExceptionInstanceCheck(new_exc));
exc_info->exc_value = PyStackRef_AsPyObjectNew(new_exc);
stack_pointer[-1] = prev_exc;
stack_pointer[0] = new_exc;
@@ -3614,7 +3614,7 @@
assert(Py_TYPE(callable_o) == &PyMethod_Type);
self = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self);
method = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func);
- assert(PyFunction_Check(PyStackRef_AsPyObjectBorrow(method)));
+ assert(PyStackRef_FunctionCheck(method));
PyStackRef_CLOSE(callable);
stack_pointer[-2 - oparg] = method;
stack_pointer[-1 - oparg] = self;
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h
index 195fe4c5a04798..f15a829ea3ebfa 100644
--- a/Python/generated_cases.c.h
+++ b/Python/generated_cases.c.h
@@ -1078,7 +1078,7 @@
assert(Py_TYPE(callable_o) == &PyMethod_Type);
self = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_self);
method = PyStackRef_FromPyObjectNew(((PyMethodObject *)callable_o)->im_func);
- assert(PyFunction_Check(PyStackRef_AsPyObjectBorrow(method)));
+ assert(PyStackRef_FunctionCheck(method));
PyStackRef_CLOSE(callable);
}
// flush
@@ -3544,7 +3544,7 @@
receiver = stack_pointer[-2];
/* Need to create a fake StopIteration error here,
* to conform to PEP 380 */
- if (PyGen_Check(PyStackRef_AsPyObjectBorrow(receiver))) {
+ if (PyStackRef_GenCheck(receiver)) {
if (monitor_stop_iteration(tstate, frame, this_instr, PyStackRef_AsPyObjectBorrow(value))) {
goto error;
}
@@ -3667,7 +3667,7 @@
INSTRUCTION_STATS(INSTRUMENTED_POP_JUMP_IF_FALSE);
/* Skip 1 cache entry */
_PyStackRef cond = POP();
- assert(PyBool_Check(PyStackRef_AsPyObjectBorrow(cond)));
+ assert(PyStackRef_BoolCheck(cond));
int flag = PyStackRef_Is(cond, PyStackRef_False);
int offset = flag * oparg;
#if ENABLE_SPECIALIZATION
@@ -3730,7 +3730,7 @@
INSTRUCTION_STATS(INSTRUMENTED_POP_JUMP_IF_TRUE);
/* Skip 1 cache entry */
_PyStackRef cond = POP();
- assert(PyBool_Check(PyStackRef_AsPyObjectBorrow(cond)));
+ assert(PyStackRef_BoolCheck(cond));
int flag = PyStackRef_Is(cond, PyStackRef_True);
int offset = flag * oparg;
#if ENABLE_SPECIALIZATION
@@ -5329,7 +5329,7 @@
_PyStackRef cond;
/* Skip 1 cache entry */
cond = stack_pointer[-1];
- assert(PyBool_Check(PyStackRef_AsPyObjectBorrow(cond)));
+ assert(PyStackRef_BoolCheck(cond));
int flag = PyStackRef_Is(cond, PyStackRef_False);
#if ENABLE_SPECIALIZATION
this_instr[1].cache = (this_instr[1].cache << 1) | flag;
@@ -5363,7 +5363,7 @@
// _POP_JUMP_IF_TRUE
cond = b;
{
- assert(PyBool_Check(PyStackRef_AsPyObjectBorrow(cond)));
+ assert(PyStackRef_BoolCheck(cond));
int flag = PyStackRef_Is(cond, PyStackRef_True);
#if ENABLE_SPECIALIZATION
this_instr[1].cache = (this_instr[1].cache << 1) | flag;
@@ -5398,7 +5398,7 @@
// _POP_JUMP_IF_FALSE
cond = b;
{
- assert(PyBool_Check(PyStackRef_AsPyObjectBorrow(cond)));
+ assert(PyStackRef_BoolCheck(cond));
int flag = PyStackRef_Is(cond, PyStackRef_False);
#if ENABLE_SPECIALIZATION
this_instr[1].cache = (this_instr[1].cache << 1) | flag;
@@ -5418,7 +5418,7 @@
_PyStackRef cond;
/* Skip 1 cache entry */
cond = stack_pointer[-1];
- assert(PyBool_Check(PyStackRef_AsPyObjectBorrow(cond)));
+ assert(PyStackRef_BoolCheck(cond));
int flag = PyStackRef_Is(cond, PyStackRef_True);
#if ENABLE_SPECIALIZATION
this_instr[1].cache = (this_instr[1].cache << 1) | flag;
@@ -5455,7 +5455,7 @@
else {
prev_exc = PyStackRef_None;
}
- assert(PyExceptionInstance_Check(PyStackRef_AsPyObjectBorrow(new_exc)));
+ assert(PyStackRef_ExceptionInstanceCheck(new_exc));
exc_info->exc_value = PyStackRef_AsPyObjectNew(new_exc);
stack_pointer[-1] = prev_exc;
stack_pointer[0] = new_exc;
@@ -6418,7 +6418,7 @@
/* Skip 1 cache entry */
/* Skip 2 cache entries */
value = stack_pointer[-1];
- DEOPT_IF(!PyBool_Check(PyStackRef_AsPyObjectBorrow(value)), TO_BOOL);
+ DEOPT_IF(!PyStackRef_BoolCheck(value), TO_BOOL);
STAT_INC(TO_BOOL, hit);
DISPATCH();
}
@@ -6548,7 +6548,7 @@
_PyStackRef value;
_PyStackRef res;
value = stack_pointer[-1];
- assert(PyBool_Check(PyStackRef_AsPyObjectBorrow(value)));
+ assert(PyStackRef_BoolCheck(value));
res = PyStackRef_Is(value, PyStackRef_False)
? PyStackRef_True : PyStackRef_False;
stack_pointer[-1] = res;
@@ -6715,7 +6715,7 @@
else {
Py_DECREF(tb);
}
- assert(PyLong_Check(PyStackRef_AsPyObjectBorrow(lasti)));
+ assert(PyStackRef_LongCheck(lasti));
(void)lasti; // Shut up compiler warning if asserts are off
PyObject *stack[5] = {NULL, PyStackRef_AsPyObjectBorrow(exit_self), exc, val_o, tb};
int has_self = !PyStackRef_IsNull(exit_self);
From 5f6001130f8ada871193377954cfcfee01ef93b6 Mon Sep 17 00:00:00 2001
From: Brandt Bucher
Date: Thu, 25 Jul 2024 10:45:28 -0700
Subject: [PATCH 029/139] GH-118093: Add tier two support for
LOAD_ATTR_PROPERTY (GH-122283)
---
Include/internal/pycore_opcode_metadata.h | 5 +-
Include/internal/pycore_uop_ids.h | 98 +++++++++++------------
Include/internal/pycore_uop_metadata.h | 4 +
Python/bytecodes.c | 30 ++++---
Python/executor_cases.c.h | 34 +++++++-
Python/generated_cases.c.h | 79 ++++++++++++------
Python/optimizer.c | 5 +-
Python/optimizer_cases.c.h | 7 +-
Python/specialize.c | 5 --
9 files changed, 168 insertions(+), 99 deletions(-)
diff --git a/Include/internal/pycore_opcode_metadata.h b/Include/internal/pycore_opcode_metadata.h
index 2b6e9bca51c9df..2fb6b2c4ed8205 100644
--- a/Include/internal/pycore_opcode_metadata.h
+++ b/Include/internal/pycore_opcode_metadata.h
@@ -733,7 +733,7 @@ int _PyOpcode_num_pushed(int opcode, int oparg) {
case LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES:
return 1;
case LOAD_ATTR_PROPERTY:
- return 1;
+ return 0;
case LOAD_ATTR_SLOT:
return 1 + (oparg & 1);
case LOAD_ATTR_WITH_HINT:
@@ -1109,7 +1109,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[264] = {
[LOAD_ATTR_MODULE] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG },
[LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_EXIT_FLAG },
[LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG },
- [LOAD_ATTR_PROPERTY] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG },
+ [LOAD_ATTR_PROPERTY] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ESCAPES_FLAG },
[LOAD_ATTR_SLOT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG },
[LOAD_ATTR_WITH_HINT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG },
[LOAD_BUILD_CLASS] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
@@ -1305,6 +1305,7 @@ _PyOpcode_macro_expansion[256] = {
[LOAD_ATTR_MODULE] = { .nuops = 2, .uops = { { _CHECK_ATTR_MODULE, 2, 1 }, { _LOAD_ATTR_MODULE, 1, 3 } } },
[LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = { .nuops = 2, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _LOAD_ATTR_NONDESCRIPTOR_NO_DICT, 4, 5 } } },
[LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = { .nuops = 4, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT, 0, 0 }, { _GUARD_KEYS_VERSION, 2, 3 }, { _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES, 4, 5 } } },
+ [LOAD_ATTR_PROPERTY] = { .nuops = 5, .uops = { { _CHECK_PEP_523, 0, 0 }, { _GUARD_TYPE_VERSION, 2, 1 }, { _LOAD_ATTR_PROPERTY_FRAME, 4, 5 }, { _SAVE_RETURN_OFFSET, 7, 9 }, { _PUSH_FRAME, 0, 0 } } },
[LOAD_ATTR_SLOT] = { .nuops = 2, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _LOAD_ATTR_SLOT, 1, 3 } } },
[LOAD_ATTR_WITH_HINT] = { .nuops = 3, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _CHECK_ATTR_WITH_HINT, 0, 0 }, { _LOAD_ATTR_WITH_HINT, 1, 3 } } },
[LOAD_BUILD_CLASS] = { .nuops = 1, .uops = { { _LOAD_BUILD_CLASS, 0, 0 } } },
diff --git a/Include/internal/pycore_uop_ids.h b/Include/internal/pycore_uop_ids.h
index fc67da697cb06c..88c835ca8ed10c 100644
--- a/Include/internal/pycore_uop_ids.h
+++ b/Include/internal/pycore_uop_ids.h
@@ -182,36 +182,36 @@ extern "C" {
#define _LOAD_ATTR_MODULE 402
#define _LOAD_ATTR_NONDESCRIPTOR_NO_DICT 403
#define _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 404
-#define _LOAD_ATTR_PROPERTY LOAD_ATTR_PROPERTY
-#define _LOAD_ATTR_SLOT 405
-#define _LOAD_ATTR_SLOT_0 406
-#define _LOAD_ATTR_SLOT_1 407
-#define _LOAD_ATTR_WITH_HINT 408
+#define _LOAD_ATTR_PROPERTY_FRAME 405
+#define _LOAD_ATTR_SLOT 406
+#define _LOAD_ATTR_SLOT_0 407
+#define _LOAD_ATTR_SLOT_1 408
+#define _LOAD_ATTR_WITH_HINT 409
#define _LOAD_BUILD_CLASS LOAD_BUILD_CLASS
#define _LOAD_COMMON_CONSTANT LOAD_COMMON_CONSTANT
#define _LOAD_CONST LOAD_CONST
-#define _LOAD_CONST_INLINE 409
-#define _LOAD_CONST_INLINE_BORROW 410
-#define _LOAD_CONST_INLINE_BORROW_WITH_NULL 411
-#define _LOAD_CONST_INLINE_WITH_NULL 412
+#define _LOAD_CONST_INLINE 410
+#define _LOAD_CONST_INLINE_BORROW 411
+#define _LOAD_CONST_INLINE_BORROW_WITH_NULL 412
+#define _LOAD_CONST_INLINE_WITH_NULL 413
#define _LOAD_DEREF LOAD_DEREF
-#define _LOAD_FAST 413
-#define _LOAD_FAST_0 414
-#define _LOAD_FAST_1 415
-#define _LOAD_FAST_2 416
-#define _LOAD_FAST_3 417
-#define _LOAD_FAST_4 418
-#define _LOAD_FAST_5 419
-#define _LOAD_FAST_6 420
-#define _LOAD_FAST_7 421
+#define _LOAD_FAST 414
+#define _LOAD_FAST_0 415
+#define _LOAD_FAST_1 416
+#define _LOAD_FAST_2 417
+#define _LOAD_FAST_3 418
+#define _LOAD_FAST_4 419
+#define _LOAD_FAST_5 420
+#define _LOAD_FAST_6 421
+#define _LOAD_FAST_7 422
#define _LOAD_FAST_AND_CLEAR LOAD_FAST_AND_CLEAR
#define _LOAD_FAST_CHECK LOAD_FAST_CHECK
#define _LOAD_FAST_LOAD_FAST LOAD_FAST_LOAD_FAST
#define _LOAD_FROM_DICT_OR_DEREF LOAD_FROM_DICT_OR_DEREF
#define _LOAD_FROM_DICT_OR_GLOBALS LOAD_FROM_DICT_OR_GLOBALS
-#define _LOAD_GLOBAL 422
-#define _LOAD_GLOBAL_BUILTINS 423
-#define _LOAD_GLOBAL_MODULE 424
+#define _LOAD_GLOBAL 423
+#define _LOAD_GLOBAL_BUILTINS 424
+#define _LOAD_GLOBAL_MODULE 425
#define _LOAD_LOCALS LOAD_LOCALS
#define _LOAD_NAME LOAD_NAME
#define _LOAD_SPECIAL LOAD_SPECIAL
@@ -226,51 +226,51 @@ extern "C" {
#define _MATCH_SEQUENCE MATCH_SEQUENCE
#define _NOP NOP
#define _POP_EXCEPT POP_EXCEPT
-#define _POP_JUMP_IF_FALSE 425
-#define _POP_JUMP_IF_TRUE 426
+#define _POP_JUMP_IF_FALSE 426
+#define _POP_JUMP_IF_TRUE 427
#define _POP_TOP POP_TOP
-#define _POP_TOP_LOAD_CONST_INLINE_BORROW 427
+#define _POP_TOP_LOAD_CONST_INLINE_BORROW 428
#define _PUSH_EXC_INFO PUSH_EXC_INFO
-#define _PUSH_FRAME 428
+#define _PUSH_FRAME 429
#define _PUSH_NULL PUSH_NULL
-#define _PY_FRAME_GENERAL 429
-#define _REPLACE_WITH_TRUE 430
+#define _PY_FRAME_GENERAL 430
+#define _REPLACE_WITH_TRUE 431
#define _RESUME_CHECK RESUME_CHECK
#define _RETURN_GENERATOR RETURN_GENERATOR
#define _RETURN_VALUE RETURN_VALUE
-#define _SAVE_RETURN_OFFSET 431
-#define _SEND 432
-#define _SEND_GEN_FRAME 433
+#define _SAVE_RETURN_OFFSET 432
+#define _SEND 433
+#define _SEND_GEN_FRAME 434
#define _SETUP_ANNOTATIONS SETUP_ANNOTATIONS
#define _SET_ADD SET_ADD
#define _SET_FUNCTION_ATTRIBUTE SET_FUNCTION_ATTRIBUTE
#define _SET_UPDATE SET_UPDATE
-#define _START_EXECUTOR 434
-#define _STORE_ATTR 435
-#define _STORE_ATTR_INSTANCE_VALUE 436
-#define _STORE_ATTR_SLOT 437
-#define _STORE_ATTR_WITH_HINT 438
+#define _START_EXECUTOR 435
+#define _STORE_ATTR 436
+#define _STORE_ATTR_INSTANCE_VALUE 437
+#define _STORE_ATTR_SLOT 438
+#define _STORE_ATTR_WITH_HINT 439
#define _STORE_DEREF STORE_DEREF
-#define _STORE_FAST 439
-#define _STORE_FAST_0 440
-#define _STORE_FAST_1 441
-#define _STORE_FAST_2 442
-#define _STORE_FAST_3 443
-#define _STORE_FAST_4 444
-#define _STORE_FAST_5 445
-#define _STORE_FAST_6 446
-#define _STORE_FAST_7 447
+#define _STORE_FAST 440
+#define _STORE_FAST_0 441
+#define _STORE_FAST_1 442
+#define _STORE_FAST_2 443
+#define _STORE_FAST_3 444
+#define _STORE_FAST_4 445
+#define _STORE_FAST_5 446
+#define _STORE_FAST_6 447
+#define _STORE_FAST_7 448
#define _STORE_FAST_LOAD_FAST STORE_FAST_LOAD_FAST
#define _STORE_FAST_STORE_FAST STORE_FAST_STORE_FAST
#define _STORE_GLOBAL STORE_GLOBAL
#define _STORE_NAME STORE_NAME
#define _STORE_SLICE STORE_SLICE
-#define _STORE_SUBSCR 448
+#define _STORE_SUBSCR 449
#define _STORE_SUBSCR_DICT STORE_SUBSCR_DICT
#define _STORE_SUBSCR_LIST_INT STORE_SUBSCR_LIST_INT
#define _SWAP SWAP
-#define _TIER2_RESUME_CHECK 449
-#define _TO_BOOL 450
+#define _TIER2_RESUME_CHECK 450
+#define _TO_BOOL 451
#define _TO_BOOL_BOOL TO_BOOL_BOOL
#define _TO_BOOL_INT TO_BOOL_INT
#define _TO_BOOL_LIST TO_BOOL_LIST
@@ -280,13 +280,13 @@ extern "C" {
#define _UNARY_NEGATIVE UNARY_NEGATIVE
#define _UNARY_NOT UNARY_NOT
#define _UNPACK_EX UNPACK_EX
-#define _UNPACK_SEQUENCE 451
+#define _UNPACK_SEQUENCE 452
#define _UNPACK_SEQUENCE_LIST UNPACK_SEQUENCE_LIST
#define _UNPACK_SEQUENCE_TUPLE UNPACK_SEQUENCE_TUPLE
#define _UNPACK_SEQUENCE_TWO_TUPLE UNPACK_SEQUENCE_TWO_TUPLE
#define _WITH_EXCEPT_START WITH_EXCEPT_START
#define _YIELD_VALUE YIELD_VALUE
-#define MAX_UOP_ID 451
+#define MAX_UOP_ID 452
#ifdef __cplusplus
}
diff --git a/Include/internal/pycore_uop_metadata.h b/Include/internal/pycore_uop_metadata.h
index e86bae1d72d1a9..14befe59f04a1e 100644
--- a/Include/internal/pycore_uop_metadata.h
+++ b/Include/internal/pycore_uop_metadata.h
@@ -151,6 +151,7 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = {
[_LOAD_ATTR_CLASS_0] = 0,
[_LOAD_ATTR_CLASS_1] = 0,
[_LOAD_ATTR_CLASS] = HAS_ARG_FLAG | HAS_OPARG_AND_1_FLAG,
+ [_LOAD_ATTR_PROPERTY_FRAME] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG,
[_GUARD_DORV_NO_DICT] = HAS_DEOPT_FLAG,
[_STORE_ATTR_INSTANCE_VALUE] = 0,
[_STORE_ATTR_WITH_HINT] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG,
@@ -420,6 +421,7 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = {
[_LOAD_ATTR_MODULE] = "_LOAD_ATTR_MODULE",
[_LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = "_LOAD_ATTR_NONDESCRIPTOR_NO_DICT",
[_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = "_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES",
+ [_LOAD_ATTR_PROPERTY_FRAME] = "_LOAD_ATTR_PROPERTY_FRAME",
[_LOAD_ATTR_SLOT] = "_LOAD_ATTR_SLOT",
[_LOAD_ATTR_SLOT_0] = "_LOAD_ATTR_SLOT_0",
[_LOAD_ATTR_SLOT_1] = "_LOAD_ATTR_SLOT_1",
@@ -788,6 +790,8 @@ int _PyUop_num_popped(int opcode, int oparg)
return 1;
case _LOAD_ATTR_CLASS:
return 1;
+ case _LOAD_ATTR_PROPERTY_FRAME:
+ return 1;
case _GUARD_DORV_NO_DICT:
return 1;
case _STORE_ATTR_INSTANCE_VALUE:
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index b161fc0ede1dff..d356fc9bfdddba 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -2243,32 +2243,30 @@ dummy_func(
unused/2 +
_LOAD_ATTR_CLASS;
- inst(LOAD_ATTR_PROPERTY, (unused/1, type_version/2, func_version/2, fget/4, owner -- unused, unused if (0))) {
- PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
-
+ op(_LOAD_ATTR_PROPERTY_FRAME, (fget/4, owner -- new_frame: _PyInterpreterFrame *)) {
assert((oparg & 1) == 0);
- DEOPT_IF(tstate->interp->eval_frame);
-
- PyTypeObject *cls = Py_TYPE(owner_o);
- assert(type_version != 0);
- DEOPT_IF(cls->tp_version_tag != type_version);
assert(Py_IS_TYPE(fget, &PyFunction_Type));
PyFunctionObject *f = (PyFunctionObject *)fget;
- assert(func_version != 0);
- DEOPT_IF(f->func_version != func_version);
PyCodeObject *code = (PyCodeObject *)f->func_code;
- assert(code->co_argcount == 1);
+ DEOPT_IF((code->co_flags & (CO_VARKEYWORDS | CO_VARARGS | CO_OPTIMIZED)) != CO_OPTIMIZED);
+ DEOPT_IF(code->co_kwonlyargcount);
+ DEOPT_IF(code->co_argcount != 1);
DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize));
STAT_INC(LOAD_ATTR, hit);
Py_INCREF(fget);
- _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, f, 1);
- // Manipulate stack directly because we exit with DISPATCH_INLINED().
- STACK_SHRINK(1);
+ new_frame = _PyFrame_PushUnchecked(tstate, f, 1);
new_frame->localsplus[0] = owner;
- frame->return_offset = (uint16_t)(next_instr - this_instr);
- DISPATCH_INLINED(new_frame);
}
+ macro(LOAD_ATTR_PROPERTY) =
+ unused/1 +
+ _CHECK_PEP_523 +
+ _GUARD_TYPE_VERSION +
+ unused/2 +
+ _LOAD_ATTR_PROPERTY_FRAME +
+ _SAVE_RETURN_OFFSET +
+ _PUSH_FRAME;
+
inst(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN, (unused/1, type_version/2, func_version/2, getattribute/4, owner -- unused, unused if (0))) {
PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
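The rewrite above splits LOAD_ATTR_PROPERTY into reusable uops (_CHECK_PEP_523, _GUARD_TYPE_VERSION, _LOAD_ATTR_PROPERTY_FRAME, _SAVE_RETURN_OFFSET, _PUSH_FRAME), so the getter's frame is pushed by the ordinary frame machinery instead of exiting through DISPATCH_INLINED, which is what makes the instruction viable in tier two. Whether the specialization is ever reached depends on the adaptive interpreter warming up; a rough, build-dependent probe from Python:

    import dis

    class Point:
        def __init__(self, x):
            self._x = x
        @property
        def x(self):
            return self._x

    def read(p):
        return p.x

    p = Point(3)
    for _ in range(1000):      # give the adaptive interpreter time to specialize
        read(p)

    # With a specializing build, the adaptive disassembly may show
    # LOAD_ATTR_PROPERTY at the attribute load in read().
    dis.dis(read, adaptive=True)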
diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h
index 87c9255ef7997d..b8343f9ffd5f80 100644
--- a/Python/executor_cases.c.h
+++ b/Python/executor_cases.c.h
@@ -2507,7 +2507,39 @@
/* _LOAD_ATTR_CLASS is split on (oparg & 1) */
- /* _LOAD_ATTR_PROPERTY is not a viable micro-op for tier 2 because it uses the 'this_instr' variable */
+ case _LOAD_ATTR_PROPERTY_FRAME: {
+ _PyStackRef owner;
+ _PyInterpreterFrame *new_frame;
+ oparg = CURRENT_OPARG();
+ owner = stack_pointer[-1];
+ PyObject *fget = (PyObject *)CURRENT_OPERAND();
+ assert((oparg & 1) == 0);
+ assert(Py_IS_TYPE(fget, &PyFunction_Type));
+ PyFunctionObject *f = (PyFunctionObject *)fget;
+ PyCodeObject *code = (PyCodeObject *)f->func_code;
+ if ((code->co_flags & (CO_VARKEYWORDS | CO_VARARGS | CO_OPTIMIZED)) != CO_OPTIMIZED) {
+ UOP_STAT_INC(uopcode, miss);
+ JUMP_TO_JUMP_TARGET();
+ }
+ if (code->co_kwonlyargcount) {
+ UOP_STAT_INC(uopcode, miss);
+ JUMP_TO_JUMP_TARGET();
+ }
+ if (code->co_argcount != 1) {
+ UOP_STAT_INC(uopcode, miss);
+ JUMP_TO_JUMP_TARGET();
+ }
+ if (!_PyThreadState_HasStackSpace(tstate, code->co_framesize)) {
+ UOP_STAT_INC(uopcode, miss);
+ JUMP_TO_JUMP_TARGET();
+ }
+ STAT_INC(LOAD_ATTR, hit);
+ Py_INCREF(fget);
+ new_frame = _PyFrame_PushUnchecked(tstate, f, 1);
+ new_frame->localsplus[0] = owner;
+ stack_pointer[-1].bits = (uintptr_t)new_frame;
+ break;
+ }
/* _LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN is not a viable micro-op for tier 2 because it uses the 'this_instr' variable */
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h
index f15a829ea3ebfa..6f996f91921cd3 100644
--- a/Python/generated_cases.c.h
+++ b/Python/generated_cases.c.h
@@ -4429,32 +4429,63 @@
INSTRUCTION_STATS(LOAD_ATTR_PROPERTY);
static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size");
_PyStackRef owner;
+ _PyInterpreterFrame *new_frame;
/* Skip 1 cache entry */
+ // _CHECK_PEP_523
+ {
+ DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR);
+ }
+ // _GUARD_TYPE_VERSION
owner = stack_pointer[-1];
- uint32_t type_version = read_u32(&this_instr[2].cache);
- uint32_t func_version = read_u32(&this_instr[4].cache);
- PyObject *fget = read_obj(&this_instr[6].cache);
- PyObject *owner_o = PyStackRef_AsPyObjectBorrow(owner);
- assert((oparg & 1) == 0);
- DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR);
- PyTypeObject *cls = Py_TYPE(owner_o);
- assert(type_version != 0);
- DEOPT_IF(cls->tp_version_tag != type_version, LOAD_ATTR);
- assert(Py_IS_TYPE(fget, &PyFunction_Type));
- PyFunctionObject *f = (PyFunctionObject *)fget;
- assert(func_version != 0);
- DEOPT_IF(f->func_version != func_version, LOAD_ATTR);
- PyCodeObject *code = (PyCodeObject *)f->func_code;
- assert(code->co_argcount == 1);
- DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), LOAD_ATTR);
- STAT_INC(LOAD_ATTR, hit);
- Py_INCREF(fget);
- _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, f, 1);
- // Manipulate stack directly because we exit with DISPATCH_INLINED().
- STACK_SHRINK(1);
- new_frame->localsplus[0] = owner;
- frame->return_offset = (uint16_t)(next_instr - this_instr);
- DISPATCH_INLINED(new_frame);
+ {
+ uint32_t type_version = read_u32(&this_instr[2].cache);
+ PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(owner));
+ assert(type_version != 0);
+ DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR);
+ }
+ /* Skip 2 cache entries */
+ // _LOAD_ATTR_PROPERTY_FRAME
+ {
+ PyObject *fget = read_obj(&this_instr[6].cache);
+ assert((oparg & 1) == 0);
+ assert(Py_IS_TYPE(fget, &PyFunction_Type));
+ PyFunctionObject *f = (PyFunctionObject *)fget;
+ PyCodeObject *code = (PyCodeObject *)f->func_code;
+ DEOPT_IF((code->co_flags & (CO_VARKEYWORDS | CO_VARARGS | CO_OPTIMIZED)) != CO_OPTIMIZED, LOAD_ATTR);
+ DEOPT_IF(code->co_kwonlyargcount, LOAD_ATTR);
+ DEOPT_IF(code->co_argcount != 1, LOAD_ATTR);
+ DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), LOAD_ATTR);
+ STAT_INC(LOAD_ATTR, hit);
+ Py_INCREF(fget);
+ new_frame = _PyFrame_PushUnchecked(tstate, f, 1);
+ new_frame->localsplus[0] = owner;
+ }
+ // _SAVE_RETURN_OFFSET
+ {
+ #if TIER_ONE
+ frame->return_offset = (uint16_t)(next_instr - this_instr);
+ #endif
+ #if TIER_TWO
+ frame->return_offset = oparg;
+ #endif
+ }
+ // _PUSH_FRAME
+ {
+ // Write it out explicitly because it's subtly different.
+ // Eventually this should be the only occurrence of this code.
+ assert(tstate->interp->eval_frame == NULL);
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ new_frame->previous = frame;
+ CALL_STAT_INC(inlined_py_calls);
+ frame = tstate->current_frame = new_frame;
+ tstate->py_recursion_remaining--;
+ LOAD_SP();
+ LOAD_IP(0);
+ LLTRACE_RESUME_FRAME();
+ }
+ DISPATCH();
}
TARGET(LOAD_ATTR_SLOT) {
diff --git a/Python/optimizer.c b/Python/optimizer.c
index 73316b3587f221..e08c1dc9936a3d 100644
--- a/Python/optimizer.c
+++ b/Python/optimizer.c
@@ -797,7 +797,10 @@ translate_bytecode_to_trace(
if (uop == _PUSH_FRAME) {
assert(i + 1 == nuops);
- if (opcode == FOR_ITER_GEN || opcode == SEND_GEN) {
+ if (opcode == FOR_ITER_GEN ||
+ opcode == LOAD_ATTR_PROPERTY ||
+ opcode == SEND_GEN)
+ {
DPRINTF(2, "Bailing due to dynamic target\n");
ADD_TO_TRACE(uop, oparg, 0, target);
ADD_TO_TRACE(_DYNAMIC_EXIT, 0, 0, 0);
diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h
index 8077badce78a39..8c2b1ac7926cec 100644
--- a/Python/optimizer_cases.c.h
+++ b/Python/optimizer_cases.c.h
@@ -1160,7 +1160,12 @@
break;
}
- /* _LOAD_ATTR_PROPERTY is not a viable micro-op for tier 2 */
+ case _LOAD_ATTR_PROPERTY_FRAME: {
+ _PyInterpreterFrame *new_frame;
+ new_frame = sym_new_not_null(ctx);
+ stack_pointer[-1] = (_Py_UopsSymbol *)new_frame;
+ break;
+ }
/* _LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN is not a viable micro-op for tier 2 */
diff --git a/Python/specialize.c b/Python/specialize.c
index 3af0deabb9b40a..c354a9079019ac 100644
--- a/Python/specialize.c
+++ b/Python/specialize.c
@@ -963,15 +963,10 @@ _Py_Specialize_LoadAttr(_PyStackRef owner_st, _Py_CODEUNIT *instr, PyObject *nam
SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_METHOD);
goto fail;
}
- uint32_t version = function_get_version(fget, LOAD_ATTR);
- if (version == 0) {
- goto fail;
- }
if (_PyInterpreterState_GET()->eval_frame) {
SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_OTHER);
goto fail;
}
- write_u32(lm_cache->keys_version, version);
assert(type->tp_version_tag != 0);
write_u32(lm_cache->type_version, type->tp_version_tag);
/* borrowed */
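
For context (illustration only, not part of the diff): the rewritten LOAD_ATTR_PROPERTY above only stays on the fast path when the property getter is a plain Python function taking exactly one positional argument; the DEOPT_IF checks reject getters compiled with *args, **kwargs, or keyword-only parameters, and bail when there is no stack space for the getter's frame. A minimal Python sketch of the access pattern this targets (class and attribute names are hypothetical):

    class Point:
        def __init__(self, x):
            self._x = x

        @property
        def x(self):              # plain one-argument getter: eligible for the fast path
            return self._x

        @property
        def noisy(self, *extra):  # *args in the getter: the DEOPT_IF check above rejects it
            return self._x

    p = Point(3)
    total = 0
    for _ in range(10_000):
        total += p.x              # hot attribute load handled by LOAD_ATTR_PROPERTY
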
From 1d607fe759ef22177b50d734ae029df3903c99e0 Mon Sep 17 00:00:00 2001
From: Sviatoslav Sydorenko (Святослав Сидоренко)
Date: Thu, 25 Jul 2024 22:27:26 +0200
Subject: [PATCH 030/139] Move macOS matrix to the calling workflow (#121809)
Co-authored-by: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com>
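
For context (illustration only, not part of the workflow change): the matrix below produces one job per (os, free-threading) pair, and the `is-fork` axis exists purely so the exclude rules can select the Cirrus runner for the upstream repo and macos-14 for forks, while keeping macos-13 GIL-only. A rough Python sketch of that selection logic, assuming the same axis values as in build.yml:

    from itertools import product

    OSES = ["ghcr.io/cirruslabs/macos-runner:sonoma", "macos-14", "macos-13"]
    EXCLUDE = [
        {"os": "ghcr.io/cirruslabs/macos-runner:sonoma", "is-fork": True},
        {"os": "macos-14", "is-fork": False},
        {"os": "macos-13", "free-threading": True},
    ]

    def jobs(is_fork):
        for os_, ft in product(OSES, [False, True]):
            combo = {"os": os_, "is-fork": is_fork, "free-threading": ft}
            # a combination is dropped if it matches every key of some exclude rule
            if any(all(combo[k] == v for k, v in rule.items()) for rule in EXCLUDE):
                continue
            yield os_, ft

    print(list(jobs(is_fork=False)))  # upstream: Cirrus sonoma with and without free-threading, plus GIL-only macos-13
    print(list(jobs(is_fork=True)))   # forks: macos-14 with and without free-threading, plus GIL-only macos-13
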
---
.github/workflows/build.yml | 45 +++++++++++++++++-----------
.github/workflows/reusable-macos.yml | 22 ++++----------
2 files changed, 33 insertions(+), 34 deletions(-)
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 5c894abda71a87..613578ae176ad9 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -249,27 +249,38 @@ jobs:
arch: ${{ matrix.arch }}
build_macos:
- name: 'macOS'
- needs: check_source
- if: needs.check_source.outputs.run_tests == 'true'
- uses: ./.github/workflows/reusable-macos.yml
- with:
- config_hash: ${{ needs.check_source.outputs.config_hash }}
- # Cirrus and macos-14 are M1, macos-13 is default GHA Intel.
- # Cirrus used for upstream, macos-14 for forks.
- os-matrix: '["ghcr.io/cirruslabs/macos-runner:sonoma", "macos-14", "macos-13"]'
-
- build_macos_free_threading:
- name: 'macOS (free-threading)'
+ name: >-
+ macOS
+ ${{ fromJSON(matrix.free-threading) && '(free-threading)' || '' }}
needs: check_source
if: needs.check_source.outputs.run_tests == 'true'
+ strategy:
+ fail-fast: false
+ matrix:
+ # Cirrus and macos-14 are M1, macos-13 is default GHA Intel.
+ # macOS 13 only runs tests against the GIL-enabled CPython.
+ # Cirrus used for upstream, macos-14 for forks.
+ os:
+ - ghcr.io/cirruslabs/macos-runner:sonoma
+ - macos-14
+ - macos-13
+ is-fork: # only used for the exclusion trick
+ - ${{ github.repository_owner != 'python' }}
+ free-threading:
+ - false
+ - true
+ exclude:
+ - os: ghcr.io/cirruslabs/macos-runner:sonoma
+ is-fork: true
+ - os: macos-14
+ is-fork: false
+ - os: macos-13
+ free-threading: true
uses: ./.github/workflows/reusable-macos.yml
with:
config_hash: ${{ needs.check_source.outputs.config_hash }}
- free-threading: true
- # Cirrus and macos-14 are M1.
- # Cirrus used for upstream, macos-14 for forks.
- os-matrix: '["ghcr.io/cirruslabs/macos-runner:sonoma", "macos-14"]'
+ free-threading: ${{ matrix.free-threading }}
+ os: ${{ matrix.os }}
build_ubuntu:
name: >-
@@ -596,7 +607,6 @@ jobs:
- check-docs
- check_generated_files
- build_macos
- - build_macos_free_threading
- build_ubuntu
- build_ubuntu_ssltests
- build_wasi
@@ -632,7 +642,6 @@ jobs:
&& '
check_generated_files,
build_macos,
- build_macos_free_threading,
build_ubuntu,
build_ubuntu_ssltests,
build_wasi,
diff --git a/.github/workflows/reusable-macos.yml b/.github/workflows/reusable-macos.yml
index 0f189960dbea61..64ef2c91329d81 100644
--- a/.github/workflows/reusable-macos.yml
+++ b/.github/workflows/reusable-macos.yml
@@ -8,13 +8,14 @@ on:
required: false
type: boolean
default: false
- os-matrix:
- required: false
+ os:
+ description: OS to run the job
+ required: true
type: string
jobs:
build_macos:
- name: build and test (${{ matrix.os }})
+ name: build and test (${{ inputs.os }})
timeout-minutes: 60
env:
HOMEBREW_NO_ANALYTICS: 1
@@ -23,18 +24,7 @@ jobs:
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
PYTHONSTRICTEXTENSIONBUILD: 1
TERM: linux
- strategy:
- fail-fast: false
- matrix:
- os: ${{fromJson(inputs.os-matrix)}}
- is-fork:
- - ${{ github.repository_owner != 'python' }}
- exclude:
- - os: "ghcr.io/cirruslabs/macos-runner:sonoma"
- is-fork: true
- - os: "macos-14"
- is-fork: false
- runs-on: ${{ matrix.os }}
+ runs-on: ${{ inputs.os }}
steps:
- uses: actions/checkout@v4
- name: Runner image version
@@ -43,7 +33,7 @@ jobs:
uses: actions/cache@v4
with:
path: config.cache
- key: ${{ github.job }}-${{ matrix.os }}-${{ env.IMAGE_VERSION }}-${{ inputs.config_hash }}
+ key: ${{ github.job }}-${{ inputs.os }}-${{ env.IMAGE_VERSION }}-${{ inputs.config_hash }}
- name: Install Homebrew dependencies
run: brew install pkg-config openssl@3.0 xz gdbm tcl-tk
- name: Configure CPython
From d9efa45d7457b0dfea467bb1c2d22c69056ffc73 Mon Sep 17 00:00:00 2001
From: Brandt Bucher
Date: Thu, 25 Jul 2024 14:45:07 -0700
Subject: [PATCH 031/139] GH-118093: Add tier two support for
BINARY_OP_INPLACE_ADD_UNICODE (GH-122253)
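
For context (illustration only, not part of the patch): this specialization handles the `s += piece` shape on str, where BINARY_OP is immediately followed by a STORE_FAST into the same local, so the interpreter can extend the string in place instead of allocating a new object on every iteration; the change lets the tier-two optimizer project that pair into a single _BINARY_OP_INPLACE_ADD_UNICODE uop whose operand carries the STORE_FAST argument. A minimal Python sketch of the pattern being specialized (timings are only illustrative):

    import timeit

    def concat_inplace(n):
        s = ""
        for _ in range(n):
            s += "x"        # BINARY_OP '+=' followed by STORE_FAST on the same local
        return s

    def concat_join(n):
        return "".join("x" for _ in range(n))   # the idiomatic alternative

    # Both build the same string; the += loop is the shape the uop targets.
    print(timeit.timeit(lambda: concat_inplace(10_000), number=50))
    print(timeit.timeit(lambda: concat_join(10_000), number=50))
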
---
Include/internal/pycore_opcode_metadata.h | 1 +
Include/internal/pycore_uop_ids.h | 297 +++++++++++-----------
Include/internal/pycore_uop_metadata.h | 4 +
Python/bytecodes.c | 15 +-
Python/executor_cases.c.h | 49 ++++
Python/generated_cases.c.h | 13 +-
Python/optimizer.c | 9 +
Python/optimizer_cases.c.h | 6 +
8 files changed, 241 insertions(+), 153 deletions(-)
diff --git a/Include/internal/pycore_opcode_metadata.h b/Include/internal/pycore_opcode_metadata.h
index 2fb6b2c4ed8205..9c7ad926f9a980 100644
--- a/Include/internal/pycore_opcode_metadata.h
+++ b/Include/internal/pycore_opcode_metadata.h
@@ -1218,6 +1218,7 @@ _PyOpcode_macro_expansion[256] = {
[BINARY_OP_ADD_FLOAT] = { .nuops = 2, .uops = { { _GUARD_BOTH_FLOAT, 0, 0 }, { _BINARY_OP_ADD_FLOAT, 0, 0 } } },
[BINARY_OP_ADD_INT] = { .nuops = 2, .uops = { { _GUARD_BOTH_INT, 0, 0 }, { _BINARY_OP_ADD_INT, 0, 0 } } },
[BINARY_OP_ADD_UNICODE] = { .nuops = 2, .uops = { { _GUARD_BOTH_UNICODE, 0, 0 }, { _BINARY_OP_ADD_UNICODE, 0, 0 } } },
+ [BINARY_OP_INPLACE_ADD_UNICODE] = { .nuops = 2, .uops = { { _GUARD_BOTH_UNICODE, 0, 0 }, { _BINARY_OP_INPLACE_ADD_UNICODE, 0, 0 } } },
[BINARY_OP_MULTIPLY_FLOAT] = { .nuops = 2, .uops = { { _GUARD_BOTH_FLOAT, 0, 0 }, { _BINARY_OP_MULTIPLY_FLOAT, 0, 0 } } },
[BINARY_OP_MULTIPLY_INT] = { .nuops = 2, .uops = { { _GUARD_BOTH_INT, 0, 0 }, { _BINARY_OP_MULTIPLY_INT, 0, 0 } } },
[BINARY_OP_SUBTRACT_FLOAT] = { .nuops = 2, .uops = { { _GUARD_BOTH_FLOAT, 0, 0 }, { _BINARY_OP_SUBTRACT_FLOAT, 0, 0 } } },
diff --git a/Include/internal/pycore_uop_ids.h b/Include/internal/pycore_uop_ids.h
index 88c835ca8ed10c..9ae82ca3c3dafa 100644
--- a/Include/internal/pycore_uop_ids.h
+++ b/Include/internal/pycore_uop_ids.h
@@ -15,12 +15,13 @@ extern "C" {
#define _BINARY_OP_ADD_FLOAT 303
#define _BINARY_OP_ADD_INT 304
#define _BINARY_OP_ADD_UNICODE 305
-#define _BINARY_OP_MULTIPLY_FLOAT 306
-#define _BINARY_OP_MULTIPLY_INT 307
-#define _BINARY_OP_SUBTRACT_FLOAT 308
-#define _BINARY_OP_SUBTRACT_INT 309
+#define _BINARY_OP_INPLACE_ADD_UNICODE 306
+#define _BINARY_OP_MULTIPLY_FLOAT 307
+#define _BINARY_OP_MULTIPLY_INT 308
+#define _BINARY_OP_SUBTRACT_FLOAT 309
+#define _BINARY_OP_SUBTRACT_INT 310
#define _BINARY_SLICE BINARY_SLICE
-#define _BINARY_SUBSCR 310
+#define _BINARY_SUBSCR 311
#define _BINARY_SUBSCR_DICT BINARY_SUBSCR_DICT
#define _BINARY_SUBSCR_GETITEM BINARY_SUBSCR_GETITEM
#define _BINARY_SUBSCR_LIST_INT BINARY_SUBSCR_LIST_INT
@@ -32,12 +33,12 @@ extern "C" {
#define _BUILD_SLICE BUILD_SLICE
#define _BUILD_STRING BUILD_STRING
#define _BUILD_TUPLE BUILD_TUPLE
-#define _CALL 311
+#define _CALL 312
#define _CALL_ALLOC_AND_ENTER_INIT CALL_ALLOC_AND_ENTER_INIT
-#define _CALL_BUILTIN_CLASS 312
-#define _CALL_BUILTIN_FAST 313
-#define _CALL_BUILTIN_FAST_WITH_KEYWORDS 314
-#define _CALL_BUILTIN_O 315
+#define _CALL_BUILTIN_CLASS 313
+#define _CALL_BUILTIN_FAST 314
+#define _CALL_BUILTIN_FAST_WITH_KEYWORDS 315
+#define _CALL_BUILTIN_O 316
#define _CALL_FUNCTION_EX CALL_FUNCTION_EX
#define _CALL_INTRINSIC_1 CALL_INTRINSIC_1
#define _CALL_INTRINSIC_2 CALL_INTRINSIC_2
@@ -45,38 +46,38 @@ extern "C" {
#define _CALL_KW CALL_KW
#define _CALL_LEN CALL_LEN
#define _CALL_LIST_APPEND CALL_LIST_APPEND
-#define _CALL_METHOD_DESCRIPTOR_FAST 316
-#define _CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS 317
-#define _CALL_METHOD_DESCRIPTOR_NOARGS 318
-#define _CALL_METHOD_DESCRIPTOR_O 319
-#define _CALL_NON_PY_GENERAL 320
-#define _CALL_STR_1 321
-#define _CALL_TUPLE_1 322
+#define _CALL_METHOD_DESCRIPTOR_FAST 317
+#define _CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS 318
+#define _CALL_METHOD_DESCRIPTOR_NOARGS 319
+#define _CALL_METHOD_DESCRIPTOR_O 320
+#define _CALL_NON_PY_GENERAL 321
+#define _CALL_STR_1 322
+#define _CALL_TUPLE_1 323
#define _CALL_TYPE_1 CALL_TYPE_1
-#define _CHECK_ATTR_CLASS 323
-#define _CHECK_ATTR_METHOD_LAZY_DICT 324
-#define _CHECK_ATTR_MODULE 325
-#define _CHECK_ATTR_WITH_HINT 326
-#define _CHECK_CALL_BOUND_METHOD_EXACT_ARGS 327
+#define _CHECK_ATTR_CLASS 324
+#define _CHECK_ATTR_METHOD_LAZY_DICT 325
+#define _CHECK_ATTR_MODULE 326
+#define _CHECK_ATTR_WITH_HINT 327
+#define _CHECK_CALL_BOUND_METHOD_EXACT_ARGS 328
#define _CHECK_EG_MATCH CHECK_EG_MATCH
#define _CHECK_EXC_MATCH CHECK_EXC_MATCH
-#define _CHECK_FUNCTION 328
-#define _CHECK_FUNCTION_EXACT_ARGS 329
-#define _CHECK_FUNCTION_VERSION 330
-#define _CHECK_IS_NOT_PY_CALLABLE 331
-#define _CHECK_MANAGED_OBJECT_HAS_VALUES 332
-#define _CHECK_METHOD_VERSION 333
-#define _CHECK_PEP_523 334
-#define _CHECK_PERIODIC 335
-#define _CHECK_STACK_SPACE 336
-#define _CHECK_STACK_SPACE_OPERAND 337
-#define _CHECK_VALIDITY 338
-#define _CHECK_VALIDITY_AND_SET_IP 339
-#define _COMPARE_OP 340
-#define _COMPARE_OP_FLOAT 341
-#define _COMPARE_OP_INT 342
-#define _COMPARE_OP_STR 343
-#define _CONTAINS_OP 344
+#define _CHECK_FUNCTION 329
+#define _CHECK_FUNCTION_EXACT_ARGS 330
+#define _CHECK_FUNCTION_VERSION 331
+#define _CHECK_IS_NOT_PY_CALLABLE 332
+#define _CHECK_MANAGED_OBJECT_HAS_VALUES 333
+#define _CHECK_METHOD_VERSION 334
+#define _CHECK_PEP_523 335
+#define _CHECK_PERIODIC 336
+#define _CHECK_STACK_SPACE 337
+#define _CHECK_STACK_SPACE_OPERAND 338
+#define _CHECK_VALIDITY 339
+#define _CHECK_VALIDITY_AND_SET_IP 340
+#define _COMPARE_OP 341
+#define _COMPARE_OP_FLOAT 342
+#define _COMPARE_OP_INT 343
+#define _COMPARE_OP_STR 344
+#define _CONTAINS_OP 345
#define _CONTAINS_OP_DICT CONTAINS_OP_DICT
#define _CONTAINS_OP_SET CONTAINS_OP_SET
#define _CONVERT_VALUE CONVERT_VALUE
@@ -88,55 +89,55 @@ extern "C" {
#define _DELETE_GLOBAL DELETE_GLOBAL
#define _DELETE_NAME DELETE_NAME
#define _DELETE_SUBSCR DELETE_SUBSCR
-#define _DEOPT 345
+#define _DEOPT 346
#define _DICT_MERGE DICT_MERGE
#define _DICT_UPDATE DICT_UPDATE
-#define _DYNAMIC_EXIT 346
+#define _DYNAMIC_EXIT 347
#define _END_SEND END_SEND
-#define _ERROR_POP_N 347
+#define _ERROR_POP_N 348
#define _EXIT_INIT_CHECK EXIT_INIT_CHECK
-#define _EXPAND_METHOD 348
-#define _FATAL_ERROR 349
+#define _EXPAND_METHOD 349
+#define _FATAL_ERROR 350
#define _FORMAT_SIMPLE FORMAT_SIMPLE
#define _FORMAT_WITH_SPEC FORMAT_WITH_SPEC
-#define _FOR_ITER 350
-#define _FOR_ITER_GEN_FRAME 351
-#define _FOR_ITER_TIER_TWO 352
+#define _FOR_ITER 351
+#define _FOR_ITER_GEN_FRAME 352
+#define _FOR_ITER_TIER_TWO 353
#define _GET_AITER GET_AITER
#define _GET_ANEXT GET_ANEXT
#define _GET_AWAITABLE GET_AWAITABLE
#define _GET_ITER GET_ITER
#define _GET_LEN GET_LEN
#define _GET_YIELD_FROM_ITER GET_YIELD_FROM_ITER
-#define _GUARD_BOTH_FLOAT 353
-#define _GUARD_BOTH_INT 354
-#define _GUARD_BOTH_UNICODE 355
-#define _GUARD_BUILTINS_VERSION 356
-#define _GUARD_DORV_NO_DICT 357
-#define _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT 358
-#define _GUARD_GLOBALS_VERSION 359
-#define _GUARD_IS_FALSE_POP 360
-#define _GUARD_IS_NONE_POP 361
-#define _GUARD_IS_NOT_NONE_POP 362
-#define _GUARD_IS_TRUE_POP 363
-#define _GUARD_KEYS_VERSION 364
-#define _GUARD_NOS_FLOAT 365
-#define _GUARD_NOS_INT 366
-#define _GUARD_NOT_EXHAUSTED_LIST 367
-#define _GUARD_NOT_EXHAUSTED_RANGE 368
-#define _GUARD_NOT_EXHAUSTED_TUPLE 369
-#define _GUARD_TOS_FLOAT 370
-#define _GUARD_TOS_INT 371
-#define _GUARD_TYPE_VERSION 372
+#define _GUARD_BOTH_FLOAT 354
+#define _GUARD_BOTH_INT 355
+#define _GUARD_BOTH_UNICODE 356
+#define _GUARD_BUILTINS_VERSION 357
+#define _GUARD_DORV_NO_DICT 358
+#define _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT 359
+#define _GUARD_GLOBALS_VERSION 360
+#define _GUARD_IS_FALSE_POP 361
+#define _GUARD_IS_NONE_POP 362
+#define _GUARD_IS_NOT_NONE_POP 363
+#define _GUARD_IS_TRUE_POP 364
+#define _GUARD_KEYS_VERSION 365
+#define _GUARD_NOS_FLOAT 366
+#define _GUARD_NOS_INT 367
+#define _GUARD_NOT_EXHAUSTED_LIST 368
+#define _GUARD_NOT_EXHAUSTED_RANGE 369
+#define _GUARD_NOT_EXHAUSTED_TUPLE 370
+#define _GUARD_TOS_FLOAT 371
+#define _GUARD_TOS_INT 372
+#define _GUARD_TYPE_VERSION 373
#define _IMPORT_FROM IMPORT_FROM
#define _IMPORT_NAME IMPORT_NAME
-#define _INIT_CALL_BOUND_METHOD_EXACT_ARGS 373
-#define _INIT_CALL_PY_EXACT_ARGS 374
-#define _INIT_CALL_PY_EXACT_ARGS_0 375
-#define _INIT_CALL_PY_EXACT_ARGS_1 376
-#define _INIT_CALL_PY_EXACT_ARGS_2 377
-#define _INIT_CALL_PY_EXACT_ARGS_3 378
-#define _INIT_CALL_PY_EXACT_ARGS_4 379
+#define _INIT_CALL_BOUND_METHOD_EXACT_ARGS 374
+#define _INIT_CALL_PY_EXACT_ARGS 375
+#define _INIT_CALL_PY_EXACT_ARGS_0 376
+#define _INIT_CALL_PY_EXACT_ARGS_1 377
+#define _INIT_CALL_PY_EXACT_ARGS_2 378
+#define _INIT_CALL_PY_EXACT_ARGS_3 379
+#define _INIT_CALL_PY_EXACT_ARGS_4 380
#define _INSTRUMENTED_CALL INSTRUMENTED_CALL
#define _INSTRUMENTED_CALL_FUNCTION_EX INSTRUMENTED_CALL_FUNCTION_EX
#define _INSTRUMENTED_CALL_KW INSTRUMENTED_CALL_KW
@@ -153,65 +154,65 @@ extern "C" {
#define _INSTRUMENTED_RETURN_CONST INSTRUMENTED_RETURN_CONST
#define _INSTRUMENTED_RETURN_VALUE INSTRUMENTED_RETURN_VALUE
#define _INSTRUMENTED_YIELD_VALUE INSTRUMENTED_YIELD_VALUE
-#define _INTERNAL_INCREMENT_OPT_COUNTER 380
-#define _IS_NONE 381
+#define _INTERNAL_INCREMENT_OPT_COUNTER 381
+#define _IS_NONE 382
#define _IS_OP IS_OP
-#define _ITER_CHECK_LIST 382
-#define _ITER_CHECK_RANGE 383
-#define _ITER_CHECK_TUPLE 384
-#define _ITER_JUMP_LIST 385
-#define _ITER_JUMP_RANGE 386
-#define _ITER_JUMP_TUPLE 387
-#define _ITER_NEXT_LIST 388
-#define _ITER_NEXT_RANGE 389
-#define _ITER_NEXT_TUPLE 390
-#define _JUMP_TO_TOP 391
+#define _ITER_CHECK_LIST 383
+#define _ITER_CHECK_RANGE 384
+#define _ITER_CHECK_TUPLE 385
+#define _ITER_JUMP_LIST 386
+#define _ITER_JUMP_RANGE 387
+#define _ITER_JUMP_TUPLE 388
+#define _ITER_NEXT_LIST 389
+#define _ITER_NEXT_RANGE 390
+#define _ITER_NEXT_TUPLE 391
+#define _JUMP_TO_TOP 392
#define _LIST_APPEND LIST_APPEND
#define _LIST_EXTEND LIST_EXTEND
-#define _LOAD_ATTR 392
-#define _LOAD_ATTR_CLASS 393
-#define _LOAD_ATTR_CLASS_0 394
-#define _LOAD_ATTR_CLASS_1 395
+#define _LOAD_ATTR 393
+#define _LOAD_ATTR_CLASS 394
+#define _LOAD_ATTR_CLASS_0 395
+#define _LOAD_ATTR_CLASS_1 396
#define _LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN
-#define _LOAD_ATTR_INSTANCE_VALUE 396
-#define _LOAD_ATTR_INSTANCE_VALUE_0 397
-#define _LOAD_ATTR_INSTANCE_VALUE_1 398
-#define _LOAD_ATTR_METHOD_LAZY_DICT 399
-#define _LOAD_ATTR_METHOD_NO_DICT 400
-#define _LOAD_ATTR_METHOD_WITH_VALUES 401
-#define _LOAD_ATTR_MODULE 402
-#define _LOAD_ATTR_NONDESCRIPTOR_NO_DICT 403
-#define _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 404
-#define _LOAD_ATTR_PROPERTY_FRAME 405
-#define _LOAD_ATTR_SLOT 406
-#define _LOAD_ATTR_SLOT_0 407
-#define _LOAD_ATTR_SLOT_1 408
-#define _LOAD_ATTR_WITH_HINT 409
+#define _LOAD_ATTR_INSTANCE_VALUE 397
+#define _LOAD_ATTR_INSTANCE_VALUE_0 398
+#define _LOAD_ATTR_INSTANCE_VALUE_1 399
+#define _LOAD_ATTR_METHOD_LAZY_DICT 400
+#define _LOAD_ATTR_METHOD_NO_DICT 401
+#define _LOAD_ATTR_METHOD_WITH_VALUES 402
+#define _LOAD_ATTR_MODULE 403
+#define _LOAD_ATTR_NONDESCRIPTOR_NO_DICT 404
+#define _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 405
+#define _LOAD_ATTR_PROPERTY_FRAME 406
+#define _LOAD_ATTR_SLOT 407
+#define _LOAD_ATTR_SLOT_0 408
+#define _LOAD_ATTR_SLOT_1 409
+#define _LOAD_ATTR_WITH_HINT 410
#define _LOAD_BUILD_CLASS LOAD_BUILD_CLASS
#define _LOAD_COMMON_CONSTANT LOAD_COMMON_CONSTANT
#define _LOAD_CONST LOAD_CONST
-#define _LOAD_CONST_INLINE 410
-#define _LOAD_CONST_INLINE_BORROW 411
-#define _LOAD_CONST_INLINE_BORROW_WITH_NULL 412
-#define _LOAD_CONST_INLINE_WITH_NULL 413
+#define _LOAD_CONST_INLINE 411
+#define _LOAD_CONST_INLINE_BORROW 412
+#define _LOAD_CONST_INLINE_BORROW_WITH_NULL 413
+#define _LOAD_CONST_INLINE_WITH_NULL 414
#define _LOAD_DEREF LOAD_DEREF
-#define _LOAD_FAST 414
-#define _LOAD_FAST_0 415
-#define _LOAD_FAST_1 416
-#define _LOAD_FAST_2 417
-#define _LOAD_FAST_3 418
-#define _LOAD_FAST_4 419
-#define _LOAD_FAST_5 420
-#define _LOAD_FAST_6 421
-#define _LOAD_FAST_7 422
+#define _LOAD_FAST 415
+#define _LOAD_FAST_0 416
+#define _LOAD_FAST_1 417
+#define _LOAD_FAST_2 418
+#define _LOAD_FAST_3 419
+#define _LOAD_FAST_4 420
+#define _LOAD_FAST_5 421
+#define _LOAD_FAST_6 422
+#define _LOAD_FAST_7 423
#define _LOAD_FAST_AND_CLEAR LOAD_FAST_AND_CLEAR
#define _LOAD_FAST_CHECK LOAD_FAST_CHECK
#define _LOAD_FAST_LOAD_FAST LOAD_FAST_LOAD_FAST
#define _LOAD_FROM_DICT_OR_DEREF LOAD_FROM_DICT_OR_DEREF
#define _LOAD_FROM_DICT_OR_GLOBALS LOAD_FROM_DICT_OR_GLOBALS
-#define _LOAD_GLOBAL 423
-#define _LOAD_GLOBAL_BUILTINS 424
-#define _LOAD_GLOBAL_MODULE 425
+#define _LOAD_GLOBAL 424
+#define _LOAD_GLOBAL_BUILTINS 425
+#define _LOAD_GLOBAL_MODULE 426
#define _LOAD_LOCALS LOAD_LOCALS
#define _LOAD_NAME LOAD_NAME
#define _LOAD_SPECIAL LOAD_SPECIAL
@@ -226,51 +227,51 @@ extern "C" {
#define _MATCH_SEQUENCE MATCH_SEQUENCE
#define _NOP NOP
#define _POP_EXCEPT POP_EXCEPT
-#define _POP_JUMP_IF_FALSE 426
-#define _POP_JUMP_IF_TRUE 427
+#define _POP_JUMP_IF_FALSE 427
+#define _POP_JUMP_IF_TRUE 428
#define _POP_TOP POP_TOP
-#define _POP_TOP_LOAD_CONST_INLINE_BORROW 428
+#define _POP_TOP_LOAD_CONST_INLINE_BORROW 429
#define _PUSH_EXC_INFO PUSH_EXC_INFO
-#define _PUSH_FRAME 429
+#define _PUSH_FRAME 430
#define _PUSH_NULL PUSH_NULL
-#define _PY_FRAME_GENERAL 430
-#define _REPLACE_WITH_TRUE 431
+#define _PY_FRAME_GENERAL 431
+#define _REPLACE_WITH_TRUE 432
#define _RESUME_CHECK RESUME_CHECK
#define _RETURN_GENERATOR RETURN_GENERATOR
#define _RETURN_VALUE RETURN_VALUE
-#define _SAVE_RETURN_OFFSET 432
-#define _SEND 433
-#define _SEND_GEN_FRAME 434
+#define _SAVE_RETURN_OFFSET 433
+#define _SEND 434
+#define _SEND_GEN_FRAME 435
#define _SETUP_ANNOTATIONS SETUP_ANNOTATIONS
#define _SET_ADD SET_ADD
#define _SET_FUNCTION_ATTRIBUTE SET_FUNCTION_ATTRIBUTE
#define _SET_UPDATE SET_UPDATE
-#define _START_EXECUTOR 435
-#define _STORE_ATTR 436
-#define _STORE_ATTR_INSTANCE_VALUE 437
-#define _STORE_ATTR_SLOT 438
-#define _STORE_ATTR_WITH_HINT 439
+#define _START_EXECUTOR 436
+#define _STORE_ATTR 437
+#define _STORE_ATTR_INSTANCE_VALUE 438
+#define _STORE_ATTR_SLOT 439
+#define _STORE_ATTR_WITH_HINT 440
#define _STORE_DEREF STORE_DEREF
-#define _STORE_FAST 440
-#define _STORE_FAST_0 441
-#define _STORE_FAST_1 442
-#define _STORE_FAST_2 443
-#define _STORE_FAST_3 444
-#define _STORE_FAST_4 445
-#define _STORE_FAST_5 446
-#define _STORE_FAST_6 447
-#define _STORE_FAST_7 448
+#define _STORE_FAST 441
+#define _STORE_FAST_0 442
+#define _STORE_FAST_1 443
+#define _STORE_FAST_2 444
+#define _STORE_FAST_3 445
+#define _STORE_FAST_4 446
+#define _STORE_FAST_5 447
+#define _STORE_FAST_6 448
+#define _STORE_FAST_7 449
#define _STORE_FAST_LOAD_FAST STORE_FAST_LOAD_FAST
#define _STORE_FAST_STORE_FAST STORE_FAST_STORE_FAST
#define _STORE_GLOBAL STORE_GLOBAL
#define _STORE_NAME STORE_NAME
#define _STORE_SLICE STORE_SLICE
-#define _STORE_SUBSCR 449
+#define _STORE_SUBSCR 450
#define _STORE_SUBSCR_DICT STORE_SUBSCR_DICT
#define _STORE_SUBSCR_LIST_INT STORE_SUBSCR_LIST_INT
#define _SWAP SWAP
-#define _TIER2_RESUME_CHECK 450
-#define _TO_BOOL 451
+#define _TIER2_RESUME_CHECK 451
+#define _TO_BOOL 452
#define _TO_BOOL_BOOL TO_BOOL_BOOL
#define _TO_BOOL_INT TO_BOOL_INT
#define _TO_BOOL_LIST TO_BOOL_LIST
@@ -280,13 +281,13 @@ extern "C" {
#define _UNARY_NEGATIVE UNARY_NEGATIVE
#define _UNARY_NOT UNARY_NOT
#define _UNPACK_EX UNPACK_EX
-#define _UNPACK_SEQUENCE 452
+#define _UNPACK_SEQUENCE 453
#define _UNPACK_SEQUENCE_LIST UNPACK_SEQUENCE_LIST
#define _UNPACK_SEQUENCE_TUPLE UNPACK_SEQUENCE_TUPLE
#define _UNPACK_SEQUENCE_TWO_TUPLE UNPACK_SEQUENCE_TWO_TUPLE
#define _WITH_EXCEPT_START WITH_EXCEPT_START
#define _YIELD_VALUE YIELD_VALUE
-#define MAX_UOP_ID 452
+#define MAX_UOP_ID 453
#ifdef __cplusplus
}
diff --git a/Include/internal/pycore_uop_metadata.h b/Include/internal/pycore_uop_metadata.h
index 14befe59f04a1e..190c6fb2365cc4 100644
--- a/Include/internal/pycore_uop_metadata.h
+++ b/Include/internal/pycore_uop_metadata.h
@@ -72,6 +72,7 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = {
[_BINARY_OP_SUBTRACT_FLOAT] = HAS_PURE_FLAG,
[_GUARD_BOTH_UNICODE] = HAS_EXIT_FLAG,
[_BINARY_OP_ADD_UNICODE] = HAS_ERROR_FLAG | HAS_PURE_FLAG,
+ [_BINARY_OP_INPLACE_ADD_UNICODE] = HAS_LOCAL_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_BINARY_SUBSCR] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_BINARY_SLICE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_STORE_SLICE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
@@ -279,6 +280,7 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = {
[_BINARY_OP_ADD_FLOAT] = "_BINARY_OP_ADD_FLOAT",
[_BINARY_OP_ADD_INT] = "_BINARY_OP_ADD_INT",
[_BINARY_OP_ADD_UNICODE] = "_BINARY_OP_ADD_UNICODE",
+ [_BINARY_OP_INPLACE_ADD_UNICODE] = "_BINARY_OP_INPLACE_ADD_UNICODE",
[_BINARY_OP_MULTIPLY_FLOAT] = "_BINARY_OP_MULTIPLY_FLOAT",
[_BINARY_OP_MULTIPLY_INT] = "_BINARY_OP_MULTIPLY_INT",
[_BINARY_OP_SUBTRACT_FLOAT] = "_BINARY_OP_SUBTRACT_FLOAT",
@@ -632,6 +634,8 @@ int _PyUop_num_popped(int opcode, int oparg)
return 2;
case _BINARY_OP_ADD_UNICODE:
return 2;
+ case _BINARY_OP_INPLACE_ADD_UNICODE:
+ return 2;
case _BINARY_SUBSCR:
return 2;
case _BINARY_SLICE:
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index d356fc9bfdddba..eb8b66f8d8b2e7 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -581,12 +581,18 @@ dummy_func(
// So the inputs are the same as for all BINARY_OP
// specializations, but there is no output.
// At the end we just skip over the STORE_FAST.
- tier1 op(_BINARY_OP_INPLACE_ADD_UNICODE, (left, right --)) {
+ op(_BINARY_OP_INPLACE_ADD_UNICODE, (left, right --)) {
PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
+ int next_oparg;
+ #if TIER_ONE
assert(next_instr->op.code == STORE_FAST);
- _PyStackRef *target_local = &GETLOCAL(next_instr->op.arg);
+ next_oparg = next_instr->op.arg;
+ #else
+ next_oparg = CURRENT_OPERAND();
+ #endif
+ _PyStackRef *target_local = &GETLOCAL(next_oparg);
DEOPT_IF(!PyStackRef_Is(*target_local, left));
STAT_INC(BINARY_OP, hit);
/* Handle `left = left + right` or `left += right` for str.
@@ -607,9 +613,12 @@ dummy_func(
*target_local = PyStackRef_FromPyObjectSteal(temp);
_Py_DECREF_SPECIALIZED(right_o, _PyUnicode_ExactDealloc);
ERROR_IF(PyStackRef_IsNull(*target_local), error);
- // The STORE_FAST is already done.
+ #if TIER_ONE
+ // The STORE_FAST is already done. This is done here in tier one,
+ // and during trace projection in tier two:
assert(next_instr->op.code == STORE_FAST);
SKIP_OVER(1);
+ #endif
}
macro(BINARY_OP_INPLACE_ADD_UNICODE) =
diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h
index b8343f9ffd5f80..3379f0be2272dc 100644
--- a/Python/executor_cases.c.h
+++ b/Python/executor_cases.c.h
@@ -693,6 +693,55 @@
break;
}
+ case _BINARY_OP_INPLACE_ADD_UNICODE: {
+ _PyStackRef right;
+ _PyStackRef left;
+ right = stack_pointer[-1];
+ left = stack_pointer[-2];
+ PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
+ PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
+ int next_oparg;
+ #if TIER_ONE
+ assert(next_instr->op.code == STORE_FAST);
+ next_oparg = next_instr->op.arg;
+ #else
+ next_oparg = CURRENT_OPERAND();
+ #endif
+ _PyStackRef *target_local = &GETLOCAL(next_oparg);
+ if (!PyStackRef_Is(*target_local, left)) {
+ UOP_STAT_INC(uopcode, miss);
+ JUMP_TO_JUMP_TARGET();
+ }
+ STAT_INC(BINARY_OP, hit);
+ /* Handle `left = left + right` or `left += right` for str.
+ *
+ * When possible, extend `left` in place rather than
+ * allocating a new PyUnicodeObject. This attempts to avoid
+ * quadratic behavior when one neglects to use str.join().
+ *
+ * If `left` has only two references remaining (one from
+ * the stack, one in the locals), DECREFing `left` leaves
+ * only the locals reference, so PyUnicode_Append knows
+ * that the string is safe to mutate.
+ */
+ assert(Py_REFCNT(left_o) >= 2);
+ _Py_DECREF_NO_DEALLOC(left_o);
+ PyObject *temp = PyStackRef_AsPyObjectBorrow(*target_local);
+ PyUnicode_Append(&temp, right_o);
+ *target_local = PyStackRef_FromPyObjectSteal(temp);
+ _Py_DECREF_SPECIALIZED(right_o, _PyUnicode_ExactDealloc);
+ if (PyStackRef_IsNull(*target_local)) JUMP_TO_ERROR();
+ #if TIER_ONE
+ // The STORE_FAST is already done. This is done here in tier one,
+ // and during trace projection in tier two:
+ assert(next_instr->op.code == STORE_FAST);
+ SKIP_OVER(1);
+ #endif
+ stack_pointer += -2;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
case _BINARY_SUBSCR: {
_PyStackRef sub;
_PyStackRef container;
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h
index 6f996f91921cd3..c9907438ddc466 100644
--- a/Python/generated_cases.c.h
+++ b/Python/generated_cases.c.h
@@ -181,8 +181,14 @@
{
PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
+ int next_oparg;
+ #if TIER_ONE
assert(next_instr->op.code == STORE_FAST);
- _PyStackRef *target_local = &GETLOCAL(next_instr->op.arg);
+ next_oparg = next_instr->op.arg;
+ #else
+ next_oparg = CURRENT_OPERAND();
+ #endif
+ _PyStackRef *target_local = &GETLOCAL(next_oparg);
DEOPT_IF(!PyStackRef_Is(*target_local, left), BINARY_OP);
STAT_INC(BINARY_OP, hit);
/* Handle `left = left + right` or `left += right` for str.
@@ -203,9 +209,12 @@
*target_local = PyStackRef_FromPyObjectSteal(temp);
_Py_DECREF_SPECIALIZED(right_o, _PyUnicode_ExactDealloc);
if (PyStackRef_IsNull(*target_local)) goto pop_2_error;
- // The STORE_FAST is already done.
+ #if TIER_ONE
+ // The STORE_FAST is already done. This is done here in tier one,
+ // and during trace projection in tier two:
assert(next_instr->op.code == STORE_FAST);
SKIP_OVER(1);
+ #endif
}
stack_pointer += -2;
assert(WITHIN_STACK_BOUNDS());
diff --git a/Python/optimizer.c b/Python/optimizer.c
index e08c1dc9936a3d..f0793b8c8f2088 100644
--- a/Python/optimizer.c
+++ b/Python/optimizer.c
@@ -875,6 +875,15 @@ translate_bytecode_to_trace(
goto done;
}
+ if (uop == _BINARY_OP_INPLACE_ADD_UNICODE) {
+ assert(i + 1 == nuops);
+ _Py_CODEUNIT *next_instr = instr + 1 + _PyOpcode_Caches[_PyOpcode_Deopt[opcode]];
+ assert(next_instr->op.code == STORE_FAST);
+ operand = next_instr->op.arg;
+ // Skip the STORE_FAST:
+ instr++;
+ }
+
// All other instructions
ADD_TO_TRACE(uop, oparg, operand, target);
}
diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h
index 8c2b1ac7926cec..33af8552ba69e0 100644
--- a/Python/optimizer_cases.c.h
+++ b/Python/optimizer_cases.c.h
@@ -473,6 +473,12 @@
break;
}
+ case _BINARY_OP_INPLACE_ADD_UNICODE: {
+ stack_pointer += -2;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
case _BINARY_SUBSCR: {
_Py_UopsSymbol *res;
res = sym_new_not_null(ctx);
From afb0aa6ed20bd8e982ecb307f12923cf8dbccd8c Mon Sep 17 00:00:00 2001
From: Mark Shannon
Date: Fri, 26 Jul 2024 12:24:12 +0100
Subject: [PATCH 032/139] GH-121131: Clean up and fix some instrumented
instructions. (GH-121132)
* Add support for 'prev_instr' to code generator and refactor some INSTRUMENTED instructions
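
For context (illustration only, not part of the patch): the INSTRUMENTED_RETURN_VALUE and INSTRUMENTED_YIELD_VALUE opcodes reworked here fire the PY_RETURN and PY_YIELD monitoring events that tools such as sys.settrace are built on; from a tracer's point of view, a generator yield shows up as a 'return' event and each resumption as a new 'call'. A small sketch of observing this:

    import sys

    def gen():
        yield 1          # the yield is reported to the tracer as a 'return' event
        yield 2

    def tracer(frame, event, arg):
        print(f"{event:8} {frame.f_code.co_name}:{frame.f_lineno} arg={arg!r}")
        return tracer    # keep receiving 'line' and 'return' events in this frame

    sys.settrace(tracer)
    for _ in gen():
        pass
    sys.settrace(None)
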
---
Include/internal/pycore_opcode_metadata.h | 9 +-
Include/internal/pycore_uop_ids.h | 4 +-
Include/opcode_ids.h | 30 +--
Lib/_opcode_metadata.py | 30 +--
Lib/test/test_sys_settrace.py | 2 +-
Python/bytecodes.c | 115 +++++----
Python/ceval.c | 40 ----
Python/ceval_macros.h | 5 +-
Python/executor_cases.c.h | 8 +-
Python/generated_cases.c.h | 222 +++++++++++++-----
Python/opcode_targets.h | 6 +-
Python/optimizer_cases.c.h | 8 +-
Tools/cases_generator/analyzer.py | 9 +-
.../opcode_metadata_generator.py | 2 -
Tools/cases_generator/tier1_generator.py | 2 +
15 files changed, 277 insertions(+), 215 deletions(-)
diff --git a/Include/internal/pycore_opcode_metadata.h b/Include/internal/pycore_opcode_metadata.h
index 9c7ad926f9a980..f5c439e81a6232 100644
--- a/Include/internal/pycore_opcode_metadata.h
+++ b/Include/internal/pycore_opcode_metadata.h
@@ -231,6 +231,8 @@ int _PyOpcode_num_popped(int opcode, int oparg) {
return 0;
case INSTRUMENTED_JUMP_FORWARD:
return 0;
+ case INSTRUMENTED_LINE:
+ return 0;
case INSTRUMENTED_LOAD_SUPER_ATTR:
return 3;
case INSTRUMENTED_POP_JUMP_IF_FALSE:
@@ -676,6 +678,8 @@ int _PyOpcode_num_pushed(int opcode, int oparg) {
return 0;
case INSTRUMENTED_JUMP_FORWARD:
return 0;
+ case INSTRUMENTED_LINE:
+ return 0;
case INSTRUMENTED_LOAD_SUPER_ATTR:
return 1 + (oparg & 1);
case INSTRUMENTED_POP_JUMP_IF_FALSE:
@@ -689,9 +693,9 @@ int _PyOpcode_num_pushed(int opcode, int oparg) {
case INSTRUMENTED_RESUME:
return 0;
case INSTRUMENTED_RETURN_CONST:
- return 0;
+ return 1;
case INSTRUMENTED_RETURN_VALUE:
- return 0;
+ return 1;
case INSTRUMENTED_YIELD_VALUE:
return 1;
case INTERPRETER_EXIT:
@@ -1083,6 +1087,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[264] = {
[INSTRUMENTED_INSTRUCTION] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[INSTRUMENTED_JUMP_BACKWARD] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG },
[INSTRUMENTED_JUMP_FORWARD] = { true, INSTR_FMT_IB, HAS_ARG_FLAG },
+ [INSTRUMENTED_LINE] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG },
[INSTRUMENTED_LOAD_SUPER_ATTR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG },
[INSTRUMENTED_POP_JUMP_IF_FALSE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG },
[INSTRUMENTED_POP_JUMP_IF_NONE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG },
diff --git a/Include/internal/pycore_uop_ids.h b/Include/internal/pycore_uop_ids.h
index 9ae82ca3c3dafa..60de0573baf5f1 100644
--- a/Include/internal/pycore_uop_ids.h
+++ b/Include/internal/pycore_uop_ids.h
@@ -145,15 +145,13 @@ extern "C" {
#define _INSTRUMENTED_INSTRUCTION INSTRUMENTED_INSTRUCTION
#define _INSTRUMENTED_JUMP_BACKWARD INSTRUMENTED_JUMP_BACKWARD
#define _INSTRUMENTED_JUMP_FORWARD INSTRUMENTED_JUMP_FORWARD
+#define _INSTRUMENTED_LINE INSTRUMENTED_LINE
#define _INSTRUMENTED_LOAD_SUPER_ATTR INSTRUMENTED_LOAD_SUPER_ATTR
#define _INSTRUMENTED_POP_JUMP_IF_FALSE INSTRUMENTED_POP_JUMP_IF_FALSE
#define _INSTRUMENTED_POP_JUMP_IF_NONE INSTRUMENTED_POP_JUMP_IF_NONE
#define _INSTRUMENTED_POP_JUMP_IF_NOT_NONE INSTRUMENTED_POP_JUMP_IF_NOT_NONE
#define _INSTRUMENTED_POP_JUMP_IF_TRUE INSTRUMENTED_POP_JUMP_IF_TRUE
#define _INSTRUMENTED_RESUME INSTRUMENTED_RESUME
-#define _INSTRUMENTED_RETURN_CONST INSTRUMENTED_RETURN_CONST
-#define _INSTRUMENTED_RETURN_VALUE INSTRUMENTED_RETURN_VALUE
-#define _INSTRUMENTED_YIELD_VALUE INSTRUMENTED_YIELD_VALUE
#define _INTERNAL_INCREMENT_OPT_COUNTER 381
#define _IS_NONE 382
#define _IS_OP IS_OP
diff --git a/Include/opcode_ids.h b/Include/opcode_ids.h
index dd9b1ec1674949..d14b48f4289285 100644
--- a/Include/opcode_ids.h
+++ b/Include/opcode_ids.h
@@ -204,21 +204,21 @@ extern "C" {
#define INSTRUMENTED_RESUME 236
#define INSTRUMENTED_END_FOR 237
#define INSTRUMENTED_END_SEND 238
-#define INSTRUMENTED_RETURN_VALUE 239
-#define INSTRUMENTED_RETURN_CONST 240
-#define INSTRUMENTED_YIELD_VALUE 241
-#define INSTRUMENTED_LOAD_SUPER_ATTR 242
-#define INSTRUMENTED_FOR_ITER 243
-#define INSTRUMENTED_CALL 244
-#define INSTRUMENTED_CALL_KW 245
-#define INSTRUMENTED_CALL_FUNCTION_EX 246
-#define INSTRUMENTED_INSTRUCTION 247
-#define INSTRUMENTED_JUMP_FORWARD 248
-#define INSTRUMENTED_JUMP_BACKWARD 249
-#define INSTRUMENTED_POP_JUMP_IF_TRUE 250
-#define INSTRUMENTED_POP_JUMP_IF_FALSE 251
-#define INSTRUMENTED_POP_JUMP_IF_NONE 252
-#define INSTRUMENTED_POP_JUMP_IF_NOT_NONE 253
+#define INSTRUMENTED_LOAD_SUPER_ATTR 239
+#define INSTRUMENTED_FOR_ITER 240
+#define INSTRUMENTED_CALL 241
+#define INSTRUMENTED_CALL_KW 242
+#define INSTRUMENTED_CALL_FUNCTION_EX 243
+#define INSTRUMENTED_INSTRUCTION 244
+#define INSTRUMENTED_JUMP_FORWARD 245
+#define INSTRUMENTED_JUMP_BACKWARD 246
+#define INSTRUMENTED_POP_JUMP_IF_TRUE 247
+#define INSTRUMENTED_POP_JUMP_IF_FALSE 248
+#define INSTRUMENTED_POP_JUMP_IF_NONE 249
+#define INSTRUMENTED_POP_JUMP_IF_NOT_NONE 250
+#define INSTRUMENTED_RETURN_VALUE 251
+#define INSTRUMENTED_RETURN_CONST 252
+#define INSTRUMENTED_YIELD_VALUE 253
#define INSTRUMENTED_LINE 254
#define JUMP 256
#define JUMP_NO_INTERRUPT 257
diff --git a/Lib/_opcode_metadata.py b/Lib/_opcode_metadata.py
index 4b6057f4119421..01c22a89846e97 100644
--- a/Lib/_opcode_metadata.py
+++ b/Lib/_opcode_metadata.py
@@ -308,21 +308,21 @@
'INSTRUMENTED_RESUME': 236,
'INSTRUMENTED_END_FOR': 237,
'INSTRUMENTED_END_SEND': 238,
- 'INSTRUMENTED_RETURN_VALUE': 239,
- 'INSTRUMENTED_RETURN_CONST': 240,
- 'INSTRUMENTED_YIELD_VALUE': 241,
- 'INSTRUMENTED_LOAD_SUPER_ATTR': 242,
- 'INSTRUMENTED_FOR_ITER': 243,
- 'INSTRUMENTED_CALL': 244,
- 'INSTRUMENTED_CALL_KW': 245,
- 'INSTRUMENTED_CALL_FUNCTION_EX': 246,
- 'INSTRUMENTED_INSTRUCTION': 247,
- 'INSTRUMENTED_JUMP_FORWARD': 248,
- 'INSTRUMENTED_JUMP_BACKWARD': 249,
- 'INSTRUMENTED_POP_JUMP_IF_TRUE': 250,
- 'INSTRUMENTED_POP_JUMP_IF_FALSE': 251,
- 'INSTRUMENTED_POP_JUMP_IF_NONE': 252,
- 'INSTRUMENTED_POP_JUMP_IF_NOT_NONE': 253,
+ 'INSTRUMENTED_LOAD_SUPER_ATTR': 239,
+ 'INSTRUMENTED_FOR_ITER': 240,
+ 'INSTRUMENTED_CALL': 241,
+ 'INSTRUMENTED_CALL_KW': 242,
+ 'INSTRUMENTED_CALL_FUNCTION_EX': 243,
+ 'INSTRUMENTED_INSTRUCTION': 244,
+ 'INSTRUMENTED_JUMP_FORWARD': 245,
+ 'INSTRUMENTED_JUMP_BACKWARD': 246,
+ 'INSTRUMENTED_POP_JUMP_IF_TRUE': 247,
+ 'INSTRUMENTED_POP_JUMP_IF_FALSE': 248,
+ 'INSTRUMENTED_POP_JUMP_IF_NONE': 249,
+ 'INSTRUMENTED_POP_JUMP_IF_NOT_NONE': 250,
+ 'INSTRUMENTED_RETURN_VALUE': 251,
+ 'INSTRUMENTED_RETURN_CONST': 252,
+ 'INSTRUMENTED_YIELD_VALUE': 253,
'JUMP': 256,
'JUMP_NO_INTERRUPT': 257,
'LOAD_CLOSURE': 258,
diff --git a/Lib/test/test_sys_settrace.py b/Lib/test/test_sys_settrace.py
index c622fd9ce7c466..95cf0d1ec2d9ab 100644
--- a/Lib/test/test_sys_settrace.py
+++ b/Lib/test/test_sys_settrace.py
@@ -2857,7 +2857,7 @@ def test_no_jump_from_exception_event(output):
output.append(1)
1 / 0
- @jump_test(3, 2, [2, 5], event='return')
+ @jump_test(3, 2, [2, 2, 5], event='return')
def test_jump_from_yield(output):
def gen():
output.append(2)
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index eb8b66f8d8b2e7..971397c955de09 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -945,48 +945,25 @@ dummy_func(
LLTRACE_RESUME_FRAME();
}
- inst(INSTRUMENTED_RETURN_VALUE, (retval --)) {
+ tier1 op(_RETURN_VALUE_EVENT, (val -- val)) {
int err = _Py_call_instrumentation_arg(
tstate, PY_MONITORING_EVENT_PY_RETURN,
- frame, this_instr, PyStackRef_AsPyObjectBorrow(retval));
+ frame, this_instr, PyStackRef_AsPyObjectBorrow(val));
if (err) ERROR_NO_POP();
- STACK_SHRINK(1);
- assert(EMPTY());
- _PyFrame_SetStackPointer(frame, stack_pointer);
- _Py_LeaveRecursiveCallPy(tstate);
- assert(frame != &entry_frame);
- // GH-99729: We need to unlink the frame *before* clearing it:
- _PyInterpreterFrame *dying = frame;
- frame = tstate->current_frame = dying->previous;
- _PyEval_FrameClearAndPop(tstate, dying);
- _PyFrame_StackPush(frame, retval);
- LOAD_IP(frame->return_offset);
- goto resume_frame;
}
+ macro(INSTRUMENTED_RETURN_VALUE) =
+ _RETURN_VALUE_EVENT +
+ RETURN_VALUE;
+
macro(RETURN_CONST) =
LOAD_CONST +
RETURN_VALUE;
- inst(INSTRUMENTED_RETURN_CONST, (--)) {
- PyObject *retval = GETITEM(FRAME_CO_CONSTS, oparg);
- int err = _Py_call_instrumentation_arg(
- tstate, PY_MONITORING_EVENT_PY_RETURN,
- frame, this_instr, retval);
- if (err) ERROR_NO_POP();
- Py_INCREF(retval);
- assert(EMPTY());
- _PyFrame_SetStackPointer(frame, stack_pointer);
- _Py_LeaveRecursiveCallPy(tstate);
- assert(frame != &entry_frame);
- // GH-99729: We need to unlink the frame *before* clearing it:
- _PyInterpreterFrame *dying = frame;
- frame = tstate->current_frame = dying->previous;
- _PyEval_FrameClearAndPop(tstate, dying);
- _PyFrame_StackPush(frame, PyStackRef_FromPyObjectSteal(retval));
- LOAD_IP(frame->return_offset);
- goto resume_frame;
- }
+ macro(INSTRUMENTED_RETURN_CONST) =
+ LOAD_CONST +
+ _RETURN_VALUE_EVENT +
+ RETURN_VALUE;
inst(GET_AITER, (obj -- iter)) {
unaryfunc getter = NULL;
@@ -1183,31 +1160,6 @@ dummy_func(
_SEND_GEN_FRAME +
_PUSH_FRAME;
- inst(INSTRUMENTED_YIELD_VALUE, (retval -- unused)) {
- assert(frame != &entry_frame);
- frame->instr_ptr = next_instr;
- PyGenObject *gen = _PyGen_GetGeneratorFromFrame(frame);
- assert(FRAME_SUSPENDED_YIELD_FROM == FRAME_SUSPENDED + 1);
- assert(oparg == 0 || oparg == 1);
- gen->gi_frame_state = FRAME_SUSPENDED + oparg;
- _PyFrame_SetStackPointer(frame, stack_pointer - 1);
- int err = _Py_call_instrumentation_arg(
- tstate, PY_MONITORING_EVENT_PY_YIELD,
- frame, this_instr, PyStackRef_AsPyObjectBorrow(retval));
- if (err) ERROR_NO_POP();
- tstate->exc_info = gen->gi_exc_state.previous_item;
- gen->gi_exc_state.previous_item = NULL;
- _Py_LeaveRecursiveCallPy(tstate);
- _PyInterpreterFrame *gen_frame = frame;
- frame = tstate->current_frame = frame->previous;
- gen_frame->previous = NULL;
- _PyFrame_StackPush(frame, retval);
- /* We don't know which of these is relevant here, so keep them equal */
- assert(INLINE_CACHE_ENTRIES_SEND == INLINE_CACHE_ENTRIES_FOR_ITER);
- LOAD_IP(1 + INLINE_CACHE_ENTRIES_SEND);
- goto resume_frame;
- }
-
inst(YIELD_VALUE, (retval -- value)) {
// NOTE: It's important that YIELD_VALUE never raises an exception!
// The compiler treats any exception raised here as a failed close()
@@ -1244,6 +1196,23 @@ dummy_func(
LLTRACE_RESUME_FRAME();
}
+ tier1 op(_YIELD_VALUE_EVENT, (val -- val)) {
+ SAVE_SP();
+ int err = _Py_call_instrumentation_arg(
+ tstate, PY_MONITORING_EVENT_PY_YIELD,
+ frame, this_instr, PyStackRef_AsPyObjectBorrow(val));
+ LOAD_SP();
+ if (err) ERROR_NO_POP();
+ if (frame->instr_ptr != this_instr) {
+ next_instr = frame->instr_ptr;
+ DISPATCH();
+ }
+ }
+
+ macro(INSTRUMENTED_YIELD_VALUE) =
+ _YIELD_VALUE_EVENT +
+ YIELD_VALUE;
+
inst(POP_EXCEPT, (exc_value -- )) {
_PyErr_StackItem *exc_info = tstate->exc_info;
Py_XSETREF(exc_info->exc_value,
@@ -4450,6 +4419,36 @@ dummy_func(
assert(oparg >= 2);
}
+ inst(INSTRUMENTED_LINE, ( -- )) {
+ int original_opcode = 0;
+ if (tstate->tracing) {
+ PyCodeObject *code = _PyFrame_GetCode(frame);
+ original_opcode = code->_co_monitoring->lines[(int)(this_instr - _PyCode_CODE(code))].original_opcode;
+ next_instr = this_instr;
+ } else {
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ original_opcode = _Py_call_instrumentation_line(
+ tstate, frame, this_instr, prev_instr);
+ stack_pointer = _PyFrame_GetStackPointer(frame);
+ if (original_opcode < 0) {
+ next_instr = this_instr+1;
+ goto error;
+ }
+ next_instr = frame->instr_ptr;
+ if (next_instr != this_instr) {
+ DISPATCH();
+ }
+ }
+ if (_PyOpcode_Caches[original_opcode]) {
+ _PyBinaryOpCache *cache = (_PyBinaryOpCache *)(next_instr+1);
+ /* Prevent the underlying instruction from specializing
+ * and overwriting the instrumentation. */
+ PAUSE_ADAPTIVE_COUNTER(cache->counter);
+ }
+ opcode = original_opcode;
+ DISPATCH_GOTO();
+ }
+
inst(INSTRUMENTED_INSTRUCTION, ( -- )) {
int next_opcode = _Py_call_instrumentation_instruction(
tstate, frame, this_instr);
diff --git a/Python/ceval.c b/Python/ceval.c
index 1e911d3ba17189..c0074c45b27111 100644
--- a/Python/ceval.c
+++ b/Python/ceval.c
@@ -835,46 +835,6 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int
#include "generated_cases.c.h"
- /* INSTRUMENTED_LINE has to be here, rather than in bytecodes.c,
- * because it needs to capture frame->instr_ptr before it is updated,
- * as happens in the standard instruction prologue.
- */
-#if USE_COMPUTED_GOTOS
- TARGET_INSTRUMENTED_LINE:
-#else
- case INSTRUMENTED_LINE:
-#endif
- {
- _Py_CODEUNIT *prev = frame->instr_ptr;
- _Py_CODEUNIT *here = frame->instr_ptr = next_instr;
- int original_opcode = 0;
- if (tstate->tracing) {
- PyCodeObject *code = _PyFrame_GetCode(frame);
- original_opcode = code->_co_monitoring->lines[(int)(here - _PyCode_CODE(code))].original_opcode;
- } else {
- _PyFrame_SetStackPointer(frame, stack_pointer);
- original_opcode = _Py_call_instrumentation_line(
- tstate, frame, here, prev);
- stack_pointer = _PyFrame_GetStackPointer(frame);
- if (original_opcode < 0) {
- next_instr = here+1;
- goto error;
- }
- next_instr = frame->instr_ptr;
- if (next_instr != here) {
- DISPATCH();
- }
- }
- if (_PyOpcode_Caches[original_opcode]) {
- _PyBinaryOpCache *cache = (_PyBinaryOpCache *)(next_instr+1);
- /* Prevent the underlying instruction from specializing
- * and overwriting the instrumentation. */
- PAUSE_ADAPTIVE_COUNTER(cache->counter);
- }
- opcode = original_opcode;
- DISPATCH_GOTO();
- }
-
#if USE_COMPUTED_GOTOS
_unknown_opcode:
diff --git a/Python/ceval_macros.h b/Python/ceval_macros.h
index 595b72bfaf9613..60efe3d78ff22c 100644
--- a/Python/ceval_macros.h
+++ b/Python/ceval_macros.h
@@ -402,7 +402,10 @@ static inline void _Py_LeaveRecursiveCallPy(PyThreadState *tstate) {
/* There's no STORE_IP(), it's inlined by the code generator. */
#define LOAD_SP() \
-stack_pointer = _PyFrame_GetStackPointer(frame);
+stack_pointer = _PyFrame_GetStackPointer(frame)
+
+#define SAVE_SP() \
+_PyFrame_SetStackPointer(frame, stack_pointer)
/* Tier-switching macros. */
diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h
index 3379f0be2272dc..288e0f9135c27e 100644
--- a/Python/executor_cases.c.h
+++ b/Python/executor_cases.c.h
@@ -1153,10 +1153,6 @@
break;
}
- /* _INSTRUMENTED_RETURN_VALUE is not a viable micro-op for tier 2 because it is instrumented */
-
- /* _INSTRUMENTED_RETURN_CONST is not a viable micro-op for tier 2 because it is instrumented */
-
case _GET_AITER: {
_PyStackRef obj;
_PyStackRef iter;
@@ -1304,8 +1300,6 @@
break;
}
- /* _INSTRUMENTED_YIELD_VALUE is not a viable micro-op for tier 2 because it is instrumented */
-
case _YIELD_VALUE: {
_PyStackRef retval;
_PyStackRef value;
@@ -4913,6 +4907,8 @@
break;
}
+ /* _INSTRUMENTED_LINE is not a viable micro-op for tier 2 because it is instrumented */
+
/* _INSTRUMENTED_INSTRUCTION is not a viable micro-op for tier 2 because it is instrumented */
/* _INSTRUMENTED_JUMP_FORWARD is not a viable micro-op for tier 2 because it is instrumented */
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h
index c9907438ddc466..634053a93837c6 100644
--- a/Python/generated_cases.c.h
+++ b/Python/generated_cases.c.h
@@ -3657,6 +3657,41 @@
DISPATCH();
}
+ TARGET(INSTRUMENTED_LINE) {
+ _Py_CODEUNIT *prev_instr = frame->instr_ptr;
+ _Py_CODEUNIT *this_instr = frame->instr_ptr = next_instr;
+ (void)this_instr;
+ next_instr += 1;
+ INSTRUCTION_STATS(INSTRUMENTED_LINE);
+ int original_opcode = 0;
+ if (tstate->tracing) {
+ PyCodeObject *code = _PyFrame_GetCode(frame);
+ original_opcode = code->_co_monitoring->lines[(int)(this_instr - _PyCode_CODE(code))].original_opcode;
+ next_instr = this_instr;
+ } else {
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ original_opcode = _Py_call_instrumentation_line(
+ tstate, frame, this_instr, prev_instr);
+ stack_pointer = _PyFrame_GetStackPointer(frame);
+ if (original_opcode < 0) {
+ next_instr = this_instr+1;
+ goto error;
+ }
+ next_instr = frame->instr_ptr;
+ if (next_instr != this_instr) {
+ DISPATCH();
+ }
+ }
+ if (_PyOpcode_Caches[original_opcode]) {
+ _PyBinaryOpCache *cache = (_PyBinaryOpCache *)(next_instr+1);
+ /* Prevent the underlying instruction from specializing
+ * and overwriting the instrumentation. */
+ PAUSE_ADAPTIVE_COUNTER(cache->counter);
+ }
+ opcode = original_opcode;
+ DISPATCH_GOTO();
+ }
+
TARGET(INSTRUMENTED_LOAD_SUPER_ATTR) {
_Py_CODEUNIT *this_instr = frame->instr_ptr = next_instr;
(void)this_instr;
@@ -3785,23 +3820,44 @@
(void)this_instr;
next_instr += 1;
INSTRUCTION_STATS(INSTRUMENTED_RETURN_CONST);
- PyObject *retval = GETITEM(FRAME_CO_CONSTS, oparg);
- int err = _Py_call_instrumentation_arg(
- tstate, PY_MONITORING_EVENT_PY_RETURN,
- frame, this_instr, retval);
- if (err) goto error;
- Py_INCREF(retval);
- assert(EMPTY());
- _PyFrame_SetStackPointer(frame, stack_pointer);
- _Py_LeaveRecursiveCallPy(tstate);
- assert(frame != &entry_frame);
- // GH-99729: We need to unlink the frame *before* clearing it:
- _PyInterpreterFrame *dying = frame;
- frame = tstate->current_frame = dying->previous;
- _PyEval_FrameClearAndPop(tstate, dying);
- _PyFrame_StackPush(frame, PyStackRef_FromPyObjectSteal(retval));
- LOAD_IP(frame->return_offset);
- goto resume_frame;
+ _PyStackRef value;
+ _PyStackRef val;
+ _PyStackRef retval;
+ _PyStackRef res;
+ // _LOAD_CONST
+ {
+ value = PyStackRef_FromPyObjectNew(GETITEM(FRAME_CO_CONSTS, oparg));
+ }
+ // _RETURN_VALUE_EVENT
+ val = value;
+ {
+ int err = _Py_call_instrumentation_arg(
+ tstate, PY_MONITORING_EVENT_PY_RETURN,
+ frame, this_instr, PyStackRef_AsPyObjectBorrow(val));
+ if (err) goto error;
+ }
+ // _RETURN_VALUE
+ retval = val;
+ {
+ #if TIER_ONE
+ assert(frame != &entry_frame);
+ #endif
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ assert(EMPTY());
+ _Py_LeaveRecursiveCallPy(tstate);
+ // GH-99729: We need to unlink the frame *before* clearing it:
+ _PyInterpreterFrame *dying = frame;
+ frame = tstate->current_frame = dying->previous;
+ _PyEval_FrameClearAndPop(tstate, dying);
+ LOAD_SP();
+ LOAD_IP(frame->return_offset);
+ res = retval;
+ LLTRACE_RESUME_FRAME();
+ }
+ stack_pointer[0] = res;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ DISPATCH();
}
TARGET(INSTRUMENTED_RETURN_VALUE) {
@@ -3809,24 +3865,41 @@
(void)this_instr;
next_instr += 1;
INSTRUCTION_STATS(INSTRUMENTED_RETURN_VALUE);
+ _PyStackRef val;
_PyStackRef retval;
- retval = stack_pointer[-1];
- int err = _Py_call_instrumentation_arg(
- tstate, PY_MONITORING_EVENT_PY_RETURN,
- frame, this_instr, PyStackRef_AsPyObjectBorrow(retval));
- if (err) goto error;
- STACK_SHRINK(1);
- assert(EMPTY());
- _PyFrame_SetStackPointer(frame, stack_pointer);
- _Py_LeaveRecursiveCallPy(tstate);
- assert(frame != &entry_frame);
- // GH-99729: We need to unlink the frame *before* clearing it:
- _PyInterpreterFrame *dying = frame;
- frame = tstate->current_frame = dying->previous;
- _PyEval_FrameClearAndPop(tstate, dying);
- _PyFrame_StackPush(frame, retval);
- LOAD_IP(frame->return_offset);
- goto resume_frame;
+ _PyStackRef res;
+ // _RETURN_VALUE_EVENT
+ val = stack_pointer[-1];
+ {
+ int err = _Py_call_instrumentation_arg(
+ tstate, PY_MONITORING_EVENT_PY_RETURN,
+ frame, this_instr, PyStackRef_AsPyObjectBorrow(val));
+ if (err) goto error;
+ }
+ // _RETURN_VALUE
+ retval = val;
+ {
+ #if TIER_ONE
+ assert(frame != &entry_frame);
+ #endif
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ assert(EMPTY());
+ _Py_LeaveRecursiveCallPy(tstate);
+ // GH-99729: We need to unlink the frame *before* clearing it:
+ _PyInterpreterFrame *dying = frame;
+ frame = tstate->current_frame = dying->previous;
+ _PyEval_FrameClearAndPop(tstate, dying);
+ LOAD_SP();
+ LOAD_IP(frame->return_offset);
+ res = retval;
+ LLTRACE_RESUME_FRAME();
+ }
+ stack_pointer[0] = res;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ DISPATCH();
}
TARGET(INSTRUMENTED_YIELD_VALUE) {
@@ -3834,30 +3907,65 @@
(void)this_instr;
next_instr += 1;
INSTRUCTION_STATS(INSTRUMENTED_YIELD_VALUE);
+ _PyStackRef val;
_PyStackRef retval;
- retval = stack_pointer[-1];
- assert(frame != &entry_frame);
- frame->instr_ptr = next_instr;
- PyGenObject *gen = _PyGen_GetGeneratorFromFrame(frame);
- assert(FRAME_SUSPENDED_YIELD_FROM == FRAME_SUSPENDED + 1);
- assert(oparg == 0 || oparg == 1);
- gen->gi_frame_state = FRAME_SUSPENDED + oparg;
- _PyFrame_SetStackPointer(frame, stack_pointer - 1);
- int err = _Py_call_instrumentation_arg(
- tstate, PY_MONITORING_EVENT_PY_YIELD,
- frame, this_instr, PyStackRef_AsPyObjectBorrow(retval));
- if (err) goto error;
- tstate->exc_info = gen->gi_exc_state.previous_item;
- gen->gi_exc_state.previous_item = NULL;
- _Py_LeaveRecursiveCallPy(tstate);
- _PyInterpreterFrame *gen_frame = frame;
- frame = tstate->current_frame = frame->previous;
- gen_frame->previous = NULL;
- _PyFrame_StackPush(frame, retval);
- /* We don't know which of these is relevant here, so keep them equal */
- assert(INLINE_CACHE_ENTRIES_SEND == INLINE_CACHE_ENTRIES_FOR_ITER);
- LOAD_IP(1 + INLINE_CACHE_ENTRIES_SEND);
- goto resume_frame;
+ _PyStackRef value;
+ // _YIELD_VALUE_EVENT
+ val = stack_pointer[-1];
+ {
+ SAVE_SP();
+ int err = _Py_call_instrumentation_arg(
+ tstate, PY_MONITORING_EVENT_PY_YIELD,
+ frame, this_instr, PyStackRef_AsPyObjectBorrow(val));
+ LOAD_SP();
+ if (err) goto error;
+ if (frame->instr_ptr != this_instr) {
+ next_instr = frame->instr_ptr;
+ DISPATCH();
+ }
+ }
+ // _YIELD_VALUE
+ retval = val;
+ {
+ // NOTE: It's important that YIELD_VALUE never raises an exception!
+ // The compiler treats any exception raised here as a failed close()
+ // or throw() call.
+ #if TIER_ONE
+ assert(frame != &entry_frame);
+ #endif
+ frame->instr_ptr++;
+ PyGenObject *gen = _PyGen_GetGeneratorFromFrame(frame);
+ assert(FRAME_SUSPENDED_YIELD_FROM == FRAME_SUSPENDED + 1);
+ assert(oparg == 0 || oparg == 1);
+ gen->gi_frame_state = FRAME_SUSPENDED + oparg;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ tstate->exc_info = gen->gi_exc_state.previous_item;
+ gen->gi_exc_state.previous_item = NULL;
+ _Py_LeaveRecursiveCallPy(tstate);
+ _PyInterpreterFrame *gen_frame = frame;
+ frame = tstate->current_frame = frame->previous;
+ gen_frame->previous = NULL;
+ /* We don't know which of these is relevant here, so keep them equal */
+ assert(INLINE_CACHE_ENTRIES_SEND == INLINE_CACHE_ENTRIES_FOR_ITER);
+ #if TIER_ONE
+ assert(frame->instr_ptr->op.code == INSTRUMENTED_LINE ||
+ frame->instr_ptr->op.code == INSTRUMENTED_INSTRUCTION ||
+ _PyOpcode_Deopt[frame->instr_ptr->op.code] == SEND ||
+ _PyOpcode_Deopt[frame->instr_ptr->op.code] == FOR_ITER ||
+ _PyOpcode_Deopt[frame->instr_ptr->op.code] == INTERPRETER_EXIT ||
+ _PyOpcode_Deopt[frame->instr_ptr->op.code] == ENTER_EXECUTOR);
+ #endif
+ LOAD_IP(1 + INLINE_CACHE_ENTRIES_SEND);
+ LOAD_SP();
+ value = retval;
+ LLTRACE_RESUME_FRAME();
+ }
+ stack_pointer[0] = value;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ DISPATCH();
}
TARGET(INTERPRETER_EXIT) {
diff --git a/Python/opcode_targets.h b/Python/opcode_targets.h
index 74544a1dff25c6..224aeb834935eb 100644
--- a/Python/opcode_targets.h
+++ b/Python/opcode_targets.h
@@ -238,9 +238,6 @@ static void *opcode_targets[256] = {
&&TARGET_INSTRUMENTED_RESUME,
&&TARGET_INSTRUMENTED_END_FOR,
&&TARGET_INSTRUMENTED_END_SEND,
- &&TARGET_INSTRUMENTED_RETURN_VALUE,
- &&TARGET_INSTRUMENTED_RETURN_CONST,
- &&TARGET_INSTRUMENTED_YIELD_VALUE,
&&TARGET_INSTRUMENTED_LOAD_SUPER_ATTR,
&&TARGET_INSTRUMENTED_FOR_ITER,
&&TARGET_INSTRUMENTED_CALL,
@@ -253,6 +250,9 @@ static void *opcode_targets[256] = {
&&TARGET_INSTRUMENTED_POP_JUMP_IF_FALSE,
&&TARGET_INSTRUMENTED_POP_JUMP_IF_NONE,
&&TARGET_INSTRUMENTED_POP_JUMP_IF_NOT_NONE,
+ &&TARGET_INSTRUMENTED_RETURN_VALUE,
+ &&TARGET_INSTRUMENTED_RETURN_CONST,
+ &&TARGET_INSTRUMENTED_YIELD_VALUE,
&&TARGET_INSTRUMENTED_LINE,
&&_unknown_opcode,
};
diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h
index 33af8552ba69e0..3c9e6d3043cde1 100644
--- a/Python/optimizer_cases.c.h
+++ b/Python/optimizer_cases.c.h
@@ -621,10 +621,6 @@
break;
}
- /* _INSTRUMENTED_RETURN_VALUE is not a viable micro-op for tier 2 */
-
- /* _INSTRUMENTED_RETURN_CONST is not a viable micro-op for tier 2 */
-
case _GET_AITER: {
_Py_UopsSymbol *iter;
iter = sym_new_not_null(ctx);
@@ -656,8 +652,6 @@
break;
}
- /* _INSTRUMENTED_YIELD_VALUE is not a viable micro-op for tier 2 */
-
case _YIELD_VALUE: {
_Py_UopsSymbol *res;
res = sym_new_unknown(ctx);
@@ -2056,6 +2050,8 @@
break;
}
+ /* _INSTRUMENTED_LINE is not a viable micro-op for tier 2 */
+
/* _INSTRUMENTED_INSTRUCTION is not a viable micro-op for tier 2 */
/* _INSTRUMENTED_JUMP_FORWARD is not a viable micro-op for tier 2 */
diff --git a/Tools/cases_generator/analyzer.py b/Tools/cases_generator/analyzer.py
index f5cf4fad4470d8..675dc0b9acaf45 100644
--- a/Tools/cases_generator/analyzer.py
+++ b/Tools/cases_generator/analyzer.py
@@ -27,6 +27,7 @@ class Properties:
tier: int | None = None
oparg_and_1: bool = False
const_oparg: int = -1
+ needs_prev: bool = False
def dump(self, indent: str) -> None:
print(indent, end="")
@@ -53,6 +54,7 @@ def from_list(properties: list["Properties"]) -> "Properties":
has_free=any(p.has_free for p in properties),
side_exit=any(p.side_exit for p in properties),
pure=all(p.pure for p in properties),
+ needs_prev=any(p.needs_prev for p in properties),
)
@property
@@ -618,6 +620,7 @@ def compute_properties(op: parser.InstDef) -> Properties:
has_free=has_free,
pure="pure" in op.annotations,
tier=tier_variable(op),
+ needs_prev=variable_used(op, "prev_instr"),
)
@@ -797,12 +800,6 @@ def assign_opcodes(
instrumented = [name for name in instructions if name.startswith("INSTRUMENTED")]
- # Special case: this instruction is implemented in ceval.c
- # rather than bytecodes.c, so we need to add it explicitly
- # here (at least until we add something to bytecodes.c to
- # declare external instructions).
- instrumented.append("INSTRUMENTED_LINE")
-
specialized: set[str] = set()
no_arg: list[str] = []
has_arg: list[str] = []
diff --git a/Tools/cases_generator/opcode_metadata_generator.py b/Tools/cases_generator/opcode_metadata_generator.py
index 0f5790dc4af40f..09b9d3d211eb24 100644
--- a/Tools/cases_generator/opcode_metadata_generator.py
+++ b/Tools/cases_generator/opcode_metadata_generator.py
@@ -151,7 +151,6 @@ def generate_deopt_table(analysis: Analysis, out: CWriter) -> None:
if inst.family is not None:
deopt = inst.family.name
deopts.append((inst.name, deopt))
- deopts.append(("INSTRUMENTED_LINE", "INSTRUMENTED_LINE"))
for name, deopt in sorted(deopts):
out.emit(f"[{name}] = {deopt},\n")
out.emit("};\n\n")
@@ -179,7 +178,6 @@ def generate_name_table(analysis: Analysis, out: CWriter) -> None:
out.emit("#ifdef NEED_OPCODE_METADATA\n")
out.emit(f"const char *_PyOpcode_OpName[{table_size}] = {{\n")
names = list(analysis.instructions) + list(analysis.pseudos)
- names.append("INSTRUMENTED_LINE")
for name in sorted(names):
out.emit(f'[{name}] = "{name}",\n')
out.emit("};\n")
diff --git a/Tools/cases_generator/tier1_generator.py b/Tools/cases_generator/tier1_generator.py
index 5dec66e8e0af15..118f4b3a6eaa1c 100644
--- a/Tools/cases_generator/tier1_generator.py
+++ b/Tools/cases_generator/tier1_generator.py
@@ -148,6 +148,8 @@ def generate_tier1(
out.emit("\n")
out.emit(f"TARGET({name}) {{\n")
unused_guard = "(void)this_instr;\n" if inst.family is None else ""
+ if inst.properties.needs_prev:
+ out.emit(f"_Py_CODEUNIT *prev_instr = frame->instr_ptr;\n")
if needs_this and not inst.is_target:
out.emit(f"_Py_CODEUNIT *this_instr = frame->instr_ptr = next_instr;\n")
out.emit(unused_guard)
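
As a rough sketch of the needs_prev plumbing added above (all names in this snippet are illustrative stand-ins, not the generator's real API): the analyzer notes whether an instruction body mentions prev_instr, and the tier-1 generator then declares that variable before emitting the body.

    def uses_prev_instr(body_tokens):
        # stand-in for the analyzer's variable_used(op, "prev_instr") check
        return "prev_instr" in body_tokens

    def emit_target(emit, name, body_tokens):
        emit(f"TARGET({name}) {{\n")
        if uses_prev_instr(body_tokens):
            # mirrors the line added to tier1_generator.py above
            emit("    _Py_CODEUNIT *prev_instr = frame->instr_ptr;\n")
        emit("    /* ... instruction body ... */\n")
        emit("}\n")

    # hypothetical instruction whose body reads prev_instr
    emit_target(print, "SOME_INSTRUMENTED_OP", ["prev_instr", "frame"])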
From 95a73917cd5a204979a78c13ba912621f1eeb2e3 Mon Sep 17 00:00:00 2001
From: Mark Shannon
Date: Fri, 26 Jul 2024 14:35:57 +0100
Subject: [PATCH 033/139] GH-122029: Break INSTRUMENTED_CALL into micro-ops, so
that its behavior is consistent with CALL (GH-122177)
---
Include/internal/pycore_opcode_metadata.h | 6 +-
Include/internal/pycore_uop_ids.h | 129 ++++++++--------
Include/internal/pycore_uop_metadata.h | 4 +
Include/opcode_ids.h | 26 ++--
Lib/_opcode_metadata.py | 26 ++--
Lib/test/test_monitoring.py | 2 +-
Python/bytecodes.c | 71 +++++----
Python/executor_cases.c.h | 34 ++++-
Python/generated_cases.c.h | 176 +++++++++++++++++++---
Python/opcode_targets.h | 2 +-
Python/optimizer_cases.c.h | 18 ++-
11 files changed, 341 insertions(+), 153 deletions(-)
diff --git a/Include/internal/pycore_opcode_metadata.h b/Include/internal/pycore_opcode_metadata.h
index f5c439e81a6232..eaba280f1bf1cd 100644
--- a/Include/internal/pycore_opcode_metadata.h
+++ b/Include/internal/pycore_opcode_metadata.h
@@ -214,7 +214,7 @@ int _PyOpcode_num_popped(int opcode, int oparg) {
case IMPORT_NAME:
return 2;
case INSTRUMENTED_CALL:
- return 0;
+ return 2 + oparg;
case INSTRUMENTED_CALL_FUNCTION_EX:
return 0;
case INSTRUMENTED_CALL_KW:
@@ -661,7 +661,7 @@ int _PyOpcode_num_pushed(int opcode, int oparg) {
case IMPORT_NAME:
return 1;
case INSTRUMENTED_CALL:
- return 0;
+ return 1;
case INSTRUMENTED_CALL_FUNCTION_EX:
return 0;
case INSTRUMENTED_CALL_KW:
@@ -1078,7 +1078,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[264] = {
[GET_YIELD_FROM_ITER] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG },
[IMPORT_FROM] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[IMPORT_NAME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
- [INSTRUMENTED_CALL] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
+ [INSTRUMENTED_CALL] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG },
[INSTRUMENTED_CALL_FUNCTION_EX] = { true, INSTR_FMT_IX, 0 },
[INSTRUMENTED_CALL_KW] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[INSTRUMENTED_END_FOR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG },
diff --git a/Include/internal/pycore_uop_ids.h b/Include/internal/pycore_uop_ids.h
index 60de0573baf5f1..d6c910255eb87b 100644
--- a/Include/internal/pycore_uop_ids.h
+++ b/Include/internal/pycore_uop_ids.h
@@ -33,12 +33,11 @@ extern "C" {
#define _BUILD_SLICE BUILD_SLICE
#define _BUILD_STRING BUILD_STRING
#define _BUILD_TUPLE BUILD_TUPLE
-#define _CALL 312
#define _CALL_ALLOC_AND_ENTER_INIT CALL_ALLOC_AND_ENTER_INIT
-#define _CALL_BUILTIN_CLASS 313
-#define _CALL_BUILTIN_FAST 314
-#define _CALL_BUILTIN_FAST_WITH_KEYWORDS 315
-#define _CALL_BUILTIN_O 316
+#define _CALL_BUILTIN_CLASS 312
+#define _CALL_BUILTIN_FAST 313
+#define _CALL_BUILTIN_FAST_WITH_KEYWORDS 314
+#define _CALL_BUILTIN_O 315
#define _CALL_FUNCTION_EX CALL_FUNCTION_EX
#define _CALL_INTRINSIC_1 CALL_INTRINSIC_1
#define _CALL_INTRINSIC_2 CALL_INTRINSIC_2
@@ -46,38 +45,38 @@ extern "C" {
#define _CALL_KW CALL_KW
#define _CALL_LEN CALL_LEN
#define _CALL_LIST_APPEND CALL_LIST_APPEND
-#define _CALL_METHOD_DESCRIPTOR_FAST 317
-#define _CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS 318
-#define _CALL_METHOD_DESCRIPTOR_NOARGS 319
-#define _CALL_METHOD_DESCRIPTOR_O 320
-#define _CALL_NON_PY_GENERAL 321
-#define _CALL_STR_1 322
-#define _CALL_TUPLE_1 323
+#define _CALL_METHOD_DESCRIPTOR_FAST 316
+#define _CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS 317
+#define _CALL_METHOD_DESCRIPTOR_NOARGS 318
+#define _CALL_METHOD_DESCRIPTOR_O 319
+#define _CALL_NON_PY_GENERAL 320
+#define _CALL_STR_1 321
+#define _CALL_TUPLE_1 322
#define _CALL_TYPE_1 CALL_TYPE_1
-#define _CHECK_ATTR_CLASS 324
-#define _CHECK_ATTR_METHOD_LAZY_DICT 325
-#define _CHECK_ATTR_MODULE 326
-#define _CHECK_ATTR_WITH_HINT 327
-#define _CHECK_CALL_BOUND_METHOD_EXACT_ARGS 328
+#define _CHECK_ATTR_CLASS 323
+#define _CHECK_ATTR_METHOD_LAZY_DICT 324
+#define _CHECK_ATTR_MODULE 325
+#define _CHECK_ATTR_WITH_HINT 326
+#define _CHECK_CALL_BOUND_METHOD_EXACT_ARGS 327
#define _CHECK_EG_MATCH CHECK_EG_MATCH
#define _CHECK_EXC_MATCH CHECK_EXC_MATCH
-#define _CHECK_FUNCTION 329
-#define _CHECK_FUNCTION_EXACT_ARGS 330
-#define _CHECK_FUNCTION_VERSION 331
-#define _CHECK_IS_NOT_PY_CALLABLE 332
-#define _CHECK_MANAGED_OBJECT_HAS_VALUES 333
-#define _CHECK_METHOD_VERSION 334
-#define _CHECK_PEP_523 335
-#define _CHECK_PERIODIC 336
-#define _CHECK_STACK_SPACE 337
-#define _CHECK_STACK_SPACE_OPERAND 338
-#define _CHECK_VALIDITY 339
-#define _CHECK_VALIDITY_AND_SET_IP 340
-#define _COMPARE_OP 341
-#define _COMPARE_OP_FLOAT 342
-#define _COMPARE_OP_INT 343
-#define _COMPARE_OP_STR 344
-#define _CONTAINS_OP 345
+#define _CHECK_FUNCTION 328
+#define _CHECK_FUNCTION_EXACT_ARGS 329
+#define _CHECK_FUNCTION_VERSION 330
+#define _CHECK_IS_NOT_PY_CALLABLE 331
+#define _CHECK_MANAGED_OBJECT_HAS_VALUES 332
+#define _CHECK_METHOD_VERSION 333
+#define _CHECK_PEP_523 334
+#define _CHECK_PERIODIC 335
+#define _CHECK_STACK_SPACE 336
+#define _CHECK_STACK_SPACE_OPERAND 337
+#define _CHECK_VALIDITY 338
+#define _CHECK_VALIDITY_AND_SET_IP 339
+#define _COMPARE_OP 340
+#define _COMPARE_OP_FLOAT 341
+#define _COMPARE_OP_INT 342
+#define _COMPARE_OP_STR 343
+#define _CONTAINS_OP 344
#define _CONTAINS_OP_DICT CONTAINS_OP_DICT
#define _CONTAINS_OP_SET CONTAINS_OP_SET
#define _CONVERT_VALUE CONVERT_VALUE
@@ -89,9 +88,10 @@ extern "C" {
#define _DELETE_GLOBAL DELETE_GLOBAL
#define _DELETE_NAME DELETE_NAME
#define _DELETE_SUBSCR DELETE_SUBSCR
-#define _DEOPT 346
+#define _DEOPT 345
#define _DICT_MERGE DICT_MERGE
#define _DICT_UPDATE DICT_UPDATE
+#define _DO_CALL 346
#define _DYNAMIC_EXIT 347
#define _END_SEND END_SEND
#define _ERROR_POP_N 348
@@ -138,7 +138,6 @@ extern "C" {
#define _INIT_CALL_PY_EXACT_ARGS_2 378
#define _INIT_CALL_PY_EXACT_ARGS_3 379
#define _INIT_CALL_PY_EXACT_ARGS_4 380
-#define _INSTRUMENTED_CALL INSTRUMENTED_CALL
#define _INSTRUMENTED_CALL_FUNCTION_EX INSTRUMENTED_CALL_FUNCTION_EX
#define _INSTRUMENTED_CALL_KW INSTRUMENTED_CALL_KW
#define _INSTRUMENTED_FOR_ITER INSTRUMENTED_FOR_ITER
@@ -223,53 +222,55 @@ extern "C" {
#define _MATCH_KEYS MATCH_KEYS
#define _MATCH_MAPPING MATCH_MAPPING
#define _MATCH_SEQUENCE MATCH_SEQUENCE
+#define _MAYBE_EXPAND_METHOD 427
+#define _MONITOR_CALL 428
#define _NOP NOP
#define _POP_EXCEPT POP_EXCEPT
-#define _POP_JUMP_IF_FALSE 427
-#define _POP_JUMP_IF_TRUE 428
+#define _POP_JUMP_IF_FALSE 429
+#define _POP_JUMP_IF_TRUE 430
#define _POP_TOP POP_TOP
-#define _POP_TOP_LOAD_CONST_INLINE_BORROW 429
+#define _POP_TOP_LOAD_CONST_INLINE_BORROW 431
#define _PUSH_EXC_INFO PUSH_EXC_INFO
-#define _PUSH_FRAME 430
+#define _PUSH_FRAME 432
#define _PUSH_NULL PUSH_NULL
-#define _PY_FRAME_GENERAL 431
-#define _REPLACE_WITH_TRUE 432
+#define _PY_FRAME_GENERAL 433
+#define _REPLACE_WITH_TRUE 434
#define _RESUME_CHECK RESUME_CHECK
#define _RETURN_GENERATOR RETURN_GENERATOR
#define _RETURN_VALUE RETURN_VALUE
-#define _SAVE_RETURN_OFFSET 433
-#define _SEND 434
-#define _SEND_GEN_FRAME 435
+#define _SAVE_RETURN_OFFSET 435
+#define _SEND 436
+#define _SEND_GEN_FRAME 437
#define _SETUP_ANNOTATIONS SETUP_ANNOTATIONS
#define _SET_ADD SET_ADD
#define _SET_FUNCTION_ATTRIBUTE SET_FUNCTION_ATTRIBUTE
#define _SET_UPDATE SET_UPDATE
-#define _START_EXECUTOR 436
-#define _STORE_ATTR 437
-#define _STORE_ATTR_INSTANCE_VALUE 438
-#define _STORE_ATTR_SLOT 439
-#define _STORE_ATTR_WITH_HINT 440
+#define _START_EXECUTOR 438
+#define _STORE_ATTR 439
+#define _STORE_ATTR_INSTANCE_VALUE 440
+#define _STORE_ATTR_SLOT 441
+#define _STORE_ATTR_WITH_HINT 442
#define _STORE_DEREF STORE_DEREF
-#define _STORE_FAST 441
-#define _STORE_FAST_0 442
-#define _STORE_FAST_1 443
-#define _STORE_FAST_2 444
-#define _STORE_FAST_3 445
-#define _STORE_FAST_4 446
-#define _STORE_FAST_5 447
-#define _STORE_FAST_6 448
-#define _STORE_FAST_7 449
+#define _STORE_FAST 443
+#define _STORE_FAST_0 444
+#define _STORE_FAST_1 445
+#define _STORE_FAST_2 446
+#define _STORE_FAST_3 447
+#define _STORE_FAST_4 448
+#define _STORE_FAST_5 449
+#define _STORE_FAST_6 450
+#define _STORE_FAST_7 451
#define _STORE_FAST_LOAD_FAST STORE_FAST_LOAD_FAST
#define _STORE_FAST_STORE_FAST STORE_FAST_STORE_FAST
#define _STORE_GLOBAL STORE_GLOBAL
#define _STORE_NAME STORE_NAME
#define _STORE_SLICE STORE_SLICE
-#define _STORE_SUBSCR 450
+#define _STORE_SUBSCR 452
#define _STORE_SUBSCR_DICT STORE_SUBSCR_DICT
#define _STORE_SUBSCR_LIST_INT STORE_SUBSCR_LIST_INT
#define _SWAP SWAP
-#define _TIER2_RESUME_CHECK 451
-#define _TO_BOOL 452
+#define _TIER2_RESUME_CHECK 453
+#define _TO_BOOL 454
#define _TO_BOOL_BOOL TO_BOOL_BOOL
#define _TO_BOOL_INT TO_BOOL_INT
#define _TO_BOOL_LIST TO_BOOL_LIST
@@ -279,13 +280,13 @@ extern "C" {
#define _UNARY_NEGATIVE UNARY_NEGATIVE
#define _UNARY_NOT UNARY_NOT
#define _UNPACK_EX UNPACK_EX
-#define _UNPACK_SEQUENCE 453
+#define _UNPACK_SEQUENCE 455
#define _UNPACK_SEQUENCE_LIST UNPACK_SEQUENCE_LIST
#define _UNPACK_SEQUENCE_TUPLE UNPACK_SEQUENCE_TUPLE
#define _UNPACK_SEQUENCE_TWO_TUPLE UNPACK_SEQUENCE_TWO_TUPLE
#define _WITH_EXCEPT_START WITH_EXCEPT_START
#define _YIELD_VALUE YIELD_VALUE
-#define MAX_UOP_ID 453
+#define MAX_UOP_ID 455
#ifdef __cplusplus
}
diff --git a/Include/internal/pycore_uop_metadata.h b/Include/internal/pycore_uop_metadata.h
index 190c6fb2365cc4..fd0d4a67d93538 100644
--- a/Include/internal/pycore_uop_metadata.h
+++ b/Include/internal/pycore_uop_metadata.h
@@ -199,6 +199,7 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = {
[_LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = HAS_ARG_FLAG,
[_CHECK_ATTR_METHOD_LAZY_DICT] = HAS_DEOPT_FLAG,
[_LOAD_ATTR_METHOD_LAZY_DICT] = HAS_ARG_FLAG,
+ [_MAYBE_EXPAND_METHOD] = HAS_ARG_FLAG,
[_CHECK_PERIODIC] = HAS_EVAL_BREAK_FLAG,
[_PY_FRAME_GENERAL] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG,
[_CHECK_FUNCTION_VERSION] = HAS_ARG_FLAG | HAS_EXIT_FLAG,
@@ -464,6 +465,7 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = {
[_MATCH_KEYS] = "_MATCH_KEYS",
[_MATCH_MAPPING] = "_MATCH_MAPPING",
[_MATCH_SEQUENCE] = "_MATCH_SEQUENCE",
+ [_MAYBE_EXPAND_METHOD] = "_MAYBE_EXPAND_METHOD",
[_NOP] = "_NOP",
[_POP_EXCEPT] = "_POP_EXCEPT",
[_POP_TOP] = "_POP_TOP",
@@ -888,6 +890,8 @@ int _PyUop_num_popped(int opcode, int oparg)
return 1;
case _LOAD_ATTR_METHOD_LAZY_DICT:
return 1;
+ case _MAYBE_EXPAND_METHOD:
+ return 2 + oparg;
case _CHECK_PERIODIC:
return 0;
case _PY_FRAME_GENERAL:
diff --git a/Include/opcode_ids.h b/Include/opcode_ids.h
index d14b48f4289285..54dd76158bf84d 100644
--- a/Include/opcode_ids.h
+++ b/Include/opcode_ids.h
@@ -206,19 +206,19 @@ extern "C" {
#define INSTRUMENTED_END_SEND 238
#define INSTRUMENTED_LOAD_SUPER_ATTR 239
#define INSTRUMENTED_FOR_ITER 240
-#define INSTRUMENTED_CALL 241
-#define INSTRUMENTED_CALL_KW 242
-#define INSTRUMENTED_CALL_FUNCTION_EX 243
-#define INSTRUMENTED_INSTRUCTION 244
-#define INSTRUMENTED_JUMP_FORWARD 245
-#define INSTRUMENTED_JUMP_BACKWARD 246
-#define INSTRUMENTED_POP_JUMP_IF_TRUE 247
-#define INSTRUMENTED_POP_JUMP_IF_FALSE 248
-#define INSTRUMENTED_POP_JUMP_IF_NONE 249
-#define INSTRUMENTED_POP_JUMP_IF_NOT_NONE 250
-#define INSTRUMENTED_RETURN_VALUE 251
-#define INSTRUMENTED_RETURN_CONST 252
-#define INSTRUMENTED_YIELD_VALUE 253
+#define INSTRUMENTED_CALL_KW 241
+#define INSTRUMENTED_CALL_FUNCTION_EX 242
+#define INSTRUMENTED_INSTRUCTION 243
+#define INSTRUMENTED_JUMP_FORWARD 244
+#define INSTRUMENTED_JUMP_BACKWARD 245
+#define INSTRUMENTED_POP_JUMP_IF_TRUE 246
+#define INSTRUMENTED_POP_JUMP_IF_FALSE 247
+#define INSTRUMENTED_POP_JUMP_IF_NONE 248
+#define INSTRUMENTED_POP_JUMP_IF_NOT_NONE 249
+#define INSTRUMENTED_RETURN_VALUE 250
+#define INSTRUMENTED_RETURN_CONST 251
+#define INSTRUMENTED_YIELD_VALUE 252
+#define INSTRUMENTED_CALL 253
#define INSTRUMENTED_LINE 254
#define JUMP 256
#define JUMP_NO_INTERRUPT 257
diff --git a/Lib/_opcode_metadata.py b/Lib/_opcode_metadata.py
index 01c22a89846e97..05ee1f29b58331 100644
--- a/Lib/_opcode_metadata.py
+++ b/Lib/_opcode_metadata.py
@@ -310,19 +310,19 @@
'INSTRUMENTED_END_SEND': 238,
'INSTRUMENTED_LOAD_SUPER_ATTR': 239,
'INSTRUMENTED_FOR_ITER': 240,
- 'INSTRUMENTED_CALL': 241,
- 'INSTRUMENTED_CALL_KW': 242,
- 'INSTRUMENTED_CALL_FUNCTION_EX': 243,
- 'INSTRUMENTED_INSTRUCTION': 244,
- 'INSTRUMENTED_JUMP_FORWARD': 245,
- 'INSTRUMENTED_JUMP_BACKWARD': 246,
- 'INSTRUMENTED_POP_JUMP_IF_TRUE': 247,
- 'INSTRUMENTED_POP_JUMP_IF_FALSE': 248,
- 'INSTRUMENTED_POP_JUMP_IF_NONE': 249,
- 'INSTRUMENTED_POP_JUMP_IF_NOT_NONE': 250,
- 'INSTRUMENTED_RETURN_VALUE': 251,
- 'INSTRUMENTED_RETURN_CONST': 252,
- 'INSTRUMENTED_YIELD_VALUE': 253,
+ 'INSTRUMENTED_CALL_KW': 241,
+ 'INSTRUMENTED_CALL_FUNCTION_EX': 242,
+ 'INSTRUMENTED_INSTRUCTION': 243,
+ 'INSTRUMENTED_JUMP_FORWARD': 244,
+ 'INSTRUMENTED_JUMP_BACKWARD': 245,
+ 'INSTRUMENTED_POP_JUMP_IF_TRUE': 246,
+ 'INSTRUMENTED_POP_JUMP_IF_FALSE': 247,
+ 'INSTRUMENTED_POP_JUMP_IF_NONE': 248,
+ 'INSTRUMENTED_POP_JUMP_IF_NOT_NONE': 249,
+ 'INSTRUMENTED_RETURN_VALUE': 250,
+ 'INSTRUMENTED_RETURN_CONST': 251,
+ 'INSTRUMENTED_YIELD_VALUE': 252,
+ 'INSTRUMENTED_CALL': 253,
'JUMP': 256,
'JUMP_NO_INTERRUPT': 257,
'LOAD_CLOSURE': 258,
diff --git a/Lib/test/test_monitoring.py b/Lib/test/test_monitoring.py
index a07be306986b43..1a129b9432e72d 100644
--- a/Lib/test/test_monitoring.py
+++ b/Lib/test/test_monitoring.py
@@ -1575,7 +1575,7 @@ def f():
('line', 'method', 2),
('line', 'method', 3),
('line', 'method', 2),
- ('call', 'method', 1),
+ ('call', 'method', d["b"]),
('line', 'method', 1),
('line', 'method', 1),
('line', 'get_events', 11),
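
The updated expectation reflects that the CALL event for a bound-method call now reports the receiver as arg0 rather than a plain positional value. A minimal, hedged sketch of observing this with sys.monitoring (the tool id and the class are illustrative):

    import sys

    mon = sys.monitoring
    TOOL = mon.PROFILER_ID            # any free tool id works; choice is illustrative
    mon.use_tool_id(TOOL, "call-demo")

    class C:
        def method(self):
            return 42

    calls = []

    def on_call(code, offset, callable_obj, arg0):
        # arg0 is the first argument; for a bound-method call that is the receiver
        calls.append((callable_obj, arg0))

    mon.register_callback(TOOL, mon.events.CALL, on_call)
    mon.set_events(TOOL, mon.events.CALL)
    obj = C()
    obj.method()
    mon.set_events(TOOL, 0)
    mon.free_tool_id(TOOL)
    # one recorded event corresponds to obj.method(), with arg0 being obj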
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index 971397c955de09..871e2dbf358418 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -3241,20 +3241,6 @@ dummy_func(
unused/1 +
_LOAD_ATTR_METHOD_LAZY_DICT;
- inst(INSTRUMENTED_CALL, (unused/3 -- )) {
- int is_meth = PyStackRef_AsPyObjectBorrow(PEEK(oparg + 1)) != NULL;
- int total_args = oparg + is_meth;
- PyObject *function = PyStackRef_AsPyObjectBorrow(PEEK(oparg + 2));
- PyObject *arg = total_args == 0 ?
- &_PyInstrumentation_MISSING : PyStackRef_AsPyObjectBorrow(PEEK(total_args));
- int err = _Py_call_instrumentation_2args(
- tstate, PY_MONITORING_EVENT_CALL,
- frame, this_instr, function, arg);
- ERROR_IF(err, error);
- PAUSE_ADAPTIVE_COUNTER(this_instr[1].counter);
- GO_TO_INSTRUCTION(CALL);
- }
-
// Cache layout: counter/1, func_version/2
// CALL_INTRINSIC_1/2, CALL_KW, and CALL_FUNCTION_EX aren't members!
family(CALL, INLINE_CACHE_ENTRIES_CALL) = {
@@ -3292,27 +3278,33 @@ dummy_func(
#endif /* ENABLE_SPECIALIZATION */
}
+ op(_MAYBE_EXPAND_METHOD, (callable, self_or_null, args[oparg] -- func, maybe_self, args[oparg])) {
+ if (PyStackRef_TYPE(callable) == &PyMethod_Type && PyStackRef_IsNull(self_or_null)) {
+ PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable);
+ PyObject *self = ((PyMethodObject *)callable_o)->im_self;
+ maybe_self = PyStackRef_FromPyObjectNew(self);
+ PyObject *method = ((PyMethodObject *)callable_o)->im_func;
+ func = PyStackRef_FromPyObjectNew(method);
+ /* Make sure that callable and all args are in memory */
+ args[-2] = func;
+ args[-1] = maybe_self;
+ PyStackRef_CLOSE(callable);
+ }
+ else {
+ func = callable;
+ maybe_self = self_or_null;
+ }
+ }
+
// When calling Python, inline the call using DISPATCH_INLINED().
- op(_CALL, (callable, self_or_null, args[oparg] -- res)) {
+ op(_DO_CALL, (callable, self_or_null, args[oparg] -- res)) {
PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable);
- PyObject *self_or_null_o = PyStackRef_AsPyObjectBorrow(self_or_null);
// oparg counts all of the args, but *not* self:
int total_args = oparg;
- if (self_or_null_o != NULL) {
- args--;
- total_args++;
- }
- else if (Py_TYPE(callable_o) == &PyMethod_Type) {
+ if (!PyStackRef_IsNull(self_or_null)) {
args--;
total_args++;
- PyObject *self = ((PyMethodObject *)callable_o)->im_self;
- args[0] = PyStackRef_FromPyObjectNew(self);
- PyObject *method = ((PyMethodObject *)callable_o)->im_func;
- args[-1] = PyStackRef_FromPyObjectNew(method);
- PyStackRef_CLOSE(callable);
- callable_o = method;
- callable = args[-1];
}
// Check if the call can be inlined or not
if (Py_TYPE(callable_o) == &PyFunction_Type &&
@@ -3376,7 +3368,28 @@ dummy_func(
CHECK_EVAL_BREAKER();
}
- macro(CALL) = _SPECIALIZE_CALL + unused/2 + _CALL + _CHECK_PERIODIC;
+ op(_MONITOR_CALL, (func, maybe_self, args[oparg] -- func, maybe_self, args[oparg])) {
+ int is_meth = !PyStackRef_IsNull(maybe_self);
+ PyObject *function = PyStackRef_AsPyObjectBorrow(func);
+ PyObject *arg0;
+ if (is_meth) {
+ arg0 = PyStackRef_AsPyObjectBorrow(maybe_self);
+ }
+ else if (oparg) {
+ arg0 = PyStackRef_AsPyObjectBorrow(args[0]);
+ }
+ else {
+ arg0 = &_PyInstrumentation_MISSING;
+ }
+ int err = _Py_call_instrumentation_2args(
+ tstate, PY_MONITORING_EVENT_CALL,
+ frame, this_instr, function, arg0
+ );
+ ERROR_IF(err, error);
+ }
+
+ macro(CALL) = _SPECIALIZE_CALL + unused/2 + _MAYBE_EXPAND_METHOD + _DO_CALL + _CHECK_PERIODIC;
+ macro(INSTRUMENTED_CALL) = unused/3 + _MAYBE_EXPAND_METHOD + _MONITOR_CALL + _DO_CALL + _CHECK_PERIODIC;
op(_PY_FRAME_GENERAL, (callable, self_or_null, args[oparg] -- new_frame: _PyInterpreterFrame*)) {
PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable);
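
The new _MAYBE_EXPAND_METHOD uop hoists the bound-method unpacking out of the old _CALL body so that CALL and INSTRUMENTED_CALL can share it. A conceptual Python analogue, purely illustrative of the idea rather than of the interpreter's actual data structures:

    import types

    def maybe_expand_method(callable_obj, self_or_null):
        # Conceptual analogue of _MAYBE_EXPAND_METHOD: a bound method sitting in
        # the callable slot is split into (underlying function, receiver) so the
        # later micro-ops (_MONITOR_CALL, _DO_CALL) see one uniform layout.
        if isinstance(callable_obj, types.MethodType) and self_or_null is None:
            return callable_obj.__func__, callable_obj.__self__
        return callable_obj, self_or_null

    class Greeter:
        def hello(self):
            return "hello"

    g = Greeter()
    func, maybe_self = maybe_expand_method(g.hello, None)
    assert func is Greeter.hello
    assert maybe_self is g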
diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h
index 288e0f9135c27e..1ced8b951b5ce9 100644
--- a/Python/executor_cases.c.h
+++ b/Python/executor_cases.c.h
@@ -3584,15 +3584,45 @@
break;
}
- /* _INSTRUMENTED_CALL is not a viable micro-op for tier 2 because it is instrumented */
+ case _MAYBE_EXPAND_METHOD: {
+ _PyStackRef *args;
+ _PyStackRef self_or_null;
+ _PyStackRef callable;
+ _PyStackRef func;
+ _PyStackRef maybe_self;
+ oparg = CURRENT_OPARG();
+ args = &stack_pointer[-oparg];
+ self_or_null = stack_pointer[-1 - oparg];
+ callable = stack_pointer[-2 - oparg];
+ if (PyStackRef_TYPE(callable) == &PyMethod_Type && PyStackRef_IsNull(self_or_null)) {
+ PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable);
+ PyObject *self = ((PyMethodObject *)callable_o)->im_self;
+ maybe_self = PyStackRef_FromPyObjectNew(self);
+ PyObject *method = ((PyMethodObject *)callable_o)->im_func;
+ func = PyStackRef_FromPyObjectNew(method);
+ /* Make sure that callable and all args are in memory */
+ args[-2] = func;
+ args[-1] = maybe_self;
+ PyStackRef_CLOSE(callable);
+ }
+ else {
+ func = callable;
+ maybe_self = self_or_null;
+ }
+ stack_pointer[-2 - oparg] = func;
+ stack_pointer[-1 - oparg] = maybe_self;
+ break;
+ }
- /* _CALL is not a viable micro-op for tier 2 because it uses the 'this_instr' variable */
+ /* _DO_CALL is not a viable micro-op for tier 2 because it uses the 'this_instr' variable */
case _CHECK_PERIODIC: {
CHECK_EVAL_BREAKER();
break;
}
+ /* _MONITOR_CALL is not a viable micro-op for tier 2 because it uses the 'this_instr' variable */
+
case _PY_FRAME_GENERAL: {
_PyStackRef *args;
_PyStackRef self_or_null;
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h
index 634053a93837c6..76d1cc7ad6cf95 100644
--- a/Python/generated_cases.c.h
+++ b/Python/generated_cases.c.h
@@ -773,6 +773,8 @@
_PyStackRef callable;
_PyStackRef self_or_null;
_PyStackRef *args;
+ _PyStackRef func;
+ _PyStackRef maybe_self;
_PyStackRef res;
// _SPECIALIZE_CALL
self_or_null = stack_pointer[-1 - oparg];
@@ -792,26 +794,34 @@
#endif /* ENABLE_SPECIALIZATION */
}
/* Skip 2 cache entries */
- // _CALL
+ // _MAYBE_EXPAND_METHOD
+ {
+ if (PyStackRef_TYPE(callable) == &PyMethod_Type && PyStackRef_IsNull(self_or_null)) {
+ PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable);
+ PyObject *self = ((PyMethodObject *)callable_o)->im_self;
+ maybe_self = PyStackRef_FromPyObjectNew(self);
+ PyObject *method = ((PyMethodObject *)callable_o)->im_func;
+ func = PyStackRef_FromPyObjectNew(method);
+ /* Make sure that callable and all args are in memory */
+ args[-2] = func;
+ args[-1] = maybe_self;
+ PyStackRef_CLOSE(callable);
+ }
+ else {
+ func = callable;
+ maybe_self = self_or_null;
+ }
+ }
+ // _DO_CALL
+ self_or_null = maybe_self;
+ callable = func;
{
PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable);
- PyObject *self_or_null_o = PyStackRef_AsPyObjectBorrow(self_or_null);
// oparg counts all of the args, but *not* self:
int total_args = oparg;
- if (self_or_null_o != NULL) {
- args--;
- total_args++;
- }
- else if (Py_TYPE(callable_o) == &PyMethod_Type) {
+ if (!PyStackRef_IsNull(self_or_null)) {
args--;
total_args++;
- PyObject *self = ((PyMethodObject *)callable_o)->im_self;
- args[0] = PyStackRef_FromPyObjectNew(self);
- PyObject *method = ((PyMethodObject *)callable_o)->im_func;
- args[-1] = PyStackRef_FromPyObjectNew(method);
- PyStackRef_CLOSE(callable);
- callable_o = method;
- callable = args[-1];
}
// Check if the call can be inlined or not
if (Py_TYPE(callable_o) == &PyFunction_Type &&
@@ -3504,18 +3514,134 @@
(void)this_instr;
next_instr += 4;
INSTRUCTION_STATS(INSTRUMENTED_CALL);
+ _PyStackRef callable;
+ _PyStackRef self_or_null;
+ _PyStackRef *args;
+ _PyStackRef func;
+ _PyStackRef maybe_self;
+ _PyStackRef res;
/* Skip 3 cache entries */
- int is_meth = PyStackRef_AsPyObjectBorrow(PEEK(oparg + 1)) != NULL;
- int total_args = oparg + is_meth;
- PyObject *function = PyStackRef_AsPyObjectBorrow(PEEK(oparg + 2));
- PyObject *arg = total_args == 0 ?
- &_PyInstrumentation_MISSING : PyStackRef_AsPyObjectBorrow(PEEK(total_args));
- int err = _Py_call_instrumentation_2args(
- tstate, PY_MONITORING_EVENT_CALL,
- frame, this_instr, function, arg);
- if (err) goto error;
- PAUSE_ADAPTIVE_COUNTER(this_instr[1].counter);
- GO_TO_INSTRUCTION(CALL);
+ // _MAYBE_EXPAND_METHOD
+ args = &stack_pointer[-oparg];
+ self_or_null = stack_pointer[-1 - oparg];
+ callable = stack_pointer[-2 - oparg];
+ {
+ if (PyStackRef_TYPE(callable) == &PyMethod_Type && PyStackRef_IsNull(self_or_null)) {
+ PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable);
+ PyObject *self = ((PyMethodObject *)callable_o)->im_self;
+ maybe_self = PyStackRef_FromPyObjectNew(self);
+ PyObject *method = ((PyMethodObject *)callable_o)->im_func;
+ func = PyStackRef_FromPyObjectNew(method);
+ /* Make sure that callable and all args are in memory */
+ args[-2] = func;
+ args[-1] = maybe_self;
+ PyStackRef_CLOSE(callable);
+ }
+ else {
+ func = callable;
+ maybe_self = self_or_null;
+ }
+ }
+ // _MONITOR_CALL
+ {
+ int is_meth = !PyStackRef_IsNull(maybe_self);
+ PyObject *function = PyStackRef_AsPyObjectBorrow(func);
+ PyObject *arg0;
+ if (is_meth) {
+ arg0 = PyStackRef_AsPyObjectBorrow(maybe_self);
+ }
+ else if (oparg) {
+ arg0 = PyStackRef_AsPyObjectBorrow(args[0]);
+ }
+ else {
+ arg0 = &_PyInstrumentation_MISSING;
+ }
+ int err = _Py_call_instrumentation_2args(
+ tstate, PY_MONITORING_EVENT_CALL,
+ frame, this_instr, function, arg0
+ );
+ if (err) goto error;
+ }
+ // _DO_CALL
+ self_or_null = maybe_self;
+ callable = func;
+ {
+ PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable);
+ // oparg counts all of the args, but *not* self:
+ int total_args = oparg;
+ if (!PyStackRef_IsNull(self_or_null)) {
+ args--;
+ total_args++;
+ }
+ // Check if the call can be inlined or not
+ if (Py_TYPE(callable_o) == &PyFunction_Type &&
+ tstate->interp->eval_frame == NULL &&
+ ((PyFunctionObject *)callable_o)->vectorcall == _PyFunction_Vectorcall)
+ {
+ int code_flags = ((PyCodeObject*)PyFunction_GET_CODE(callable_o))->co_flags;
+ PyObject *locals = code_flags & CO_OPTIMIZED ? NULL : Py_NewRef(PyFunction_GET_GLOBALS(callable_o));
+ _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit(
+ tstate, (PyFunctionObject *)PyStackRef_AsPyObjectSteal(callable), locals,
+ args, total_args, NULL
+ );
+ // Manipulate stack directly since we leave using DISPATCH_INLINED().
+ STACK_SHRINK(oparg + 2);
+ // The frame has stolen all the arguments from the stack,
+ // so there is no need to clean them up.
+ if (new_frame == NULL) {
+ goto error;
+ }
+ frame->return_offset = (uint16_t)(next_instr - this_instr);
+ DISPATCH_INLINED(new_frame);
+ }
+ /* Callable is not a normal Python function */
+ STACKREFS_TO_PYOBJECTS(args, total_args, args_o);
+ if (CONVERSION_FAILED(args_o)) {
+ PyStackRef_CLOSE(callable);
+ PyStackRef_CLOSE(self_or_null);
+ for (int _i = oparg; --_i >= 0;) {
+ PyStackRef_CLOSE(args[_i]);
+ }
+ if (true) { stack_pointer += -2 - oparg; goto error; }
+ }
+ PyObject *res_o = PyObject_Vectorcall(
+ callable_o, args_o,
+ total_args | PY_VECTORCALL_ARGUMENTS_OFFSET,
+ NULL);
+ STACKREFS_TO_PYOBJECTS_CLEANUP(args_o);
+ if (opcode == INSTRUMENTED_CALL) {
+ PyObject *arg = total_args == 0 ?
+ &_PyInstrumentation_MISSING : PyStackRef_AsPyObjectBorrow(args[0]);
+ if (res_o == NULL) {
+ _Py_call_instrumentation_exc2(
+ tstate, PY_MONITORING_EVENT_C_RAISE,
+ frame, this_instr, callable_o, arg);
+ }
+ else {
+ int err = _Py_call_instrumentation_2args(
+ tstate, PY_MONITORING_EVENT_C_RETURN,
+ frame, this_instr, callable_o, arg);
+ if (err < 0) {
+ Py_CLEAR(res_o);
+ }
+ }
+ }
+ assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
+ PyStackRef_CLOSE(callable);
+ for (int i = 0; i < total_args; i++) {
+ PyStackRef_CLOSE(args[i]);
+ }
+ if (res_o == NULL) { stack_pointer += -2 - oparg; goto error; }
+ res = PyStackRef_FromPyObjectSteal(res_o);
+ }
+ // _CHECK_PERIODIC
+ {
+ }
+ stack_pointer[-2 - oparg] = res;
+ stack_pointer += -1 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ CHECK_EVAL_BREAKER();
+ DISPATCH();
}
TARGET(INSTRUMENTED_CALL_FUNCTION_EX) {
diff --git a/Python/opcode_targets.h b/Python/opcode_targets.h
index 224aeb834935eb..6b5f231e13d15a 100644
--- a/Python/opcode_targets.h
+++ b/Python/opcode_targets.h
@@ -240,7 +240,6 @@ static void *opcode_targets[256] = {
&&TARGET_INSTRUMENTED_END_SEND,
&&TARGET_INSTRUMENTED_LOAD_SUPER_ATTR,
&&TARGET_INSTRUMENTED_FOR_ITER,
- &&TARGET_INSTRUMENTED_CALL,
&&TARGET_INSTRUMENTED_CALL_KW,
&&TARGET_INSTRUMENTED_CALL_FUNCTION_EX,
&&TARGET_INSTRUMENTED_INSTRUCTION,
@@ -253,6 +252,7 @@ static void *opcode_targets[256] = {
&&TARGET_INSTRUMENTED_RETURN_VALUE,
&&TARGET_INSTRUMENTED_RETURN_CONST,
&&TARGET_INSTRUMENTED_YIELD_VALUE,
+ &&TARGET_INSTRUMENTED_CALL,
&&TARGET_INSTRUMENTED_LINE,
&&_unknown_opcode,
};
diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h
index 3c9e6d3043cde1..166b1674bc3334 100644
--- a/Python/optimizer_cases.c.h
+++ b/Python/optimizer_cases.c.h
@@ -1598,14 +1598,28 @@
break;
}
- /* _INSTRUMENTED_CALL is not a viable micro-op for tier 2 */
+ case _MAYBE_EXPAND_METHOD: {
+ _Py_UopsSymbol *func;
+ _Py_UopsSymbol *maybe_self;
+ _Py_UopsSymbol **args;
+ func = sym_new_not_null(ctx);
+ maybe_self = sym_new_not_null(ctx);
+ for (int _i = oparg; --_i >= 0;) {
+ args[_i] = sym_new_not_null(ctx);
+ }
+ stack_pointer[-2 - oparg] = func;
+ stack_pointer[-1 - oparg] = maybe_self;
+ break;
+ }
- /* _CALL is not a viable micro-op for tier 2 */
+ /* _DO_CALL is not a viable micro-op for tier 2 */
case _CHECK_PERIODIC: {
break;
}
+ /* _MONITOR_CALL is not a viable micro-op for tier 2 */
+
case _PY_FRAME_GENERAL: {
_Py_UopsSymbol **args;
_Py_UopsSymbol *self_or_null;
From 2c42e13e80610a9dedcb15b57d142602e8143481 Mon Sep 17 00:00:00 2001
From: Mark Shannon
Date: Fri, 26 Jul 2024 14:37:35 +0100
Subject: [PATCH 034/139] GH-116090: Fix test and clarify behavior for
exception events when exhausting a generator. (GH-120697)
---
Doc/library/sys.monitoring.rst | 4 ++++
Lib/test/test_monitoring.py | 35 ++++++++++++++++++++++++++++------
2 files changed, 33 insertions(+), 6 deletions(-)
diff --git a/Doc/library/sys.monitoring.rst b/Doc/library/sys.monitoring.rst
index 0fa06da522049f..3ead20815fa30e 100644
--- a/Doc/library/sys.monitoring.rst
+++ b/Doc/library/sys.monitoring.rst
@@ -226,6 +226,10 @@ To allow tools to monitor for real exceptions without slowing down generators
and coroutines, the :monitoring-event:`STOP_ITERATION` event is provided.
:monitoring-event:`STOP_ITERATION` can be locally disabled, unlike :monitoring-event:`RAISE`.
+Note that the :monitoring-event:`STOP_ITERATION` event and the :monitoring-event:`RAISE`
+event for a :exc:`StopIteration` exception are equivalent, and are treated as interchangeable
+when generating events. Implementations will favor :monitoring-event:`STOP_ITERATION` for
+performance reasons, but may generate a :monitoring-event:`RAISE` event with a :exc:`StopIteration`.
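
Given the note above, a tool that cares about generator exhaustion should listen for both events and treat them the same. A minimal, hedged sketch (the tool id choice is illustrative):

    import sys

    mon = sys.monitoring
    TOOL = mon.DEBUGGER_ID
    mon.use_tool_id(TOOL, "stopiteration-demo")

    seen = []

    def on_raise(code, offset, exc):
        if isinstance(exc, StopIteration):
            seen.append("RAISE")

    def on_stop_iteration(code, offset, exc):
        seen.append("STOP_ITERATION")

    mon.register_callback(TOOL, mon.events.RAISE, on_raise)
    mon.register_callback(TOOL, mon.events.STOP_ITERATION, on_stop_iteration)
    mon.set_events(TOOL, mon.events.RAISE | mon.events.STOP_ITERATION)

    for _ in (x for x in range(3)):   # exhaust a generator
        pass

    mon.set_events(TOOL, 0)
    mon.free_tool_id(TOOL)
    print(seen)   # entries may arrive via either event, depending on the path taken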
Turning events on and off
-------------------------
diff --git a/Lib/test/test_monitoring.py b/Lib/test/test_monitoring.py
index 1a129b9432e72d..d7043cd4866a1c 100644
--- a/Lib/test/test_monitoring.py
+++ b/Lib/test/test_monitoring.py
@@ -832,20 +832,43 @@ def func1():
self.check_events(func1, [("raise", KeyError)])
- # gh-116090: This test doesn't really require specialization, but running
- # it without specialization exposes a monitoring bug.
- @requires_specialization
def test_implicit_stop_iteration(self):
+ """Generators are documented as raising a StopIteration
+ when they terminate.
+ However, we don't do that if we can avoid it, for speed.
+ sys.monitoring handles that by injecting a STOP_ITERATION
+ event when we would otherwise have skipped the RAISE event.
+ This test checks that both paths record an equivalent event.
+ """
def gen():
yield 1
return 2
- def implicit_stop_iteration():
- for _ in gen():
+ def implicit_stop_iteration(iterator=None):
+ if iterator is None:
+ iterator = gen()
+ for _ in iterator:
pass
- self.check_events(implicit_stop_iteration, [("raise", StopIteration)], recorders=(StopiterationRecorder,))
+ recorders=(ExceptionRecorder, StopiterationRecorder,)
+ expected = [("raise", StopIteration)]
+
+ # Make sure that the loop is unspecialized, and that it will not
+ # re-specialize immediately, so that we can test the
+ # unspecialized version of the loop first.
+ # Note: this assumes that we don't specialize loops over sets.
+ implicit_stop_iteration(set(range(100)))
+
+ # This will record a RAISE event for the StopIteration.
+ self.check_events(implicit_stop_iteration, expected, recorders=recorders)
+
+ # Now specialize, so that we see a STOP_ITERATION event.
+ for _ in range(100):
+ implicit_stop_iteration()
+
+ # This will record a STOP_ITERATION event for the StopIteration.
+ self.check_events(implicit_stop_iteration, expected, recorders=recorders)
initial = [
("raise", ZeroDivisionError),
From bc94cf7e254e43318223553a7959115573c679a5 Mon Sep 17 00:00:00 2001
From: Irit Katriel <1055913+iritkatriel@users.noreply.github.com>
Date: Fri, 26 Jul 2024 14:39:56 +0100
Subject: [PATCH 035/139] gh-122245: move checks for writes and shadowing of
__debug__ to symtable (#122246)
---
Doc/whatsnew/3.14.rst | 5 +
Lib/test/test_syntax.py | 79 ++++++++++++++++
...-07-24-22-39-07.gh-issue-122245.LVa9v8.rst | 4 +
Python/compile.c | 76 ---------------
Python/symtable.c | 92 +++++++++++++++++--
5 files changed, 173 insertions(+), 83 deletions(-)
create mode 100644 Misc/NEWS.d/next/Core and Builtins/2024-07-24-22-39-07.gh-issue-122245.LVa9v8.rst
diff --git a/Doc/whatsnew/3.14.rst b/Doc/whatsnew/3.14.rst
index bd8bdcb6732fde..d2ba7ada76733a 100644
--- a/Doc/whatsnew/3.14.rst
+++ b/Doc/whatsnew/3.14.rst
@@ -80,6 +80,11 @@ Other Language Changes
command line option. For example, ``python -O -c 'assert await 1'``
now produces a :exc:`SyntaxError`. (Contributed by Jelle Zijlstra in :gh:`121637`.)
+* Writes to ``__debug__`` are now detected even if the code is optimized
+ away by the :option:`-O` command line option. For example,
+ ``python -O -c 'assert (__debug__ := 1)'`` now produces a
+ :exc:`SyntaxError`. (Contributed by Irit Katriel in :gh:`122245`.)
+
* Added class methods :meth:`float.from_number` and :meth:`complex.from_number`
to convert a number to :class:`float` or :class:`complex` type correspondingly.
They raise an error if the argument is a string.
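
A quick, hedged way to observe the ``__debug__`` change described above (the subprocess invocation mirrors the example given in that entry):

    import subprocess
    import sys

    proc = subprocess.run(
        [sys.executable, "-O", "-c", "assert (__debug__ := 1)"],
        capture_output=True,
        text=True,
    )
    print(proc.returncode)                        # non-zero: compilation failed
    print(proc.stderr.strip().splitlines()[-1])   # SyntaxError: cannot assign to __debug__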
diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py
index cdeb26adf34d89..4421d03a6d2206 100644
--- a/Lib/test/test_syntax.py
+++ b/Lib/test/test_syntax.py
@@ -59,6 +59,18 @@
Traceback (most recent call last):
SyntaxError: cannot assign to __debug__
+>>> def __debug__(): pass
+Traceback (most recent call last):
+SyntaxError: cannot assign to __debug__
+
+>>> async def __debug__(): pass
+Traceback (most recent call last):
+SyntaxError: cannot assign to __debug__
+
+>>> class __debug__: pass
+Traceback (most recent call last):
+SyntaxError: cannot assign to __debug__
+
>>> del __debug__
Traceback (most recent call last):
SyntaxError: cannot delete __debug__
@@ -786,6 +798,9 @@
>>> __debug__: int
Traceback (most recent call last):
SyntaxError: cannot assign to __debug__
+>>> x.__debug__: int
+Traceback (most recent call last):
+SyntaxError: cannot assign to __debug__
>>> f(a=)
Traceback (most recent call last):
SyntaxError: expected argument value expression
@@ -1182,6 +1197,24 @@
Traceback (most recent call last):
SyntaxError: expected ':'
+ >>> match x:
+ ... case a, __debug__, b:
+ ... pass
+ Traceback (most recent call last):
+ SyntaxError: cannot assign to __debug__
+
+ >>> match x:
+ ... case a, b, *__debug__:
+ ... pass
+ Traceback (most recent call last):
+ SyntaxError: cannot assign to __debug__
+
+ >>> match x:
+ ... case Foo(a, __debug__=1, b=2):
+ ... pass
+ Traceback (most recent call last):
+ SyntaxError: cannot assign to __debug__
+
>>> if x = 3:
... pass
Traceback (most recent call last):
@@ -1275,6 +1308,15 @@
Traceback (most recent call last):
SyntaxError: expected 'except' or 'finally' block
+Custom error message for __debug__ as exception variable
+
+ >>> try:
+ ... pass
+ ... except TypeError as __debug__:
+ ... pass
+ Traceback (most recent call last):
+ SyntaxError: cannot assign to __debug__
+
Custom error message for try block mixing except and except*
>>> try:
@@ -1522,6 +1564,19 @@
Traceback (most recent call last):
IndentationError: expected an indented block after class definition on line 1
+ >>> class C(__debug__=42): ...
+ Traceback (most recent call last):
+ SyntaxError: cannot assign to __debug__
+
+ >>> class Meta(type):
+ ... def __new__(*args, **kwargs):
+ ... pass
+
+ >>> class C(metaclass=Meta, __debug__=42):
+ ... pass
+ Traceback (most recent call last):
+ SyntaxError: cannot assign to __debug__
+
>>> match something:
... pass
Traceback (most recent call last):
@@ -1708,6 +1763,26 @@
Traceback (most recent call last):
SyntaxError: Did you mean to use 'from ... import ...' instead?
+>>> import __debug__
+Traceback (most recent call last):
+SyntaxError: cannot assign to __debug__
+
+>>> import a as __debug__
+Traceback (most recent call last):
+SyntaxError: cannot assign to __debug__
+
+>>> import a.b.c as __debug__
+Traceback (most recent call last):
+SyntaxError: cannot assign to __debug__
+
+>>> from a import __debug__
+Traceback (most recent call last):
+SyntaxError: cannot assign to __debug__
+
+>>> from a import b as __debug__
+Traceback (most recent call last):
+SyntaxError: cannot assign to __debug__
+
# Check that we dont raise the "trailing comma" error if there is more
# input to the left of the valid part that we parsed.
@@ -2186,6 +2261,10 @@ def f(x: *b)
...
SyntaxError: yield expression cannot be used within a type alias
+ >>> type __debug__ = int
+ Traceback (most recent call last):
+ SyntaxError: cannot assign to __debug__
+
>>> class A[T]((x := 3)): ...
Traceback (most recent call last):
...
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-07-24-22-39-07.gh-issue-122245.LVa9v8.rst b/Misc/NEWS.d/next/Core and Builtins/2024-07-24-22-39-07.gh-issue-122245.LVa9v8.rst
new file mode 100644
index 00000000000000..453c45e2f7ae3f
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-07-24-22-39-07.gh-issue-122245.LVa9v8.rst
@@ -0,0 +1,4 @@
+Detection of writes to ``__debug__`` is moved from the compiler's codegen
+stage to the symtable. This means that these errors are now detected even in
+code that is optimized away before codegen (such as assertions with the
+:option:`-O` command line option).
diff --git a/Python/compile.c b/Python/compile.c
index 9707759c99c943..d07a435bdf8dac 100644
--- a/Python/compile.c
+++ b/Python/compile.c
@@ -1954,55 +1954,6 @@ compiler_default_arguments(struct compiler *c, location loc,
return funcflags;
}
-static bool
-forbidden_name(struct compiler *c, location loc, identifier name,
- expr_context_ty ctx)
-{
- if (ctx == Store && _PyUnicode_EqualToASCIIString(name, "__debug__")) {
- compiler_error(c, loc, "cannot assign to __debug__");
- return true;
- }
- if (ctx == Del && _PyUnicode_EqualToASCIIString(name, "__debug__")) {
- compiler_error(c, loc, "cannot delete __debug__");
- return true;
- }
- return false;
-}
-
-static int
-compiler_check_debug_one_arg(struct compiler *c, arg_ty arg)
-{
- if (arg != NULL) {
- if (forbidden_name(c, LOC(arg), arg->arg, Store)) {
- return ERROR;
- }
- }
- return SUCCESS;
-}
-
-static int
-compiler_check_debug_args_seq(struct compiler *c, asdl_arg_seq *args)
-{
- if (args != NULL) {
- for (Py_ssize_t i = 0, n = asdl_seq_LEN(args); i < n; i++) {
- RETURN_IF_ERROR(
- compiler_check_debug_one_arg(c, asdl_seq_GET(args, i)));
- }
- }
- return SUCCESS;
-}
-
-static int
-compiler_check_debug_args(struct compiler *c, arguments_ty args)
-{
- RETURN_IF_ERROR(compiler_check_debug_args_seq(c, args->posonlyargs));
- RETURN_IF_ERROR(compiler_check_debug_args_seq(c, args->args));
- RETURN_IF_ERROR(compiler_check_debug_one_arg(c, args->vararg));
- RETURN_IF_ERROR(compiler_check_debug_args_seq(c, args->kwonlyargs));
- RETURN_IF_ERROR(compiler_check_debug_one_arg(c, args->kwarg));
- return SUCCESS;
-}
-
static int
wrap_in_stopiteration_handler(struct compiler *c)
{
@@ -2267,7 +2218,6 @@ compiler_function(struct compiler *c, stmt_ty s, int is_async)
type_params = s->v.FunctionDef.type_params;
}
- RETURN_IF_ERROR(compiler_check_debug_args(c, args));
RETURN_IF_ERROR(compiler_decorators(c, decos));
firstlineno = s->lineno;
@@ -2910,8 +2860,6 @@ compiler_lambda(struct compiler *c, expr_ty e)
arguments_ty args = e->v.Lambda.args;
assert(e->kind == Lambda_kind);
- RETURN_IF_ERROR(compiler_check_debug_args(c, args));
-
location loc = LOC(e);
funcflags = compiler_default_arguments(c, loc, args);
if (funcflags == -1) {
@@ -4086,10 +4034,6 @@ compiler_nameop(struct compiler *c, location loc,
!_PyUnicode_EqualToASCIIString(name, "True") &&
!_PyUnicode_EqualToASCIIString(name, "False"));
- if (forbidden_name(c, loc, name, ctx)) {
- return ERROR;
- }
-
mangled = compiler_maybe_mangle(c, name);
if (!mangled) {
return ERROR;
@@ -4878,10 +4822,6 @@ validate_keywords(struct compiler *c, asdl_keyword_seq *keywords)
if (key->arg == NULL) {
continue;
}
- location loc = LOC(key);
- if (forbidden_name(c, loc, key->arg, Store)) {
- return ERROR;
- }
for (Py_ssize_t j = i + 1; j < nkeywords; j++) {
keyword_ty other = ((keyword_ty)asdl_seq_GET(keywords, j));
if (other->arg && !PyUnicode_Compare(key->arg, other->arg)) {
@@ -6135,9 +6075,6 @@ compiler_visit_expr(struct compiler *c, expr_ty e)
ADDOP_NAME(c, loc, LOAD_ATTR, e->v.Attribute.attr, names);
break;
case Store:
- if (forbidden_name(c, loc, e->v.Attribute.attr, e->v.Attribute.ctx)) {
- return ERROR;
- }
ADDOP_NAME(c, loc, STORE_ATTR, e->v.Attribute.attr, names);
break;
case Del:
@@ -6331,9 +6268,6 @@ compiler_annassign(struct compiler *c, stmt_ty s)
}
switch (targ->kind) {
case Name_kind:
- if (forbidden_name(c, loc, targ->v.Name.id, Store)) {
- return ERROR;
- }
/* If we have a simple name in a module or class, store annotation. */
if (s->v.AnnAssign.simple &&
(c->u->u_scope_type == COMPILER_SCOPE_MODULE ||
@@ -6365,9 +6299,6 @@ compiler_annassign(struct compiler *c, stmt_ty s)
}
break;
case Attribute_kind:
- if (forbidden_name(c, loc, targ->v.Attribute.attr, Store)) {
- return ERROR;
- }
if (!s->v.AnnAssign.value &&
check_ann_expr(c, targ->v.Attribute.value) < 0) {
return ERROR;
@@ -6631,9 +6562,6 @@ pattern_helper_store_name(struct compiler *c, location loc,
ADDOP(c, loc, POP_TOP);
return SUCCESS;
}
- if (forbidden_name(c, loc, n, Store)) {
- return ERROR;
- }
// Can't assign to the same name twice:
int duplicate = PySequence_Contains(pc->stores, n);
RETURN_IF_ERROR(duplicate);
@@ -6791,10 +6719,6 @@ validate_kwd_attrs(struct compiler *c, asdl_identifier_seq *attrs, asdl_pattern_
Py_ssize_t nattrs = asdl_seq_LEN(attrs);
for (Py_ssize_t i = 0; i < nattrs; i++) {
identifier attr = ((identifier)asdl_seq_GET(attrs, i));
- location loc = LOC((pattern_ty) asdl_seq_GET(patterns, i));
- if (forbidden_name(c, loc, attr, Store)) {
- return ERROR;
- }
for (Py_ssize_t j = i + 1; j < nattrs; j++) {
identifier other = ((identifier)asdl_seq_GET(attrs, j));
if (!PyUnicode_Compare(attr, other)) {
diff --git a/Python/symtable.c b/Python/symtable.c
index c4508cac7f5928..a5fa7588785d8b 100644
--- a/Python/symtable.c
+++ b/Python/symtable.c
@@ -1495,8 +1495,57 @@ symtable_add_def_helper(struct symtable *st, PyObject *name, int flag, struct _s
}
static int
-symtable_add_def(struct symtable *st, PyObject *name, int flag, _Py_SourceLocation loc)
+check_name(struct symtable *st, PyObject *name, _Py_SourceLocation loc,
+ expr_context_ty ctx)
{
+ if (ctx == Store && _PyUnicode_EqualToASCIIString(name, "__debug__")) {
+ PyErr_SetString(PyExc_SyntaxError, "cannot assign to __debug__");
+ SET_ERROR_LOCATION(st->st_filename, loc);
+ return 0;
+ }
+ if (ctx == Del && _PyUnicode_EqualToASCIIString(name, "__debug__")) {
+ PyErr_SetString(PyExc_SyntaxError, "cannot delete __debug__");
+ SET_ERROR_LOCATION(st->st_filename, loc);
+ return 0;
+ }
+ return 1;
+}
+
+static int
+check_keywords(struct symtable *st, asdl_keyword_seq *keywords)
+{
+ for (Py_ssize_t i = 0; i < asdl_seq_LEN(keywords); i++) {
+ keyword_ty key = ((keyword_ty)asdl_seq_GET(keywords, i));
+ if (key->arg && !check_name(st, key->arg, LOCATION(key), Store)) {
+ return 0;
+ }
+ }
+ return 1;
+}
+
+static int
+check_kwd_patterns(struct symtable *st, pattern_ty p)
+{
+ assert(p->kind == MatchClass_kind);
+ asdl_identifier_seq *kwd_attrs = p->v.MatchClass.kwd_attrs;
+ asdl_pattern_seq *kwd_patterns = p->v.MatchClass.kwd_patterns;
+ for (Py_ssize_t i = 0; i < asdl_seq_LEN(kwd_attrs); i++) {
+ _Py_SourceLocation loc = LOCATION(asdl_seq_GET(kwd_patterns, i));
+ if (!check_name(st, asdl_seq_GET(kwd_attrs, i), loc, Store)) {
+ return 0;
+ }
+ }
+ return 1;
+}
+
+static int
+symtable_add_def_ctx(struct symtable *st, PyObject *name, int flag,
+ _Py_SourceLocation loc, expr_context_ty ctx)
+{
+ int write_mask = DEF_PARAM | DEF_LOCAL | DEF_IMPORT;
+ if ((flag & write_mask) && !check_name(st, name, loc, ctx)) {
+ return 0;
+ }
if ((flag & DEF_TYPE_PARAM) && st->st_cur->ste_mangled_names != NULL) {
if(PySet_Add(st->st_cur->ste_mangled_names, name) < 0) {
return 0;
@@ -1505,6 +1554,14 @@ symtable_add_def(struct symtable *st, PyObject *name, int flag, _Py_SourceLocati
return symtable_add_def_helper(st, name, flag, st->st_cur, loc);
}
+static int
+symtable_add_def(struct symtable *st, PyObject *name, int flag,
+ _Py_SourceLocation loc)
+{
+ return symtable_add_def_ctx(st, name, flag, loc,
+ flag == USE ? Load : Store);
+}
+
static int
symtable_enter_type_param_block(struct symtable *st, identifier name,
void *ast, int has_defaults, int has_kwdefaults,
@@ -1757,6 +1814,9 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s)
VISIT_SEQ(st, type_param, s->v.ClassDef.type_params);
}
VISIT_SEQ(st, expr, s->v.ClassDef.bases);
+ if (!check_keywords(st, s->v.ClassDef.keywords)) {
+ VISIT_QUIT(st, 0);
+ }
VISIT_SEQ(st, keyword, s->v.ClassDef.keywords);
if (!symtable_enter_block(st, s->v.ClassDef.name, ClassBlock,
(void *)s, LOCATION(s))) {
@@ -1871,10 +1931,11 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s)
VISIT(st, expr, s->v.AnnAssign.value);
}
break;
- case AugAssign_kind:
+ case AugAssign_kind: {
VISIT(st, expr, s->v.AugAssign.target);
VISIT(st, expr, s->v.AugAssign.value);
break;
+ }
case For_kind:
VISIT(st, expr, s->v.For.target);
VISIT(st, expr, s->v.For.iter);
@@ -2311,6 +2372,9 @@ symtable_visit_expr(struct symtable *st, expr_ty e)
case Call_kind:
VISIT(st, expr, e->v.Call.func);
VISIT_SEQ(st, expr, e->v.Call.args);
+ if (!check_keywords(st, e->v.Call.keywords)) {
+ VISIT_QUIT(st, 0);
+ }
VISIT_SEQ_WITH_NULL(st, keyword, e->v.Call.keywords);
break;
case FormattedValue_kind:
@@ -2326,6 +2390,9 @@ symtable_visit_expr(struct symtable *st, expr_ty e)
break;
/* The following exprs can be assignment targets. */
case Attribute_kind:
+ if (!check_name(st, e->v.Attribute.attr, LOCATION(e), e->v.Attribute.ctx)) {
+ VISIT_QUIT(st, 0);
+ }
VISIT(st, expr, e->v.Attribute.value);
break;
case Subscript_kind:
@@ -2344,9 +2411,11 @@ symtable_visit_expr(struct symtable *st, expr_ty e)
VISIT(st, expr, e->v.Slice.step);
break;
case Name_kind:
- if (!symtable_add_def(st, e->v.Name.id,
- e->v.Name.ctx == Load ? USE : DEF_LOCAL, LOCATION(e)))
+ if (!symtable_add_def_ctx(st, e->v.Name.id,
+ e->v.Name.ctx == Load ? USE : DEF_LOCAL,
+ LOCATION(e), e->v.Name.ctx)) {
VISIT_QUIT(st, 0);
+ }
/* Special-case super: it counts as a use of __class__ */
if (e->v.Name.ctx == Load &&
_PyST_IsFunctionLike(st->st_cur) &&
@@ -2472,19 +2541,26 @@ symtable_visit_pattern(struct symtable *st, pattern_ty p)
break;
case MatchStar_kind:
if (p->v.MatchStar.name) {
- symtable_add_def(st, p->v.MatchStar.name, DEF_LOCAL, LOCATION(p));
+ if (!symtable_add_def(st, p->v.MatchStar.name, DEF_LOCAL, LOCATION(p))) {
+ VISIT_QUIT(st, 0);
+ }
}
break;
case MatchMapping_kind:
VISIT_SEQ(st, expr, p->v.MatchMapping.keys);
VISIT_SEQ(st, pattern, p->v.MatchMapping.patterns);
if (p->v.MatchMapping.rest) {
- symtable_add_def(st, p->v.MatchMapping.rest, DEF_LOCAL, LOCATION(p));
+ if (!symtable_add_def(st, p->v.MatchMapping.rest, DEF_LOCAL, LOCATION(p))) {
+ VISIT_QUIT(st, 0);
+ }
}
break;
case MatchClass_kind:
VISIT(st, expr, p->v.MatchClass.cls);
VISIT_SEQ(st, pattern, p->v.MatchClass.patterns);
+ if (!check_kwd_patterns(st, p)) {
+ VISIT_QUIT(st, 0);
+ }
VISIT_SEQ(st, pattern, p->v.MatchClass.kwd_patterns);
break;
case MatchAs_kind:
@@ -2492,7 +2568,9 @@ symtable_visit_pattern(struct symtable *st, pattern_ty p)
VISIT(st, pattern, p->v.MatchAs.pattern);
}
if (p->v.MatchAs.name) {
- symtable_add_def(st, p->v.MatchAs.name, DEF_LOCAL, LOCATION(p));
+ if (!symtable_add_def(st, p->v.MatchAs.name, DEF_LOCAL, LOCATION(p))) {
+ VISIT_QUIT(st, 0);
+ }
}
break;
case MatchOr_kind:
From dcafb362f7eab84710ad924cac1724bbf3b9c304 Mon Sep 17 00:00:00 2001
From: WilliamRoyNelson
Date: Fri, 26 Jul 2024 07:34:13 -0700
Subject: [PATCH 036/139] gh-121999: Change default tarfile filter to 'data'
(GH-122002)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Co-authored-by: Tomas R
Co-authored-by: Scott Odle
Co-authored-by: Bénédikt Tran <10796600+picnixz@users.noreply.github.com>
Co-authored-by: Petr Viktorin
---
Doc/library/shutil.rst | 12 ++-
Doc/library/tarfile.rst | 75 ++++++++++++-------
Lib/tarfile.py | 8 +-
Lib/test/test_shutil.py | 3 -
Lib/test/test_tarfile.py | 52 +++++--------
...-07-18-21-19-04.gh-issue-121999.8IBbTK.rst | 2 +
6 files changed, 76 insertions(+), 76 deletions(-)
create mode 100644 Misc/NEWS.d/next/Core_and_Builtins/2024-07-18-21-19-04.gh-issue-121999.8IBbTK.rst
diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst
index fd32479195eca8..220207e5f3cbbf 100644
--- a/Doc/library/shutil.rst
+++ b/Doc/library/shutil.rst
@@ -706,11 +706,9 @@ provided. They rely on the :mod:`zipfile` and :mod:`tarfile` modules.
The keyword-only *filter* argument is passed to the underlying unpacking
function. For zip files, *filter* is not accepted.
- For tar files, it is recommended to set it to ``'data'``,
- unless using features specific to tar and UNIX-like filesystems.
+ For tar files, it is recommended to use ``'data'`` (default since Python
+ 3.14), unless using features specific to tar and UNIX-like filesystems.
(See :ref:`tarfile-extraction-filter` for details.)
- The ``'data'`` filter will become the default for tar files
- in Python 3.14.
.. audit-event:: shutil.unpack_archive filename,extract_dir,format shutil.unpack_archive
@@ -721,6 +719,12 @@ provided. They rely on the :mod:`zipfile` and :mod:`tarfile` modules.
the *extract_dir* argument, e.g. members that have absolute filenames
starting with "/" or filenames with two dots "..".
+ Since Python 3.14, the defaults for both built-in formats (zip and tar
+ files) will prevent the most dangerous of such security issues,
+ but will not prevent *all* unintended behavior.
+ Read the :ref:`tarfile-further-verification`
+ section for tar-specific details.
+
.. versionchanged:: 3.7
Accepts a :term:`path-like object` for *filename* and *extract_dir*.
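
A minimal usage sketch matching the recommendation above; the archive and directory names are illustrative:

    import shutil

    # Passing filter='data' explicitly matches the Python 3.14 default and keeps
    # the same behavior on 3.12/3.13, where the default is still fully trusted.
    shutil.unpack_archive("release.tar.gz", "dest/", filter="data")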
diff --git a/Doc/library/tarfile.rst b/Doc/library/tarfile.rst
index 5b624f3533136f..631d869e42d09d 100644
--- a/Doc/library/tarfile.rst
+++ b/Doc/library/tarfile.rst
@@ -40,9 +40,12 @@ Some facts and figures:
Archives are extracted using a :ref:`filter <tarfile-extraction-filter>`,
which makes it possible to either limit surprising/dangerous features,
or to acknowledge that they are expected and the archive is fully trusted.
- By default, archives are fully trusted, but this default is deprecated
- and slated to change in Python 3.14.
+.. versionchanged:: 3.14
+ Set the default extraction filter to :func:`data <data_filter>`,
+ which disallows some dangerous features such as links to absolute paths
+ or paths outside of the destination. Previously, the filter strategy
+ was equivalent to :func:`fully_trusted <fully_trusted_filter>`.
.. function:: open(name=None, mode='r', fileobj=None, bufsize=10240, **kwargs)
@@ -495,18 +498,18 @@ be finalized; only the internally used file object will be closed. See the
The *filter* argument specifies how ``members`` are modified or rejected
before extraction.
See :ref:`tarfile-extraction-filter` for details.
- It is recommended to set this explicitly depending on which *tar* features
- you need to support.
+ It is recommended to set this explicitly only if specific *tar* features
+ are required, or as ``filter='data'`` to support Python versions with a less
+ secure default (3.13 and lower).
.. warning::
Never extract archives from untrusted sources without prior inspection.
- It is possible that files are created outside of *path*, e.g. members
- that have absolute filenames starting with ``"/"`` or filenames with two
- dots ``".."``.
- Set ``filter='data'`` to prevent the most dangerous security issues,
- and read the :ref:`tarfile-extraction-filter` section for details.
+ Since Python 3.14, the default (:func:`data <data_filter>`) will prevent
+ the most dangerous security issues.
+ However, it will not prevent *all* unintended or insecure behavior.
+ Read the :ref:`tarfile-extraction-filter` section for details.
.. versionchanged:: 3.5
Added the *numeric_owner* parameter.
@@ -517,6 +520,9 @@ be finalized; only the internally used file object will be closed. See the
.. versionchanged:: 3.12
Added the *filter* parameter.
+ .. versionchanged:: 3.14
+ The *filter* parameter now defaults to ``'data'``.
+
.. method:: TarFile.extract(member, path="", set_attrs=True, *, numeric_owner=False, filter=None)
@@ -536,10 +542,8 @@ be finalized; only the internally used file object will be closed. See the
.. warning::
- See the warning for :meth:`extractall`.
-
- Set ``filter='data'`` to prevent the most dangerous security issues,
- and read the :ref:`tarfile-extraction-filter` section for details.
+ Never extract archives from untrusted sources without prior inspection.
+ See the warning for :meth:`extractall` for details.
.. versionchanged:: 3.2
Added the *set_attrs* parameter.
@@ -602,14 +606,8 @@ be finalized; only the internally used file object will be closed. See the
String names are not allowed for this attribute, unlike the *filter*
argument to :meth:`~TarFile.extract`.
- If ``extraction_filter`` is ``None`` (the default),
- calling an extraction method without a *filter* argument will raise a
- ``DeprecationWarning``,
- and fall back to the :func:`fully_trusted <fully_trusted_filter>` filter,
- whose dangerous behavior matches previous versions of Python.
-
- In Python 3.14+, leaving ``extraction_filter=None`` will cause
- extraction methods to use the :func:`data <data_filter>` filter by default.
+ If ``extraction_filter`` is ``None`` (the default), extraction methods
+ will use the :func:`data <data_filter>` filter by default.
The attribute may be set on instances or overridden in subclasses.
It also is possible to set it on the ``TarFile`` class itself to set a
@@ -619,6 +617,14 @@ be finalized; only the internally used file object will be closed. See the
To set a global default this way, a filter function needs to be wrapped in
:func:`staticmethod()` to prevent injection of a ``self`` argument.
+ .. versionchanged:: 3.14
+
+ The default filter is set to :func:`data <data_filter>`,
+ which disallows some dangerous features such as links to absolute paths
+ or paths outside of the destination.
+ Previously, the default was equivalent to
+ :func:`fully_trusted <fully_trusted_filter>`.
+
.. method:: TarFile.add(name, arcname=None, recursive=True, *, filter=None)
Add the file *name* to the archive. *name* may be any type of file
@@ -969,6 +975,12 @@ In most cases, the full functionality is not needed.
Therefore, *tarfile* supports extraction filters: a mechanism to limit
functionality, and thus mitigate some of the security issues.
+.. warning::
+
+ None of the available filters blocks *all* dangerous archive features.
+ Never extract archives from untrusted sources without prior inspection.
+ See also :ref:`tarfile-further-verification`.
+
.. seealso::
:pep:`706`
@@ -992,12 +1004,13 @@ can be:
* ``None`` (default): Use :attr:`TarFile.extraction_filter`.
- If that is also ``None`` (the default), raise a ``DeprecationWarning``,
- and fall back to the ``'fully_trusted'`` filter, whose dangerous behavior
- matches previous versions of Python.
+ If that is also ``None`` (the default), the ``'data'`` filter will be used.
+
+ .. versionchanged:: 3.14
- In Python 3.14, the ``'data'`` filter will become the default instead.
- It's possible to switch earlier; see :attr:`TarFile.extraction_filter`.
+ The default filter is set to :func:`data <data_filter>`.
+ Previously, the default was equivalent to
+ :func:`fully_trusted <fully_trusted_filter>`.
* A callable which will be called for each extracted member with a
:ref:`TarInfo <tarinfo-objects>` describing the member and the destination
@@ -1080,6 +1093,9 @@ reused in custom filters:
Return the modified ``TarInfo`` member.
+ Note that this filter does not block *all* dangerous archive features.
+ See :ref:`tarfile-further-verification` for details.
+
.. _tarfile-extraction-refuse:
@@ -1093,6 +1109,8 @@ With ``errorlevel=0`` the error will be logged and the member will be skipped,
but extraction will continue.
+.. _tarfile-further-verification:
+
Hints for further verification
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1110,9 +1128,10 @@ Here is an incomplete list of things to consider:
disk, memory and CPU usage.
* Check filenames against an allow-list of characters
(to filter out control characters, confusables, foreign path separators,
- etc.).
+ and so on).
* Check that filenames have expected extensions (discouraging files that
- execute when you “click on them”, or extension-less files like Windows special device names).
+ execute when you “click on them”, or extension-less files like Windows
+ special device names).
* Limit the number of extracted files, total size of extracted data,
filename length (including symlink length), and size of individual files.
* Check for files that would be shadowed on case-insensitive filesystems.
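
A minimal usage sketch of the behavior documented above (the archive name and
destination path are placeholders); on Python 3.14+ the first call already
applies the ``'data'`` filter, while passing it explicitly also works on
3.12 and 3.13:

    import tarfile

    with tarfile.open("example.tar.gz") as tar:       # placeholder archive
        tar.extractall(path="dest")                    # 3.14+: defaults to filter='data'
        tar.extractall(path="dest", filter="data")     # explicit; also works on 3.12/3.13
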
diff --git a/Lib/tarfile.py b/Lib/tarfile.py
index d5d8a469779f50..4fa7bb6740adbb 100644
--- a/Lib/tarfile.py
+++ b/Lib/tarfile.py
@@ -2248,13 +2248,7 @@ def _get_filter_function(self, filter):
if filter is None:
filter = self.extraction_filter
if filter is None:
- import warnings
- warnings.warn(
- 'Python 3.14 will, by default, filter extracted tar '
- + 'archives and reject files or modify their metadata. '
- + 'Use the filter argument to control this behavior.',
- DeprecationWarning, stacklevel=3)
- return fully_trusted_filter
+ return data_filter
if isinstance(filter, str):
raise TypeError(
'String names are not supported for '
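
As the documentation above notes, a process-wide default can also be set on
the ``TarFile`` class itself; a small sketch (the choice of ``tar_filter``
here is only an example):

    import tarfile

    # Wrap the filter in staticmethod() to prevent injection of a ``self`` argument.
    tarfile.TarFile.extraction_filter = staticmethod(tarfile.tar_filter)
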
diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py
index c458c5df32572b..c770be21b41c2b 100644
--- a/Lib/test/test_shutil.py
+++ b/Lib/test/test_shutil.py
@@ -2145,9 +2145,6 @@ def check_unpack_archive_with_converter(self, format, converter, **kwargs):
def check_unpack_tarball(self, format):
self.check_unpack_archive(format, filter='fully_trusted')
self.check_unpack_archive(format, filter='data')
- with warnings_helper.check_warnings(
- ('Python 3.14', DeprecationWarning)):
- self.check_unpack_archive(format)
def test_unpack_archive_tar(self):
self.check_unpack_tarball('tar')
diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py
index f715940de1d584..5600f0746770b8 100644
--- a/Lib/test/test_tarfile.py
+++ b/Lib/test/test_tarfile.py
@@ -722,6 +722,24 @@ def format_mtime(mtime):
tar.close()
os_helper.rmtree(DIR)
+ @staticmethod
+ def test_extractall_default_filter():
+ # Test that the default filter is now "data", and the other filter types are not used.
+ DIR = pathlib.Path(TEMPDIR) / "extractall_default_filter"
+ with (
+ os_helper.temp_dir(DIR),
+ tarfile.open(tarname, encoding="iso8859-1") as tar,
+ unittest.mock.patch("tarfile.data_filter", wraps=tarfile.data_filter) as mock_data_filter,
+ unittest.mock.patch("tarfile.tar_filter", wraps=tarfile.tar_filter) as mock_tar_filter,
+ unittest.mock.patch("tarfile.fully_trusted_filter", wraps=tarfile.fully_trusted_filter) as mock_ft_filter
+ ):
+ directories = [t for t in tar if t.isdir()]
+ tar.extractall(DIR, directories)
+
+ mock_data_filter.assert_called()
+ mock_ft_filter.assert_not_called()
+ mock_tar_filter.assert_not_called()
+
@os_helper.skip_unless_working_chmod
def test_extract_directory(self):
dirtype = "ustar/dirtype"
@@ -738,31 +756,6 @@ def test_extract_directory(self):
finally:
os_helper.rmtree(DIR)
- def test_deprecation_if_no_filter_passed_to_extractall(self):
- DIR = pathlib.Path(TEMPDIR) / "extractall"
- with (
- os_helper.temp_dir(DIR),
- tarfile.open(tarname, encoding="iso8859-1") as tar
- ):
- directories = [t for t in tar if t.isdir()]
- with self.assertWarnsRegex(DeprecationWarning, "Use the filter argument") as cm:
- tar.extractall(DIR, directories)
- # check that the stacklevel of the deprecation warning is correct:
- self.assertEqual(cm.filename, __file__)
-
- def test_deprecation_if_no_filter_passed_to_extract(self):
- dirtype = "ustar/dirtype"
- DIR = pathlib.Path(TEMPDIR) / "extractall"
- with (
- os_helper.temp_dir(DIR),
- tarfile.open(tarname, encoding="iso8859-1") as tar
- ):
- tarinfo = tar.getmember(dirtype)
- with self.assertWarnsRegex(DeprecationWarning, "Use the filter argument") as cm:
- tar.extract(tarinfo, path=DIR)
- # check that the stacklevel of the deprecation warning is correct:
- self.assertEqual(cm.filename, __file__)
-
def test_extractall_pathlike_dir(self):
DIR = os.path.join(TEMPDIR, "extractall")
with os_helper.temp_dir(DIR), \
@@ -4011,15 +4004,6 @@ def test_data_filter(self):
self.assertIs(filtered.name, tarinfo.name)
self.assertIs(filtered.type, tarinfo.type)
- def test_default_filter_warns(self):
- """Ensure the default filter warns"""
- with ArchiveMaker() as arc:
- arc.add('foo')
- with warnings_helper.check_warnings(
- ('Python 3.14', DeprecationWarning)):
- with self.check_context(arc.open(), None):
- self.expect_file('foo')
-
def test_change_default_filter_on_instance(self):
tar = tarfile.TarFile(tarname, 'r')
def strict_filter(tarinfo, path):
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-07-18-21-19-04.gh-issue-121999.8IBbTK.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-07-18-21-19-04.gh-issue-121999.8IBbTK.rst
new file mode 100644
index 00000000000000..e65aa993566446
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-07-18-21-19-04.gh-issue-121999.8IBbTK.rst
@@ -0,0 +1,2 @@
+The default extraction filter for the :mod:`tarfile` module is now
+set to :func:`'data' <tarfile.data_filter>`.
From 7c2921844f9fa713f93152bf3a569812cee347a0 Mon Sep 17 00:00:00 2001
From: Serhiy Storchaka
Date: Fri, 26 Jul 2024 17:48:44 +0300
Subject: [PATCH 037/139] gh-122311: Fix typo in the pickle error formatting
code (GH-122312)
---
Lib/pickle.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Lib/pickle.py b/Lib/pickle.py
index 2d764980cdf7b2..c6c151b2065f4d 100644
--- a/Lib/pickle.py
+++ b/Lib/pickle.py
@@ -1153,7 +1153,7 @@ def _save_toplevel_by_name(self, module_name, name):
except UnicodeEncodeError:
raise PicklingError(
"can't pickle global identifier '%s.%s' using "
- "pickle protocol %i" % (module, name, self.proto)) from None
+ "pickle protocol %i" % (module_name, name, self.proto)) from None
def save_type(self, obj):
if obj is type(None):
From db2d8b6db1b56c2bd3802b86f9b76da33e8898d7 Mon Sep 17 00:00:00 2001
From: Pablo Galindo Salgado
Date: Fri, 26 Jul 2024 17:29:41 +0100
Subject: [PATCH 038/139] gh-122300: Preserve AST nodes for format specifiers
with single elements (#122308)
---
Doc/library/ast.rst | 4 +++-
Lib/test/test_ast.py | 2 +-
.../2024-07-26-14-05-51.gh-issue-122300.SVIF-l.rst | 2 ++
Parser/action_helpers.c | 3 ++-
4 files changed, 8 insertions(+), 3 deletions(-)
create mode 100644 Misc/NEWS.d/next/Core and Builtins/2024-07-26-14-05-51.gh-issue-122300.SVIF-l.rst
diff --git a/Doc/library/ast.rst b/Doc/library/ast.rst
index d05ad1e2a7854f..dd5dd5ca4e9e32 100644
--- a/Doc/library/ast.rst
+++ b/Doc/library/ast.rst
@@ -316,7 +316,9 @@ Literals
args=[
Name(id='a', ctx=Load())]),
conversion=-1,
- format_spec=Constant(value='.3'))]))
+ format_spec=JoinedStr(
+ values=[
+ Constant(value='.3')]))]))
.. class:: List(elts, ctx)
diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py
index 5144187d7c3ddd..55725ec36fd3a7 100644
--- a/Lib/test/test_ast.py
+++ b/Lib/test/test_ast.py
@@ -3638,7 +3638,7 @@ def main():
('Expression', ('Subscript', (1, 0, 1, 10), ('List', (1, 0, 1, 3), [('Constant', (1, 1, 1, 2), 5, None)], ('Load',)), ('Slice', (1, 4, 1, 9), ('Constant', (1, 4, 1, 5), 1, None), ('Constant', (1, 6, 1, 7), 1, None), ('Constant', (1, 8, 1, 9), 1, None)), ('Load',))),
('Expression', ('IfExp', (1, 0, 1, 21), ('Name', (1, 9, 1, 10), 'x', ('Load',)), ('Call', (1, 0, 1, 5), ('Name', (1, 0, 1, 3), 'foo', ('Load',)), [], []), ('Call', (1, 16, 1, 21), ('Name', (1, 16, 1, 19), 'bar', ('Load',)), [], []))),
('Expression', ('JoinedStr', (1, 0, 1, 6), [('FormattedValue', (1, 2, 1, 5), ('Name', (1, 3, 1, 4), 'a', ('Load',)), -1, None)])),
-('Expression', ('JoinedStr', (1, 0, 1, 10), [('FormattedValue', (1, 2, 1, 9), ('Name', (1, 3, 1, 4), 'a', ('Load',)), -1, ('Constant', (1, 5, 1, 8), '.2f', None))])),
+('Expression', ('JoinedStr', (1, 0, 1, 10), [('FormattedValue', (1, 2, 1, 9), ('Name', (1, 3, 1, 4), 'a', ('Load',)), -1, ('JoinedStr', (1, 4, 1, 8), [('Constant', (1, 5, 1, 8), '.2f', None)]))])),
('Expression', ('JoinedStr', (1, 0, 1, 8), [('FormattedValue', (1, 2, 1, 7), ('Name', (1, 3, 1, 4), 'a', ('Load',)), 114, None)])),
('Expression', ('JoinedStr', (1, 0, 1, 11), [('Constant', (1, 2, 1, 6), 'foo(', None), ('FormattedValue', (1, 6, 1, 9), ('Name', (1, 7, 1, 8), 'a', ('Load',)), -1, None), ('Constant', (1, 9, 1, 10), ')', None)])),
]
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-07-26-14-05-51.gh-issue-122300.SVIF-l.rst b/Misc/NEWS.d/next/Core and Builtins/2024-07-26-14-05-51.gh-issue-122300.SVIF-l.rst
new file mode 100644
index 00000000000000..6b58f89247d1d4
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-07-26-14-05-51.gh-issue-122300.SVIF-l.rst
@@ -0,0 +1,2 @@
+Preserve AST nodes for f-strings with single-element format specifiers. Patch
+by Pablo Galindo.
diff --git a/Parser/action_helpers.c b/Parser/action_helpers.c
index db6f872c7224d1..1972c606827cdb 100644
--- a/Parser/action_helpers.c
+++ b/Parser/action_helpers.c
@@ -1010,7 +1010,8 @@ _PyPegen_setup_full_format_spec(Parser *p, Token *colon, asdl_expr_seq *spec, in
spec = resized_spec;
}
expr_ty res;
- if (asdl_seq_LEN(spec) == 0) {
+ Py_ssize_t n = asdl_seq_LEN(spec);
+ if (n == 0 || (n == 1 && asdl_seq_GET(spec, 0)->kind == Constant_kind)) {
res = _PyAST_JoinedStr(spec, lineno, col_offset, end_lineno,
end_col_offset, p->arena);
} else {
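
The effect of this change is visible from pure Python via the ``ast`` module;
on an interpreter with this patch, a single-element format specifier stays
wrapped in a ``JoinedStr``:

    import ast

    tree = ast.parse('f"{a:.3f}"', mode="eval")
    spec = tree.body.values[0].format_spec
    print(ast.dump(spec))
    # With this change: JoinedStr(values=[Constant(value='.3f')])
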
From 64857d849f3079a73367525ce93fd7a463b83908 Mon Sep 17 00:00:00 2001
From: Brandt Bucher
Date: Fri, 26 Jul 2024 09:40:15 -0700
Subject: [PATCH 039/139] GH-122294: Burn in the addresses of side exits
(GH-122295)
---
Include/internal/pycore_uop_metadata.h | 4 ++--
Python/bytecodes.c | 12 ++++++------
Python/executor_cases.c.h | 12 ++++++------
Python/optimizer.c | 10 ++++++----
Python/optimizer_bytecodes.c | 3 ++-
Python/optimizer_cases.c.h | 2 ++
6 files changed, 24 insertions(+), 19 deletions(-)
diff --git a/Include/internal/pycore_uop_metadata.h b/Include/internal/pycore_uop_metadata.h
index fd0d4a67d93538..d23a4e2ea14345 100644
--- a/Include/internal/pycore_uop_metadata.h
+++ b/Include/internal/pycore_uop_metadata.h
@@ -252,7 +252,7 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = {
[_SET_IP] = 0,
[_CHECK_STACK_SPACE_OPERAND] = HAS_DEOPT_FLAG,
[_SAVE_RETURN_OFFSET] = HAS_ARG_FLAG,
- [_EXIT_TRACE] = HAS_ARG_FLAG | HAS_ESCAPES_FLAG,
+ [_EXIT_TRACE] = HAS_ESCAPES_FLAG,
[_CHECK_VALIDITY] = HAS_DEOPT_FLAG,
[_LOAD_CONST_INLINE] = HAS_PURE_FLAG,
[_LOAD_CONST_INLINE_BORROW] = HAS_PURE_FLAG,
@@ -261,7 +261,7 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = {
[_LOAD_CONST_INLINE_BORROW_WITH_NULL] = HAS_PURE_FLAG,
[_CHECK_FUNCTION] = HAS_DEOPT_FLAG,
[_INTERNAL_INCREMENT_OPT_COUNTER] = 0,
- [_DYNAMIC_EXIT] = HAS_ARG_FLAG | HAS_ESCAPES_FLAG,
+ [_DYNAMIC_EXIT] = HAS_ESCAPES_FLAG,
[_START_EXECUTOR] = 0,
[_FATAL_ERROR] = 0,
[_CHECK_VALIDITY_AND_SET_IP] = HAS_DEOPT_FLAG,
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index 871e2dbf358418..d74f2aae0483ce 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -4609,8 +4609,8 @@ dummy_func(
#endif
}
- tier2 op(_EXIT_TRACE, (--)) {
- _PyExitData *exit = &current_executor->exits[oparg];
+ tier2 op(_EXIT_TRACE, (exit_p/4 --)) {
+ _PyExitData *exit = (_PyExitData *)exit_p;
PyCodeObject *code = _PyFrame_GetCode(frame);
_Py_CODEUNIT *target = _PyCode_CODE(code) + exit->target;
#if defined(Py_DEBUG) && !defined(_Py_JIT)
@@ -4619,7 +4619,7 @@ dummy_func(
printf("SIDE EXIT: [UOp ");
_PyUOpPrint(&next_uop[-1]);
printf(", exit %u, temp %d, target %d -> %s]\n",
- oparg, exit->temperature.as_counter,
+ exit - current_executor->exits, exit->temperature.as_counter,
(int)(target - _PyCode_CODE(code)),
_PyOpcode_OpName[target->op.code]);
}
@@ -4698,9 +4698,9 @@ dummy_func(
exe->count++;
}
- tier2 op(_DYNAMIC_EXIT, (--)) {
+ tier2 op(_DYNAMIC_EXIT, (exit_p/4 --)) {
tstate->previous_executor = (PyObject *)current_executor;
- _PyExitData *exit = (_PyExitData *)&current_executor->exits[oparg];
+ _PyExitData *exit = (_PyExitData *)exit_p;
_Py_CODEUNIT *target = frame->instr_ptr;
#if defined(Py_DEBUG) && !defined(_Py_JIT)
OPT_HIST(trace_uop_execution_counter, trace_run_length_hist);
@@ -4708,7 +4708,7 @@ dummy_func(
printf("DYNAMIC EXIT: [UOp ");
_PyUOpPrint(&next_uop[-1]);
printf(", exit %u, temp %d, target %d -> %s]\n",
- oparg, exit->temperature.as_counter,
+ exit - current_executor->exits, exit->temperature.as_counter,
(int)(target - _PyCode_CODE(_PyFrame_GetCode(frame))),
_PyOpcode_OpName[target->op.code]);
}
diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h
index 1ced8b951b5ce9..6e3f6cc62fe11f 100644
--- a/Python/executor_cases.c.h
+++ b/Python/executor_cases.c.h
@@ -5044,8 +5044,8 @@
}
case _EXIT_TRACE: {
- oparg = CURRENT_OPARG();
- _PyExitData *exit = &current_executor->exits[oparg];
+ PyObject *exit_p = (PyObject *)CURRENT_OPERAND();
+ _PyExitData *exit = (_PyExitData *)exit_p;
PyCodeObject *code = _PyFrame_GetCode(frame);
_Py_CODEUNIT *target = _PyCode_CODE(code) + exit->target;
#if defined(Py_DEBUG) && !defined(_Py_JIT)
@@ -5054,7 +5054,7 @@
printf("SIDE EXIT: [UOp ");
_PyUOpPrint(&next_uop[-1]);
printf(", exit %u, temp %d, target %d -> %s]\n",
- oparg, exit->temperature.as_counter,
+ exit - current_executor->exits, exit->temperature.as_counter,
(int)(target - _PyCode_CODE(code)),
_PyOpcode_OpName[target->op.code]);
}
@@ -5182,9 +5182,9 @@
}
case _DYNAMIC_EXIT: {
- oparg = CURRENT_OPARG();
+ PyObject *exit_p = (PyObject *)CURRENT_OPERAND();
tstate->previous_executor = (PyObject *)current_executor;
- _PyExitData *exit = (_PyExitData *)&current_executor->exits[oparg];
+ _PyExitData *exit = (_PyExitData *)exit_p;
_Py_CODEUNIT *target = frame->instr_ptr;
#if defined(Py_DEBUG) && !defined(_Py_JIT)
OPT_HIST(trace_uop_execution_counter, trace_run_length_hist);
@@ -5192,7 +5192,7 @@
printf("DYNAMIC EXIT: [UOp ");
_PyUOpPrint(&next_uop[-1]);
printf(", exit %u, temp %d, target %d -> %s]\n",
- oparg, exit->temperature.as_counter,
+ exit - current_executor->exits, exit->temperature.as_counter,
(int)(target - _PyCode_CODE(_PyFrame_GetCode(frame))),
_PyOpcode_OpName[target->op.code]);
}
diff --git a/Python/optimizer.c b/Python/optimizer.c
index f0793b8c8f2088..7b875af2aae898 100644
--- a/Python/optimizer.c
+++ b/Python/optimizer.c
@@ -1153,13 +1153,15 @@ make_executor_from_uops(_PyUOpInstruction *buffer, int length, const _PyBloomFil
*dest = buffer[i];
assert(opcode != _POP_JUMP_IF_FALSE && opcode != _POP_JUMP_IF_TRUE);
if (opcode == _EXIT_TRACE) {
- executor->exits[next_exit].target = buffer[i].target;
- dest->oparg = next_exit;
+ _PyExitData *exit = &executor->exits[next_exit];
+ exit->target = buffer[i].target;
+ dest->operand = (uint64_t)exit;
next_exit--;
}
if (opcode == _DYNAMIC_EXIT) {
- executor->exits[next_exit].target = 0;
- dest->oparg = next_exit;
+ _PyExitData *exit = &executor->exits[next_exit];
+ exit->target = 0;
+ dest->operand = (uint64_t)exit;
next_exit--;
}
}
diff --git a/Python/optimizer_bytecodes.c b/Python/optimizer_bytecodes.c
index a506f9948fd9ae..4d4f89301c7475 100644
--- a/Python/optimizer_bytecodes.c
+++ b/Python/optimizer_bytecodes.c
@@ -788,7 +788,8 @@ dummy_func(void) {
ctx->done = true;
}
- op(_EXIT_TRACE, (--)) {
+ op(_EXIT_TRACE, (exit_p/4 --)) {
+ (void)exit_p;
ctx->done = true;
}
diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h
index 166b1674bc3334..fae93ce89e82e5 100644
--- a/Python/optimizer_cases.c.h
+++ b/Python/optimizer_cases.c.h
@@ -2163,6 +2163,8 @@
}
case _EXIT_TRACE: {
+ PyObject *exit_p = (PyObject *)this_instr->operand;
+ (void)exit_p;
ctx->done = true;
break;
}
From c557ae97d6bd9d04164a19b4fe136610e54dbdd8 Mon Sep 17 00:00:00 2001
From: Sam Gross
Date: Fri, 26 Jul 2024 13:06:07 -0400
Subject: [PATCH 040/139] gh-122201: Lock mutex when setting handling_thread to
NULL (#122204)
In the free-threaded build, we need to lock pending->mutex when clearing
the handling_thread in order not to race with a concurrent
make_pending_calls in the same interpreter.
---
Python/ceval_gil.c | 18 +++++++++++++++---
Tools/tsan/suppressions_free_threading.txt | 1 -
2 files changed, 15 insertions(+), 4 deletions(-)
diff --git a/Python/ceval_gil.c b/Python/ceval_gil.c
index dc3baf79ccba62..0b45caba0d49ff 100644
--- a/Python/ceval_gil.c
+++ b/Python/ceval_gil.c
@@ -901,6 +901,18 @@ unsignal_pending_calls(PyThreadState *tstate, PyInterpreterState *interp)
#endif
}
+static void
+clear_pending_handling_thread(struct _pending_calls *pending)
+{
+#ifdef Py_GIL_DISABLED
+ PyMutex_Lock(&pending->mutex);
+ pending->handling_thread = NULL;
+ PyMutex_Unlock(&pending->mutex);
+#else
+ pending->handling_thread = NULL;
+#endif
+}
+
static int
make_pending_calls(PyThreadState *tstate)
{
@@ -933,7 +945,7 @@ make_pending_calls(PyThreadState *tstate)
int32_t npending;
if (_make_pending_calls(pending, &npending) != 0) {
- pending->handling_thread = NULL;
+ clear_pending_handling_thread(pending);
/* There might not be more calls to make, but we play it safe. */
signal_pending_calls(tstate, interp);
return -1;
@@ -945,7 +957,7 @@ make_pending_calls(PyThreadState *tstate)
if (_Py_IsMainThread() && _Py_IsMainInterpreter(interp)) {
if (_make_pending_calls(pending_main, &npending) != 0) {
- pending->handling_thread = NULL;
+ clear_pending_handling_thread(pending);
/* There might not be more calls to make, but we play it safe. */
signal_pending_calls(tstate, interp);
return -1;
@@ -956,7 +968,7 @@ make_pending_calls(PyThreadState *tstate)
}
}
- pending->handling_thread = NULL;
+ clear_pending_handling_thread(pending);
return 0;
}
diff --git a/Tools/tsan/suppressions_free_threading.txt b/Tools/tsan/suppressions_free_threading.txt
index 0955387dfb8370..a54e66d1212d1f 100644
--- a/Tools/tsan/suppressions_free_threading.txt
+++ b/Tools/tsan/suppressions_free_threading.txt
@@ -28,7 +28,6 @@ race_top:assign_version_tag
race_top:new_reference
race_top:_multiprocessing_SemLock_acquire_impl
race_top:list_get_item_ref
-race_top:make_pending_calls
race_top:_Py_slot_tp_getattr_hook
race_top:add_threadstate
race_top:dump_traceback
From 1ca99ed240e1e70502d84fea274423b660d172c2 Mon Sep 17 00:00:00 2001
From: Mark Shannon
Date: Fri, 26 Jul 2024 18:38:52 +0100
Subject: [PATCH 041/139] Manually override bytecode definition in optimizer,
to avoid build error (GH-122316)
---
Python/optimizer_bytecodes.c | 8 ++++++++
Python/optimizer_cases.c.h | 13 +++++++++----
2 files changed, 17 insertions(+), 4 deletions(-)
diff --git a/Python/optimizer_bytecodes.c b/Python/optimizer_bytecodes.c
index 4d4f89301c7475..c982e37182157a 100644
--- a/Python/optimizer_bytecodes.c
+++ b/Python/optimizer_bytecodes.c
@@ -596,6 +596,14 @@ dummy_func(void) {
}
}
+ op(_MAYBE_EXPAND_METHOD, (callable, self_or_null, args[oparg] -- func, maybe_self, args[oparg])) {
+ (void)callable;
+ (void)self_or_null;
+ (void)args;
+ func = sym_new_not_null(ctx);
+ maybe_self = sym_new_not_null(ctx);
+ }
+
op(_PY_FRAME_GENERAL, (callable, self_or_null, args[oparg] -- new_frame: _Py_UOpsAbstractFrame *)) {
/* The _Py_UOpsAbstractFrame design assumes that we can copy arguments across directly */
(void)callable;
diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h
index fae93ce89e82e5..4fa40ff861ba70 100644
--- a/Python/optimizer_cases.c.h
+++ b/Python/optimizer_cases.c.h
@@ -1599,14 +1599,19 @@
}
case _MAYBE_EXPAND_METHOD: {
+ _Py_UopsSymbol **args;
+ _Py_UopsSymbol *self_or_null;
+ _Py_UopsSymbol *callable;
_Py_UopsSymbol *func;
_Py_UopsSymbol *maybe_self;
- _Py_UopsSymbol **args;
+ args = &stack_pointer[-oparg];
+ self_or_null = stack_pointer[-1 - oparg];
+ callable = stack_pointer[-2 - oparg];
+ (void)callable;
+ (void)self_or_null;
+ (void)args;
func = sym_new_not_null(ctx);
maybe_self = sym_new_not_null(ctx);
- for (int _i = oparg; --_i >= 0;) {
- args[_i] = sym_new_not_null(ctx);
- }
stack_pointer[-2 - oparg] = func;
stack_pointer[-1 - oparg] = maybe_self;
break;
From d791b9815a64c99991fcfd2f8408fc0b7ddb00bd Mon Sep 17 00:00:00 2001
From: Irit Katriel <1055913+iritkatriel@users.noreply.github.com>
Date: Fri, 26 Jul 2024 19:40:36 +0100
Subject: [PATCH 042/139] gh-122245: Add test case of generic type with
__debug__ (#122322)
---
Lib/test/test_syntax.py | 4 ++++
.../2024-07-24-22-39-07.gh-issue-122245.LVa9v8.rst | 4 ++--
2 files changed, 6 insertions(+), 2 deletions(-)
diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py
index 4421d03a6d2206..206b7f0088a925 100644
--- a/Lib/test/test_syntax.py
+++ b/Lib/test/test_syntax.py
@@ -2265,6 +2265,10 @@ def f(x: *b)
Traceback (most recent call last):
SyntaxError: cannot assign to __debug__
+ >>> class A[__debug__]: pass
+ Traceback (most recent call last):
+ SyntaxError: cannot assign to __debug__
+
>>> class A[T]((x := 3)): ...
Traceback (most recent call last):
...
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-07-24-22-39-07.gh-issue-122245.LVa9v8.rst b/Misc/NEWS.d/next/Core and Builtins/2024-07-24-22-39-07.gh-issue-122245.LVa9v8.rst
index 453c45e2f7ae3f..fff99b4992e321 100644
--- a/Misc/NEWS.d/next/Core and Builtins/2024-07-24-22-39-07.gh-issue-122245.LVa9v8.rst
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-07-24-22-39-07.gh-issue-122245.LVa9v8.rst
@@ -1,4 +1,4 @@
Detection of writes to ``__debug__`` is moved from the compiler's codegen
-stage to the symtable. This means that these errors now detected even in
+stage to the symtable. This means that these errors are now detected even in
code that is optimized away before codegen (such as assertions with the
-:option:`-O` command line option.)
+:option:`-O` command line option).
From 33586d64ca911b472de2550cf4f5b524cef65921 Mon Sep 17 00:00:00 2001
From: Carol Willing
Date: Fri, 26 Jul 2024 13:56:39 -0700
Subject: [PATCH 043/139] Remove reference to docs mailing list for bug reports
(#122323)
---
Doc/bugs.rst | 9 ++-------
1 file changed, 2 insertions(+), 7 deletions(-)
diff --git a/Doc/bugs.rst b/Doc/bugs.rst
index 9aff2f0ff5187d..5d0f68ca69675e 100644
--- a/Doc/bugs.rst
+++ b/Doc/bugs.rst
@@ -16,21 +16,16 @@ Documentation bugs
==================
If you find a bug in this documentation or would like to propose an improvement,
-please submit a bug report on the :ref:`tracker `. If you
+please submit a bug report on the :ref:`issue tracker `. If you
have a suggestion on how to fix it, include that as well.
You can also open a discussion item on our
`Documentation Discourse forum `_.
If you find a bug in the theme (HTML / CSS / JavaScript) of the
-documentation, please submit a bug report on the `python-doc-theme bug
+documentation, please submit a bug report on the `python-doc-theme issue
tracker `_.
-If you're short on time, you can also email documentation bug reports to
-docs@python.org (behavioral bugs can be sent to python-list@python.org).
-'docs@' is a mailing list run by volunteers; your request will be noticed,
-though it may take a while to be processed.
-
.. seealso::
`Documentation bugs`_
From d52726ccd456833ea9f09cabb4b8aef09755e472 Mon Sep 17 00:00:00 2001
From: Subrahmanya Gaonkar <148525245+negativenagesh@users.noreply.github.com>
Date: Sat, 27 Jul 2024 03:33:08 +0530
Subject: [PATCH 044/139] Document ``mimetypes.MimeTypes.add_type()`` (#122301)
Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
---
Doc/library/mimetypes.rst | 10 ++++++++++
1 file changed, 10 insertions(+)
diff --git a/Doc/library/mimetypes.rst b/Doc/library/mimetypes.rst
index 91e8c30f8607b3..8ad4850584a7e1 100644
--- a/Doc/library/mimetypes.rst
+++ b/Doc/library/mimetypes.rst
@@ -295,3 +295,13 @@ than one MIME-type database; it provides an interface similar to the one of the
types, else to the list of non-standard types.
.. versionadded:: 3.2
+
+
+ .. method:: MimeTypes.add_type(type, ext, strict=True)
+
+ Add a mapping from the MIME type *type* to the extension *ext*. When the
+ extension is already known, the new type will replace the old one. When the
+ type is already known, the extension will be added to the list of known extensions.
+
+ When *strict* is ``True`` (the default), the mapping will be added to the
+ official MIME types, otherwise to the non-standard ones.
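
A short, self-contained sketch of the documented method (the MIME type and
extension are made up for illustration):

    import mimetypes

    db = mimetypes.MimeTypes()
    db.add_type("application/x-example", ".exm")           # hypothetical mapping
    print(db.guess_type("report.exm"))                     # ('application/x-example', None)
    print(db.guess_extension("application/x-example"))     # '.exm'
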
From 762e771cc01e1485e8040ea046fcc55a4a420d42 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sat, 27 Jul 2024 02:00:56 +0100
Subject: [PATCH 045/139] Fix underline for 'pty' in What's New in Python 3.14
(#122337)
---
Doc/whatsnew/3.14.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Doc/whatsnew/3.14.rst b/Doc/whatsnew/3.14.rst
index d2ba7ada76733a..cc03088592d9d4 100644
--- a/Doc/whatsnew/3.14.rst
+++ b/Doc/whatsnew/3.14.rst
@@ -309,7 +309,7 @@ pathlib
arguments are joined onto *other*.
pty
-___
+---
* Remove deprecated :func:`!pty.master_open` and :func:`!pty.slave_open`.
They had previously raised a :exc:`DeprecationWarning` since Python 3.12.
From 4a2607c1807982a107445b5a35240f587a61eb0d Mon Sep 17 00:00:00 2001
From: Russell Keith-Magee
Date: Sat, 27 Jul 2024 11:53:44 +1000
Subject: [PATCH 046/139] gh-120831: Correct default minimum iOS version.
(#122339)
Correct default minimum iOS version.
---
Lib/sysconfig/__init__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Lib/sysconfig/__init__.py b/Lib/sysconfig/__init__.py
index 83e057c177f8c0..80aef3447117e5 100644
--- a/Lib/sysconfig/__init__.py
+++ b/Lib/sysconfig/__init__.py
@@ -642,7 +642,7 @@ def get_platform():
release = m.group()
elif osname[:6] == "darwin":
if sys.platform == "ios":
- release = get_config_vars().get("IPHONEOS_DEPLOYMENT_TARGET", "12.0")
+ release = get_config_vars().get("IPHONEOS_DEPLOYMENT_TARGET", "13.0")
osname = sys.platform
machine = sys.implementation._multiarch
else:
From 863a92f2bc708b9e3dfa9828bb8155b8d371e09c Mon Sep 17 00:00:00 2001
From: Russell Keith-Magee
Date: Sat, 27 Jul 2024 12:24:30 +1000
Subject: [PATCH 047/139] gh-121832: Revert test skip introduced by #122150.
(#122340)
Revert test skip introduced by #122150.
---
Lib/test/test_types.py | 1 -
1 file changed, 1 deletion(-)
diff --git a/Lib/test/test_types.py b/Lib/test/test_types.py
index a87bb275d296a0..a84f43ba9b51fd 100644
--- a/Lib/test/test_types.py
+++ b/Lib/test/test_types.py
@@ -2382,7 +2382,6 @@ def setUpClass(cls):
@cpython_only
@no_rerun('channels (and queues) might have a refleak; see gh-122199')
- @unittest.skipIf(is_apple_mobile, "Fails on iOS due to test ordering; see #121832.")
def test_slot_wrappers(self):
rch, sch = interpreters.channels.create()
From c08696286f52d286674f264eecf7b33a335a890b Mon Sep 17 00:00:00 2001
From: Peter Bierma
Date: Sat, 27 Jul 2024 02:27:48 -0400
Subject: [PATCH 048/139] gh-122332: Fix missing `NULL` check in
`asyncio.Task.get_coro` (#122338)
---
Lib/test/test_asyncio/test_eager_task_factory.py | 12 ++++++++++++
.../2024-07-26-21-21-13.gh-issue-122332.fvw88r.rst | 2 ++
Modules/_asynciomodule.c | 6 +++++-
3 files changed, 19 insertions(+), 1 deletion(-)
create mode 100644 Misc/NEWS.d/next/Library/2024-07-26-21-21-13.gh-issue-122332.fvw88r.rst
diff --git a/Lib/test/test_asyncio/test_eager_task_factory.py b/Lib/test/test_asyncio/test_eager_task_factory.py
index 0f8212dbec47be..0777f39b572486 100644
--- a/Lib/test/test_asyncio/test_eager_task_factory.py
+++ b/Lib/test/test_asyncio/test_eager_task_factory.py
@@ -241,6 +241,18 @@ class DummyLoop:
_, out, err = assert_python_ok("-c", code)
self.assertFalse(err)
+ def test_issue122332(self):
+ async def coro():
+ pass
+
+ async def run():
+ task = self.loop.create_task(coro())
+ await task
+ self.assertIsNone(task.get_coro())
+
+ self.run_coro(run())
+
+
class AsyncTaskCounter:
def __init__(self, loop, *, task_class, eager):
self.suspense_count = 0
diff --git a/Misc/NEWS.d/next/Library/2024-07-26-21-21-13.gh-issue-122332.fvw88r.rst b/Misc/NEWS.d/next/Library/2024-07-26-21-21-13.gh-issue-122332.fvw88r.rst
new file mode 100644
index 00000000000000..55bb1dc44add1b
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-07-26-21-21-13.gh-issue-122332.fvw88r.rst
@@ -0,0 +1,2 @@
+Fixed segfault with :meth:`asyncio.Task.get_coro` when using an eager task
+factory.
diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c
index 1a223f9bd0cbae..873c17cd78709d 100644
--- a/Modules/_asynciomodule.c
+++ b/Modules/_asynciomodule.c
@@ -2500,7 +2500,11 @@ static PyObject *
_asyncio_Task_get_coro_impl(TaskObj *self)
/*[clinic end generated code: output=bcac27c8cc6c8073 input=d2e8606c42a7b403]*/
{
- return Py_NewRef(self->task_coro);
+ if (self->task_coro) {
+ return Py_NewRef(self->task_coro);
+ }
+
+ Py_RETURN_NONE;
}
/*[clinic input]
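
The scenario fixed here can be reproduced from pure Python with an eager task
factory; after the fix, ``get_coro()`` should return ``None`` for such a task
instead of crashing. A minimal sketch:

    import asyncio

    async def coro():
        pass

    async def main():
        loop = asyncio.get_running_loop()
        loop.set_task_factory(asyncio.eager_task_factory)
        task = loop.create_task(coro())
        await task
        print(task.get_coro())   # None with an eager task factory, after the fix

    asyncio.run(main())
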
From bb09ba679223666e01f8da780f97888a29d07131 Mon Sep 17 00:00:00 2001
From: Petr Viktorin
Date: Sat, 27 Jul 2024 10:27:06 +0200
Subject: [PATCH 049/139] gh-122291: Intern latin-1 one-byte strings at startup
(GH-122303)
---
InternalDocs/string_interning.md | 66 +++++++++++++++-----------------
Objects/unicodeobject.c | 36 +++++------------
2 files changed, 40 insertions(+), 62 deletions(-)
diff --git a/InternalDocs/string_interning.md b/InternalDocs/string_interning.md
index 930ea110d857ac..358e2c070cd5fa 100644
--- a/InternalDocs/string_interning.md
+++ b/InternalDocs/string_interning.md
@@ -8,51 +8,50 @@
This is used to optimize dict and attribute lookups, among other things.
-Python uses three different mechanisms to intern strings:
+Python uses two different mechanisms to intern strings: singletons and
+dynamic interning.
-- Singleton strings marked in C source with `_Py_STR` and `_Py_ID` macros.
- These are statically allocated, and collected using `make regen-global-objects`
- (`Tools/build/generate_global_objects.py`), which generates code
- for declaration, initialization and finalization.
+## Singletons
- The difference between the two kinds is not important. (A `_Py_ID` string is
- a valid C name, with which we can refer to it; a `_Py_STR` may e.g. contain
- non-identifier characters, so it needs a separate C-compatible name.)
+The 256 possible one-character latin-1 strings, which can be retrieved with
+`_Py_LATIN1_CHR(c)`, are stored in statically allocated arrays,
+`_PyRuntime.static_objects.strings.ascii` and
+`_PyRuntime.static_objects.strings.latin1`.
- The empty string is in this category (as `_Py_STR(empty)`).
+Longer singleton strings are marked in C source with `_Py_ID` (if the string
+is a valid C identifier fragment) or `_Py_STR` (if it needs a separate
+C-compatible name.)
+These are also stored in statically allocated arrays.
+They are collected from CPython sources using `make regen-global-objects`
+(`Tools/build/generate_global_objects.py`), which generates code
+for declaration, initialization and finalization.
- These singletons are interned in a runtime-global lookup table,
- `_PyRuntime.cached_objects.interned_strings` (`INTERNED_STRINGS`),
- at runtime initialization.
+The empty string is one of the singletons: `_Py_STR(empty)`.
-- The 256 possible one-character latin-1 strings are singletons,
- which can be retrieved with `_Py_LATIN1_CHR(c)`, are stored in runtime-global
- arrays, `_PyRuntime.static_objects.strings.ascii` and
- `_PyRuntime.static_objects.strings.latin1`.
+The three sets of singletons (`_Py_LATIN1_CHR`, `_Py_ID`, `_Py_STR`)
+are disjoint.
+If you have such a singleton, it (and no other copy) will be interned.
- These are NOT interned at startup in the normal build.
- In the free-threaded build, they are; this avoids modifying the
- global lookup table after threads are started.
+These singletons are interned in a runtime-global lookup table,
+`_PyRuntime.cached_objects.interned_strings` (`INTERNED_STRINGS`),
+at runtime initialization, and immutable until it's torn down
+at runtime finalization.
+It is shared across threads and interpreters without any synchronization.
- Interning a one-char latin-1 string will always intern the corresponding
- singleton.
-- All other strings are allocated dynamically, and have their
- `_PyUnicode_STATE(s).statically_allocated` flag set to zero.
- When interned, such strings are added to an interpreter-wide dict,
- `PyInterpreterState.cached_objects.interned_strings`.
+## Dynamically allocated strings
- The key and value of each entry in this dict reference the same object.
+All other strings are allocated dynamically, and have their
+`_PyUnicode_STATE(s).statically_allocated` flag set to zero.
+When interned, such strings are added to an interpreter-wide dict,
+`PyInterpreterState.cached_objects.interned_strings`.
-The three sets of singletons (`_Py_STR`, `_Py_ID`, `_Py_LATIN1_CHR`)
-are disjoint.
-If you have such a singleton, it (and no other copy) will be interned.
+The key and value of each entry in this dict reference the same object.
## Immortality and reference counting
-Invariant: Every immortal string is interned, *except* the one-char latin-1
-singletons (which might but might not be interned).
+Invariant: Every immortal string is interned.
In practice, this means that you must not use `_Py_SetImmortal` on
a string. (If you know it's already immortal, don't immortalize it;
@@ -115,8 +114,5 @@ The valid transitions between these states are:
Using `_PyUnicode_InternStatic` on these is an error; the other cases
don't change the state.
-- One-char latin-1 singletons can be interned (0 -> 3) using any interning
- function; after that the functions don't change the state.
-
-- Other statically allocated strings are interned (0 -> 3) at runtime init;
+- Singletons are interned (0 -> 3) at runtime init;
after that all interning functions don't change the state.
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c
index 6196a8e766a15b..ffb879a68745b1 100644
--- a/Objects/unicodeobject.c
+++ b/Objects/unicodeobject.c
@@ -325,7 +325,8 @@ init_global_interned_strings(PyInterpreterState *interp)
return _PyStatus_ERR("failed to create global interned dict");
}
- /* Intern statically allocated string identifiers and deepfreeze strings.
+ /* Intern statically allocated string identifiers, deepfreeze strings,
+ * and one-byte latin-1 strings.
* This must be done before any module initialization so that statically
* allocated string identifiers are used instead of heap allocated strings.
* Deepfreeze uses the interned identifiers if present to save space
@@ -333,14 +334,11 @@ init_global_interned_strings(PyInterpreterState *interp)
*/
_PyUnicode_InitStaticStrings(interp);
-#ifdef Py_GIL_DISABLED
-// In the free-threaded build, intern the 1-byte strings as well
for (int i = 0; i < 256; i++) {
PyObject *s = LATIN1(i);
_PyUnicode_InternStatic(interp, &s);
assert(s == LATIN1(i));
}
-#endif
#ifdef Py_DEBUG
assert(_PyUnicode_CheckConsistency(&_Py_STR(empty), 1));
@@ -15355,26 +15353,14 @@ intern_static(PyInterpreterState *interp, PyObject *s /* stolen */)
assert(s != NULL);
assert(_PyUnicode_CHECK(s));
assert(_PyUnicode_STATE(s).statically_allocated);
-
- switch (PyUnicode_CHECK_INTERNED(s)) {
- case SSTATE_NOT_INTERNED:
- break;
- case SSTATE_INTERNED_IMMORTAL_STATIC:
- return s;
- default:
- Py_FatalError("_PyUnicode_InternStatic called on wrong string");
- }
+ assert(!PyUnicode_CHECK_INTERNED(s));
#ifdef Py_DEBUG
/* We must not add process-global interned string if there's already a
* per-interpreter interned_dict, which might contain duplicates.
- * Except "short string" singletons: those are special-cased. */
+ */
PyObject *interned = get_interned_dict(interp);
- assert(interned == NULL || unicode_is_singleton(s));
-#ifdef Py_GIL_DISABLED
- // In the free-threaded build, don't allow even the short strings.
assert(interned == NULL);
-#endif
#endif
/* Look in the global cache first. */
@@ -15446,11 +15432,6 @@ intern_common(PyInterpreterState *interp, PyObject *s /* stolen */,
return s;
}
- /* Handle statically allocated strings. */
- if (_PyUnicode_STATE(s).statically_allocated) {
- return intern_static(interp, s);
- }
-
/* Is it already interned? */
switch (PyUnicode_CHECK_INTERNED(s)) {
case SSTATE_NOT_INTERNED:
@@ -15467,6 +15448,9 @@ intern_common(PyInterpreterState *interp, PyObject *s /* stolen */,
return s;
}
+ /* Statically allocated strings must be already interned. */
+ assert(!_PyUnicode_STATE(s).statically_allocated);
+
#if Py_GIL_DISABLED
/* In the free-threaded build, all interned strings are immortal */
immortalize = 1;
@@ -15477,13 +15461,11 @@ intern_common(PyInterpreterState *interp, PyObject *s /* stolen */,
immortalize = 1;
}
- /* if it's a short string, get the singleton -- and intern it */
+ /* if it's a short string, get the singleton */
if (PyUnicode_GET_LENGTH(s) == 1 &&
PyUnicode_KIND(s) == PyUnicode_1BYTE_KIND) {
PyObject *r = LATIN1(*(unsigned char*)PyUnicode_DATA(s));
- if (!PyUnicode_CHECK_INTERNED(r)) {
- r = intern_static(interp, r);
- }
+ assert(PyUnicode_CHECK_INTERNED(r));
Py_DECREF(s);
return r;
}
From 8ac5565be2e5a11fad643c2fe9cbf16d2ddb95cd Mon Sep 17 00:00:00 2001
From: Nate Ohlson
Date: Sat, 27 Jul 2024 04:57:44 -0500
Subject: [PATCH 050/139] gh-112301: Compiler warning management tooling
(#121730)
Co-authored-by: blurb-it[bot] <43283697+blurb-it[bot]@users.noreply.github.com>
Co-authored-by: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com>
---
.github/workflows/build.yml | 2 +-
.github/workflows/reusable-ubuntu.yml | 5 +-
...-07-13-21-55-58.gh-issue-112301.YJS1dl.rst | 2 +
Tools/build/.warningignore_ubuntu | 3 +
Tools/build/check_warnings.py | 195 ++++++++++++++++++
5 files changed, 205 insertions(+), 2 deletions(-)
create mode 100644 Misc/NEWS.d/next/Tests/2024-07-13-21-55-58.gh-issue-112301.YJS1dl.rst
create mode 100644 Tools/build/.warningignore_ubuntu
create mode 100644 Tools/build/check_warnings.py
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 613578ae176ad9..6568b50c3a6a13 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -348,7 +348,7 @@ jobs:
with:
save: false
- name: Configure CPython
- run: ./configure --config-cache --enable-slower-safety --with-pydebug --with-openssl=$OPENSSL_DIR
+ run: ./configure CFLAGS="-fdiagnostics-format=json" --config-cache --enable-slower-safety --with-pydebug --with-openssl=$OPENSSL_DIR
- name: Build CPython
run: make -j4
- name: Display build info
diff --git a/.github/workflows/reusable-ubuntu.yml b/.github/workflows/reusable-ubuntu.yml
index 54d7765d159d49..c6289a74e9a5f6 100644
--- a/.github/workflows/reusable-ubuntu.yml
+++ b/.github/workflows/reusable-ubuntu.yml
@@ -67,6 +67,7 @@ jobs:
working-directory: ${{ env.CPYTHON_BUILDDIR }}
run: >-
../cpython-ro-srcdir/configure
+ CFLAGS="-fdiagnostics-format=json"
--config-cache
--with-pydebug
--enable-slower-safety
@@ -74,10 +75,12 @@ jobs:
${{ fromJSON(inputs.free-threading) && '--disable-gil' || '' }}
- name: Build CPython out-of-tree
working-directory: ${{ env.CPYTHON_BUILDDIR }}
- run: make -j4
+ run: make -j4 &> compiler_output.txt
- name: Display build info
working-directory: ${{ env.CPYTHON_BUILDDIR }}
run: make pythoninfo
+ - name: Check compiler warnings
+ run: python Tools/build/check_warnings.py --compiler-output-file-path=${{ env.CPYTHON_BUILDDIR }}/compiler_output.txt --warning-ignore-file-path ${GITHUB_WORKSPACE}/Tools/build/.warningignore_ubuntu
- name: Remount sources writable for tests
# some tests write to srcdir, lack of pyc files slows down testing
run: sudo mount $CPYTHON_RO_SRCDIR -oremount,rw
diff --git a/Misc/NEWS.d/next/Tests/2024-07-13-21-55-58.gh-issue-112301.YJS1dl.rst b/Misc/NEWS.d/next/Tests/2024-07-13-21-55-58.gh-issue-112301.YJS1dl.rst
new file mode 100644
index 00000000000000..d5718ed4be7606
--- /dev/null
+++ b/Misc/NEWS.d/next/Tests/2024-07-13-21-55-58.gh-issue-112301.YJS1dl.rst
@@ -0,0 +1,2 @@
+Add tooling to check for changes in compiler warnings.
+Patch by Nate Ohlson.
diff --git a/Tools/build/.warningignore_ubuntu b/Tools/build/.warningignore_ubuntu
new file mode 100644
index 00000000000000..8242c8d17c89fb
--- /dev/null
+++ b/Tools/build/.warningignore_ubuntu
@@ -0,0 +1,3 @@
+# Files listed will be ignored by the compiler warning checker
+# for the Ubuntu/build and test job.
+# Keep lines sorted lexicographically to help avoid merge conflicts.
diff --git a/Tools/build/check_warnings.py b/Tools/build/check_warnings.py
new file mode 100644
index 00000000000000..f0c0067f4ab255
--- /dev/null
+++ b/Tools/build/check_warnings.py
@@ -0,0 +1,195 @@
+#!/usr/bin/env python3
+"""
+Parses compiler output with -fdiagnostics-format=json and checks that warnings
+exist only in files that are expected to have warnings.
+"""
+import argparse
+import json
+import re
+import sys
+from pathlib import Path
+
+
+def extract_warnings_from_compiler_output(compiler_output: str) -> list[dict]:
+ """
+ Extracts warnings from the compiler output when using
+ -fdiagnostics-format=json
+
+ Compiler output as a whole is not a valid json document, but includes many
+ json objects and may include other output that is not json.
+ """
+
+ # Regex to find json arrays at the top level of the file
+ # in the compiler output
+ json_arrays = re.findall(
+ r"\[(?:[^\[\]]|\[(?:[^\[\]]|\[[^\[\]]*\])*\])*\]", compiler_output
+ )
+ compiler_warnings = []
+ for array in json_arrays:
+ try:
+ json_data = json.loads(array)
+ json_objects_in_array = [entry for entry in json_data]
+ compiler_warnings.extend(
+ [
+ entry
+ for entry in json_objects_in_array
+ if entry.get("kind") == "warning"
+ ]
+ )
+ except json.JSONDecodeError:
+ continue # Skip malformed JSON
+
+ return compiler_warnings
+
+
+def get_warnings_by_file(warnings: list[dict]) -> dict[str, list[dict]]:
+ """
+ Returns a dictionary where the key is the file and the data is the warnings
+ in that file
+ """
+ warnings_by_file = {}
+ for warning in warnings:
+ locations = warning["locations"]
+ for location in locations:
+ for key in ["caret", "start", "end"]:
+ if key in location:
+ file = location[key]["file"]
+ file = file.lstrip(
+ "./"
+ ) # Remove leading current directory if present
+ if file not in warnings_by_file:
+ warnings_by_file[file] = []
+ warnings_by_file[file].append(warning)
+
+ return warnings_by_file
+
+
+def get_unexpected_warnings(
+ warnings: list[dict],
+ files_with_expected_warnings: set[str],
+ files_with_warnings: set[str],
+) -> int:
+ """
+ Returns failure status if warnings discovered in list of warnings
+ are associated with a file that is not found in the list of files
+ with expected warnings
+ """
+ unexpected_warnings = []
+ for file in files_with_warnings.keys():
+ if file not in files_with_expected_warnings:
+ unexpected_warnings.extend(files_with_warnings[file])
+
+ if unexpected_warnings:
+ print("Unexpected warnings:")
+ for warning in unexpected_warnings:
+ print(warning)
+ return 1
+
+ return 0
+
+
+def get_unexpected_improvements(
+ warnings: list[dict],
+ files_with_expected_warnings: set[str],
+ files_with_warnings: set[str],
+) -> int:
+ """
+ Returns failure status if there are no warnings in the list of warnings for
+ a file that is in the list of files with expected warnings
+ """
+ unexpected_improvements = []
+ for file in files_with_expected_warnings:
+ if file not in files_with_warnings.keys():
+ unexpected_improvements.append(file)
+
+ if unexpected_improvements:
+ print("Unexpected improvements:")
+ for file in unexpected_improvements:
+ print(file)
+ return 1
+
+ return 0
+
+
+def main(argv: list[str] | None = None) -> int:
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ "--compiler-output-file-path",
+ type=str,
+ required=True,
+ help="Path to the compiler output file",
+ )
+ parser.add_argument(
+ "--warning-ignore-file-path",
+ type=str,
+ required=True,
+ help="Path to the warning ignore file",
+ )
+ parser.add_argument(
+ "--fail-on-regression",
+ action="store_true",
+ default=False,
+ help="Flag to fail if new warnings are found",
+ )
+ parser.add_argument(
+ "--fail-on-improvement",
+ action="store_true",
+ default=False,
+ help="Flag to fail if files that were expected "
+ "to have warnings have no warnings",
+ )
+
+ args = parser.parse_args(argv)
+
+ exit_code = 0
+
+ # Check that the compiler output file is a valid path
+ if not Path(args.compiler_output_file_path).is_file():
+ print(
+ "Compiler output file does not exist: "
+ f"{args.compiler_output_file_path}"
+ )
+ return 1
+
+ # Check that the warning ignore file is a valid path
+ if not Path(args.warning_ignore_file_path).is_file():
+ print(
+ "Warning ignore file does not exist: "
+ f"{args.warning_ignore_file_path}"
+ )
+ return 1
+
+ with Path(args.compiler_output_file_path).open(encoding="UTF-8") as f:
+ compiler_output_file_contents = f.read()
+
+ with Path(args.warning_ignore_file_path).open(
+ encoding="UTF-8"
+ ) as clean_files:
+ files_with_expected_warnings = {
+ file.strip()
+ for file in clean_files
+ if file.strip() and not file.startswith("#")
+ }
+
+ warnings = extract_warnings_from_compiler_output(
+ compiler_output_file_contents
+ )
+ files_with_warnings = get_warnings_by_file(warnings)
+
+ status = get_unexpected_warnings(
+ warnings, files_with_expected_warnings, files_with_warnings
+ )
+ if args.fail_on_regression:
+ exit_code |= status
+
+ status = get_unexpected_improvements(
+ warnings, files_with_expected_warnings, files_with_warnings
+ )
+ if args.fail_on_improvement:
+ exit_code |= status
+
+ return exit_code
+
+
+if __name__ == "__main__":
+ sys.exit(main())
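
Outside of CI the checker can also be invoked programmatically; a sketch
assuming it is run from the repository root and that ``compiler_output.txt``
was produced by a build configured with ``-fdiagnostics-format=json``:

    import sys

    sys.path.insert(0, "Tools/build")      # make check_warnings importable
    from check_warnings import main

    sys.exit(main([
        "--compiler-output-file-path", "compiler_output.txt",
        "--warning-ignore-file-path", "Tools/build/.warningignore_ubuntu",
        "--fail-on-regression",
    ]))
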
From 7a6d4ccf0ec16e09f0d8b21c5a0c591e5e3e45f7 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?B=C3=A9n=C3=A9dikt=20Tran?=
<10796600+picnixz@users.noreply.github.com>
Date: Sat, 27 Jul 2024 12:10:42 +0200
Subject: [PATCH 051/139] gh-122170: Handle ValueError raised by os.stat() in
linecache (GH-122176)
---
Lib/linecache.py | 6 ++--
Lib/test/test_linecache.py | 31 +++++++++++++++++++
...-07-23-15-30-23.gh-issue-122170.Z9gi3Y.rst | 2 ++
3 files changed, 37 insertions(+), 2 deletions(-)
create mode 100644 Misc/NEWS.d/next/Library/2024-07-23-15-30-23.gh-issue-122170.Z9gi3Y.rst
diff --git a/Lib/linecache.py b/Lib/linecache.py
index 3462f1c451ba29..4b38a0464d8747 100644
--- a/Lib/linecache.py
+++ b/Lib/linecache.py
@@ -70,7 +70,7 @@ def checkcache(filename=None):
return
try:
stat = os.stat(fullname)
- except OSError:
+ except (OSError, ValueError):
cache.pop(filename, None)
continue
if size != stat.st_size or mtime != stat.st_mtime:
@@ -135,10 +135,12 @@ def updatecache(filename, module_globals=None):
try:
stat = os.stat(fullname)
break
- except OSError:
+ except (OSError, ValueError):
pass
else:
return []
+ except ValueError: # may be raised by os.stat()
+ return []
try:
with tokenize.open(fullname) as fp:
lines = fp.readlines()
diff --git a/Lib/test/test_linecache.py b/Lib/test/test_linecache.py
index 8ac521d72ef13e..6f5955791407ea 100644
--- a/Lib/test/test_linecache.py
+++ b/Lib/test/test_linecache.py
@@ -280,6 +280,37 @@ def test_loader(self):
self.assertEqual(linecache.getlines(filename, module_globals),
['source for x.y.z\n'])
+ def test_invalid_names(self):
+ for name, desc in [
+ ('\x00', 'NUL bytes filename'),
+ (__file__ + '\x00', 'filename with embedded NUL bytes'),
+ # A filename with surrogate codes. A UnicodeEncodeError is raised
+ # by os.stat() upon querying, which is a subclass of ValueError.
+ ("\uD834\uDD1E.py", 'surrogate codes (MUSICAL SYMBOL G CLEF)'),
+ # For POSIX platforms, an OSError will be raised but for Windows
+ # platforms, a ValueError is raised due to the path_t converter.
+ # See: https://github.com/python/cpython/issues/122170
+ ('a' * 1_000_000, 'very long filename'),
+ ]:
+ with self.subTest(f'updatecache: {desc}'):
+ linecache.clearcache()
+ lines = linecache.updatecache(name)
+ self.assertListEqual(lines, [])
+ self.assertNotIn(name, linecache.cache)
+
+ # hack into the cache (it shouldn't be allowed
+ # but we never know what people do...)
+ for key, fullname in [(name, 'ok'), ('key', name), (name, name)]:
+ with self.subTest(f'checkcache: {desc}',
+ key=key, fullname=fullname):
+ linecache.clearcache()
+ linecache.cache[key] = (0, 1234, [], fullname)
+ linecache.checkcache(key)
+ self.assertNotIn(key, linecache.cache)
+
+ # just to be sure that we did not mess with cache
+ linecache.clearcache()
+
class LineCacheInvalidationTests(unittest.TestCase):
def setUp(self):
diff --git a/Misc/NEWS.d/next/Library/2024-07-23-15-30-23.gh-issue-122170.Z9gi3Y.rst b/Misc/NEWS.d/next/Library/2024-07-23-15-30-23.gh-issue-122170.Z9gi3Y.rst
new file mode 100644
index 00000000000000..7eeb9f67ad4b3a
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-07-23-15-30-23.gh-issue-122170.Z9gi3Y.rst
@@ -0,0 +1,2 @@
+Handle :exc:`ValueError`\s raised by :func:`os.stat` in :mod:`linecache`.
+Patch by Bénédikt Tran.
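
The new behavior is easy to exercise directly; with the fix, an invalid
filename yields an empty result instead of a ``ValueError`` escaping from
``linecache``:

    import linecache

    print(linecache.getlines("bad\x00name.py"))   # [] instead of raising ValueError
    linecache.clearcache()
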
From 4e04d1a3d237abd0cba354024556c39519e0d163 Mon Sep 17 00:00:00 2001
From: Seth Michael Larson
Date: Sat, 27 Jul 2024 06:10:05 -0500
Subject: [PATCH 052/139] gh-122044: Don't error during gitignore filtering
with no files (#122045)
---
Tools/build/generate_sbom.py | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/Tools/build/generate_sbom.py b/Tools/build/generate_sbom.py
index c08568f2e00326..1b000c3b16a17a 100644
--- a/Tools/build/generate_sbom.py
+++ b/Tools/build/generate_sbom.py
@@ -108,6 +108,10 @@ def filter_gitignored_paths(paths: list[str]) -> list[str]:
'.gitignore:9:*.a Tools/lib.a'
"""
+ # No paths means no filtering to be done.
+ if not paths:
+ return []
+
# Filter out files in gitignore.
# Non-matching files show up as '::'
git_check_ignore_proc = subprocess.run(
From 4e7550934941050f54c86338cd5e40cd565ceaf2 Mon Sep 17 00:00:00 2001
From: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com>
Date: Sat, 27 Jul 2024 14:17:54 +0300
Subject: [PATCH 053/139] gh-122085: Use include files for `whatsnew/3.14.rst`
deprecations (#122242)
---
Doc/deprecations/pending-removal-in-3.16.rst | 7 +++-
.../pending-removal-in-future.rst | 4 +++
Doc/whatsnew/3.12.rst | 2 ++
Doc/whatsnew/3.14.rst | 32 ++++++++++++-------
4 files changed, 32 insertions(+), 13 deletions(-)
diff --git a/Doc/deprecations/pending-removal-in-3.16.rst b/Doc/deprecations/pending-removal-in-3.16.rst
index 97e6bf28efddf2..10cb5e424a623b 100644
--- a/Doc/deprecations/pending-removal-in-3.16.rst
+++ b/Doc/deprecations/pending-removal-in-3.16.rst
@@ -1,5 +1,10 @@
Pending Removal in Python 3.16
------------------------------
-* :class:`array.array` ``'u'`` type (:c:type:`wchar_t`):
+* :mod:`array`:
+ :class:`array.array` ``'u'`` type (:c:type:`wchar_t`):
use the ``'w'`` type instead (``Py_UCS4``).
+
+* :mod:`symtable`:
+ Deprecate :meth:`symtable.Class.get_methods` due to the lack of interest.
+ (Contributed by Bénédikt Tran in :gh:`119698`.)
diff --git a/Doc/deprecations/pending-removal-in-future.rst b/Doc/deprecations/pending-removal-in-future.rst
index db6a41fe8880f6..7f10d9a98257f9 100644
--- a/Doc/deprecations/pending-removal-in-future.rst
+++ b/Doc/deprecations/pending-removal-in-future.rst
@@ -34,6 +34,10 @@ although there is currently no date scheduled for their removal.
:class:`complex`: these methods will be required to return an instance of
:class:`complex`.
* Delegation of ``int()`` to ``__trunc__()`` method.
+ * Passing a complex number as the *real* or *imag* argument in the
+ :func:`complex` constructor is now deprecated; it should only be passed
+ as a single positional argument.
+ (Contributed by Serhiy Storchaka in :gh:`109218`.)
* :mod:`calendar`: ``calendar.January`` and ``calendar.February`` constants are
deprecated and replaced by :data:`calendar.JANUARY` and
diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst
index b4cd4aa6e83b91..fc2b6519fb1307 100644
--- a/Doc/whatsnew/3.12.rst
+++ b/Doc/whatsnew/3.12.rst
@@ -1336,6 +1336,8 @@ Deprecated
.. include:: ../deprecations/pending-removal-in-3.15.rst
+.. include:: ../deprecations/pending-removal-in-3.16.rst
+
.. include:: ../deprecations/pending-removal-in-future.rst
Removed
diff --git a/Doc/whatsnew/3.14.rst b/Doc/whatsnew/3.14.rst
index cc03088592d9d4..7450597e8597ad 100644
--- a/Doc/whatsnew/3.14.rst
+++ b/Doc/whatsnew/3.14.rst
@@ -156,6 +156,12 @@ pdb
:pdbcmd:`commands` are preserved across hard-coded breakpoints.
(Contributed by Tian Gao in :gh:`121450`.)
+pickle
+------
+
+* Set the default protocol version on the :mod:`pickle` module to 5.
+ For more details, please see :ref:`pickle protocols <pickle-protocols>`.
+
symtable
--------
@@ -167,12 +173,7 @@ symtable
(Contributed by Bénédikt Tran in :gh:`120029`.)
-pickle
-------
-
-* Set the default protocol version on the :mod:`pickle` module to 5.
- For more details, please see :ref:`pickle protocols <pickle-protocols>`.
-
+.. Add improved modules above alphabetically, not here at the end.
Optimizations
=============
@@ -185,24 +186,32 @@ asyncio
reduces memory usage.
(Contributed by Kumar Aditya in :gh:`107803`.)
-
-
Deprecated
==========
-* Passing a complex number as the *real* or *imag* argument in the
+* :mod:`builtins`:
+ Passing a complex number as the *real* or *imag* argument in the
:func:`complex` constructor is now deprecated; it should only be passed
as a single positional argument.
(Contributed by Serhiy Storchaka in :gh:`109218`.)
-* :term:`Soft deprecate ` :func:`os.popen` and
+* :mod:`os`:
+ :term:`Soft deprecate ` :func:`os.popen` and
:func:`os.spawn* ` functions. They should no longer be used to
write new code. The :mod:`subprocess` module is recommended instead.
(Contributed by Victor Stinner in :gh:`120743`.)
-* Deprecate :meth:`symtable.Class.get_methods` due to the lack of interest.
+* :mod:`symtable`:
+ Deprecate :meth:`symtable.Class.get_methods` due to the lack of interest.
(Contributed by Bénédikt Tran in :gh:`119698`.)
+.. Add deprecations above alphabetically, not here at the end.
+
+.. include:: ../deprecations/pending-removal-in-3.15.rst
+
+.. include:: ../deprecations/pending-removal-in-3.16.rst
+
+.. include:: ../deprecations/pending-removal-in-future.rst
Removed
=======
@@ -262,7 +271,6 @@ asyncio
(Contributed by Kumar Aditya in :gh:`120804`.)
-
collections.abc
---------------
From 45614ecb2bdc2b984f051c7eade39458a3f8709f Mon Sep 17 00:00:00 2001
From: Jelle Zijlstra
Date: Sat, 27 Jul 2024 09:36:06 -0700
Subject: [PATCH 054/139] gh-119180: Use type descriptors to access annotations
(PEP 749) (#122074)
---
Lib/annotationlib.py | 36 +++++++-
Lib/test/test_annotationlib.py | 84 ++++++++++++++++++-
...-07-23-17-13-10.gh-issue-119180.5PZELo.rst | 2 +
3 files changed, 117 insertions(+), 5 deletions(-)
create mode 100644 Misc/NEWS.d/next/Library/2024-07-23-17-13-10.gh-issue-119180.5PZELo.rst
diff --git a/Lib/annotationlib.py b/Lib/annotationlib.py
index b4036ffb189c2d..eea24232f9f0d0 100644
--- a/Lib/annotationlib.py
+++ b/Lib/annotationlib.py
@@ -524,6 +524,27 @@ def call_annotate_function(annotate, format, owner=None):
raise ValueError(f"Invalid format: {format!r}")
+# We use the descriptors from builtins.type instead of accessing
+# .__annotations__ and .__annotate__ directly on class objects, because
+# otherwise we could get wrong results in some cases involving metaclasses.
+# See PEP 749.
+_BASE_GET_ANNOTATE = type.__dict__["__annotate__"].__get__
+_BASE_GET_ANNOTATIONS = type.__dict__["__annotations__"].__get__
+
+
+def get_annotate_function(obj):
+ """Get the __annotate__ function for an object.
+
+ obj may be a function, class, or module, or a user-defined type with
+ an `__annotate__` attribute.
+
+ Returns the __annotate__ function or None.
+ """
+ if isinstance(obj, type):
+ return _BASE_GET_ANNOTATE(obj)
+ return getattr(obj, "__annotate__", None)
+
+
def get_annotations(
obj, *, globals=None, locals=None, eval_str=False, format=Format.VALUE
):
@@ -576,16 +597,23 @@ def get_annotations(
# For VALUE format, we look at __annotations__ directly.
if format != Format.VALUE:
- annotate = getattr(obj, "__annotate__", None)
+ annotate = get_annotate_function(obj)
if annotate is not None:
ann = call_annotate_function(annotate, format, owner=obj)
if not isinstance(ann, dict):
raise ValueError(f"{obj!r}.__annotate__ returned a non-dict")
return dict(ann)
- ann = getattr(obj, "__annotations__", None)
- if ann is None:
- return {}
+ if isinstance(obj, type):
+ try:
+ ann = _BASE_GET_ANNOTATIONS(obj)
+ except AttributeError:
+ # For static types, the descriptor raises AttributeError.
+ return {}
+ else:
+ ann = getattr(obj, "__annotations__", None)
+ if ann is None:
+ return {}
if not isinstance(ann, dict):
raise ValueError(f"{obj!r}.__annotations__ is neither a dict nor None")
diff --git a/Lib/test/test_annotationlib.py b/Lib/test/test_annotationlib.py
index e68d63c91d1a73..e459d27d3c4b38 100644
--- a/Lib/test/test_annotationlib.py
+++ b/Lib/test/test_annotationlib.py
@@ -2,8 +2,10 @@
import annotationlib
import functools
+import itertools
import pickle
import unittest
+from annotationlib import Format, get_annotations, get_annotate_function
from typing import Unpack
from test.test_inspect import inspect_stock_annotations
@@ -767,5 +769,85 @@ def test_pep_695_generics_with_future_annotations_nested_in_function(self):
self.assertEqual(
set(results.generic_func_annotations.values()),
- set(results.generic_func.__type_params__)
+ set(results.generic_func.__type_params__),
)
+
+
+class MetaclassTests(unittest.TestCase):
+ def test_annotated_meta(self):
+ class Meta(type):
+ a: int
+
+ class X(metaclass=Meta):
+ pass
+
+ class Y(metaclass=Meta):
+ b: float
+
+ self.assertEqual(get_annotations(Meta), {"a": int})
+ self.assertEqual(get_annotate_function(Meta)(Format.VALUE), {"a": int})
+
+ self.assertEqual(get_annotations(X), {})
+ self.assertIs(get_annotate_function(X), None)
+
+ self.assertEqual(get_annotations(Y), {"b": float})
+ self.assertEqual(get_annotate_function(Y)(Format.VALUE), {"b": float})
+
+ def test_unannotated_meta(self):
+ class Meta(type): pass
+
+ class X(metaclass=Meta):
+ a: str
+
+ class Y(X): pass
+
+ self.assertEqual(get_annotations(Meta), {})
+ self.assertIs(get_annotate_function(Meta), None)
+
+ self.assertEqual(get_annotations(Y), {})
+ self.assertIs(get_annotate_function(Y), None)
+
+ self.assertEqual(get_annotations(X), {"a": str})
+ self.assertEqual(get_annotate_function(X)(Format.VALUE), {"a": str})
+
+ def test_ordering(self):
+ # Based on a sample by David Ellis
+ # https://discuss.python.org/t/pep-749-implementing-pep-649/54974/38
+
+ def make_classes():
+ class Meta(type):
+ a: int
+ expected_annotations = {"a": int}
+
+ class A(type, metaclass=Meta):
+ b: float
+ expected_annotations = {"b": float}
+
+ class B(metaclass=A):
+ c: str
+ expected_annotations = {"c": str}
+
+ class C(B):
+ expected_annotations = {}
+
+ class D(metaclass=Meta):
+ expected_annotations = {}
+
+ return Meta, A, B, C, D
+
+ classes = make_classes()
+ class_count = len(classes)
+ for order in itertools.permutations(range(class_count), class_count):
+ names = ", ".join(classes[i].__name__ for i in order)
+ with self.subTest(names=names):
+ classes = make_classes() # Regenerate classes
+ for i in order:
+ get_annotations(classes[i])
+ for c in classes:
+ with self.subTest(c=c):
+ self.assertEqual(get_annotations(c), c.expected_annotations)
+ annotate_func = get_annotate_function(c)
+ if c.expected_annotations:
+ self.assertEqual(annotate_func(Format.VALUE), c.expected_annotations)
+ else:
+ self.assertIs(annotate_func, None)
diff --git a/Misc/NEWS.d/next/Library/2024-07-23-17-13-10.gh-issue-119180.5PZELo.rst b/Misc/NEWS.d/next/Library/2024-07-23-17-13-10.gh-issue-119180.5PZELo.rst
new file mode 100644
index 00000000000000..d65e89f7523b0a
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-07-23-17-13-10.gh-issue-119180.5PZELo.rst
@@ -0,0 +1,2 @@
+Fix handling of classes with custom metaclasses in
+``annotationlib.get_annotations``.
From cbac8a3888411587beb026e246889154fbdd49a3 Mon Sep 17 00:00:00 2001
From: Barney Gale
Date: Sat, 27 Jul 2024 18:03:18 +0100
Subject: [PATCH 055/139] GH-121462: pathlib docs: improve table of
corresponding os/os.path functions (#121465)
Re-order table of corresponding functions with the following priorities:
1. Pure functionality is at the top
2. `os.path` functions are shown before `os` functions
3. Similar functionality is kept together
4. Functionality follows docs order where possible
Add a few missed correspondences:
- `os.path.isjunction` and `Path.is_junction`
- `os.path.ismount` and `Path.is_mount`
- `os.lstat()` and `Path.lstat()`
- `os.lchmod()` and `Path.lchmod()`
Also add footnotes describing a few differences.
---
Doc/library/pathlib.rst | 87 ++++++++++++++++++++++++-----------------
1 file changed, 51 insertions(+), 36 deletions(-)
diff --git a/Doc/library/pathlib.rst b/Doc/library/pathlib.rst
index 41b2d40a504a12..60c099af928123 100644
--- a/Doc/library/pathlib.rst
+++ b/Doc/library/pathlib.rst
@@ -1864,39 +1864,54 @@ Corresponding tools
Below is a table mapping various :mod:`os` functions to their corresponding
:class:`PurePath`/:class:`Path` equivalent.
-==================================== ==============================
-:mod:`os` and :mod:`os.path` :mod:`pathlib`
-==================================== ==============================
-:func:`os.path.abspath` :meth:`Path.absolute`
-:func:`os.path.realpath` :meth:`Path.resolve`
-:func:`os.chmod` :meth:`Path.chmod`
-:func:`os.mkdir` :meth:`Path.mkdir`
-:func:`os.makedirs` :meth:`Path.mkdir`
-:func:`os.rename` :meth:`Path.rename`
-:func:`os.replace` :meth:`Path.replace`
-:func:`os.rmdir` :meth:`Path.rmdir`
-:func:`os.remove`, :func:`os.unlink` :meth:`Path.unlink`
-:func:`os.getcwd` :func:`Path.cwd`
-:func:`os.path.exists` :meth:`Path.exists`
-:func:`os.path.expanduser` :meth:`Path.expanduser` and
- :meth:`Path.home`
-:func:`os.listdir` :meth:`Path.iterdir`
-:func:`os.walk` :meth:`Path.walk`
-:func:`os.path.isdir` :meth:`Path.is_dir`
-:func:`os.path.isfile` :meth:`Path.is_file`
-:func:`os.path.islink` :meth:`Path.is_symlink`
-:func:`os.link` :meth:`Path.hardlink_to`
-:func:`os.symlink` :meth:`Path.symlink_to`
-:func:`os.readlink` :meth:`Path.readlink`
-:func:`os.path.relpath` :meth:`PurePath.relative_to`
-:func:`os.stat` :meth:`Path.stat`,
- :meth:`Path.owner`,
- :meth:`Path.group`
-:func:`os.path.isabs` :meth:`PurePath.is_absolute`
-:func:`os.path.join` :func:`PurePath.joinpath`
-:func:`os.path.basename` :attr:`PurePath.name`
-:func:`os.path.dirname` :attr:`PurePath.parent`
-:func:`os.path.samefile` :meth:`Path.samefile`
-:func:`os.path.splitext` :attr:`PurePath.stem` and
- :attr:`PurePath.suffix`
-==================================== ==============================
+===================================== ==============================================
+:mod:`os` and :mod:`os.path` :mod:`pathlib`
+===================================== ==============================================
+:func:`os.path.dirname` :attr:`PurePath.parent`
+:func:`os.path.basename` :attr:`PurePath.name`
+:func:`os.path.splitext` :attr:`PurePath.stem`, :attr:`PurePath.suffix`
+:func:`os.path.join` :meth:`PurePath.joinpath`
+:func:`os.path.isabs` :meth:`PurePath.is_absolute`
+:func:`os.path.relpath` :meth:`PurePath.relative_to` [1]_
+:func:`os.path.expanduser` :meth:`Path.expanduser` [2]_
+:func:`os.path.realpath` :meth:`Path.resolve`
+:func:`os.path.abspath` :meth:`Path.absolute` [3]_
+:func:`os.path.exists` :meth:`Path.exists`
+:func:`os.path.isfile` :meth:`Path.is_file`
+:func:`os.path.isdir` :meth:`Path.is_dir`
+:func:`os.path.islink` :meth:`Path.is_symlink`
+:func:`os.path.isjunction` :meth:`Path.is_junction`
+:func:`os.path.ismount` :meth:`Path.is_mount`
+:func:`os.path.samefile` :meth:`Path.samefile`
+:func:`os.getcwd` :meth:`Path.cwd`
+:func:`os.stat` :meth:`Path.stat`
+:func:`os.lstat` :meth:`Path.lstat`
+:func:`os.listdir` :meth:`Path.iterdir`
+:func:`os.walk` :meth:`Path.walk` [4]_
+:func:`os.mkdir`, :func:`os.makedirs` :meth:`Path.mkdir`
+:func:`os.link` :meth:`Path.hardlink_to`
+:func:`os.symlink` :meth:`Path.symlink_to`
+:func:`os.readlink` :meth:`Path.readlink`
+:func:`os.rename` :meth:`Path.rename`
+:func:`os.replace` :meth:`Path.replace`
+:func:`os.remove`, :func:`os.unlink` :meth:`Path.unlink`
+:func:`os.rmdir` :meth:`Path.rmdir`
+:func:`os.chmod` :meth:`Path.chmod`
+:func:`os.lchmod` :meth:`Path.lchmod`
+===================================== ==============================================
+
+.. rubric:: Footnotes
+
+.. [1] :func:`os.path.relpath` calls :func:`~os.path.abspath` to make paths
+ absolute and remove "``..``" parts, whereas :meth:`PurePath.relative_to`
+ is a lexical operation that raises :exc:`ValueError` when its inputs'
+ anchors differ (e.g. if one path is absolute and the other relative.)
+.. [2] :func:`os.path.expanduser` returns the path unchanged if the home
+ directory can't be resolved, whereas :meth:`Path.expanduser` raises
+ :exc:`RuntimeError`.
+.. [3] :func:`os.path.abspath` removes "``..``" components without resolving
+ symlinks, which may change the meaning of the path, whereas
+ :meth:`Path.absolute` leaves any "``..``" components in the path.
+.. [4] :func:`os.walk` always follows symlinks when categorizing paths into
+ *dirnames* and *filenames*, whereas :meth:`Path.walk` categorizes all
+ symlinks into *filenames* when *follow_symlinks* is false (the default.)
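
A short sketch of the difference described in footnote [1] (paths are purely illustrative):

    import os.path
    from pathlib import PurePath

    # os.path.relpath() makes both paths absolute and inserts ".." parts:
    print(os.path.relpath("/usr/lib", "/usr/bin"))      # '../lib'

    # PurePath.relative_to() is a lexical operation and refuses to guess:
    try:
        PurePath("/usr/lib").relative_to("/usr/bin")
    except ValueError as exc:
        print(exc)      # not a subpath -> ValueError
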
From ae192262ad1cffb6ece9d16e67804386c382be0c Mon Sep 17 00:00:00 2001
From: Jelle Zijlstra
Date: Sat, 27 Jul 2024 10:24:10 -0700
Subject: [PATCH 056/139] gh-119180: Add evaluate functions for type params and
type aliases (#122212)
---
Include/internal/pycore_global_objects.h | 1 +
Include/internal/pycore_typevarobject.h | 1 +
Lib/annotationlib.py | 19 +-
Lib/test/test_annotationlib.py | 19 ++
Lib/test/test_type_params.py | 43 ++-
...-07-23-22-26-00.gh-issue-119180.B2IVT8.rst | 7 +
Objects/genericaliasobject.c | 70 +----
Objects/typevarobject.c | 274 ++++++++++++++++++
Objects/unionobject.c | 66 +----
Python/compile.c | 13 +-
Python/symtable.c | 31 +-
11 files changed, 385 insertions(+), 159 deletions(-)
create mode 100644 Misc/NEWS.d/next/Library/2024-07-23-22-26-00.gh-issue-119180.B2IVT8.rst
diff --git a/Include/internal/pycore_global_objects.h b/Include/internal/pycore_global_objects.h
index 327fcc24cb29f1..913dce6f1ec0fe 100644
--- a/Include/internal/pycore_global_objects.h
+++ b/Include/internal/pycore_global_objects.h
@@ -81,6 +81,7 @@ struct _Py_interp_cached_objects {
PyTypeObject *paramspec_type;
PyTypeObject *paramspecargs_type;
PyTypeObject *paramspeckwargs_type;
+ PyTypeObject *constevaluator_type;
};
#define _Py_INTERP_STATIC_OBJECT(interp, NAME) \
diff --git a/Include/internal/pycore_typevarobject.h b/Include/internal/pycore_typevarobject.h
index a368edebd622a1..4d7556e68cdaee 100644
--- a/Include/internal/pycore_typevarobject.h
+++ b/Include/internal/pycore_typevarobject.h
@@ -16,6 +16,7 @@ extern PyObject *_Py_subscript_generic(PyThreadState *, PyObject *);
extern PyObject *_Py_set_typeparam_default(PyThreadState *, PyObject *, PyObject *);
extern int _Py_initialize_generic(PyInterpreterState *);
extern void _Py_clear_generic_types(PyInterpreterState *);
+extern int _Py_typing_type_repr(PyUnicodeWriter *, PyObject *);
extern PyTypeObject _PyTypeAlias_Type;
extern PyTypeObject _PyNoDefault_Type;
diff --git a/Lib/annotationlib.py b/Lib/annotationlib.py
index eea24232f9f0d0..141e31bbf910e3 100644
--- a/Lib/annotationlib.py
+++ b/Lib/annotationlib.py
@@ -413,7 +413,16 @@ def __missing__(self, key):
return fwdref
-def call_annotate_function(annotate, format, owner=None):
+def call_evaluate_function(evaluate, format, *, owner=None):
+ """Call an evaluate function. Evaluate functions are normally generated for
+ the value of type aliases and the bounds, constraints, and defaults of
+ type parameter objects.
+ """
+ return call_annotate_function(evaluate, format, owner=owner, _is_evaluate=True)
+
+
+def call_annotate_function(annotate, format, *, owner=None,
+ _is_evaluate=False):
"""Call an __annotate__ function. __annotate__ functions are normally
generated by the compiler to defer the evaluation of annotations. They
can be called with any of the format arguments in the Format enum, but
@@ -459,8 +468,11 @@ def call_annotate_function(annotate, format, owner=None):
closure = tuple(new_closure)
else:
closure = None
- func = types.FunctionType(annotate.__code__, globals, closure=closure)
+ func = types.FunctionType(annotate.__code__, globals, closure=closure,
+ argdefs=annotate.__defaults__, kwdefaults=annotate.__kwdefaults__)
annos = func(Format.VALUE)
+ if _is_evaluate:
+ return annos if isinstance(annos, str) else repr(annos)
return {
key: val if isinstance(val, str) else repr(val)
for key, val in annos.items()
@@ -511,7 +523,8 @@ def call_annotate_function(annotate, format, owner=None):
closure = tuple(new_closure)
else:
closure = None
- func = types.FunctionType(annotate.__code__, globals, closure=closure)
+ func = types.FunctionType(annotate.__code__, globals, closure=closure,
+ argdefs=annotate.__defaults__, kwdefaults=annotate.__kwdefaults__)
result = func(Format.VALUE)
for obj in globals.stringifiers:
obj.__class__ = ForwardRef
diff --git a/Lib/test/test_annotationlib.py b/Lib/test/test_annotationlib.py
index e459d27d3c4b38..e4dcdb6b58d009 100644
--- a/Lib/test/test_annotationlib.py
+++ b/Lib/test/test_annotationlib.py
@@ -773,6 +773,25 @@ def test_pep_695_generics_with_future_annotations_nested_in_function(self):
)
+class TestCallEvaluateFunction(unittest.TestCase):
+ def test_evaluation(self):
+ def evaluate(format, exc=NotImplementedError):
+ if format != 1:
+ raise exc
+ return undefined
+
+ with self.assertRaises(NameError):
+ annotationlib.call_evaluate_function(evaluate, annotationlib.Format.VALUE)
+ self.assertEqual(
+ annotationlib.call_evaluate_function(evaluate, annotationlib.Format.FORWARDREF),
+ annotationlib.ForwardRef("undefined"),
+ )
+ self.assertEqual(
+ annotationlib.call_evaluate_function(evaluate, annotationlib.Format.SOURCE),
+ "undefined",
+ )
+
+
class MetaclassTests(unittest.TestCase):
def test_annotated_meta(self):
class Meta(type):
diff --git a/Lib/test/test_type_params.py b/Lib/test/test_type_params.py
index bf1a34b9fc82b3..d9c9ec4eddc368 100644
--- a/Lib/test/test_type_params.py
+++ b/Lib/test/test_type_params.py
@@ -1,3 +1,4 @@
+import annotationlib
import asyncio
import textwrap
import types
@@ -6,7 +7,7 @@
import weakref
from test.support import requires_working_socket, check_syntax_error, run_code
-from typing import Generic, NoDefault, Sequence, TypeVar, TypeVarTuple, ParamSpec, get_args
+from typing import Generic, NoDefault, Sequence, TypeAliasType, TypeVar, TypeVarTuple, ParamSpec, get_args
class TypeParamsInvalidTest(unittest.TestCase):
@@ -1394,3 +1395,43 @@ def test_symtable_key_regression_name(self):
self.assertEqual(ns["X1"].__type_params__[0].__default__, "A")
self.assertEqual(ns["X2"].__type_params__[0].__default__, "B")
+
+
+class TestEvaluateFunctions(unittest.TestCase):
+ def test_general(self):
+ type Alias = int
+ Alias2 = TypeAliasType("Alias2", int)
+ def f[T: int = int, **P = int, *Ts = int](): pass
+ T, P, Ts = f.__type_params__
+ T2 = TypeVar("T2", bound=int, default=int)
+ P2 = ParamSpec("P2", default=int)
+ Ts2 = TypeVarTuple("Ts2", default=int)
+ cases = [
+ Alias.evaluate_value,
+ Alias2.evaluate_value,
+ T.evaluate_bound,
+ T.evaluate_default,
+ P.evaluate_default,
+ Ts.evaluate_default,
+ T2.evaluate_bound,
+ T2.evaluate_default,
+ P2.evaluate_default,
+ Ts2.evaluate_default,
+ ]
+ for case in cases:
+ with self.subTest(case=case):
+ self.assertIs(case(1), int)
+ self.assertIs(annotationlib.call_evaluate_function(case, annotationlib.Format.VALUE), int)
+ self.assertIs(annotationlib.call_evaluate_function(case, annotationlib.Format.FORWARDREF), int)
+ self.assertEqual(annotationlib.call_evaluate_function(case, annotationlib.Format.SOURCE), 'int')
+
+ def test_constraints(self):
+ def f[T: (int, str)](): pass
+ T, = f.__type_params__
+ T2 = TypeVar("T2", int, str)
+ for case in [T, T2]:
+ with self.subTest(case=case):
+ self.assertEqual(case.evaluate_constraints(1), (int, str))
+ self.assertEqual(annotationlib.call_evaluate_function(case.evaluate_constraints, annotationlib.Format.VALUE), (int, str))
+ self.assertEqual(annotationlib.call_evaluate_function(case.evaluate_constraints, annotationlib.Format.FORWARDREF), (int, str))
+ self.assertEqual(annotationlib.call_evaluate_function(case.evaluate_constraints, annotationlib.Format.SOURCE), '(int, str)')
diff --git a/Misc/NEWS.d/next/Library/2024-07-23-22-26-00.gh-issue-119180.B2IVT8.rst b/Misc/NEWS.d/next/Library/2024-07-23-22-26-00.gh-issue-119180.B2IVT8.rst
new file mode 100644
index 00000000000000..13f51e4c42f4a0
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-07-23-22-26-00.gh-issue-119180.B2IVT8.rst
@@ -0,0 +1,7 @@
+As part of :pep:`749`, add the following attributes for customizing
+evaluation of annotation scopes:
+
+* ``evaluate_value`` on :class:`typing.TypeAliasType`
+* ``evaluate_bound``, ``evaluate_constraints``, and ``evaluate_default`` on :class:`typing.TypeVar`
+* ``evaluate_default`` on :class:`typing.ParamSpec`
+* ``evaluate_default`` on :class:`typing.TypeVarTuple`
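
A brief usage sketch of the new evaluate functions, following the tests added in this patch:

    from annotationlib import Format, call_evaluate_function
    from typing import TypeVar

    type Alias = int
    T = TypeVar("T", bound=int)

    # Evaluate functions take a format argument, like __annotate__ functions.
    print(Alias.evaluate_value(Format.VALUE))    # <class 'int'>
    print(T.evaluate_bound(Format.VALUE))        # <class 'int'>

    # call_evaluate_function() also supports the FORWARDREF and SOURCE
    # formats, even for eagerly evaluated values such as T's bound.
    print(call_evaluate_function(T.evaluate_bound, Format.SOURCE))   # 'int'
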
diff --git a/Objects/genericaliasobject.c b/Objects/genericaliasobject.c
index 96c96491501a2c..64b4e2645cbaee 100644
--- a/Objects/genericaliasobject.c
+++ b/Objects/genericaliasobject.c
@@ -4,6 +4,7 @@
#include "pycore_ceval.h" // _PyEval_GetBuiltin()
#include "pycore_modsupport.h" // _PyArg_NoKeywords()
#include "pycore_object.h"
+#include "pycore_typevarobject.h" // _Py_typing_type_repr
#include "pycore_unionobject.h" // _Py_union_type_or, _PyGenericAlias_Check
@@ -50,69 +51,6 @@ ga_traverse(PyObject *self, visitproc visit, void *arg)
return 0;
}
-static int
-ga_repr_item(PyUnicodeWriter *writer, PyObject *p)
-{
- PyObject *qualname = NULL;
- PyObject *module = NULL;
- int rc;
-
- if (p == Py_Ellipsis) {
- // The Ellipsis object
- rc = PyUnicodeWriter_WriteUTF8(writer, "...", 3);
- goto done;
- }
-
- if ((rc = PyObject_HasAttrWithError(p, &_Py_ID(__origin__))) > 0 &&
- (rc = PyObject_HasAttrWithError(p, &_Py_ID(__args__))) > 0)
- {
- // It looks like a GenericAlias
- goto use_repr;
- }
- if (rc < 0) {
- goto error;
- }
-
- if (PyObject_GetOptionalAttr(p, &_Py_ID(__qualname__), &qualname) < 0) {
- goto error;
- }
- if (qualname == NULL) {
- goto use_repr;
- }
- if (PyObject_GetOptionalAttr(p, &_Py_ID(__module__), &module) < 0) {
- goto error;
- }
- if (module == NULL || module == Py_None) {
- goto use_repr;
- }
-
- // Looks like a class
- if (PyUnicode_Check(module) &&
- _PyUnicode_EqualToASCIIString(module, "builtins"))
- {
- // builtins don't need a module name
- rc = PyUnicodeWriter_WriteStr(writer, qualname);
- goto done;
- }
- else {
- rc = PyUnicodeWriter_Format(writer, "%S.%S", module, qualname);
- goto done;
- }
-
-error:
- rc = -1;
- goto done;
-
-use_repr:
- rc = PyUnicodeWriter_WriteRepr(writer, p);
- goto done;
-
-done:
- Py_XDECREF(qualname);
- Py_XDECREF(module);
- return rc;
-}
-
static int
ga_repr_items_list(PyUnicodeWriter *writer, PyObject *p)
{
@@ -131,7 +69,7 @@ ga_repr_items_list(PyUnicodeWriter *writer, PyObject *p)
}
}
PyObject *item = PyList_GET_ITEM(p, i);
- if (ga_repr_item(writer, item) < 0) {
+ if (_Py_typing_type_repr(writer, item) < 0) {
return -1;
}
}
@@ -162,7 +100,7 @@ ga_repr(PyObject *self)
goto error;
}
}
- if (ga_repr_item(writer, alias->origin) < 0) {
+ if (_Py_typing_type_repr(writer, alias->origin) < 0) {
goto error;
}
if (PyUnicodeWriter_WriteChar(writer, '[') < 0) {
@@ -181,7 +119,7 @@ ga_repr(PyObject *self)
goto error;
}
}
- else if (ga_repr_item(writer, p) < 0) {
+ else if (_Py_typing_type_repr(writer, p) < 0) {
goto error;
}
}
diff --git a/Objects/typevarobject.c b/Objects/typevarobject.c
index c8ab14053de418..fb1f260571b582 100644
--- a/Objects/typevarobject.c
+++ b/Objects/typevarobject.c
@@ -116,6 +116,201 @@ PyTypeObject _PyNoDefault_Type = {
PyObject _Py_NoDefaultStruct = _PyObject_HEAD_INIT(&_PyNoDefault_Type);
+typedef struct {
+ PyObject_HEAD
+ PyObject *value;
+} constevaluatorobject;
+
+static void
+constevaluator_dealloc(PyObject *self)
+{
+ PyTypeObject *tp = Py_TYPE(self);
+ constevaluatorobject *ce = (constevaluatorobject *)self;
+
+ _PyObject_GC_UNTRACK(self);
+
+ Py_XDECREF(ce->value);
+
+ Py_TYPE(self)->tp_free(self);
+ Py_DECREF(tp);
+}
+
+static int
+constevaluator_traverse(PyObject *self, visitproc visit, void *arg)
+{
+ constevaluatorobject *ce = (constevaluatorobject *)self;
+ Py_VISIT(ce->value);
+ return 0;
+}
+
+static int
+constevaluator_clear(PyObject *self)
+{
+ Py_CLEAR(((constevaluatorobject *)self)->value);
+ return 0;
+}
+
+static PyObject *
+constevaluator_repr(PyObject *self, PyObject *repr)
+{
+ PyObject *value = ((constevaluatorobject *)self)->value;
+ return PyUnicode_FromFormat("<constevaluator %R>", value);
+}
+
+static PyObject *
+constevaluator_call(PyObject *self, PyObject *args, PyObject *kwargs)
+{
+ if (!_PyArg_NoKeywords("constevaluator.__call__", kwargs)) {
+ return NULL;
+ }
+ int format;
+ if (!PyArg_ParseTuple(args, "i:constevaluator.__call__", &format)) {
+ return NULL;
+ }
+ PyObject *value = ((constevaluatorobject *)self)->value;
+ if (format == 3) { // SOURCE
+ _PyUnicodeWriter writer;
+ _PyUnicodeWriter_Init(&writer);
+ if (PyTuple_Check(value)) {
+ if (_PyUnicodeWriter_WriteASCIIString(&writer, "(", 1) < 0) {
+ _PyUnicodeWriter_Dealloc(&writer);
+ return NULL;
+ }
+ for (Py_ssize_t i = 0; i < PyTuple_GET_SIZE(value); i++) {
+ PyObject *item = PyTuple_GET_ITEM(value, i);
+ if (i > 0) {
+ if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ", 2) < 0) {
+ _PyUnicodeWriter_Dealloc(&writer);
+ return NULL;
+ }
+ }
+ if (_Py_typing_type_repr(&writer, item) < 0) {
+ _PyUnicodeWriter_Dealloc(&writer);
+ return NULL;
+ }
+ }
+ if (_PyUnicodeWriter_WriteASCIIString(&writer, ")", 1) < 0) {
+ _PyUnicodeWriter_Dealloc(&writer);
+ return NULL;
+ }
+ }
+ else {
+ if (_Py_typing_type_repr(&writer, value) < 0) {
+ _PyUnicodeWriter_Dealloc(&writer);
+ return NULL;
+ }
+ }
+ return _PyUnicodeWriter_Finish(&writer);
+ }
+ return Py_NewRef(value);
+}
+
+static PyObject *
+constevaluator_alloc(PyObject *value)
+{
+ PyTypeObject *tp = _PyInterpreterState_GET()->cached_objects.constevaluator_type;
+ assert(tp != NULL);
+ constevaluatorobject *ce = PyObject_GC_New(constevaluatorobject, tp);
+ if (ce == NULL) {
+ return NULL;
+ }
+ ce->value = Py_NewRef(value);
+ _PyObject_GC_TRACK(ce);
+ return (PyObject *)ce;
+
+}
+
+PyDoc_STRVAR(constevaluator_doc,
+"_ConstEvaluator()\n"
+"--\n\n"
+"Internal type for implementing evaluation functions.");
+
+static PyType_Slot constevaluator_slots[] = {
+ {Py_tp_doc, (void *)constevaluator_doc},
+ {Py_tp_dealloc, constevaluator_dealloc},
+ {Py_tp_traverse, constevaluator_traverse},
+ {Py_tp_clear, constevaluator_clear},
+ {Py_tp_repr, constevaluator_repr},
+ {Py_tp_call, constevaluator_call},
+ {Py_tp_alloc, PyType_GenericAlloc},
+ {Py_tp_free, PyObject_GC_Del},
+ {0, NULL},
+};
+
+PyType_Spec constevaluator_spec = {
+ .name = "_typing._ConstEvaluator",
+ .basicsize = sizeof(constevaluatorobject),
+ .flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | Py_TPFLAGS_IMMUTABLETYPE,
+ .slots = constevaluator_slots,
+};
+
+int
+_Py_typing_type_repr(PyUnicodeWriter *writer, PyObject *p)
+{
+ PyObject *qualname = NULL;
+ PyObject *module = NULL;
+ PyObject *r = NULL;
+ int rc;
+
+ if (p == Py_Ellipsis) {
+ // The Ellipsis object
+ r = PyUnicode_FromString("...");
+ goto exit;
+ }
+
+ if (p == (PyObject *)&_PyNone_Type) {
+ return _PyUnicodeWriter_WriteASCIIString(writer, "None", 4);
+ }
+
+ if ((rc = PyObject_HasAttrWithError(p, &_Py_ID(__origin__))) > 0 &&
+ (rc = PyObject_HasAttrWithError(p, &_Py_ID(__args__))) > 0)
+ {
+ // It looks like a GenericAlias
+ goto use_repr;
+ }
+ if (rc < 0) {
+ goto exit;
+ }
+
+ if (PyObject_GetOptionalAttr(p, &_Py_ID(__qualname__), &qualname) < 0) {
+ goto exit;
+ }
+ if (qualname == NULL) {
+ goto use_repr;
+ }
+ if (PyObject_GetOptionalAttr(p, &_Py_ID(__module__), &module) < 0) {
+ goto exit;
+ }
+ if (module == NULL || module == Py_None) {
+ goto use_repr;
+ }
+
+ // Looks like a class
+ if (PyUnicode_Check(module) &&
+ _PyUnicode_EqualToASCIIString(module, "builtins"))
+ {
+ // builtins don't need a module name
+ r = PyObject_Str(qualname);
+ goto exit;
+ }
+ else {
+ r = PyUnicode_FromFormat("%S.%S", module, qualname);
+ goto exit;
+ }
+
+use_repr:
+ r = PyObject_Repr(p);
+exit:
+ Py_XDECREF(qualname);
+ Py_XDECREF(module);
+ if (r == NULL) {
+ return -1;
+ }
+ rc = _PyUnicodeWriter_WriteStr(writer, r);
+ Py_DECREF(r);
+ return rc;
+}
+
static PyObject *
call_typing_func_object(const char *name, PyObject **args, size_t nargs)
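
In Python terms, the const evaluator added above behaves roughly like this simplified model (the real _Py_typing_type_repr also handles generic aliases and non-builtin modules):

    from annotationlib import Format

    def _type_repr(value):
        # Simplified stand-in for _Py_typing_type_repr(): builtin classes
        # are written bare, everything else falls back to repr().
        if isinstance(value, type) and value.__module__ == "builtins":
            return value.__qualname__
        return repr(value)

    class ConstEvaluator:
        """Rough pure-Python model of the internal _typing._ConstEvaluator."""
        def __init__(self, value):
            self.value = value

        def __call__(self, format):
            if format == Format.SOURCE:            # format == 3 in the C code
                if isinstance(self.value, tuple):
                    return "(" + ", ".join(_type_repr(v) for v in self.value) + ")"
                return _type_repr(self.value)
            return self.value                      # VALUE/FORWARDREF: return as-is

    print(ConstEvaluator((int, str))(Format.SOURCE))   # '(int, str)'
    print(ConstEvaluator(int)(Format.VALUE))           # <class 'int'>
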
@@ -364,10 +559,49 @@ typevar_constraints(typevarobject *self, void *Py_UNUSED(ignored))
return constraints;
}
+static PyObject *
+typevar_evaluate_bound(typevarobject *self, void *Py_UNUSED(ignored))
+{
+ if (self->evaluate_bound != NULL) {
+ return Py_NewRef(self->evaluate_bound);
+ }
+ if (self->bound != NULL) {
+ return constevaluator_alloc(self->bound);
+ }
+ Py_RETURN_NONE;
+}
+
+static PyObject *
+typevar_evaluate_constraints(typevarobject *self, void *Py_UNUSED(ignored))
+{
+ if (self->evaluate_constraints != NULL) {
+ return Py_NewRef(self->evaluate_constraints);
+ }
+ if (self->constraints != NULL) {
+ return constevaluator_alloc(self->constraints);
+ }
+ Py_RETURN_NONE;
+}
+
+static PyObject *
+typevar_evaluate_default(typevarobject *self, void *Py_UNUSED(ignored))
+{
+ if (self->evaluate_default != NULL) {
+ return Py_NewRef(self->evaluate_default);
+ }
+ if (self->default_value != NULL) {
+ return constevaluator_alloc(self->default_value);
+ }
+ Py_RETURN_NONE;
+}
+
static PyGetSetDef typevar_getset[] = {
{"__bound__", (getter)typevar_bound, NULL, NULL, NULL},
{"__constraints__", (getter)typevar_constraints, NULL, NULL, NULL},
{"__default__", (getter)typevar_default, NULL, NULL, NULL},
+ {"evaluate_bound", (getter)typevar_evaluate_bound, NULL, NULL, NULL},
+ {"evaluate_constraints", (getter)typevar_evaluate_constraints, NULL, NULL, NULL},
+ {"evaluate_default", (getter)typevar_evaluate_default, NULL, NULL, NULL},
{0}
};
@@ -995,10 +1229,23 @@ paramspec_default(paramspecobject *self, void *unused)
return default_value;
}
+static PyObject *
+paramspec_evaluate_default(paramspecobject *self, void *unused)
+{
+ if (self->evaluate_default != NULL) {
+ return Py_NewRef(self->evaluate_default);
+ }
+ if (self->default_value != NULL) {
+ return constevaluator_alloc(self->default_value);
+ }
+ Py_RETURN_NONE;
+}
+
static PyGetSetDef paramspec_getset[] = {
{"args", (getter)paramspec_args, NULL, PyDoc_STR("Represents positional arguments."), NULL},
{"kwargs", (getter)paramspec_kwargs, NULL, PyDoc_STR("Represents keyword arguments."), NULL},
{"__default__", (getter)paramspec_default, NULL, "The default value for this ParamSpec.", NULL},
+ {"evaluate_default", (getter)paramspec_evaluate_default, NULL, NULL, NULL},
{0},
};
@@ -1437,8 +1684,21 @@ typevartuple_default(typevartupleobject *self, void *unused)
return default_value;
}
+static PyObject *
+typevartuple_evaluate_default(typevartupleobject *self, void *unused)
+{
+ if (self->evaluate_default != NULL) {
+ return Py_NewRef(self->evaluate_default);
+ }
+ if (self->default_value != NULL) {
+ return constevaluator_alloc(self->default_value);
+ }
+ Py_RETURN_NONE;
+}
+
static PyGetSetDef typevartuple_getset[] = {
{"__default__", (getter)typevartuple_default, NULL, "The default value for this TypeVarTuple.", NULL},
+ {"evaluate_default", (getter)typevartuple_evaluate_default, NULL, NULL, NULL},
{0},
};
@@ -1584,6 +1844,17 @@ typealias_value(PyObject *self, void *unused)
return typealias_get_value(ta);
}
+static PyObject *
+typealias_evaluate_value(PyObject *self, void *unused)
+{
+ typealiasobject *ta = (typealiasobject *)self;
+ if (ta->compute_value != NULL) {
+ return Py_NewRef(ta->compute_value);
+ }
+ assert(ta->value != NULL);
+ return constevaluator_alloc(ta->value);
+}
+
static PyObject *
typealias_parameters(PyObject *self, void *unused)
{
@@ -1627,6 +1898,7 @@ static PyGetSetDef typealias_getset[] = {
{"__parameters__", typealias_parameters, (setter)NULL, NULL, NULL},
{"__type_params__", typealias_type_params, (setter)NULL, NULL, NULL},
{"__value__", typealias_value, (setter)NULL, NULL, NULL},
+ {"evaluate_value", typealias_evaluate_value, (setter)NULL, NULL, NULL},
{"__module__", typealias_module, (setter)NULL, NULL, NULL},
{0}
};
@@ -1952,6 +2224,7 @@ int _Py_initialize_generic(PyInterpreterState *interp)
MAKE_TYPE(paramspec);
MAKE_TYPE(paramspecargs);
MAKE_TYPE(paramspeckwargs);
+ MAKE_TYPE(constevaluator);
#undef MAKE_TYPE
return 0;
}
@@ -1964,6 +2237,7 @@ void _Py_clear_generic_types(PyInterpreterState *interp)
Py_CLEAR(interp->cached_objects.paramspec_type);
Py_CLEAR(interp->cached_objects.paramspecargs_type);
Py_CLEAR(interp->cached_objects.paramspeckwargs_type);
+ Py_CLEAR(interp->cached_objects.constevaluator_type);
}
PyObject *
diff --git a/Objects/unionobject.c b/Objects/unionobject.c
index 7931f4345f7fdd..6e65a653a95c46 100644
--- a/Objects/unionobject.c
+++ b/Objects/unionobject.c
@@ -1,11 +1,10 @@
// types.UnionType -- used to represent e.g. Union[int, str], int | str
#include "Python.h"
#include "pycore_object.h" // _PyObject_GC_TRACK/UNTRACK
-#include "pycore_typevarobject.h" // _PyTypeAlias_Type
+#include "pycore_typevarobject.h" // _PyTypeAlias_Type, _Py_typing_type_repr
#include "pycore_unionobject.h"
-
static PyObject *make_union(PyObject *);
@@ -181,67 +180,6 @@ _Py_union_type_or(PyObject* self, PyObject* other)
return new_union;
}
-static int
-union_repr_item(PyUnicodeWriter *writer, PyObject *p)
-{
- PyObject *qualname = NULL;
- PyObject *module = NULL;
- int rc;
-
- if (p == (PyObject *)&_PyNone_Type) {
- return PyUnicodeWriter_WriteUTF8(writer, "None", 4);
- }
-
- if ((rc = PyObject_HasAttrWithError(p, &_Py_ID(__origin__))) > 0 &&
- (rc = PyObject_HasAttrWithError(p, &_Py_ID(__args__))) > 0)
- {
- // It looks like a GenericAlias
- goto use_repr;
- }
- if (rc < 0) {
- goto error;
- }
-
- if (PyObject_GetOptionalAttr(p, &_Py_ID(__qualname__), &qualname) < 0) {
- goto error;
- }
- if (qualname == NULL) {
- goto use_repr;
- }
- if (PyObject_GetOptionalAttr(p, &_Py_ID(__module__), &module) < 0) {
- goto error;
- }
- if (module == NULL || module == Py_None) {
- goto use_repr;
- }
-
- // Looks like a class
- if (PyUnicode_Check(module) &&
- _PyUnicode_EqualToASCIIString(module, "builtins"))
- {
- // builtins don't need a module name
- rc = PyUnicodeWriter_WriteStr(writer, qualname);
- goto done;
- }
- else {
- rc = PyUnicodeWriter_Format(writer, "%S.%S", module, qualname);
- goto done;
- }
-
-error:
- rc = -1;
- goto done;
-
-use_repr:
- rc = PyUnicodeWriter_WriteRepr(writer, p);
- goto done;
-
-done:
- Py_XDECREF(qualname);
- Py_XDECREF(module);
- return rc;
-}
-
static PyObject *
union_repr(PyObject *self)
{
@@ -260,7 +198,7 @@ union_repr(PyObject *self)
goto error;
}
PyObject *p = PyTuple_GET_ITEM(alias->args, i);
- if (union_repr_item(writer, p) < 0) {
+ if (_Py_typing_type_repr(writer, p) < 0) {
goto error;
}
}
diff --git a/Python/compile.c b/Python/compile.c
index d07a435bdf8dac..02b5345cedd0a3 100644
--- a/Python/compile.c
+++ b/Python/compile.c
@@ -1978,8 +1978,9 @@ compiler_type_param_bound_or_default(struct compiler *c, expr_ty e,
identifier name, void *key,
bool allow_starred)
{
- if (compiler_enter_scope(c, name, COMPILER_SCOPE_ANNOTATIONS,
- key, e->lineno, NULL) == -1) {
+ PyObject *defaults = PyTuple_Pack(1, _PyLong_GetOne());
+ ADDOP_LOAD_CONST_NEW(c, LOC(e), defaults);
+ if (compiler_setup_annotations_scope(c, LOC(e), key, name) == -1) {
return ERROR;
}
if (allow_starred && e->kind == Starred_kind) {
@@ -1995,7 +1996,7 @@ compiler_type_param_bound_or_default(struct compiler *c, expr_ty e,
if (co == NULL) {
return ERROR;
}
- if (compiler_make_closure(c, LOC(e), co, 0) < 0) {
+ if (compiler_make_closure(c, LOC(e), co, MAKE_FUNCTION_DEFAULTS) < 0) {
Py_DECREF(co);
return ERROR;
}
@@ -2566,8 +2567,10 @@ compiler_typealias_body(struct compiler *c, stmt_ty s)
{
location loc = LOC(s);
PyObject *name = s->v.TypeAlias.name->v.Name.id;
+ PyObject *defaults = PyTuple_Pack(1, _PyLong_GetOne());
+ ADDOP_LOAD_CONST_NEW(c, loc, defaults);
RETURN_IF_ERROR(
- compiler_enter_scope(c, name, COMPILER_SCOPE_FUNCTION, s, loc.lineno, NULL));
+ compiler_setup_annotations_scope(c, LOC(s), s, name));
/* Make None the first constant, so the evaluate function can't have a
docstring. */
RETURN_IF_ERROR(compiler_add_const(c, Py_None));
@@ -2578,7 +2581,7 @@ compiler_typealias_body(struct compiler *c, stmt_ty s)
if (co == NULL) {
return ERROR;
}
- if (compiler_make_closure(c, loc, co, 0) < 0) {
+ if (compiler_make_closure(c, loc, co, MAKE_FUNCTION_DEFAULTS) < 0) {
Py_DECREF(co);
return ERROR;
}
diff --git a/Python/symtable.c b/Python/symtable.c
index a5fa7588785d8b..88af37198bfba5 100644
--- a/Python/symtable.c
+++ b/Python/symtable.c
@@ -260,6 +260,7 @@ static int symtable_visit_pattern(struct symtable *st, pattern_ty s);
static int symtable_raise_if_annotation_block(struct symtable *st, const char *, expr_ty);
static int symtable_raise_if_not_coroutine(struct symtable *st, const char *msg, _Py_SourceLocation loc);
static int symtable_raise_if_comprehension_block(struct symtable *st, expr_ty);
+static int symtable_add_def(struct symtable *st, PyObject *name, int flag, _Py_SourceLocation loc);
/* For debugging purposes only */
#if _PY_DUMP_SYMTABLE
@@ -1388,6 +1389,16 @@ symtable_enter_block(struct symtable *st, identifier name, _Py_block_ty block,
return 0;
int result = symtable_enter_existing_block(st, ste);
Py_DECREF(ste);
+ if (block == AnnotationBlock || block == TypeVariableBlock || block == TypeAliasBlock) {
+ _Py_DECLARE_STR(format, ".format");
+ // We need to insert code that reads this "parameter" to the function.
+ if (!symtable_add_def(st, &_Py_STR(format), DEF_PARAM, loc)) {
+ return 0;
+ }
+ if (!symtable_add_def(st, &_Py_STR(format), USE, loc)) {
+ return 0;
+ }
+ }
return result;
}
@@ -2630,18 +2641,6 @@ symtable_visit_annotation(struct symtable *st, expr_ty annotation, void *key)
return 0;
}
}
-
- _Py_DECLARE_STR(format, ".format");
- // The generated __annotate__ function takes a single parameter with the
- // internal name ".format".
- if (!symtable_add_def(st, &_Py_STR(format), DEF_PARAM,
- LOCATION(annotation))) {
- return 0;
- }
- if (!symtable_add_def(st, &_Py_STR(format), USE,
- LOCATION(annotation))) {
- return 0;
- }
}
else {
if (!symtable_enter_existing_block(st, parent_ste->ste_annotation_block)) {
@@ -2690,14 +2689,6 @@ symtable_visit_annotations(struct symtable *st, stmt_ty o, arguments_ty a, expr_
return 0;
}
}
- _Py_DECLARE_STR(format, ".format");
- // We need to insert code that reads this "parameter" to the function.
- if (!symtable_add_def(st, &_Py_STR(format), DEF_PARAM, LOCATION(o))) {
- return 0;
- }
- if (!symtable_add_def(st, &_Py_STR(format), USE, LOCATION(o))) {
- return 0;
- }
if (a->posonlyargs && !symtable_visit_argannotations(st, a->posonlyargs))
return 0;
if (a->args && !symtable_visit_argannotations(st, a->args))
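
The effect of the compiler and symtable changes above, seen from Python (a sketch; the ".format" parameter name is internal and cannot be passed by keyword):

    from annotationlib import Format

    def f(x: int): ...

    # Compiler-generated __annotate__ functions take one positional
    # parameter, internally named ".format".
    print(f.__annotate__(Format.VALUE))      # {'x': <class 'int'>}

    # The same annotation-scope machinery now also builds the lazy evaluate
    # functions for type parameters and type aliases; the
    # PyTuple_Pack(1, _PyLong_GetOne()) above should give them a default
    # format of VALUE (1).
    def g[T: int](): ...
    T, = g.__type_params__
    print(T.evaluate_bound(Format.VALUE))    # <class 'int'>
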
From 04eb5c8db1e24cabd0cb81392bb2632c03be1550 Mon Sep 17 00:00:00 2001
From: sobolevn
Date: Sat, 27 Jul 2024 21:33:38 +0300
Subject: [PATCH 057/139] gh-122361: Use proper `PyUnicodeWriter_*` API in
`constevaluator_call` (#122362)
---
Objects/typevarobject.c | 32 +++++++++++++++++---------------
1 file changed, 17 insertions(+), 15 deletions(-)
diff --git a/Objects/typevarobject.c b/Objects/typevarobject.c
index fb1f260571b582..3c96850589d378 100644
--- a/Objects/typevarobject.c
+++ b/Objects/typevarobject.c
@@ -169,38 +169,40 @@ constevaluator_call(PyObject *self, PyObject *args, PyObject *kwargs)
}
PyObject *value = ((constevaluatorobject *)self)->value;
if (format == 3) { // SOURCE
- _PyUnicodeWriter writer;
- _PyUnicodeWriter_Init(&writer);
+ PyUnicodeWriter *writer = PyUnicodeWriter_Create(5); // cannot be <5
+ if (writer == NULL) {
+ return NULL;
+ }
if (PyTuple_Check(value)) {
- if (_PyUnicodeWriter_WriteASCIIString(&writer, "(", 1) < 0) {
- _PyUnicodeWriter_Dealloc(&writer);
+ if (PyUnicodeWriter_WriteChar(writer, '(') < 0) {
+ PyUnicodeWriter_Discard(writer);
return NULL;
}
for (Py_ssize_t i = 0; i < PyTuple_GET_SIZE(value); i++) {
PyObject *item = PyTuple_GET_ITEM(value, i);
if (i > 0) {
- if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ", 2) < 0) {
- _PyUnicodeWriter_Dealloc(&writer);
+ if (PyUnicodeWriter_WriteUTF8(writer, ", ", 2) < 0) {
+ PyUnicodeWriter_Discard(writer);
return NULL;
}
}
- if (_Py_typing_type_repr(&writer, item) < 0) {
- _PyUnicodeWriter_Dealloc(&writer);
+ if (_Py_typing_type_repr(writer, item) < 0) {
+ PyUnicodeWriter_Discard(writer);
return NULL;
}
}
- if (_PyUnicodeWriter_WriteASCIIString(&writer, ")", 1) < 0) {
- _PyUnicodeWriter_Dealloc(&writer);
+ if (PyUnicodeWriter_WriteChar(writer, ')') < 0) {
+ PyUnicodeWriter_Discard(writer);
return NULL;
}
}
else {
- if (_Py_typing_type_repr(&writer, value) < 0) {
- _PyUnicodeWriter_Dealloc(&writer);
+ if (_Py_typing_type_repr(writer, value) < 0) {
+ PyUnicodeWriter_Discard(writer);
return NULL;
}
}
- return _PyUnicodeWriter_Finish(&writer);
+ return PyUnicodeWriter_Finish(writer);
}
return Py_NewRef(value);
}
@@ -259,7 +261,7 @@ _Py_typing_type_repr(PyUnicodeWriter *writer, PyObject *p)
}
if (p == (PyObject *)&_PyNone_Type) {
- return _PyUnicodeWriter_WriteASCIIString(writer, "None", 4);
+ return PyUnicodeWriter_WriteUTF8(writer, "None", 4);
}
if ((rc = PyObject_HasAttrWithError(p, &_Py_ID(__origin__))) > 0 &&
@@ -306,7 +308,7 @@ _Py_typing_type_repr(PyUnicodeWriter *writer, PyObject *p)
if (r == NULL) {
return -1;
}
- rc = _PyUnicodeWriter_WriteStr(writer, r);
+ rc = PyUnicodeWriter_WriteStr(writer, r);
Py_DECREF(r);
return rc;
}
From 3ff5ce4706630207bb2c2e2589a4501bf0d1bd78 Mon Sep 17 00:00:00 2001
From: Jelle Zijlstra
Date: Sun, 28 Jul 2024 00:50:14 -0700
Subject: [PATCH 058/139] gh-119180: Add myself as CODEOWNER for annotationlib
(#122366)
Co-authored-by: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com>
---
.github/CODEOWNERS | 1 +
1 file changed, 1 insertion(+)
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 95e30ac3001c9c..9aa5004b0cdb7f 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -214,6 +214,7 @@ Doc/c-api/stable.rst @encukou
**/*idlelib* @terryjreedy
/Doc/library/idle.rst @terryjreedy
+**/*annotationlib* @JelleZijlstra
**/*typing* @JelleZijlstra @AlexWaygood
**/*ftplib @giampaolo
From aa449cf063581ea515e2a6194d175f5e1db3d62e Mon Sep 17 00:00:00 2001
From: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com>
Date: Sun, 28 Jul 2024 10:53:21 +0300
Subject: [PATCH 059/139] gh-122085: Create dedicated page for deprecations
(#122352)
Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
---
Doc/contents.rst | 1 +
Doc/deprecations/index.rst | 10 ++++++++++
Doc/tools/templates/indexcontent.html | 2 ++
3 files changed, 13 insertions(+)
create mode 100644 Doc/deprecations/index.rst
diff --git a/Doc/contents.rst b/Doc/contents.rst
index 24ceacb0076b5e..b57f4b09a5dcb6 100644
--- a/Doc/contents.rst
+++ b/Doc/contents.rst
@@ -14,6 +14,7 @@
installing/index.rst
howto/index.rst
faq/index.rst
+ deprecations/index.rst
glossary.rst
about.rst
diff --git a/Doc/deprecations/index.rst b/Doc/deprecations/index.rst
new file mode 100644
index 00000000000000..cfb30dd09aef6f
--- /dev/null
+++ b/Doc/deprecations/index.rst
@@ -0,0 +1,10 @@
+Deprecations
+============
+
+.. include:: pending-removal-in-3.14.rst
+
+.. include:: pending-removal-in-3.15.rst
+
+.. include:: pending-removal-in-3.16.rst
+
+.. include:: pending-removal-in-future.rst
diff --git a/Doc/tools/templates/indexcontent.html b/Doc/tools/templates/indexcontent.html
index 6f854e86ab8ef1..f2e9fbb0106452 100644
--- a/Doc/tools/templates/indexcontent.html
+++ b/Doc/tools/templates/indexcontent.html
@@ -33,6 +33,8 @@ {{ docstitle|e }}
{% trans %}C API reference{% endtrans %}
{% trans %}FAQs{% endtrans %}
{% trans %}Frequently asked questions (with answers!){% endtrans %}
+ {% trans %}Deprecations{% endtrans %}
+ {% trans %}Deprecated functionality{% endtrans %}
From b359f66c4c315ca14b2a075ee136145ba6610760 Mon Sep 17 00:00:00 2001
From: Victor Stinner
Date: Sun, 28 Jul 2024 09:59:07 +0200
Subject: [PATCH 060/139] gh-120593: Make _PyLong_CompactValue() parameter
const again (#122367)
Change _PyLong_IsCompact() and _PyLong_CompactValue() parameter type
from 'PyLongObject*' to 'const PyLongObject*'. Avoid the Py_TYPE() macro,
which does not support a const parameter.
---
Include/cpython/longintrepr.h | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/Include/cpython/longintrepr.h b/Include/cpython/longintrepr.h
index d841c043f37fc4..c60ccc463653f9 100644
--- a/Include/cpython/longintrepr.h
+++ b/Include/cpython/longintrepr.h
@@ -119,18 +119,18 @@ PyAPI_FUNC(PyLongObject*) _PyLong_FromDigits(
static inline int
-_PyLong_IsCompact(PyLongObject* op) {
- assert(PyType_HasFeature(Py_TYPE(op), Py_TPFLAGS_LONG_SUBCLASS));
+_PyLong_IsCompact(const PyLongObject* op) {
+ assert(PyType_HasFeature(op->ob_base.ob_type, Py_TPFLAGS_LONG_SUBCLASS));
return op->long_value.lv_tag < (2 << _PyLong_NON_SIZE_BITS);
}
#define PyUnstable_Long_IsCompact _PyLong_IsCompact
static inline Py_ssize_t
-_PyLong_CompactValue(PyLongObject *op)
+_PyLong_CompactValue(const PyLongObject *op)
{
Py_ssize_t sign;
- assert(PyType_HasFeature(Py_TYPE(op), Py_TPFLAGS_LONG_SUBCLASS));
+ assert(PyType_HasFeature(op->ob_base.ob_type, Py_TPFLAGS_LONG_SUBCLASS));
assert(PyUnstable_Long_IsCompact(op));
sign = 1 - (op->long_value.lv_tag & _PyLong_SIGN_MASK);
return sign * (Py_ssize_t)op->long_value.ob_digit[0];
From bc93923a2dee00751e44da58b6967c63e3f5c392 Mon Sep 17 00:00:00 2001
From: Serhiy Storchaka
Date: Sun, 28 Jul 2024 11:33:17 +0300
Subject: [PATCH 061/139] gh-122311: Add more tests for pickle (GH-122376)
---
Lib/test/pickletester.py | 770 ++++++++++++++++++++++++++++++++-------
Lib/test/test_pickle.py | 18 +
2 files changed, 660 insertions(+), 128 deletions(-)
diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py
index 13663220fc77ea..174f4ff6d021b2 100644
--- a/Lib/test/pickletester.py
+++ b/Lib/test/pickletester.py
@@ -144,6 +144,14 @@ class E(C):
def __getinitargs__(self):
return ()
+import __main__
+__main__.C = C
+C.__module__ = "__main__"
+__main__.D = D
+D.__module__ = "__main__"
+__main__.E = E
+E.__module__ = "__main__"
+
# Simple mutable object.
class Object:
pass
@@ -157,14 +165,6 @@ def __reduce__(self):
# Shouldn't support the recursion itself
return K, (self.value,)
-import __main__
-__main__.C = C
-C.__module__ = "__main__"
-__main__.D = D
-D.__module__ = "__main__"
-__main__.E = E
-E.__module__ = "__main__"
-
class myint(int):
def __init__(self, x):
self.str = str(x)
@@ -1179,6 +1179,124 @@ def test_compat_unpickle(self):
self.assertIs(type(unpickled), collections.UserDict)
self.assertEqual(unpickled, collections.UserDict({1: 2}))
+ def test_load_global(self):
+ self.assertIs(self.loads(b'cbuiltins\nstr\n.'), str)
+ self.assertIs(self.loads(b'cmath\nlog\n.'), math.log)
+ self.assertIs(self.loads(b'cos.path\njoin\n.'), os.path.join)
+ self.assertIs(self.loads(b'\x80\x04cbuiltins\nstr.upper\n.'), str.upper)
+ with support.swap_item(sys.modules, 'mödule', types.SimpleNamespace(glöbal=42)):
+ self.assertEqual(self.loads(b'\x80\x04cm\xc3\xb6dule\ngl\xc3\xb6bal\n.'), 42)
+
+ self.assertRaises(UnicodeDecodeError, self.loads, b'c\xff\nlog\n.')
+ self.assertRaises(UnicodeDecodeError, self.loads, b'cmath\n\xff\n.')
+ self.assertRaises(self.truncated_errors, self.loads, b'c\nlog\n.')
+ self.assertRaises(self.truncated_errors, self.loads, b'cmath\n\n.')
+ self.assertRaises(self.truncated_errors, self.loads, b'\x80\x04cmath\n\n.')
+
+ def test_load_stack_global(self):
+ self.assertIs(self.loads(b'\x8c\x08builtins\x8c\x03str\x93.'), str)
+ self.assertIs(self.loads(b'\x8c\x04math\x8c\x03log\x93.'), math.log)
+ self.assertIs(self.loads(b'\x8c\x07os.path\x8c\x04join\x93.'),
+ os.path.join)
+ self.assertIs(self.loads(b'\x80\x04\x8c\x08builtins\x8c\x09str.upper\x93.'),
+ str.upper)
+ with support.swap_item(sys.modules, 'mödule', types.SimpleNamespace(glöbal=42)):
+ self.assertEqual(self.loads(b'\x80\x04\x8c\x07m\xc3\xb6dule\x8c\x07gl\xc3\xb6bal\x93.'), 42)
+
+ self.assertRaises(UnicodeDecodeError, self.loads, b'\x8c\x01\xff\x8c\x03log\x93.')
+ self.assertRaises(UnicodeDecodeError, self.loads, b'\x8c\x04math\x8c\x01\xff\x93.')
+ self.assertRaises(ValueError, self.loads, b'\x8c\x00\x8c\x03log\x93.')
+ self.assertRaises(AttributeError, self.loads, b'\x8c\x04math\x8c\x00\x93.')
+ self.assertRaises(AttributeError, self.loads, b'\x80\x04\x8c\x04math\x8c\x00\x93.')
+
+ self.assertRaises(pickle.UnpicklingError, self.loads, b'N\x8c\x03log\x93.')
+ self.assertRaises(pickle.UnpicklingError, self.loads, b'\x8c\x04mathN\x93.')
+ self.assertRaises(pickle.UnpicklingError, self.loads, b'\x80\x04\x8c\x04mathN\x93.')
+
+ def test_find_class(self):
+ unpickler = self.unpickler(io.BytesIO())
+ unpickler_nofix = self.unpickler(io.BytesIO(), fix_imports=False)
+ unpickler4 = self.unpickler(io.BytesIO(b'\x80\x04N.'))
+ unpickler4.load()
+
+ self.assertIs(unpickler.find_class('__builtin__', 'str'), str)
+ self.assertRaises(ModuleNotFoundError,
+ unpickler_nofix.find_class, '__builtin__', 'str')
+ self.assertIs(unpickler.find_class('builtins', 'str'), str)
+ self.assertIs(unpickler_nofix.find_class('builtins', 'str'), str)
+ self.assertIs(unpickler.find_class('math', 'log'), math.log)
+ self.assertIs(unpickler.find_class('os.path', 'join'), os.path.join)
+ self.assertIs(unpickler.find_class('os.path', 'join'), os.path.join)
+
+ self.assertIs(unpickler4.find_class('builtins', 'str.upper'), str.upper)
+ with self.assertRaises(AttributeError):
+ unpickler.find_class('builtins', 'str.upper')
+
+ with self.assertRaises(AttributeError):
+ unpickler.find_class('math', 'spam')
+ with self.assertRaises(AttributeError):
+ unpickler4.find_class('math', 'spam')
+ with self.assertRaises(AttributeError):
+ unpickler.find_class('math', 'log.spam')
+ with self.assertRaises(AttributeError):
+ unpickler4.find_class('math', 'log.spam')
+ with self.assertRaises(AttributeError):
+ unpickler.find_class('math', 'log..spam')
+ with self.assertRaises(AttributeError):
+ unpickler4.find_class('math', 'log..spam')
+ with self.assertRaises(AttributeError):
+ unpickler.find_class('math', '')
+ with self.assertRaises(AttributeError):
+ unpickler4.find_class('math', '')
+ self.assertRaises(ModuleNotFoundError, unpickler.find_class, 'spam', 'log')
+ self.assertRaises(ValueError, unpickler.find_class, '', 'log')
+
+ self.assertRaises(TypeError, unpickler.find_class, None, 'log')
+ self.assertRaises(TypeError, unpickler.find_class, 'math', None)
+ self.assertRaises((TypeError, AttributeError), unpickler4.find_class, 'math', None)
+
+ def test_custom_find_class(self):
+ def loads(data):
+ class Unpickler(self.unpickler):
+ def find_class(self, module_name, global_name):
+ return (module_name, global_name)
+ return Unpickler(io.BytesIO(data)).load()
+
+ self.assertEqual(loads(b'cmath\nlog\n.'), ('math', 'log'))
+ self.assertEqual(loads(b'\x8c\x04math\x8c\x03log\x93.'), ('math', 'log'))
+
+ def loads(data):
+ class Unpickler(self.unpickler):
+ @staticmethod
+ def find_class(module_name, global_name):
+ return (module_name, global_name)
+ return Unpickler(io.BytesIO(data)).load()
+
+ self.assertEqual(loads(b'cmath\nlog\n.'), ('math', 'log'))
+ self.assertEqual(loads(b'\x8c\x04math\x8c\x03log\x93.'), ('math', 'log'))
+
+ def loads(data):
+ class Unpickler(self.unpickler):
+ @classmethod
+ def find_class(cls, module_name, global_name):
+ return (module_name, global_name)
+ return Unpickler(io.BytesIO(data)).load()
+
+ self.assertEqual(loads(b'cmath\nlog\n.'), ('math', 'log'))
+ self.assertEqual(loads(b'\x8c\x04math\x8c\x03log\x93.'), ('math', 'log'))
+
+ def loads(data):
+ class Unpickler(self.unpickler):
+ pass
+ def find_class(module_name, global_name):
+ return (module_name, global_name)
+ unpickler = Unpickler(io.BytesIO(data))
+ unpickler.find_class = find_class
+ return unpickler.load()
+
+ self.assertEqual(loads(b'cmath\nlog\n.'), ('math', 'log'))
+ self.assertEqual(loads(b'\x8c\x04math\x8c\x03log\x93.'), ('math', 'log'))
+
def test_bad_reduce(self):
self.assertEqual(self.loads(b'cbuiltins\nint\n)R.'), 0)
self.check_unpickling_error(TypeError, b'N)R.')
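
The find_class() hook exercised by these tests is also the documented extension point for restricting what a pickle stream may load; a typical (illustrative) restricted unpickler looks like:

    import builtins
    import io
    import pickle

    SAFE_BUILTINS = {"str", "int", "float", "complex", "list", "tuple", "dict", "set"}

    class RestrictedUnpickler(pickle.Unpickler):
        def find_class(self, module, name):
            # Allow only a small set of builtins; refuse everything else.
            if module == "builtins" and name in SAFE_BUILTINS:
                return getattr(builtins, name)
            raise pickle.UnpicklingError(f"global {module}.{name} is forbidden")

    data = pickle.dumps([1, 2, 3])
    print(RestrictedUnpickler(io.BytesIO(data)).load())   # [1, 2, 3]
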
@@ -1443,6 +1561,474 @@ def t():
[ToBeUnpickled] * 2)
+class AbstractPicklingErrorTests:
+ # Subclass must define self.dumps, self.pickler.
+
+ def test_bad_reduce_result(self):
+ obj = REX([print, ()])
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(pickle.PicklingError):
+ self.dumps(obj, proto)
+
+ obj = REX((print,))
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(pickle.PicklingError):
+ self.dumps(obj, proto)
+
+ obj = REX((print, (), None, None, None, None, None))
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(pickle.PicklingError):
+ self.dumps(obj, proto)
+
+ def test_bad_reconstructor(self):
+ obj = REX((42, ()))
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(pickle.PicklingError):
+ self.dumps(obj, proto)
+
+ def test_unpickleable_reconstructor(self):
+ obj = REX((UnpickleableCallable(), ()))
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(CustomError):
+ self.dumps(obj, proto)
+
+ def test_bad_reconstructor_args(self):
+ obj = REX((print, []))
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(pickle.PicklingError):
+ self.dumps(obj, proto)
+
+ def test_unpickleable_reconstructor_args(self):
+ obj = REX((print, (1, 2, UNPICKLEABLE)))
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(CustomError):
+ self.dumps(obj, proto)
+
+ def test_bad_newobj_args(self):
+ obj = REX((copyreg.__newobj__, ()))
+ for proto in protocols[2:]:
+ with self.subTest(proto=proto):
+ with self.assertRaises((IndexError, pickle.PicklingError)) as cm:
+ self.dumps(obj, proto)
+
+ obj = REX((copyreg.__newobj__, [REX]))
+ for proto in protocols[2:]:
+ with self.subTest(proto=proto):
+ with self.assertRaises((IndexError, pickle.PicklingError)):
+ self.dumps(obj, proto)
+
+ def test_bad_newobj_class(self):
+ obj = REX((copyreg.__newobj__, (NoNew(),)))
+ for proto in protocols[2:]:
+ with self.subTest(proto=proto):
+ with self.assertRaises(pickle.PicklingError):
+ self.dumps(obj, proto)
+
+ def test_wrong_newobj_class(self):
+ obj = REX((copyreg.__newobj__, (str,)))
+ for proto in protocols[2:]:
+ with self.subTest(proto=proto):
+ with self.assertRaises(pickle.PicklingError):
+ self.dumps(obj, proto)
+
+ def test_unpickleable_newobj_class(self):
+ class LocalREX(REX): pass
+ obj = LocalREX((copyreg.__newobj__, (LocalREX,)))
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises((pickle.PicklingError, AttributeError)):
+ self.dumps(obj, proto)
+
+ def test_unpickleable_newobj_args(self):
+ obj = REX((copyreg.__newobj__, (REX, 1, 2, UNPICKLEABLE)))
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(CustomError):
+ self.dumps(obj, proto)
+
+ def test_bad_newobj_ex_args(self):
+ obj = REX((copyreg.__newobj_ex__, ()))
+ for proto in protocols[2:]:
+ with self.subTest(proto=proto):
+ with self.assertRaises((ValueError, pickle.PicklingError)):
+ self.dumps(obj, proto)
+
+ obj = REX((copyreg.__newobj_ex__, 42))
+ for proto in protocols[2:]:
+ with self.subTest(proto=proto):
+ with self.assertRaises(pickle.PicklingError):
+ self.dumps(obj, proto)
+
+ obj = REX((copyreg.__newobj_ex__, (REX, 42, {})))
+ is_py = self.pickler is pickle._Pickler
+ for proto in protocols[2:4] if is_py else protocols[2:]:
+ with self.subTest(proto=proto):
+ with self.assertRaises((TypeError, pickle.PicklingError)):
+ self.dumps(obj, proto)
+
+ obj = REX((copyreg.__newobj_ex__, (REX, (), [])))
+ for proto in protocols[2:4] if is_py else protocols[2:]:
+ with self.subTest(proto=proto):
+ with self.assertRaises((TypeError, pickle.PicklingError)):
+ self.dumps(obj, proto)
+
+ def test_bad_newobj_ex__class(self):
+ obj = REX((copyreg.__newobj_ex__, (NoNew(), (), {})))
+ for proto in protocols[2:]:
+ with self.subTest(proto=proto):
+ with self.assertRaises(pickle.PicklingError):
+ self.dumps(obj, proto)
+
+ def test_wrong_newobj_ex_class(self):
+ if self.pickler is not pickle._Pickler:
+ self.skipTest('only verified in the Python implementation')
+ obj = REX((copyreg.__newobj_ex__, (str, (), {})))
+ for proto in protocols[2:]:
+ with self.subTest(proto=proto):
+ with self.assertRaises(pickle.PicklingError):
+ self.dumps(obj, proto)
+
+ def test_unpickleable_newobj_ex_class(self):
+ class LocalREX(REX): pass
+ obj = LocalREX((copyreg.__newobj_ex__, (LocalREX, (), {})))
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises((pickle.PicklingError, AttributeError)):
+ self.dumps(obj, proto)
+
+ def test_unpickleable_newobj_ex_args(self):
+ obj = REX((copyreg.__newobj_ex__, (REX, (1, 2, UNPICKLEABLE), {})))
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(CustomError):
+ self.dumps(obj, proto)
+
+ def test_unpickleable_newobj_ex_kwargs(self):
+ obj = REX((copyreg.__newobj_ex__, (REX, (), {'a': UNPICKLEABLE})))
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(CustomError):
+ self.dumps(obj, proto)
+
+ def test_unpickleable_state(self):
+ obj = REX_state(UNPICKLEABLE)
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(CustomError):
+ self.dumps(obj, proto)
+
+ def test_bad_state_setter(self):
+ if self.pickler is pickle._Pickler:
+ self.skipTest('only verified in the C implementation')
+ obj = REX((print, (), 'state', None, None, 42))
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(pickle.PicklingError):
+ self.dumps(obj, proto)
+
+ def test_unpickleable_state_setter(self):
+ obj = REX((print, (), 'state', None, None, UnpickleableCallable()))
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(CustomError):
+ self.dumps(obj, proto)
+
+ def test_unpickleable_state_with_state_setter(self):
+ obj = REX((print, (), UNPICKLEABLE, None, None, print))
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(CustomError):
+ self.dumps(obj, proto)
+
+ def test_bad_object_list_items(self):
+ # Issue4176: crash when 4th and 5th items of __reduce__()
+ # are not iterators
+ obj = REX((list, (), None, 42))
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises((TypeError, pickle.PicklingError)):
+ self.dumps(obj, proto)
+
+ if self.pickler is not pickle._Pickler:
+ # Python implementation is less strict and also accepts iterables.
+ obj = REX((list, (), None, []))
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises((TypeError, pickle.PicklingError)):
+ self.dumps(obj, proto)
+
+ def test_unpickleable_object_list_items(self):
+ obj = REX_six([1, 2, UNPICKLEABLE])
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(CustomError):
+ self.dumps(obj, proto)
+
+ def test_bad_object_dict_items(self):
+ # Issue4176: crash when 4th and 5th items of __reduce__()
+ # are not iterators
+ obj = REX((dict, (), None, None, 42))
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises((TypeError, pickle.PicklingError)):
+ self.dumps(obj, proto)
+
+ for proto in protocols:
+ obj = REX((dict, (), None, None, iter([('a',)])))
+ with self.subTest(proto=proto):
+ with self.assertRaises((ValueError, TypeError)):
+ self.dumps(obj, proto)
+
+ if self.pickler is not pickle._Pickler:
+ # Python implementation is less strict and also accepts iterables.
+ obj = REX((dict, (), None, None, []))
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises((TypeError, pickle.PicklingError)):
+ self.dumps(obj, proto)
+
+ def test_unpickleable_object_dict_items(self):
+ obj = REX_seven({'a': UNPICKLEABLE})
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(CustomError):
+ self.dumps(obj, proto)
+
+ def test_unpickleable_list_items(self):
+ obj = [1, [2, 3, UNPICKLEABLE]]
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(CustomError):
+ self.dumps(obj, proto)
+ for n in [0, 1, 1000, 1005]:
+ obj = [*range(n), UNPICKLEABLE]
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(CustomError):
+ self.dumps(obj, proto)
+
+ def test_unpickleable_tuple_items(self):
+ obj = (1, (2, 3, UNPICKLEABLE))
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(CustomError):
+ self.dumps(obj, proto)
+ obj = (*range(10), UNPICKLEABLE)
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(CustomError):
+ self.dumps(obj, proto)
+
+ def test_unpickleable_dict_items(self):
+ obj = {'a': {'b': UNPICKLEABLE}}
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(CustomError):
+ self.dumps(obj, proto)
+ for n in [0, 1, 1000, 1005]:
+ obj = dict.fromkeys(range(n))
+ obj['a'] = UNPICKLEABLE
+ for proto in protocols:
+ with self.subTest(proto=proto, n=n):
+ with self.assertRaises(CustomError):
+ self.dumps(obj, proto)
+
+ def test_unpickleable_set_items(self):
+ obj = {UNPICKLEABLE}
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(CustomError):
+ self.dumps(obj, proto)
+
+ def test_unpickleable_frozenset_items(self):
+ obj = frozenset({frozenset({UNPICKLEABLE})})
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(CustomError):
+ self.dumps(obj, proto)
+
+ def test_global_lookup_error(self):
+ # Global name does not exist
+ obj = REX('spam')
+ obj.__module__ = __name__
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(pickle.PicklingError):
+ self.dumps(obj, proto)
+
+ obj.__module__ = 'nonexisting'
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(pickle.PicklingError):
+ self.dumps(obj, proto)
+
+ obj.__module__ = ''
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises((ValueError, pickle.PicklingError)):
+ self.dumps(obj, proto)
+
+ obj.__module__ = None
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(pickle.PicklingError):
+ self.dumps(obj, proto)
+
+ def test_nonencodable_global_name_error(self):
+ for proto in protocols[:4]:
+ with self.subTest(proto=proto):
+ name = 'nonascii\xff' if proto < 3 else 'nonencodable\udbff'
+ obj = REX(name)
+ obj.__module__ = __name__
+ with support.swap_item(globals(), name, obj):
+ with self.assertRaises((UnicodeEncodeError, pickle.PicklingError)):
+ self.dumps(obj, proto)
+
+ def test_nonencodable_module_name_error(self):
+ for proto in protocols[:4]:
+ with self.subTest(proto=proto):
+ name = 'nonascii\xff' if proto < 3 else 'nonencodable\udbff'
+ obj = REX('test')
+ obj.__module__ = name
+ mod = types.SimpleNamespace(test=obj)
+ with support.swap_item(sys.modules, name, mod):
+ with self.assertRaises((UnicodeEncodeError, pickle.PicklingError)):
+ self.dumps(obj, proto)
+
+ def test_nested_lookup_error(self):
+ # Nested name does not exist
+ obj = REX('AbstractPickleTests.spam')
+ obj.__module__ = __name__
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(pickle.PicklingError):
+ self.dumps(obj, proto)
+
+ obj.__module__ = None
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(pickle.PicklingError):
+ self.dumps(obj, proto)
+
+ def test_wrong_object_lookup_error(self):
+ # Name is bound to different object
+ obj = REX('AbstractPickleTests')
+ obj.__module__ = __name__
+ AbstractPickleTests.ham = []
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(pickle.PicklingError):
+ self.dumps(obj, proto)
+
+ obj.__module__ = None
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises(pickle.PicklingError):
+ self.dumps(obj, proto)
+
+ def test_local_lookup_error(self):
+ # Test that whichmodule() errors out cleanly when looking up
+ # an assumed globally-reachable object fails.
+ def f():
+ pass
+ # Since the function is local, lookup will fail
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises((AttributeError, pickle.PicklingError)):
+ self.dumps(f, proto)
+ # Same without a __module__ attribute (exercises a different path
+ # in _pickle.c).
+ del f.__module__
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises((AttributeError, pickle.PicklingError)):
+ self.dumps(f, proto)
+ # Yet a different path.
+ f.__name__ = f.__qualname__
+ for proto in protocols:
+ with self.subTest(proto=proto):
+ with self.assertRaises((AttributeError, pickle.PicklingError)):
+ self.dumps(f, proto)
+
+ def test_reduce_ex_None(self):
+ c = REX_None()
+ with self.assertRaises(TypeError):
+ self.dumps(c)
+
+ def test_reduce_None(self):
+ c = R_None()
+ with self.assertRaises(TypeError):
+ self.dumps(c)
+
+ @no_tracing
+ def test_bad_getattr(self):
+ # Issue #3514: crash when there is an infinite loop in __getattr__
+ x = BadGetattr()
+ for proto in range(2):
+ with support.infinite_recursion(25):
+ self.assertRaises(RuntimeError, self.dumps, x, proto)
+ for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
+ s = self.dumps(x, proto)
+
+ def test_picklebuffer_error(self):
+ # PickleBuffer forbidden with protocol < 5
+ pb = pickle.PickleBuffer(b"foobar")
+ for proto in range(0, 5):
+ with self.subTest(proto=proto):
+ with self.assertRaises(pickle.PickleError):
+ self.dumps(pb, proto)
+
+ def test_non_continuous_buffer(self):
+ if self.pickler is pickle._Pickler:
+ self.skipTest('CRASHES (see gh-122306)')
+ for proto in protocols[5:]:
+ with self.subTest(proto=proto):
+ pb = pickle.PickleBuffer(memoryview(b"foobar")[::2])
+ with self.assertRaises(pickle.PicklingError):
+ self.dumps(pb, proto)
+
+ def test_buffer_callback_error(self):
+ def buffer_callback(buffers):
+ raise CustomError
+ pb = pickle.PickleBuffer(b"foobar")
+ with self.assertRaises(CustomError):
+ self.dumps(pb, 5, buffer_callback=buffer_callback)
+
+ def test_evil_pickler_mutating_collection(self):
+ # https://github.com/python/cpython/issues/92930
+ global Clearer
+ class Clearer:
+ pass
+
+ def check(collection):
+ class EvilPickler(self.pickler):
+ def persistent_id(self, obj):
+ if isinstance(obj, Clearer):
+ collection.clear()
+ return None
+ pickler = EvilPickler(io.BytesIO(), proto)
+ try:
+ pickler.dump(collection)
+ except RuntimeError as e:
+ expected = "changed size during iteration"
+ self.assertIn(expected, str(e))
+
+ for proto in protocols:
+ check([Clearer()])
+ check([Clearer(), Clearer()])
+ check({Clearer()})
+ check({Clearer(), Clearer()})
+ check({Clearer(): 1})
+ check({Clearer(): 1, Clearer(): 2})
+ check({1: Clearer(), 2: Clearer()})
+
class AbstractPickleTests:
# Subclass must define self.dumps, self.loads.
@@ -2453,55 +3039,12 @@ def test_reduce_calls_base(self):
y = self.loads(s)
self.assertEqual(y._reduce_called, 1)
- def test_reduce_ex_None(self):
- c = REX_None()
- with self.assertRaises(TypeError):
- self.dumps(c)
-
- def test_reduce_None(self):
- c = R_None()
- with self.assertRaises(TypeError):
- self.dumps(c)
-
def test_pickle_setstate_None(self):
c = C_None_setstate()
p = self.dumps(c)
with self.assertRaises(TypeError):
self.loads(p)
- @no_tracing
- def test_bad_getattr(self):
- # Issue #3514: crash when there is an infinite loop in __getattr__
- x = BadGetattr()
- for proto in range(2):
- with support.infinite_recursion(25):
- self.assertRaises(RuntimeError, self.dumps, x, proto)
- for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
- s = self.dumps(x, proto)
-
- def test_reduce_bad_iterator(self):
- # Issue4176: crash when 4th and 5th items of __reduce__()
- # are not iterators
- class C(object):
- def __reduce__(self):
- # 4th item is not an iterator
- return list, (), None, [], None
- class D(object):
- def __reduce__(self):
- # 5th item is not an iterator
- return dict, (), None, None, []
-
- # Python implementation is less strict and also accepts iterables.
- for proto in protocols:
- try:
- self.dumps(C(), proto)
- except pickle.PicklingError:
- pass
- try:
- self.dumps(D(), proto)
- except pickle.PicklingError:
- pass
-
def test_many_puts_and_gets(self):
# Test that internal data structures correctly deal with lots of
# puts/gets.
@@ -2950,27 +3493,6 @@ def test_compat_pickle(self):
self.assertIn(('c%s\n%s' % (mod, name)).encode(), pickled)
self.assertIs(type(self.loads(pickled)), type(val))
- def test_local_lookup_error(self):
- # Test that whichmodule() errors out cleanly when looking up
- # an assumed globally-reachable object fails.
- def f():
- pass
- # Since the function is local, lookup will fail
- for proto in range(0, pickle.HIGHEST_PROTOCOL + 1):
- with self.assertRaises((AttributeError, pickle.PicklingError)):
- pickletools.dis(self.dumps(f, proto))
- # Same without a __module__ attribute (exercises a different path
- # in _pickle.c).
- del f.__module__
- for proto in range(0, pickle.HIGHEST_PROTOCOL + 1):
- with self.assertRaises((AttributeError, pickle.PicklingError)):
- pickletools.dis(self.dumps(f, proto))
- # Yet a different path.
- f.__name__ = f.__qualname__
- for proto in range(0, pickle.HIGHEST_PROTOCOL + 1):
- with self.assertRaises((AttributeError, pickle.PicklingError)):
- pickletools.dis(self.dumps(f, proto))
-
#
# PEP 574 tests below
#
@@ -3081,20 +3603,6 @@ def test_oob_buffers_writable_to_readonly(self):
self.assertIs(type(new), type(obj))
self.assertEqual(new, obj)
- def test_picklebuffer_error(self):
- # PickleBuffer forbidden with protocol < 5
- pb = pickle.PickleBuffer(b"foobar")
- for proto in range(0, 5):
- with self.assertRaises(pickle.PickleError):
- self.dumps(pb, proto)
-
- def test_buffer_callback_error(self):
- def buffer_callback(buffers):
- 1/0
- pb = pickle.PickleBuffer(b"foobar")
- with self.assertRaises(ZeroDivisionError):
- self.dumps(pb, 5, buffer_callback=buffer_callback)
-
def test_buffers_error(self):
pb = pickle.PickleBuffer(b"foobar")
for proto in range(5, pickle.HIGHEST_PROTOCOL + 1):
@@ -3186,37 +3694,6 @@ def __reduce__(self):
expected = "changed size during iteration"
self.assertIn(expected, str(e))
- def test_evil_pickler_mutating_collection(self):
- # https://github.com/python/cpython/issues/92930
- if not hasattr(self, "pickler"):
- raise self.skipTest(f"{type(self)} has no associated pickler type")
-
- global Clearer
- class Clearer:
- pass
-
- def check(collection):
- class EvilPickler(self.pickler):
- def persistent_id(self, obj):
- if isinstance(obj, Clearer):
- collection.clear()
- return None
- pickler = EvilPickler(io.BytesIO(), proto)
- try:
- pickler.dump(collection)
- except RuntimeError as e:
- expected = "changed size during iteration"
- self.assertIn(expected, str(e))
-
- for proto in protocols:
- check([Clearer()])
- check([Clearer(), Clearer()])
- check({Clearer()})
- check({Clearer(), Clearer()})
- check({Clearer(): 1})
- check({Clearer(): 1, Clearer(): 2})
- check({1: Clearer(), 2: Clearer()})
-
class BigmemPickleTests:
@@ -3347,6 +3824,18 @@ def test_huge_str_64b(self, size):
# Test classes for reduce_ex
+class R:
+ def __init__(self, reduce=None):
+ self.reduce = reduce
+ def __reduce__(self, proto):
+ return self.reduce
+
+class REX:
+ def __init__(self, reduce_ex=None):
+ self.reduce_ex = reduce_ex
+ def __reduce_ex__(self, proto):
+ return self.reduce_ex
+
class REX_one(object):
"""No __reduce_ex__ here, but inheriting it from object"""
_reduce_called = 0
@@ -3437,6 +3926,19 @@ def __getstate__(self):
__setstate__ = None
+class CustomError(Exception):
+ pass
+
+class Unpickleable:
+ def __reduce__(self):
+ raise CustomError
+
+UNPICKLEABLE = Unpickleable()
+
+class UnpickleableCallable(Unpickleable):
+ def __call__(self, *args, **kwargs):
+ pass
+
# Test classes for newobj
@@ -3505,6 +4007,12 @@ class BadGetattr:
def __getattr__(self, key):
self.foo
+class NoNew:
+ def __getattribute__(self, name):
+ if name == '__new__':
+ raise AttributeError
+ return super().__getattribute__(name)
+
class AbstractPickleModuleTests:
@@ -3577,7 +4085,7 @@ def raises_oserror(self, *args, **kwargs):
raise OSError
@property
def bad_property(self):
- 1/0
+ raise CustomError
# File without read and readline
class F:
@@ -3598,23 +4106,23 @@ class F:
class F:
read = bad_property
readline = raises_oserror
- self.assertRaises(ZeroDivisionError, self.Unpickler, F())
+ self.assertRaises(CustomError, self.Unpickler, F())
# File with bad readline
class F:
readline = bad_property
read = raises_oserror
- self.assertRaises(ZeroDivisionError, self.Unpickler, F())
+ self.assertRaises(CustomError, self.Unpickler, F())
# File with bad readline, no read
class F:
readline = bad_property
- self.assertRaises(ZeroDivisionError, self.Unpickler, F())
+ self.assertRaises(CustomError, self.Unpickler, F())
# File with bad read, no readline
class F:
read = bad_property
- self.assertRaises((AttributeError, ZeroDivisionError), self.Unpickler, F())
+ self.assertRaises((AttributeError, CustomError), self.Unpickler, F())
# File with bad peek
class F:
@@ -3623,7 +4131,7 @@ class F:
readline = raises_oserror
try:
self.Unpickler(F())
- except ZeroDivisionError:
+ except CustomError:
pass
# File with bad readinto
@@ -3633,7 +4141,7 @@ class F:
readline = raises_oserror
try:
self.Unpickler(F())
- except ZeroDivisionError:
+ except CustomError:
pass
def test_pickler_bad_file(self):
@@ -3646,8 +4154,8 @@ class F:
class F:
@property
def write(self):
- 1/0
- self.assertRaises(ZeroDivisionError, self.Pickler, F())
+ raise CustomError
+ self.assertRaises(CustomError, self.Pickler, F())
def check_dumps_loads_oob_buffers(self, dumps, loads):
# No need to do the full gamut of tests here, just enough to
@@ -3755,9 +4263,15 @@ def test_return_correct_type(self):
def test_protocol0_is_ascii_only(self):
non_ascii_str = "\N{EMPTY SET}"
- self.assertRaises(pickle.PicklingError, self.dumps, non_ascii_str, 0)
+ with self.assertRaises(pickle.PicklingError) as cm:
+ self.dumps(non_ascii_str, 0)
+ self.assertEqual(str(cm.exception),
+ 'persistent IDs in protocol 0 must be ASCII strings')
pickled = pickle.PERSID + non_ascii_str.encode('utf-8') + b'\n.'
- self.assertRaises(pickle.UnpicklingError, self.loads, pickled)
+ with self.assertRaises(pickle.UnpicklingError) as cm:
+ self.loads(pickled)
+ self.assertEqual(str(cm.exception),
+ 'persistent IDs in protocol 0 must be ASCII strings')
class AbstractPicklerUnpicklerObjectTests:
diff --git a/Lib/test/test_pickle.py b/Lib/test/test_pickle.py
index 49aa4b386039ec..c84e507cdf645f 100644
--- a/Lib/test/test_pickle.py
+++ b/Lib/test/test_pickle.py
@@ -16,6 +16,7 @@
from test.pickletester import AbstractHookTests
from test.pickletester import AbstractUnpickleTests
+from test.pickletester import AbstractPicklingErrorTests
from test.pickletester import AbstractPickleTests
from test.pickletester import AbstractPickleModuleTests
from test.pickletester import AbstractPersistentPicklerTests
@@ -55,6 +56,18 @@ def loads(self, buf, **kwds):
return u.load()
+class PyPicklingErrorTests(AbstractPicklingErrorTests, unittest.TestCase):
+
+ pickler = pickle._Pickler
+
+ def dumps(self, arg, proto=None, **kwargs):
+ f = io.BytesIO()
+ p = self.pickler(f, proto, **kwargs)
+ p.dump(arg)
+ f.seek(0)
+ return bytes(f.read())
+
+
class PyPicklerTests(AbstractPickleTests, unittest.TestCase):
pickler = pickle._Pickler
@@ -88,6 +101,8 @@ def loads(self, buf, **kwds):
return pickle.loads(buf, **kwds)
test_framed_write_sizes_with_delayed_writer = None
+ test_find_class = None
+ test_custom_find_class = None
class PersistentPicklerUnpicklerMixin(object):
@@ -267,6 +282,9 @@ class CUnpicklerTests(PyUnpicklerTests):
bad_stack_errors = (pickle.UnpicklingError,)
truncated_errors = (pickle.UnpicklingError,)
+ class CPicklingErrorTests(PyPicklingErrorTests):
+ pickler = _pickle.Pickler
+
class CPicklerTests(PyPicklerTests):
pickler = _pickle.Pickler
unpickler = _pickle.Unpickler
From 169e7138ab84db465b6bf28e6c1dc6c39dbf89f4 Mon Sep 17 00:00:00 2001
From: Sergey B Kirpichev
Date: Mon, 29 Jul 2024 06:56:40 +0300
Subject: [PATCH 062/139] gh-122234: fix accuracy issues for sum() (#122236)
* Use compensated summation for complex sums with floating-point items.
This amends #121176.
* The sum() specializations for floats and complexes now convert integer
items with PyLong_AsDouble() instead of PyLong_AsLongAndOverflow() and
fold them into the compensated sum as well.
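
For context, compensated summation keeps a separate low-order correction term
so that small addends are not lost when added to a much larger running total.
A minimal pure-Python sketch of Neumaier-style compensated summation,
illustrating the behaviour the new test_sum_accuracy cases expect (the actual
C helper cs_add() in Python/bltinmodule.c differs in detail), could look like:

    def compensated_sum(values):
        # Running high-order total plus an accumulated low-order correction.
        total = 0.0
        correction = 0.0
        for x in values:
            t = total + x
            if abs(total) >= abs(x):
                # The low-order bits of x were lost in total + x; recover them.
                correction += (total - t) + x
            else:
                # total was the smaller operand this time.
                correction += (x - t) + total
            total = t
        return total + correction

    # Plain left-to-right float addition collapses this to 0.0; the
    # compensated version recovers 2.0, matching the patched sum().
    print(compensated_sum([1.0, 10.0**100, 1.0, -10.0**100]))  # 2.0

The same idea is why the new tests can assert exact results such as
sum([1.0, 10**100, 1.0, -10**100]) == 2.0.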
---
Lib/test/test_builtin.py | 5 ++++
...-07-24-17-11-51.gh-issue-122234.VxsP_F.rst | 4 ++++
Python/bltinmodule.c | 24 ++++++++++---------
3 files changed, 22 insertions(+), 11 deletions(-)
create mode 100644 Misc/NEWS.d/next/Core and Builtins/2024-07-24-17-11-51.gh-issue-122234.VxsP_F.rst
diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py
index c6a563cc90fec4..85f139db9bcd45 100644
--- a/Lib/test/test_builtin.py
+++ b/Lib/test/test_builtin.py
@@ -1778,6 +1778,8 @@ def test_sum(self):
self.assertRaises(TypeError, sum, [], '')
self.assertRaises(TypeError, sum, [], b'')
self.assertRaises(TypeError, sum, [], bytearray())
+ self.assertRaises(OverflowError, sum, [1.0, 10**1000])
+ self.assertRaises(OverflowError, sum, [1j, 10**1000])
class BadSeq:
def __getitem__(self, index):
@@ -1803,6 +1805,9 @@ def test_sum_accuracy(self):
self.assertEqual(sum([1.0, 10E100, 1.0, -10E100, 2j]), 2+2j)
self.assertEqual(sum([2+1j, 10E100j, 1j, -10E100j]), 2+2j)
self.assertEqual(sum([1j, 1, 10E100j, 1j, 1.0, -10E100j]), 2+2j)
+ self.assertEqual(sum([2j, 1., 10E100, 1., -10E100]), 2+2j)
+ self.assertEqual(sum([1.0, 10**100, 1.0, -10**100]), 2.0)
+ self.assertEqual(sum([2j, 1.0, 10**100, 1.0, -10**100]), 2+2j)
self.assertEqual(sum([0.1j]*10 + [fractions.Fraction(1, 10)]), 0.1+1j)
def test_type(self):
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-07-24-17-11-51.gh-issue-122234.VxsP_F.rst b/Misc/NEWS.d/next/Core and Builtins/2024-07-24-17-11-51.gh-issue-122234.VxsP_F.rst
new file mode 100644
index 00000000000000..b86d6fbdfc648f
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-07-24-17-11-51.gh-issue-122234.VxsP_F.rst
@@ -0,0 +1,4 @@
+Specializations for sums with float and complex inputs in :func:`sum()` now
+always use compensated summation. Also, for integer items in above
+specializations: :c:func:`PyLong_AsDouble` is used, instead of
+:c:func:`PyLong_AsLongAndOverflow`. Patch by Sergey B Kirpichev.
diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c
index 3f7bf4d568ee46..ae025e767ec838 100644
--- a/Python/bltinmodule.c
+++ b/Python/bltinmodule.c
@@ -2687,14 +2687,15 @@ builtin_sum_impl(PyObject *module, PyObject *iterable, PyObject *start)
continue;
}
if (PyLong_Check(item)) {
- long value;
- int overflow;
- value = PyLong_AsLongAndOverflow(item, &overflow);
- if (!overflow) {
- re_sum.hi += (double)value;
+ double value = PyLong_AsDouble(item);
+ if (value != -1.0 || !PyErr_Occurred()) {
+ re_sum = cs_add(re_sum, value);
Py_DECREF(item);
continue;
}
+ else {
+ return NULL;
+ }
}
result = PyFloat_FromDouble(cs_to_double(re_sum));
if (result == NULL) {
@@ -2736,19 +2737,20 @@ builtin_sum_impl(PyObject *module, PyObject *iterable, PyObject *start)
continue;
}
if (PyLong_Check(item)) {
- long value;
- int overflow;
- value = PyLong_AsLongAndOverflow(item, &overflow);
- if (!overflow) {
- re_sum.hi += (double)value;
+ double value = PyLong_AsDouble(item);
+ if (value != -1.0 || !PyErr_Occurred()) {
+ re_sum = cs_add(re_sum, value);
im_sum.hi += 0.0;
Py_DECREF(item);
continue;
}
+ else {
+ return NULL;
+ }
}
if (PyFloat_Check(item)) {
double value = PyFloat_AS_DOUBLE(item);
- re_sum.hi += value;
+ re_sum = cs_add(re_sum, value);
im_sum.hi += 0.0;
_Py_DECREF_SPECIALIZED(item, _PyFloat_ExactDealloc);
continue;
From 3b034d26eb8480f8d12ae11f42d038d24cf8498a Mon Sep 17 00:00:00 2001
From: Serhiy Storchaka
Date: Mon, 29 Jul 2024 11:49:13 +0300
Subject: [PATCH 063/139] gh-122311: Fix some error messages in pickle
(GH-122386)
---
Lib/pickle.py | 7 ++++---
Lib/test/pickletester.py | 4 +++-
.../Library/2024-07-29-10-24-48.gh-issue-122311.xChV1b.rst | 1 +
Modules/_pickle.c | 6 +++---
4 files changed, 11 insertions(+), 7 deletions(-)
create mode 100644 Misc/NEWS.d/next/Library/2024-07-29-10-24-48.gh-issue-122311.xChV1b.rst
diff --git a/Lib/pickle.py b/Lib/pickle.py
index c6c151b2065f4d..299c9e0e5e5641 100644
--- a/Lib/pickle.py
+++ b/Lib/pickle.py
@@ -314,16 +314,17 @@ def load_frame(self, frame_size):
# Tools used for pickling.
def _getattribute(obj, name):
+ top = obj
for subpath in name.split('.'):
if subpath == '':
raise AttributeError("Can't get local attribute {!r} on {!r}"
- .format(name, obj))
+ .format(name, top))
try:
parent = obj
obj = getattr(obj, subpath)
except AttributeError:
raise AttributeError("Can't get attribute {!r} on {!r}"
- .format(name, obj)) from None
+ .format(name, top)) from None
return obj, parent
def whichmodule(obj, name):
@@ -832,7 +833,7 @@ def save_bytearray(self, obj):
if _HAVE_PICKLE_BUFFER:
def save_picklebuffer(self, obj):
if self.proto < 5:
- raise PicklingError("PickleBuffer can only pickled with "
+ raise PicklingError("PickleBuffer can only be pickled with "
"protocol >= 5")
with obj.raw() as m:
if not m.contiguous:
diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py
index 174f4ff6d021b2..a2b49e6c92a7b3 100644
--- a/Lib/test/pickletester.py
+++ b/Lib/test/pickletester.py
@@ -1982,8 +1982,10 @@ def test_picklebuffer_error(self):
pb = pickle.PickleBuffer(b"foobar")
for proto in range(0, 5):
with self.subTest(proto=proto):
- with self.assertRaises(pickle.PickleError):
+ with self.assertRaises(pickle.PickleError) as cm:
self.dumps(pb, proto)
+ self.assertEqual(str(cm.exception),
+ 'PickleBuffer can only be pickled with protocol >= 5')
def test_non_continuous_buffer(self):
if self.pickler is pickle._Pickler:
diff --git a/Misc/NEWS.d/next/Library/2024-07-29-10-24-48.gh-issue-122311.xChV1b.rst b/Misc/NEWS.d/next/Library/2024-07-29-10-24-48.gh-issue-122311.xChV1b.rst
new file mode 100644
index 00000000000000..8d70c610a8dad6
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-07-29-10-24-48.gh-issue-122311.xChV1b.rst
@@ -0,0 +1 @@
+Fix some error messages in :mod:`pickle`.
diff --git a/Modules/_pickle.c b/Modules/_pickle.c
index 861363b68c20c5..452b4aff0237ca 100644
--- a/Modules/_pickle.c
+++ b/Modules/_pickle.c
@@ -1817,10 +1817,10 @@ get_dotted_path(PyObject *obj, PyObject *name)
if (_PyUnicode_EqualToASCIIString(subpath, "")) {
if (obj == NULL)
PyErr_Format(PyExc_AttributeError,
- "Can't pickle local object %R", name);
+ "Can't get local object %R", name);
else
PyErr_Format(PyExc_AttributeError,
- "Can't pickle local attribute %R on %R", name, obj);
+ "Can't get local attribute %R on %R", name, obj);
Py_DECREF(dotted_path);
return NULL;
}
@@ -2507,7 +2507,7 @@ save_picklebuffer(PickleState *st, PicklerObject *self, PyObject *obj)
{
if (self->proto < 5) {
PyErr_SetString(st->PicklingError,
- "PickleBuffer can only pickled with protocol >= 5");
+ "PickleBuffer can only be pickled with protocol >= 5");
return -1;
}
const Py_buffer* view = PyPickleBuffer_GetBuffer(obj);
From 0697188084bf61b28f258fbbe867e1010d679b3e Mon Sep 17 00:00:00 2001
From: Serhiy Storchaka
Date: Mon, 29 Jul 2024 13:40:16 +0300
Subject: [PATCH 064/139] gh-122311: Add more tests for error messages in
pickle (GH-122373)
---
Lib/test/pickletester.py | 235 ++++++++++++++++++++++++++++++---------
1 file changed, 185 insertions(+), 50 deletions(-)
diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py
index a2b49e6c92a7b3..3c936b3bc4029e 100644
--- a/Lib/test/pickletester.py
+++ b/Lib/test/pickletester.py
@@ -1229,24 +1229,38 @@ def test_find_class(self):
self.assertIs(unpickler.find_class('os.path', 'join'), os.path.join)
self.assertIs(unpickler4.find_class('builtins', 'str.upper'), str.upper)
- with self.assertRaises(AttributeError):
+ with self.assertRaisesRegex(AttributeError,
+ r"module 'builtins' has no attribute 'str\.upper'|"
+ r"Can't get attribute 'str\.upper' on \.spam'|"
+ r"Can't get attribute 'log\.\.spam' on .spam')
- with self.assertRaises(AttributeError):
+ with self.assertRaisesRegex(AttributeError,
+ r"Can't get local attribute 'log\.\.spam' on .spam')
- with self.assertRaises(AttributeError):
+ with self.assertRaisesRegex(AttributeError,
+ "module 'math' has no attribute ''|"
+ "Can't get attribute '' on )'
+ r' must return (a )?string or tuple')
with self.assertRaisesRegex(
ValueError, 'The reducer just failed'):
From 9187484dd97f6beb94fc17676014706922e380e1 Mon Sep 17 00:00:00 2001
From: Kirill Podoprigora
Date: Mon, 29 Jul 2024 13:59:42 +0300
Subject: [PATCH 065/139] gh-122292: Split up ``Lib/test/test_ast.py`` into a
couple of files (#122293)
---
Lib/test/test_ast/__init__.py | 7 +
Lib/test/test_ast/snippets.py | 602 +++++++++++++++++++++++++++
Lib/test/{ => test_ast}/test_ast.py | 616 +---------------------------
Lib/test/test_ast/utils.py | 15 +
Makefile.pre.in | 1 +
5 files changed, 629 insertions(+), 612 deletions(-)
create mode 100644 Lib/test/test_ast/__init__.py
create mode 100644 Lib/test/test_ast/snippets.py
rename Lib/test/{ => test_ast}/test_ast.py (68%)
create mode 100644 Lib/test/test_ast/utils.py
diff --git a/Lib/test/test_ast/__init__.py b/Lib/test/test_ast/__init__.py
new file mode 100644
index 00000000000000..9a89d27ba9f979
--- /dev/null
+++ b/Lib/test/test_ast/__init__.py
@@ -0,0 +1,7 @@
+import os
+
+from test import support
+
+
+def load_tests(*args):
+ return support.load_package_tests(os.path.dirname(__file__), *args)
diff --git a/Lib/test/test_ast/snippets.py b/Lib/test/test_ast/snippets.py
new file mode 100644
index 00000000000000..95dc3ca03cd38b
--- /dev/null
+++ b/Lib/test/test_ast/snippets.py
@@ -0,0 +1,602 @@
+import ast
+import sys
+
+from test.test_ast.utils import to_tuple
+
+
+# These tests are compiled through "exec"
+# There should be at least one test per statement
+exec_tests = [
+ # Module docstring
+ "'module docstring'",
+ # FunctionDef
+ "def f(): pass",
+ # FunctionDef with docstring
+ "def f(): 'function docstring'",
+ # FunctionDef with arg
+ "def f(a): pass",
+ # FunctionDef with arg and default value
+ "def f(a=0): pass",
+ # FunctionDef with varargs
+ "def f(*args): pass",
+ # FunctionDef with varargs as TypeVarTuple
+ "def f(*args: *Ts): pass",
+ # FunctionDef with varargs as unpacked Tuple
+ "def f(*args: *tuple[int, ...]): pass",
+ # FunctionDef with varargs as unpacked Tuple *and* TypeVarTuple
+ "def f(*args: *tuple[int, *Ts]): pass",
+ # FunctionDef with kwargs
+ "def f(**kwargs): pass",
+ # FunctionDef with all kind of args and docstring
+ "def f(a, b=1, c=None, d=[], e={}, *args, f=42, **kwargs): 'doc for f()'",
+ # FunctionDef with type annotation on return involving unpacking
+ "def f() -> tuple[*Ts]: pass",
+ "def f() -> tuple[int, *Ts]: pass",
+ "def f() -> tuple[int, *tuple[int, ...]]: pass",
+ # ClassDef
+ "class C:pass",
+ # ClassDef with docstring
+ "class C: 'docstring for class C'",
+ # ClassDef, new style class
+ "class C(object): pass",
+ # Classdef with multiple bases
+ "class C(A, B): pass",
+ # Return
+ "def f():return 1",
+ "def f():return",
+ # Delete
+ "del v",
+ # Assign
+ "v = 1",
+ "a,b = c",
+ "(a,b) = c",
+ "[a,b] = c",
+ "a[b] = c",
+ # AnnAssign with unpacked types
+ "x: tuple[*Ts]",
+ "x: tuple[int, *Ts]",
+ "x: tuple[int, *tuple[str, ...]]",
+ # AugAssign
+ "v += 1",
+ "v -= 1",
+ "v *= 1",
+ "v @= 1",
+ "v /= 1",
+ "v %= 1",
+ "v **= 1",
+ "v <<= 1",
+ "v >>= 1",
+ "v |= 1",
+ "v ^= 1",
+ "v &= 1",
+ "v //= 1",
+ # For
+ "for v in v:pass",
+ # For-Else
+ "for v in v:\n pass\nelse:\n pass",
+ # While
+ "while v:pass",
+ # While-Else
+ "while v:\n pass\nelse:\n pass",
+ # If-Elif-Else
+ "if v:pass",
+ "if a:\n pass\nelif b:\n pass",
+ "if a:\n pass\nelse:\n pass",
+ "if a:\n pass\nelif b:\n pass\nelse:\n pass",
+ "if a:\n pass\nelif b:\n pass\nelif b:\n pass\nelif b:\n pass\nelse:\n pass",
+ # With
+ "with x: pass",
+ "with x, y: pass",
+ "with x as y: pass",
+ "with x as y, z as q: pass",
+ "with (x as y): pass",
+ "with (x, y): pass",
+ # Raise
+ "raise",
+ "raise Exception('string')",
+ "raise Exception",
+ "raise Exception('string') from None",
+ # TryExcept
+ "try:\n pass\nexcept Exception:\n pass",
+ "try:\n pass\nexcept Exception as exc:\n pass",
+ # TryFinally
+ "try:\n pass\nfinally:\n pass",
+ # TryStarExcept
+ "try:\n pass\nexcept* Exception:\n pass",
+ "try:\n pass\nexcept* Exception as exc:\n pass",
+ # TryExceptFinallyElse
+ "try:\n pass\nexcept Exception:\n pass\nelse: pass\nfinally:\n pass",
+ "try:\n pass\nexcept Exception as exc:\n pass\nelse: pass\nfinally:\n pass",
+ "try:\n pass\nexcept* Exception as exc:\n pass\nelse: pass\nfinally:\n pass",
+ # Assert
+ "assert v",
+ # Assert with message
+ "assert v, 'message'",
+ # Import
+ "import sys",
+ "import foo as bar",
+ # ImportFrom
+ "from sys import x as y",
+ "from sys import v",
+ # Global
+ "global v",
+ # Expr
+ "1",
+ # Pass,
+ "pass",
+ # Break
+ "for v in v:break",
+ # Continue
+ "for v in v:continue",
+ # for statements with naked tuples (see http://bugs.python.org/issue6704)
+ "for a,b in c: pass",
+ "for (a,b) in c: pass",
+ "for [a,b] in c: pass",
+ # Multiline generator expression (test for .lineno & .col_offset)
+ """(
+ (
+ Aa
+ ,
+ Bb
+ )
+ for
+ Aa
+ ,
+ Bb in Cc
+ )""",
+ # dictcomp
+ "{a : b for w in x for m in p if g}",
+ # dictcomp with naked tuple
+ "{a : b for v,w in x}",
+ # setcomp
+ "{r for l in x if g}",
+ # setcomp with naked tuple
+ "{r for l,m in x}",
+ # AsyncFunctionDef
+ "async def f():\n 'async function'\n await something()",
+ # AsyncFor
+ "async def f():\n async for e in i: 1\n else: 2",
+ # AsyncWith
+ "async def f():\n async with a as b: 1",
+ # PEP 448: Additional Unpacking Generalizations
+ "{**{1:2}, 2:3}",
+ "{*{1, 2}, 3}",
+ # Function with yield (from)
+ "def f(): yield 1",
+ "def f(): yield from []",
+ # Asynchronous comprehensions
+ "async def f():\n [i async for b in c]",
+ # Decorated FunctionDef
+ "@deco1\n@deco2()\n@deco3(1)\ndef f(): pass",
+ # Decorated AsyncFunctionDef
+ "@deco1\n@deco2()\n@deco3(1)\nasync def f(): pass",
+ # Decorated ClassDef
+ "@deco1\n@deco2()\n@deco3(1)\nclass C: pass",
+ # Decorator with generator argument
+ "@deco(a for a in b)\ndef f(): pass",
+ # Decorator with attribute
+ "@a.b.c\ndef f(): pass",
+ # Simple assignment expression
+ "(a := 1)",
+ # Assignment expression in if statement
+ "if a := foo(): pass",
+ # Assignment expression in while
+ "while a := foo(): pass",
+ # Positional-only arguments
+ "def f(a, /,): pass",
+ "def f(a, /, c, d, e): pass",
+ "def f(a, /, c, *, d, e): pass",
+ "def f(a, /, c, *, d, e, **kwargs): pass",
+ # Positional-only arguments with defaults
+ "def f(a=1, /,): pass",
+ "def f(a=1, /, b=2, c=4): pass",
+ "def f(a=1, /, b=2, *, c=4): pass",
+ "def f(a=1, /, b=2, *, c): pass",
+ "def f(a=1, /, b=2, *, c=4, **kwargs): pass",
+ "def f(a=1, /, b=2, *, c, **kwargs): pass",
+ # Type aliases
+ "type X = int",
+ "type X[T] = int",
+ "type X[T, *Ts, **P] = (T, Ts, P)",
+ "type X[T: int, *Ts, **P] = (T, Ts, P)",
+ "type X[T: (int, str), *Ts, **P] = (T, Ts, P)",
+ "type X[T: int = 1, *Ts = 2, **P =3] = (T, Ts, P)",
+ # Generic classes
+ "class X[T]: pass",
+ "class X[T, *Ts, **P]: pass",
+ "class X[T: int, *Ts, **P]: pass",
+ "class X[T: (int, str), *Ts, **P]: pass",
+ "class X[T: int = 1, *Ts = 2, **P = 3]: pass",
+ # Generic functions
+ "def f[T](): pass",
+ "def f[T, *Ts, **P](): pass",
+ "def f[T: int, *Ts, **P](): pass",
+ "def f[T: (int, str), *Ts, **P](): pass",
+ "def f[T: int = 1, *Ts = 2, **P = 3](): pass",
+ # Match
+ "match x:\n\tcase 1:\n\t\tpass",
+ # Match with _
+ "match x:\n\tcase 1:\n\t\tpass\n\tcase _:\n\t\tpass",
+]
+
+# These are compiled through "single"
+# because of overlap with "eval", it just tests what
+# can't be tested with "eval"
+single_tests = [
+ "1+2"
+]
+
+# These are compiled through "eval"
+# It should test all expressions
+eval_tests = [
+ # Constant(value=None)
+ "None",
+ # True
+ "True",
+ # False
+ "False",
+ # BoolOp
+ "a and b",
+ "a or b",
+ # BinOp
+ "a + b",
+ "a - b",
+ "a * b",
+ "a / b",
+ "a @ b",
+ "a // b",
+ "a ** b",
+ "a % b",
+ "a >> b",
+ "a << b",
+ "a ^ b",
+ "a | b",
+ "a & b",
+ # UnaryOp
+ "not v",
+ "+v",
+ "-v",
+ "~v",
+ # Lambda
+ "lambda:None",
+ # Dict
+ "{ 1:2 }",
+ # Empty dict
+ "{}",
+ # Set
+ "{None,}",
+ # Multiline dict (test for .lineno & .col_offset)
+ """{
+ 1
+ :
+ 2
+ }""",
+ # Multiline list
+ """[
+ 1
+ ,
+ 1
+ ]""",
+ # Multiline tuple
+ """(
+ 1
+ ,
+ )""",
+ # Multiline set
+ """{
+ 1
+ ,
+ 1
+ }""",
+ # ListComp
+ "[a for b in c if d]",
+ # GeneratorExp
+ "(a for b in c if d)",
+ # SetComp
+ "{a for b in c if d}",
+ # DictComp
+ "{k: v for k, v in c if d}",
+ # Comprehensions with multiple for targets
+ "[(a,b) for a,b in c]",
+ "[(a,b) for (a,b) in c]",
+ "[(a,b) for [a,b] in c]",
+ "{(a,b) for a,b in c}",
+ "{(a,b) for (a,b) in c}",
+ "{(a,b) for [a,b] in c}",
+ "((a,b) for a,b in c)",
+ "((a,b) for (a,b) in c)",
+ "((a,b) for [a,b] in c)",
+ # Async comprehensions - async comprehensions can't work outside an asynchronous function
+ #
+ # Yield - yield expressions can't work outside a function
+ #
+ # Compare
+ "1 < 2 < 3",
+ "a == b",
+ "a <= b",
+ "a >= b",
+ "a != b",
+ "a is b",
+ "a is not b",
+ "a in b",
+ "a not in b",
+ # Call without argument
+ "f()",
+ # Call
+ "f(1,2,c=3,*d,**e)",
+ # Call with multi-character starred
+ "f(*[0, 1])",
+ # Call with a generator argument
+ "f(a for a in b)",
+ # Constant(value=int())
+ "10",
+ # Complex num
+ "1j",
+ # Constant(value=str())
+ "'string'",
+ # Attribute
+ "a.b",
+ # Subscript
+ "a[b:c]",
+ # Name
+ "v",
+ # List
+ "[1,2,3]",
+ # Empty list
+ "[]",
+ # Tuple
+ "1,2,3",
+ # Tuple
+ "(1,2,3)",
+ # Empty tuple
+ "()",
+ # Combination
+ "a.b.c.d(a.b[1:2])",
+ # Slice
+ "[5][1:]",
+ "[5][:1]",
+ "[5][::1]",
+ "[5][1:1:1]",
+ # IfExp
+ "foo() if x else bar()",
+ # JoinedStr and FormattedValue
+ "f'{a}'",
+ "f'{a:.2f}'",
+ "f'{a!r}'",
+ "f'foo({a})'",
+]
+
+
+def main():
+ if __name__ != '__main__':
+ return
+ if sys.argv[1:] == ['-g']:
+ for statements, kind in ((exec_tests, "exec"), (single_tests, "single"),
+ (eval_tests, "eval")):
+ print(kind+"_results = [")
+ for statement in statements:
+ tree = ast.parse(statement, "?", kind)
+ print("%r," % (to_tuple(tree),))
+ print("]")
+ print("main()")
+ raise SystemExit
+ unittest.main()
+
+#### EVERYTHING BELOW IS GENERATED BY python Lib/test/test_ast/snippets.py -g #####
+exec_results = [
+('Module', [('Expr', (1, 0, 1, 18), ('Constant', (1, 0, 1, 18), 'module docstring', None))], []),
+('Module', [('FunctionDef', (1, 0, 1, 13), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 9, 1, 13))], [], None, None, [])], []),
+('Module', [('FunctionDef', (1, 0, 1, 29), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (1, 9, 1, 29), ('Constant', (1, 9, 1, 29), 'function docstring', None))], [], None, None, [])], []),
+('Module', [('FunctionDef', (1, 0, 1, 14), 'f', ('arguments', [], [('arg', (1, 6, 1, 7), 'a', None, None)], None, [], [], None, []), [('Pass', (1, 10, 1, 14))], [], None, None, [])], []),
+('Module', [('FunctionDef', (1, 0, 1, 16), 'f', ('arguments', [], [('arg', (1, 6, 1, 7), 'a', None, None)], None, [], [], None, [('Constant', (1, 8, 1, 9), 0, None)]), [('Pass', (1, 12, 1, 16))], [], None, None, [])], []),
+('Module', [('FunctionDef', (1, 0, 1, 18), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 11), 'args', None, None), [], [], None, []), [('Pass', (1, 14, 1, 18))], [], None, None, [])], []),
+('Module', [('FunctionDef', (1, 0, 1, 23), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 16), 'args', ('Starred', (1, 13, 1, 16), ('Name', (1, 14, 1, 16), 'Ts', ('Load',)), ('Load',)), None), [], [], None, []), [('Pass', (1, 19, 1, 23))], [], None, None, [])], []),
+('Module', [('FunctionDef', (1, 0, 1, 36), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 29), 'args', ('Starred', (1, 13, 1, 29), ('Subscript', (1, 14, 1, 29), ('Name', (1, 14, 1, 19), 'tuple', ('Load',)), ('Tuple', (1, 20, 1, 28), [('Name', (1, 20, 1, 23), 'int', ('Load',)), ('Constant', (1, 25, 1, 28), Ellipsis, None)], ('Load',)), ('Load',)), ('Load',)), None), [], [], None, []), [('Pass', (1, 32, 1, 36))], [], None, None, [])], []),
+('Module', [('FunctionDef', (1, 0, 1, 36), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 29), 'args', ('Starred', (1, 13, 1, 29), ('Subscript', (1, 14, 1, 29), ('Name', (1, 14, 1, 19), 'tuple', ('Load',)), ('Tuple', (1, 20, 1, 28), [('Name', (1, 20, 1, 23), 'int', ('Load',)), ('Starred', (1, 25, 1, 28), ('Name', (1, 26, 1, 28), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), ('Load',)), None), [], [], None, []), [('Pass', (1, 32, 1, 36))], [], None, None, [])], []),
+('Module', [('FunctionDef', (1, 0, 1, 21), 'f', ('arguments', [], [], None, [], [], ('arg', (1, 8, 1, 14), 'kwargs', None, None), []), [('Pass', (1, 17, 1, 21))], [], None, None, [])], []),
+('Module', [('FunctionDef', (1, 0, 1, 71), 'f', ('arguments', [], [('arg', (1, 6, 1, 7), 'a', None, None), ('arg', (1, 9, 1, 10), 'b', None, None), ('arg', (1, 14, 1, 15), 'c', None, None), ('arg', (1, 22, 1, 23), 'd', None, None), ('arg', (1, 28, 1, 29), 'e', None, None)], ('arg', (1, 35, 1, 39), 'args', None, None), [('arg', (1, 41, 1, 42), 'f', None, None)], [('Constant', (1, 43, 1, 45), 42, None)], ('arg', (1, 49, 1, 55), 'kwargs', None, None), [('Constant', (1, 11, 1, 12), 1, None), ('Constant', (1, 16, 1, 20), None, None), ('List', (1, 24, 1, 26), [], ('Load',)), ('Dict', (1, 30, 1, 32), [], [])]), [('Expr', (1, 58, 1, 71), ('Constant', (1, 58, 1, 71), 'doc for f()', None))], [], None, None, [])], []),
+('Module', [('FunctionDef', (1, 0, 1, 27), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 23, 1, 27))], [], ('Subscript', (1, 11, 1, 21), ('Name', (1, 11, 1, 16), 'tuple', ('Load',)), ('Tuple', (1, 17, 1, 20), [('Starred', (1, 17, 1, 20), ('Name', (1, 18, 1, 20), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, [])], []),
+('Module', [('FunctionDef', (1, 0, 1, 32), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 28, 1, 32))], [], ('Subscript', (1, 11, 1, 26), ('Name', (1, 11, 1, 16), 'tuple', ('Load',)), ('Tuple', (1, 17, 1, 25), [('Name', (1, 17, 1, 20), 'int', ('Load',)), ('Starred', (1, 22, 1, 25), ('Name', (1, 23, 1, 25), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, [])], []),
+('Module', [('FunctionDef', (1, 0, 1, 45), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 41, 1, 45))], [], ('Subscript', (1, 11, 1, 39), ('Name', (1, 11, 1, 16), 'tuple', ('Load',)), ('Tuple', (1, 17, 1, 38), [('Name', (1, 17, 1, 20), 'int', ('Load',)), ('Starred', (1, 22, 1, 38), ('Subscript', (1, 23, 1, 38), ('Name', (1, 23, 1, 28), 'tuple', ('Load',)), ('Tuple', (1, 29, 1, 37), [('Name', (1, 29, 1, 32), 'int', ('Load',)), ('Constant', (1, 34, 1, 37), Ellipsis, None)], ('Load',)), ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, [])], []),
+('Module', [('ClassDef', (1, 0, 1, 12), 'C', [], [], [('Pass', (1, 8, 1, 12))], [], [])], []),
+('Module', [('ClassDef', (1, 0, 1, 32), 'C', [], [], [('Expr', (1, 9, 1, 32), ('Constant', (1, 9, 1, 32), 'docstring for class C', None))], [], [])], []),
+('Module', [('ClassDef', (1, 0, 1, 21), 'C', [('Name', (1, 8, 1, 14), 'object', ('Load',))], [], [('Pass', (1, 17, 1, 21))], [], [])], []),
+('Module', [('ClassDef', (1, 0, 1, 19), 'C', [('Name', (1, 8, 1, 9), 'A', ('Load',)), ('Name', (1, 11, 1, 12), 'B', ('Load',))], [], [('Pass', (1, 15, 1, 19))], [], [])], []),
+('Module', [('FunctionDef', (1, 0, 1, 16), 'f', ('arguments', [], [], None, [], [], None, []), [('Return', (1, 8, 1, 16), ('Constant', (1, 15, 1, 16), 1, None))], [], None, None, [])], []),
+('Module', [('FunctionDef', (1, 0, 1, 14), 'f', ('arguments', [], [], None, [], [], None, []), [('Return', (1, 8, 1, 14), None)], [], None, None, [])], []),
+('Module', [('Delete', (1, 0, 1, 5), [('Name', (1, 4, 1, 5), 'v', ('Del',))])], []),
+('Module', [('Assign', (1, 0, 1, 5), [('Name', (1, 0, 1, 1), 'v', ('Store',))], ('Constant', (1, 4, 1, 5), 1, None), None)], []),
+('Module', [('Assign', (1, 0, 1, 7), [('Tuple', (1, 0, 1, 3), [('Name', (1, 0, 1, 1), 'a', ('Store',)), ('Name', (1, 2, 1, 3), 'b', ('Store',))], ('Store',))], ('Name', (1, 6, 1, 7), 'c', ('Load',)), None)], []),
+('Module', [('Assign', (1, 0, 1, 9), [('Tuple', (1, 0, 1, 5), [('Name', (1, 1, 1, 2), 'a', ('Store',)), ('Name', (1, 3, 1, 4), 'b', ('Store',))], ('Store',))], ('Name', (1, 8, 1, 9), 'c', ('Load',)), None)], []),
+('Module', [('Assign', (1, 0, 1, 9), [('List', (1, 0, 1, 5), [('Name', (1, 1, 1, 2), 'a', ('Store',)), ('Name', (1, 3, 1, 4), 'b', ('Store',))], ('Store',))], ('Name', (1, 8, 1, 9), 'c', ('Load',)), None)], []),
+('Module', [('Assign', (1, 0, 1, 8), [('Subscript', (1, 0, 1, 4), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Name', (1, 2, 1, 3), 'b', ('Load',)), ('Store',))], ('Name', (1, 7, 1, 8), 'c', ('Load',)), None)], []),
+('Module', [('AnnAssign', (1, 0, 1, 13), ('Name', (1, 0, 1, 1), 'x', ('Store',)), ('Subscript', (1, 3, 1, 13), ('Name', (1, 3, 1, 8), 'tuple', ('Load',)), ('Tuple', (1, 9, 1, 12), [('Starred', (1, 9, 1, 12), ('Name', (1, 10, 1, 12), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, 1)], []),
+('Module', [('AnnAssign', (1, 0, 1, 18), ('Name', (1, 0, 1, 1), 'x', ('Store',)), ('Subscript', (1, 3, 1, 18), ('Name', (1, 3, 1, 8), 'tuple', ('Load',)), ('Tuple', (1, 9, 1, 17), [('Name', (1, 9, 1, 12), 'int', ('Load',)), ('Starred', (1, 14, 1, 17), ('Name', (1, 15, 1, 17), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, 1)], []),
+('Module', [('AnnAssign', (1, 0, 1, 31), ('Name', (1, 0, 1, 1), 'x', ('Store',)), ('Subscript', (1, 3, 1, 31), ('Name', (1, 3, 1, 8), 'tuple', ('Load',)), ('Tuple', (1, 9, 1, 30), [('Name', (1, 9, 1, 12), 'int', ('Load',)), ('Starred', (1, 14, 1, 30), ('Subscript', (1, 15, 1, 30), ('Name', (1, 15, 1, 20), 'tuple', ('Load',)), ('Tuple', (1, 21, 1, 29), [('Name', (1, 21, 1, 24), 'str', ('Load',)), ('Constant', (1, 26, 1, 29), Ellipsis, None)], ('Load',)), ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, 1)], []),
+('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Add',), ('Constant', (1, 5, 1, 6), 1, None))], []),
+('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Sub',), ('Constant', (1, 5, 1, 6), 1, None))], []),
+('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Mult',), ('Constant', (1, 5, 1, 6), 1, None))], []),
+('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('MatMult',), ('Constant', (1, 5, 1, 6), 1, None))], []),
+('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Div',), ('Constant', (1, 5, 1, 6), 1, None))], []),
+('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Mod',), ('Constant', (1, 5, 1, 6), 1, None))], []),
+('Module', [('AugAssign', (1, 0, 1, 7), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Pow',), ('Constant', (1, 6, 1, 7), 1, None))], []),
+('Module', [('AugAssign', (1, 0, 1, 7), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('LShift',), ('Constant', (1, 6, 1, 7), 1, None))], []),
+('Module', [('AugAssign', (1, 0, 1, 7), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('RShift',), ('Constant', (1, 6, 1, 7), 1, None))], []),
+('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('BitOr',), ('Constant', (1, 5, 1, 6), 1, None))], []),
+('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('BitXor',), ('Constant', (1, 5, 1, 6), 1, None))], []),
+('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('BitAnd',), ('Constant', (1, 5, 1, 6), 1, None))], []),
+('Module', [('AugAssign', (1, 0, 1, 7), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('FloorDiv',), ('Constant', (1, 6, 1, 7), 1, None))], []),
+('Module', [('For', (1, 0, 1, 15), ('Name', (1, 4, 1, 5), 'v', ('Store',)), ('Name', (1, 9, 1, 10), 'v', ('Load',)), [('Pass', (1, 11, 1, 15))], [], None)], []),
+('Module', [('For', (1, 0, 4, 6), ('Name', (1, 4, 1, 5), 'v', ('Store',)), ('Name', (1, 9, 1, 10), 'v', ('Load',)), [('Pass', (2, 2, 2, 6))], [('Pass', (4, 2, 4, 6))], None)], []),
+('Module', [('While', (1, 0, 1, 12), ('Name', (1, 6, 1, 7), 'v', ('Load',)), [('Pass', (1, 8, 1, 12))], [])], []),
+('Module', [('While', (1, 0, 4, 6), ('Name', (1, 6, 1, 7), 'v', ('Load',)), [('Pass', (2, 2, 2, 6))], [('Pass', (4, 2, 4, 6))])], []),
+('Module', [('If', (1, 0, 1, 9), ('Name', (1, 3, 1, 4), 'v', ('Load',)), [('Pass', (1, 5, 1, 9))], [])], []),
+('Module', [('If', (1, 0, 4, 6), ('Name', (1, 3, 1, 4), 'a', ('Load',)), [('Pass', (2, 2, 2, 6))], [('If', (3, 0, 4, 6), ('Name', (3, 5, 3, 6), 'b', ('Load',)), [('Pass', (4, 2, 4, 6))], [])])], []),
+('Module', [('If', (1, 0, 4, 6), ('Name', (1, 3, 1, 4), 'a', ('Load',)), [('Pass', (2, 2, 2, 6))], [('Pass', (4, 2, 4, 6))])], []),
+('Module', [('If', (1, 0, 6, 6), ('Name', (1, 3, 1, 4), 'a', ('Load',)), [('Pass', (2, 2, 2, 6))], [('If', (3, 0, 6, 6), ('Name', (3, 5, 3, 6), 'b', ('Load',)), [('Pass', (4, 2, 4, 6))], [('Pass', (6, 2, 6, 6))])])], []),
+('Module', [('If', (1, 0, 10, 6), ('Name', (1, 3, 1, 4), 'a', ('Load',)), [('Pass', (2, 2, 2, 6))], [('If', (3, 0, 10, 6), ('Name', (3, 5, 3, 6), 'b', ('Load',)), [('Pass', (4, 2, 4, 6))], [('If', (5, 0, 10, 6), ('Name', (5, 5, 5, 6), 'b', ('Load',)), [('Pass', (6, 2, 6, 6))], [('If', (7, 0, 10, 6), ('Name', (7, 5, 7, 6), 'b', ('Load',)), [('Pass', (8, 2, 8, 6))], [('Pass', (10, 2, 10, 6))])])])])], []),
+('Module', [('With', (1, 0, 1, 12), [('withitem', ('Name', (1, 5, 1, 6), 'x', ('Load',)), None)], [('Pass', (1, 8, 1, 12))], None)], []),
+('Module', [('With', (1, 0, 1, 15), [('withitem', ('Name', (1, 5, 1, 6), 'x', ('Load',)), None), ('withitem', ('Name', (1, 8, 1, 9), 'y', ('Load',)), None)], [('Pass', (1, 11, 1, 15))], None)], []),
+('Module', [('With', (1, 0, 1, 17), [('withitem', ('Name', (1, 5, 1, 6), 'x', ('Load',)), ('Name', (1, 10, 1, 11), 'y', ('Store',)))], [('Pass', (1, 13, 1, 17))], None)], []),
+('Module', [('With', (1, 0, 1, 25), [('withitem', ('Name', (1, 5, 1, 6), 'x', ('Load',)), ('Name', (1, 10, 1, 11), 'y', ('Store',))), ('withitem', ('Name', (1, 13, 1, 14), 'z', ('Load',)), ('Name', (1, 18, 1, 19), 'q', ('Store',)))], [('Pass', (1, 21, 1, 25))], None)], []),
+('Module', [('With', (1, 0, 1, 19), [('withitem', ('Name', (1, 6, 1, 7), 'x', ('Load',)), ('Name', (1, 11, 1, 12), 'y', ('Store',)))], [('Pass', (1, 15, 1, 19))], None)], []),
+('Module', [('With', (1, 0, 1, 17), [('withitem', ('Name', (1, 6, 1, 7), 'x', ('Load',)), None), ('withitem', ('Name', (1, 9, 1, 10), 'y', ('Load',)), None)], [('Pass', (1, 13, 1, 17))], None)], []),
+('Module', [('Raise', (1, 0, 1, 5), None, None)], []),
+('Module', [('Raise', (1, 0, 1, 25), ('Call', (1, 6, 1, 25), ('Name', (1, 6, 1, 15), 'Exception', ('Load',)), [('Constant', (1, 16, 1, 24), 'string', None)], []), None)], []),
+('Module', [('Raise', (1, 0, 1, 15), ('Name', (1, 6, 1, 15), 'Exception', ('Load',)), None)], []),
+('Module', [('Raise', (1, 0, 1, 35), ('Call', (1, 6, 1, 25), ('Name', (1, 6, 1, 15), 'Exception', ('Load',)), [('Constant', (1, 16, 1, 24), 'string', None)], []), ('Constant', (1, 31, 1, 35), None, None))], []),
+('Module', [('Try', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 7, 3, 16), 'Exception', ('Load',)), None, [('Pass', (4, 2, 4, 6))])], [], [])], []),
+('Module', [('Try', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 7, 3, 16), 'Exception', ('Load',)), 'exc', [('Pass', (4, 2, 4, 6))])], [], [])], []),
+('Module', [('Try', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [], [], [('Pass', (4, 2, 4, 6))])], []),
+('Module', [('TryStar', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 8, 3, 17), 'Exception', ('Load',)), None, [('Pass', (4, 2, 4, 6))])], [], [])], []),
+('Module', [('TryStar', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 8, 3, 17), 'Exception', ('Load',)), 'exc', [('Pass', (4, 2, 4, 6))])], [], [])], []),
+('Module', [('Try', (1, 0, 7, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 7, 3, 16), 'Exception', ('Load',)), None, [('Pass', (4, 2, 4, 6))])], [('Pass', (5, 7, 5, 11))], [('Pass', (7, 2, 7, 6))])], []),
+('Module', [('Try', (1, 0, 7, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 7, 3, 16), 'Exception', ('Load',)), 'exc', [('Pass', (4, 2, 4, 6))])], [('Pass', (5, 7, 5, 11))], [('Pass', (7, 2, 7, 6))])], []),
+('Module', [('TryStar', (1, 0, 7, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 8, 3, 17), 'Exception', ('Load',)), 'exc', [('Pass', (4, 2, 4, 6))])], [('Pass', (5, 7, 5, 11))], [('Pass', (7, 2, 7, 6))])], []),
+('Module', [('Assert', (1, 0, 1, 8), ('Name', (1, 7, 1, 8), 'v', ('Load',)), None)], []),
+('Module', [('Assert', (1, 0, 1, 19), ('Name', (1, 7, 1, 8), 'v', ('Load',)), ('Constant', (1, 10, 1, 19), 'message', None))], []),
+('Module', [('Import', (1, 0, 1, 10), [('alias', (1, 7, 1, 10), 'sys', None)])], []),
+('Module', [('Import', (1, 0, 1, 17), [('alias', (1, 7, 1, 17), 'foo', 'bar')])], []),
+('Module', [('ImportFrom', (1, 0, 1, 22), 'sys', [('alias', (1, 16, 1, 22), 'x', 'y')], 0)], []),
+('Module', [('ImportFrom', (1, 0, 1, 17), 'sys', [('alias', (1, 16, 1, 17), 'v', None)], 0)], []),
+('Module', [('Global', (1, 0, 1, 8), ['v'])], []),
+('Module', [('Expr', (1, 0, 1, 1), ('Constant', (1, 0, 1, 1), 1, None))], []),
+('Module', [('Pass', (1, 0, 1, 4))], []),
+('Module', [('For', (1, 0, 1, 16), ('Name', (1, 4, 1, 5), 'v', ('Store',)), ('Name', (1, 9, 1, 10), 'v', ('Load',)), [('Break', (1, 11, 1, 16))], [], None)], []),
+('Module', [('For', (1, 0, 1, 19), ('Name', (1, 4, 1, 5), 'v', ('Store',)), ('Name', (1, 9, 1, 10), 'v', ('Load',)), [('Continue', (1, 11, 1, 19))], [], None)], []),
+('Module', [('For', (1, 0, 1, 18), ('Tuple', (1, 4, 1, 7), [('Name', (1, 4, 1, 5), 'a', ('Store',)), ('Name', (1, 6, 1, 7), 'b', ('Store',))], ('Store',)), ('Name', (1, 11, 1, 12), 'c', ('Load',)), [('Pass', (1, 14, 1, 18))], [], None)], []),
+('Module', [('For', (1, 0, 1, 20), ('Tuple', (1, 4, 1, 9), [('Name', (1, 5, 1, 6), 'a', ('Store',)), ('Name', (1, 7, 1, 8), 'b', ('Store',))], ('Store',)), ('Name', (1, 13, 1, 14), 'c', ('Load',)), [('Pass', (1, 16, 1, 20))], [], None)], []),
+('Module', [('For', (1, 0, 1, 20), ('List', (1, 4, 1, 9), [('Name', (1, 5, 1, 6), 'a', ('Store',)), ('Name', (1, 7, 1, 8), 'b', ('Store',))], ('Store',)), ('Name', (1, 13, 1, 14), 'c', ('Load',)), [('Pass', (1, 16, 1, 20))], [], None)], []),
+('Module', [('Expr', (1, 0, 11, 5), ('GeneratorExp', (1, 0, 11, 5), ('Tuple', (2, 4, 6, 5), [('Name', (3, 4, 3, 6), 'Aa', ('Load',)), ('Name', (5, 7, 5, 9), 'Bb', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (8, 4, 10, 6), [('Name', (8, 4, 8, 6), 'Aa', ('Store',)), ('Name', (10, 4, 10, 6), 'Bb', ('Store',))], ('Store',)), ('Name', (10, 10, 10, 12), 'Cc', ('Load',)), [], 0)]))], []),
+('Module', [('Expr', (1, 0, 1, 34), ('DictComp', (1, 0, 1, 34), ('Name', (1, 1, 1, 2), 'a', ('Load',)), ('Name', (1, 5, 1, 6), 'b', ('Load',)), [('comprehension', ('Name', (1, 11, 1, 12), 'w', ('Store',)), ('Name', (1, 16, 1, 17), 'x', ('Load',)), [], 0), ('comprehension', ('Name', (1, 22, 1, 23), 'm', ('Store',)), ('Name', (1, 27, 1, 28), 'p', ('Load',)), [('Name', (1, 32, 1, 33), 'g', ('Load',))], 0)]))], []),
+('Module', [('Expr', (1, 0, 1, 20), ('DictComp', (1, 0, 1, 20), ('Name', (1, 1, 1, 2), 'a', ('Load',)), ('Name', (1, 5, 1, 6), 'b', ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'v', ('Store',)), ('Name', (1, 13, 1, 14), 'w', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'x', ('Load',)), [], 0)]))], []),
+('Module', [('Expr', (1, 0, 1, 19), ('SetComp', (1, 0, 1, 19), ('Name', (1, 1, 1, 2), 'r', ('Load',)), [('comprehension', ('Name', (1, 7, 1, 8), 'l', ('Store',)), ('Name', (1, 12, 1, 13), 'x', ('Load',)), [('Name', (1, 17, 1, 18), 'g', ('Load',))], 0)]))], []),
+('Module', [('Expr', (1, 0, 1, 16), ('SetComp', (1, 0, 1, 16), ('Name', (1, 1, 1, 2), 'r', ('Load',)), [('comprehension', ('Tuple', (1, 7, 1, 10), [('Name', (1, 7, 1, 8), 'l', ('Store',)), ('Name', (1, 9, 1, 10), 'm', ('Store',))], ('Store',)), ('Name', (1, 14, 1, 15), 'x', ('Load',)), [], 0)]))], []),
+('Module', [('AsyncFunctionDef', (1, 0, 3, 18), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (2, 1, 2, 17), ('Constant', (2, 1, 2, 17), 'async function', None)), ('Expr', (3, 1, 3, 18), ('Await', (3, 1, 3, 18), ('Call', (3, 7, 3, 18), ('Name', (3, 7, 3, 16), 'something', ('Load',)), [], [])))], [], None, None, [])], []),
+('Module', [('AsyncFunctionDef', (1, 0, 3, 8), 'f', ('arguments', [], [], None, [], [], None, []), [('AsyncFor', (2, 1, 3, 8), ('Name', (2, 11, 2, 12), 'e', ('Store',)), ('Name', (2, 16, 2, 17), 'i', ('Load',)), [('Expr', (2, 19, 2, 20), ('Constant', (2, 19, 2, 20), 1, None))], [('Expr', (3, 7, 3, 8), ('Constant', (3, 7, 3, 8), 2, None))], None)], [], None, None, [])], []),
+('Module', [('AsyncFunctionDef', (1, 0, 2, 21), 'f', ('arguments', [], [], None, [], [], None, []), [('AsyncWith', (2, 1, 2, 21), [('withitem', ('Name', (2, 12, 2, 13), 'a', ('Load',)), ('Name', (2, 17, 2, 18), 'b', ('Store',)))], [('Expr', (2, 20, 2, 21), ('Constant', (2, 20, 2, 21), 1, None))], None)], [], None, None, [])], []),
+('Module', [('Expr', (1, 0, 1, 14), ('Dict', (1, 0, 1, 14), [None, ('Constant', (1, 10, 1, 11), 2, None)], [('Dict', (1, 3, 1, 8), [('Constant', (1, 4, 1, 5), 1, None)], [('Constant', (1, 6, 1, 7), 2, None)]), ('Constant', (1, 12, 1, 13), 3, None)]))], []),
+('Module', [('Expr', (1, 0, 1, 12), ('Set', (1, 0, 1, 12), [('Starred', (1, 1, 1, 8), ('Set', (1, 2, 1, 8), [('Constant', (1, 3, 1, 4), 1, None), ('Constant', (1, 6, 1, 7), 2, None)]), ('Load',)), ('Constant', (1, 10, 1, 11), 3, None)]))], []),
+('Module', [('FunctionDef', (1, 0, 1, 16), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (1, 9, 1, 16), ('Yield', (1, 9, 1, 16), ('Constant', (1, 15, 1, 16), 1, None)))], [], None, None, [])], []),
+('Module', [('FunctionDef', (1, 0, 1, 22), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (1, 9, 1, 22), ('YieldFrom', (1, 9, 1, 22), ('List', (1, 20, 1, 22), [], ('Load',))))], [], None, None, [])], []),
+('Module', [('AsyncFunctionDef', (1, 0, 2, 21), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (2, 1, 2, 21), ('ListComp', (2, 1, 2, 21), ('Name', (2, 2, 2, 3), 'i', ('Load',)), [('comprehension', ('Name', (2, 14, 2, 15), 'b', ('Store',)), ('Name', (2, 19, 2, 20), 'c', ('Load',)), [], 1)]))], [], None, None, [])], []),
+('Module', [('FunctionDef', (4, 0, 4, 13), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (4, 9, 4, 13))], [('Name', (1, 1, 1, 6), 'deco1', ('Load',)), ('Call', (2, 1, 2, 8), ('Name', (2, 1, 2, 6), 'deco2', ('Load',)), [], []), ('Call', (3, 1, 3, 9), ('Name', (3, 1, 3, 6), 'deco3', ('Load',)), [('Constant', (3, 7, 3, 8), 1, None)], [])], None, None, [])], []),
+('Module', [('AsyncFunctionDef', (4, 0, 4, 19), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (4, 15, 4, 19))], [('Name', (1, 1, 1, 6), 'deco1', ('Load',)), ('Call', (2, 1, 2, 8), ('Name', (2, 1, 2, 6), 'deco2', ('Load',)), [], []), ('Call', (3, 1, 3, 9), ('Name', (3, 1, 3, 6), 'deco3', ('Load',)), [('Constant', (3, 7, 3, 8), 1, None)], [])], None, None, [])], []),
+('Module', [('ClassDef', (4, 0, 4, 13), 'C', [], [], [('Pass', (4, 9, 4, 13))], [('Name', (1, 1, 1, 6), 'deco1', ('Load',)), ('Call', (2, 1, 2, 8), ('Name', (2, 1, 2, 6), 'deco2', ('Load',)), [], []), ('Call', (3, 1, 3, 9), ('Name', (3, 1, 3, 6), 'deco3', ('Load',)), [('Constant', (3, 7, 3, 8), 1, None)], [])], [])], []),
+('Module', [('FunctionDef', (2, 0, 2, 13), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (2, 9, 2, 13))], [('Call', (1, 1, 1, 19), ('Name', (1, 1, 1, 5), 'deco', ('Load',)), [('GeneratorExp', (1, 5, 1, 19), ('Name', (1, 6, 1, 7), 'a', ('Load',)), [('comprehension', ('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 17, 1, 18), 'b', ('Load',)), [], 0)])], [])], None, None, [])], []),
+('Module', [('FunctionDef', (2, 0, 2, 13), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (2, 9, 2, 13))], [('Attribute', (1, 1, 1, 6), ('Attribute', (1, 1, 1, 4), ('Name', (1, 1, 1, 2), 'a', ('Load',)), 'b', ('Load',)), 'c', ('Load',))], None, None, [])], []),
+('Module', [('Expr', (1, 0, 1, 8), ('NamedExpr', (1, 1, 1, 7), ('Name', (1, 1, 1, 2), 'a', ('Store',)), ('Constant', (1, 6, 1, 7), 1, None)))], []),
+('Module', [('If', (1, 0, 1, 19), ('NamedExpr', (1, 3, 1, 13), ('Name', (1, 3, 1, 4), 'a', ('Store',)), ('Call', (1, 8, 1, 13), ('Name', (1, 8, 1, 11), 'foo', ('Load',)), [], [])), [('Pass', (1, 15, 1, 19))], [])], []),
+('Module', [('While', (1, 0, 1, 22), ('NamedExpr', (1, 6, 1, 16), ('Name', (1, 6, 1, 7), 'a', ('Store',)), ('Call', (1, 11, 1, 16), ('Name', (1, 11, 1, 14), 'foo', ('Load',)), [], [])), [('Pass', (1, 18, 1, 22))], [])], []),
+('Module', [('FunctionDef', (1, 0, 1, 18), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [], None, [], [], None, []), [('Pass', (1, 14, 1, 18))], [], None, None, [])], []),
+('Module', [('FunctionDef', (1, 0, 1, 26), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 12, 1, 13), 'c', None, None), ('arg', (1, 15, 1, 16), 'd', None, None), ('arg', (1, 18, 1, 19), 'e', None, None)], None, [], [], None, []), [('Pass', (1, 22, 1, 26))], [], None, None, [])], []),
+('Module', [('FunctionDef', (1, 0, 1, 29), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 12, 1, 13), 'c', None, None)], None, [('arg', (1, 18, 1, 19), 'd', None, None), ('arg', (1, 21, 1, 22), 'e', None, None)], [None, None], None, []), [('Pass', (1, 25, 1, 29))], [], None, None, [])], []),
+('Module', [('FunctionDef', (1, 0, 1, 39), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 12, 1, 13), 'c', None, None)], None, [('arg', (1, 18, 1, 19), 'd', None, None), ('arg', (1, 21, 1, 22), 'e', None, None)], [None, None], ('arg', (1, 26, 1, 32), 'kwargs', None, None), []), [('Pass', (1, 35, 1, 39))], [], None, None, [])], []),
+('Module', [('FunctionDef', (1, 0, 1, 20), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [], None, [], [], None, [('Constant', (1, 8, 1, 9), 1, None)]), [('Pass', (1, 16, 1, 20))], [], None, None, [])], []),
+('Module', [('FunctionDef', (1, 0, 1, 29), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None), ('arg', (1, 19, 1, 20), 'c', None, None)], None, [], [], None, [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None), ('Constant', (1, 21, 1, 22), 4, None)]), [('Pass', (1, 25, 1, 29))], [], None, None, [])], []),
+('Module', [('FunctionDef', (1, 0, 1, 32), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [('Constant', (1, 24, 1, 25), 4, None)], None, [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 28, 1, 32))], [], None, None, [])], []),
+('Module', [('FunctionDef', (1, 0, 1, 30), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [None], None, [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 26, 1, 30))], [], None, None, [])], []),
+('Module', [('FunctionDef', (1, 0, 1, 42), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [('Constant', (1, 24, 1, 25), 4, None)], ('arg', (1, 29, 1, 35), 'kwargs', None, None), [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 38, 1, 42))], [], None, None, [])], []),
+('Module', [('FunctionDef', (1, 0, 1, 40), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [None], ('arg', (1, 27, 1, 33), 'kwargs', None, None), [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 36, 1, 40))], [], None, None, [])], []),
+('Module', [('TypeAlias', (1, 0, 1, 12), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [], ('Name', (1, 9, 1, 12), 'int', ('Load',)))], []),
+('Module', [('TypeAlias', (1, 0, 1, 15), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 8), 'T', None, None)], ('Name', (1, 12, 1, 15), 'int', ('Load',)))], []),
+('Module', [('TypeAlias', (1, 0, 1, 32), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 8), 'T', None, None), ('TypeVarTuple', (1, 10, 1, 13), 'Ts', None), ('ParamSpec', (1, 15, 1, 18), 'P', None)], ('Tuple', (1, 22, 1, 32), [('Name', (1, 23, 1, 24), 'T', ('Load',)), ('Name', (1, 26, 1, 28), 'Ts', ('Load',)), ('Name', (1, 30, 1, 31), 'P', ('Load',))], ('Load',)))], []),
+('Module', [('TypeAlias', (1, 0, 1, 37), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 13), 'T', ('Name', (1, 10, 1, 13), 'int', ('Load',)), None), ('TypeVarTuple', (1, 15, 1, 18), 'Ts', None), ('ParamSpec', (1, 20, 1, 23), 'P', None)], ('Tuple', (1, 27, 1, 37), [('Name', (1, 28, 1, 29), 'T', ('Load',)), ('Name', (1, 31, 1, 33), 'Ts', ('Load',)), ('Name', (1, 35, 1, 36), 'P', ('Load',))], ('Load',)))], []),
+('Module', [('TypeAlias', (1, 0, 1, 44), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 20), 'T', ('Tuple', (1, 10, 1, 20), [('Name', (1, 11, 1, 14), 'int', ('Load',)), ('Name', (1, 16, 1, 19), 'str', ('Load',))], ('Load',)), None), ('TypeVarTuple', (1, 22, 1, 25), 'Ts', None), ('ParamSpec', (1, 27, 1, 30), 'P', None)], ('Tuple', (1, 34, 1, 44), [('Name', (1, 35, 1, 36), 'T', ('Load',)), ('Name', (1, 38, 1, 40), 'Ts', ('Load',)), ('Name', (1, 42, 1, 43), 'P', ('Load',))], ('Load',)))], []),
+('Module', [('TypeAlias', (1, 0, 1, 48), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 17), 'T', ('Name', (1, 10, 1, 13), 'int', ('Load',)), ('Constant', (1, 16, 1, 17), 1, None)), ('TypeVarTuple', (1, 19, 1, 26), 'Ts', ('Constant', (1, 25, 1, 26), 2, None)), ('ParamSpec', (1, 28, 1, 34), 'P', ('Constant', (1, 33, 1, 34), 3, None))], ('Tuple', (1, 38, 1, 48), [('Name', (1, 39, 1, 40), 'T', ('Load',)), ('Name', (1, 42, 1, 44), 'Ts', ('Load',)), ('Name', (1, 46, 1, 47), 'P', ('Load',))], ('Load',)))], []),
+('Module', [('ClassDef', (1, 0, 1, 16), 'X', [], [], [('Pass', (1, 12, 1, 16))], [], [('TypeVar', (1, 8, 1, 9), 'T', None, None)])], []),
+('Module', [('ClassDef', (1, 0, 1, 26), 'X', [], [], [('Pass', (1, 22, 1, 26))], [], [('TypeVar', (1, 8, 1, 9), 'T', None, None), ('TypeVarTuple', (1, 11, 1, 14), 'Ts', None), ('ParamSpec', (1, 16, 1, 19), 'P', None)])], []),
+('Module', [('ClassDef', (1, 0, 1, 31), 'X', [], [], [('Pass', (1, 27, 1, 31))], [], [('TypeVar', (1, 8, 1, 14), 'T', ('Name', (1, 11, 1, 14), 'int', ('Load',)), None), ('TypeVarTuple', (1, 16, 1, 19), 'Ts', None), ('ParamSpec', (1, 21, 1, 24), 'P', None)])], []),
+('Module', [('ClassDef', (1, 0, 1, 38), 'X', [], [], [('Pass', (1, 34, 1, 38))], [], [('TypeVar', (1, 8, 1, 21), 'T', ('Tuple', (1, 11, 1, 21), [('Name', (1, 12, 1, 15), 'int', ('Load',)), ('Name', (1, 17, 1, 20), 'str', ('Load',))], ('Load',)), None), ('TypeVarTuple', (1, 23, 1, 26), 'Ts', None), ('ParamSpec', (1, 28, 1, 31), 'P', None)])], []),
+('Module', [('ClassDef', (1, 0, 1, 43), 'X', [], [], [('Pass', (1, 39, 1, 43))], [], [('TypeVar', (1, 8, 1, 18), 'T', ('Name', (1, 11, 1, 14), 'int', ('Load',)), ('Constant', (1, 17, 1, 18), 1, None)), ('TypeVarTuple', (1, 20, 1, 27), 'Ts', ('Constant', (1, 26, 1, 27), 2, None)), ('ParamSpec', (1, 29, 1, 36), 'P', ('Constant', (1, 35, 1, 36), 3, None))])], []),
+('Module', [('FunctionDef', (1, 0, 1, 16), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 12, 1, 16))], [], None, None, [('TypeVar', (1, 6, 1, 7), 'T', None, None)])], []),
+('Module', [('FunctionDef', (1, 0, 1, 26), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 22, 1, 26))], [], None, None, [('TypeVar', (1, 6, 1, 7), 'T', None, None), ('TypeVarTuple', (1, 9, 1, 12), 'Ts', None), ('ParamSpec', (1, 14, 1, 17), 'P', None)])], []),
+('Module', [('FunctionDef', (1, 0, 1, 31), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 27, 1, 31))], [], None, None, [('TypeVar', (1, 6, 1, 12), 'T', ('Name', (1, 9, 1, 12), 'int', ('Load',)), None), ('TypeVarTuple', (1, 14, 1, 17), 'Ts', None), ('ParamSpec', (1, 19, 1, 22), 'P', None)])], []),
+('Module', [('FunctionDef', (1, 0, 1, 38), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 34, 1, 38))], [], None, None, [('TypeVar', (1, 6, 1, 19), 'T', ('Tuple', (1, 9, 1, 19), [('Name', (1, 10, 1, 13), 'int', ('Load',)), ('Name', (1, 15, 1, 18), 'str', ('Load',))], ('Load',)), None), ('TypeVarTuple', (1, 21, 1, 24), 'Ts', None), ('ParamSpec', (1, 26, 1, 29), 'P', None)])], []),
+('Module', [('FunctionDef', (1, 0, 1, 43), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 39, 1, 43))], [], None, None, [('TypeVar', (1, 6, 1, 16), 'T', ('Name', (1, 9, 1, 12), 'int', ('Load',)), ('Constant', (1, 15, 1, 16), 1, None)), ('TypeVarTuple', (1, 18, 1, 25), 'Ts', ('Constant', (1, 24, 1, 25), 2, None)), ('ParamSpec', (1, 27, 1, 34), 'P', ('Constant', (1, 33, 1, 34), 3, None))])], []),
+('Module', [('Match', (1, 0, 3, 6), ('Name', (1, 6, 1, 7), 'x', ('Load',)), [('match_case', ('MatchValue', (2, 6, 2, 7), ('Constant', (2, 6, 2, 7), 1, None)), None, [('Pass', (3, 2, 3, 6))])])], []),
+('Module', [('Match', (1, 0, 5, 6), ('Name', (1, 6, 1, 7), 'x', ('Load',)), [('match_case', ('MatchValue', (2, 6, 2, 7), ('Constant', (2, 6, 2, 7), 1, None)), None, [('Pass', (3, 2, 3, 6))]), ('match_case', ('MatchAs', (4, 6, 4, 7), None, None), None, [('Pass', (5, 2, 5, 6))])])], []),
+]
+single_results = [
+('Interactive', [('Expr', (1, 0, 1, 3), ('BinOp', (1, 0, 1, 3), ('Constant', (1, 0, 1, 1), 1, None), ('Add',), ('Constant', (1, 2, 1, 3), 2, None)))]),
+]
+eval_results = [
+('Expression', ('Constant', (1, 0, 1, 4), None, None)),
+('Expression', ('Constant', (1, 0, 1, 4), True, None)),
+('Expression', ('Constant', (1, 0, 1, 5), False, None)),
+('Expression', ('BoolOp', (1, 0, 1, 7), ('And',), [('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Name', (1, 6, 1, 7), 'b', ('Load',))])),
+('Expression', ('BoolOp', (1, 0, 1, 6), ('Or',), [('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Name', (1, 5, 1, 6), 'b', ('Load',))])),
+('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Add',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))),
+('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Sub',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))),
+('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Mult',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))),
+('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Div',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))),
+('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('MatMult',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))),
+('Expression', ('BinOp', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('FloorDiv',), ('Name', (1, 5, 1, 6), 'b', ('Load',)))),
+('Expression', ('BinOp', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Pow',), ('Name', (1, 5, 1, 6), 'b', ('Load',)))),
+('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Mod',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))),
+('Expression', ('BinOp', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('RShift',), ('Name', (1, 5, 1, 6), 'b', ('Load',)))),
+('Expression', ('BinOp', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('LShift',), ('Name', (1, 5, 1, 6), 'b', ('Load',)))),
+('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('BitXor',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))),
+('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('BitOr',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))),
+('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('BitAnd',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))),
+('Expression', ('UnaryOp', (1, 0, 1, 5), ('Not',), ('Name', (1, 4, 1, 5), 'v', ('Load',)))),
+('Expression', ('UnaryOp', (1, 0, 1, 2), ('UAdd',), ('Name', (1, 1, 1, 2), 'v', ('Load',)))),
+('Expression', ('UnaryOp', (1, 0, 1, 2), ('USub',), ('Name', (1, 1, 1, 2), 'v', ('Load',)))),
+('Expression', ('UnaryOp', (1, 0, 1, 2), ('Invert',), ('Name', (1, 1, 1, 2), 'v', ('Load',)))),
+('Expression', ('Lambda', (1, 0, 1, 11), ('arguments', [], [], None, [], [], None, []), ('Constant', (1, 7, 1, 11), None, None))),
+('Expression', ('Dict', (1, 0, 1, 7), [('Constant', (1, 2, 1, 3), 1, None)], [('Constant', (1, 4, 1, 5), 2, None)])),
+('Expression', ('Dict', (1, 0, 1, 2), [], [])),
+('Expression', ('Set', (1, 0, 1, 7), [('Constant', (1, 1, 1, 5), None, None)])),
+('Expression', ('Dict', (1, 0, 5, 6), [('Constant', (2, 6, 2, 7), 1, None)], [('Constant', (4, 10, 4, 11), 2, None)])),
+('Expression', ('List', (1, 0, 5, 6), [('Constant', (2, 6, 2, 7), 1, None), ('Constant', (4, 8, 4, 9), 1, None)], ('Load',))),
+('Expression', ('Tuple', (1, 0, 4, 6), [('Constant', (2, 6, 2, 7), 1, None)], ('Load',))),
+('Expression', ('Set', (1, 0, 5, 6), [('Constant', (2, 6, 2, 7), 1, None), ('Constant', (4, 8, 4, 9), 1, None)])),
+('Expression', ('ListComp', (1, 0, 1, 19), ('Name', (1, 1, 1, 2), 'a', ('Load',)), [('comprehension', ('Name', (1, 7, 1, 8), 'b', ('Store',)), ('Name', (1, 12, 1, 13), 'c', ('Load',)), [('Name', (1, 17, 1, 18), 'd', ('Load',))], 0)])),
+('Expression', ('GeneratorExp', (1, 0, 1, 19), ('Name', (1, 1, 1, 2), 'a', ('Load',)), [('comprehension', ('Name', (1, 7, 1, 8), 'b', ('Store',)), ('Name', (1, 12, 1, 13), 'c', ('Load',)), [('Name', (1, 17, 1, 18), 'd', ('Load',))], 0)])),
+('Expression', ('SetComp', (1, 0, 1, 19), ('Name', (1, 1, 1, 2), 'a', ('Load',)), [('comprehension', ('Name', (1, 7, 1, 8), 'b', ('Store',)), ('Name', (1, 12, 1, 13), 'c', ('Load',)), [('Name', (1, 17, 1, 18), 'd', ('Load',))], 0)])),
+('Expression', ('DictComp', (1, 0, 1, 25), ('Name', (1, 1, 1, 2), 'k', ('Load',)), ('Name', (1, 4, 1, 5), 'v', ('Load',)), [('comprehension', ('Tuple', (1, 10, 1, 14), [('Name', (1, 10, 1, 11), 'k', ('Store',)), ('Name', (1, 13, 1, 14), 'v', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'c', ('Load',)), [('Name', (1, 23, 1, 24), 'd', ('Load',))], 0)])),
+('Expression', ('ListComp', (1, 0, 1, 20), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'c', ('Load',)), [], 0)])),
+('Expression', ('ListComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])),
+('Expression', ('ListComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('List', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])),
+('Expression', ('SetComp', (1, 0, 1, 20), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'c', ('Load',)), [], 0)])),
+('Expression', ('SetComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])),
+('Expression', ('SetComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('List', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])),
+('Expression', ('GeneratorExp', (1, 0, 1, 20), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'c', ('Load',)), [], 0)])),
+('Expression', ('GeneratorExp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])),
+('Expression', ('GeneratorExp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('List', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])),
+('Expression', ('Compare', (1, 0, 1, 9), ('Constant', (1, 0, 1, 1), 1, None), [('Lt',), ('Lt',)], [('Constant', (1, 4, 1, 5), 2, None), ('Constant', (1, 8, 1, 9), 3, None)])),
+('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('Eq',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])),
+('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('LtE',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])),
+('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('GtE',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])),
+('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('NotEq',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])),
+('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('Is',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])),
+('Expression', ('Compare', (1, 0, 1, 10), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('IsNot',)], [('Name', (1, 9, 1, 10), 'b', ('Load',))])),
+('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('In',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])),
+('Expression', ('Compare', (1, 0, 1, 10), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('NotIn',)], [('Name', (1, 9, 1, 10), 'b', ('Load',))])),
+('Expression', ('Call', (1, 0, 1, 3), ('Name', (1, 0, 1, 1), 'f', ('Load',)), [], [])),
+('Expression', ('Call', (1, 0, 1, 17), ('Name', (1, 0, 1, 1), 'f', ('Load',)), [('Constant', (1, 2, 1, 3), 1, None), ('Constant', (1, 4, 1, 5), 2, None), ('Starred', (1, 10, 1, 12), ('Name', (1, 11, 1, 12), 'd', ('Load',)), ('Load',))], [('keyword', (1, 6, 1, 9), 'c', ('Constant', (1, 8, 1, 9), 3, None)), ('keyword', (1, 13, 1, 16), None, ('Name', (1, 15, 1, 16), 'e', ('Load',)))])),
+('Expression', ('Call', (1, 0, 1, 10), ('Name', (1, 0, 1, 1), 'f', ('Load',)), [('Starred', (1, 2, 1, 9), ('List', (1, 3, 1, 9), [('Constant', (1, 4, 1, 5), 0, None), ('Constant', (1, 7, 1, 8), 1, None)], ('Load',)), ('Load',))], [])),
+('Expression', ('Call', (1, 0, 1, 15), ('Name', (1, 0, 1, 1), 'f', ('Load',)), [('GeneratorExp', (1, 1, 1, 15), ('Name', (1, 2, 1, 3), 'a', ('Load',)), [('comprehension', ('Name', (1, 8, 1, 9), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Load',)), [], 0)])], [])),
+('Expression', ('Constant', (1, 0, 1, 2), 10, None)),
+('Expression', ('Constant', (1, 0, 1, 2), 1j, None)),
+('Expression', ('Constant', (1, 0, 1, 8), 'string', None)),
+('Expression', ('Attribute', (1, 0, 1, 3), ('Name', (1, 0, 1, 1), 'a', ('Load',)), 'b', ('Load',))),
+('Expression', ('Subscript', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Slice', (1, 2, 1, 5), ('Name', (1, 2, 1, 3), 'b', ('Load',)), ('Name', (1, 4, 1, 5), 'c', ('Load',)), None), ('Load',))),
+('Expression', ('Name', (1, 0, 1, 1), 'v', ('Load',))),
+('Expression', ('List', (1, 0, 1, 7), [('Constant', (1, 1, 1, 2), 1, None), ('Constant', (1, 3, 1, 4), 2, None), ('Constant', (1, 5, 1, 6), 3, None)], ('Load',))),
+('Expression', ('List', (1, 0, 1, 2), [], ('Load',))),
+('Expression', ('Tuple', (1, 0, 1, 5), [('Constant', (1, 0, 1, 1), 1, None), ('Constant', (1, 2, 1, 3), 2, None), ('Constant', (1, 4, 1, 5), 3, None)], ('Load',))),
+('Expression', ('Tuple', (1, 0, 1, 7), [('Constant', (1, 1, 1, 2), 1, None), ('Constant', (1, 3, 1, 4), 2, None), ('Constant', (1, 5, 1, 6), 3, None)], ('Load',))),
+('Expression', ('Tuple', (1, 0, 1, 2), [], ('Load',))),
+('Expression', ('Call', (1, 0, 1, 17), ('Attribute', (1, 0, 1, 7), ('Attribute', (1, 0, 1, 5), ('Attribute', (1, 0, 1, 3), ('Name', (1, 0, 1, 1), 'a', ('Load',)), 'b', ('Load',)), 'c', ('Load',)), 'd', ('Load',)), [('Subscript', (1, 8, 1, 16), ('Attribute', (1, 8, 1, 11), ('Name', (1, 8, 1, 9), 'a', ('Load',)), 'b', ('Load',)), ('Slice', (1, 12, 1, 15), ('Constant', (1, 12, 1, 13), 1, None), ('Constant', (1, 14, 1, 15), 2, None), None), ('Load',))], [])),
+('Expression', ('Subscript', (1, 0, 1, 7), ('List', (1, 0, 1, 3), [('Constant', (1, 1, 1, 2), 5, None)], ('Load',)), ('Slice', (1, 4, 1, 6), ('Constant', (1, 4, 1, 5), 1, None), None, None), ('Load',))),
+('Expression', ('Subscript', (1, 0, 1, 7), ('List', (1, 0, 1, 3), [('Constant', (1, 1, 1, 2), 5, None)], ('Load',)), ('Slice', (1, 4, 1, 6), None, ('Constant', (1, 5, 1, 6), 1, None), None), ('Load',))),
+('Expression', ('Subscript', (1, 0, 1, 8), ('List', (1, 0, 1, 3), [('Constant', (1, 1, 1, 2), 5, None)], ('Load',)), ('Slice', (1, 4, 1, 7), None, None, ('Constant', (1, 6, 1, 7), 1, None)), ('Load',))),
+('Expression', ('Subscript', (1, 0, 1, 10), ('List', (1, 0, 1, 3), [('Constant', (1, 1, 1, 2), 5, None)], ('Load',)), ('Slice', (1, 4, 1, 9), ('Constant', (1, 4, 1, 5), 1, None), ('Constant', (1, 6, 1, 7), 1, None), ('Constant', (1, 8, 1, 9), 1, None)), ('Load',))),
+('Expression', ('IfExp', (1, 0, 1, 21), ('Name', (1, 9, 1, 10), 'x', ('Load',)), ('Call', (1, 0, 1, 5), ('Name', (1, 0, 1, 3), 'foo', ('Load',)), [], []), ('Call', (1, 16, 1, 21), ('Name', (1, 16, 1, 19), 'bar', ('Load',)), [], []))),
+('Expression', ('JoinedStr', (1, 0, 1, 6), [('FormattedValue', (1, 2, 1, 5), ('Name', (1, 3, 1, 4), 'a', ('Load',)), -1, None)])),
+('Expression', ('JoinedStr', (1, 0, 1, 10), [('FormattedValue', (1, 2, 1, 9), ('Name', (1, 3, 1, 4), 'a', ('Load',)), -1, ('JoinedStr', (1, 4, 1, 8), [('Constant', (1, 5, 1, 8), '.2f', None)]))])),
+('Expression', ('JoinedStr', (1, 0, 1, 8), [('FormattedValue', (1, 2, 1, 7), ('Name', (1, 3, 1, 4), 'a', ('Load',)), 114, None)])),
+('Expression', ('JoinedStr', (1, 0, 1, 11), [('Constant', (1, 2, 1, 6), 'foo(', None), ('FormattedValue', (1, 6, 1, 9), ('Name', (1, 7, 1, 8), 'a', ('Load',)), -1, None), ('Constant', (1, 9, 1, 10), ')', None)])),
+]
+main()
diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast/test_ast.py
similarity index 68%
rename from Lib/test/test_ast.py
rename to Lib/test/test_ast/test_ast.py
index 55725ec36fd3a7..0a3edef4678546 100644
--- a/Lib/test/test_ast.py
+++ b/Lib/test/test_ast/test_ast.py
@@ -19,389 +19,16 @@
from test import support
from test.support import os_helper, script_helper
from test.support.ast_helper import ASTTestMixin
+from test.test_ast.utils import to_tuple
+from test.test_ast.snippets import (
+ eval_tests, eval_results, exec_tests, exec_results, single_tests, single_results
+)
-def to_tuple(t):
- if t is None or isinstance(t, (str, int, complex, float, bytes)) or t is Ellipsis:
- return t
- elif isinstance(t, list):
- return [to_tuple(e) for e in t]
- result = [t.__class__.__name__]
- if hasattr(t, 'lineno') and hasattr(t, 'col_offset'):
- result.append((t.lineno, t.col_offset))
- if hasattr(t, 'end_lineno') and hasattr(t, 'end_col_offset'):
- result[-1] += (t.end_lineno, t.end_col_offset)
- if t._fields is None:
- return tuple(result)
- for f in t._fields:
- result.append(to_tuple(getattr(t, f)))
- return tuple(result)
STDLIB = os.path.dirname(ast.__file__)
STDLIB_FILES = [fn for fn in os.listdir(STDLIB) if fn.endswith(".py")]
STDLIB_FILES.extend(["test/test_grammar.py", "test/test_unpack_ex.py"])
-# These tests are compiled through "exec"
-# There should be at least one test per statement
-exec_tests = [
- # Module docstring
- "'module docstring'",
- # FunctionDef
- "def f(): pass",
- # FunctionDef with docstring
- "def f(): 'function docstring'",
- # FunctionDef with arg
- "def f(a): pass",
- # FunctionDef with arg and default value
- "def f(a=0): pass",
- # FunctionDef with varargs
- "def f(*args): pass",
- # FunctionDef with varargs as TypeVarTuple
- "def f(*args: *Ts): pass",
- # FunctionDef with varargs as unpacked Tuple
- "def f(*args: *tuple[int, ...]): pass",
- # FunctionDef with varargs as unpacked Tuple *and* TypeVarTuple
- "def f(*args: *tuple[int, *Ts]): pass",
- # FunctionDef with kwargs
- "def f(**kwargs): pass",
- # FunctionDef with all kind of args and docstring
- "def f(a, b=1, c=None, d=[], e={}, *args, f=42, **kwargs): 'doc for f()'",
- # FunctionDef with type annotation on return involving unpacking
- "def f() -> tuple[*Ts]: pass",
- "def f() -> tuple[int, *Ts]: pass",
- "def f() -> tuple[int, *tuple[int, ...]]: pass",
- # ClassDef
- "class C:pass",
- # ClassDef with docstring
- "class C: 'docstring for class C'",
- # ClassDef, new style class
- "class C(object): pass",
- # Classdef with multiple bases
- "class C(A, B): pass",
- # Return
- "def f():return 1",
- "def f():return",
- # Delete
- "del v",
- # Assign
- "v = 1",
- "a,b = c",
- "(a,b) = c",
- "[a,b] = c",
- "a[b] = c",
- # AnnAssign with unpacked types
- "x: tuple[*Ts]",
- "x: tuple[int, *Ts]",
- "x: tuple[int, *tuple[str, ...]]",
- # AugAssign
- "v += 1",
- "v -= 1",
- "v *= 1",
- "v @= 1",
- "v /= 1",
- "v %= 1",
- "v **= 1",
- "v <<= 1",
- "v >>= 1",
- "v |= 1",
- "v ^= 1",
- "v &= 1",
- "v //= 1",
- # For
- "for v in v:pass",
- # For-Else
- "for v in v:\n pass\nelse:\n pass",
- # While
- "while v:pass",
- # While-Else
- "while v:\n pass\nelse:\n pass",
- # If-Elif-Else
- "if v:pass",
- "if a:\n pass\nelif b:\n pass",
- "if a:\n pass\nelse:\n pass",
- "if a:\n pass\nelif b:\n pass\nelse:\n pass",
- "if a:\n pass\nelif b:\n pass\nelif b:\n pass\nelif b:\n pass\nelse:\n pass",
- # With
- "with x: pass",
- "with x, y: pass",
- "with x as y: pass",
- "with x as y, z as q: pass",
- "with (x as y): pass",
- "with (x, y): pass",
- # Raise
- "raise",
- "raise Exception('string')",
- "raise Exception",
- "raise Exception('string') from None",
- # TryExcept
- "try:\n pass\nexcept Exception:\n pass",
- "try:\n pass\nexcept Exception as exc:\n pass",
- # TryFinally
- "try:\n pass\nfinally:\n pass",
- # TryStarExcept
- "try:\n pass\nexcept* Exception:\n pass",
- "try:\n pass\nexcept* Exception as exc:\n pass",
- # TryExceptFinallyElse
- "try:\n pass\nexcept Exception:\n pass\nelse: pass\nfinally:\n pass",
- "try:\n pass\nexcept Exception as exc:\n pass\nelse: pass\nfinally:\n pass",
- "try:\n pass\nexcept* Exception as exc:\n pass\nelse: pass\nfinally:\n pass",
- # Assert
- "assert v",
- # Assert with message
- "assert v, 'message'",
- # Import
- "import sys",
- "import foo as bar",
- # ImportFrom
- "from sys import x as y",
- "from sys import v",
- # Global
- "global v",
- # Expr
- "1",
- # Pass,
- "pass",
- # Break
- "for v in v:break",
- # Continue
- "for v in v:continue",
- # for statements with naked tuples (see http://bugs.python.org/issue6704)
- "for a,b in c: pass",
- "for (a,b) in c: pass",
- "for [a,b] in c: pass",
- # Multiline generator expression (test for .lineno & .col_offset)
- """(
- (
- Aa
- ,
- Bb
- )
- for
- Aa
- ,
- Bb in Cc
- )""",
- # dictcomp
- "{a : b for w in x for m in p if g}",
- # dictcomp with naked tuple
- "{a : b for v,w in x}",
- # setcomp
- "{r for l in x if g}",
- # setcomp with naked tuple
- "{r for l,m in x}",
- # AsyncFunctionDef
- "async def f():\n 'async function'\n await something()",
- # AsyncFor
- "async def f():\n async for e in i: 1\n else: 2",
- # AsyncWith
- "async def f():\n async with a as b: 1",
- # PEP 448: Additional Unpacking Generalizations
- "{**{1:2}, 2:3}",
- "{*{1, 2}, 3}",
- # Function with yield (from)
- "def f(): yield 1",
- "def f(): yield from []",
- # Asynchronous comprehensions
- "async def f():\n [i async for b in c]",
- # Decorated FunctionDef
- "@deco1\n@deco2()\n@deco3(1)\ndef f(): pass",
- # Decorated AsyncFunctionDef
- "@deco1\n@deco2()\n@deco3(1)\nasync def f(): pass",
- # Decorated ClassDef
- "@deco1\n@deco2()\n@deco3(1)\nclass C: pass",
- # Decorator with generator argument
- "@deco(a for a in b)\ndef f(): pass",
- # Decorator with attribute
- "@a.b.c\ndef f(): pass",
- # Simple assignment expression
- "(a := 1)",
- # Assignment expression in if statement
- "if a := foo(): pass",
- # Assignment expression in while
- "while a := foo(): pass",
- # Positional-only arguments
- "def f(a, /,): pass",
- "def f(a, /, c, d, e): pass",
- "def f(a, /, c, *, d, e): pass",
- "def f(a, /, c, *, d, e, **kwargs): pass",
- # Positional-only arguments with defaults
- "def f(a=1, /,): pass",
- "def f(a=1, /, b=2, c=4): pass",
- "def f(a=1, /, b=2, *, c=4): pass",
- "def f(a=1, /, b=2, *, c): pass",
- "def f(a=1, /, b=2, *, c=4, **kwargs): pass",
- "def f(a=1, /, b=2, *, c, **kwargs): pass",
- # Type aliases
- "type X = int",
- "type X[T] = int",
- "type X[T, *Ts, **P] = (T, Ts, P)",
- "type X[T: int, *Ts, **P] = (T, Ts, P)",
- "type X[T: (int, str), *Ts, **P] = (T, Ts, P)",
- "type X[T: int = 1, *Ts = 2, **P =3] = (T, Ts, P)",
- # Generic classes
- "class X[T]: pass",
- "class X[T, *Ts, **P]: pass",
- "class X[T: int, *Ts, **P]: pass",
- "class X[T: (int, str), *Ts, **P]: pass",
- "class X[T: int = 1, *Ts = 2, **P = 3]: pass",
- # Generic functions
- "def f[T](): pass",
- "def f[T, *Ts, **P](): pass",
- "def f[T: int, *Ts, **P](): pass",
- "def f[T: (int, str), *Ts, **P](): pass",
- "def f[T: int = 1, *Ts = 2, **P = 3](): pass",
- # Match
- "match x:\n\tcase 1:\n\t\tpass",
- # Match with _
- "match x:\n\tcase 1:\n\t\tpass\n\tcase _:\n\t\tpass",
-]
-
-# These are compiled through "single"
-# because of overlap with "eval", it just tests what
-# can't be tested with "eval"
-single_tests = [
- "1+2"
-]
-
-# These are compiled through "eval"
-# It should test all expressions
-eval_tests = [
- # Constant(value=None)
- "None",
- # True
- "True",
- # False
- "False",
- # BoolOp
- "a and b",
- "a or b",
- # BinOp
- "a + b",
- "a - b",
- "a * b",
- "a / b",
- "a @ b",
- "a // b",
- "a ** b",
- "a % b",
- "a >> b",
- "a << b",
- "a ^ b",
- "a | b",
- "a & b",
- # UnaryOp
- "not v",
- "+v",
- "-v",
- "~v",
- # Lambda
- "lambda:None",
- # Dict
- "{ 1:2 }",
- # Empty dict
- "{}",
- # Set
- "{None,}",
- # Multiline dict (test for .lineno & .col_offset)
- """{
- 1
- :
- 2
- }""",
- # Multiline list
- """[
- 1
- ,
- 1
- ]""",
- # Multiline tuple
- """(
- 1
- ,
- )""",
- # Multiline set
- """{
- 1
- ,
- 1
- }""",
- # ListComp
- "[a for b in c if d]",
- # GeneratorExp
- "(a for b in c if d)",
- # SetComp
- "{a for b in c if d}",
- # DictComp
- "{k: v for k, v in c if d}",
- # Comprehensions with multiple for targets
- "[(a,b) for a,b in c]",
- "[(a,b) for (a,b) in c]",
- "[(a,b) for [a,b] in c]",
- "{(a,b) for a,b in c}",
- "{(a,b) for (a,b) in c}",
- "{(a,b) for [a,b] in c}",
- "((a,b) for a,b in c)",
- "((a,b) for (a,b) in c)",
- "((a,b) for [a,b] in c)",
- # Async comprehensions - async comprehensions can't work outside an asynchronous function
- #
- # Yield - yield expressions can't work outside a function
- #
- # Compare
- "1 < 2 < 3",
- "a == b",
- "a <= b",
- "a >= b",
- "a != b",
- "a is b",
- "a is not b",
- "a in b",
- "a not in b",
- # Call without argument
- "f()",
- # Call
- "f(1,2,c=3,*d,**e)",
- # Call with multi-character starred
- "f(*[0, 1])",
- # Call with a generator argument
- "f(a for a in b)",
- # Constant(value=int())
- "10",
- # Complex num
- "1j",
- # Constant(value=str())
- "'string'",
- # Attribute
- "a.b",
- # Subscript
- "a[b:c]",
- # Name
- "v",
- # List
- "[1,2,3]",
- # Empty list
- "[]",
- # Tuple
- "1,2,3",
- # Tuple
- "(1,2,3)",
- # Empty tuple
- "()",
- # Combination
- "a.b.c.d(a.b[1:2])",
- # Slice
- "[5][1:]",
- "[5][:1]",
- "[5][::1]",
- "[5][1:1:1]",
- # IfExp
- "foo() if x else bar()",
- # JoinedStr and FormattedValue
- "f'{a}'",
- "f'{a:.2f}'",
- "f'{a!r}'",
- "f'foo({a})'",
-]
-
class AST_Tests(unittest.TestCase):
maxDiff = None
@@ -3408,238 +3035,3 @@ def test_cli_file_input(self):
self.assertEqual(expected.splitlines(),
res.out.decode("utf8").splitlines())
self.assertEqual(res.rc, 0)
-
-
-def main():
- if __name__ != '__main__':
- return
- if sys.argv[1:] == ['-g']:
- for statements, kind in ((exec_tests, "exec"), (single_tests, "single"),
- (eval_tests, "eval")):
- print(kind+"_results = [")
- for statement in statements:
- tree = ast.parse(statement, "?", kind)
- print("%r," % (to_tuple(tree),))
- print("]")
- print("main()")
- raise SystemExit
- unittest.main()
-
-#### EVERYTHING BELOW IS GENERATED BY python Lib/test/test_ast.py -g #####
-exec_results = [
-('Module', [('Expr', (1, 0, 1, 18), ('Constant', (1, 0, 1, 18), 'module docstring', None))], []),
-('Module', [('FunctionDef', (1, 0, 1, 13), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 9, 1, 13))], [], None, None, [])], []),
-('Module', [('FunctionDef', (1, 0, 1, 29), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (1, 9, 1, 29), ('Constant', (1, 9, 1, 29), 'function docstring', None))], [], None, None, [])], []),
-('Module', [('FunctionDef', (1, 0, 1, 14), 'f', ('arguments', [], [('arg', (1, 6, 1, 7), 'a', None, None)], None, [], [], None, []), [('Pass', (1, 10, 1, 14))], [], None, None, [])], []),
-('Module', [('FunctionDef', (1, 0, 1, 16), 'f', ('arguments', [], [('arg', (1, 6, 1, 7), 'a', None, None)], None, [], [], None, [('Constant', (1, 8, 1, 9), 0, None)]), [('Pass', (1, 12, 1, 16))], [], None, None, [])], []),
-('Module', [('FunctionDef', (1, 0, 1, 18), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 11), 'args', None, None), [], [], None, []), [('Pass', (1, 14, 1, 18))], [], None, None, [])], []),
-('Module', [('FunctionDef', (1, 0, 1, 23), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 16), 'args', ('Starred', (1, 13, 1, 16), ('Name', (1, 14, 1, 16), 'Ts', ('Load',)), ('Load',)), None), [], [], None, []), [('Pass', (1, 19, 1, 23))], [], None, None, [])], []),
-('Module', [('FunctionDef', (1, 0, 1, 36), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 29), 'args', ('Starred', (1, 13, 1, 29), ('Subscript', (1, 14, 1, 29), ('Name', (1, 14, 1, 19), 'tuple', ('Load',)), ('Tuple', (1, 20, 1, 28), [('Name', (1, 20, 1, 23), 'int', ('Load',)), ('Constant', (1, 25, 1, 28), Ellipsis, None)], ('Load',)), ('Load',)), ('Load',)), None), [], [], None, []), [('Pass', (1, 32, 1, 36))], [], None, None, [])], []),
-('Module', [('FunctionDef', (1, 0, 1, 36), 'f', ('arguments', [], [], ('arg', (1, 7, 1, 29), 'args', ('Starred', (1, 13, 1, 29), ('Subscript', (1, 14, 1, 29), ('Name', (1, 14, 1, 19), 'tuple', ('Load',)), ('Tuple', (1, 20, 1, 28), [('Name', (1, 20, 1, 23), 'int', ('Load',)), ('Starred', (1, 25, 1, 28), ('Name', (1, 26, 1, 28), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), ('Load',)), None), [], [], None, []), [('Pass', (1, 32, 1, 36))], [], None, None, [])], []),
-('Module', [('FunctionDef', (1, 0, 1, 21), 'f', ('arguments', [], [], None, [], [], ('arg', (1, 8, 1, 14), 'kwargs', None, None), []), [('Pass', (1, 17, 1, 21))], [], None, None, [])], []),
-('Module', [('FunctionDef', (1, 0, 1, 71), 'f', ('arguments', [], [('arg', (1, 6, 1, 7), 'a', None, None), ('arg', (1, 9, 1, 10), 'b', None, None), ('arg', (1, 14, 1, 15), 'c', None, None), ('arg', (1, 22, 1, 23), 'd', None, None), ('arg', (1, 28, 1, 29), 'e', None, None)], ('arg', (1, 35, 1, 39), 'args', None, None), [('arg', (1, 41, 1, 42), 'f', None, None)], [('Constant', (1, 43, 1, 45), 42, None)], ('arg', (1, 49, 1, 55), 'kwargs', None, None), [('Constant', (1, 11, 1, 12), 1, None), ('Constant', (1, 16, 1, 20), None, None), ('List', (1, 24, 1, 26), [], ('Load',)), ('Dict', (1, 30, 1, 32), [], [])]), [('Expr', (1, 58, 1, 71), ('Constant', (1, 58, 1, 71), 'doc for f()', None))], [], None, None, [])], []),
-('Module', [('FunctionDef', (1, 0, 1, 27), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 23, 1, 27))], [], ('Subscript', (1, 11, 1, 21), ('Name', (1, 11, 1, 16), 'tuple', ('Load',)), ('Tuple', (1, 17, 1, 20), [('Starred', (1, 17, 1, 20), ('Name', (1, 18, 1, 20), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, [])], []),
-('Module', [('FunctionDef', (1, 0, 1, 32), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 28, 1, 32))], [], ('Subscript', (1, 11, 1, 26), ('Name', (1, 11, 1, 16), 'tuple', ('Load',)), ('Tuple', (1, 17, 1, 25), [('Name', (1, 17, 1, 20), 'int', ('Load',)), ('Starred', (1, 22, 1, 25), ('Name', (1, 23, 1, 25), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, [])], []),
-('Module', [('FunctionDef', (1, 0, 1, 45), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 41, 1, 45))], [], ('Subscript', (1, 11, 1, 39), ('Name', (1, 11, 1, 16), 'tuple', ('Load',)), ('Tuple', (1, 17, 1, 38), [('Name', (1, 17, 1, 20), 'int', ('Load',)), ('Starred', (1, 22, 1, 38), ('Subscript', (1, 23, 1, 38), ('Name', (1, 23, 1, 28), 'tuple', ('Load',)), ('Tuple', (1, 29, 1, 37), [('Name', (1, 29, 1, 32), 'int', ('Load',)), ('Constant', (1, 34, 1, 37), Ellipsis, None)], ('Load',)), ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, [])], []),
-('Module', [('ClassDef', (1, 0, 1, 12), 'C', [], [], [('Pass', (1, 8, 1, 12))], [], [])], []),
-('Module', [('ClassDef', (1, 0, 1, 32), 'C', [], [], [('Expr', (1, 9, 1, 32), ('Constant', (1, 9, 1, 32), 'docstring for class C', None))], [], [])], []),
-('Module', [('ClassDef', (1, 0, 1, 21), 'C', [('Name', (1, 8, 1, 14), 'object', ('Load',))], [], [('Pass', (1, 17, 1, 21))], [], [])], []),
-('Module', [('ClassDef', (1, 0, 1, 19), 'C', [('Name', (1, 8, 1, 9), 'A', ('Load',)), ('Name', (1, 11, 1, 12), 'B', ('Load',))], [], [('Pass', (1, 15, 1, 19))], [], [])], []),
-('Module', [('FunctionDef', (1, 0, 1, 16), 'f', ('arguments', [], [], None, [], [], None, []), [('Return', (1, 8, 1, 16), ('Constant', (1, 15, 1, 16), 1, None))], [], None, None, [])], []),
-('Module', [('FunctionDef', (1, 0, 1, 14), 'f', ('arguments', [], [], None, [], [], None, []), [('Return', (1, 8, 1, 14), None)], [], None, None, [])], []),
-('Module', [('Delete', (1, 0, 1, 5), [('Name', (1, 4, 1, 5), 'v', ('Del',))])], []),
-('Module', [('Assign', (1, 0, 1, 5), [('Name', (1, 0, 1, 1), 'v', ('Store',))], ('Constant', (1, 4, 1, 5), 1, None), None)], []),
-('Module', [('Assign', (1, 0, 1, 7), [('Tuple', (1, 0, 1, 3), [('Name', (1, 0, 1, 1), 'a', ('Store',)), ('Name', (1, 2, 1, 3), 'b', ('Store',))], ('Store',))], ('Name', (1, 6, 1, 7), 'c', ('Load',)), None)], []),
-('Module', [('Assign', (1, 0, 1, 9), [('Tuple', (1, 0, 1, 5), [('Name', (1, 1, 1, 2), 'a', ('Store',)), ('Name', (1, 3, 1, 4), 'b', ('Store',))], ('Store',))], ('Name', (1, 8, 1, 9), 'c', ('Load',)), None)], []),
-('Module', [('Assign', (1, 0, 1, 9), [('List', (1, 0, 1, 5), [('Name', (1, 1, 1, 2), 'a', ('Store',)), ('Name', (1, 3, 1, 4), 'b', ('Store',))], ('Store',))], ('Name', (1, 8, 1, 9), 'c', ('Load',)), None)], []),
-('Module', [('Assign', (1, 0, 1, 8), [('Subscript', (1, 0, 1, 4), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Name', (1, 2, 1, 3), 'b', ('Load',)), ('Store',))], ('Name', (1, 7, 1, 8), 'c', ('Load',)), None)], []),
-('Module', [('AnnAssign', (1, 0, 1, 13), ('Name', (1, 0, 1, 1), 'x', ('Store',)), ('Subscript', (1, 3, 1, 13), ('Name', (1, 3, 1, 8), 'tuple', ('Load',)), ('Tuple', (1, 9, 1, 12), [('Starred', (1, 9, 1, 12), ('Name', (1, 10, 1, 12), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, 1)], []),
-('Module', [('AnnAssign', (1, 0, 1, 18), ('Name', (1, 0, 1, 1), 'x', ('Store',)), ('Subscript', (1, 3, 1, 18), ('Name', (1, 3, 1, 8), 'tuple', ('Load',)), ('Tuple', (1, 9, 1, 17), [('Name', (1, 9, 1, 12), 'int', ('Load',)), ('Starred', (1, 14, 1, 17), ('Name', (1, 15, 1, 17), 'Ts', ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, 1)], []),
-('Module', [('AnnAssign', (1, 0, 1, 31), ('Name', (1, 0, 1, 1), 'x', ('Store',)), ('Subscript', (1, 3, 1, 31), ('Name', (1, 3, 1, 8), 'tuple', ('Load',)), ('Tuple', (1, 9, 1, 30), [('Name', (1, 9, 1, 12), 'int', ('Load',)), ('Starred', (1, 14, 1, 30), ('Subscript', (1, 15, 1, 30), ('Name', (1, 15, 1, 20), 'tuple', ('Load',)), ('Tuple', (1, 21, 1, 29), [('Name', (1, 21, 1, 24), 'str', ('Load',)), ('Constant', (1, 26, 1, 29), Ellipsis, None)], ('Load',)), ('Load',)), ('Load',))], ('Load',)), ('Load',)), None, 1)], []),
-('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Add',), ('Constant', (1, 5, 1, 6), 1, None))], []),
-('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Sub',), ('Constant', (1, 5, 1, 6), 1, None))], []),
-('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Mult',), ('Constant', (1, 5, 1, 6), 1, None))], []),
-('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('MatMult',), ('Constant', (1, 5, 1, 6), 1, None))], []),
-('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Div',), ('Constant', (1, 5, 1, 6), 1, None))], []),
-('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Mod',), ('Constant', (1, 5, 1, 6), 1, None))], []),
-('Module', [('AugAssign', (1, 0, 1, 7), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('Pow',), ('Constant', (1, 6, 1, 7), 1, None))], []),
-('Module', [('AugAssign', (1, 0, 1, 7), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('LShift',), ('Constant', (1, 6, 1, 7), 1, None))], []),
-('Module', [('AugAssign', (1, 0, 1, 7), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('RShift',), ('Constant', (1, 6, 1, 7), 1, None))], []),
-('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('BitOr',), ('Constant', (1, 5, 1, 6), 1, None))], []),
-('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('BitXor',), ('Constant', (1, 5, 1, 6), 1, None))], []),
-('Module', [('AugAssign', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('BitAnd',), ('Constant', (1, 5, 1, 6), 1, None))], []),
-('Module', [('AugAssign', (1, 0, 1, 7), ('Name', (1, 0, 1, 1), 'v', ('Store',)), ('FloorDiv',), ('Constant', (1, 6, 1, 7), 1, None))], []),
-('Module', [('For', (1, 0, 1, 15), ('Name', (1, 4, 1, 5), 'v', ('Store',)), ('Name', (1, 9, 1, 10), 'v', ('Load',)), [('Pass', (1, 11, 1, 15))], [], None)], []),
-('Module', [('For', (1, 0, 4, 6), ('Name', (1, 4, 1, 5), 'v', ('Store',)), ('Name', (1, 9, 1, 10), 'v', ('Load',)), [('Pass', (2, 2, 2, 6))], [('Pass', (4, 2, 4, 6))], None)], []),
-('Module', [('While', (1, 0, 1, 12), ('Name', (1, 6, 1, 7), 'v', ('Load',)), [('Pass', (1, 8, 1, 12))], [])], []),
-('Module', [('While', (1, 0, 4, 6), ('Name', (1, 6, 1, 7), 'v', ('Load',)), [('Pass', (2, 2, 2, 6))], [('Pass', (4, 2, 4, 6))])], []),
-('Module', [('If', (1, 0, 1, 9), ('Name', (1, 3, 1, 4), 'v', ('Load',)), [('Pass', (1, 5, 1, 9))], [])], []),
-('Module', [('If', (1, 0, 4, 6), ('Name', (1, 3, 1, 4), 'a', ('Load',)), [('Pass', (2, 2, 2, 6))], [('If', (3, 0, 4, 6), ('Name', (3, 5, 3, 6), 'b', ('Load',)), [('Pass', (4, 2, 4, 6))], [])])], []),
-('Module', [('If', (1, 0, 4, 6), ('Name', (1, 3, 1, 4), 'a', ('Load',)), [('Pass', (2, 2, 2, 6))], [('Pass', (4, 2, 4, 6))])], []),
-('Module', [('If', (1, 0, 6, 6), ('Name', (1, 3, 1, 4), 'a', ('Load',)), [('Pass', (2, 2, 2, 6))], [('If', (3, 0, 6, 6), ('Name', (3, 5, 3, 6), 'b', ('Load',)), [('Pass', (4, 2, 4, 6))], [('Pass', (6, 2, 6, 6))])])], []),
-('Module', [('If', (1, 0, 10, 6), ('Name', (1, 3, 1, 4), 'a', ('Load',)), [('Pass', (2, 2, 2, 6))], [('If', (3, 0, 10, 6), ('Name', (3, 5, 3, 6), 'b', ('Load',)), [('Pass', (4, 2, 4, 6))], [('If', (5, 0, 10, 6), ('Name', (5, 5, 5, 6), 'b', ('Load',)), [('Pass', (6, 2, 6, 6))], [('If', (7, 0, 10, 6), ('Name', (7, 5, 7, 6), 'b', ('Load',)), [('Pass', (8, 2, 8, 6))], [('Pass', (10, 2, 10, 6))])])])])], []),
-('Module', [('With', (1, 0, 1, 12), [('withitem', ('Name', (1, 5, 1, 6), 'x', ('Load',)), None)], [('Pass', (1, 8, 1, 12))], None)], []),
-('Module', [('With', (1, 0, 1, 15), [('withitem', ('Name', (1, 5, 1, 6), 'x', ('Load',)), None), ('withitem', ('Name', (1, 8, 1, 9), 'y', ('Load',)), None)], [('Pass', (1, 11, 1, 15))], None)], []),
-('Module', [('With', (1, 0, 1, 17), [('withitem', ('Name', (1, 5, 1, 6), 'x', ('Load',)), ('Name', (1, 10, 1, 11), 'y', ('Store',)))], [('Pass', (1, 13, 1, 17))], None)], []),
-('Module', [('With', (1, 0, 1, 25), [('withitem', ('Name', (1, 5, 1, 6), 'x', ('Load',)), ('Name', (1, 10, 1, 11), 'y', ('Store',))), ('withitem', ('Name', (1, 13, 1, 14), 'z', ('Load',)), ('Name', (1, 18, 1, 19), 'q', ('Store',)))], [('Pass', (1, 21, 1, 25))], None)], []),
-('Module', [('With', (1, 0, 1, 19), [('withitem', ('Name', (1, 6, 1, 7), 'x', ('Load',)), ('Name', (1, 11, 1, 12), 'y', ('Store',)))], [('Pass', (1, 15, 1, 19))], None)], []),
-('Module', [('With', (1, 0, 1, 17), [('withitem', ('Name', (1, 6, 1, 7), 'x', ('Load',)), None), ('withitem', ('Name', (1, 9, 1, 10), 'y', ('Load',)), None)], [('Pass', (1, 13, 1, 17))], None)], []),
-('Module', [('Raise', (1, 0, 1, 5), None, None)], []),
-('Module', [('Raise', (1, 0, 1, 25), ('Call', (1, 6, 1, 25), ('Name', (1, 6, 1, 15), 'Exception', ('Load',)), [('Constant', (1, 16, 1, 24), 'string', None)], []), None)], []),
-('Module', [('Raise', (1, 0, 1, 15), ('Name', (1, 6, 1, 15), 'Exception', ('Load',)), None)], []),
-('Module', [('Raise', (1, 0, 1, 35), ('Call', (1, 6, 1, 25), ('Name', (1, 6, 1, 15), 'Exception', ('Load',)), [('Constant', (1, 16, 1, 24), 'string', None)], []), ('Constant', (1, 31, 1, 35), None, None))], []),
-('Module', [('Try', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 7, 3, 16), 'Exception', ('Load',)), None, [('Pass', (4, 2, 4, 6))])], [], [])], []),
-('Module', [('Try', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 7, 3, 16), 'Exception', ('Load',)), 'exc', [('Pass', (4, 2, 4, 6))])], [], [])], []),
-('Module', [('Try', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [], [], [('Pass', (4, 2, 4, 6))])], []),
-('Module', [('TryStar', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 8, 3, 17), 'Exception', ('Load',)), None, [('Pass', (4, 2, 4, 6))])], [], [])], []),
-('Module', [('TryStar', (1, 0, 4, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 8, 3, 17), 'Exception', ('Load',)), 'exc', [('Pass', (4, 2, 4, 6))])], [], [])], []),
-('Module', [('Try', (1, 0, 7, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 7, 3, 16), 'Exception', ('Load',)), None, [('Pass', (4, 2, 4, 6))])], [('Pass', (5, 7, 5, 11))], [('Pass', (7, 2, 7, 6))])], []),
-('Module', [('Try', (1, 0, 7, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 7, 3, 16), 'Exception', ('Load',)), 'exc', [('Pass', (4, 2, 4, 6))])], [('Pass', (5, 7, 5, 11))], [('Pass', (7, 2, 7, 6))])], []),
-('Module', [('TryStar', (1, 0, 7, 6), [('Pass', (2, 2, 2, 6))], [('ExceptHandler', (3, 0, 4, 6), ('Name', (3, 8, 3, 17), 'Exception', ('Load',)), 'exc', [('Pass', (4, 2, 4, 6))])], [('Pass', (5, 7, 5, 11))], [('Pass', (7, 2, 7, 6))])], []),
-('Module', [('Assert', (1, 0, 1, 8), ('Name', (1, 7, 1, 8), 'v', ('Load',)), None)], []),
-('Module', [('Assert', (1, 0, 1, 19), ('Name', (1, 7, 1, 8), 'v', ('Load',)), ('Constant', (1, 10, 1, 19), 'message', None))], []),
-('Module', [('Import', (1, 0, 1, 10), [('alias', (1, 7, 1, 10), 'sys', None)])], []),
-('Module', [('Import', (1, 0, 1, 17), [('alias', (1, 7, 1, 17), 'foo', 'bar')])], []),
-('Module', [('ImportFrom', (1, 0, 1, 22), 'sys', [('alias', (1, 16, 1, 22), 'x', 'y')], 0)], []),
-('Module', [('ImportFrom', (1, 0, 1, 17), 'sys', [('alias', (1, 16, 1, 17), 'v', None)], 0)], []),
-('Module', [('Global', (1, 0, 1, 8), ['v'])], []),
-('Module', [('Expr', (1, 0, 1, 1), ('Constant', (1, 0, 1, 1), 1, None))], []),
-('Module', [('Pass', (1, 0, 1, 4))], []),
-('Module', [('For', (1, 0, 1, 16), ('Name', (1, 4, 1, 5), 'v', ('Store',)), ('Name', (1, 9, 1, 10), 'v', ('Load',)), [('Break', (1, 11, 1, 16))], [], None)], []),
-('Module', [('For', (1, 0, 1, 19), ('Name', (1, 4, 1, 5), 'v', ('Store',)), ('Name', (1, 9, 1, 10), 'v', ('Load',)), [('Continue', (1, 11, 1, 19))], [], None)], []),
-('Module', [('For', (1, 0, 1, 18), ('Tuple', (1, 4, 1, 7), [('Name', (1, 4, 1, 5), 'a', ('Store',)), ('Name', (1, 6, 1, 7), 'b', ('Store',))], ('Store',)), ('Name', (1, 11, 1, 12), 'c', ('Load',)), [('Pass', (1, 14, 1, 18))], [], None)], []),
-('Module', [('For', (1, 0, 1, 20), ('Tuple', (1, 4, 1, 9), [('Name', (1, 5, 1, 6), 'a', ('Store',)), ('Name', (1, 7, 1, 8), 'b', ('Store',))], ('Store',)), ('Name', (1, 13, 1, 14), 'c', ('Load',)), [('Pass', (1, 16, 1, 20))], [], None)], []),
-('Module', [('For', (1, 0, 1, 20), ('List', (1, 4, 1, 9), [('Name', (1, 5, 1, 6), 'a', ('Store',)), ('Name', (1, 7, 1, 8), 'b', ('Store',))], ('Store',)), ('Name', (1, 13, 1, 14), 'c', ('Load',)), [('Pass', (1, 16, 1, 20))], [], None)], []),
-('Module', [('Expr', (1, 0, 11, 5), ('GeneratorExp', (1, 0, 11, 5), ('Tuple', (2, 4, 6, 5), [('Name', (3, 4, 3, 6), 'Aa', ('Load',)), ('Name', (5, 7, 5, 9), 'Bb', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (8, 4, 10, 6), [('Name', (8, 4, 8, 6), 'Aa', ('Store',)), ('Name', (10, 4, 10, 6), 'Bb', ('Store',))], ('Store',)), ('Name', (10, 10, 10, 12), 'Cc', ('Load',)), [], 0)]))], []),
-('Module', [('Expr', (1, 0, 1, 34), ('DictComp', (1, 0, 1, 34), ('Name', (1, 1, 1, 2), 'a', ('Load',)), ('Name', (1, 5, 1, 6), 'b', ('Load',)), [('comprehension', ('Name', (1, 11, 1, 12), 'w', ('Store',)), ('Name', (1, 16, 1, 17), 'x', ('Load',)), [], 0), ('comprehension', ('Name', (1, 22, 1, 23), 'm', ('Store',)), ('Name', (1, 27, 1, 28), 'p', ('Load',)), [('Name', (1, 32, 1, 33), 'g', ('Load',))], 0)]))], []),
-('Module', [('Expr', (1, 0, 1, 20), ('DictComp', (1, 0, 1, 20), ('Name', (1, 1, 1, 2), 'a', ('Load',)), ('Name', (1, 5, 1, 6), 'b', ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'v', ('Store',)), ('Name', (1, 13, 1, 14), 'w', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'x', ('Load',)), [], 0)]))], []),
-('Module', [('Expr', (1, 0, 1, 19), ('SetComp', (1, 0, 1, 19), ('Name', (1, 1, 1, 2), 'r', ('Load',)), [('comprehension', ('Name', (1, 7, 1, 8), 'l', ('Store',)), ('Name', (1, 12, 1, 13), 'x', ('Load',)), [('Name', (1, 17, 1, 18), 'g', ('Load',))], 0)]))], []),
-('Module', [('Expr', (1, 0, 1, 16), ('SetComp', (1, 0, 1, 16), ('Name', (1, 1, 1, 2), 'r', ('Load',)), [('comprehension', ('Tuple', (1, 7, 1, 10), [('Name', (1, 7, 1, 8), 'l', ('Store',)), ('Name', (1, 9, 1, 10), 'm', ('Store',))], ('Store',)), ('Name', (1, 14, 1, 15), 'x', ('Load',)), [], 0)]))], []),
-('Module', [('AsyncFunctionDef', (1, 0, 3, 18), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (2, 1, 2, 17), ('Constant', (2, 1, 2, 17), 'async function', None)), ('Expr', (3, 1, 3, 18), ('Await', (3, 1, 3, 18), ('Call', (3, 7, 3, 18), ('Name', (3, 7, 3, 16), 'something', ('Load',)), [], [])))], [], None, None, [])], []),
-('Module', [('AsyncFunctionDef', (1, 0, 3, 8), 'f', ('arguments', [], [], None, [], [], None, []), [('AsyncFor', (2, 1, 3, 8), ('Name', (2, 11, 2, 12), 'e', ('Store',)), ('Name', (2, 16, 2, 17), 'i', ('Load',)), [('Expr', (2, 19, 2, 20), ('Constant', (2, 19, 2, 20), 1, None))], [('Expr', (3, 7, 3, 8), ('Constant', (3, 7, 3, 8), 2, None))], None)], [], None, None, [])], []),
-('Module', [('AsyncFunctionDef', (1, 0, 2, 21), 'f', ('arguments', [], [], None, [], [], None, []), [('AsyncWith', (2, 1, 2, 21), [('withitem', ('Name', (2, 12, 2, 13), 'a', ('Load',)), ('Name', (2, 17, 2, 18), 'b', ('Store',)))], [('Expr', (2, 20, 2, 21), ('Constant', (2, 20, 2, 21), 1, None))], None)], [], None, None, [])], []),
-('Module', [('Expr', (1, 0, 1, 14), ('Dict', (1, 0, 1, 14), [None, ('Constant', (1, 10, 1, 11), 2, None)], [('Dict', (1, 3, 1, 8), [('Constant', (1, 4, 1, 5), 1, None)], [('Constant', (1, 6, 1, 7), 2, None)]), ('Constant', (1, 12, 1, 13), 3, None)]))], []),
-('Module', [('Expr', (1, 0, 1, 12), ('Set', (1, 0, 1, 12), [('Starred', (1, 1, 1, 8), ('Set', (1, 2, 1, 8), [('Constant', (1, 3, 1, 4), 1, None), ('Constant', (1, 6, 1, 7), 2, None)]), ('Load',)), ('Constant', (1, 10, 1, 11), 3, None)]))], []),
-('Module', [('FunctionDef', (1, 0, 1, 16), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (1, 9, 1, 16), ('Yield', (1, 9, 1, 16), ('Constant', (1, 15, 1, 16), 1, None)))], [], None, None, [])], []),
-('Module', [('FunctionDef', (1, 0, 1, 22), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (1, 9, 1, 22), ('YieldFrom', (1, 9, 1, 22), ('List', (1, 20, 1, 22), [], ('Load',))))], [], None, None, [])], []),
-('Module', [('AsyncFunctionDef', (1, 0, 2, 21), 'f', ('arguments', [], [], None, [], [], None, []), [('Expr', (2, 1, 2, 21), ('ListComp', (2, 1, 2, 21), ('Name', (2, 2, 2, 3), 'i', ('Load',)), [('comprehension', ('Name', (2, 14, 2, 15), 'b', ('Store',)), ('Name', (2, 19, 2, 20), 'c', ('Load',)), [], 1)]))], [], None, None, [])], []),
-('Module', [('FunctionDef', (4, 0, 4, 13), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (4, 9, 4, 13))], [('Name', (1, 1, 1, 6), 'deco1', ('Load',)), ('Call', (2, 1, 2, 8), ('Name', (2, 1, 2, 6), 'deco2', ('Load',)), [], []), ('Call', (3, 1, 3, 9), ('Name', (3, 1, 3, 6), 'deco3', ('Load',)), [('Constant', (3, 7, 3, 8), 1, None)], [])], None, None, [])], []),
-('Module', [('AsyncFunctionDef', (4, 0, 4, 19), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (4, 15, 4, 19))], [('Name', (1, 1, 1, 6), 'deco1', ('Load',)), ('Call', (2, 1, 2, 8), ('Name', (2, 1, 2, 6), 'deco2', ('Load',)), [], []), ('Call', (3, 1, 3, 9), ('Name', (3, 1, 3, 6), 'deco3', ('Load',)), [('Constant', (3, 7, 3, 8), 1, None)], [])], None, None, [])], []),
-('Module', [('ClassDef', (4, 0, 4, 13), 'C', [], [], [('Pass', (4, 9, 4, 13))], [('Name', (1, 1, 1, 6), 'deco1', ('Load',)), ('Call', (2, 1, 2, 8), ('Name', (2, 1, 2, 6), 'deco2', ('Load',)), [], []), ('Call', (3, 1, 3, 9), ('Name', (3, 1, 3, 6), 'deco3', ('Load',)), [('Constant', (3, 7, 3, 8), 1, None)], [])], [])], []),
-('Module', [('FunctionDef', (2, 0, 2, 13), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (2, 9, 2, 13))], [('Call', (1, 1, 1, 19), ('Name', (1, 1, 1, 5), 'deco', ('Load',)), [('GeneratorExp', (1, 5, 1, 19), ('Name', (1, 6, 1, 7), 'a', ('Load',)), [('comprehension', ('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 17, 1, 18), 'b', ('Load',)), [], 0)])], [])], None, None, [])], []),
-('Module', [('FunctionDef', (2, 0, 2, 13), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (2, 9, 2, 13))], [('Attribute', (1, 1, 1, 6), ('Attribute', (1, 1, 1, 4), ('Name', (1, 1, 1, 2), 'a', ('Load',)), 'b', ('Load',)), 'c', ('Load',))], None, None, [])], []),
-('Module', [('Expr', (1, 0, 1, 8), ('NamedExpr', (1, 1, 1, 7), ('Name', (1, 1, 1, 2), 'a', ('Store',)), ('Constant', (1, 6, 1, 7), 1, None)))], []),
-('Module', [('If', (1, 0, 1, 19), ('NamedExpr', (1, 3, 1, 13), ('Name', (1, 3, 1, 4), 'a', ('Store',)), ('Call', (1, 8, 1, 13), ('Name', (1, 8, 1, 11), 'foo', ('Load',)), [], [])), [('Pass', (1, 15, 1, 19))], [])], []),
-('Module', [('While', (1, 0, 1, 22), ('NamedExpr', (1, 6, 1, 16), ('Name', (1, 6, 1, 7), 'a', ('Store',)), ('Call', (1, 11, 1, 16), ('Name', (1, 11, 1, 14), 'foo', ('Load',)), [], [])), [('Pass', (1, 18, 1, 22))], [])], []),
-('Module', [('FunctionDef', (1, 0, 1, 18), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [], None, [], [], None, []), [('Pass', (1, 14, 1, 18))], [], None, None, [])], []),
-('Module', [('FunctionDef', (1, 0, 1, 26), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 12, 1, 13), 'c', None, None), ('arg', (1, 15, 1, 16), 'd', None, None), ('arg', (1, 18, 1, 19), 'e', None, None)], None, [], [], None, []), [('Pass', (1, 22, 1, 26))], [], None, None, [])], []),
-('Module', [('FunctionDef', (1, 0, 1, 29), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 12, 1, 13), 'c', None, None)], None, [('arg', (1, 18, 1, 19), 'd', None, None), ('arg', (1, 21, 1, 22), 'e', None, None)], [None, None], None, []), [('Pass', (1, 25, 1, 29))], [], None, None, [])], []),
-('Module', [('FunctionDef', (1, 0, 1, 39), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 12, 1, 13), 'c', None, None)], None, [('arg', (1, 18, 1, 19), 'd', None, None), ('arg', (1, 21, 1, 22), 'e', None, None)], [None, None], ('arg', (1, 26, 1, 32), 'kwargs', None, None), []), [('Pass', (1, 35, 1, 39))], [], None, None, [])], []),
-('Module', [('FunctionDef', (1, 0, 1, 20), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [], None, [], [], None, [('Constant', (1, 8, 1, 9), 1, None)]), [('Pass', (1, 16, 1, 20))], [], None, None, [])], []),
-('Module', [('FunctionDef', (1, 0, 1, 29), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None), ('arg', (1, 19, 1, 20), 'c', None, None)], None, [], [], None, [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None), ('Constant', (1, 21, 1, 22), 4, None)]), [('Pass', (1, 25, 1, 29))], [], None, None, [])], []),
-('Module', [('FunctionDef', (1, 0, 1, 32), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [('Constant', (1, 24, 1, 25), 4, None)], None, [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 28, 1, 32))], [], None, None, [])], []),
-('Module', [('FunctionDef', (1, 0, 1, 30), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [None], None, [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 26, 1, 30))], [], None, None, [])], []),
-('Module', [('FunctionDef', (1, 0, 1, 42), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [('Constant', (1, 24, 1, 25), 4, None)], ('arg', (1, 29, 1, 35), 'kwargs', None, None), [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 38, 1, 42))], [], None, None, [])], []),
-('Module', [('FunctionDef', (1, 0, 1, 40), 'f', ('arguments', [('arg', (1, 6, 1, 7), 'a', None, None)], [('arg', (1, 14, 1, 15), 'b', None, None)], None, [('arg', (1, 22, 1, 23), 'c', None, None)], [None], ('arg', (1, 27, 1, 33), 'kwargs', None, None), [('Constant', (1, 8, 1, 9), 1, None), ('Constant', (1, 16, 1, 17), 2, None)]), [('Pass', (1, 36, 1, 40))], [], None, None, [])], []),
-('Module', [('TypeAlias', (1, 0, 1, 12), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [], ('Name', (1, 9, 1, 12), 'int', ('Load',)))], []),
-('Module', [('TypeAlias', (1, 0, 1, 15), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 8), 'T', None, None)], ('Name', (1, 12, 1, 15), 'int', ('Load',)))], []),
-('Module', [('TypeAlias', (1, 0, 1, 32), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 8), 'T', None, None), ('TypeVarTuple', (1, 10, 1, 13), 'Ts', None), ('ParamSpec', (1, 15, 1, 18), 'P', None)], ('Tuple', (1, 22, 1, 32), [('Name', (1, 23, 1, 24), 'T', ('Load',)), ('Name', (1, 26, 1, 28), 'Ts', ('Load',)), ('Name', (1, 30, 1, 31), 'P', ('Load',))], ('Load',)))], []),
-('Module', [('TypeAlias', (1, 0, 1, 37), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 13), 'T', ('Name', (1, 10, 1, 13), 'int', ('Load',)), None), ('TypeVarTuple', (1, 15, 1, 18), 'Ts', None), ('ParamSpec', (1, 20, 1, 23), 'P', None)], ('Tuple', (1, 27, 1, 37), [('Name', (1, 28, 1, 29), 'T', ('Load',)), ('Name', (1, 31, 1, 33), 'Ts', ('Load',)), ('Name', (1, 35, 1, 36), 'P', ('Load',))], ('Load',)))], []),
-('Module', [('TypeAlias', (1, 0, 1, 44), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 20), 'T', ('Tuple', (1, 10, 1, 20), [('Name', (1, 11, 1, 14), 'int', ('Load',)), ('Name', (1, 16, 1, 19), 'str', ('Load',))], ('Load',)), None), ('TypeVarTuple', (1, 22, 1, 25), 'Ts', None), ('ParamSpec', (1, 27, 1, 30), 'P', None)], ('Tuple', (1, 34, 1, 44), [('Name', (1, 35, 1, 36), 'T', ('Load',)), ('Name', (1, 38, 1, 40), 'Ts', ('Load',)), ('Name', (1, 42, 1, 43), 'P', ('Load',))], ('Load',)))], []),
-('Module', [('TypeAlias', (1, 0, 1, 48), ('Name', (1, 5, 1, 6), 'X', ('Store',)), [('TypeVar', (1, 7, 1, 17), 'T', ('Name', (1, 10, 1, 13), 'int', ('Load',)), ('Constant', (1, 16, 1, 17), 1, None)), ('TypeVarTuple', (1, 19, 1, 26), 'Ts', ('Constant', (1, 25, 1, 26), 2, None)), ('ParamSpec', (1, 28, 1, 34), 'P', ('Constant', (1, 33, 1, 34), 3, None))], ('Tuple', (1, 38, 1, 48), [('Name', (1, 39, 1, 40), 'T', ('Load',)), ('Name', (1, 42, 1, 44), 'Ts', ('Load',)), ('Name', (1, 46, 1, 47), 'P', ('Load',))], ('Load',)))], []),
-('Module', [('ClassDef', (1, 0, 1, 16), 'X', [], [], [('Pass', (1, 12, 1, 16))], [], [('TypeVar', (1, 8, 1, 9), 'T', None, None)])], []),
-('Module', [('ClassDef', (1, 0, 1, 26), 'X', [], [], [('Pass', (1, 22, 1, 26))], [], [('TypeVar', (1, 8, 1, 9), 'T', None, None), ('TypeVarTuple', (1, 11, 1, 14), 'Ts', None), ('ParamSpec', (1, 16, 1, 19), 'P', None)])], []),
-('Module', [('ClassDef', (1, 0, 1, 31), 'X', [], [], [('Pass', (1, 27, 1, 31))], [], [('TypeVar', (1, 8, 1, 14), 'T', ('Name', (1, 11, 1, 14), 'int', ('Load',)), None), ('TypeVarTuple', (1, 16, 1, 19), 'Ts', None), ('ParamSpec', (1, 21, 1, 24), 'P', None)])], []),
-('Module', [('ClassDef', (1, 0, 1, 38), 'X', [], [], [('Pass', (1, 34, 1, 38))], [], [('TypeVar', (1, 8, 1, 21), 'T', ('Tuple', (1, 11, 1, 21), [('Name', (1, 12, 1, 15), 'int', ('Load',)), ('Name', (1, 17, 1, 20), 'str', ('Load',))], ('Load',)), None), ('TypeVarTuple', (1, 23, 1, 26), 'Ts', None), ('ParamSpec', (1, 28, 1, 31), 'P', None)])], []),
-('Module', [('ClassDef', (1, 0, 1, 43), 'X', [], [], [('Pass', (1, 39, 1, 43))], [], [('TypeVar', (1, 8, 1, 18), 'T', ('Name', (1, 11, 1, 14), 'int', ('Load',)), ('Constant', (1, 17, 1, 18), 1, None)), ('TypeVarTuple', (1, 20, 1, 27), 'Ts', ('Constant', (1, 26, 1, 27), 2, None)), ('ParamSpec', (1, 29, 1, 36), 'P', ('Constant', (1, 35, 1, 36), 3, None))])], []),
-('Module', [('FunctionDef', (1, 0, 1, 16), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 12, 1, 16))], [], None, None, [('TypeVar', (1, 6, 1, 7), 'T', None, None)])], []),
-('Module', [('FunctionDef', (1, 0, 1, 26), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 22, 1, 26))], [], None, None, [('TypeVar', (1, 6, 1, 7), 'T', None, None), ('TypeVarTuple', (1, 9, 1, 12), 'Ts', None), ('ParamSpec', (1, 14, 1, 17), 'P', None)])], []),
-('Module', [('FunctionDef', (1, 0, 1, 31), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 27, 1, 31))], [], None, None, [('TypeVar', (1, 6, 1, 12), 'T', ('Name', (1, 9, 1, 12), 'int', ('Load',)), None), ('TypeVarTuple', (1, 14, 1, 17), 'Ts', None), ('ParamSpec', (1, 19, 1, 22), 'P', None)])], []),
-('Module', [('FunctionDef', (1, 0, 1, 38), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 34, 1, 38))], [], None, None, [('TypeVar', (1, 6, 1, 19), 'T', ('Tuple', (1, 9, 1, 19), [('Name', (1, 10, 1, 13), 'int', ('Load',)), ('Name', (1, 15, 1, 18), 'str', ('Load',))], ('Load',)), None), ('TypeVarTuple', (1, 21, 1, 24), 'Ts', None), ('ParamSpec', (1, 26, 1, 29), 'P', None)])], []),
-('Module', [('FunctionDef', (1, 0, 1, 43), 'f', ('arguments', [], [], None, [], [], None, []), [('Pass', (1, 39, 1, 43))], [], None, None, [('TypeVar', (1, 6, 1, 16), 'T', ('Name', (1, 9, 1, 12), 'int', ('Load',)), ('Constant', (1, 15, 1, 16), 1, None)), ('TypeVarTuple', (1, 18, 1, 25), 'Ts', ('Constant', (1, 24, 1, 25), 2, None)), ('ParamSpec', (1, 27, 1, 34), 'P', ('Constant', (1, 33, 1, 34), 3, None))])], []),
-('Module', [('Match', (1, 0, 3, 6), ('Name', (1, 6, 1, 7), 'x', ('Load',)), [('match_case', ('MatchValue', (2, 6, 2, 7), ('Constant', (2, 6, 2, 7), 1, None)), None, [('Pass', (3, 2, 3, 6))])])], []),
-('Module', [('Match', (1, 0, 5, 6), ('Name', (1, 6, 1, 7), 'x', ('Load',)), [('match_case', ('MatchValue', (2, 6, 2, 7), ('Constant', (2, 6, 2, 7), 1, None)), None, [('Pass', (3, 2, 3, 6))]), ('match_case', ('MatchAs', (4, 6, 4, 7), None, None), None, [('Pass', (5, 2, 5, 6))])])], []),
-]
-single_results = [
-('Interactive', [('Expr', (1, 0, 1, 3), ('BinOp', (1, 0, 1, 3), ('Constant', (1, 0, 1, 1), 1, None), ('Add',), ('Constant', (1, 2, 1, 3), 2, None)))]),
-]
-eval_results = [
-('Expression', ('Constant', (1, 0, 1, 4), None, None)),
-('Expression', ('Constant', (1, 0, 1, 4), True, None)),
-('Expression', ('Constant', (1, 0, 1, 5), False, None)),
-('Expression', ('BoolOp', (1, 0, 1, 7), ('And',), [('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Name', (1, 6, 1, 7), 'b', ('Load',))])),
-('Expression', ('BoolOp', (1, 0, 1, 6), ('Or',), [('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Name', (1, 5, 1, 6), 'b', ('Load',))])),
-('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Add',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))),
-('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Sub',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))),
-('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Mult',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))),
-('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Div',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))),
-('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('MatMult',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))),
-('Expression', ('BinOp', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('FloorDiv',), ('Name', (1, 5, 1, 6), 'b', ('Load',)))),
-('Expression', ('BinOp', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Pow',), ('Name', (1, 5, 1, 6), 'b', ('Load',)))),
-('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Mod',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))),
-('Expression', ('BinOp', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('RShift',), ('Name', (1, 5, 1, 6), 'b', ('Load',)))),
-('Expression', ('BinOp', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('LShift',), ('Name', (1, 5, 1, 6), 'b', ('Load',)))),
-('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('BitXor',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))),
-('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('BitOr',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))),
-('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('BitAnd',), ('Name', (1, 4, 1, 5), 'b', ('Load',)))),
-('Expression', ('UnaryOp', (1, 0, 1, 5), ('Not',), ('Name', (1, 4, 1, 5), 'v', ('Load',)))),
-('Expression', ('UnaryOp', (1, 0, 1, 2), ('UAdd',), ('Name', (1, 1, 1, 2), 'v', ('Load',)))),
-('Expression', ('UnaryOp', (1, 0, 1, 2), ('USub',), ('Name', (1, 1, 1, 2), 'v', ('Load',)))),
-('Expression', ('UnaryOp', (1, 0, 1, 2), ('Invert',), ('Name', (1, 1, 1, 2), 'v', ('Load',)))),
-('Expression', ('Lambda', (1, 0, 1, 11), ('arguments', [], [], None, [], [], None, []), ('Constant', (1, 7, 1, 11), None, None))),
-('Expression', ('Dict', (1, 0, 1, 7), [('Constant', (1, 2, 1, 3), 1, None)], [('Constant', (1, 4, 1, 5), 2, None)])),
-('Expression', ('Dict', (1, 0, 1, 2), [], [])),
-('Expression', ('Set', (1, 0, 1, 7), [('Constant', (1, 1, 1, 5), None, None)])),
-('Expression', ('Dict', (1, 0, 5, 6), [('Constant', (2, 6, 2, 7), 1, None)], [('Constant', (4, 10, 4, 11), 2, None)])),
-('Expression', ('List', (1, 0, 5, 6), [('Constant', (2, 6, 2, 7), 1, None), ('Constant', (4, 8, 4, 9), 1, None)], ('Load',))),
-('Expression', ('Tuple', (1, 0, 4, 6), [('Constant', (2, 6, 2, 7), 1, None)], ('Load',))),
-('Expression', ('Set', (1, 0, 5, 6), [('Constant', (2, 6, 2, 7), 1, None), ('Constant', (4, 8, 4, 9), 1, None)])),
-('Expression', ('ListComp', (1, 0, 1, 19), ('Name', (1, 1, 1, 2), 'a', ('Load',)), [('comprehension', ('Name', (1, 7, 1, 8), 'b', ('Store',)), ('Name', (1, 12, 1, 13), 'c', ('Load',)), [('Name', (1, 17, 1, 18), 'd', ('Load',))], 0)])),
-('Expression', ('GeneratorExp', (1, 0, 1, 19), ('Name', (1, 1, 1, 2), 'a', ('Load',)), [('comprehension', ('Name', (1, 7, 1, 8), 'b', ('Store',)), ('Name', (1, 12, 1, 13), 'c', ('Load',)), [('Name', (1, 17, 1, 18), 'd', ('Load',))], 0)])),
-('Expression', ('SetComp', (1, 0, 1, 19), ('Name', (1, 1, 1, 2), 'a', ('Load',)), [('comprehension', ('Name', (1, 7, 1, 8), 'b', ('Store',)), ('Name', (1, 12, 1, 13), 'c', ('Load',)), [('Name', (1, 17, 1, 18), 'd', ('Load',))], 0)])),
-('Expression', ('DictComp', (1, 0, 1, 25), ('Name', (1, 1, 1, 2), 'k', ('Load',)), ('Name', (1, 4, 1, 5), 'v', ('Load',)), [('comprehension', ('Tuple', (1, 10, 1, 14), [('Name', (1, 10, 1, 11), 'k', ('Store',)), ('Name', (1, 13, 1, 14), 'v', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'c', ('Load',)), [('Name', (1, 23, 1, 24), 'd', ('Load',))], 0)])),
-('Expression', ('ListComp', (1, 0, 1, 20), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'c', ('Load',)), [], 0)])),
-('Expression', ('ListComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])),
-('Expression', ('ListComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('List', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])),
-('Expression', ('SetComp', (1, 0, 1, 20), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'c', ('Load',)), [], 0)])),
-('Expression', ('SetComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])),
-('Expression', ('SetComp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('List', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])),
-('Expression', ('GeneratorExp', (1, 0, 1, 20), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 14), [('Name', (1, 11, 1, 12), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Store',))], ('Store',)), ('Name', (1, 18, 1, 19), 'c', ('Load',)), [], 0)])),
-('Expression', ('GeneratorExp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])),
-('Expression', ('GeneratorExp', (1, 0, 1, 22), ('Tuple', (1, 1, 1, 6), [('Name', (1, 2, 1, 3), 'a', ('Load',)), ('Name', (1, 4, 1, 5), 'b', ('Load',))], ('Load',)), [('comprehension', ('List', (1, 11, 1, 16), [('Name', (1, 12, 1, 13), 'a', ('Store',)), ('Name', (1, 14, 1, 15), 'b', ('Store',))], ('Store',)), ('Name', (1, 20, 1, 21), 'c', ('Load',)), [], 0)])),
-('Expression', ('Compare', (1, 0, 1, 9), ('Constant', (1, 0, 1, 1), 1, None), [('Lt',), ('Lt',)], [('Constant', (1, 4, 1, 5), 2, None), ('Constant', (1, 8, 1, 9), 3, None)])),
-('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('Eq',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])),
-('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('LtE',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])),
-('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('GtE',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])),
-('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('NotEq',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])),
-('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('Is',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])),
-('Expression', ('Compare', (1, 0, 1, 10), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('IsNot',)], [('Name', (1, 9, 1, 10), 'b', ('Load',))])),
-('Expression', ('Compare', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('In',)], [('Name', (1, 5, 1, 6), 'b', ('Load',))])),
-('Expression', ('Compare', (1, 0, 1, 10), ('Name', (1, 0, 1, 1), 'a', ('Load',)), [('NotIn',)], [('Name', (1, 9, 1, 10), 'b', ('Load',))])),
-('Expression', ('Call', (1, 0, 1, 3), ('Name', (1, 0, 1, 1), 'f', ('Load',)), [], [])),
-('Expression', ('Call', (1, 0, 1, 17), ('Name', (1, 0, 1, 1), 'f', ('Load',)), [('Constant', (1, 2, 1, 3), 1, None), ('Constant', (1, 4, 1, 5), 2, None), ('Starred', (1, 10, 1, 12), ('Name', (1, 11, 1, 12), 'd', ('Load',)), ('Load',))], [('keyword', (1, 6, 1, 9), 'c', ('Constant', (1, 8, 1, 9), 3, None)), ('keyword', (1, 13, 1, 16), None, ('Name', (1, 15, 1, 16), 'e', ('Load',)))])),
-('Expression', ('Call', (1, 0, 1, 10), ('Name', (1, 0, 1, 1), 'f', ('Load',)), [('Starred', (1, 2, 1, 9), ('List', (1, 3, 1, 9), [('Constant', (1, 4, 1, 5), 0, None), ('Constant', (1, 7, 1, 8), 1, None)], ('Load',)), ('Load',))], [])),
-('Expression', ('Call', (1, 0, 1, 15), ('Name', (1, 0, 1, 1), 'f', ('Load',)), [('GeneratorExp', (1, 1, 1, 15), ('Name', (1, 2, 1, 3), 'a', ('Load',)), [('comprehension', ('Name', (1, 8, 1, 9), 'a', ('Store',)), ('Name', (1, 13, 1, 14), 'b', ('Load',)), [], 0)])], [])),
-('Expression', ('Constant', (1, 0, 1, 2), 10, None)),
-('Expression', ('Constant', (1, 0, 1, 2), 1j, None)),
-('Expression', ('Constant', (1, 0, 1, 8), 'string', None)),
-('Expression', ('Attribute', (1, 0, 1, 3), ('Name', (1, 0, 1, 1), 'a', ('Load',)), 'b', ('Load',))),
-('Expression', ('Subscript', (1, 0, 1, 6), ('Name', (1, 0, 1, 1), 'a', ('Load',)), ('Slice', (1, 2, 1, 5), ('Name', (1, 2, 1, 3), 'b', ('Load',)), ('Name', (1, 4, 1, 5), 'c', ('Load',)), None), ('Load',))),
-('Expression', ('Name', (1, 0, 1, 1), 'v', ('Load',))),
-('Expression', ('List', (1, 0, 1, 7), [('Constant', (1, 1, 1, 2), 1, None), ('Constant', (1, 3, 1, 4), 2, None), ('Constant', (1, 5, 1, 6), 3, None)], ('Load',))),
-('Expression', ('List', (1, 0, 1, 2), [], ('Load',))),
-('Expression', ('Tuple', (1, 0, 1, 5), [('Constant', (1, 0, 1, 1), 1, None), ('Constant', (1, 2, 1, 3), 2, None), ('Constant', (1, 4, 1, 5), 3, None)], ('Load',))),
-('Expression', ('Tuple', (1, 0, 1, 7), [('Constant', (1, 1, 1, 2), 1, None), ('Constant', (1, 3, 1, 4), 2, None), ('Constant', (1, 5, 1, 6), 3, None)], ('Load',))),
-('Expression', ('Tuple', (1, 0, 1, 2), [], ('Load',))),
-('Expression', ('Call', (1, 0, 1, 17), ('Attribute', (1, 0, 1, 7), ('Attribute', (1, 0, 1, 5), ('Attribute', (1, 0, 1, 3), ('Name', (1, 0, 1, 1), 'a', ('Load',)), 'b', ('Load',)), 'c', ('Load',)), 'd', ('Load',)), [('Subscript', (1, 8, 1, 16), ('Attribute', (1, 8, 1, 11), ('Name', (1, 8, 1, 9), 'a', ('Load',)), 'b', ('Load',)), ('Slice', (1, 12, 1, 15), ('Constant', (1, 12, 1, 13), 1, None), ('Constant', (1, 14, 1, 15), 2, None), None), ('Load',))], [])),
-('Expression', ('Subscript', (1, 0, 1, 7), ('List', (1, 0, 1, 3), [('Constant', (1, 1, 1, 2), 5, None)], ('Load',)), ('Slice', (1, 4, 1, 6), ('Constant', (1, 4, 1, 5), 1, None), None, None), ('Load',))),
-('Expression', ('Subscript', (1, 0, 1, 7), ('List', (1, 0, 1, 3), [('Constant', (1, 1, 1, 2), 5, None)], ('Load',)), ('Slice', (1, 4, 1, 6), None, ('Constant', (1, 5, 1, 6), 1, None), None), ('Load',))),
-('Expression', ('Subscript', (1, 0, 1, 8), ('List', (1, 0, 1, 3), [('Constant', (1, 1, 1, 2), 5, None)], ('Load',)), ('Slice', (1, 4, 1, 7), None, None, ('Constant', (1, 6, 1, 7), 1, None)), ('Load',))),
-('Expression', ('Subscript', (1, 0, 1, 10), ('List', (1, 0, 1, 3), [('Constant', (1, 1, 1, 2), 5, None)], ('Load',)), ('Slice', (1, 4, 1, 9), ('Constant', (1, 4, 1, 5), 1, None), ('Constant', (1, 6, 1, 7), 1, None), ('Constant', (1, 8, 1, 9), 1, None)), ('Load',))),
-('Expression', ('IfExp', (1, 0, 1, 21), ('Name', (1, 9, 1, 10), 'x', ('Load',)), ('Call', (1, 0, 1, 5), ('Name', (1, 0, 1, 3), 'foo', ('Load',)), [], []), ('Call', (1, 16, 1, 21), ('Name', (1, 16, 1, 19), 'bar', ('Load',)), [], []))),
-('Expression', ('JoinedStr', (1, 0, 1, 6), [('FormattedValue', (1, 2, 1, 5), ('Name', (1, 3, 1, 4), 'a', ('Load',)), -1, None)])),
-('Expression', ('JoinedStr', (1, 0, 1, 10), [('FormattedValue', (1, 2, 1, 9), ('Name', (1, 3, 1, 4), 'a', ('Load',)), -1, ('JoinedStr', (1, 4, 1, 8), [('Constant', (1, 5, 1, 8), '.2f', None)]))])),
-('Expression', ('JoinedStr', (1, 0, 1, 8), [('FormattedValue', (1, 2, 1, 7), ('Name', (1, 3, 1, 4), 'a', ('Load',)), 114, None)])),
-('Expression', ('JoinedStr', (1, 0, 1, 11), [('Constant', (1, 2, 1, 6), 'foo(', None), ('FormattedValue', (1, 6, 1, 9), ('Name', (1, 7, 1, 8), 'a', ('Load',)), -1, None), ('Constant', (1, 9, 1, 10), ')', None)])),
-]
-main()
diff --git a/Lib/test/test_ast/utils.py b/Lib/test/test_ast/utils.py
new file mode 100644
index 00000000000000..145e89ee94e935
--- /dev/null
+++ b/Lib/test/test_ast/utils.py
@@ -0,0 +1,15 @@
+def to_tuple(t):
+ if t is None or isinstance(t, (str, int, complex, float, bytes)) or t is Ellipsis:
+ return t
+ elif isinstance(t, list):
+ return [to_tuple(e) for e in t]
+ result = [t.__class__.__name__]
+ if hasattr(t, 'lineno') and hasattr(t, 'col_offset'):
+ result.append((t.lineno, t.col_offset))
+ if hasattr(t, 'end_lineno') and hasattr(t, 'end_col_offset'):
+ result[-1] += (t.end_lineno, t.end_col_offset)
+ if t._fields is None:
+ return tuple(result)
+ for f in t._fields:
+ result.append(to_tuple(getattr(t, f)))
+ return tuple(result)
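A minimal usage sketch of the new helper (the import path assumes the Lib/test/test_ast/utils.py layout added above): it converts a parsed tree into the nested-tuple form used by the expected-result dumps removed earlier in this patch.

    import ast
    from test.test_ast.utils import to_tuple  # path assumed from the new file above

    # Convert a parsed expression into the nested-tuple form used by the
    # expected-result tables in the test data.
    tree = ast.parse("a + b", mode="eval")
    print(to_tuple(tree))
    # ('Expression', ('BinOp', (1, 0, 1, 5), ('Name', (1, 0, 1, 1), 'a', ('Load',)),
    #  ('Add',), ('Name', (1, 4, 1, 5), 'b', ('Load',))))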
diff --git a/Makefile.pre.in b/Makefile.pre.in
index 9ea7bc49be316c..5608e593ac9aca 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -2366,6 +2366,7 @@ LIBSUBDIRS= asyncio \
__phello__
TESTSUBDIRS= idlelib/idle_test \
test \
+ test/test_ast \
test/archivetestdata \
test/audiodata \
test/certdata \
From 89fa05fdce20cc0a19689eb365f1828b086d0b17 Mon Sep 17 00:00:00 2001
From: Petr Viktorin
Date: Mon, 29 Jul 2024 18:10:25 +0200
Subject: [PATCH 066/139] gh-122234: Add DECREFs to error paths (#122406)
Co-Authored-By: Kirill Podoprigora
---
Python/bltinmodule.c | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c
index ae025e767ec838..99ed06972be98e 100644
--- a/Python/bltinmodule.c
+++ b/Python/bltinmodule.c
@@ -2694,6 +2694,8 @@ builtin_sum_impl(PyObject *module, PyObject *iterable, PyObject *start)
continue;
}
else {
+ Py_DECREF(item);
+ Py_DECREF(iter);
return NULL;
}
}
@@ -2745,6 +2747,8 @@ builtin_sum_impl(PyObject *module, PyObject *iterable, PyObject *start)
continue;
}
else {
+ Py_DECREF(item);
+ Py_DECREF(iter);
return NULL;
}
}
From 490e0ad83ac72c5688dfbbab4eac61ccfd7be5fd Mon Sep 17 00:00:00 2001
From: Eric Snow
Date: Mon, 29 Jul 2024 10:23:23 -0600
Subject: [PATCH 067/139] gh-117482: Fix the Slot Wrapper Inheritance Tests
(gh-122248)
The tests only checked cases where the slot wrapper was present in the initial initialization; they missed cases where the slot wrapper was added during the additional initializations. This fixes that.
---
Lib/test/support/__init__.py | 55 ++++++++++++++++++++++++++++++
Lib/test/test_embed.py | 57 ++++++++++++++++++++-----------
Lib/test/test_types.py | 65 +++++++++++++++++++++---------------
Programs/_testembed.c | 14 ++++++--
4 files changed, 143 insertions(+), 48 deletions(-)
diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py
index 37e3305036f499..f4dce793ff1acb 100644
--- a/Lib/test/support/__init__.py
+++ b/Lib/test/support/__init__.py
@@ -2608,6 +2608,61 @@ def copy_python_src_ignore(path, names):
return ignored
+def iter_builtin_types():
+ for obj in __builtins__.values():
+ if not isinstance(obj, type):
+ continue
+ cls = obj
+ if cls.__module__ != 'builtins':
+ continue
+ yield cls
+
+
+def iter_slot_wrappers(cls):
+ assert cls.__module__ == 'builtins', cls
+
+ def is_slot_wrapper(name, value):
+ if not isinstance(value, types.WrapperDescriptorType):
+ assert not repr(value).startswith('<slot wrapper '), (cls, name, value)
+ int loops = main_argc > 3
+ ? main_argc - 2
+ : INIT_LOOPS;
- for (int i=1; i <= INIT_LOOPS; i++) {
- fprintf(stderr, "--- Loop #%d ---\n", i);
+ for (int i=0; i < loops; i++) {
+ fprintf(stderr, "--- Loop #%d ---\n", i+1);
fflush(stderr);
+ if (main_argc > 3) {
+ code = main_argv[i+2];
+ }
+
_testembed_Py_InitializeFromConfig();
int err = PyRun_SimpleString(code);
Py_Finalize();
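For context, the new support helpers enumerate the slot wrappers that builtin types expose so that snapshots taken across repeated initializations can be compared. A rough sketch of that idea (not the exact helpers from the patch) is:

    import builtins
    import types

    def slot_wrapper_names(cls):
        # Names on the class whose values are wrapper descriptors (slot wrappers).
        for name, value in vars(cls).items():
            if isinstance(value, types.WrapperDescriptorType):
                yield name

    snapshot = {
        cls.__name__: sorted(slot_wrapper_names(cls))
        for cls in vars(builtins).values()
        if isinstance(cls, type) and cls.__module__ == 'builtins'
    }
    # Comparing two such snapshots (e.g. taken in two interpreter
    # initializations) reveals slot wrappers that only appear later.
    print(len(snapshot), 'builtin types inspected')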
From 68840e91ac6689d3954b98a9ab136e194b5250b8 Mon Sep 17 00:00:00 2001
From: Serhiy Storchaka
Date: Mon, 29 Jul 2024 21:52:48 +0300
Subject: [PATCH 068/139] gh-122311: Fix a refleak in pickle (GH-122411)
---
Modules/_pickle.c | 1 +
1 file changed, 1 insertion(+)
diff --git a/Modules/_pickle.c b/Modules/_pickle.c
index 452b4aff0237ca..50c73dca0db281 100644
--- a/Modules/_pickle.c
+++ b/Modules/_pickle.c
@@ -3123,6 +3123,7 @@ batch_dict(PickleState *state, PicklerObject *self, PyObject *iter)
if (!PyTuple_Check(obj) || PyTuple_Size(obj) != 2) {
PyErr_SetString(PyExc_TypeError, "dict items "
"iterator must return 2-tuples");
+ Py_DECREF(obj);
return -1;
}
i = save(state, self, PyTuple_GET_ITEM(obj, 0), 0);
From 15d4cd096758ca089c6bd6ed808c34cca676d9bb Mon Sep 17 00:00:00 2001
From: Brandt Bucher
Date: Mon, 29 Jul 2024 12:17:47 -0700
Subject: [PATCH 069/139] GH-116090: Fire RAISE events from _FOR_ITER_TIER_TWO
(GH-122413)
---
Include/internal/pycore_ceval.h | 1 +
.../2024-07-29-10-55-46.gh-issue-116090.p1MhU0.rst | 2 ++
Python/bytecodes.c | 7 ++++---
Python/ceval.c | 9 +++------
Python/executor_cases.c.h | 1 +
Python/generated_cases.c.h | 6 +++---
6 files changed, 14 insertions(+), 12 deletions(-)
create mode 100644 Misc/NEWS.d/next/Core_and_Builtins/2024-07-29-10-55-46.gh-issue-116090.p1MhU0.rst
diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h
index fac4a4d228053e..4fdee9fdf2a1ff 100644
--- a/Include/internal/pycore_ceval.h
+++ b/Include/internal/pycore_ceval.h
@@ -263,6 +263,7 @@ PyAPI_FUNC(PyObject *) _PyEval_ImportFrom(PyThreadState *, PyObject *, PyObject
PyAPI_FUNC(PyObject *) _PyEval_ImportName(PyThreadState *, _PyInterpreterFrame *, PyObject *, PyObject *, PyObject *);
PyAPI_FUNC(PyObject *)_PyEval_MatchClass(PyThreadState *tstate, PyObject *subject, PyObject *type, Py_ssize_t nargs, PyObject *kwargs);
PyAPI_FUNC(PyObject *)_PyEval_MatchKeys(PyThreadState *tstate, PyObject *map, PyObject *keys);
+PyAPI_FUNC(void) _PyEval_MonitorRaise(PyThreadState *tstate, _PyInterpreterFrame *frame, _Py_CODEUNIT *instr);
PyAPI_FUNC(int) _PyEval_UnpackIterableStackRef(PyThreadState *tstate, _PyStackRef v, int argcnt, int argcntafter, _PyStackRef *sp);
PyAPI_FUNC(void) _PyEval_FrameClearAndPop(PyThreadState *tstate, _PyInterpreterFrame *frame);
PyAPI_FUNC(PyObject **) _PyObjectArray_FromStackRefArray(_PyStackRef *input, Py_ssize_t nargs, PyObject **scratch);
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-07-29-10-55-46.gh-issue-116090.p1MhU0.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-07-29-10-55-46.gh-issue-116090.p1MhU0.rst
new file mode 100644
index 00000000000000..6efb620961f498
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-07-29-10-55-46.gh-issue-116090.p1MhU0.rst
@@ -0,0 +1,2 @@
+Fix an issue in JIT builds that prevented some :keyword:`for` loops from
+correctly firing :monitoring-event:`RAISE` monitoring events.
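As a rough illustration of the event in question (a sketch, not part of the patch; tool id 0 is assumed to be unused), a RAISE callback registered through sys.monitoring should observe the StopIteration raised by an iterator that a for loop consumes:

    import sys

    TOOL = 0  # assumed-free tool id
    seen = []

    class Stopper:
        def __iter__(self):
            return self
        def __next__(self):
            raise StopIteration

    sys.monitoring.use_tool_id(TOOL, "demo")
    sys.monitoring.register_callback(
        TOOL, sys.monitoring.events.RAISE,
        lambda code, offset, exc: seen.append(type(exc).__name__))
    sys.monitoring.set_events(TOOL, sys.monitoring.events.RAISE)

    for _ in Stopper():
        pass

    sys.monitoring.set_events(TOOL, 0)
    sys.monitoring.free_tool_id(TOOL)
    print(seen)  # expected to include 'StopIteration'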
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index d74f2aae0483ce..4afce2cc3bea9d 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -1124,7 +1124,7 @@ dummy_func(
if (retval_o == NULL) {
if (_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)
) {
- monitor_raise(tstate, frame, this_instr);
+ _PyEval_MonitorRaise(tstate, frame, this_instr);
}
if (_PyGen_FetchStopIterationValue(&retval_o) == 0) {
assert(retval_o != NULL);
@@ -2824,7 +2824,7 @@ dummy_func(
if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) {
ERROR_NO_POP();
}
- monitor_raise(tstate, frame, this_instr);
+ _PyEval_MonitorRaise(tstate, frame, this_instr);
_PyErr_Clear(tstate);
}
/* iterator ended normally */
@@ -2849,6 +2849,7 @@ dummy_func(
if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) {
ERROR_NO_POP();
}
+ _PyEval_MonitorRaise(tstate, frame, frame->instr_ptr);
_PyErr_Clear(tstate);
}
/* iterator ended normally */
@@ -2875,7 +2876,7 @@ dummy_func(
if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) {
ERROR_NO_POP();
}
- monitor_raise(tstate, frame, this_instr);
+ _PyEval_MonitorRaise(tstate, frame, this_instr);
_PyErr_Clear(tstate);
}
/* iterator ended normally */
diff --git a/Python/ceval.c b/Python/ceval.c
index c0074c45b27111..425a2a01bea8ed 100644
--- a/Python/ceval.c
+++ b/Python/ceval.c
@@ -225,9 +225,6 @@ maybe_lltrace_resume_frame(_PyInterpreterFrame *frame, _PyInterpreterFrame *skip
#endif
-static void monitor_raise(PyThreadState *tstate,
- _PyInterpreterFrame *frame,
- _Py_CODEUNIT *instr);
static void monitor_reraise(PyThreadState *tstate,
_PyInterpreterFrame *frame,
_Py_CODEUNIT *instr);
@@ -884,7 +881,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int
PyTraceBack_Here(f);
}
}
- monitor_raise(tstate, frame, next_instr-1);
+ _PyEval_MonitorRaise(tstate, frame, next_instr-1);
exception_unwind:
{
/* We can't use frame->instr_ptr here, as RERAISE may have set it */
@@ -2200,8 +2197,8 @@ no_tools_for_local_event(PyThreadState *tstate, _PyInterpreterFrame *frame, int
}
}
-static void
-monitor_raise(PyThreadState *tstate, _PyInterpreterFrame *frame,
+void
+_PyEval_MonitorRaise(PyThreadState *tstate, _PyInterpreterFrame *frame,
_Py_CODEUNIT *instr)
{
if (no_tools_for_global_event(tstate, PY_MONITORING_EVENT_RAISE)) {
diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h
index 6e3f6cc62fe11f..62654035e80f50 100644
--- a/Python/executor_cases.c.h
+++ b/Python/executor_cases.c.h
@@ -3173,6 +3173,7 @@
if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) {
JUMP_TO_ERROR();
}
+ _PyEval_MonitorRaise(tstate, frame, frame->instr_ptr);
_PyErr_Clear(tstate);
}
/* iterator ended normally */
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h
index 76d1cc7ad6cf95..3c643f637ab095 100644
--- a/Python/generated_cases.c.h
+++ b/Python/generated_cases.c.h
@@ -3063,7 +3063,7 @@
if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) {
goto error;
}
- monitor_raise(tstate, frame, this_instr);
+ _PyEval_MonitorRaise(tstate, frame, this_instr);
_PyErr_Clear(tstate);
}
/* iterator ended normally */
@@ -3731,7 +3731,7 @@
if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) {
goto error;
}
- monitor_raise(tstate, frame, this_instr);
+ _PyEval_MonitorRaise(tstate, frame, this_instr);
_PyErr_Clear(tstate);
}
/* iterator ended normally */
@@ -6026,7 +6026,7 @@
if (retval_o == NULL) {
if (_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)
) {
- monitor_raise(tstate, frame, this_instr);
+ _PyEval_MonitorRaise(tstate, frame, this_instr);
}
if (_PyGen_FetchStopIterationValue(&retval_o) == 0) {
assert(retval_o != NULL);
From 046670c3a0480560b6bfa06727fd7aa6a1798614 Mon Sep 17 00:00:00 2001
From: Donghee Na
Date: Tue, 30 Jul 2024 04:20:36 +0900
Subject: [PATCH 070/139] gh-121996: Fix --disable-safety and
--enable-slower-safety options (gh-122414)
---
configure | 19 +++++++++++++------
configure.ac | 11 ++++++-----
2 files changed, 19 insertions(+), 11 deletions(-)
diff --git a/configure b/configure
index 52988f77f6d926..39ab48fa4e2526 100755
--- a/configure
+++ b/configure
@@ -9682,10 +9682,10 @@ then :
then :
disable_safety=no
else $as_nop
- disable_saftey=yes
+ disable_safety=yes
fi
else $as_nop
- disable_saftey=no
+ disable_safety=no
fi
{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $disable_safety" >&5
@@ -9726,7 +9726,7 @@ fi
printf "%s\n" "$ax_cv_check_cflags__Werror__fstack_protector_strong" >&6; }
if test "x$ax_cv_check_cflags__Werror__fstack_protector_strong" = xyes
then :
- BASECFLAGS="$BASECFLAGS -fstack-protector-strong"
+ CFLAGS_NODIST="$CFLAGS_NODIST -fstack-protector-strong"
else $as_nop
{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: -fstack-protector-strong not supported" >&5
printf "%s\n" "$as_me: WARNING: -fstack-protector-strong not supported" >&2;}
@@ -9765,7 +9765,7 @@ fi
printf "%s\n" "$ax_cv_check_cflags__Werror__Wtrampolines" >&6; }
if test "x$ax_cv_check_cflags__Werror__Wtrampolines" = xyes
then :
- BASECFLAGS="$BASECFLAGS -Wtrampolines"
+ CFLAGS_NODIST="$CFLAGS_NODIST -Wtrampolines"
else $as_nop
{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: -Wtrampolines not supported" >&5
printf "%s\n" "$as_me: WARNING: -Wtrampolines not supported" >&2;}
@@ -9778,7 +9778,14 @@ printf %s "checking for --enable-slower-safety... " >&6; }
# Check whether --enable-slower-safety was given.
if test ${enable_slower_safety+y}
then :
- enableval=$enable_slower_safety;
+ enableval=$enable_slower_safety; if test "x$disable_slower_safety" = xyes
+then :
+ enable_slower_safety=no
+else $as_nop
+ enable_slower_safety=yes
+fi
+else $as_nop
+ enable_slower_safety=no
fi
{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $enable_slower_safety" >&5
@@ -9819,7 +9826,7 @@ fi
printf "%s\n" "$ax_cv_check_cflags__Werror__D_FORTIFY_SOURCE_3" >&6; }
if test "x$ax_cv_check_cflags__Werror__D_FORTIFY_SOURCE_3" = xyes
then :
- BASECFLAGS="$BASECFLAGS -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=3"
+ CFLAGS_NODIST="$CFLAGS_NODIST -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=3"
else $as_nop
{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: -D_FORTIFY_SOURCE=3 not supported" >&5
printf "%s\n" "$as_me: WARNING: -D_FORTIFY_SOURCE=3 not supported" >&2;}
diff --git a/configure.ac b/configure.ac
index 5bde6803cd5a7b..62ed812991fc4e 100644
--- a/configure.ac
+++ b/configure.ac
@@ -2503,23 +2503,24 @@ AS_VAR_IF([with_strict_overflow], [yes],
AC_MSG_CHECKING([for --disable-safety])
AC_ARG_ENABLE([safety],
[AS_HELP_STRING([--disable-safety], [disable usage of the security compiler options with no performance overhead])],
- [AS_VAR_IF([enable_safety], [yes], [disable_safety=no], [disable_saftey=yes])], [disable_saftey=no])
+ [AS_VAR_IF([enable_safety], [yes], [disable_safety=no], [disable_safety=yes])], [disable_safety=no])
AC_MSG_RESULT([$disable_safety])
if test "$disable_safety" = "no"
then
- AX_CHECK_COMPILE_FLAG([-fstack-protector-strong], [BASECFLAGS="$BASECFLAGS -fstack-protector-strong"], [AC_MSG_WARN([-fstack-protector-strong not supported])], [-Werror])
- AX_CHECK_COMPILE_FLAG([-Wtrampolines], [BASECFLAGS="$BASECFLAGS -Wtrampolines"], [AC_MSG_WARN([-Wtrampolines not supported])], [-Werror])
+ AX_CHECK_COMPILE_FLAG([-fstack-protector-strong], [CFLAGS_NODIST="$CFLAGS_NODIST -fstack-protector-strong"], [AC_MSG_WARN([-fstack-protector-strong not supported])], [-Werror])
+ AX_CHECK_COMPILE_FLAG([-Wtrampolines], [CFLAGS_NODIST="$CFLAGS_NODIST -Wtrampolines"], [AC_MSG_WARN([-Wtrampolines not supported])], [-Werror])
fi
AC_MSG_CHECKING([for --enable-slower-safety])
AC_ARG_ENABLE([slower-safety],
- [AS_HELP_STRING([--enable-slower-safety], [enable usage of the security compiler options with performance overhead])],[])
+ [AS_HELP_STRING([--enable-slower-safety], [enable usage of the security compiler options with performance overhead])],
+ [AS_VAR_IF([disable_slower_safety], [yes], [enable_slower_safety=no], [enable_slower_safety=yes])], [enable_slower_safety=no])
AC_MSG_RESULT([$enable_slower_safety])
if test "$enable_slower_safety" = "yes"
then
- AX_CHECK_COMPILE_FLAG([-D_FORTIFY_SOURCE=3], [BASECFLAGS="$BASECFLAGS -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=3"], [AC_MSG_WARN([-D_FORTIFY_SOURCE=3 not supported])], [-Werror])
+ AX_CHECK_COMPILE_FLAG([-D_FORTIFY_SOURCE=3], [CFLAGS_NODIST="$CFLAGS_NODIST -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=3"], [AC_MSG_WARN([-D_FORTIFY_SOURCE=3 not supported])], [-Werror])
fi
case $GCC in
From 76bdfa4cd02532519fb43ae91244e2b4b3650d78 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Mon, 29 Jul 2024 22:20:40 +0100
Subject: [PATCH 071/139] GH-122085: Use include files for C API deprecations
(#109843)
---
.../c-api-pending-removal-in-3.14.rst | 46 ++++++++
.../c-api-pending-removal-in-3.15.rst | 20 ++++
.../c-api-pending-removal-in-future.rst | 31 ++++++
Doc/deprecations/index.rst | 9 +-
Doc/whatsnew/3.12.rst | 87 +--------------
Doc/whatsnew/3.13.rst | 103 ++----------------
Doc/whatsnew/3.14.rst | 6 +
7 files changed, 121 insertions(+), 181 deletions(-)
create mode 100644 Doc/deprecations/c-api-pending-removal-in-3.14.rst
create mode 100644 Doc/deprecations/c-api-pending-removal-in-3.15.rst
create mode 100644 Doc/deprecations/c-api-pending-removal-in-future.rst
diff --git a/Doc/deprecations/c-api-pending-removal-in-3.14.rst b/Doc/deprecations/c-api-pending-removal-in-3.14.rst
new file mode 100644
index 00000000000000..369892a75b16eb
--- /dev/null
+++ b/Doc/deprecations/c-api-pending-removal-in-3.14.rst
@@ -0,0 +1,46 @@
+Pending Removal in Python 3.14
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+* The ``ma_version_tag`` field in :c:type:`PyDictObject` for extension modules
+ (:pep:`699`; :gh:`101193`).
+
+* Creating :c:data:`immutable types <Py_TPFLAGS_IMMUTABLETYPE>` with mutable
+ bases (:gh:`95388`).
+
+* Functions to configure Python's initialization, deprecated in Python 3.11:
+
+ * ``PySys_SetArgvEx()``: set :c:member:`PyConfig.argv` instead.
+ * ``PySys_SetArgv()``: set :c:member:`PyConfig.argv` instead.
+ * ``Py_SetProgramName()``: set :c:member:`PyConfig.program_name` instead.
+ * ``Py_SetPythonHome()``: set :c:member:`PyConfig.home` instead.
+
+ The :c:func:`Py_InitializeFromConfig` API should be used with
+ :c:type:`PyConfig` instead.
+
+* Global configuration variables:
+
+ * :c:var:`Py_DebugFlag`: use :c:member:`PyConfig.parser_debug` instead.
+ * :c:var:`Py_VerboseFlag`: use :c:member:`PyConfig.verbose` instead.
+ * :c:var:`Py_QuietFlag`: use :c:member:`PyConfig.quiet` instead.
+ * :c:var:`Py_InteractiveFlag`: use :c:member:`PyConfig.interactive` instead.
+ * :c:var:`Py_InspectFlag`: use :c:member:`PyConfig.inspect` instead.
+ * :c:var:`Py_OptimizeFlag`: use :c:member:`PyConfig.optimization_level` instead.
+ * :c:var:`Py_NoSiteFlag`: use :c:member:`PyConfig.site_import` instead.
+ * :c:var:`Py_BytesWarningFlag`: use :c:member:`PyConfig.bytes_warning` instead.
+ * :c:var:`Py_FrozenFlag`: use :c:member:`PyConfig.pathconfig_warnings` instead.
+ * :c:var:`Py_IgnoreEnvironmentFlag`: use :c:member:`PyConfig.use_environment` instead.
+ * :c:var:`Py_DontWriteBytecodeFlag`: use :c:member:`PyConfig.write_bytecode` instead.
+ * :c:var:`Py_NoUserSiteDirectory`: use :c:member:`PyConfig.user_site_directory` instead.
+ * :c:var:`Py_UnbufferedStdioFlag`: use :c:member:`PyConfig.buffered_stdio` instead.
+ * :c:var:`Py_HashRandomizationFlag`: use :c:member:`PyConfig.use_hash_seed`
+ and :c:member:`PyConfig.hash_seed` instead.
+ * :c:var:`Py_IsolatedFlag`: use :c:member:`PyConfig.isolated` instead.
+ * :c:var:`Py_LegacyWindowsFSEncodingFlag`: use :c:member:`PyPreConfig.legacy_windows_fs_encoding` instead.
+ * :c:var:`Py_LegacyWindowsStdioFlag`: use :c:member:`PyConfig.legacy_windows_stdio` instead.
+ * :c:var:`!Py_FileSystemDefaultEncoding`: use :c:member:`PyConfig.filesystem_encoding` instead.
+ * :c:var:`!Py_HasFileSystemDefaultEncoding`: use :c:member:`PyConfig.filesystem_encoding` instead.
+ * :c:var:`!Py_FileSystemDefaultEncodeErrors`: use :c:member:`PyConfig.filesystem_errors` instead.
+ * :c:var:`!Py_UTF8Mode`: use :c:member:`PyPreConfig.utf8_mode` instead. (see :c:func:`Py_PreInitialize`)
+
+ The :c:func:`Py_InitializeFromConfig` API should be used with
+ :c:type:`PyConfig` instead.
diff --git a/Doc/deprecations/c-api-pending-removal-in-3.15.rst b/Doc/deprecations/c-api-pending-removal-in-3.15.rst
new file mode 100644
index 00000000000000..c676927ed69212
--- /dev/null
+++ b/Doc/deprecations/c-api-pending-removal-in-3.15.rst
@@ -0,0 +1,20 @@
+Pending Removal in Python 3.15
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+* The bundled copy of ``libmpdecimal``.
+* :c:func:`PyImport_ImportModuleNoBlock`: use :c:func:`PyImport_ImportModule` instead.
+* :c:func:`PyWeakref_GET_OBJECT`: use :c:func:`PyWeakref_GetRef` instead.
+* :c:func:`PyWeakref_GetObject`: use :c:func:`PyWeakref_GetRef` instead.
+* :c:type:`!Py_UNICODE_WIDE` type: use :c:type:`wchar_t` instead.
+* :c:type:`Py_UNICODE` type: use :c:type:`wchar_t` instead.
+* Python initialization functions:
+
+ * :c:func:`PySys_ResetWarnOptions`: clear :data:`sys.warnoptions` and
+ :data:`!warnings.filters` instead.
+ * :c:func:`Py_GetExecPrefix`: get :data:`sys.exec_prefix` instead.
+ * :c:func:`Py_GetPath`: get :data:`sys.path` instead.
+ * :c:func:`Py_GetPrefix`: get :data:`sys.prefix` instead.
+ * :c:func:`Py_GetProgramFullPath`: get :data:`sys.executable` instead.
+ * :c:func:`Py_GetProgramName`: get :data:`sys.executable` instead.
+ * :c:func:`Py_GetPythonHome`: get :c:member:`PyConfig.home` or
+ the :envvar:`PYTHONHOME` environment variable instead.
diff --git a/Doc/deprecations/c-api-pending-removal-in-future.rst b/Doc/deprecations/c-api-pending-removal-in-future.rst
new file mode 100644
index 00000000000000..f646be45c8a770
--- /dev/null
+++ b/Doc/deprecations/c-api-pending-removal-in-future.rst
@@ -0,0 +1,31 @@
+Pending Removal in Future Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The following APIs are deprecated and will be removed,
+although there is currently no date scheduled for their removal.
+
+* :c:macro:`Py_TPFLAGS_HAVE_FINALIZE`: unneeded since Python 3.8.
+* :c:func:`PyErr_Fetch`: use :c:func:`PyErr_GetRaisedException` instead.
+* :c:func:`PyErr_NormalizeException`: use :c:func:`PyErr_GetRaisedException` instead.
+* :c:func:`PyErr_Restore`: use :c:func:`PyErr_SetRaisedException` instead.
+* :c:func:`PyModule_GetFilename`: use :c:func:`PyModule_GetFilenameObject` instead.
+* :c:func:`PyOS_AfterFork`: use :c:func:`PyOS_AfterFork_Child` instead.
+* :c:func:`PySlice_GetIndicesEx`: use :c:func:`PySlice_Unpack` and :c:func:`PySlice_AdjustIndices` instead.
+* :c:func:`!PyUnicode_AsDecodedObject`: use :c:func:`PyCodec_Decode` instead.
+* :c:func:`!PyUnicode_AsDecodedUnicode`: use :c:func:`PyCodec_Decode` instead.
+* :c:func:`!PyUnicode_AsEncodedObject`: use :c:func:`PyCodec_Encode` instead.
+* :c:func:`!PyUnicode_AsEncodedUnicode`: use :c:func:`PyCodec_Encode` instead.
+* :c:func:`PyUnicode_READY`: unneeded since Python 3.12
+* :c:func:`!PyErr_Display`: use :c:func:`PyErr_DisplayException` instead.
+* :c:func:`!_PyErr_ChainExceptions`: use ``_PyErr_ChainExceptions1`` instead.
+* :c:member:`!PyBytesObject.ob_shash` member:
+ call :c:func:`PyObject_Hash` instead.
+* :c:member:`!PyDictObject.ma_version_tag` member.
+* Thread Local Storage (TLS) API:
+
+ * :c:func:`PyThread_create_key`: use :c:func:`PyThread_tss_alloc` instead.
+ * :c:func:`PyThread_delete_key`: use :c:func:`PyThread_tss_free` instead.
+ * :c:func:`PyThread_set_key_value`: use :c:func:`PyThread_tss_set` instead.
+ * :c:func:`PyThread_get_key_value`: use :c:func:`PyThread_tss_get` instead.
+ * :c:func:`PyThread_delete_key_value`: use :c:func:`PyThread_tss_delete` instead.
+ * :c:func:`PyThread_ReInitTLS`: unneeded since Python 3.7.
diff --git a/Doc/deprecations/index.rst b/Doc/deprecations/index.rst
index cfb30dd09aef6f..a9efb0bc744335 100644
--- a/Doc/deprecations/index.rst
+++ b/Doc/deprecations/index.rst
@@ -1,10 +1,15 @@
Deprecations
============
-.. include:: pending-removal-in-3.14.rst
-
.. include:: pending-removal-in-3.15.rst
.. include:: pending-removal-in-3.16.rst
.. include:: pending-removal-in-future.rst
+
+C API Deprecations
+------------------
+
+.. include:: c-api-pending-removal-in-3.15.rst
+
+.. include:: c-api-pending-removal-in-future.rst
diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst
index fc2b6519fb1307..3821ee3648e909 100644
--- a/Doc/whatsnew/3.12.rst
+++ b/Doc/whatsnew/3.12.rst
@@ -2210,92 +2210,13 @@ Deprecated
overrides :c:member:`~PyTypeObject.tp_new` is deprecated.
Call the metaclass instead.
-Pending Removal in Python 3.14
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+.. Add deprecations above alphabetically, not here at the end.
-* The ``ma_version_tag`` field in :c:type:`PyDictObject` for extension modules
- (:pep:`699`; :gh:`101193`).
+.. include:: ../deprecations/c-api-pending-removal-in-3.14.rst
-* Global configuration variables:
+.. include:: ../deprecations/c-api-pending-removal-in-3.15.rst
- * :c:var:`Py_DebugFlag`: use :c:member:`PyConfig.parser_debug`
- * :c:var:`Py_VerboseFlag`: use :c:member:`PyConfig.verbose`
- * :c:var:`Py_QuietFlag`: use :c:member:`PyConfig.quiet`
- * :c:var:`Py_InteractiveFlag`: use :c:member:`PyConfig.interactive`
- * :c:var:`Py_InspectFlag`: use :c:member:`PyConfig.inspect`
- * :c:var:`Py_OptimizeFlag`: use :c:member:`PyConfig.optimization_level`
- * :c:var:`Py_NoSiteFlag`: use :c:member:`PyConfig.site_import`
- * :c:var:`Py_BytesWarningFlag`: use :c:member:`PyConfig.bytes_warning`
- * :c:var:`Py_FrozenFlag`: use :c:member:`PyConfig.pathconfig_warnings`
- * :c:var:`Py_IgnoreEnvironmentFlag`: use :c:member:`PyConfig.use_environment`
- * :c:var:`Py_DontWriteBytecodeFlag`: use :c:member:`PyConfig.write_bytecode`
- * :c:var:`Py_NoUserSiteDirectory`: use :c:member:`PyConfig.user_site_directory`
- * :c:var:`Py_UnbufferedStdioFlag`: use :c:member:`PyConfig.buffered_stdio`
- * :c:var:`Py_HashRandomizationFlag`: use :c:member:`PyConfig.use_hash_seed`
- and :c:member:`PyConfig.hash_seed`
- * :c:var:`Py_IsolatedFlag`: use :c:member:`PyConfig.isolated`
- * :c:var:`Py_LegacyWindowsFSEncodingFlag`: use :c:member:`PyPreConfig.legacy_windows_fs_encoding`
- * :c:var:`Py_LegacyWindowsStdioFlag`: use :c:member:`PyConfig.legacy_windows_stdio`
- * :c:var:`!Py_FileSystemDefaultEncoding`: use :c:member:`PyConfig.filesystem_encoding`
- * :c:var:`!Py_HasFileSystemDefaultEncoding`: use :c:member:`PyConfig.filesystem_encoding`
- * :c:var:`!Py_FileSystemDefaultEncodeErrors`: use :c:member:`PyConfig.filesystem_errors`
- * :c:var:`!Py_UTF8Mode`: use :c:member:`PyPreConfig.utf8_mode` (see :c:func:`Py_PreInitialize`)
-
- The :c:func:`Py_InitializeFromConfig` API should be used with
- :c:type:`PyConfig` instead.
-
-* Creating :c:data:`immutable types <Py_TPFLAGS_IMMUTABLETYPE>` with mutable
- bases (:gh:`95388`).
-
-Pending Removal in Python 3.15
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-* :c:func:`PyImport_ImportModuleNoBlock`: use :c:func:`PyImport_ImportModule`
-* :c:type:`!Py_UNICODE_WIDE` type: use :c:type:`wchar_t`
-* :c:type:`Py_UNICODE` type: use :c:type:`wchar_t`
-* Python initialization functions:
-
- * :c:func:`PySys_ResetWarnOptions`: clear :data:`sys.warnoptions` and
- :data:`!warnings.filters`
- * :c:func:`Py_GetExecPrefix`: get :data:`sys.exec_prefix`
- * :c:func:`Py_GetPath`: get :data:`sys.path`
- * :c:func:`Py_GetPrefix`: get :data:`sys.prefix`
- * :c:func:`Py_GetProgramFullPath`: get :data:`sys.executable`
- * :c:func:`Py_GetProgramName`: get :data:`sys.executable`
- * :c:func:`Py_GetPythonHome`: get :c:member:`PyConfig.home` or
- the :envvar:`PYTHONHOME` environment variable
-
-Pending Removal in Future Versions
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-The following APIs are deprecated and will be removed,
-although there is currently no date scheduled for their removal.
-
-* :c:macro:`Py_TPFLAGS_HAVE_FINALIZE`: unneeded since Python 3.8
-* :c:func:`PyErr_Fetch`: use :c:func:`PyErr_GetRaisedException`
-* :c:func:`PyErr_NormalizeException`: use :c:func:`PyErr_GetRaisedException`
-* :c:func:`PyErr_Restore`: use :c:func:`PyErr_SetRaisedException`
-* :c:func:`PyModule_GetFilename`: use :c:func:`PyModule_GetFilenameObject`
-* :c:func:`PyOS_AfterFork`: use :c:func:`PyOS_AfterFork_Child`
-* :c:func:`PySlice_GetIndicesEx`: use :c:func:`PySlice_Unpack` and :c:func:`PySlice_AdjustIndices`
-* :c:func:`!PyUnicode_AsDecodedObject`: use :c:func:`PyCodec_Decode`
-* :c:func:`!PyUnicode_AsDecodedUnicode`: use :c:func:`PyCodec_Decode`
-* :c:func:`!PyUnicode_AsEncodedObject`: use :c:func:`PyCodec_Encode`
-* :c:func:`!PyUnicode_AsEncodedUnicode`: use :c:func:`PyCodec_Encode`
-* :c:func:`PyUnicode_READY`: unneeded since Python 3.12
-* :c:func:`!PyErr_Display`: use :c:func:`PyErr_DisplayException`
-* :c:func:`!_PyErr_ChainExceptions`: use ``_PyErr_ChainExceptions1``
-* :c:member:`!PyBytesObject.ob_shash` member:
- call :c:func:`PyObject_Hash` instead
-* :c:member:`!PyDictObject.ma_version_tag` member
-* Thread Local Storage (TLS) API:
-
- * :c:func:`PyThread_create_key`: use :c:func:`PyThread_tss_alloc`
- * :c:func:`PyThread_delete_key`: use :c:func:`PyThread_tss_free`
- * :c:func:`PyThread_set_key_value`: use :c:func:`PyThread_tss_set`
- * :c:func:`PyThread_get_key_value`: use :c:func:`PyThread_tss_get`
- * :c:func:`PyThread_delete_key_value`: use :c:func:`PyThread_tss_delete`
- * :c:func:`PyThread_ReInitTLS`: unneeded since Python 3.7
+.. include:: ../deprecations/c-api-pending-removal-in-future.rst
Removed
-------
diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst
index e89abfdd292f48..0854631c832ef4 100644
--- a/Doc/whatsnew/3.13.rst
+++ b/Doc/whatsnew/3.13.rst
@@ -2208,6 +2208,9 @@ Removed C APIs
be used instead.
(Contributed by Serhiy Storchaka in :gh:`86493`.)
+* Remove undocumented ``PY_TIMEOUT_MAX`` constant from the limited C API.
+ (Contributed by Victor Stinner in :gh:`110014`.)
+
Deprecated C APIs
-----------------
@@ -2249,105 +2252,13 @@ Deprecated C APIs
Refer to the deprecation notices on each function for their recommended replacements.
(Soft deprecated as part of :pep:`667`.)
-Pending Removal in Python 3.14
-------------------------------
-
-* Creating immutable types (:c:macro:`Py_TPFLAGS_IMMUTABLETYPE`) with mutable
- bases using the C API.
-
-* Functions to configure the Python initialization, deprecated in Python 3.11:
-
- * ``PySys_SetArgvEx()``: set :c:member:`PyConfig.argv` instead.
- * ``PySys_SetArgv()``: set :c:member:`PyConfig.argv` instead.
- * ``Py_SetProgramName()``: set :c:member:`PyConfig.program_name` instead.
- * ``Py_SetPythonHome()``: set :c:member:`PyConfig.home` instead.
-
- The :c:func:`Py_InitializeFromConfig` API should be used with
- :c:type:`PyConfig` instead.
-
-* Global configuration variables:
-
- * :c:var:`Py_DebugFlag`: use :c:member:`PyConfig.parser_debug`
- * :c:var:`Py_VerboseFlag`: use :c:member:`PyConfig.verbose`
- * :c:var:`Py_QuietFlag`: use :c:member:`PyConfig.quiet`
- * :c:var:`Py_InteractiveFlag`: use :c:member:`PyConfig.interactive`
- * :c:var:`Py_InspectFlag`: use :c:member:`PyConfig.inspect`
- * :c:var:`Py_OptimizeFlag`: use :c:member:`PyConfig.optimization_level`
- * :c:var:`Py_NoSiteFlag`: use :c:member:`PyConfig.site_import`
- * :c:var:`Py_BytesWarningFlag`: use :c:member:`PyConfig.bytes_warning`
- * :c:var:`Py_FrozenFlag`: use :c:member:`PyConfig.pathconfig_warnings`
- * :c:var:`Py_IgnoreEnvironmentFlag`: use :c:member:`PyConfig.use_environment`
- * :c:var:`Py_DontWriteBytecodeFlag`: use :c:member:`PyConfig.write_bytecode`
- * :c:var:`Py_NoUserSiteDirectory`: use :c:member:`PyConfig.user_site_directory`
- * :c:var:`Py_UnbufferedStdioFlag`: use :c:member:`PyConfig.buffered_stdio`
- * :c:var:`Py_HashRandomizationFlag`: use :c:member:`PyConfig.use_hash_seed`
- and :c:member:`PyConfig.hash_seed`
- * :c:var:`Py_IsolatedFlag`: use :c:member:`PyConfig.isolated`
- * :c:var:`Py_LegacyWindowsFSEncodingFlag`: use :c:member:`PyPreConfig.legacy_windows_fs_encoding`
- * :c:var:`Py_LegacyWindowsStdioFlag`: use :c:member:`PyConfig.legacy_windows_stdio`
- * :c:var:`!Py_FileSystemDefaultEncoding`: use :c:member:`PyConfig.filesystem_encoding`
- * :c:var:`!Py_HasFileSystemDefaultEncoding`: use :c:member:`PyConfig.filesystem_encoding`
- * :c:var:`!Py_FileSystemDefaultEncodeErrors`: use :c:member:`PyConfig.filesystem_errors`
- * :c:var:`!Py_UTF8Mode`: use :c:member:`PyPreConfig.utf8_mode` (see :c:func:`Py_PreInitialize`)
-
- The :c:func:`Py_InitializeFromConfig` API should be used with
- :c:type:`PyConfig` instead.
-
-Pending Removal in Python 3.15
-------------------------------
-
-* The bundled copy of ``libmpdecimal``.
-* :c:func:`PyImport_ImportModuleNoBlock`: use :c:func:`PyImport_ImportModule`.
-* :c:func:`PyWeakref_GET_OBJECT`: use :c:func:`PyWeakref_GetRef` instead.
-* :c:func:`PyWeakref_GetObject`: use :c:func:`PyWeakref_GetRef` instead.
-* :c:type:`!Py_UNICODE_WIDE` type: use :c:type:`wchar_t` instead.
-* :c:type:`Py_UNICODE` type: use :c:type:`wchar_t` instead.
-* Python initialization functions:
-
- * :c:func:`PySys_ResetWarnOptions`: clear :data:`sys.warnoptions` and
- :data:`!warnings.filters` instead.
- * :c:func:`Py_GetExecPrefix`: get :data:`sys.exec_prefix` instead.
- * :c:func:`Py_GetPath`: get :data:`sys.path` instead.
- * :c:func:`Py_GetPrefix`: get :data:`sys.prefix` instead.
- * :c:func:`Py_GetProgramFullPath`: get :data:`sys.executable` instead.
- * :c:func:`Py_GetProgramName`: get :data:`sys.executable` instead.
- * :c:func:`Py_GetPythonHome`: get :c:member:`PyConfig.home` or
- :envvar:`PYTHONHOME` environment variable instead.
+.. Add deprecations above alphabetically, not here at the end.
-Pending Removal in Future Versions
-----------------------------------
-
-The following APIs were deprecated in earlier Python versions and will be
-removed, although there is currently no date scheduled for their removal.
-
-* :c:macro:`Py_TPFLAGS_HAVE_FINALIZE`: no needed since Python 3.8.
-* :c:func:`PyErr_Fetch`: use :c:func:`PyErr_GetRaisedException`.
-* :c:func:`PyErr_NormalizeException`: use :c:func:`PyErr_GetRaisedException`.
-* :c:func:`PyErr_Restore`: use :c:func:`PyErr_SetRaisedException`.
-* :c:func:`PyModule_GetFilename`: use :c:func:`PyModule_GetFilenameObject`.
-* :c:func:`PyOS_AfterFork`: use :c:func:`PyOS_AfterFork_Child()`.
-* :c:func:`PySlice_GetIndicesEx`.
-* :c:func:`!PyUnicode_AsDecodedObject`.
-* :c:func:`!PyUnicode_AsDecodedUnicode`.
-* :c:func:`!PyUnicode_AsEncodedObject`.
-* :c:func:`!PyUnicode_AsEncodedUnicode`.
-* :c:func:`PyUnicode_READY`: not needed since Python 3.12.
-* :c:func:`!_PyErr_ChainExceptions`.
-* :c:member:`!PyBytesObject.ob_shash` member:
- call :c:func:`PyObject_Hash` instead.
-* :c:member:`!PyDictObject.ma_version_tag` member.
-* TLS API:
-
- * :c:func:`PyThread_create_key`: use :c:func:`PyThread_tss_alloc`.
- * :c:func:`PyThread_delete_key`: use :c:func:`PyThread_tss_free`.
- * :c:func:`PyThread_set_key_value`: use :c:func:`PyThread_tss_set`.
- * :c:func:`PyThread_get_key_value`: use :c:func:`PyThread_tss_get`.
- * :c:func:`PyThread_delete_key_value`: use :c:func:`PyThread_tss_delete`.
- * :c:func:`PyThread_ReInitTLS`: no longer needed.
+.. include:: ../deprecations/c-api-pending-removal-in-3.14.rst
-* Remove undocumented ``PY_TIMEOUT_MAX`` constant from the limited C API.
- (Contributed by Victor Stinner in :gh:`110014`.)
+.. include:: ../deprecations/c-api-pending-removal-in-3.15.rst
+.. include:: ../deprecations/c-api-pending-removal-in-future.rst
Regression Test Changes
=======================
diff --git a/Doc/whatsnew/3.14.rst b/Doc/whatsnew/3.14.rst
index 7450597e8597ad..aecc7cabd0d1f5 100644
--- a/Doc/whatsnew/3.14.rst
+++ b/Doc/whatsnew/3.14.rst
@@ -422,6 +422,12 @@ Deprecated
:c:macro:`!isfinite` available from :file:`math.h`
since C99. (Contributed by Sergey B Kirpichev in :gh:`119613`.)
+.. Add deprecations above alphabetically, not here at the end.
+
+.. include:: ../deprecations/c-api-pending-removal-in-3.15.rst
+
+.. include:: ../deprecations/c-api-pending-removal-in-future.rst
+
Removed
-------
From 78df1043dbdce5c989600616f9f87b4ee72944e5 Mon Sep 17 00:00:00 2001
From: Seth Michael Larson
Date: Mon, 29 Jul 2024 16:44:35 -0500
Subject: [PATCH 072/139] gh-122133: Authenticate socket connection for
`socket.socketpair()` fallback (GH-122134)
* Authenticate socket connection for `socket.socketpair()` fallback when the platform does not have a native `socketpair` C API. We authenticate in-process using `getsockname` and `getpeername` (thanks to Nathaniel J Smith for that suggestion).
Co-authored-by: Gregory P. Smith
---
Lib/socket.py | 17 +++
Lib/test/test_socket.py | 128 +++++++++++++++++-
...-07-22-13-11-28.gh-issue-122133.0mPeta.rst | 5 +
3 files changed, 147 insertions(+), 3 deletions(-)
create mode 100644 Misc/NEWS.d/next/Security/2024-07-22-13-11-28.gh-issue-122133.0mPeta.rst
diff --git a/Lib/socket.py b/Lib/socket.py
index 524ce1361b9091..2e6043cbdb8005 100644
--- a/Lib/socket.py
+++ b/Lib/socket.py
@@ -650,6 +650,23 @@ def socketpair(family=AF_INET, type=SOCK_STREAM, proto=0):
raise
finally:
lsock.close()
+
+ # Authenticating avoids using a connection from something else that was
+ # able to connect to {host}:{port} instead of us.
+ # We expect only AF_INET and AF_INET6 families.
+ try:
+ if (
+ ssock.getsockname() != csock.getpeername()
+ or csock.getsockname() != ssock.getpeername()
+ ):
+ raise ConnectionError("Unexpected peer connection")
+ except:
+ # getsockname() and getpeername() can fail
+ # if either socket isn't connected.
+ ssock.close()
+ csock.close()
+ raise
+
return (ssock, csock)
__all__.append("socketpair")
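In spirit, the check added above verifies that each end of the pair really is talking to the other. A small standalone sketch of the same verification (hypothetical helper name, not part of the patch):

    import socket

    def verify_pair(ssock, csock):
        # Each side's local address must be the other side's peer address.
        if (ssock.getsockname() != csock.getpeername()
                or csock.getsockname() != ssock.getpeername()):
            raise ConnectionError("Unexpected peer connection")

    a, b = socket.socketpair()
    verify_pair(a, b)   # a genuine pair passes the check
    a.close()
    b.close()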
diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py
index ce0f64b43ed49f..bb65c3c5993b88 100644
--- a/Lib/test/test_socket.py
+++ b/Lib/test/test_socket.py
@@ -592,19 +592,27 @@ class SocketPairTest(unittest.TestCase, ThreadableTest):
def __init__(self, methodName='runTest'):
unittest.TestCase.__init__(self, methodName=methodName)
ThreadableTest.__init__(self)
+ self.cli = None
+ self.serv = None
+
+ def socketpair(self):
+ # To be overridden by some child classes.
+ return socket.socketpair()
def setUp(self):
- self.serv, self.cli = socket.socketpair()
+ self.serv, self.cli = self.socketpair()
def tearDown(self):
- self.serv.close()
+ if self.serv:
+ self.serv.close()
self.serv = None
def clientSetUp(self):
pass
def clientTearDown(self):
- self.cli.close()
+ if self.cli:
+ self.cli.close()
self.cli = None
ThreadableTest.clientTearDown(self)
@@ -4852,6 +4860,120 @@ def _testSend(self):
self.assertEqual(msg, MSG)
+class PurePythonSocketPairTest(SocketPairTest):
+
+ # Explicitly use an AF_INET or AF_INET6 socketpair so that, regardless of
+ # platform, the code path exercised is the pure Python fallback used when
+ # `_socket.socketpair` does not exist. (AF_INET does not work with
+ # _socket.socketpair on many platforms.)
+ def socketpair(self):
+ # called by super().setUp().
+ try:
+ return socket.socketpair(socket.AF_INET6)
+ except OSError:
+ return socket.socketpair(socket.AF_INET)
+
+ # Local imports in this class make for easy security fix backporting.
+
+ def setUp(self):
+ import _socket
+ self._orig_sp = getattr(_socket, 'socketpair', None)
+ if self._orig_sp is not None:
+ # This forces the version using the non-OS provided socketpair
+ # emulation via an AF_INET socket in Lib/socket.py.
+ del _socket.socketpair
+ import importlib
+ global socket
+ socket = importlib.reload(socket)
+ else:
+ pass # This platform already uses the non-OS provided version.
+ super().setUp()
+
+ def tearDown(self):
+ super().tearDown()
+ import _socket
+ if self._orig_sp is not None:
+ # Restore the default socket.socketpair definition.
+ _socket.socketpair = self._orig_sp
+ import importlib
+ global socket
+ socket = importlib.reload(socket)
+
+ def test_recv(self):
+ msg = self.serv.recv(1024)
+ self.assertEqual(msg, MSG)
+
+ def _test_recv(self):
+ self.cli.send(MSG)
+
+ def test_send(self):
+ self.serv.send(MSG)
+
+ def _test_send(self):
+ msg = self.cli.recv(1024)
+ self.assertEqual(msg, MSG)
+
+ def test_ipv4(self):
+ cli, srv = socket.socketpair(socket.AF_INET)
+ cli.close()
+ srv.close()
+
+ def _test_ipv4(self):
+ pass
+
+ @unittest.skipIf(not hasattr(_socket, 'IPPROTO_IPV6') or
+ not hasattr(_socket, 'IPV6_V6ONLY'),
+ "IPV6_V6ONLY option not supported")
+ @unittest.skipUnless(socket_helper.IPV6_ENABLED, 'IPv6 required for this test')
+ def test_ipv6(self):
+ cli, srv = socket.socketpair(socket.AF_INET6)
+ cli.close()
+ srv.close()
+
+ def _test_ipv6(self):
+ pass
+
+ def test_injected_authentication_failure(self):
+ orig_getsockname = socket.socket.getsockname
+ inject_sock = None
+
+ def inject_getsocketname(self):
+ nonlocal inject_sock
+ sockname = orig_getsockname(self)
+ # Connect to the listening socket ahead of the
+ # client socket.
+ if inject_sock is None:
+ inject_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ inject_sock.setblocking(False)
+ try:
+ inject_sock.connect(sockname[:2])
+ except (BlockingIOError, InterruptedError):
+ pass
+ inject_sock.setblocking(True)
+ return sockname
+
+ sock1 = sock2 = None
+ try:
+ socket.socket.getsockname = inject_getsocketname
+ with self.assertRaises(OSError):
+ sock1, sock2 = socket.socketpair()
+ finally:
+ socket.socket.getsockname = orig_getsockname
+ if inject_sock:
+ inject_sock.close()
+ if sock1: # This cleanup isn't needed on a successful test.
+ sock1.close()
+ if sock2:
+ sock2.close()
+
+ def _test_injected_authentication_failure(self):
+ # No-op. Exists for base class threading infrastructure to call.
+ # We could refactor this test into its own lesser class along with the
+ # setUp and tearDown code to construct an ideal; it is simpler to keep
+ # it here and live with extra overhead on this _one_ failure test.
+ pass
+
+
class NonBlockingTCPTests(ThreadedTCPSocketTest):
def __init__(self, methodName='runTest'):
diff --git a/Misc/NEWS.d/next/Security/2024-07-22-13-11-28.gh-issue-122133.0mPeta.rst b/Misc/NEWS.d/next/Security/2024-07-22-13-11-28.gh-issue-122133.0mPeta.rst
new file mode 100644
index 00000000000000..3544eb3824d0da
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2024-07-22-13-11-28.gh-issue-122133.0mPeta.rst
@@ -0,0 +1,5 @@
+Authenticate the socket connection for the ``socket.socketpair()`` fallback
+on platforms where ``AF_UNIX`` is not available like Windows.
+
+Patch by Gregory P. Smith and Seth Larson. Reported by Ellie
+
From 7797182b78baf78f64fe16f436aa2279cf6afc23 Mon Sep 17 00:00:00 2001
From: Brandt Bucher
Date: Mon, 29 Jul 2024 14:49:17 -0700
Subject: [PATCH 073/139] GH-118093: Improve handling of short and mid-loop
traces (GH-122252)
---
Objects/genobject.c | 5 ++--
Python/optimizer.c | 64 ++++++++++++++++++++++-----------------------
2 files changed, 34 insertions(+), 35 deletions(-)
diff --git a/Objects/genobject.c b/Objects/genobject.c
index c204ac04b480a4..b281af8d7e1f4e 100644
--- a/Objects/genobject.c
+++ b/Objects/genobject.c
@@ -342,8 +342,9 @@ _PyGen_yf(PyGenObject *gen)
{
if (gen->gi_frame_state == FRAME_SUSPENDED_YIELD_FROM) {
_PyInterpreterFrame *frame = &gen->gi_iframe;
- assert(is_resume(frame->instr_ptr));
- assert((frame->instr_ptr->op.arg & RESUME_OPARG_LOCATION_MASK) >= RESUME_AFTER_YIELD_FROM);
+ // GH-122390: These asserts are wrong in the presence of ENTER_EXECUTOR!
+ // assert(is_resume(frame->instr_ptr));
+ // assert((frame->instr_ptr->op.arg & RESUME_OPARG_LOCATION_MASK) >= RESUME_AFTER_YIELD_FROM);
return PyStackRef_AsPyObjectNew(_PyFrame_StackPeek(frame));
}
return NULL;
diff --git a/Python/optimizer.c b/Python/optimizer.c
index 7b875af2aae898..ce8a36575cde1d 100644
--- a/Python/optimizer.c
+++ b/Python/optimizer.c
@@ -503,8 +503,7 @@ add_to_trace(
if (trace_stack_depth >= TRACE_STACK_SIZE) { \
DPRINTF(2, "Trace stack overflow\n"); \
OPT_STAT_INC(trace_stack_overflow); \
- trace_length = 0; \
- goto done; \
+ return 0; \
} \
assert(func == NULL || func->func_code == (PyObject *)code); \
trace_stack[trace_stack_depth].func = func; \
@@ -550,6 +549,7 @@ translate_bytecode_to_trace(
} trace_stack[TRACE_STACK_SIZE];
int trace_stack_depth = 0;
int confidence = CONFIDENCE_RANGE; // Adjusted by branch instructions
+ bool jump_seen = false;
#ifdef Py_DEBUG
char *python_lltrace = Py_GETENV("PYTHON_LLTRACE");
@@ -568,7 +568,6 @@ translate_bytecode_to_trace(
ADD_TO_TRACE(_START_EXECUTOR, 0, (uintptr_t)instr, INSTR_IP(instr, code));
uint32_t target = 0;
-top: // Jump here after _PUSH_FRAME or likely branches
for (;;) {
target = INSTR_IP(instr, code);
// Need space for _DEOPT
@@ -577,6 +576,13 @@ translate_bytecode_to_trace(
uint32_t opcode = instr->op.code;
uint32_t oparg = instr->op.arg;
+ if (!progress_needed && instr == initial_instr) {
+ // We have looped around to the start:
+ RESERVE(1);
+ ADD_TO_TRACE(_JUMP_TO_TOP, 0, 0, 0);
+ goto done;
+ }
+
DPRINTF(2, "%d: %s(%d)\n", target, _PyOpcode_OpName[opcode], oparg);
if (opcode == ENTER_EXECUTOR) {
@@ -603,30 +609,21 @@ translate_bytecode_to_trace(
/* Special case the first instruction,
* so that we can guarantee forward progress */
if (progress_needed) {
- progress_needed = false;
- if (opcode == JUMP_BACKWARD || opcode == JUMP_BACKWARD_NO_INTERRUPT) {
- instr += 1 + _PyOpcode_Caches[opcode] - (int32_t)oparg;
- initial_instr = instr;
- if (opcode == JUMP_BACKWARD) {
- ADD_TO_TRACE(_TIER2_RESUME_CHECK, 0, 0, target);
- }
- continue;
- }
- else {
- if (OPCODE_HAS_EXIT(opcode) || OPCODE_HAS_DEOPT(opcode)) {
- opcode = _PyOpcode_Deopt[opcode];
- }
- assert(!OPCODE_HAS_EXIT(opcode));
- assert(!OPCODE_HAS_DEOPT(opcode));
+ if (OPCODE_HAS_EXIT(opcode) || OPCODE_HAS_DEOPT(opcode)) {
+ opcode = _PyOpcode_Deopt[opcode];
}
+ assert(!OPCODE_HAS_EXIT(opcode));
+ assert(!OPCODE_HAS_DEOPT(opcode));
}
if (OPCODE_HAS_EXIT(opcode)) {
- // Make space for exit code
+ // Make space for side exit and final _EXIT_TRACE:
+ RESERVE_RAW(2, "_EXIT_TRACE");
max_length--;
}
if (OPCODE_HAS_ERROR(opcode)) {
- // Make space for error code
+ // Make space for error stub and final _EXIT_TRACE:
+ RESERVE_RAW(2, "_ERROR_POP_N");
max_length--;
}
switch (opcode) {
@@ -672,19 +669,18 @@ translate_bytecode_to_trace(
}
case JUMP_BACKWARD:
+ ADD_TO_TRACE(_CHECK_PERIODIC, 0, 0, target);
+ _Py_FALLTHROUGH;
case JUMP_BACKWARD_NO_INTERRUPT:
{
- _Py_CODEUNIT *target = instr + 1 + _PyOpcode_Caches[opcode] - (int)oparg;
- if (target == initial_instr) {
- /* We have looped round to the start */
- RESERVE(1);
- ADD_TO_TRACE(_JUMP_TO_TOP, 0, 0, 0);
- }
- else {
+ instr += 1 + _PyOpcode_Caches[_PyOpcode_Deopt[opcode]] - (int)oparg;
+ if (jump_seen) {
OPT_STAT_INC(inner_loop);
DPRINTF(2, "JUMP_BACKWARD not to top ends trace\n");
+ goto done;
}
- goto done;
+ jump_seen = true;
+ goto top;
}
case JUMP_FORWARD:
@@ -904,6 +900,9 @@ translate_bytecode_to_trace(
assert(instr->op.code == POP_TOP);
instr++;
}
+ top:
+ // Jump here after _PUSH_FRAME or likely branches.
+ progress_needed = false;
} // End for (;;)
done:
@@ -911,16 +910,15 @@ translate_bytecode_to_trace(
TRACE_STACK_POP();
}
assert(code == initial_code);
- // Skip short traces like _SET_IP, LOAD_FAST, _SET_IP, _EXIT_TRACE
- if (progress_needed || trace_length < 5) {
+ // Skip short traces where we can't even translate a single instruction:
+ if (progress_needed) {
OPT_STAT_INC(trace_too_short);
DPRINTF(2,
- "No trace for %s (%s:%d) at byte offset %d (%s)\n",
+ "No trace for %s (%s:%d) at byte offset %d (no progress)\n",
PyUnicode_AsUTF8(code->co_qualname),
PyUnicode_AsUTF8(code->co_filename),
code->co_firstlineno,
- 2 * INSTR_IP(initial_instr, code),
- progress_needed ? "no progress" : "too short");
+ 2 * INSTR_IP(initial_instr, code));
return 0;
}
if (trace[trace_length-1].opcode != _JUMP_TO_TOP) {
From ac8da34621a574cd5773217404757a294025ba49 Mon Sep 17 00:00:00 2001
From: Sam Gross
Date: Mon, 29 Jul 2024 18:15:03 -0400
Subject: [PATCH 074/139] gh-122420: Fix accounting for immortal interned
strings in refleak.py (GH-122421)
The `_PyUnicode_Intern*` functions already adjust the total refcount, so
we don't want to readjust it in refleak.py.
---
Lib/test/libregrtest/refleak.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Lib/test/libregrtest/refleak.py b/Lib/test/libregrtest/refleak.py
index 20b05954c762ff..fa447a4336a399 100644
--- a/Lib/test/libregrtest/refleak.py
+++ b/Lib/test/libregrtest/refleak.py
@@ -145,7 +145,7 @@ def get_pooled_int(value):
# Use an internal-only keyword argument that mypy doesn't know yet
_only_immortal=True) # type: ignore[call-arg]
alloc_after = getallocatedblocks() - interned_immortal_after
- rc_after = gettotalrefcount() - interned_immortal_after * 2
+ rc_after = gettotalrefcount()
fd_after = fd_count()
rc_deltas[i] = get_pooled_int(rc_after - rc_before)
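To see why the extra subtraction skewed the numbers, here is the accounting in miniature (all figures below are made up for illustration):

    # sys.gettotalrefcount() is already adjusted by the _PyUnicode_Intern*
    # functions when strings become immortal, so it must be used as-is.
    total_refcount = 1_000_000   # hypothetical value from gettotalrefcount()
    interned_immortal = 500      # hypothetical count of immortal interned strings

    rc_after_old = total_refcount - interned_immortal * 2  # double-counted
    rc_after_new = total_refcount                           # the fix

    print(rc_after_new - rc_after_old)  # 1000: spurious offset in the old delta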
From 11ad731f4fa096c8b5d71731f1182d893a88c9b4 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Tue, 30 Jul 2024 04:49:00 +0100
Subject: [PATCH 075/139] GH-121970: Extract ``audit_events`` into a new
extension (#122325)
---
Doc/conf.py | 1 +
Doc/tools/extensions/audit_events.py | 262 +++++++++++++++++++++++++++
Doc/tools/extensions/pyspecific.py | 207 ---------------------
3 files changed, 263 insertions(+), 207 deletions(-)
create mode 100644 Doc/tools/extensions/audit_events.py
diff --git a/Doc/conf.py b/Doc/conf.py
index 4841b69e380085..3860d146a27e85 100644
--- a/Doc/conf.py
+++ b/Doc/conf.py
@@ -20,6 +20,7 @@
# ---------------------
extensions = [
+ 'audit_events',
'c_annotations',
'glossary_search',
'lexers',
diff --git a/Doc/tools/extensions/audit_events.py b/Doc/tools/extensions/audit_events.py
new file mode 100644
index 00000000000000..d0f08522d21ea2
--- /dev/null
+++ b/Doc/tools/extensions/audit_events.py
@@ -0,0 +1,262 @@
+"""Support for documenting audit events."""
+
+from __future__ import annotations
+
+import re
+from typing import TYPE_CHECKING
+
+from docutils import nodes
+from sphinx.errors import NoUri
+from sphinx.locale import _ as sphinx_gettext
+from sphinx.transforms.post_transforms import SphinxPostTransform
+from sphinx.util import logging
+from sphinx.util.docutils import SphinxDirective
+
+if TYPE_CHECKING:
+ from collections.abc import Iterator
+
+ from sphinx.application import Sphinx
+ from sphinx.builders import Builder
+ from sphinx.environment import BuildEnvironment
+
+logger = logging.getLogger(__name__)
+
+# This list of sets contains allowable synonyms for event argument names.
+# If two names are in the same set, they are treated as equal for the
+# purposes of warning. This won't help if the number of arguments is
+# different!
+_SYNONYMS = [
+ frozenset({"file", "path", "fd"}),
+]
+
+
+class AuditEvents:
+ def __init__(self) -> None:
+ self.events: dict[str, list[str]] = {}
+ self.sources: dict[str, list[tuple[str, str]]] = {}
+
+ def __iter__(self) -> Iterator[tuple[str, list[str], tuple[str, str]]]:
+ for name, args in self.events.items():
+ for source in self.sources[name]:
+ yield name, args, source
+
+ def add_event(
+ self, name, args: list[str], source: tuple[str, str]
+ ) -> None:
+ if name in self.events:
+ self._check_args_match(name, args)
+ else:
+ self.events[name] = args
+ self.sources.setdefault(name, []).append(source)
+
+ def _check_args_match(self, name: str, args: list[str]) -> None:
+ current_args = self.events[name]
+ msg = (
+ f"Mismatched arguments for audit-event {name}: "
+ f"{current_args!r} != {args!r}"
+ )
+ if current_args == args:
+ return
+ if len(current_args) != len(args):
+ logger.warning(msg)
+ return
+ for a1, a2 in zip(current_args, args, strict=False):
+ if a1 == a2:
+ continue
+ if any(a1 in s and a2 in s for s in _SYNONYMS):
+ continue
+ logger.warning(msg)
+ return
+
+ def id_for(self, name) -> str:
+ source_count = len(self.sources.get(name, ()))
+ name_clean = re.sub(r"\W", "_", name)
+ return f"audit_event_{name_clean}_{source_count}"
+
+ def rows(self) -> Iterator[tuple[str, list[str], list[tuple[str, str]]]]:
+ for name in sorted(self.events.keys()):
+ yield name, self.events[name], self.sources[name]
+
+
+def initialise_audit_events(app: Sphinx) -> None:
+ """Initialise the audit_events attribute on the environment."""
+ if not hasattr(app.env, "audit_events"):
+ app.env.audit_events = AuditEvents()
+
+
+def audit_events_purge(
+ app: Sphinx, env: BuildEnvironment, docname: str
+) -> None:
+ """This is to remove traces of removed documents from env.audit_events."""
+ fresh_audit_events = AuditEvents()
+ for name, args, (doc, target) in env.audit_events:
+ if doc != docname:
+ fresh_audit_events.add_event(name, args, (doc, target))
+
+
+def audit_events_merge(
+ app: Sphinx,
+ env: BuildEnvironment,
+ docnames: list[str],
+ other: BuildEnvironment,
+) -> None:
+ """In Sphinx parallel builds, this merges audit_events from subprocesses."""
+ for name, args, source in other.audit_events:
+ env.audit_events.add_event(name, args, source)
+
+
+class AuditEvent(SphinxDirective):
+ has_content = True
+ required_arguments = 1
+ optional_arguments = 2
+ final_argument_whitespace = True
+
+ _label = [
+ sphinx_gettext(
+ "Raises an :ref:`auditing event ` "
+ "{name} with no arguments."
+ ),
+ sphinx_gettext(
+ "Raises an :ref:`auditing event ` "
+ "{name} with argument {args}."
+ ),
+ sphinx_gettext(
+ "Raises an :ref:`auditing event ` "
+ "{name} with arguments {args}."
+ ),
+ ]
+
+ def run(self) -> list[nodes.paragraph]:
+ name = self.arguments[0]
+ if len(self.arguments) >= 2 and self.arguments[1]:
+ args = [
+ arg
+ for argument in self.arguments[1].strip("'\"").split(",")
+ if (arg := argument.strip())
+ ]
+ else:
+ args = []
+ ids = []
+ try:
+ target = self.arguments[2].strip("\"'")
+ except (IndexError, TypeError):
+ target = None
+ if not target:
+ target = self.env.audit_events.id_for(name)
+ ids.append(target)
+ self.env.audit_events.add_event(name, args, (self.env.docname, target))
+
+ node = nodes.paragraph("", classes=["audit-hook"], ids=ids)
+ self.set_source_info(node)
+ if self.content:
+ self.state.nested_parse(self.content, self.content_offset, node)
+ else:
+ num_args = min(2, len(args))
+ text = self._label[num_args].format(
+ name=f"``{name}``",
+ args=", ".join(f"``{a}``" for a in args),
+ )
+ parsed, messages = self.state.inline_text(text, self.lineno)
+ node += parsed
+ node += messages
+ return [node]
+
+
+class audit_event_list(nodes.General, nodes.Element): # noqa: N801
+ pass
+
+
+class AuditEventListDirective(SphinxDirective):
+ def run(self) -> list[audit_event_list]:
+ return [audit_event_list()]
+
+
+class AuditEventListTransform(SphinxPostTransform):
+ default_priority = 500
+
+ def run(self) -> None:
+ if self.document.next_node(audit_event_list) is None:
+ return
+
+ table = self._make_table(self.app.builder, self.env.docname)
+ for node in self.document.findall(audit_event_list):
+ node.replace_self(table)
+
+ def _make_table(self, builder: Builder, docname: str) -> nodes.table:
+ table = nodes.table(cols=3)
+ group = nodes.tgroup(
+ "",
+ nodes.colspec(colwidth=30),
+ nodes.colspec(colwidth=55),
+ nodes.colspec(colwidth=15),
+ cols=3,
+ )
+ head = nodes.thead()
+ body = nodes.tbody()
+
+ table += group
+ group += head
+ group += body
+
+ head += nodes.row(
+ "",
+ nodes.entry("", nodes.paragraph("", "Audit event")),
+ nodes.entry("", nodes.paragraph("", "Arguments")),
+ nodes.entry("", nodes.paragraph("", "References")),
+ )
+
+ for name, args, sources in builder.env.audit_events.rows():
+ body += self._make_row(builder, docname, name, args, sources)
+
+ return table
+
+ @staticmethod
+ def _make_row(
+ builder: Builder,
+ docname: str,
+ name: str,
+ args: list[str],
+ sources: list[tuple[str, str]],
+ ) -> nodes.row:
+ row = nodes.row()
+ name_node = nodes.paragraph("", nodes.Text(name))
+ row += nodes.entry("", name_node)
+
+ args_node = nodes.paragraph()
+ for arg in args:
+ args_node += nodes.literal(arg, arg)
+ args_node += nodes.Text(", ")
+ if len(args_node.children) > 0:
+ args_node.children.pop() # remove trailing comma
+ row += nodes.entry("", args_node)
+
+ backlinks_node = nodes.paragraph()
+ backlinks = enumerate(sorted(set(sources)), start=1)
+ for i, (doc, label) in backlinks:
+ if isinstance(label, str):
+ ref = nodes.reference("", f"[{i}]", internal=True)
+ try:
+ target = (
+ f"{builder.get_relative_uri(docname, doc)}#{label}"
+ )
+ except NoUri:
+ continue
+ else:
+ ref["refuri"] = target
+ backlinks_node += ref
+ row += nodes.entry("", backlinks_node)
+ return row
+
+
+def setup(app: Sphinx):
+ app.add_directive("audit-event", AuditEvent)
+ app.add_directive("audit-event-table", AuditEventListDirective)
+ app.add_post_transform(AuditEventListTransform)
+ app.connect("builder-inited", initialise_audit_events)
+ app.connect("env-purge-doc", audit_events_purge)
+ app.connect("env-merge-info", audit_events_merge)
+ return {
+ "version": "1.0",
+ "parallel_read_safe": True,
+ "parallel_write_safe": True,
+ }
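For a feel of the data model the new extension maintains, a small sketch that drives the AuditEvents container directly (requires Sphinx installed and Doc/tools/extensions on sys.path; the docnames and targets are invented):

    from audit_events import AuditEvents

    events = AuditEvents()
    events.add_event("os.system", ["command"],
                     ("library/os", "audit_event_os_system_0"))
    # A second source for the same event: the argument lists must agree,
    # otherwise add_event() logs a warning.
    events.add_event("os.system", ["command"],
                     ("library/subprocess", "audit_event_os_system_1"))

    for name, args, sources in events.rows():
        print(name, args, sources)
    # os.system ['command'] [('library/os', ...), ('library/subprocess', ...)]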
diff --git a/Doc/tools/extensions/pyspecific.py b/Doc/tools/extensions/pyspecific.py
index f5be19a8d49cc9..791d9296a975e7 100644
--- a/Doc/tools/extensions/pyspecific.py
+++ b/Doc/tools/extensions/pyspecific.py
@@ -15,7 +15,6 @@
from time import asctime
from pprint import pformat
-import sphinx
from docutils import nodes
from docutils.io import StringOutput
from docutils.parsers.rst import directives
@@ -24,7 +23,6 @@
from sphinx.builders import Builder
from sphinx.domains.changeset import VersionChange, versionlabels, versionlabel_classes
from sphinx.domains.python import PyFunction, PyMethod, PyModule
-from sphinx.errors import NoUri
from sphinx.locale import _ as sphinx_gettext
from sphinx.util import logging
from sphinx.util.docutils import SphinxDirective
@@ -184,142 +182,6 @@ def parse_platforms(self):
return platforms
-# Support for documenting audit event
-
-def audit_events_purge(app, env, docname):
- """This is to remove from env.all_audit_events old traces of removed
- documents.
- """
- if not hasattr(env, 'all_audit_events'):
- return
- fresh_all_audit_events = {}
- for name, event in env.all_audit_events.items():
- event["source"] = [(d, t) for d, t in event["source"] if d != docname]
- if event["source"]:
- # Only keep audit_events that have at least one source.
- fresh_all_audit_events[name] = event
- env.all_audit_events = fresh_all_audit_events
-
-
-def audit_events_merge(app, env, docnames, other):
- """In Sphinx parallel builds, this merges env.all_audit_events from
- subprocesses.
-
- all_audit_events is a dict of names, with values like:
- {'source': [(docname, target), ...], 'args': args}
- """
- if not hasattr(other, 'all_audit_events'):
- return
- if not hasattr(env, 'all_audit_events'):
- env.all_audit_events = {}
- for name, value in other.all_audit_events.items():
- if name in env.all_audit_events:
- env.all_audit_events[name]["source"].extend(value["source"])
- else:
- env.all_audit_events[name] = value
-
-
-class AuditEvent(SphinxDirective):
-
- has_content = True
- required_arguments = 1
- optional_arguments = 2
- final_argument_whitespace = True
-
- _label = [
- sphinx_gettext("Raises an :ref:`auditing event ` {name} with no arguments."),
- sphinx_gettext("Raises an :ref:`auditing event ` {name} with argument {args}."),
- sphinx_gettext("Raises an :ref:`auditing event ` {name} with arguments {args}."),
- ]
-
- @property
- def logger(self):
- cls = type(self)
- return logging.getLogger(cls.__module__ + "." + cls.__name__)
-
- def run(self):
- name = self.arguments[0]
- if len(self.arguments) >= 2 and self.arguments[1]:
- args = (a.strip() for a in self.arguments[1].strip("'\"").split(","))
- args = [a for a in args if a]
- else:
- args = []
-
- label = self._label[min(2, len(args))]
- text = label.format(name="``{}``".format(name),
- args=", ".join("``{}``".format(a) for a in args if a))
-
- if not hasattr(self.env, 'all_audit_events'):
- self.env.all_audit_events = {}
-
- new_info = {
- 'source': [],
- 'args': args
- }
- info = self.env.all_audit_events.setdefault(name, new_info)
- if info is not new_info:
- if not self._do_args_match(info['args'], new_info['args']):
- self.logger.warning(
- "Mismatched arguments for audit-event {}: {!r} != {!r}"
- .format(name, info['args'], new_info['args'])
- )
-
- ids = []
- try:
- target = self.arguments[2].strip("\"'")
- except (IndexError, TypeError):
- target = None
- if not target:
- target = "audit_event_{}_{}".format(
- re.sub(r'\W', '_', name),
- len(info['source']),
- )
- ids.append(target)
-
- info['source'].append((self.env.docname, target))
-
- pnode = nodes.paragraph(text, classes=["audit-hook"], ids=ids)
- pnode.line = self.lineno
- if self.content:
- self.state.nested_parse(self.content, self.content_offset, pnode)
- else:
- n, m = self.state.inline_text(text, self.lineno)
- pnode.extend(n + m)
-
- return [pnode]
-
- # This list of sets are allowable synonyms for event argument names.
- # If two names are in the same set, they are treated as equal for the
- # purposes of warning. This won't help if number of arguments is
- # different!
- _SYNONYMS = [
- {"file", "path", "fd"},
- ]
-
- def _do_args_match(self, args1, args2):
- if args1 == args2:
- return True
- if len(args1) != len(args2):
- return False
- for a1, a2 in zip(args1, args2):
- if a1 == a2:
- continue
- if any(a1 in s and a2 in s for s in self._SYNONYMS):
- continue
- return False
- return True
-
-
-class audit_event_list(nodes.General, nodes.Element):
- pass
-
-
-class AuditEventListDirective(SphinxDirective):
-
- def run(self):
- return [audit_event_list('')]
-
-
# Support for documenting decorators
class PyDecoratorMixin(object):
@@ -583,70 +445,6 @@ def parse_monitoring_event(env, sig, signode):
return sig
-def process_audit_events(app, doctree, fromdocname):
- for node in doctree.findall(audit_event_list):
- break
- else:
- return
-
- env = app.builder.env
-
- table = nodes.table(cols=3)
- group = nodes.tgroup(
- '',
- nodes.colspec(colwidth=30),
- nodes.colspec(colwidth=55),
- nodes.colspec(colwidth=15),
- cols=3,
- )
- head = nodes.thead()
- body = nodes.tbody()
-
- table += group
- group += head
- group += body
-
- row = nodes.row()
- row += nodes.entry('', nodes.paragraph('', nodes.Text('Audit event')))
- row += nodes.entry('', nodes.paragraph('', nodes.Text('Arguments')))
- row += nodes.entry('', nodes.paragraph('', nodes.Text('References')))
- head += row
-
- for name in sorted(getattr(env, "all_audit_events", ())):
- audit_event = env.all_audit_events[name]
-
- row = nodes.row()
- node = nodes.paragraph('', nodes.Text(name))
- row += nodes.entry('', node)
-
- node = nodes.paragraph()
- for i, a in enumerate(audit_event['args']):
- if i:
- node += nodes.Text(", ")
- node += nodes.literal(a, nodes.Text(a))
- row += nodes.entry('', node)
-
- node = nodes.paragraph()
- backlinks = enumerate(sorted(set(audit_event['source'])), start=1)
- for i, (doc, label) in backlinks:
- if isinstance(label, str):
- ref = nodes.reference("", nodes.Text("[{}]".format(i)), internal=True)
- try:
- ref['refuri'] = "{}#{}".format(
- app.builder.get_relative_uri(fromdocname, doc),
- label,
- )
- except NoUri:
- continue
- node += ref
- row += nodes.entry('', node)
-
- body += row
-
- for node in doctree.findall(audit_event_list):
- node.replace_self(table)
-
-
def patch_pairindextypes(app, _env) -> None:
"""Remove all entries from ``pairindextypes`` before writing POT files.
@@ -676,8 +474,6 @@ def setup(app):
app.add_role('gh', gh_issue_role)
app.add_directive('impl-detail', ImplementationDetail)
app.add_directive('availability', Availability)
- app.add_directive('audit-event', AuditEvent)
- app.add_directive('audit-event-table', AuditEventListDirective)
app.add_directive('deprecated-removed', DeprecatedRemoved)
app.add_builder(PydocTopicsBuilder)
app.add_object_type('opcode', 'opcode', '%s (opcode)', parse_opcode_signature)
@@ -692,7 +488,4 @@ def setup(app):
app.add_directive_to_domain('py', 'abstractmethod', PyAbstractMethod)
app.add_directive('miscnews', MiscNews)
app.connect('env-check-consistency', patch_pairindextypes)
- app.connect('doctree-resolved', process_audit_events)
- app.connect('env-merge-info', audit_events_merge)
- app.connect('env-purge-doc', audit_events_purge)
return {'version': '1.0', 'parallel_read_safe': True}
From 3833d27f985a62c4709dcd9dc73724fc19d46ebf Mon Sep 17 00:00:00 2001
From: Petr Viktorin
Date: Tue, 30 Jul 2024 09:37:58 +0200
Subject: [PATCH 076/139] gh-105733: Soft-deprecate ctypes.ARRAY, rather than
hard-deprecating it. (GH-122281)
Soft-deprecate ctypes.ARRAY, rather than hard-deprecating it.
Partially reverts 2211454fe210637ed7fabda12690dac6cc9a8149
---
Doc/library/ctypes.rst | 9 +++++++++
Doc/whatsnew/3.13.rst | 4 ++--
Lib/ctypes/__init__.py | 2 --
Lib/test/test_ctypes/test_arrays.py | 14 +-------------
.../2024-07-25-15-41-14.gh-issue-105733.o3koJA.rst | 2 ++
5 files changed, 14 insertions(+), 17 deletions(-)
create mode 100644 Misc/NEWS.d/next/Library/2024-07-25-15-41-14.gh-issue-105733.o3koJA.rst
diff --git a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst
index 9e69b3dc51a1ac..c2f928e16aa90c 100644
--- a/Doc/library/ctypes.rst
+++ b/Doc/library/ctypes.rst
@@ -2688,6 +2688,15 @@ Arrays and pointers
Array subclass constructors accept positional arguments, used to
initialize the elements in order.
+.. function:: ARRAY(type, length)
+
+ Create an array.
+ Equivalent to ``type * length``, where *type* is a
+ :mod:`ctypes` data type and *length* an integer.
+
+ This function is :term:`soft deprecated` in favor of multiplication.
+ There are no plans to remove it.
+
.. class:: _Pointer
diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst
index 0854631c832ef4..fbf19d1c9598e1 100644
--- a/Doc/whatsnew/3.13.rst
+++ b/Doc/whatsnew/3.13.rst
@@ -1494,8 +1494,8 @@ New Deprecations
(Contributed by Hugo van Kemenade in :gh:`80480`.)
* :mod:`ctypes`: Deprecate undocumented :func:`!ctypes.SetPointerType`
- and :func:`!ctypes.ARRAY` functions.
- Replace ``ctypes.ARRAY(item_type, size)`` with ``item_type * size``.
+ function. :term:`Soft-deprecate ` the :func:`ctypes.ARRAY`
+ function in favor of multiplication.
(Contributed by Victor Stinner in :gh:`105733`.)
* :mod:`decimal`: Deprecate non-standard format specifier "N" for
diff --git a/Lib/ctypes/__init__.py b/Lib/ctypes/__init__.py
index 721522caeeac92..cb3a61287bfe5d 100644
--- a/Lib/ctypes/__init__.py
+++ b/Lib/ctypes/__init__.py
@@ -324,8 +324,6 @@ def SetPointerType(pointer, cls):
del _pointer_type_cache[id(pointer)]
def ARRAY(typ, len):
- import warnings
- warnings._deprecated("ctypes.ARRAY", remove=(3, 15))
return typ * len
################################################################
diff --git a/Lib/test/test_ctypes/test_arrays.py b/Lib/test/test_ctypes/test_arrays.py
index 6846773d7069ae..c80fdff5de685d 100644
--- a/Lib/test/test_ctypes/test_arrays.py
+++ b/Lib/test/test_ctypes/test_arrays.py
@@ -1,8 +1,7 @@
import ctypes
import sys
import unittest
-import warnings
-from ctypes import (Structure, Array, sizeof, addressof,
+from ctypes import (Structure, Array, ARRAY, sizeof, addressof,
create_string_buffer, create_unicode_buffer,
c_char, c_wchar, c_byte, c_ubyte, c_short, c_ushort, c_int, c_uint,
c_long, c_ulonglong, c_float, c_double, c_longdouble)
@@ -17,13 +16,6 @@
c_long, c_ulonglong, c_float, c_double, c_longdouble
-def ARRAY(*args):
- # ignore DeprecationWarning in tests
- with warnings.catch_warnings():
- warnings.simplefilter('ignore', DeprecationWarning)
- return ctypes.ARRAY(*args)
-
-
class ArrayTestCase(unittest.TestCase):
def test_inheritance_hierarchy(self):
self.assertEqual(Array.mro(), [Array, _CData, object])
@@ -275,10 +267,6 @@ def test_bpo36504_signed_int_overflow(self):
def test_large_array(self, size):
c_char * size
- def test_deprecation(self):
- with self.assertWarns(DeprecationWarning):
- CharArray = ctypes.ARRAY(c_char, 3)
-
if __name__ == '__main__':
unittest.main()
diff --git a/Misc/NEWS.d/next/Library/2024-07-25-15-41-14.gh-issue-105733.o3koJA.rst b/Misc/NEWS.d/next/Library/2024-07-25-15-41-14.gh-issue-105733.o3koJA.rst
new file mode 100644
index 00000000000000..60c5e69d2f6f9c
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-07-25-15-41-14.gh-issue-105733.o3koJA.rst
@@ -0,0 +1,2 @@
+:func:`ctypes.ARRAY` is now :term:`soft deprecated`: it no longer emits deprecation
+warnings and is not scheduled for removal.
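In practical terms, both spellings keep working after this change and neither emits a warning; a quick sketch:

    import ctypes

    # Legacy spelling: soft deprecated, still supported, no DeprecationWarning.
    LegacyArray = ctypes.ARRAY(ctypes.c_int, 3)
    # Preferred spelling: multiply the element type by the length.
    PreferredArray = ctypes.c_int * 3

    print(list(LegacyArray(1, 2, 3)))     # [1, 2, 3]
    print(list(PreferredArray(4, 5, 6)))  # [4, 5, 6]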
From 3a9b2aae615165a40614db9aaa8b90c55ff0c7f9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?B=C3=A9n=C3=A9dikt=20Tran?=
<10796600+picnixz@users.noreply.github.com>
Date: Tue, 30 Jul 2024 10:50:30 +0200
Subject: [PATCH 077/139] gh-122400: Handle ValueError in filecmp (GH-122401)
---
Lib/filecmp.py | 10 +++---
Lib/test/test_filecmp.py | 33 +++++++++++++++++++
...-07-29-16-47-08.gh-issue-122400.fM0YSv.rst | 3 ++
3 files changed, 42 insertions(+), 4 deletions(-)
create mode 100644 Misc/NEWS.d/next/Library/2024-07-29-16-47-08.gh-issue-122400.fM0YSv.rst
diff --git a/Lib/filecmp.py b/Lib/filecmp.py
index 020ea694ca63e9..c5b8d854d77de3 100644
--- a/Lib/filecmp.py
+++ b/Lib/filecmp.py
@@ -164,12 +164,14 @@ def phase2(self): # Distinguish files, directories, funnies
ok = True
try:
a_stat = os.stat(a_path)
- except OSError:
+ except (OSError, ValueError):
+ # See https://github.com/python/cpython/issues/122400
+ # for the rationale for protecting against ValueError.
# print('Can\'t stat', a_path, ':', why.args[1])
ok = False
try:
b_stat = os.stat(b_path)
- except OSError:
+ except (OSError, ValueError):
# print('Can\'t stat', b_path, ':', why.args[1])
ok = False
@@ -285,12 +287,12 @@ def cmpfiles(a, b, common, shallow=True):
# Return:
# 0 for equal
# 1 for different
-# 2 for funny cases (can't stat, etc.)
+# 2 for funny cases (can't stat, NUL bytes, etc.)
#
def _cmp(a, b, sh, abs=abs, cmp=cmp):
try:
return not abs(cmp(a, b, sh))
- except OSError:
+ except (OSError, ValueError):
return 2
diff --git a/Lib/test/test_filecmp.py b/Lib/test/test_filecmp.py
index 1fb47163719ede..2c83667b22feb4 100644
--- a/Lib/test/test_filecmp.py
+++ b/Lib/test/test_filecmp.py
@@ -156,6 +156,39 @@ def test_cmpfiles(self):
(['file'], ['file2'], []),
"Comparing mismatched directories fails")
+ def test_cmpfiles_invalid_names(self):
+ # See https://github.com/python/cpython/issues/122400.
+ for file, desc in [
+ ('\x00', 'NUL bytes filename'),
+ (__file__ + '\x00', 'filename with embedded NUL bytes'),
+ ("\uD834\uDD1E.py", 'surrogate codes (MUSICAL SYMBOL G CLEF)'),
+ ('a' * 1_000_000, 'very long filename'),
+ ]:
+ for other_dir in [self.dir, self.dir_same, self.dir_diff]:
+ with self.subTest(f'cmpfiles: {desc}', other_dir=other_dir):
+ res = filecmp.cmpfiles(self.dir, other_dir, [file])
+ self.assertTupleEqual(res, ([], [], [file]))
+
+ def test_dircmp_invalid_names(self):
+ for bad_dir, desc in [
+ ('\x00', 'NUL bytes dirname'),
+ (f'Top{os.sep}Mid\x00', 'dirname with embedded NUL bytes'),
+ ("\uD834\uDD1E", 'surrogate codes (MUSICAL SYMBOL G CLEF)'),
+ ('a' * 1_000_000, 'very long dirname'),
+ ]:
+ d1 = filecmp.dircmp(self.dir, bad_dir)
+ d2 = filecmp.dircmp(bad_dir, self.dir)
+ for target in [
+ # attributes where os.listdir() raises OSError or ValueError
+ 'left_list', 'right_list',
+ 'left_only', 'right_only', 'common',
+ ]:
+ with self.subTest(f'dircmp(ok, bad): {desc}', target=target):
+ with self.assertRaises((OSError, ValueError)):
+ getattr(d1, target)
+ with self.subTest(f'dircmp(bad, ok): {desc}', target=target):
+ with self.assertRaises((OSError, ValueError)):
+ getattr(d2, target)
def _assert_lists(self, actual, expected):
"""Assert that two lists are equal, up to ordering."""
diff --git a/Misc/NEWS.d/next/Library/2024-07-29-16-47-08.gh-issue-122400.fM0YSv.rst b/Misc/NEWS.d/next/Library/2024-07-29-16-47-08.gh-issue-122400.fM0YSv.rst
new file mode 100644
index 00000000000000..8c47e94f78d9f0
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-07-29-16-47-08.gh-issue-122400.fM0YSv.rst
@@ -0,0 +1,3 @@
+Handle :exc:`ValueError`\s raised by :func:`os.stat` in
+:class:`filecmp.dircmp` and :func:`filecmp.cmpfiles`.
+Patch by Bénédikt Tran.
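The user-visible effect, sketched briefly: names that make os.stat() raise ValueError (for example, an embedded NUL byte) are now reported as errors instead of escaping as an exception.

    import filecmp
    import tempfile

    with tempfile.TemporaryDirectory() as d1, tempfile.TemporaryDirectory() as d2:
        # With this fix the bad name ends up in the "errors" list;
        # previously the ValueError from os.stat() propagated to the caller.
        match, mismatch, errors = filecmp.cmpfiles(d1, d2, ["\x00"])
        print(match, mismatch, errors)  # [] [] ['\x00']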
From d27a53fc02a87e76066fc4e15ff1fff3922a482d Mon Sep 17 00:00:00 2001
From: Clinton
Date: Tue, 30 Jul 2024 04:53:07 -0400
Subject: [PATCH 078/139] gh-121474: Add threading.Barrier parties arg sanity
check. (GH-121480)
---
Lib/test/lock_tests.py | 4 ++++
Lib/threading.py | 2 ++
.../Library/2024-07-08-03-45-34.gh-issue-121474.NsvrUN.rst | 2 ++
3 files changed, 8 insertions(+)
create mode 100644 Misc/NEWS.d/next/Library/2024-07-08-03-45-34.gh-issue-121474.NsvrUN.rst
diff --git a/Lib/test/lock_tests.py b/Lib/test/lock_tests.py
index 024c6debcd4a54..8c8f8901f00178 100644
--- a/Lib/test/lock_tests.py
+++ b/Lib/test/lock_tests.py
@@ -1013,6 +1013,10 @@ def multipass(self, results, n):
self.assertEqual(self.barrier.n_waiting, 0)
self.assertFalse(self.barrier.broken)
+ def test_constructor(self):
+ self.assertRaises(ValueError, self.barriertype, parties=0)
+ self.assertRaises(ValueError, self.barriertype, parties=-1)
+
def test_barrier(self, passes=1):
"""
Test that a barrier is passed in lockstep
diff --git a/Lib/threading.py b/Lib/threading.py
index 2dcdd0c9e067b6..94ea2f08178369 100644
--- a/Lib/threading.py
+++ b/Lib/threading.py
@@ -689,6 +689,8 @@ def __init__(self, parties, action=None, timeout=None):
default for all subsequent 'wait()' calls.
"""
+ if parties < 1:
+ raise ValueError("parties must be > 0")
self._cond = Condition(Lock())
self._action = action
self._timeout = timeout
diff --git a/Misc/NEWS.d/next/Library/2024-07-08-03-45-34.gh-issue-121474.NsvrUN.rst b/Misc/NEWS.d/next/Library/2024-07-08-03-45-34.gh-issue-121474.NsvrUN.rst
new file mode 100644
index 00000000000000..605f30d76f5d47
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-07-08-03-45-34.gh-issue-121474.NsvrUN.rst
@@ -0,0 +1,2 @@
+Fix missing sanity check for ``parties`` arg in :class:`threading.Barrier`
+constructor. Patch by Clinton Christian (pygeek).
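With the new check, an invalid parties value fails immediately in the constructor; a minimal sketch:

    import threading

    try:
        threading.Barrier(0)          # also rejected: any negative value
    except ValueError as exc:
        print(exc)                    # parties must be > 0

    barrier = threading.Barrier(2)    # a positive value works as before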
From d1a1bca1f0550a4715f1bf32b1586caa7bc4487b Mon Sep 17 00:00:00 2001
From: Dino Viehland
Date: Tue, 30 Jul 2024 05:03:52 -0700
Subject: [PATCH 079/139] gh-119896: Fix CTRL-Z behavior in the new REPL on
Windows (GH-122217)
---
Lib/_pyrepl/reader.py | 9 +++++++--
Lib/_pyrepl/simple_interact.py | 1 +
Lib/_pyrepl/utils.py | 3 ++-
Lib/_pyrepl/windows_console.py | 5 ++++-
4 files changed, 14 insertions(+), 4 deletions(-)
diff --git a/Lib/_pyrepl/reader.py b/Lib/_pyrepl/reader.py
index 8b282a382d374f..13b1f3eb9d118a 100644
--- a/Lib/_pyrepl/reader.py
+++ b/Lib/_pyrepl/reader.py
@@ -21,6 +21,8 @@
from __future__ import annotations
+import sys
+
from contextlib import contextmanager
from dataclasses import dataclass, field, fields
import unicodedata
@@ -52,7 +54,10 @@ def disp_str(buffer: str) -> tuple[str, list[int]]:
b: list[int] = []
s: list[str] = []
for c in buffer:
- if ord(c) < 128:
+ if c == '\x1a':
+ s.append(c)
+ b.append(2)
+ elif ord(c) < 128:
s.append(c)
b.append(1)
elif unicodedata.category(c).startswith("C"):
@@ -110,7 +115,7 @@ def make_default_commands() -> dict[CommandName, type[Command]]:
(r"\C-w", "unix-word-rubout"),
(r"\C-x\C-u", "upcase-region"),
(r"\C-y", "yank"),
- (r"\C-z", "suspend"),
+ *(() if sys.platform == "win32" else ((r"\C-z", "suspend"), )),
(r"\M-b", "backward-word"),
(r"\M-c", "capitalize-word"),
(r"\M-d", "kill-word"),
diff --git a/Lib/_pyrepl/simple_interact.py b/Lib/_pyrepl/simple_interact.py
index 2c3dffe070c629..91aef5e01eb867 100644
--- a/Lib/_pyrepl/simple_interact.py
+++ b/Lib/_pyrepl/simple_interact.py
@@ -76,6 +76,7 @@ def _clear_screen():
"copyright": _sitebuiltins._Printer('copyright', sys.copyright),
"help": "help",
"clear": _clear_screen,
+ "\x1a": _sitebuiltins.Quitter('\x1a', ''),
}
diff --git a/Lib/_pyrepl/utils.py b/Lib/_pyrepl/utils.py
index 20dbb1f7e17229..0f36083b6ffa92 100644
--- a/Lib/_pyrepl/utils.py
+++ b/Lib/_pyrepl/utils.py
@@ -21,4 +21,5 @@ def wlen(s: str) -> int:
length = sum(str_width(i) for i in s)
# remove lengths of any escape sequences
sequence = ANSI_ESCAPE_SEQUENCE.findall(s)
- return length - sum(len(i) for i in sequence)
+ ctrl_z_cnt = s.count('\x1a')
+ return length - sum(len(i) for i in sequence) + ctrl_z_cnt
diff --git a/Lib/_pyrepl/windows_console.py b/Lib/_pyrepl/windows_console.py
index 9e97b1524e29a0..ba9af36b8be99c 100644
--- a/Lib/_pyrepl/windows_console.py
+++ b/Lib/_pyrepl/windows_console.py
@@ -253,7 +253,7 @@ def __write_changed_line(
else:
self.__posxy = wlen(newline), y
- if "\x1b" in newline or y != self.__posxy[1]:
+ if "\x1b" in newline or y != self.__posxy[1] or '\x1a' in newline:
# ANSI escape characters are present, so we can't assume
# anything about the position of the cursor. Moving the cursor
# to the left margin should work to get to a known position.
@@ -291,6 +291,9 @@ def _disable_blinking(self):
self.__write("\x1b[?12l")
def __write(self, text: str) -> None:
+ if "\x1a" in text:
+ text = ''.join(["^Z" if x == '\x1a' else x for x in text])
+
if self.out is not None:
self.out.write(text.encode(self.encoding, "replace"))
self.out.flush()
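The width bookkeeping behind these hunks, sketched outside pyrepl (the helper names below are stand-ins, not pyrepl APIs): CTRL-Z is rendered as the two characters "^Z", so every on-screen width calculation has to count it as two columns.

    # Simplified model of the adjustment: the real code goes through
    # disp_str()/wlen() and also accounts for ANSI escape sequences.
    def displayed(text):
        return text.replace("\x1a", "^Z")

    def displayed_width(text):
        # one extra column for every CTRL-Z, mirroring the wlen() tweak
        return len(text) + text.count("\x1a")

    line = "spam\x1a"
    print(displayed(line), displayed_width(line))  # spam^Z 6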
From 8fb88b22b7a932ff16002dd19e904f9cafd59e9f Mon Sep 17 00:00:00 2001
From: Sam Gross
Date: Tue, 30 Jul 2024 11:30:52 -0400
Subject: [PATCH 080/139] gh-121946: Temporarily switch to llvm-17 in TSan CI
again (GH-122466)
The Ubuntu package for llvm-18 is broken
---
.github/workflows/reusable-tsan.yml | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/.github/workflows/reusable-tsan.yml b/.github/workflows/reusable-tsan.yml
index b6d5d8fa1c7157..27f4eacd86fd95 100644
--- a/.github/workflows/reusable-tsan.yml
+++ b/.github/workflows/reusable-tsan.yml
@@ -36,11 +36,11 @@ jobs:
# Install clang-18
wget https://apt.llvm.org/llvm.sh
chmod +x llvm.sh
- sudo ./llvm.sh 18
- sudo update-alternatives --install /usr/bin/clang clang /usr/bin/clang-18 100
- sudo update-alternatives --set clang /usr/bin/clang-18
- sudo update-alternatives --install /usr/bin/clang++ clang++ /usr/bin/clang++-18 100
- sudo update-alternatives --set clang++ /usr/bin/clang++-18
+ sudo ./llvm.sh 17 # gh-121946: llvm-18 package is temporarily broken
+ sudo update-alternatives --install /usr/bin/clang clang /usr/bin/clang-17 100
+ sudo update-alternatives --set clang /usr/bin/clang-17
+ sudo update-alternatives --install /usr/bin/clang++ clang++ /usr/bin/clang++-17 100
+ sudo update-alternatives --set clang++ /usr/bin/clang++-17
# Reduce ASLR to avoid TSAN crashing
sudo sysctl -w vm.mmap_rnd_bits=28
- name: TSAN Option Setup
From 1d8e45390733d3eb29164799ea10f8406f53e830 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C5=81ukasz=20Langa?=
Date: Tue, 30 Jul 2024 18:57:19 +0200
Subject: [PATCH 081/139] gh-116402: Avoid readline in test_builtin TTY input
tests (GH-122447)
---
Lib/test/test_builtin.py | 39 ++++++++++++++++++++++++---------------
1 file changed, 24 insertions(+), 15 deletions(-)
diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py
index 85f139db9bcd45..2ea97e797a4892 100644
--- a/Lib/test/test_builtin.py
+++ b/Lib/test/test_builtin.py
@@ -4,6 +4,7 @@
import asyncio
import builtins
import collections
+import contextlib
import decimal
import fractions
import gc
@@ -31,6 +32,7 @@
from operator import neg
from test import support
from test.support import (cpython_only, swap_attr, maybe_get_event_loop_policy)
+from test.support.import_helper import import_module
from test.support.os_helper import (EnvironmentVarGuard, TESTFN, unlink)
from test.support.script_helper import assert_python_ok
from test.support.warnings_helper import check_warnings
@@ -2412,7 +2414,8 @@ def child(wpipe):
print(ascii(input(prompt)), file=wpipe)
except BaseException as e:
print(ascii(f'{e.__class__.__name__}: {e!s}'), file=wpipe)
- lines = self.run_child(child, terminal_input + b"\r\n")
+ with self.detach_readline():
+ lines = self.run_child(child, terminal_input + b"\r\n")
# Check we did exercise the GNU readline path
self.assertIn(lines[0], {'tty = True', 'tty = False'})
if lines[0] != 'tty = True':
@@ -2425,28 +2428,36 @@ def child(wpipe):
expected = terminal_input.decode(sys.stdin.encoding) # what else?
self.assertEqual(input_result, expected)
- def test_input_tty(self):
- # Test input() functionality when wired to a tty (the code path
- # is different and invokes GNU readline if available).
- self.check_input_tty("prompt", b"quux")
-
- def skip_if_readline(self):
+ @contextlib.contextmanager
+ def detach_readline(self):
# bpo-13886: When the readline module is loaded, PyOS_Readline() uses
# the readline implementation. In some cases, the Python readline
# callback rlhandler() is called by readline with a string without
- # non-ASCII characters. Skip tests on non-ASCII characters if the
- # readline module is loaded, since test_builtin is not intended to test
+ # non-ASCII characters.
+ # Unlink readline temporarily from PyOS_Readline() for those tests,
+ # since test_builtin is not intended to test
# the readline module, but the builtins module.
- if 'readline' in sys.modules:
- self.skipTest("the readline module is loaded")
+ if "readline" in sys.modules:
+ c = import_module("ctypes")
+ fp_api = "PyOS_ReadlineFunctionPointer"
+ prev_value = c.c_void_p.in_dll(c.pythonapi, fp_api).value
+ c.c_void_p.in_dll(c.pythonapi, fp_api).value = None
+ try:
+ yield
+ finally:
+ c.c_void_p.in_dll(c.pythonapi, fp_api).value = prev_value
+ else:
+ yield
+
+ def test_input_tty(self):
+ # Test input() functionality when wired to a tty
+ self.check_input_tty("prompt", b"quux")
def test_input_tty_non_ascii(self):
- self.skip_if_readline()
# Check stdin/stdout encoding is used when invoking PyOS_Readline()
self.check_input_tty("prompté", b"quux\xc3\xa9", "utf-8")
def test_input_tty_non_ascii_unicode_errors(self):
- self.skip_if_readline()
# Check stdin/stdout error handler is used when invoking PyOS_Readline()
self.check_input_tty("prompté", b"quux\xe9", "ascii")
@@ -2456,14 +2467,12 @@ def test_input_tty_null_in_prompt(self):
'null characters')
def test_input_tty_nonencodable_prompt(self):
- self.skip_if_readline()
self.check_input_tty("prompté", b"quux", "ascii", stdout_errors='strict',
expected="UnicodeEncodeError: 'ascii' codec can't encode "
"character '\\xe9' in position 6: ordinal not in "
"range(128)")
def test_input_tty_nondecodable_input(self):
- self.skip_if_readline()
self.check_input_tty("prompt", b"quux\xe9", "ascii", stdin_errors='strict',
expected="UnicodeDecodeError: 'ascii' codec can't decode "
"byte 0xe9 in position 4: ordinal not in "
From 2b163aa9e796b312bb0549d49145d26e4904768e Mon Sep 17 00:00:00 2001
From: Sam Gross
Date: Tue, 30 Jul 2024 13:53:47 -0400
Subject: [PATCH 082/139] gh-117657: Avoid race in `PAUSE_ADAPTIVE_COUNTER` in
free-threaded build (#122190)
The adaptive counter doesn't do anything currently in the free-threaded
build and TSan reports a data race due to concurrent modifications to
the counter.
---
Python/ceval_macros.h | 3 ++-
Tools/tsan/suppressions_free_threading.txt | 1 -
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/Python/ceval_macros.h b/Python/ceval_macros.h
index 60efe3d78ff22c..2881ed2153a7c1 100644
--- a/Python/ceval_macros.h
+++ b/Python/ceval_macros.h
@@ -316,17 +316,18 @@ GETITEM(PyObject *v, Py_ssize_t i) {
/* gh-115999 tracks progress on addressing this. */ \
static_assert(0, "The specializing interpreter is not yet thread-safe"); \
} while (0);
+#define PAUSE_ADAPTIVE_COUNTER(COUNTER) ((void)COUNTER)
#else
#define ADVANCE_ADAPTIVE_COUNTER(COUNTER) \
do { \
(COUNTER) = advance_backoff_counter((COUNTER)); \
} while (0);
-#endif
#define PAUSE_ADAPTIVE_COUNTER(COUNTER) \
do { \
(COUNTER) = pause_backoff_counter((COUNTER)); \
} while (0);
+#endif
#define UNBOUNDLOCAL_ERROR_MSG \
"cannot access local variable '%s' where it is not associated with a value"
diff --git a/Tools/tsan/suppressions_free_threading.txt b/Tools/tsan/suppressions_free_threading.txt
index a54e66d1212d1f..78449aed4009d3 100644
--- a/Tools/tsan/suppressions_free_threading.txt
+++ b/Tools/tsan/suppressions_free_threading.txt
@@ -23,7 +23,6 @@ race:free_threadstate
# These warnings trigger directly in a CPython function.
-race_top:_PyEval_EvalFrameDefault
race_top:assign_version_tag
race_top:new_reference
race_top:_multiprocessing_SemLock_acquire_impl
From af0a00f022d0fb8f1edb4abdda1bc6b915f0448d Mon Sep 17 00:00:00 2001
From: Michael Droettboom
Date: Tue, 30 Jul 2024 15:31:05 -0400
Subject: [PATCH 083/139] gh-122188: Move magic number to its own file
(#122243)
* gh-122188: Move magic number to its own file
* Add versionadded directive
* Do work in C
* Integrate launcher.c
* Make _pyc_magic_number private
* Remove metadata
* Move sys.implementation -> _imp
* Modernize comment
* Move _RAW_MAGIC_NUMBER to the C side as well
* _pyc_magic_number -> pyc_magic_number
* Remove unused import
* Update docs
* Apply suggestions from code review
Co-authored-by: Eric Snow
* Fix typo in tests
---------
Co-authored-by: Eric Snow
---
Include/internal/pycore_magic_number.h | 280 +++++++++++++++++++++++++
InternalDocs/compiler.md | 2 +-
Lib/importlib/_bootstrap_external.py | 279 +-----------------------
Lib/importlib/util.py | 3 +-
Lib/test/test_import/__init__.py | 9 +
Lib/zipimport.py | 2 +-
Python/import.c | 27 ++-
7 files changed, 307 insertions(+), 295 deletions(-)
create mode 100644 Include/internal/pycore_magic_number.h
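Wherever the constant lives internally, the documented way to read it from Python is unchanged; a short sketch of what the value looks like:

    import importlib.util

    # Four bytes: a little-endian 16-bit number followed by b'\r\n', so that
    # accidental text-mode reads or writes corrupt the header detectably.
    magic = importlib.util.MAGIC_NUMBER
    print(len(magic), magic[-2:])               # 4 b'\r\n'
    print(int.from_bytes(magic[:2], "little"))  # e.g. 36xx for Python 3.14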
diff --git a/Include/internal/pycore_magic_number.h b/Include/internal/pycore_magic_number.h
new file mode 100644
index 00000000000000..3af6817e9fde85
--- /dev/null
+++ b/Include/internal/pycore_magic_number.h
@@ -0,0 +1,280 @@
+#ifndef Py_INTERNAL_MAGIC_NUMBER_H
+#define Py_INTERNAL_MAGIC_NUMBER_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
+/*
+
+Magic number to reject .pyc files generated by other Python versions.
+It should change for each incompatible change to the bytecode.
+
+PYC_MAGIC_NUMBER must change whenever the bytecode emitted by the compiler may
+no longer be understood by older implementations of the eval loop (usually due
+to the addition of new opcodes).
+
+The value of CR and LF is incorporated so if you ever read or write
+a .pyc file in text mode the magic number will be wrong; also, the
+Apple MPW compiler swaps their values, botching string constants.
+
+There were a variety of old schemes for setting the magic number. Starting with
+Python 3.11, Python 3.n starts with magic number 2900+50n. Within each minor
+version, the magic number is incremented by 1 each time the file format changes.
+
+Known values:
+ Python 1.5: 20121
+ Python 1.5.1: 20121
+ Python 1.5.2: 20121
+ Python 1.6: 50428
+ Python 2.0: 50823
+ Python 2.0.1: 50823
+ Python 2.1: 60202
+ Python 2.1.1: 60202
+ Python 2.1.2: 60202
+ Python 2.2: 60717
+ Python 2.3a0: 62011
+ Python 2.3a0: 62021
+ Python 2.3a0: 62011 (!)
+ Python 2.4a0: 62041
+ Python 2.4a3: 62051
+ Python 2.4b1: 62061
+ Python 2.5a0: 62071
+ Python 2.5a0: 62081 (ast-branch)
+ Python 2.5a0: 62091 (with)
+ Python 2.5a0: 62092 (changed WITH_CLEANUP opcode)
+ Python 2.5b3: 62101 (fix wrong code: for x, in ...)
+ Python 2.5b3: 62111 (fix wrong code: x += yield)
+ Python 2.5c1: 62121 (fix wrong lnotab with for loops and
+ storing constants that should have been removed)
+ Python 2.5c2: 62131 (fix wrong code: for x, in ... in listcomp/genexp)
+ Python 2.6a0: 62151 (peephole optimizations and STORE_MAP opcode)
+ Python 2.6a1: 62161 (WITH_CLEANUP optimization)
+ Python 2.7a0: 62171 (optimize list comprehensions/change LIST_APPEND)
+ Python 2.7a0: 62181 (optimize conditional branches:
+ introduce POP_JUMP_IF_FALSE and POP_JUMP_IF_TRUE)
+ Python 2.7a0 62191 (introduce SETUP_WITH)
+ Python 2.7a0 62201 (introduce BUILD_SET)
+ Python 2.7a0 62211 (introduce MAP_ADD and SET_ADD)
+ Python 3000: 3000
+ 3010 (removed UNARY_CONVERT)
+ 3020 (added BUILD_SET)
+ 3030 (added keyword-only parameters)
+ 3040 (added signature annotations)
+ 3050 (print becomes a function)
+ 3060 (PEP 3115 metaclass syntax)
+ 3061 (string literals become unicode)
+ 3071 (PEP 3109 raise changes)
+ 3081 (PEP 3137 make __file__ and __name__ unicode)
+ 3091 (kill str8 interning)
+ 3101 (merge from 2.6a0, see 62151)
+ 3103 (__file__ points to source file)
+ Python 3.0a4: 3111 (WITH_CLEANUP optimization).
+ Python 3.0b1: 3131 (lexical exception stacking, including POP_EXCEPT
+ #3021)
+ Python 3.1a1: 3141 (optimize list, set and dict comprehensions:
+ change LIST_APPEND and SET_ADD, add MAP_ADD #2183)
+ Python 3.1a1: 3151 (optimize conditional branches:
+ introduce POP_JUMP_IF_FALSE and POP_JUMP_IF_TRUE
+ #4715)
+ Python 3.2a1: 3160 (add SETUP_WITH #6101)
+ Python 3.2a2: 3170 (add DUP_TOP_TWO, remove DUP_TOPX and ROT_FOUR #9225)
+ Python 3.2a3 3180 (add DELETE_DEREF #4617)
+ Python 3.3a1 3190 (__class__ super closure changed)
+ Python 3.3a1 3200 (PEP 3155 __qualname__ added #13448)
+ Python 3.3a1 3210 (added size modulo 2**32 to the pyc header #13645)
+ Python 3.3a2 3220 (changed PEP 380 implementation #14230)
+ Python 3.3a4 3230 (revert changes to implicit __class__ closure #14857)
+ Python 3.4a1 3250 (evaluate positional default arguments before
+ keyword-only defaults #16967)
+ Python 3.4a1 3260 (add LOAD_CLASSDEREF; allow locals of class to override
+ free vars #17853)
+ Python 3.4a1 3270 (various tweaks to the __class__ closure #12370)
+ Python 3.4a1 3280 (remove implicit class argument)
+ Python 3.4a4 3290 (changes to __qualname__ computation #19301)
+ Python 3.4a4 3300 (more changes to __qualname__ computation #19301)
+ Python 3.4rc2 3310 (alter __qualname__ computation #20625)
+ Python 3.5a1 3320 (PEP 465: Matrix multiplication operator #21176)
+ Python 3.5b1 3330 (PEP 448: Additional Unpacking Generalizations #2292)
+ Python 3.5b2 3340 (fix dictionary display evaluation order #11205)
+ Python 3.5b3 3350 (add GET_YIELD_FROM_ITER opcode #24400)
+ Python 3.5.2 3351 (fix BUILD_MAP_UNPACK_WITH_CALL opcode #27286)
+ Python 3.6a0 3360 (add FORMAT_VALUE opcode #25483)
+ Python 3.6a1 3361 (lineno delta of code.co_lnotab becomes signed #26107)
+ Python 3.6a2 3370 (16 bit wordcode #26647)
+ Python 3.6a2 3371 (add BUILD_CONST_KEY_MAP opcode #27140)
+ Python 3.6a2 3372 (MAKE_FUNCTION simplification, remove MAKE_CLOSURE
+ #27095)
+ Python 3.6b1 3373 (add BUILD_STRING opcode #27078)
+ Python 3.6b1 3375 (add SETUP_ANNOTATIONS and STORE_ANNOTATION opcodes
+ #27985)
+ Python 3.6b1 3376 (simplify CALL_FUNCTIONs & BUILD_MAP_UNPACK_WITH_CALL
+ #27213)
+ Python 3.6b1 3377 (set __class__ cell from type.__new__ #23722)
+ Python 3.6b2 3378 (add BUILD_TUPLE_UNPACK_WITH_CALL #28257)
+ Python 3.6rc1 3379 (more thorough __class__ validation #23722)
+ Python 3.7a1 3390 (add LOAD_METHOD and CALL_METHOD opcodes #26110)
+ Python 3.7a2 3391 (update GET_AITER #31709)
+ Python 3.7a4 3392 (PEP 552: Deterministic pycs #31650)
+ Python 3.7b1 3393 (remove STORE_ANNOTATION opcode #32550)
+ Python 3.7b5 3394 (restored docstring as the first stmt in the body;
+ this might have affected the first line number #32911)
+ Python 3.8a1 3400 (move frame block handling to compiler #17611)
+ Python 3.8a1 3401 (add END_ASYNC_FOR #33041)
+ Python 3.8a1 3410 (PEP570 Python Positional-Only Parameters #36540)
+ Python 3.8b2 3411 (Reverse evaluation order of key: value in dict
+ comprehensions #35224)
+ Python 3.8b2 3412 (Swap the position of positional args and positional
+ only args in ast.arguments #37593)
+ Python 3.8b4 3413 (Fix "break" and "continue" in "finally" #37830)
+ Python 3.9a0 3420 (add LOAD_ASSERTION_ERROR #34880)
+ Python 3.9a0 3421 (simplified bytecode for with blocks #32949)
+ Python 3.9a0 3422 (remove BEGIN_FINALLY, END_FINALLY, CALL_FINALLY, POP_FINALLY bytecodes #33387)
+ Python 3.9a2 3423 (add IS_OP, CONTAINS_OP and JUMP_IF_NOT_EXC_MATCH bytecodes #39156)
+ Python 3.9a2 3424 (simplify bytecodes for *value unpacking)
+ Python 3.9a2 3425 (simplify bytecodes for **value unpacking)
+ Python 3.10a1 3430 (Make 'annotations' future by default)
+ Python 3.10a1 3431 (New line number table format -- PEP 626)
+ Python 3.10a2 3432 (Function annotation for MAKE_FUNCTION is changed from dict to tuple bpo-42202)
+ Python 3.10a2 3433 (RERAISE restores f_lasti if oparg != 0)
+ Python 3.10a6 3434 (PEP 634: Structural Pattern Matching)
+ Python 3.10a7 3435 Use instruction offsets (as opposed to byte offsets).
+ Python 3.10b1 3436 (Add GEN_START bytecode #43683)
+ Python 3.10b1 3437 (Undo making 'annotations' future by default - We like to dance among core devs!)
+ Python 3.10b1 3438 Safer line number table handling.
+ Python 3.10b1 3439 (Add ROT_N)
+ Python 3.11a1 3450 Use exception table for unwinding ("zero cost" exception handling)
+ Python 3.11a1 3451 (Add CALL_METHOD_KW)
+ Python 3.11a1 3452 (drop nlocals from marshaled code objects)
+ Python 3.11a1 3453 (add co_fastlocalnames and co_fastlocalkinds)
+ Python 3.11a1 3454 (compute cell offsets relative to locals bpo-43693)
+ Python 3.11a1 3455 (add MAKE_CELL bpo-43693)
+ Python 3.11a1 3456 (interleave cell args bpo-43693)
+ Python 3.11a1 3457 (Change localsplus to a bytes object bpo-43693)
+ Python 3.11a1 3458 (imported objects now don't use LOAD_METHOD/CALL_METHOD)
+ Python 3.11a1 3459 (PEP 657: add end line numbers and column offsets for instructions)
+ Python 3.11a1 3460 (Add co_qualname field to PyCodeObject bpo-44530)
+ Python 3.11a1 3461 (JUMP_ABSOLUTE must jump backwards)
+ Python 3.11a2 3462 (bpo-44511: remove COPY_DICT_WITHOUT_KEYS, change
+ MATCH_CLASS and MATCH_KEYS, and add COPY)
+ Python 3.11a3 3463 (bpo-45711: JUMP_IF_NOT_EXC_MATCH no longer pops the
+ active exception)
+ Python 3.11a3 3464 (bpo-45636: Merge numeric BINARY_*INPLACE_* into
+ BINARY_OP)
+ Python 3.11a3 3465 (Add COPY_FREE_VARS opcode)
+ Python 3.11a4 3466 (bpo-45292: PEP-654 except*)
+ Python 3.11a4 3467 (Change CALL_xxx opcodes)
+ Python 3.11a4 3468 (Add SEND opcode)
+ Python 3.11a4 3469 (bpo-45711: remove type, traceback from exc_info)
+ Python 3.11a4 3470 (bpo-46221: PREP_RERAISE_STAR no longer pushes lasti)
+ Python 3.11a4 3471 (bpo-46202: remove pop POP_EXCEPT_AND_RERAISE)
+ Python 3.11a4 3472 (bpo-46009: replace GEN_START with POP_TOP)
+ Python 3.11a4 3473 (Add POP_JUMP_IF_NOT_NONE/POP_JUMP_IF_NONE opcodes)
+ Python 3.11a4 3474 (Add RESUME opcode)
+ Python 3.11a5 3475 (Add RETURN_GENERATOR opcode)
+ Python 3.11a5 3476 (Add ASYNC_GEN_WRAP opcode)
+ Python 3.11a5 3477 (Replace DUP_TOP/DUP_TOP_TWO with COPY and
+ ROT_TWO/ROT_THREE/ROT_FOUR/ROT_N with SWAP)
+ Python 3.11a5 3478 (New CALL opcodes)
+ Python 3.11a5 3479 (Add PUSH_NULL opcode)
+ Python 3.11a5 3480 (New CALL opcodes, second iteration)
+ Python 3.11a5 3481 (Use inline cache for BINARY_OP)
+ Python 3.11a5 3482 (Use inline caching for UNPACK_SEQUENCE and LOAD_GLOBAL)
+ Python 3.11a5 3483 (Use inline caching for COMPARE_OP and BINARY_SUBSCR)
+ Python 3.11a5 3484 (Use inline caching for LOAD_ATTR, LOAD_METHOD, and
+ STORE_ATTR)
+ Python 3.11a5 3485 (Add an oparg to GET_AWAITABLE)
+ Python 3.11a6 3486 (Use inline caching for PRECALL and CALL)
+ Python 3.11a6 3487 (Remove the adaptive "oparg counter" mechanism)
+ Python 3.11a6 3488 (LOAD_GLOBAL can push additional NULL)
+ Python 3.11a6 3489 (Add JUMP_BACKWARD, remove JUMP_ABSOLUTE)
+ Python 3.11a6 3490 (remove JUMP_IF_NOT_EXC_MATCH, add CHECK_EXC_MATCH)
+ Python 3.11a6 3491 (remove JUMP_IF_NOT_EG_MATCH, add CHECK_EG_MATCH,
+ add JUMP_BACKWARD_NO_INTERRUPT, make JUMP_NO_INTERRUPT virtual)
+ Python 3.11a7 3492 (make POP_JUMP_IF_NONE/NOT_NONE/TRUE/FALSE relative)
+ Python 3.11a7 3493 (Make JUMP_IF_TRUE_OR_POP/JUMP_IF_FALSE_OR_POP relative)
+ Python 3.11a7 3494 (New location info table)
+ Python 3.11b4 3495 (Set line number of module's RESUME instr to 0 per PEP 626)
+ Python 3.12a1 3500 (Remove PRECALL opcode)
+ Python 3.12a1 3501 (YIELD_VALUE oparg == stack_depth)
+ Python 3.12a1 3502 (LOAD_FAST_CHECK, no NULL-check in LOAD_FAST)
+ Python 3.12a1 3503 (Shrink LOAD_METHOD cache)
+ Python 3.12a1 3504 (Merge LOAD_METHOD back into LOAD_ATTR)
+ Python 3.12a1 3505 (Specialization/Cache for FOR_ITER)
+ Python 3.12a1 3506 (Add BINARY_SLICE and STORE_SLICE instructions)
+ Python 3.12a1 3507 (Set lineno of module's RESUME to 0)
+ Python 3.12a1 3508 (Add CLEANUP_THROW)
+ Python 3.12a1 3509 (Conditional jumps only jump forward)
+ Python 3.12a2 3510 (FOR_ITER leaves iterator on the stack)
+ Python 3.12a2 3511 (Add STOPITERATION_ERROR instruction)
+ Python 3.12a2 3512 (Remove all unused consts from code objects)
+ Python 3.12a4 3513 (Add CALL_INTRINSIC_1 instruction, removed STOPITERATION_ERROR, PRINT_EXPR, IMPORT_STAR)
+ Python 3.12a4 3514 (Remove ASYNC_GEN_WRAP, LIST_TO_TUPLE, and UNARY_POSITIVE)
+ Python 3.12a5 3515 (Embed jump mask in COMPARE_OP oparg)
+ Python 3.12a5 3516 (Add COMPARE_AND_BRANCH instruction)
+ Python 3.12a5 3517 (Change YIELD_VALUE oparg to exception block depth)
+ Python 3.12a6 3518 (Add RETURN_CONST instruction)
+ Python 3.12a6 3519 (Modify SEND instruction)
+ Python 3.12a6 3520 (Remove PREP_RERAISE_STAR, add CALL_INTRINSIC_2)
+ Python 3.12a7 3521 (Shrink the LOAD_GLOBAL caches)
+ Python 3.12a7 3522 (Removed JUMP_IF_FALSE_OR_POP/JUMP_IF_TRUE_OR_POP)
+ Python 3.12a7 3523 (Convert COMPARE_AND_BRANCH back to COMPARE_OP)
+ Python 3.12a7 3524 (Shrink the BINARY_SUBSCR caches)
+ Python 3.12b1 3525 (Shrink the CALL caches)
+ Python 3.12b1 3526 (Add instrumentation support)
+ Python 3.12b1 3527 (Add LOAD_SUPER_ATTR)
+ Python 3.12b1 3528 (Add LOAD_SUPER_ATTR_METHOD specialization)
+ Python 3.12b1 3529 (Inline list/dict/set comprehensions)
+ Python 3.12b1 3530 (Shrink the LOAD_SUPER_ATTR caches)
+ Python 3.12b1 3531 (Add PEP 695 changes)
+ Python 3.13a1 3550 (Plugin optimizer support)
+ Python 3.13a1 3551 (Compact superinstructions)
+ Python 3.13a1 3552 (Remove LOAD_FAST__LOAD_CONST and LOAD_CONST__LOAD_FAST)
+ Python 3.13a1 3553 (Add SET_FUNCTION_ATTRIBUTE)
+ Python 3.13a1 3554 (more efficient bytecodes for f-strings)
+ Python 3.13a1 3555 (generate specialized opcodes metadata from bytecodes.c)
+ Python 3.13a1 3556 (Convert LOAD_CLOSURE to a pseudo-op)
+ Python 3.13a1 3557 (Make the conversion to boolean in jumps explicit)
+ Python 3.13a1 3558 (Reorder the stack items for CALL)
+ Python 3.13a1 3559 (Generate opcode IDs from bytecodes.c)
+ Python 3.13a1 3560 (Add RESUME_CHECK instruction)
+ Python 3.13a1 3561 (Add cache entry to branch instructions)
+ Python 3.13a1 3562 (Assign opcode IDs for internal ops in separate range)
+ Python 3.13a1 3563 (Add CALL_KW and remove KW_NAMES)
+ Python 3.13a1 3564 (Removed oparg from YIELD_VALUE, changed oparg values of RESUME)
+ Python 3.13a1 3565 (Oparg of YIELD_VALUE indicates whether it is in a yield-from)
+ Python 3.13a1 3566 (Emit JUMP_NO_INTERRUPT instead of JUMP for non-loop no-lineno cases)
+ Python 3.13a1 3567 (Reimplement line number propagation by the compiler)
+ Python 3.13a1 3568 (Change semantics of END_FOR)
+ Python 3.13a5 3569 (Specialize CONTAINS_OP)
+ Python 3.13a6 3570 (Add __firstlineno__ class attribute)
+ Python 3.14a1 3600 (Add LOAD_COMMON_CONSTANT)
+ Python 3.14a1 3601 (Fix miscompilation of private names in generic classes)
+ Python 3.14a1 3602 (Add LOAD_SPECIAL. Remove BEFORE_WITH and BEFORE_ASYNC_WITH)
+ Python 3.14a1 3603 (Remove BUILD_CONST_KEY_MAP)
+
+ Python 3.15 will start with 3650
+
+ Please don't copy-paste the same pre-release tag for new entries above!!!
+ You should always use the *upcoming* tag. For example, if 3.12a6 came out
+ a week ago, I should put "Python 3.12a7" next to my new magic number.
+
+Whenever PYC_MAGIC_NUMBER is changed, the ranges in the magic_values array in
+PC/launcher.c must also be updated.
+
+*/
+
+#define PYC_MAGIC_NUMBER 3603
+/* This is equivalent to converting PYC_MAGIC_NUMBER to 2 bytes
+ (little-endian) and then appending b'\r\n'. */
+#define PYC_MAGIC_NUMBER_TOKEN \
+ ((uint32_t)PYC_MAGIC_NUMBER | ((uint32_t)'\r' << 16) | ((uint32_t)'\n' << 24))
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif // !Py_INTERNAL_MAGIC_NUMBER_H
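
The token layout can be sanity-checked with a few lines of plain Python; this is an illustrative sketch, not part of the patch, and hard-codes the 3603 value from the #define above:

    # Verify that PYC_MAGIC_NUMBER_TOKEN is the magic number as two
    # little-endian bytes followed by b'\r\n', as the header comment states.
    PYC_MAGIC_NUMBER = 3603
    token = PYC_MAGIC_NUMBER | (ord('\r') << 16) | (ord('\n') << 24)
    as_bytes = PYC_MAGIC_NUMBER.to_bytes(2, 'little') + b'\r\n'
    assert token == int.from_bytes(as_bytes, 'little')
    print(hex(token))  # 0xa0d0e13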
diff --git a/InternalDocs/compiler.md b/InternalDocs/compiler.md
index b3dc0a48069969..52a3ab2f0a4abd 100644
--- a/InternalDocs/compiler.md
+++ b/InternalDocs/compiler.md
@@ -616,7 +616,7 @@ Important files
* [Lib/opcode.py](https://github.com/python/cpython/blob/main/Lib/opcode.py)
: opcode utilities exposed to Python.
- * [Lib/importlib/_bootstrap_external.py](https://github.com/python/cpython/blob/main/Lib/importlib/_bootstrap_external.py)
+ * [Include/internal/pycore_magic_number.h](https://github.com/python/cpython/blob/main/Include/internal/pycore_magic_number.h)
: Home of the magic number (named ``MAGIC_NUMBER``) for bytecode versioning.
diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py
index 2bb44b290e4a84..4d154dc4c25edc 100644
--- a/Lib/importlib/_bootstrap_external.py
+++ b/Lib/importlib/_bootstrap_external.py
@@ -221,280 +221,7 @@ def _write_atomic(path, data, mode=0o666):
_code_type = type(_write_atomic.__code__)
-
-# Finder/loader utility code ###############################################
-
-# Magic word to reject .pyc files generated by other Python versions.
-# It should change for each incompatible change to the bytecode.
-#
-# The value of CR and LF is incorporated so if you ever read or write
-# a .pyc file in text mode the magic number will be wrong; also, the
-# Apple MPW compiler swaps their values, botching string constants.
-#
-# There were a variety of old schemes for setting the magic number.
-# The current working scheme is to increment the previous value by
-# 10.
-#
-# Starting with the adoption of PEP 3147 in Python 3.2, every bump in magic
-# number also includes a new "magic tag", i.e. a human readable string used
-# to represent the magic number in __pycache__ directories. When you change
-# the magic number, you must also set a new unique magic tag. Generally this
-# can be named after the Python major version of the magic number bump, but
-# it can really be anything, as long as it's different than anything else
-# that's come before. The tags are included in the following table, starting
-# with Python 3.2a0.
-#
-# Known values:
-# Python 1.5: 20121
-# Python 1.5.1: 20121
-# Python 1.5.2: 20121
-# Python 1.6: 50428
-# Python 2.0: 50823
-# Python 2.0.1: 50823
-# Python 2.1: 60202
-# Python 2.1.1: 60202
-# Python 2.1.2: 60202
-# Python 2.2: 60717
-# Python 2.3a0: 62011
-# Python 2.3a0: 62021
-# Python 2.3a0: 62011 (!)
-# Python 2.4a0: 62041
-# Python 2.4a3: 62051
-# Python 2.4b1: 62061
-# Python 2.5a0: 62071
-# Python 2.5a0: 62081 (ast-branch)
-# Python 2.5a0: 62091 (with)
-# Python 2.5a0: 62092 (changed WITH_CLEANUP opcode)
-# Python 2.5b3: 62101 (fix wrong code: for x, in ...)
-# Python 2.5b3: 62111 (fix wrong code: x += yield)
-# Python 2.5c1: 62121 (fix wrong lnotab with for loops and
-# storing constants that should have been removed)
-# Python 2.5c2: 62131 (fix wrong code: for x, in ... in listcomp/genexp)
-# Python 2.6a0: 62151 (peephole optimizations and STORE_MAP opcode)
-# Python 2.6a1: 62161 (WITH_CLEANUP optimization)
-# Python 2.7a0: 62171 (optimize list comprehensions/change LIST_APPEND)
-# Python 2.7a0: 62181 (optimize conditional branches:
-# introduce POP_JUMP_IF_FALSE and POP_JUMP_IF_TRUE)
-# Python 2.7a0 62191 (introduce SETUP_WITH)
-# Python 2.7a0 62201 (introduce BUILD_SET)
-# Python 2.7a0 62211 (introduce MAP_ADD and SET_ADD)
-# Python 3000: 3000
-# 3010 (removed UNARY_CONVERT)
-# 3020 (added BUILD_SET)
-# 3030 (added keyword-only parameters)
-# 3040 (added signature annotations)
-# 3050 (print becomes a function)
-# 3060 (PEP 3115 metaclass syntax)
-# 3061 (string literals become unicode)
-# 3071 (PEP 3109 raise changes)
-# 3081 (PEP 3137 make __file__ and __name__ unicode)
-# 3091 (kill str8 interning)
-# 3101 (merge from 2.6a0, see 62151)
-# 3103 (__file__ points to source file)
-# Python 3.0a4: 3111 (WITH_CLEANUP optimization).
-# Python 3.0b1: 3131 (lexical exception stacking, including POP_EXCEPT
- #3021)
-# Python 3.1a1: 3141 (optimize list, set and dict comprehensions:
-# change LIST_APPEND and SET_ADD, add MAP_ADD #2183)
-# Python 3.1a1: 3151 (optimize conditional branches:
-# introduce POP_JUMP_IF_FALSE and POP_JUMP_IF_TRUE
- #4715)
-# Python 3.2a1: 3160 (add SETUP_WITH #6101)
-# tag: cpython-32
-# Python 3.2a2: 3170 (add DUP_TOP_TWO, remove DUP_TOPX and ROT_FOUR #9225)
-# tag: cpython-32
-# Python 3.2a3 3180 (add DELETE_DEREF #4617)
-# Python 3.3a1 3190 (__class__ super closure changed)
-# Python 3.3a1 3200 (PEP 3155 __qualname__ added #13448)
-# Python 3.3a1 3210 (added size modulo 2**32 to the pyc header #13645)
-# Python 3.3a2 3220 (changed PEP 380 implementation #14230)
-# Python 3.3a4 3230 (revert changes to implicit __class__ closure #14857)
-# Python 3.4a1 3250 (evaluate positional default arguments before
-# keyword-only defaults #16967)
-# Python 3.4a1 3260 (add LOAD_CLASSDEREF; allow locals of class to override
-# free vars #17853)
-# Python 3.4a1 3270 (various tweaks to the __class__ closure #12370)
-# Python 3.4a1 3280 (remove implicit class argument)
-# Python 3.4a4 3290 (changes to __qualname__ computation #19301)
-# Python 3.4a4 3300 (more changes to __qualname__ computation #19301)
-# Python 3.4rc2 3310 (alter __qualname__ computation #20625)
-# Python 3.5a1 3320 (PEP 465: Matrix multiplication operator #21176)
-# Python 3.5b1 3330 (PEP 448: Additional Unpacking Generalizations #2292)
-# Python 3.5b2 3340 (fix dictionary display evaluation order #11205)
-# Python 3.5b3 3350 (add GET_YIELD_FROM_ITER opcode #24400)
-# Python 3.5.2 3351 (fix BUILD_MAP_UNPACK_WITH_CALL opcode #27286)
-# Python 3.6a0 3360 (add FORMAT_VALUE opcode #25483)
-# Python 3.6a1 3361 (lineno delta of code.co_lnotab becomes signed #26107)
-# Python 3.6a2 3370 (16 bit wordcode #26647)
-# Python 3.6a2 3371 (add BUILD_CONST_KEY_MAP opcode #27140)
-# Python 3.6a2 3372 (MAKE_FUNCTION simplification, remove MAKE_CLOSURE
-# #27095)
-# Python 3.6b1 3373 (add BUILD_STRING opcode #27078)
-# Python 3.6b1 3375 (add SETUP_ANNOTATIONS and STORE_ANNOTATION opcodes
-# #27985)
-# Python 3.6b1 3376 (simplify CALL_FUNCTIONs & BUILD_MAP_UNPACK_WITH_CALL
- #27213)
-# Python 3.6b1 3377 (set __class__ cell from type.__new__ #23722)
-# Python 3.6b2 3378 (add BUILD_TUPLE_UNPACK_WITH_CALL #28257)
-# Python 3.6rc1 3379 (more thorough __class__ validation #23722)
-# Python 3.7a1 3390 (add LOAD_METHOD and CALL_METHOD opcodes #26110)
-# Python 3.7a2 3391 (update GET_AITER #31709)
-# Python 3.7a4 3392 (PEP 552: Deterministic pycs #31650)
-# Python 3.7b1 3393 (remove STORE_ANNOTATION opcode #32550)
-# Python 3.7b5 3394 (restored docstring as the first stmt in the body;
-# this might affected the first line number #32911)
-# Python 3.8a1 3400 (move frame block handling to compiler #17611)
-# Python 3.8a1 3401 (add END_ASYNC_FOR #33041)
-# Python 3.8a1 3410 (PEP570 Python Positional-Only Parameters #36540)
-# Python 3.8b2 3411 (Reverse evaluation order of key: value in dict
-# comprehensions #35224)
-# Python 3.8b2 3412 (Swap the position of positional args and positional
-# only args in ast.arguments #37593)
-# Python 3.8b4 3413 (Fix "break" and "continue" in "finally" #37830)
-# Python 3.9a0 3420 (add LOAD_ASSERTION_ERROR #34880)
-# Python 3.9a0 3421 (simplified bytecode for with blocks #32949)
-# Python 3.9a0 3422 (remove BEGIN_FINALLY, END_FINALLY, CALL_FINALLY, POP_FINALLY bytecodes #33387)
-# Python 3.9a2 3423 (add IS_OP, CONTAINS_OP and JUMP_IF_NOT_EXC_MATCH bytecodes #39156)
-# Python 3.9a2 3424 (simplify bytecodes for *value unpacking)
-# Python 3.9a2 3425 (simplify bytecodes for **value unpacking)
-# Python 3.10a1 3430 (Make 'annotations' future by default)
-# Python 3.10a1 3431 (New line number table format -- PEP 626)
-# Python 3.10a2 3432 (Function annotation for MAKE_FUNCTION is changed from dict to tuple bpo-42202)
-# Python 3.10a2 3433 (RERAISE restores f_lasti if oparg != 0)
-# Python 3.10a6 3434 (PEP 634: Structural Pattern Matching)
-# Python 3.10a7 3435 Use instruction offsets (as opposed to byte offsets).
-# Python 3.10b1 3436 (Add GEN_START bytecode #43683)
-# Python 3.10b1 3437 (Undo making 'annotations' future by default - We like to dance among core devs!)
-# Python 3.10b1 3438 Safer line number table handling.
-# Python 3.10b1 3439 (Add ROT_N)
-# Python 3.11a1 3450 Use exception table for unwinding ("zero cost" exception handling)
-# Python 3.11a1 3451 (Add CALL_METHOD_KW)
-# Python 3.11a1 3452 (drop nlocals from marshaled code objects)
-# Python 3.11a1 3453 (add co_fastlocalnames and co_fastlocalkinds)
-# Python 3.11a1 3454 (compute cell offsets relative to locals bpo-43693)
-# Python 3.11a1 3455 (add MAKE_CELL bpo-43693)
-# Python 3.11a1 3456 (interleave cell args bpo-43693)
-# Python 3.11a1 3457 (Change localsplus to a bytes object bpo-43693)
-# Python 3.11a1 3458 (imported objects now don't use LOAD_METHOD/CALL_METHOD)
-# Python 3.11a1 3459 (PEP 657: add end line numbers and column offsets for instructions)
-# Python 3.11a1 3460 (Add co_qualname field to PyCodeObject bpo-44530)
-# Python 3.11a1 3461 (JUMP_ABSOLUTE must jump backwards)
-# Python 3.11a2 3462 (bpo-44511: remove COPY_DICT_WITHOUT_KEYS, change
-# MATCH_CLASS and MATCH_KEYS, and add COPY)
-# Python 3.11a3 3463 (bpo-45711: JUMP_IF_NOT_EXC_MATCH no longer pops the
-# active exception)
-# Python 3.11a3 3464 (bpo-45636: Merge numeric BINARY_*/INPLACE_* into
-# BINARY_OP)
-# Python 3.11a3 3465 (Add COPY_FREE_VARS opcode)
-# Python 3.11a4 3466 (bpo-45292: PEP-654 except*)
-# Python 3.11a4 3467 (Change CALL_xxx opcodes)
-# Python 3.11a4 3468 (Add SEND opcode)
-# Python 3.11a4 3469 (bpo-45711: remove type, traceback from exc_info)
-# Python 3.11a4 3470 (bpo-46221: PREP_RERAISE_STAR no longer pushes lasti)
-# Python 3.11a4 3471 (bpo-46202: remove pop POP_EXCEPT_AND_RERAISE)
-# Python 3.11a4 3472 (bpo-46009: replace GEN_START with POP_TOP)
-# Python 3.11a4 3473 (Add POP_JUMP_IF_NOT_NONE/POP_JUMP_IF_NONE opcodes)
-# Python 3.11a4 3474 (Add RESUME opcode)
-# Python 3.11a5 3475 (Add RETURN_GENERATOR opcode)
-# Python 3.11a5 3476 (Add ASYNC_GEN_WRAP opcode)
-# Python 3.11a5 3477 (Replace DUP_TOP/DUP_TOP_TWO with COPY and
-# ROT_TWO/ROT_THREE/ROT_FOUR/ROT_N with SWAP)
-# Python 3.11a5 3478 (New CALL opcodes)
-# Python 3.11a5 3479 (Add PUSH_NULL opcode)
-# Python 3.11a5 3480 (New CALL opcodes, second iteration)
-# Python 3.11a5 3481 (Use inline cache for BINARY_OP)
-# Python 3.11a5 3482 (Use inline caching for UNPACK_SEQUENCE and LOAD_GLOBAL)
-# Python 3.11a5 3483 (Use inline caching for COMPARE_OP and BINARY_SUBSCR)
-# Python 3.11a5 3484 (Use inline caching for LOAD_ATTR, LOAD_METHOD, and
-# STORE_ATTR)
-# Python 3.11a5 3485 (Add an oparg to GET_AWAITABLE)
-# Python 3.11a6 3486 (Use inline caching for PRECALL and CALL)
-# Python 3.11a6 3487 (Remove the adaptive "oparg counter" mechanism)
-# Python 3.11a6 3488 (LOAD_GLOBAL can push additional NULL)
-# Python 3.11a6 3489 (Add JUMP_BACKWARD, remove JUMP_ABSOLUTE)
-# Python 3.11a6 3490 (remove JUMP_IF_NOT_EXC_MATCH, add CHECK_EXC_MATCH)
-# Python 3.11a6 3491 (remove JUMP_IF_NOT_EG_MATCH, add CHECK_EG_MATCH,
-# add JUMP_BACKWARD_NO_INTERRUPT, make JUMP_NO_INTERRUPT virtual)
-# Python 3.11a7 3492 (make POP_JUMP_IF_NONE/NOT_NONE/TRUE/FALSE relative)
-# Python 3.11a7 3493 (Make JUMP_IF_TRUE_OR_POP/JUMP_IF_FALSE_OR_POP relative)
-# Python 3.11a7 3494 (New location info table)
-# Python 3.11b4 3495 (Set line number of module's RESUME instr to 0 per PEP 626)
-# Python 3.12a1 3500 (Remove PRECALL opcode)
-# Python 3.12a1 3501 (YIELD_VALUE oparg == stack_depth)
-# Python 3.12a1 3502 (LOAD_FAST_CHECK, no NULL-check in LOAD_FAST)
-# Python 3.12a1 3503 (Shrink LOAD_METHOD cache)
-# Python 3.12a1 3504 (Merge LOAD_METHOD back into LOAD_ATTR)
-# Python 3.12a1 3505 (Specialization/Cache for FOR_ITER)
-# Python 3.12a1 3506 (Add BINARY_SLICE and STORE_SLICE instructions)
-# Python 3.12a1 3507 (Set lineno of module's RESUME to 0)
-# Python 3.12a1 3508 (Add CLEANUP_THROW)
-# Python 3.12a1 3509 (Conditional jumps only jump forward)
-# Python 3.12a2 3510 (FOR_ITER leaves iterator on the stack)
-# Python 3.12a2 3511 (Add STOPITERATION_ERROR instruction)
-# Python 3.12a2 3512 (Remove all unused consts from code objects)
-# Python 3.12a4 3513 (Add CALL_INTRINSIC_1 instruction, removed STOPITERATION_ERROR, PRINT_EXPR, IMPORT_STAR)
-# Python 3.12a4 3514 (Remove ASYNC_GEN_WRAP, LIST_TO_TUPLE, and UNARY_POSITIVE)
-# Python 3.12a5 3515 (Embed jump mask in COMPARE_OP oparg)
-# Python 3.12a5 3516 (Add COMPARE_AND_BRANCH instruction)
-# Python 3.12a5 3517 (Change YIELD_VALUE oparg to exception block depth)
-# Python 3.12a6 3518 (Add RETURN_CONST instruction)
-# Python 3.12a6 3519 (Modify SEND instruction)
-# Python 3.12a6 3520 (Remove PREP_RERAISE_STAR, add CALL_INTRINSIC_2)
-# Python 3.12a7 3521 (Shrink the LOAD_GLOBAL caches)
-# Python 3.12a7 3522 (Removed JUMP_IF_FALSE_OR_POP/JUMP_IF_TRUE_OR_POP)
-# Python 3.12a7 3523 (Convert COMPARE_AND_BRANCH back to COMPARE_OP)
-# Python 3.12a7 3524 (Shrink the BINARY_SUBSCR caches)
-# Python 3.12b1 3525 (Shrink the CALL caches)
-# Python 3.12b1 3526 (Add instrumentation support)
-# Python 3.12b1 3527 (Add LOAD_SUPER_ATTR)
-# Python 3.12b1 3528 (Add LOAD_SUPER_ATTR_METHOD specialization)
-# Python 3.12b1 3529 (Inline list/dict/set comprehensions)
-# Python 3.12b1 3530 (Shrink the LOAD_SUPER_ATTR caches)
-# Python 3.12b1 3531 (Add PEP 695 changes)
-# Python 3.13a1 3550 (Plugin optimizer support)
-# Python 3.13a1 3551 (Compact superinstructions)
-# Python 3.13a1 3552 (Remove LOAD_FAST__LOAD_CONST and LOAD_CONST__LOAD_FAST)
-# Python 3.13a1 3553 (Add SET_FUNCTION_ATTRIBUTE)
-# Python 3.13a1 3554 (more efficient bytecodes for f-strings)
-# Python 3.13a1 3555 (generate specialized opcodes metadata from bytecodes.c)
-# Python 3.13a1 3556 (Convert LOAD_CLOSURE to a pseudo-op)
-# Python 3.13a1 3557 (Make the conversion to boolean in jumps explicit)
-# Python 3.13a1 3558 (Reorder the stack items for CALL)
-# Python 3.13a1 3559 (Generate opcode IDs from bytecodes.c)
-# Python 3.13a1 3560 (Add RESUME_CHECK instruction)
-# Python 3.13a1 3561 (Add cache entry to branch instructions)
-# Python 3.13a1 3562 (Assign opcode IDs for internal ops in separate range)
-# Python 3.13a1 3563 (Add CALL_KW and remove KW_NAMES)
-# Python 3.13a1 3564 (Removed oparg from YIELD_VALUE, changed oparg values of RESUME)
-# Python 3.13a1 3565 (Oparg of YIELD_VALUE indicates whether it is in a yield-from)
-# Python 3.13a1 3566 (Emit JUMP_NO_INTERRUPT instead of JUMP for non-loop no-lineno cases)
-# Python 3.13a1 3567 (Reimplement line number propagation by the compiler)
-# Python 3.13a1 3568 (Change semantics of END_FOR)
-# Python 3.13a5 3569 (Specialize CONTAINS_OP)
-# Python 3.13a6 3570 (Add __firstlineno__ class attribute)
-# Python 3.14a1 3600 (Add LOAD_COMMON_CONSTANT)
-# Python 3.14a1 3601 (Fix miscompilation of private names in generic classes)
-# Python 3.14a1 3602 (Add LOAD_SPECIAL. Remove BEFORE_WITH and BEFORE_ASYNC_WITH)
-# Python 3.14a1 3603 (Remove BUILD_CONST_KEY_MAP)
-
-# Python 3.15 will start with 3650
-
-# Please don't copy-paste the same pre-release tag for new entries above!!!
-# You should always use the *upcoming* tag. For example, if 3.12a6 came out
-# a week ago, I should put "Python 3.12a7" next to my new magic number.
-
-# MAGIC must change whenever the bytecode emitted by the compiler may no
-# longer be understood by older implementations of the eval loop (usually
-# due to the addition of new opcodes).
-#
-# Starting with Python 3.11, Python 3.n starts with magic number 2900+50n.
-#
-# Whenever MAGIC_NUMBER is changed, the ranges in the magic_values array
-# in PC/launcher.c must also be updated.
-
-MAGIC_NUMBER = (3603).to_bytes(2, 'little') + b'\r\n'
-
-_RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little') # For import.c
+MAGIC_NUMBER = (_imp.pyc_magic_number).to_bytes(2, 'little') + b'\r\n'
_PYCACHE = '__pycache__'
_OPT = 'opt-'
@@ -1133,7 +860,7 @@ def get_code(self, fullname):
_imp.check_hash_based_pycs == 'always')):
source_bytes = self.get_data(source_path)
source_hash = _imp.source_hash(
- _RAW_MAGIC_NUMBER,
+ _imp.pyc_magic_number_token,
source_bytes,
)
_validate_hash_pyc(data, source_hash, fullname,
@@ -1162,7 +889,7 @@ def get_code(self, fullname):
source_mtime is not None):
if hash_based:
if source_hash is None:
- source_hash = _imp.source_hash(_RAW_MAGIC_NUMBER,
+ source_hash = _imp.source_hash(_imp.pyc_magic_number_token,
source_bytes)
data = _code_to_hash_pyc(code_object, source_hash, check_source)
else:
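
As an illustrative aside (not part of the patch), the rederived MAGIC_NUMBER still prefixes every .pyc header; the scratch file name here is made up:

    # Byte-compile a throwaway module and check that the .pyc header starts
    # with importlib.util.MAGIC_NUMBER (2-byte little-endian magic + b'\r\n').
    import importlib.util
    import pathlib
    import py_compile

    src = pathlib.Path("example.py")          # hypothetical scratch file
    src.write_text("x = 1\n")
    pyc_path = py_compile.compile(str(src))   # __pycache__/example.cpython-XX.pyc
    assert pathlib.Path(pyc_path).read_bytes()[:4] == importlib.util.MAGIC_NUMBER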
diff --git a/Lib/importlib/util.py b/Lib/importlib/util.py
index 8403ef9b44ad1a..2b564e9b52e0cb 100644
--- a/Lib/importlib/util.py
+++ b/Lib/importlib/util.py
@@ -5,7 +5,6 @@
from ._bootstrap import spec_from_loader
from ._bootstrap import _find_spec
from ._bootstrap_external import MAGIC_NUMBER
-from ._bootstrap_external import _RAW_MAGIC_NUMBER
from ._bootstrap_external import cache_from_source
from ._bootstrap_external import decode_source
from ._bootstrap_external import source_from_cache
@@ -18,7 +17,7 @@
def source_hash(source_bytes):
"Return the hash of *source_bytes* as used in hash-based pyc files."
- return _imp.source_hash(_RAW_MAGIC_NUMBER, source_bytes)
+ return _imp.source_hash(_imp.pyc_magic_number_token, source_bytes)
def resolve_name(name, package):
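
A small usage sketch of the wrapper above (illustrative only; the source bytes are arbitrary):

    # source_hash() returns the 8-byte hash that hash-based .pyc files
    # (PEP 552) embed; it is now keyed off _imp.pyc_magic_number_token.
    import importlib.util

    digest = importlib.util.source_hash(b"x = 1\n")
    print(len(digest), digest.hex())  # 8, followed by 16 hex digits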
diff --git a/Lib/test/test_import/__init__.py b/Lib/test/test_import/__init__.py
index e29097baaf53ae..56c6ffe93fce37 100644
--- a/Lib/test/test_import/__init__.py
+++ b/Lib/test/test_import/__init__.py
@@ -3113,6 +3113,15 @@ def test_pyimport_addmodule_create(self):
self.assertIs(mod, sys.modules[name])
+@cpython_only
+class TestMagicNumber(unittest.TestCase):
+ def test_magic_number_endianness(self):
+ magic_number = (_imp.pyc_magic_number).to_bytes(2, 'little') + b'\r\n'
+ raw_magic_number = int.from_bytes(magic_number, 'little')
+
+ self.assertEqual(raw_magic_number, _imp.pyc_magic_number_token)
+
+
if __name__ == '__main__':
# Test needs to be a package, so we can do relative imports.
unittest.main()
diff --git a/Lib/zipimport.py b/Lib/zipimport.py
index 68f031f89c9996..f2724dd0268358 100644
--- a/Lib/zipimport.py
+++ b/Lib/zipimport.py
@@ -705,7 +705,7 @@ def _unmarshal_code(self, pathname, fullpath, fullname, data):
source_bytes = _get_pyc_source(self, fullpath)
if source_bytes is not None:
source_hash = _imp.source_hash(
- _bootstrap_external._RAW_MAGIC_NUMBER,
+ _imp.pyc_magic_number_token,
source_bytes,
)
diff --git a/Python/import.c b/Python/import.c
index 40b7feac001d6e..540874a0f0414f 100644
--- a/Python/import.c
+++ b/Python/import.c
@@ -6,6 +6,7 @@
#include "pycore_import.h" // _PyImport_BootstrapImp()
#include "pycore_initconfig.h" // _PyStatus_OK()
#include "pycore_interp.h" // struct _import_runtime_state
+#include "pycore_magic_number.h" // PYC_MAGIC_NUMBER
#include "pycore_namespace.h" // _PyNamespace_Type
#include "pycore_object.h" // _Py_SetImmortal()
#include "pycore_pyerrors.h" // _PyErr_SetString()
@@ -2475,23 +2476,9 @@ _PyImport_GetBuiltinModuleNames(void)
long
PyImport_GetMagicNumber(void)
{
- long res;
- PyInterpreterState *interp = _PyInterpreterState_GET();
- PyObject *external, *pyc_magic;
-
- external = PyObject_GetAttrString(IMPORTLIB(interp), "_bootstrap_external");
- if (external == NULL)
- return -1;
- pyc_magic = PyObject_GetAttrString(external, "_RAW_MAGIC_NUMBER");
- Py_DECREF(external);
- if (pyc_magic == NULL)
- return -1;
- res = PyLong_AsLong(pyc_magic);
- Py_DECREF(pyc_magic);
- return res;
+ return PYC_MAGIC_NUMBER_TOKEN;
}
-
extern const char * _PySys_ImplCacheTag;
const char *
@@ -4823,6 +4810,16 @@ imp_module_exec(PyObject *module)
return -1;
}
+ if (PyModule_AddIntConstant(module, "pyc_magic_number", PYC_MAGIC_NUMBER) < 0) {
+ return -1;
+ }
+
+ if (PyModule_AddIntConstant(
+ module, "pyc_magic_number_token", PYC_MAGIC_NUMBER_TOKEN) < 0)
+ {
+ return -1;
+ }
+
return 0;
}
From c68cb8e0c9bd75ded25578c2fba6469e55a06e93 Mon Sep 17 00:00:00 2001
From: Thomas Grainger
Date: Tue, 30 Jul 2024 20:42:25 +0100
Subject: [PATCH 084/139] Remove outdated note about instance methods from
datamodel.rst (#122471)
---
Doc/reference/datamodel.rst | 9 +--------
1 file changed, 1 insertion(+), 8 deletions(-)
diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst
index 144c6f78ccd443..2576f9a07284eb 100644
--- a/Doc/reference/datamodel.rst
+++ b/Doc/reference/datamodel.rst
@@ -730,14 +730,7 @@ When an instance method object is derived from a :class:`classmethod` object, th
itself, so that calling either ``x.f(1)`` or ``C.f(1)`` is equivalent to
calling ``f(C,1)`` where ``f`` is the underlying function.
-Note that the transformation from :ref:`function object <user-defined-funcs>`
-to instance method
-object happens each time the attribute is retrieved from the instance. In
-some cases, a fruitful optimization is to assign the attribute to a local
-variable and call that local variable. Also notice that this
-transformation only happens for user-defined functions; other callable
-objects (and all non-callable objects) are retrieved without
-transformation. It is also important to note that user-defined functions
+It is important to note that user-defined functions
which are attributes of a class instance are not converted to bound
methods; this *only* happens when the function is an attribute of the
class.
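
A brief, hedged illustration of the retained rule (the names are invented for the example):

    # Only functions looked up on the *class* become bound methods; a function
    # stored directly on the instance is returned without transformation.
    class C:
        def method(self):
            return "from class"

    def plain(self):
        return "from instance"

    c = C()
    c.attr = plain
    print(type(c.method).__name__)  # 'method'   (bound method)
    print(type(c.attr).__name__)    # 'function' (no transformation)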
From 1cac0908fb6866c30b7fe106bc8d6cd03c7977f9 Mon Sep 17 00:00:00 2001
From: Nate Ohlson
Date: Tue, 30 Jul 2024 14:49:15 -0500
Subject: [PATCH 085/139] gh-112301: Add argument aliases and tee compiler
output for check warnings (GH-122465)
Also remove superfluous shebang from the warning check script
---
.github/workflows/reusable-ubuntu.yml | 2 +-
Tools/build/check_warnings.py | 5 ++++-
2 files changed, 5 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/reusable-ubuntu.yml b/.github/workflows/reusable-ubuntu.yml
index c6289a74e9a5f6..8dd5f559585368 100644
--- a/.github/workflows/reusable-ubuntu.yml
+++ b/.github/workflows/reusable-ubuntu.yml
@@ -75,7 +75,7 @@ jobs:
${{ fromJSON(inputs.free-threading) && '--disable-gil' || '' }}
- name: Build CPython out-of-tree
working-directory: ${{ env.CPYTHON_BUILDDIR }}
- run: make -j4 &> compiler_output.txt
+ run: set -o pipefail; make -j4 2>&1 | tee compiler_output.txt
- name: Display build info
working-directory: ${{ env.CPYTHON_BUILDDIR }}
run: make pythoninfo
diff --git a/Tools/build/check_warnings.py b/Tools/build/check_warnings.py
index f0c0067f4ab255..af9f7f169ad943 100644
--- a/Tools/build/check_warnings.py
+++ b/Tools/build/check_warnings.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python3
"""
Parses compiler output with -fdiagnostics-format=json and checks that warnings
exist only in files that are expected to have warnings.
@@ -114,24 +113,28 @@ def get_unexpected_improvements(
def main(argv: list[str] | None = None) -> int:
parser = argparse.ArgumentParser()
parser.add_argument(
+ "-c",
"--compiler-output-file-path",
type=str,
required=True,
help="Path to the compiler output file",
)
parser.add_argument(
+ "-i",
"--warning-ignore-file-path",
type=str,
required=True,
help="Path to the warning ignore file",
)
parser.add_argument(
+ "-x",
"--fail-on-regression",
action="store_true",
default=False,
help="Flag to fail if new warnings are found",
)
parser.add_argument(
+ "-X",
"--fail-on-improvement",
action="store_true",
default=False,
From 5912487938ac4b517209082ab9e6d2d3d0fb4f4d Mon Sep 17 00:00:00 2001
From: Petr Viktorin
Date: Wed, 31 Jul 2024 00:11:00 +0200
Subject: [PATCH 086/139] gh-120906: Support arbitrary hashable keys in
FrameLocalsProxy (GH-122309)
Co-authored-by: Alyssa Coghlan
---
Lib/test/test_frame.py | 127 ++++++++++++++++
...-07-26-13-56-32.gh-issue-120906.qBh2I9.rst | 1 +
Objects/frameobject.c | 140 ++++++++++--------
3 files changed, 208 insertions(+), 60 deletions(-)
create mode 100644 Misc/NEWS.d/next/Core_and_Builtins/2024-07-26-13-56-32.gh-issue-120906.qBh2I9.rst
diff --git a/Lib/test/test_frame.py b/Lib/test/test_frame.py
index b7ef6cefaabbc0..ca88e657367d9a 100644
--- a/Lib/test/test_frame.py
+++ b/Lib/test/test_frame.py
@@ -15,6 +15,7 @@
from test import support
from test.support import import_helper, threading_helper
from test.support.script_helper import assert_python_ok
+from test import mapping_tests
class ClearTest(unittest.TestCase):
@@ -431,6 +432,132 @@ def test_is_mapping(self):
kind = "other"
self.assertEqual(kind, "mapping")
+ def _x_stringlikes(self):
+ class StringSubclass(str):
+ pass
+
+ class ImpostorX:
+ def __hash__(self):
+ return hash('x')
+
+ def __eq__(self, other):
+ return other == 'x'
+
+ return StringSubclass('x'), ImpostorX(), 'x'
+
+ def test_proxy_key_stringlikes_overwrite(self):
+ def f(obj):
+ x = 1
+ proxy = sys._getframe().f_locals
+ proxy[obj] = 2
+ return (
+ list(proxy.keys()),
+ dict(proxy),
+ proxy
+ )
+
+ for obj in self._x_stringlikes():
+ with self.subTest(cls=type(obj).__name__):
+
+ keys_snapshot, proxy_snapshot, proxy = f(obj)
+ expected_keys = ['obj', 'x', 'proxy']
+ expected_dict = {'obj': 'x', 'x': 2, 'proxy': proxy}
+ self.assertEqual(proxy.keys(), expected_keys)
+ self.assertEqual(proxy, expected_dict)
+ self.assertEqual(keys_snapshot, expected_keys)
+ self.assertEqual(proxy_snapshot, expected_dict)
+
+ def test_proxy_key_stringlikes_first_write(self):
+ def f(obj):
+ proxy = sys._getframe().f_locals
+ proxy[obj] = 2
+ self.assertEqual(x, 2)
+ x = 1
+
+ for obj in self._x_stringlikes():
+ with self.subTest(cls=type(obj).__name__):
+ f(obj)
+
+ def test_proxy_key_unhashables(self):
+ class StringSubclass(str):
+ __hash__ = None
+
+ class ObjectSubclass:
+ __hash__ = None
+
+ proxy = sys._getframe().f_locals
+
+ for obj in StringSubclass('x'), ObjectSubclass():
+ with self.subTest(cls=type(obj).__name__):
+ with self.assertRaises(TypeError):
+ proxy[obj]
+ with self.assertRaises(TypeError):
+ proxy[obj] = 0
+
+
+class FrameLocalsProxyMappingTests(mapping_tests.TestHashMappingProtocol):
+ """Test that FrameLocalsProxy behaves like a Mapping (with exceptions)"""
+
+ def _f(*args, **kwargs):
+ def _f():
+ return sys._getframe().f_locals
+ return _f()
+ type2test = _f
+
+ @unittest.skipIf(True, 'Locals proxies for different frames never compare as equal')
+ def test_constructor(self):
+ pass
+
+ @unittest.skipIf(True, 'Unlike a mapping: del proxy[key] fails')
+ def test_write(self):
+ pass
+
+ @unittest.skipIf(True, 'Unlike a mapping: no proxy.popitem')
+ def test_popitem(self):
+ pass
+
+ @unittest.skipIf(True, 'Unlike a mapping: no proxy.pop')
+ def test_pop(self):
+ pass
+
+ @unittest.skipIf(True, 'Unlike a mapping: no proxy.clear')
+ def test_clear(self):
+ pass
+
+ @unittest.skipIf(True, 'Unlike a mapping: no proxy.fromkeys')
+ def test_fromkeys(self):
+ pass
+
+ # no del
+ def test_getitem(self):
+ mapping_tests.BasicTestMappingProtocol.test_getitem(self)
+ d = self._full_mapping({'a': 1, 'b': 2})
+ self.assertEqual(d['a'], 1)
+ self.assertEqual(d['b'], 2)
+ d['c'] = 3
+ d['a'] = 4
+ self.assertEqual(d['c'], 3)
+ self.assertEqual(d['a'], 4)
+
+ @unittest.skipIf(True, 'Unlike a mapping: no proxy.update')
+ def test_update(self):
+ pass
+
+ # proxy.copy returns a regular dict
+ def test_copy(self):
+ d = self._full_mapping({1:1, 2:2, 3:3})
+ self.assertEqual(d.copy(), {1:1, 2:2, 3:3})
+ d = self._empty_mapping()
+ self.assertEqual(d.copy(), d)
+ self.assertRaises(TypeError, d.copy, None)
+
+ self.assertIsInstance(d.copy(), dict)
+
+ @unittest.skipIf(True, 'Locals proxies for different frames never compare as equal')
+ def test_eq(self):
+ pass
+
+
class TestFrameCApi(unittest.TestCase):
def test_basic(self):
x = 1
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-07-26-13-56-32.gh-issue-120906.qBh2I9.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-07-26-13-56-32.gh-issue-120906.qBh2I9.rst
new file mode 100644
index 00000000000000..2b753bc37d4a39
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-07-26-13-56-32.gh-issue-120906.qBh2I9.rst
@@ -0,0 +1 @@
+:attr:`frame.f_locals` now supports arbitrary hashable objects as keys.
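
A hedged sketch of the behaviour, modelled on the tests above; it assumes an interpreter with this change and the PEP 667 frame-locals proxy:

    import sys

    class ImpostorX:
        # Hashes and compares equal to the string 'x', so the proxy should
        # resolve it to the fast local named 'x'.
        def __hash__(self):
            return hash('x')
        def __eq__(self, other):
            return other == 'x'

    def demo():
        x = 1
        proxy = sys._getframe().f_locals   # FrameLocalsProxy
        proxy[ImpostorX()] = 2             # non-str hashable key reaches the local
        print(x)                           # 2
        try:
            proxy[[1, 2]]                  # unhashable key
        except TypeError:
            print("unhashable keys are rejected")

    demo()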
diff --git a/Objects/frameobject.c b/Objects/frameobject.c
index 88093eb9071ae4..a8be7d75371c16 100644
--- a/Objects/frameobject.c
+++ b/Objects/frameobject.c
@@ -53,22 +53,27 @@ static int
framelocalsproxy_getkeyindex(PyFrameObject *frame, PyObject* key, bool read)
{
/*
- * Returns the fast locals index of the key
+ * Returns -2 (!) if an error occurred; exception will be set.
+ * Returns the fast locals index of the key on success:
* - if read == true, returns the index if the value is not NULL
* - if read == false, returns the index if the value is not hidden
+ * Otherwise returns -1.
*/
- assert(PyUnicode_CheckExact(key));
-
PyCodeObject *co = _PyFrame_GetCode(frame->f_frame);
- int found_key = false;
+
+ // Ensure that the key is hashable.
+ Py_hash_t key_hash = PyObject_Hash(key);
+ if (key_hash == -1) {
+ return -2;
+ }
+ bool found = false;
// We do 2 loops here because it's highly possible the key is interned
// and we can do a pointer comparison.
for (int i = 0; i < co->co_nlocalsplus; i++) {
PyObject *name = PyTuple_GET_ITEM(co->co_localsplusnames, i);
if (name == key) {
- found_key = true;
if (read) {
if (framelocalsproxy_getval(frame->f_frame, co, i) != NULL) {
return i;
@@ -78,23 +83,35 @@ framelocalsproxy_getkeyindex(PyFrameObject *frame, PyObject* key, bool read)
return i;
}
}
+ found = true;
}
}
-
- if (!found_key) {
- // This is unlikely, but we need to make sure. This means the key
- // is not interned.
- for (int i = 0; i < co->co_nlocalsplus; i++) {
- PyObject *name = PyTuple_GET_ITEM(co->co_localsplusnames, i);
- if (_PyUnicode_EQ(name, key)) {
- if (read) {
- if (framelocalsproxy_getval(frame->f_frame, co, i) != NULL) {
- return i;
- }
- } else {
- if (!(_PyLocals_GetKind(co->co_localspluskinds, i) & CO_FAST_HIDDEN)) {
- return i;
- }
+ if (found) {
+ // This is an attempt to read an unset local variable or
+ // write to a variable that is hidden from regular write operations
+ return -1;
+ }
+ // This is unlikely, but we need to make sure. This means the key
+ // is not interned.
+ for (int i = 0; i < co->co_nlocalsplus; i++) {
+ PyObject *name = PyTuple_GET_ITEM(co->co_localsplusnames, i);
+ Py_hash_t name_hash = PyObject_Hash(name);
+ assert(name_hash != -1); // keys are exact unicode
+ if (name_hash != key_hash) {
+ continue;
+ }
+ int same = PyObject_RichCompareBool(name, key, Py_EQ);
+ if (same < 0) {
+ return -2;
+ }
+ if (same) {
+ if (read) {
+ if (framelocalsproxy_getval(frame->f_frame, co, i) != NULL) {
+ return i;
+ }
+ } else {
+ if (!(_PyLocals_GetKind(co->co_localspluskinds, i) & CO_FAST_HIDDEN)) {
+ return i;
}
}
}
@@ -109,13 +126,14 @@ framelocalsproxy_getitem(PyObject *self, PyObject *key)
PyFrameObject* frame = ((PyFrameLocalsProxyObject*)self)->frame;
PyCodeObject* co = _PyFrame_GetCode(frame->f_frame);
- if (PyUnicode_CheckExact(key)) {
- int i = framelocalsproxy_getkeyindex(frame, key, true);
- if (i >= 0) {
- PyObject *value = framelocalsproxy_getval(frame->f_frame, co, i);
- assert(value != NULL);
- return Py_NewRef(value);
- }
+ int i = framelocalsproxy_getkeyindex(frame, key, true);
+ if (i == -2) {
+ return NULL;
+ }
+ if (i >= 0) {
+ PyObject *value = framelocalsproxy_getval(frame->f_frame, co, i);
+ assert(value != NULL);
+ return Py_NewRef(value);
}
// Okay not in the fast locals, try extra locals
@@ -145,37 +163,38 @@ framelocalsproxy_setitem(PyObject *self, PyObject *key, PyObject *value)
return -1;
}
- if (PyUnicode_CheckExact(key)) {
- int i = framelocalsproxy_getkeyindex(frame, key, false);
- if (i >= 0) {
- _Py_Executors_InvalidateDependency(PyInterpreterState_Get(), co, 1);
-
- _PyLocals_Kind kind = _PyLocals_GetKind(co->co_localspluskinds, i);
- _PyStackRef oldvalue = fast[i];
- PyObject *cell = NULL;
- if (kind == CO_FAST_FREE) {
- // The cell was set when the frame was created from
- // the function's closure.
- assert(oldvalue.bits != 0 && PyCell_Check(PyStackRef_AsPyObjectBorrow(oldvalue)));
- cell = PyStackRef_AsPyObjectBorrow(oldvalue);
- } else if (kind & CO_FAST_CELL && oldvalue.bits != 0) {
- PyObject *as_obj = PyStackRef_AsPyObjectBorrow(oldvalue);
- if (PyCell_Check(as_obj)) {
- cell = as_obj;
- }
+ int i = framelocalsproxy_getkeyindex(frame, key, false);
+ if (i == -2) {
+ return -1;
+ }
+ if (i >= 0) {
+ _Py_Executors_InvalidateDependency(PyInterpreterState_Get(), co, 1);
+
+ _PyLocals_Kind kind = _PyLocals_GetKind(co->co_localspluskinds, i);
+ _PyStackRef oldvalue = fast[i];
+ PyObject *cell = NULL;
+ if (kind == CO_FAST_FREE) {
+ // The cell was set when the frame was created from
+ // the function's closure.
+ assert(oldvalue.bits != 0 && PyCell_Check(PyStackRef_AsPyObjectBorrow(oldvalue)));
+ cell = PyStackRef_AsPyObjectBorrow(oldvalue);
+ } else if (kind & CO_FAST_CELL && oldvalue.bits != 0) {
+ PyObject *as_obj = PyStackRef_AsPyObjectBorrow(oldvalue);
+ if (PyCell_Check(as_obj)) {
+ cell = as_obj;
}
- if (cell != NULL) {
- PyObject *oldvalue_o = PyCell_GET(cell);
- if (value != oldvalue_o) {
- PyCell_SET(cell, Py_XNewRef(value));
- Py_XDECREF(oldvalue_o);
- }
- } else if (value != PyStackRef_AsPyObjectBorrow(oldvalue)) {
- PyStackRef_XCLOSE(fast[i]);
- fast[i] = PyStackRef_FromPyObjectNew(value);
+ }
+ if (cell != NULL) {
+ PyObject *oldvalue_o = PyCell_GET(cell);
+ if (value != oldvalue_o) {
+ PyCell_SET(cell, Py_XNewRef(value));
+ Py_XDECREF(oldvalue_o);
}
- return 0;
+ } else if (value != PyStackRef_AsPyObjectBorrow(oldvalue)) {
+ PyStackRef_XCLOSE(fast[i]);
+ fast[i] = PyStackRef_FromPyObjectNew(value);
}
+ return 0;
}
// Okay not in the fast locals, try extra locals
@@ -545,11 +564,12 @@ framelocalsproxy_contains(PyObject *self, PyObject *key)
{
PyFrameObject *frame = ((PyFrameLocalsProxyObject*)self)->frame;
- if (PyUnicode_CheckExact(key)) {
- int i = framelocalsproxy_getkeyindex(frame, key, true);
- if (i >= 0) {
- return 1;
- }
+ int i = framelocalsproxy_getkeyindex(frame, key, true);
+ if (i == -2) {
+ return -1;
+ }
+ if (i >= 0) {
+ return 1;
}
PyObject *extra = ((PyFrameObject*)frame)->f_extra_locals;
From 097633981879b3c9de9a1dd120d3aa585ecc2384 Mon Sep 17 00:00:00 2001
From: Petr Viktorin
Date: Wed, 31 Jul 2024 00:19:48 +0200
Subject: [PATCH 087/139] gh-121650: Encode newlines in headers, and verify
headers are sound (GH-122233)
## Encode header parts that contain newlines
Per RFC 2047:
> [...] these encoding schemes allow the
> encoding of arbitrary octet values, mail readers that implement this
> decoding should also ensure that display of the decoded data on the
> recipient's terminal will not cause unwanted side-effects
It seems that the "quoted-word" scheme is a valid way to include
a newline character in a header value, just like we already allow
undecodable bytes or control characters.
They do need to be properly quoted when serialized to text, though.
## Verify that email headers are well-formed
This should fail for custom fold() implementations that aren't careful
about newlines.
Co-authored-by: Bas Bloemsaat
Co-authored-by: Serhiy Storchaka
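
A condensed sketch of the failure mode this guards against, distilled from the tests added below (assumes an interpreter with this change applied):

    # A custom header whose fold() returns raw text would smuggle an extra
    # header line into the output; the generator now refuses to write it.
    import email.errors
    import email.policy
    from email import message_from_string

    class LiteralHeader(str):
        name = 'Header'
        def fold(self, **kwargs):
            return self               # deliberately bypasses proper folding

    msg = message_from_string("Header: Value\r\n\r\nBody",
                              policy=email.policy.default)
    del msg['Header']
    msg['Header'] = LiteralHeader('Value\r\nBad: Injection\r\n')

    try:
        msg.as_string()
    except email.errors.HeaderWriteError as exc:
        print("refused to serialize:", exc)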
---
Doc/library/email.errors.rst | 7 +++
Doc/library/email.policy.rst | 18 ++++++
Doc/whatsnew/3.13.rst | 9 +++
Lib/email/_header_value_parser.py | 12 +++-
Lib/email/_policybase.py | 8 +++
Lib/email/errors.py | 4 ++
Lib/email/generator.py | 13 +++-
Lib/test/test_email/test_generator.py | 62 +++++++++++++++++++
Lib/test/test_email/test_policy.py | 26 ++++++++
...-07-27-16-10-41.gh-issue-121650.nf6oc9.rst | 5 ++
10 files changed, 160 insertions(+), 4 deletions(-)
create mode 100644 Misc/NEWS.d/next/Library/2024-07-27-16-10-41.gh-issue-121650.nf6oc9.rst
diff --git a/Doc/library/email.errors.rst b/Doc/library/email.errors.rst
index 33ab4265116178..f8f43d82a3df2e 100644
--- a/Doc/library/email.errors.rst
+++ b/Doc/library/email.errors.rst
@@ -58,6 +58,13 @@ The following exception classes are defined in the :mod:`email.errors` module:
:class:`~email.mime.nonmultipart.MIMENonMultipart` (e.g.
:class:`~email.mime.image.MIMEImage`).
+
+.. exception:: HeaderWriteError()
+
+ Raised when an error occurs when the :mod:`~email.generator` outputs
+ headers.
+
+
.. exception:: MessageDefect()
This is the base class for all defects found when parsing email messages.
diff --git a/Doc/library/email.policy.rst b/Doc/library/email.policy.rst
index 83feedf728351e..314767d0802a08 100644
--- a/Doc/library/email.policy.rst
+++ b/Doc/library/email.policy.rst
@@ -229,6 +229,24 @@ added matters. To illustrate::
.. versionadded:: 3.6
+
+ .. attribute:: verify_generated_headers
+
+ If ``True`` (the default), the generator will raise
+ :exc:`~email.errors.HeaderWriteError` instead of writing a header
+ that is improperly folded or delimited, such that it would
+ be parsed as multiple headers or joined with adjacent data.
+ Such headers can be generated by custom header classes or bugs
+ in the ``email`` module.
+
+ As it's a security feature, this defaults to ``True`` even in the
+ :class:`~email.policy.Compat32` policy.
+ For backwards compatible, but unsafe, behavior, it must be set to
+ ``False`` explicitly.
+
+ .. versionadded:: 3.13
+
+
The following :class:`Policy` method is intended to be called by code using
the email library to create policy instances with custom settings:
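
For completeness, a short sketch of the opt-out described above (grounded in the policy tests added later in this patch):

    import email.policy

    # Cloning with verify_generated_headers=False restores the old, unsafe
    # behaviour in which badly folded headers are written out as-is.
    unsafe = email.policy.default.clone(verify_generated_headers=False)
    print(unsafe.verify_generated_headers)   # False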
diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst
index fbf19d1c9598e1..5761712a3c714b 100644
--- a/Doc/whatsnew/3.13.rst
+++ b/Doc/whatsnew/3.13.rst
@@ -736,6 +736,15 @@ doctest
email
-----
+* Headers with embedded newlines are now quoted on output.
+
+ The :mod:`~email.generator` will now refuse to serialize (write) headers
+ that are improperly folded or delimited, such that they would be parsed as
+ multiple headers or joined with adjacent data.
+ If you need to turn this safety feature off,
+ set :attr:`~email.policy.Policy.verify_generated_headers` to ``False``.
+ (Contributed by Bas Bloemsaat and Petr Viktorin in :gh:`121650`.)
+
* :func:`email.utils.getaddresses` and :func:`email.utils.parseaddr` now return
``('', '')`` 2-tuples in more situations where invalid email addresses are
encountered instead of potentially inaccurate values. Add optional *strict*
diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py
index 7da1bbaf8a80d7..ec2215a5e5f33c 100644
--- a/Lib/email/_header_value_parser.py
+++ b/Lib/email/_header_value_parser.py
@@ -92,6 +92,8 @@
ASPECIALS = TSPECIALS | set("*'%")
ATTRIBUTE_ENDS = ASPECIALS | WSP
EXTENDED_ATTRIBUTE_ENDS = ATTRIBUTE_ENDS - set('%')
+NLSET = {'\n', '\r'}
+SPECIALSNL = SPECIALS | NLSET
def quote_string(value):
return '"'+str(value).replace('\\', '\\\\').replace('"', r'\"')+'"'
@@ -2802,9 +2804,13 @@ def _refold_parse_tree(parse_tree, *, policy):
wrap_as_ew_blocked -= 1
continue
tstr = str(part)
- if part.token_type == 'ptext' and set(tstr) & SPECIALS:
- # Encode if tstr contains special characters.
- want_encoding = True
+ if not want_encoding:
+ if part.token_type == 'ptext':
+ # Encode if tstr contains special characters.
+ want_encoding = not SPECIALSNL.isdisjoint(tstr)
+ else:
+ # Encode if tstr contains newlines.
+ want_encoding = not NLSET.isdisjoint(tstr)
try:
tstr.encode(encoding)
charset = encoding
diff --git a/Lib/email/_policybase.py b/Lib/email/_policybase.py
index 1c76ed63b61ae8..c7694a44e26639 100644
--- a/Lib/email/_policybase.py
+++ b/Lib/email/_policybase.py
@@ -157,6 +157,13 @@ class Policy(_PolicyBase, metaclass=abc.ABCMeta):
message_factory -- the class to use to create new message objects.
If the value is None, the default is Message.
+ verify_generated_headers
+ -- if true, the generator verifies that each header
+ is properly folded, so that a parser won't
+ treat it as multiple headers, start-of-body, or
+ part of another header.
+ This is a check against custom Header & fold()
+ implementations.
"""
raise_on_defect = False
@@ -165,6 +172,7 @@ class Policy(_PolicyBase, metaclass=abc.ABCMeta):
max_line_length = 78
mangle_from_ = False
message_factory = None
+ verify_generated_headers = True
def handle_defect(self, obj, defect):
"""Based on policy, either raise defect or call register_defect.
diff --git a/Lib/email/errors.py b/Lib/email/errors.py
index 3ad00565549968..02aa5eced6ae46 100644
--- a/Lib/email/errors.py
+++ b/Lib/email/errors.py
@@ -29,6 +29,10 @@ class CharsetError(MessageError):
"""An illegal charset was given."""
+class HeaderWriteError(MessageError):
+ """Error while writing headers."""
+
+
# These are parsing defects which the parser was able to work around.
class MessageDefect(ValueError):
"""Base class for a message defect."""
diff --git a/Lib/email/generator.py b/Lib/email/generator.py
index 9d058ceada24f8..42c84aa4da1044 100644
--- a/Lib/email/generator.py
+++ b/Lib/email/generator.py
@@ -14,12 +14,14 @@
from copy import deepcopy
from io import StringIO, BytesIO
from email.utils import _has_surrogates
+from email.errors import HeaderWriteError
UNDERSCORE = '_'
NL = '\n' # XXX: no longer used by the code below.
NLCRE = re.compile(r'\r\n|\r|\n')
fcre = re.compile(r'^From ', re.MULTILINE)
+NEWLINE_WITHOUT_FWSP = re.compile(r'\r\n[^ \t]|\r[^ \n\t]|\n[^ \t]')
class Generator:
@@ -222,7 +224,16 @@ def _dispatch(self, msg):
def _write_headers(self, msg):
for h, v in msg.raw_items():
- self.write(self.policy.fold(h, v))
+ folded = self.policy.fold(h, v)
+ if self.policy.verify_generated_headers:
+ linesep = self.policy.linesep
+ if not folded.endswith(self.policy.linesep):
+ raise HeaderWriteError(
+ f'folded header does not end with {linesep!r}: {folded!r}')
+ if NEWLINE_WITHOUT_FWSP.search(folded.removesuffix(linesep)):
+ raise HeaderWriteError(
+ f'folded header contains newline: {folded!r}')
+ self.write(folded)
# A blank line always separates headers from body
self.write(self._NL)
diff --git a/Lib/test/test_email/test_generator.py b/Lib/test/test_email/test_generator.py
index bc6f734d4fd0a9..c75a842c33578e 100644
--- a/Lib/test/test_email/test_generator.py
+++ b/Lib/test/test_email/test_generator.py
@@ -6,6 +6,7 @@
from email.generator import Generator, BytesGenerator
from email.headerregistry import Address
from email import policy
+import email.errors
from test.test_email import TestEmailBase, parameterize
@@ -249,6 +250,44 @@ def test_rfc2231_wrapping_switches_to_default_len_if_too_narrow(self):
g.flatten(msg)
self.assertEqual(s.getvalue(), self.typ(expected))
+ def test_keep_encoded_newlines(self):
+ msg = self.msgmaker(self.typ(textwrap.dedent("""\
+ To: nobody
+ Subject: Bad subject=?UTF-8?Q?=0A?=Bcc: injection@example.com
+
+ None
+ """)))
+ expected = textwrap.dedent("""\
+ To: nobody
+ Subject: Bad subject=?UTF-8?Q?=0A?=Bcc: injection@example.com
+
+ None
+ """)
+ s = self.ioclass()
+ g = self.genclass(s, policy=self.policy.clone(max_line_length=80))
+ g.flatten(msg)
+ self.assertEqual(s.getvalue(), self.typ(expected))
+
+ def test_keep_long_encoded_newlines(self):
+ msg = self.msgmaker(self.typ(textwrap.dedent("""\
+ To: nobody
+ Subject: Bad subject=?UTF-8?Q?=0A?=Bcc: injection@example.com
+
+ None
+ """)))
+ expected = textwrap.dedent("""\
+ To: nobody
+ Subject: Bad subject
+ =?utf-8?q?=0A?=Bcc:
+ injection@example.com
+
+ None
+ """)
+ s = self.ioclass()
+ g = self.genclass(s, policy=self.policy.clone(max_line_length=30))
+ g.flatten(msg)
+ self.assertEqual(s.getvalue(), self.typ(expected))
+
class TestGenerator(TestGeneratorBase, TestEmailBase):
@@ -273,6 +312,29 @@ def test_flatten_unicode_linesep(self):
g.flatten(msg)
self.assertEqual(s.getvalue(), self.typ(expected))
+ def test_verify_generated_headers(self):
+ """gh-121650: by default the generator prevents header injection"""
+ class LiteralHeader(str):
+ name = 'Header'
+ def fold(self, **kwargs):
+ return self
+
+ for text in (
+ 'Value\r\nBad Injection\r\n',
+ 'NoNewLine'
+ ):
+ with self.subTest(text=text):
+ message = message_from_string(
+ "Header: Value\r\n\r\nBody",
+ policy=self.policy,
+ )
+
+ del message['Header']
+ message['Header'] = LiteralHeader(text)
+
+ with self.assertRaises(email.errors.HeaderWriteError):
+ message.as_string()
+
class TestBytesGenerator(TestGeneratorBase, TestEmailBase):
diff --git a/Lib/test/test_email/test_policy.py b/Lib/test/test_email/test_policy.py
index c6b9c80efe1b54..baa35fd68e49c5 100644
--- a/Lib/test/test_email/test_policy.py
+++ b/Lib/test/test_email/test_policy.py
@@ -26,6 +26,7 @@ class PolicyAPITests(unittest.TestCase):
'raise_on_defect': False,
'mangle_from_': True,
'message_factory': None,
+ 'verify_generated_headers': True,
}
# These default values are the ones set on email.policy.default.
# If any of these defaults change, the docs must be updated.
@@ -294,6 +295,31 @@ def test_short_maxlen_error(self):
with self.assertRaises(email.errors.HeaderParseError):
policy.fold("Subject", subject)
+ def test_verify_generated_headers(self):
+ """Turning protection off allows header injection"""
+ policy = email.policy.default.clone(verify_generated_headers=False)
+ for text in (
+ 'Header: Value\r\nBad: Injection\r\n',
+ 'Header: NoNewLine'
+ ):
+ with self.subTest(text=text):
+ message = email.message_from_string(
+ "Header: Value\r\n\r\nBody",
+ policy=policy,
+ )
+ class LiteralHeader(str):
+ name = 'Header'
+ def fold(self, **kwargs):
+ return self
+
+ del message['Header']
+ message['Header'] = LiteralHeader(text)
+
+ self.assertEqual(
+ message.as_string(),
+ f"{text}\nBody",
+ )
+
# XXX: Need subclassing tests.
# For adding subclassed objects, make sure the usual rules apply (subclass
# wins), but that the order still works (right overrides left).
diff --git a/Misc/NEWS.d/next/Library/2024-07-27-16-10-41.gh-issue-121650.nf6oc9.rst b/Misc/NEWS.d/next/Library/2024-07-27-16-10-41.gh-issue-121650.nf6oc9.rst
new file mode 100644
index 00000000000000..83dd28d4ac575b
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-07-27-16-10-41.gh-issue-121650.nf6oc9.rst
@@ -0,0 +1,5 @@
+:mod:`email` headers with embedded newlines are now quoted on output. The
+:mod:`~email.generator` will now refuse to serialize (write) headers that
+are unsafely folded or delimited; see
+:attr:`~email.policy.Policy.verify_generated_headers`. (Contributed by Bas
+Bloemsaat and Petr Viktorin in :gh:`121650`.)
From 29c04dfa2718dd25ad8b381a1027045b312f9739 Mon Sep 17 00:00:00 2001
From: Terry Jan Reedy
Date: Tue, 30 Jul 2024 18:29:52 -0400
Subject: [PATCH 088/139] GH-122482: Make About IDLE direct discussion to DPO
(#122483)
Currently, idle-dev@python.org and idle-dev mailing list
serve to collect spam (90+%). Change About IDLE to direct
discussions to discuss.python.org. Users are already
doing so.
---
Lib/idlelib/News3.txt | 3 +++
Lib/idlelib/help_about.py | 15 ++++++++-------
...2024-07-30-18-02-55.gh-issue-122482.TerE0g.rst | 2 ++
3 files changed, 13 insertions(+), 7 deletions(-)
create mode 100644 Misc/NEWS.d/next/IDLE/2024-07-30-18-02-55.gh-issue-122482.TerE0g.rst
diff --git a/Lib/idlelib/News3.txt b/Lib/idlelib/News3.txt
index 68702ac8fb9157..a7a92e97b6c244 100644
--- a/Lib/idlelib/News3.txt
+++ b/Lib/idlelib/News3.txt
@@ -4,6 +4,9 @@ Released on 2024-10-xx
=========================
+gh-122482: Change About IDLE to direct users to discuss.python.org
+instead of the now unused idle-dev email and mailing list.
+
gh-78889: Stop Shell freezes by blocking user access to non-method
sys.stdout.shell attributes, which are all private.
diff --git a/Lib/idlelib/help_about.py b/Lib/idlelib/help_about.py
index aa1c352897f9e7..81c65f6264e7b9 100644
--- a/Lib/idlelib/help_about.py
+++ b/Lib/idlelib/help_about.py
@@ -85,15 +85,18 @@ def create_widgets(self):
byline = Label(frame_background, text=byline_text, justify=LEFT,
fg=self.fg, bg=self.bg)
byline.grid(row=2, column=0, sticky=W, columnspan=3, padx=10, pady=5)
- email = Label(frame_background, text='email: idle-dev@python.org',
- justify=LEFT, fg=self.fg, bg=self.bg)
- email.grid(row=6, column=0, columnspan=2, sticky=W, padx=10, pady=0)
+
+ forums_url = "https://discuss.python.org"
+ forums = Label(frame_background, text="Python forums: "+forums_url,
+ justify=LEFT, fg=self.fg, bg=self.bg)
+ forums.grid(row=6, column=0, sticky=W, padx=10, pady=0)
+ forums.bind("<Button-1>", lambda event: webbrowser.open(forums_url))
docs_url = ("https://docs.python.org/%d.%d/library/idle.html" %
sys.version_info[:2])
docs = Label(frame_background, text=docs_url,
justify=LEFT, fg=self.fg, bg=self.bg)
docs.grid(row=7, column=0, columnspan=2, sticky=W, padx=10, pady=0)
- docs.bind("<Button-1>", lambda event: webbrowser.open(docs['text']))
+ docs.bind("<Button-1>", lambda event: webbrowser.open(docs_url))
Frame(frame_background, borderwidth=1, relief=SUNKEN,
height=2, bg=self.bg).grid(row=8, column=0, sticky=EW,
@@ -123,9 +126,7 @@ def create_widgets(self):
height=2, bg=self.bg).grid(row=11, column=0, sticky=EW,
columnspan=3, padx=5, pady=5)
- idle = Label(frame_background,
- text='IDLE',
- fg=self.fg, bg=self.bg)
+ idle = Label(frame_background, text='IDLE', fg=self.fg, bg=self.bg)
idle.grid(row=12, column=0, sticky=W, padx=10, pady=0)
idle_buttons = Frame(frame_background, bg=self.bg)
idle_buttons.grid(row=13, column=0, columnspan=3, sticky=NSEW)
diff --git a/Misc/NEWS.d/next/IDLE/2024-07-30-18-02-55.gh-issue-122482.TerE0g.rst b/Misc/NEWS.d/next/IDLE/2024-07-30-18-02-55.gh-issue-122482.TerE0g.rst
new file mode 100644
index 00000000000000..8a11e73305992f
--- /dev/null
+++ b/Misc/NEWS.d/next/IDLE/2024-07-30-18-02-55.gh-issue-122482.TerE0g.rst
@@ -0,0 +1,2 @@
+Change About IDLE to direct users to discuss.python.org instead of the now
+unused idle-dev email and mailing list.
From 82db5728136ebec3a1d221570b810b4128a21255 Mon Sep 17 00:00:00 2001
From: Malcolm Smith
Date: Wed, 31 Jul 2024 01:21:43 +0100
Subject: [PATCH 089/139] gh-116622: Fix testPyObjectPrintOSError on Android
(#122487)
Adds extra handling for the way BSD/Android return errors from calls to fwrite.
---
Android/android.py | 25 ++++++++++++++-----
Android/testbed/app/build.gradle.kts | 9 ++++++-
...-07-30-23-48-26.gh-issue-116622.yTTtil.rst | 3 +++
Objects/object.c | 11 ++++++--
4 files changed, 39 insertions(+), 9 deletions(-)
create mode 100644 Misc/NEWS.d/next/C API/2024-07-30-23-48-26.gh-issue-116622.yTTtil.rst
diff --git a/Android/android.py b/Android/android.py
index 0a1393e61ddb0e..a78b15c9c4e58c 100755
--- a/Android/android.py
+++ b/Android/android.py
@@ -1,6 +1,7 @@
#!/usr/bin/env python3
import argparse
+from glob import glob
import os
import re
import shutil
@@ -16,16 +17,21 @@
CROSS_BUILD_DIR = CHECKOUT / "cross-build"
-def delete_if_exists(path):
- if path.exists():
+def delete_glob(pattern):
+ # Path.glob doesn't accept non-relative patterns.
+ for path in glob(str(pattern)):
+ path = Path(path)
print(f"Deleting {path} ...")
- shutil.rmtree(path)
+ if path.is_dir() and not path.is_symlink():
+ shutil.rmtree(path)
+ else:
+ path.unlink()
def subdir(name, *, clean=None):
path = CROSS_BUILD_DIR / name
if clean:
- delete_if_exists(path)
+ delete_glob(path)
if not path.exists():
if clean is None:
sys.exit(
@@ -150,10 +156,17 @@ def configure_host_python(context):
def make_host_python(context):
+ # The CFLAGS and LDFLAGS set in android-env include the prefix dir, so
+ # delete any previously-installed Python libs and include files to prevent
+ # them being used during the build.
host_dir = subdir(context.host)
+ prefix_dir = host_dir / "prefix"
+ delete_glob(f"{prefix_dir}/include/python*")
+ delete_glob(f"{prefix_dir}/lib/libpython*")
+
os.chdir(host_dir / "build")
run(["make", "-j", str(os.cpu_count())], host=context.host)
- run(["make", "install", f"prefix={host_dir}/prefix"], host=context.host)
+ run(["make", "install", f"prefix={prefix_dir}"], host=context.host)
def build_all(context):
@@ -164,7 +177,7 @@ def build_all(context):
def clean_all(context):
- delete_if_exists(CROSS_BUILD_DIR)
+ delete_glob(CROSS_BUILD_DIR)
# To avoid distributing compiled artifacts without corresponding source code,
diff --git a/Android/testbed/app/build.gradle.kts b/Android/testbed/app/build.gradle.kts
index 7690d3fd86b2fd..7320b21e98bbd1 100644
--- a/Android/testbed/app/build.gradle.kts
+++ b/Android/testbed/app/build.gradle.kts
@@ -7,10 +7,17 @@ plugins {
val PYTHON_DIR = File(projectDir, "../../..").canonicalPath
val PYTHON_CROSS_DIR = "$PYTHON_DIR/cross-build"
+
val ABIS = mapOf(
"arm64-v8a" to "aarch64-linux-android",
"x86_64" to "x86_64-linux-android",
-)
+).filter { File("$PYTHON_CROSS_DIR/${it.value}").exists() }
+if (ABIS.isEmpty()) {
+ throw GradleException(
+ "No Android ABIs found in $PYTHON_CROSS_DIR: see Android/README.md " +
+ "for building instructions."
+ )
+}
val PYTHON_VERSION = File("$PYTHON_DIR/Include/patchlevel.h").useLines {
for (line in it) {
diff --git a/Misc/NEWS.d/next/C API/2024-07-30-23-48-26.gh-issue-116622.yTTtil.rst b/Misc/NEWS.d/next/C API/2024-07-30-23-48-26.gh-issue-116622.yTTtil.rst
new file mode 100644
index 00000000000000..7ae0f83f37bd62
--- /dev/null
+++ b/Misc/NEWS.d/next/C API/2024-07-30-23-48-26.gh-issue-116622.yTTtil.rst
@@ -0,0 +1,3 @@
+Make :any:`PyObject_Print` work around a bug in Android and OpenBSD which
+prevented it from throwing an exception when trying to write to a read-only
+stream.
diff --git a/Objects/object.c b/Objects/object.c
index 8a648a46487910..db9d3e46795668 100644
--- a/Objects/object.c
+++ b/Objects/object.c
@@ -536,6 +536,7 @@ int
PyObject_Print(PyObject *op, FILE *fp, int flags)
{
int ret = 0;
+ int write_error = 0;
if (PyErr_CheckSignals())
return -1;
#ifdef USE_STACKCHECK
@@ -574,14 +575,20 @@ PyObject_Print(PyObject *op, FILE *fp, int flags)
ret = -1;
}
else {
- fwrite(t, 1, len, fp);
+ /* Versions of Android and OpenBSD from before 2023 fail to
+ set the `ferror` indicator when writing to a read-only
+ stream, so we need to check the return value.
+ (https://github.com/openbsd/src/commit/fc99cf9338942ecd9adc94ea08bf6188f0428c15) */
+ if (fwrite(t, 1, len, fp) != (size_t)len) {
+ write_error = 1;
+ }
}
Py_DECREF(s);
}
}
}
if (ret == 0) {
- if (ferror(fp)) {
+ if (write_error || ferror(fp)) {
PyErr_SetFromErrno(PyExc_OSError);
clearerr(fp);
ret = -1;
From a9344cdffa30fdf60154d645f9e74ab3d67ae2e9 Mon Sep 17 00:00:00 2001
From: Cody Maloney
Date: Tue, 30 Jul 2024 18:39:54 -0700
Subject: [PATCH 090/139] gh-121381 Remove subprocess._USE_VFORK escape hatch
(#121383)
This flag was added as an escape hatch in gh-91401 and backported to
Python 3.10. The flag broke at some point between its addition and now.
As there are currently no publicly known environments that require this,
remove it rather than work on fixing it.
This leaves the flag in the subprocess module so as not to break code that
may have used or checked the flag itself.
discussion: https://discuss.python.org/t/subprocess-use-vfork-escape-hatch-broken-fix-or-remove/56915/2
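A hedged sketch of the one remaining knob, reading the private attribute defensively since it is not guaranteed to exist on every version:
    import subprocess
    # Only the posix_spawn escape hatch remains; guard the attribute lookup.
    if getattr(subprocess, "_USE_POSIX_SPAWN", None) is not None:
        subprocess._USE_POSIX_SPAWN = False  # See CPython issue gh-NNNNNN.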
---
Doc/library/subprocess.rst | 25 ++++---------------
Lib/multiprocessing/util.py | 3 +--
Lib/subprocess.py | 3 +--
Lib/test/test_capi/test_misc.py | 6 ++---
Lib/test/test_subprocess.py | 22 +---------------
...-06-28-23-17-22.gh-issue-121381.i2xL7P.rst | 2 ++
Modules/_posixsubprocess.c | 7 +++---
Modules/clinic/_posixsubprocess.c.h | 15 ++++-------
8 files changed, 21 insertions(+), 62 deletions(-)
create mode 100644 Misc/NEWS.d/next/Core and Builtins/2024-06-28-23-17-22.gh-issue-121381.i2xL7P.rst
diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst
index a0ba97f429bec9..f10a8085e64244 100644
--- a/Doc/library/subprocess.rst
+++ b/Doc/library/subprocess.rst
@@ -1561,36 +1561,22 @@ runtime):
Module which provides function to parse and escape command lines.
-.. _disable_vfork:
.. _disable_posix_spawn:
-Disabling use of ``vfork()`` or ``posix_spawn()``
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Disable use of ``posix_spawn()``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
On Linux, :mod:`subprocess` defaults to using the ``vfork()`` system call
internally when it is safe to do so rather than ``fork()``. This greatly
improves performance.
-If you ever encounter a presumed highly unusual situation where you need to
-prevent ``vfork()`` from being used by Python, you can set the
-:const:`subprocess._USE_VFORK` attribute to a false value.
-
-::
-
- subprocess._USE_VFORK = False # See CPython issue gh-NNNNNN.
-
-Setting this has no impact on use of ``posix_spawn()`` which could use
-``vfork()`` internally within its libc implementation. There is a similar
-:const:`subprocess._USE_POSIX_SPAWN` attribute if you need to prevent use of
-that.
-
::
subprocess._USE_POSIX_SPAWN = False # See CPython issue gh-NNNNNN.
-It is safe to set these to false on any Python version. They will have no
-effect on older versions when unsupported. Do not assume the attributes are
-available to read. Despite their names, a true value does not indicate that the
+It is safe to set this to false on any Python version. It will have no
+effect on older or newer versions where unsupported. Do not assume the attribute
+is available to read. Despite the name, a true value does not indicate the
corresponding function will be used, only that it may be.
Please file issues any time you have to use these private knobs with a way to
@@ -1598,4 +1584,3 @@ reproduce the issue you were seeing. Link to that issue from a comment in your
code.
.. versionadded:: 3.8 ``_USE_POSIX_SPAWN``
-.. versionadded:: 3.11 ``_USE_VFORK``
diff --git a/Lib/multiprocessing/util.py b/Lib/multiprocessing/util.py
index 4f471fbde71ace..d48ef8a86b34e1 100644
--- a/Lib/multiprocessing/util.py
+++ b/Lib/multiprocessing/util.py
@@ -445,8 +445,7 @@ def spawnv_passfds(path, args, passfds):
return _posixsubprocess.fork_exec(
args, [path], True, passfds, None, None,
-1, -1, -1, -1, -1, -1, errpipe_read, errpipe_write,
- False, False, -1, None, None, None, -1, None,
- subprocess._USE_VFORK)
+ False, False, -1, None, None, None, -1, None)
finally:
os.close(errpipe_read)
os.close(errpipe_write)
diff --git a/Lib/subprocess.py b/Lib/subprocess.py
index bc08878db313df..88f0230b05fbc7 100644
--- a/Lib/subprocess.py
+++ b/Lib/subprocess.py
@@ -749,7 +749,6 @@ def _use_posix_spawn():
# These are primarily fail-safe knobs for negatives. A True value does not
# guarantee the given libc/syscall API will be used.
_USE_POSIX_SPAWN = _use_posix_spawn()
-_USE_VFORK = True
_HAVE_POSIX_SPAWN_CLOSEFROM = hasattr(os, 'POSIX_SPAWN_CLOSEFROM')
@@ -1902,7 +1901,7 @@ def _execute_child(self, args, executable, preexec_fn, close_fds,
errpipe_read, errpipe_write,
restore_signals, start_new_session,
process_group, gid, gids, uid, umask,
- preexec_fn, _USE_VFORK)
+ preexec_fn)
self._child_created = True
finally:
# be sure the FD is closed no matter what
diff --git a/Lib/test/test_capi/test_misc.py b/Lib/test/test_capi/test_misc.py
index 9de97c0c2c776a..5c4547da1bdc53 100644
--- a/Lib/test/test_capi/test_misc.py
+++ b/Lib/test/test_capi/test_misc.py
@@ -120,7 +120,7 @@ def __len__(self):
return 1
with self.assertRaisesRegex(TypeError, 'indexing'):
_posixsubprocess.fork_exec(
- 1,Z(),True,(1, 2),5,6,7,8,9,10,11,12,13,14,True,True,17,False,19,20,21,22,False)
+ 1,Z(),True,(1, 2),5,6,7,8,9,10,11,12,13,14,True,True,17,False,19,20,21,22)
# Issue #15736: overflow in _PySequence_BytesToCharpArray()
class Z(object):
def __len__(self):
@@ -128,7 +128,7 @@ def __len__(self):
def __getitem__(self, i):
return b'x'
self.assertRaises(MemoryError, _posixsubprocess.fork_exec,
- 1,Z(),True,(1, 2),5,6,7,8,9,10,11,12,13,14,True,True,17,False,19,20,21,22,False)
+ 1,Z(),True,(1, 2),5,6,7,8,9,10,11,12,13,14,True,True,17,False,19,20,21,22)
@unittest.skipUnless(_posixsubprocess, '_posixsubprocess required for this test.')
def test_subprocess_fork_exec(self):
@@ -138,7 +138,7 @@ def __len__(self):
# Issue #15738: crash in subprocess_fork_exec()
self.assertRaises(TypeError, _posixsubprocess.fork_exec,
- Z(),[b'1'],True,(1, 2),5,6,7,8,9,10,11,12,13,14,True,True,17,False,19,20,21,22,False)
+ Z(),[b'1'],True,(1, 2),5,6,7,8,9,10,11,12,13,14,True,True,17,False,19,20,21,22)
@unittest.skipIf(MISSING_C_DOCSTRINGS,
"Signature information for builtins requires docstrings")
diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py
index 9412a2d737bb2e..f065b9c9bb1c2c 100644
--- a/Lib/test/test_subprocess.py
+++ b/Lib/test/test_subprocess.py
@@ -3278,7 +3278,7 @@ def __int__(self):
1, 2, 3, 4,
True, True, 0,
None, None, None, -1,
- None, True)
+ None)
self.assertIn('fds_to_keep', str(c.exception))
finally:
if not gc_enabled:
@@ -3413,25 +3413,6 @@ def __del__(self):
self.assertEqual(out.strip(), b"OK")
self.assertIn(b"preexec_fn not supported at interpreter shutdown", err)
- @unittest.skipIf(not sysconfig.get_config_var("HAVE_VFORK"),
- "vfork() not enabled by configure.")
- @mock.patch("subprocess._fork_exec")
- @mock.patch("subprocess._USE_POSIX_SPAWN", new=False)
- def test__use_vfork(self, mock_fork_exec):
- self.assertTrue(subprocess._USE_VFORK) # The default value regardless.
- mock_fork_exec.side_effect = RuntimeError("just testing args")
- with self.assertRaises(RuntimeError):
- subprocess.run([sys.executable, "-c", "pass"])
- mock_fork_exec.assert_called_once()
- # NOTE: These assertions are *ugly* as they require the last arg
- # to remain the have_vfork boolean. We really need to refactor away
- # from the giant "wall of args" internal C extension API.
- self.assertTrue(mock_fork_exec.call_args.args[-1])
- with mock.patch.object(subprocess, '_USE_VFORK', False):
- with self.assertRaises(RuntimeError):
- subprocess.run([sys.executable, "-c", "pass"])
- self.assertFalse(mock_fork_exec.call_args_list[-1].args[-1])
-
@unittest.skipIf(not sysconfig.get_config_var("HAVE_VFORK"),
"vfork() not enabled by configure.")
@unittest.skipIf(sys.platform != "linux", "Linux only, requires strace.")
@@ -3478,7 +3459,6 @@ def test_vfork_used_when_expected(self):
# Test that each individual thing that would disable the use of vfork
# actually disables it.
for sub_name, preamble, sp_kwarg, expect_permission_error in (
- ("!use_vfork", "subprocess._USE_VFORK = False", "", False),
("preexec", "", "preexec_fn=lambda: None", False),
("setgid", "", f"group={os.getgid()}", True),
("setuid", "", f"user={os.getuid()}", True),
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-06-28-23-17-22.gh-issue-121381.i2xL7P.rst b/Misc/NEWS.d/next/Core and Builtins/2024-06-28-23-17-22.gh-issue-121381.i2xL7P.rst
new file mode 100644
index 00000000000000..3a02145378e2cd
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-06-28-23-17-22.gh-issue-121381.i2xL7P.rst
@@ -0,0 +1,2 @@
+Remove ``subprocess._USE_VFORK`` escape hatch code and documentation.
+It was added just in case, and doesn't have any known cases that require it.
diff --git a/Modules/_posixsubprocess.c b/Modules/_posixsubprocess.c
index daec4ad708dea4..ad6d7ceda84e37 100644
--- a/Modules/_posixsubprocess.c
+++ b/Modules/_posixsubprocess.c
@@ -977,7 +977,6 @@ _posixsubprocess.fork_exec as subprocess_fork_exec
uid as uid_object: object
child_umask: int
preexec_fn: object
- allow_vfork: bool
/
Spawn a fresh new child process.
@@ -1014,8 +1013,8 @@ subprocess_fork_exec_impl(PyObject *module, PyObject *process_args,
pid_t pgid_to_set, PyObject *gid_object,
PyObject *extra_groups_packed,
PyObject *uid_object, int child_umask,
- PyObject *preexec_fn, int allow_vfork)
-/*[clinic end generated code: output=7ee4f6ee5cf22b5b input=51757287ef266ffa]*/
+ PyObject *preexec_fn)
+/*[clinic end generated code: output=288464dc56e373c7 input=f311c3bcb5dd55c8]*/
{
PyObject *converted_args = NULL, *fast_args = NULL;
PyObject *preexec_fn_args_tuple = NULL;
@@ -1218,7 +1217,7 @@ subprocess_fork_exec_impl(PyObject *module, PyObject *process_args,
#ifdef VFORK_USABLE
/* Use vfork() only if it's safe. See the comment above child_exec(). */
sigset_t old_sigs;
- if (preexec_fn == Py_None && allow_vfork &&
+ if (preexec_fn == Py_None &&
uid == (uid_t)-1 && gid == (gid_t)-1 && extra_group_size < 0) {
/* Block all signals to ensure that no signal handlers are run in the
* child process while it shares memory with us. Note that signals
diff --git a/Modules/clinic/_posixsubprocess.c.h b/Modules/clinic/_posixsubprocess.c.h
index dd7644de6b7534..d52629cf6eaa5b 100644
--- a/Modules/clinic/_posixsubprocess.c.h
+++ b/Modules/clinic/_posixsubprocess.c.h
@@ -9,7 +9,7 @@ PyDoc_STRVAR(subprocess_fork_exec__doc__,
" env, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite,\n"
" errpipe_read, errpipe_write, restore_signals, call_setsid,\n"
" pgid_to_set, gid, extra_groups, uid, child_umask, preexec_fn,\n"
-" allow_vfork, /)\n"
+" /)\n"
"--\n"
"\n"
"Spawn a fresh new child process.\n"
@@ -48,7 +48,7 @@ subprocess_fork_exec_impl(PyObject *module, PyObject *process_args,
pid_t pgid_to_set, PyObject *gid_object,
PyObject *extra_groups_packed,
PyObject *uid_object, int child_umask,
- PyObject *preexec_fn, int allow_vfork);
+ PyObject *preexec_fn);
static PyObject *
subprocess_fork_exec(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
@@ -76,9 +76,8 @@ subprocess_fork_exec(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
PyObject *uid_object;
int child_umask;
PyObject *preexec_fn;
- int allow_vfork;
- if (!_PyArg_CheckPositional("fork_exec", nargs, 23, 23)) {
+ if (!_PyArg_CheckPositional("fork_exec", nargs, 22, 22)) {
goto exit;
}
process_args = args[0];
@@ -146,13 +145,9 @@ subprocess_fork_exec(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
goto exit;
}
preexec_fn = args[21];
- allow_vfork = PyObject_IsTrue(args[22]);
- if (allow_vfork < 0) {
- goto exit;
- }
- return_value = subprocess_fork_exec_impl(module, process_args, executable_list, close_fds, py_fds_to_keep, cwd_obj, env_list, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, errpipe_read, errpipe_write, restore_signals, call_setsid, pgid_to_set, gid_object, extra_groups_packed, uid_object, child_umask, preexec_fn, allow_vfork);
+ return_value = subprocess_fork_exec_impl(module, process_args, executable_list, close_fds, py_fds_to_keep, cwd_obj, env_list, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, errpipe_read, errpipe_write, restore_signals, call_setsid, pgid_to_set, gid_object, extra_groups_packed, uid_object, child_umask, preexec_fn);
exit:
return return_value;
}
-/*[clinic end generated code: output=48555f5965a871be input=a9049054013a1b77]*/
+/*[clinic end generated code: output=942bc2748a9c2785 input=a9049054013a1b77]*/
From d01fd240517e0c5fb679686a93119d0aa6b0fc0f Mon Sep 17 00:00:00 2001
From: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com>
Date: Wed, 31 Jul 2024 10:02:08 +0300
Subject: [PATCH 091/139] Docs: bump Sphinx to 8.0 and update constraints
(#122496)
Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
---
Doc/constraints.txt | 14 +++++++-------
Doc/requirements.txt | 2 +-
2 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/Doc/constraints.txt b/Doc/constraints.txt
index 147de1271eb2b7..ab3b39bf380dad 100644
--- a/Doc/constraints.txt
+++ b/Doc/constraints.txt
@@ -9,16 +9,16 @@ babel<3
colorama<0.5
imagesize<1.5
Jinja2<3.2
-packaging<24
-Pygments>=2.16.1,<3
+packaging<25
+Pygments<3
requests<3
snowballstemmer<3
-sphinxcontrib-applehelp<1.1
-sphinxcontrib-devhelp<1.1
-sphinxcontrib-htmlhelp<2.1
+sphinxcontrib-applehelp<2.1
+sphinxcontrib-devhelp<2.1
+sphinxcontrib-htmlhelp<2.2
sphinxcontrib-jsmath<1.1
-sphinxcontrib-qthelp<1.1
-sphinxcontrib-serializinghtml<1.2
+sphinxcontrib-qthelp<2.1
+sphinxcontrib-serializinghtml<2.1
# Direct dependencies of Jinja2 (Jinja is a dependency of Sphinx, see above)
MarkupSafe<2.2
diff --git a/Doc/requirements.txt b/Doc/requirements.txt
index 98ad52e17538a4..bf1028020b7af7 100644
--- a/Doc/requirements.txt
+++ b/Doc/requirements.txt
@@ -6,7 +6,7 @@
# Sphinx version is pinned so that new versions that introduce new warnings
# won't suddenly cause build failures. Updating the version is fine as long
# as no warnings are raised by doing so.
-sphinx~=7.4.0
+sphinx~=8.0.0
blurb
From f071f01b7b7e19d7d6b3a4b0ec62f820ecb14660 Mon Sep 17 00:00:00 2001
From: Russell Keith-Magee
Date: Wed, 31 Jul 2024 16:24:15 +0800
Subject: [PATCH 092/139] gh-122133: Rework pure Python socketpair tests to
avoid use of importlib.reload. (#122493)
Co-authored-by: Gregory P. Smith
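A short sketch of the pattern the reworked tests rely on, assuming the _fallback_socketpair helper introduced in this patch: monkeypatch it in place of socket.socketpair instead of deleting _socket.socketpair and reloading the module:
    import socket
    a, b = socket._fallback_socketpair()   # pure-Python emulation over AF_INET
    try:
        a.sendall(b"ping")
        assert b.recv(4) == b"ping"
    finally:
        a.close()
        b.close()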
---
Lib/socket.py | 121 +++++++++++++++++++---------------------
Lib/test/test_socket.py | 20 ++-----
2 files changed, 64 insertions(+), 77 deletions(-)
diff --git a/Lib/socket.py b/Lib/socket.py
index 2e6043cbdb8005..9207101dcf9d58 100644
--- a/Lib/socket.py
+++ b/Lib/socket.py
@@ -592,16 +592,65 @@ def fromshare(info):
return socket(0, 0, 0, info)
__all__.append("fromshare")
-if hasattr(_socket, "socketpair"):
+# Origin: https://gist.github.com/4325783, by Geert Jansen. Public domain.
+# This is used if _socket doesn't natively provide socketpair. It's
+# always defined so that it can be patched in for testing purposes.
+def _fallback_socketpair(family=AF_INET, type=SOCK_STREAM, proto=0):
+ if family == AF_INET:
+ host = _LOCALHOST
+ elif family == AF_INET6:
+ host = _LOCALHOST_V6
+ else:
+ raise ValueError("Only AF_INET and AF_INET6 socket address families "
+ "are supported")
+ if type != SOCK_STREAM:
+ raise ValueError("Only SOCK_STREAM socket type is supported")
+ if proto != 0:
+ raise ValueError("Only protocol zero is supported")
+
+ # We create a connected TCP socket. Note the trick with
+ # setblocking(False) that prevents us from having to create a thread.
+ lsock = socket(family, type, proto)
+ try:
+ lsock.bind((host, 0))
+ lsock.listen()
+ # On IPv6, ignore flow_info and scope_id
+ addr, port = lsock.getsockname()[:2]
+ csock = socket(family, type, proto)
+ try:
+ csock.setblocking(False)
+ try:
+ csock.connect((addr, port))
+ except (BlockingIOError, InterruptedError):
+ pass
+ csock.setblocking(True)
+ ssock, _ = lsock.accept()
+ except:
+ csock.close()
+ raise
+ finally:
+ lsock.close()
- def socketpair(family=None, type=SOCK_STREAM, proto=0):
- """socketpair([family[, type[, proto]]]) -> (socket object, socket object)
+ # Authenticating avoids using a connection from something else
+ # able to connect to {host}:{port} instead of us.
+ # We expect only AF_INET and AF_INET6 families.
+ try:
+ if (
+ ssock.getsockname() != csock.getpeername()
+ or csock.getsockname() != ssock.getpeername()
+ ):
+ raise ConnectionError("Unexpected peer connection")
+ except:
+ # getsockname() and getpeername() can fail
+ # if either socket isn't connected.
+ ssock.close()
+ csock.close()
+ raise
- Create a pair of socket objects from the sockets returned by the platform
- socketpair() function.
- The arguments are the same as for socket() except the default family is
- AF_UNIX if defined on the platform; otherwise, the default is AF_INET.
- """
+ return (ssock, csock)
+
+if hasattr(_socket, "socketpair"):
+ def socketpair(family=None, type=SOCK_STREAM, proto=0):
if family is None:
try:
family = AF_UNIX
@@ -613,61 +662,7 @@ def socketpair(family=None, type=SOCK_STREAM, proto=0):
return a, b
else:
-
- # Origin: https://gist.github.com/4325783, by Geert Jansen. Public domain.
- def socketpair(family=AF_INET, type=SOCK_STREAM, proto=0):
- if family == AF_INET:
- host = _LOCALHOST
- elif family == AF_INET6:
- host = _LOCALHOST_V6
- else:
- raise ValueError("Only AF_INET and AF_INET6 socket address families "
- "are supported")
- if type != SOCK_STREAM:
- raise ValueError("Only SOCK_STREAM socket type is supported")
- if proto != 0:
- raise ValueError("Only protocol zero is supported")
-
- # We create a connected TCP socket. Note the trick with
- # setblocking(False) that prevents us from having to create a thread.
- lsock = socket(family, type, proto)
- try:
- lsock.bind((host, 0))
- lsock.listen()
- # On IPv6, ignore flow_info and scope_id
- addr, port = lsock.getsockname()[:2]
- csock = socket(family, type, proto)
- try:
- csock.setblocking(False)
- try:
- csock.connect((addr, port))
- except (BlockingIOError, InterruptedError):
- pass
- csock.setblocking(True)
- ssock, _ = lsock.accept()
- except:
- csock.close()
- raise
- finally:
- lsock.close()
-
- # Authenticating avoids using a connection from something else
- # able to connect to {host}:{port} instead of us.
- # We expect only AF_INET and AF_INET6 families.
- try:
- if (
- ssock.getsockname() != csock.getpeername()
- or csock.getsockname() != ssock.getpeername()
- ):
- raise ConnectionError("Unexpected peer connection")
- except:
- # getsockname() and getpeername() can fail
- # if either socket isn't connected.
- ssock.close()
- csock.close()
- raise
-
- return (ssock, csock)
+ socketpair = _fallback_socketpair
__all__.append("socketpair")
socketpair.__doc__ = """socketpair([family[, type[, proto]]]) -> (socket object, socket object)
diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py
index bb65c3c5993b88..7c607a809aa428 100644
--- a/Lib/test/test_socket.py
+++ b/Lib/test/test_socket.py
@@ -4861,7 +4861,6 @@ def _testSend(self):
class PurePythonSocketPairTest(SocketPairTest):
-
# Explicitly use socketpair AF_INET or AF_INET6 to ensure that is the
# code path we're using regardless platform is the pure python one where
# `_socket.socketpair` does not exist. (AF_INET does not work with
@@ -4876,28 +4875,21 @@ def socketpair(self):
# Local imports in this class make for easy security fix backporting.
def setUp(self):
- import _socket
- self._orig_sp = getattr(_socket, 'socketpair', None)
- if self._orig_sp is not None:
+ if hasattr(_socket, "socketpair"):
+ self._orig_sp = socket.socketpair
# This forces the version using the non-OS provided socketpair
# emulation via an AF_INET socket in Lib/socket.py.
- del _socket.socketpair
- import importlib
- global socket
- socket = importlib.reload(socket)
+ socket.socketpair = socket._fallback_socketpair
else:
- pass # This platform already uses the non-OS provided version.
+ # This platform already uses the non-OS provided version.
+ self._orig_sp = None
super().setUp()
def tearDown(self):
super().tearDown()
- import _socket
if self._orig_sp is not None:
# Restore the default socket.socketpair definition.
- _socket.socketpair = self._orig_sp
- import importlib
- global socket
- socket = importlib.reload(socket)
+ socket.socketpair = self._orig_sp
def test_recv(self):
msg = self.serv.recv(1024)
From e60ee11cb51b87deeb22ad125717bd0d0dc10fa8 Mon Sep 17 00:00:00 2001
From: Sviatoslav Sydorenko (Святослав Сидоренко)
Date: Wed, 31 Jul 2024 10:32:16 +0200
Subject: [PATCH 093/139] Move change detection to separate workflow in CI
(#122336)
---
.github/workflows/build.yml | 131 ++-------------
.../workflows/reusable-change-detection.yml | 150 ++++++++++++++++++
2 files changed, 163 insertions(+), 118 deletions(-)
create mode 100644 .github/workflows/reusable-change-detection.yml
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 6568b50c3a6a13..4f3995a020e31b 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -1,8 +1,5 @@
name: Tests
-# gh-84728: "paths-ignore" is not used to skip documentation-only PRs, because
-# it prevents to mark a job as mandatory. A PR cannot be merged if a job is
-# mandatory but not scheduled because of "paths-ignore".
on:
workflow_dispatch:
push:
@@ -23,121 +20,19 @@ concurrency:
jobs:
check_source:
- name: 'Check for source changes'
- runs-on: ubuntu-latest
- timeout-minutes: 10
- outputs:
- # Some of the referenced steps set outputs conditionally and there may be
- # cases when referencing them evaluates to empty strings. It is nice to
- # work with proper booleans so they have to be evaluated through JSON
- # conversion in the expressions. However, empty strings used like that
- # may trigger all sorts of undefined and hard-to-debug behaviors in
- # GitHub Actions CI/CD. To help with this, all of the outputs set here
- # that are meant to be used as boolean flags (and not arbitrary strings),
- # MUST have fallbacks with default values set. A common pattern would be
- # to add ` || false` to all such expressions here, in the output
- # definitions. They can then later be safely used through the following
- # idiom in job conditionals and other expressions. Here's some examples:
- #
- # if: fromJSON(needs.check_source.outputs.run-docs)
- #
- # ${{
- # fromJSON(needs.check_source.outputs.run_tests)
- # && 'truthy-branch'
- # || 'falsy-branch'
- # }}
- #
- run-docs: ${{ steps.docs-changes.outputs.run-docs || false }}
- run-win-msi: ${{ steps.win-msi-changes.outputs.run-win-msi || false }}
- run_tests: ${{ steps.check.outputs.run_tests || false }}
- run_hypothesis: ${{ steps.check.outputs.run_hypothesis || false }}
- run_cifuzz: ${{ steps.check.outputs.run_cifuzz || false }}
- config_hash: ${{ steps.config_hash.outputs.hash }} # str
- steps:
- - uses: actions/checkout@v4
- - name: Check for source changes
- id: check
- run: |
- if [ -z "$GITHUB_BASE_REF" ]; then
- echo "run_tests=true" >> $GITHUB_OUTPUT
- else
- git fetch origin $GITHUB_BASE_REF --depth=1
- # git diff "origin/$GITHUB_BASE_REF..." (3 dots) may be more
- # reliable than git diff "origin/$GITHUB_BASE_REF.." (2 dots),
- # but it requires to download more commits (this job uses
- # "git fetch --depth=1").
- #
- # git diff "origin/$GITHUB_BASE_REF..." (3 dots) works with Git
- # 2.26, but Git 2.28 is stricter and fails with "no merge base".
- #
- # git diff "origin/$GITHUB_BASE_REF.." (2 dots) should be enough on
- # GitHub, since GitHub starts by merging origin/$GITHUB_BASE_REF
- # into the PR branch anyway.
- #
- # https://github.com/python/core-workflow/issues/373
- git diff --name-only origin/$GITHUB_BASE_REF.. | grep -qvE '(\.rst$|^Doc|^Misc|^\.pre-commit-config\.yaml$|\.ruff\.toml$|\.md$|mypy\.ini$)' && echo "run_tests=true" >> $GITHUB_OUTPUT || true
- fi
-
- # Check if we should run hypothesis tests
- GIT_BRANCH=${GITHUB_BASE_REF:-${GITHUB_REF#refs/heads/}}
- echo $GIT_BRANCH
- if $(echo "$GIT_BRANCH" | grep -q -w '3\.\(8\|9\|10\|11\)'); then
- echo "Branch too old for hypothesis tests"
- echo "run_hypothesis=false" >> $GITHUB_OUTPUT
- else
- echo "Run hypothesis tests"
- echo "run_hypothesis=true" >> $GITHUB_OUTPUT
- fi
-
- # oss-fuzz maintains a configuration for fuzzing the main branch of
- # CPython, so CIFuzz should be run only for code that is likely to be
- # merged into the main branch; compatibility with older branches may
- # be broken.
- FUZZ_RELEVANT_FILES='(\.c$|\.h$|\.cpp$|^configure$|^\.github/workflows/build\.yml$|^Modules/_xxtestfuzz)'
- if [ "$GITHUB_BASE_REF" = "main" ] && [ "$(git diff --name-only origin/$GITHUB_BASE_REF.. | grep -qE $FUZZ_RELEVANT_FILES; echo $?)" -eq 0 ]; then
- # The tests are pretty slow so they are executed only for PRs
- # changing relevant files.
- echo "Run CIFuzz tests"
- echo "run_cifuzz=true" >> $GITHUB_OUTPUT
- else
- echo "Branch too old for CIFuzz tests; or no C files were changed"
- echo "run_cifuzz=false" >> $GITHUB_OUTPUT
- fi
- - name: Compute hash for config cache key
- id: config_hash
- run: |
- echo "hash=${{ hashFiles('configure', 'configure.ac', '.github/workflows/build.yml') }}" >> $GITHUB_OUTPUT
- - name: Get a list of the changed documentation-related files
- if: github.event_name == 'pull_request'
- id: changed-docs-files
- uses: Ana06/get-changed-files@v2.3.0
- with:
- filter: |
- Doc/**
- Misc/**
- .github/workflows/reusable-docs.yml
- format: csv # works for paths with spaces
- - name: Check for docs changes
- if: >-
- github.event_name == 'pull_request'
- && steps.changed-docs-files.outputs.added_modified_renamed != ''
- id: docs-changes
- run: |
- echo "run-docs=true" >> "${GITHUB_OUTPUT}"
- - name: Get a list of the MSI installer-related files
- id: changed-win-msi-files
- uses: Ana06/get-changed-files@v2.3.0
- with:
- filter: |
- Tools/msi/**
- .github/workflows/reusable-windows-msi.yml
- format: csv # works for paths with spaces
- - name: Check for changes in MSI installer-related files
- if: >-
- steps.changed-win-msi-files.outputs.added_modified_renamed != ''
- id: win-msi-changes
- run: |
- echo "run-win-msi=true" >> "${GITHUB_OUTPUT}"
+ name: Change detection
+ # To use boolean outputs from this job, parse them as JSON.
+ # Here's some examples:
+ #
+ # if: fromJSON(needs.check_source.outputs.run-docs)
+ #
+ # ${{
+ # fromJSON(needs.check_source.outputs.run_tests)
+ # && 'truthy-branch'
+ # || 'falsy-branch'
+ # }}
+ #
+ uses: ./.github/workflows/reusable-change-detection.yml
check-docs:
name: Docs
diff --git a/.github/workflows/reusable-change-detection.yml b/.github/workflows/reusable-change-detection.yml
new file mode 100644
index 00000000000000..25c789d335efc8
--- /dev/null
+++ b/.github/workflows/reusable-change-detection.yml
@@ -0,0 +1,150 @@
+---
+
+name: Change detection
+
+on: # yamllint disable-line rule:truthy
+ workflow_call:
+ outputs:
+ # Some of the referenced steps set outputs conditionally and there may be
+ # cases when referencing them evaluates to empty strings. It is nice to
+ # work with proper booleans so they have to be evaluated through JSON
+ # conversion in the expressions. However, empty strings used like that
+ # may trigger all sorts of undefined and hard-to-debug behaviors in
+ # GitHub Actions CI/CD. To help with this, all of the outputs set here
+ # that are meant to be used as boolean flags (and not arbitrary strings),
+ # MUST have fallbacks with default values set. A common pattern would be
+ # to add ` || false` to all such expressions here, in the output
+ # definitions. They can then later be safely used through the following
+ # idiom in job conditionals and other expressions. Here's some examples:
+ #
+ # if: fromJSON(needs.change-detection.outputs.run-docs)
+ #
+ # ${{
+ # fromJSON(needs.change-detection.outputs.run-tests)
+ # && 'truthy-branch'
+ # || 'falsy-branch'
+ # }}
+ #
+ config_hash:
+ description: Config hash value for use in cache keys
+ value: ${{ jobs.compute-changes.outputs.config-hash }} # str
+ run-docs:
+ description: Whether to build the docs
+ value: ${{ jobs.compute-changes.outputs.run-docs || false }} # bool
+ run_tests:
+ description: Whether to run the regular tests
+ value: ${{ jobs.compute-changes.outputs.run-tests || false }} # bool
+ run-win-msi:
+ description: Whether to run the MSI installer smoke tests
+ value: >- # bool
+ ${{ jobs.compute-changes.outputs.run-win-msi || false }}
+ run_hypothesis:
+ description: Whether to run the Hypothesis tests
+ value: >- # bool
+ ${{ jobs.compute-changes.outputs.run-hypothesis || false }}
+ run_cifuzz:
+ description: Whether to run the CIFuzz job
+ value: >- # bool
+ ${{ jobs.compute-changes.outputs.run-cifuzz || false }}
+
+jobs:
+ compute-changes:
+ name: Compute changed files
+ runs-on: ubuntu-latest
+ timeout-minutes: 10
+ outputs:
+ config-hash: ${{ steps.config-hash.outputs.hash }}
+ run-cifuzz: ${{ steps.check.outputs.run-cifuzz }}
+ run-docs: ${{ steps.docs-changes.outputs.run-docs }}
+ run-hypothesis: ${{ steps.check.outputs.run-hypothesis }}
+ run-tests: ${{ steps.check.outputs.run-tests }}
+ run-win-msi: ${{ steps.win-msi-changes.outputs.run-win-msi }}
+ steps:
+ - run: >-
+ echo '${{ github.event_name }}'
+ - uses: actions/checkout@v4
+ - name: Check for source changes
+ id: check
+ run: |
+ if [ -z "$GITHUB_BASE_REF" ]; then
+ echo "run-tests=true" >> $GITHUB_OUTPUT
+ else
+ git fetch origin $GITHUB_BASE_REF --depth=1
+ # git diff "origin/$GITHUB_BASE_REF..." (3 dots) may be more
+ # reliable than git diff "origin/$GITHUB_BASE_REF.." (2 dots),
+ # but it requires to download more commits (this job uses
+ # "git fetch --depth=1").
+ #
+ # git diff "origin/$GITHUB_BASE_REF..." (3 dots) works with Git
+ # 2.26, but Git 2.28 is stricter and fails with "no merge base".
+ #
+ # git diff "origin/$GITHUB_BASE_REF.." (2 dots) should be enough on
+ # GitHub, since GitHub starts by merging origin/$GITHUB_BASE_REF
+ # into the PR branch anyway.
+ #
+ # https://github.com/python/core-workflow/issues/373
+ git diff --name-only origin/$GITHUB_BASE_REF.. | grep -qvE '(\.rst$|^Doc|^Misc|^\.pre-commit-config\.yaml$|\.ruff\.toml$|\.md$|mypy\.ini$)' && echo "run-tests=true" >> $GITHUB_OUTPUT || true
+ fi
+
+ # Check if we should run hypothesis tests
+ GIT_BRANCH=${GITHUB_BASE_REF:-${GITHUB_REF#refs/heads/}}
+ echo $GIT_BRANCH
+ if $(echo "$GIT_BRANCH" | grep -q -w '3\.\(8\|9\|10\|11\)'); then
+ echo "Branch too old for hypothesis tests"
+ echo "run-hypothesis=false" >> $GITHUB_OUTPUT
+ else
+ echo "Run hypothesis tests"
+ echo "run-hypothesis=true" >> $GITHUB_OUTPUT
+ fi
+
+ # oss-fuzz maintains a configuration for fuzzing the main branch of
+ # CPython, so CIFuzz should be run only for code that is likely to be
+ # merged into the main branch; compatibility with older branches may
+ # be broken.
+ FUZZ_RELEVANT_FILES='(\.c$|\.h$|\.cpp$|^configure$|^\.github/workflows/build\.yml$|^Modules/_xxtestfuzz)'
+ if [ "$GITHUB_BASE_REF" = "main" ] && [ "$(git diff --name-only origin/$GITHUB_BASE_REF.. | grep -qE $FUZZ_RELEVANT_FILES; echo $?)" -eq 0 ]; then
+ # The tests are pretty slow so they are executed only for PRs
+ # changing relevant files.
+ echo "Run CIFuzz tests"
+ echo "run-cifuzz=true" >> $GITHUB_OUTPUT
+ else
+ echo "Branch too old for CIFuzz tests; or no C files were changed"
+ echo "run-cifuzz=false" >> $GITHUB_OUTPUT
+ fi
+ - name: Compute hash for config cache key
+ id: config-hash
+ run: |
+ echo "hash=${{ hashFiles('configure', 'configure.ac', '.github/workflows/build.yml') }}" >> $GITHUB_OUTPUT
+ - name: Get a list of the changed documentation-related files
+ if: github.event_name == 'pull_request'
+ id: changed-docs-files
+ uses: Ana06/get-changed-files@v2.3.0
+ with:
+ filter: |
+ Doc/**
+ Misc/**
+ .github/workflows/reusable-docs.yml
+ format: csv # works for paths with spaces
+ - name: Check for docs changes
+ if: >-
+ github.event_name == 'pull_request'
+ && steps.changed-docs-files.outputs.added_modified_renamed != ''
+ id: docs-changes
+ run: |
+ echo "run-docs=true" >> "${GITHUB_OUTPUT}"
+ - name: Get a list of the MSI installer-related files
+ id: changed-win-msi-files
+ uses: Ana06/get-changed-files@v2.3.0
+ with:
+ filter: |
+ Tools/msi/**
+ .github/workflows/reusable-windows-msi.yml
+ format: csv # works for paths with spaces
+ - name: Check for changes in MSI installer-related files
+ if: >-
+ steps.changed-win-msi-files.outputs.added_modified_renamed != ''
+ id: win-msi-changes
+ run: |
+ echo "run-win-msi=true" >> "${GITHUB_OUTPUT}"
+
+...
From bd3d31f380cd451a4ab6da5fbfde463fed95b5b5 Mon Sep 17 00:00:00 2001
From: CF Bolz-Tereick
Date: Wed, 31 Jul 2024 12:33:29 +0200
Subject: [PATCH 094/139] gh-87320: In the code module, handle exceptions
raised in sys.excepthook (GH-122456)
Before, an exception raised when calling a non-default sys.excepthook
in code.InteractiveInterpreter bubbled up to the caller, ending the REPL.
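A minimal reproduction sketch; with this change the broken hook is reported on stderr and the session keeps accepting input:
    import code, sys
    sys.excepthook = 1                         # not callable
    console = code.InteractiveConsole()
    console.runsource("1/0")                   # reports "Error in sys.excepthook:" plus the original error
    console.runsource("print('still alive')")  # the interpreter is still usable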
---
Lib/code.py | 19 +++++++++--
Lib/test/test_code_module.py | 33 +++++++++++++++++++
Lib/test/test_pyrepl/test_pyrepl.py | 24 ++++++++++++++
...4-07-30-14-46-16.gh-issue-87320.-Yk1wb.rst | 3 ++
4 files changed, 76 insertions(+), 3 deletions(-)
create mode 100644 Misc/NEWS.d/next/Library/2024-07-30-14-46-16.gh-issue-87320.-Yk1wb.rst
diff --git a/Lib/code.py b/Lib/code.py
index a55fced0704b1d..cd3889067d068d 100644
--- a/Lib/code.py
+++ b/Lib/code.py
@@ -129,7 +129,7 @@ def showsyntaxerror(self, filename=None, **kwargs):
else:
# If someone has set sys.excepthook, we let that take precedence
# over self.write
- sys.excepthook(type, value, tb)
+ self._call_excepthook(type, value, tb)
def showtraceback(self, **kwargs):
"""Display the exception that just occurred.
@@ -144,16 +144,29 @@ def showtraceback(self, **kwargs):
sys.last_traceback = last_tb
sys.last_exc = ei[1]
try:
- lines = traceback.format_exception(ei[0], ei[1], last_tb.tb_next, colorize=colorize)
if sys.excepthook is sys.__excepthook__:
+ lines = traceback.format_exception(ei[0], ei[1], last_tb.tb_next, colorize=colorize)
self.write(''.join(lines))
else:
# If someone has set sys.excepthook, we let that take precedence
# over self.write
- sys.excepthook(ei[0], ei[1], last_tb)
+ self._call_excepthook(ei[0], ei[1], last_tb)
finally:
last_tb = ei = None
+ def _call_excepthook(self, typ, value, tb):
+ try:
+ sys.excepthook(typ, value, tb)
+ except SystemExit:
+ raise
+ except BaseException as e:
+ e.__context__ = None
+ print('Error in sys.excepthook:', file=sys.stderr)
+ sys.__excepthook__(type(e), e, e.__traceback__.tb_next)
+ print(file=sys.stderr)
+ print('Original exception was:', file=sys.stderr)
+ sys.__excepthook__(typ, value, tb)
+
def write(self, data):
"""Write a string.
diff --git a/Lib/test/test_code_module.py b/Lib/test/test_code_module.py
index 259778a5cade98..5dc89108f0ad88 100644
--- a/Lib/test/test_code_module.py
+++ b/Lib/test/test_code_module.py
@@ -77,6 +77,39 @@ def test_sysexcepthook(self):
self.console.interact()
self.assertTrue(hook.called)
+ def test_sysexcepthook_crashing_doesnt_close_repl(self):
+ self.infunc.side_effect = ["1/0", "a = 123", "print(a)", EOFError('Finished')]
+ self.sysmod.excepthook = 1
+ self.console.interact()
+ self.assertEqual(['write', ('123', ), {}], self.stdout.method_calls[0])
+ error = "".join(call.args[0] for call in self.stderr.method_calls if call[0] == 'write')
+ self.assertIn("Error in sys.excepthook:", error)
+ self.assertEqual(error.count("'int' object is not callable"), 1)
+ self.assertIn("Original exception was:", error)
+ self.assertIn("division by zero", error)
+
+ def test_sysexcepthook_raising_BaseException(self):
+ self.infunc.side_effect = ["1/0", "a = 123", "print(a)", EOFError('Finished')]
+ s = "not so fast"
+ def raise_base(*args, **kwargs):
+ raise BaseException(s)
+ self.sysmod.excepthook = raise_base
+ self.console.interact()
+ self.assertEqual(['write', ('123', ), {}], self.stdout.method_calls[0])
+ error = "".join(call.args[0] for call in self.stderr.method_calls if call[0] == 'write')
+ self.assertIn("Error in sys.excepthook:", error)
+ self.assertEqual(error.count("not so fast"), 1)
+ self.assertIn("Original exception was:", error)
+ self.assertIn("division by zero", error)
+
+ def test_sysexcepthook_raising_SystemExit_gets_through(self):
+ self.infunc.side_effect = ["1/0"]
+ def raise_base(*args, **kwargs):
+ raise SystemExit
+ self.sysmod.excepthook = raise_base
+ with self.assertRaises(SystemExit):
+ self.console.interact()
+
def test_banner(self):
# with banner
self.infunc.side_effect = EOFError('Finished')
diff --git a/Lib/test/test_pyrepl/test_pyrepl.py b/Lib/test/test_pyrepl/test_pyrepl.py
index 3a1bacef8a1756..d5eafc5eb58cac 100644
--- a/Lib/test/test_pyrepl/test_pyrepl.py
+++ b/Lib/test/test_pyrepl/test_pyrepl.py
@@ -1049,6 +1049,30 @@ def test_python_basic_repl(self):
self.assertNotIn("Exception", output)
self.assertNotIn("Traceback", output)
+ @force_not_colorized
+ def test_bad_sys_excepthook_doesnt_crash_pyrepl(self):
+ env = os.environ.copy()
+ commands = ("import sys\n"
+ "sys.excepthook = 1\n"
+ "1/0\n"
+ "exit()\n")
+
+ def check(output, exitcode):
+ self.assertIn("Error in sys.excepthook:", output)
+ self.assertEqual(output.count("'int' object is not callable"), 1)
+ self.assertIn("Original exception was:", output)
+ self.assertIn("division by zero", output)
+ self.assertEqual(exitcode, 0)
+ env.pop("PYTHON_BASIC_REPL", None)
+ output, exit_code = self.run_repl(commands, env=env)
+ if "can\'t use pyrepl" in output:
+ self.skipTest("pyrepl not available")
+ check(output, exit_code)
+
+ env["PYTHON_BASIC_REPL"] = "1"
+ output, exit_code = self.run_repl(commands, env=env)
+ check(output, exit_code)
+
def test_not_wiping_history_file(self):
# skip, if readline module is not available
import_module('readline')
diff --git a/Misc/NEWS.d/next/Library/2024-07-30-14-46-16.gh-issue-87320.-Yk1wb.rst b/Misc/NEWS.d/next/Library/2024-07-30-14-46-16.gh-issue-87320.-Yk1wb.rst
new file mode 100644
index 00000000000000..4322b719c690c2
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-07-30-14-46-16.gh-issue-87320.-Yk1wb.rst
@@ -0,0 +1,3 @@
+In :class:`code.InteractiveInterpreter`, handle exceptions caused by calling a
+non-default :func:`sys.excepthook`. Before, the exception bubbled up to the
+caller, ending the :term:`REPL`.
From 8844197daaeb3aa026cfe1cac6cf9d1b52c2540e Mon Sep 17 00:00:00 2001
From: Malcolm Smith
Date: Wed, 31 Jul 2024 19:35:10 +0100
Subject: [PATCH 095/139] gh-116622: Skip PosixPathTest.test_expanduser_pwd2 on
platforms which don't support pwd.getpwall (GH-122521)
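For context, test.support.get_attribute raises unittest.SkipTest when the attribute is missing, so the loop becomes a skip rather than an AttributeError on such platforms. A sketch, assuming a build where the pwd module imports:
    import pwd
    from test.support import get_attribute
    for entry in get_attribute(pwd, 'getpwall')():
        print(entry.pw_name)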
---
Lib/test/test_posixpath.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/Lib/test/test_posixpath.py b/Lib/test/test_posixpath.py
index fb714fd90ae2b3..ca5cf42f8fcd71 100644
--- a/Lib/test/test_posixpath.py
+++ b/Lib/test/test_posixpath.py
@@ -5,7 +5,7 @@
import unittest
from posixpath import realpath, abspath, dirname, basename
from test import test_genericpath
-from test.support import import_helper
+from test.support import get_attribute, import_helper
from test.support import cpython_only, os_helper
from test.support.os_helper import FakePath
from unittest import mock
@@ -359,7 +359,7 @@ def test_expanduser_pwd(self):
"no home directory on VxWorks")
def test_expanduser_pwd2(self):
pwd = import_helper.import_module('pwd')
- for all_entry in pwd.getpwall():
+ for all_entry in get_attribute(pwd, 'getpwall')():
name = all_entry.pw_name
# gh-121200: pw_dir can be different between getpwall() and
From 06656e259bc9b2c3cf8a23bdc6e4acb052c56e1f Mon Sep 17 00:00:00 2001
From: Malcolm Smith
Date: Thu, 1 Aug 2024 01:23:10 +0100
Subject: [PATCH 096/139] gh-116622: Don't expose `FICLONE` ioctl on Android
(#122522)
Don't expose `FICLONE` ioctl on Android
Co-authored-by: Russell Keith-Magee
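Since the constants may now be absent (on non-Linux platforms and, with this change, on Android), callers should feature-detect before attempting a clone; a hedged sketch with a hypothetical helper name:
    import errno
    import fcntl
    def try_reflink(src_fd, dst_fd):
        ficlone = getattr(fcntl, "FICLONE", None)
        if ficlone is None:
            return False                          # constant not exposed on this platform
        try:
            fcntl.ioctl(dst_fd, ficlone, src_fd)  # clone src_fd's contents into dst_fd
            return True
        except OSError as exc:
            if exc.errno in (errno.EOPNOTSUPP, errno.EXDEV):
                return False                      # filesystem cannot clone these files
            raise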
---
.../Library/2024-07-31-15-08-42.gh-issue-116622.aKxIQA.rst | 2 ++
Modules/fcntlmodule.c | 5 +++++
2 files changed, 7 insertions(+)
create mode 100644 Misc/NEWS.d/next/Library/2024-07-31-15-08-42.gh-issue-116622.aKxIQA.rst
diff --git a/Misc/NEWS.d/next/Library/2024-07-31-15-08-42.gh-issue-116622.aKxIQA.rst b/Misc/NEWS.d/next/Library/2024-07-31-15-08-42.gh-issue-116622.aKxIQA.rst
new file mode 100644
index 00000000000000..fc65b4d973b27d
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-07-31-15-08-42.gh-issue-116622.aKxIQA.rst
@@ -0,0 +1,2 @@
+On Android, the ``FICLONE`` and ``FICLONERANGE`` constants are no longer
+exposed by :mod:`fcntl`, as these ioctls are blocked by SELinux.
diff --git a/Modules/fcntlmodule.c b/Modules/fcntlmodule.c
index 0c06c03a6c403e..90ebfd7e99a777 100644
--- a/Modules/fcntlmodule.c
+++ b/Modules/fcntlmodule.c
@@ -580,12 +580,17 @@ all_ins(PyObject* m)
#ifdef F_GETPIPE_SZ
if (PyModule_AddIntMacro(m, F_GETPIPE_SZ)) return -1;
#endif
+
+/* On Android, FICLONE is blocked by SELinux. */
+#ifndef __ANDROID__
#ifdef FICLONE
if (PyModule_AddIntMacro(m, FICLONE)) return -1;
#endif
#ifdef FICLONERANGE
if (PyModule_AddIntMacro(m, FICLONERANGE)) return -1;
#endif
+#endif
+
#ifdef F_GETOWN_EX
// since Linux 2.6.32
if (PyModule_AddIntMacro(m, F_GETOWN_EX)) return -1;
From 46f5a4f9e1781ad8d60eb53bbaf6cd8534a286cd Mon Sep 17 00:00:00 2001
From: jianghuyiyuan
Date: Thu, 1 Aug 2024 09:26:09 +0900
Subject: [PATCH 097/139] Fix typos in docs, error messages and comments
(#122502)
Signed-off-by: jianghuyiyuan
---
Doc/howto/isolating-extensions.rst | 2 +-
Doc/library/threading.rst | 2 +-
Doc/using/configure.rst | 2 +-
Include/internal/pycore_freelist_state.h | 2 +-
Lib/test/test_webbrowser.py | 2 +-
Python/crossinterp.c | 2 +-
6 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/Doc/howto/isolating-extensions.rst b/Doc/howto/isolating-extensions.rst
index e35855deedbe5f..a636e06bda8344 100644
--- a/Doc/howto/isolating-extensions.rst
+++ b/Doc/howto/isolating-extensions.rst
@@ -339,7 +339,7 @@ That is, heap types should:
- Define a traverse function using ``Py_tp_traverse``, which
visits the type (e.g. using ``Py_VISIT(Py_TYPE(self))``).
-Please refer to the the documentation of
+Please refer to the documentation of
:c:macro:`Py_TPFLAGS_HAVE_GC` and :c:member:`~PyTypeObject.tp_traverse`
for additional considerations.
diff --git a/Doc/library/threading.rst b/Doc/library/threading.rst
index 49c2b9b3ccd4fd..cb82fea377697b 100644
--- a/Doc/library/threading.rst
+++ b/Doc/library/threading.rst
@@ -1018,7 +1018,7 @@ method. The :meth:`~Event.wait` method blocks until the flag is true.
has not expired. The return value represents the
reason that this blocking method returned; ``True`` if returning because
the internal flag is set to true, or ``False`` if a timeout is given and
- the the internal flag did not become true within the given wait time.
+ the internal flag did not become true within the given wait time.
When the timeout argument is present and not ``None``, it should be a
floating-point number specifying a timeout for the operation in seconds,
diff --git a/Doc/using/configure.rst b/Doc/using/configure.rst
index 32adfb0ba6e5fc..6a4a52bb6e8b12 100644
--- a/Doc/using/configure.rst
+++ b/Doc/using/configure.rst
@@ -1120,7 +1120,7 @@ Remove built files.
make distclean
^^^^^^^^^^^^^^
-In addition to the the work done by ``make clean``, remove files
+In addition to the work done by ``make clean``, remove files
created by the configure script. ``configure`` will have to be run
before building again. [#]_
diff --git a/Include/internal/pycore_freelist_state.h b/Include/internal/pycore_freelist_state.h
index edf79dd7521c41..e8df784bcba06e 100644
--- a/Include/internal/pycore_freelist_state.h
+++ b/Include/internal/pycore_freelist_state.h
@@ -28,7 +28,7 @@ extern "C" {
// A generic freelist of either PyObjects or other data structures.
struct _Py_freelist {
- // Entries are linked together using the first word of the the object.
+ // Entries are linked together using the first word of the object.
// For PyObjects, this overlaps with the `ob_refcnt` field or the `ob_tid`
// field.
void *freelist;
diff --git a/Lib/test/test_webbrowser.py b/Lib/test/test_webbrowser.py
index ae8d776e8413ff..4fcbc5c2e59ea3 100644
--- a/Lib/test/test_webbrowser.py
+++ b/Lib/test/test_webbrowser.py
@@ -244,7 +244,7 @@ def _obj_ref(self, *args):
@unittest.skipIf(getattr(webbrowser, "objc", None) is None,
"iOS Webbrowser tests require ctypes")
def setUp(self):
- # Intercept the the objc library. Wrap the calls to get the
+ # Intercept the objc library. Wrap the calls to get the
# references to classes and selectors to return strings, and
# wrap msgSend to return stringified object references
self.orig_objc = webbrowser.objc
diff --git a/Python/crossinterp.c b/Python/crossinterp.c
index acb372af42408e..0aca322d987dba 100644
--- a/Python/crossinterp.c
+++ b/Python/crossinterp.c
@@ -699,7 +699,7 @@ _PyXI_excinfo_InitFromException(_PyXI_excinfo *info, PyObject *exc)
Py_DECREF(tbexc);
if (info->errdisplay == NULL) {
#ifdef Py_DEBUG
- PyErr_FormatUnraisable("Exception ignored while formating TracebackException");
+ PyErr_FormatUnraisable("Exception ignored while formatting TracebackException");
#endif
PyErr_Clear();
}
From a9d56e38a08ec198a2289d8fff65444b39dd4a32 Mon Sep 17 00:00:00 2001
From: Mark Shannon
Date: Thu, 1 Aug 2024 09:27:26 +0100
Subject: [PATCH 098/139] GH-122155: Track local variables between pops and
pushes in cases generator (GH-122286)
---
Include/internal/pycore_opcode_metadata.h | 2 +-
Lib/test/test_generated_cases.py | 65 +++++-
Python/bytecodes.c | 9 +-
Python/executor_cases.c.h | 12 +-
Python/generated_cases.c.h | 216 +++++++++++++++----
Python/optimizer_cases.c.h | 3 +-
Tools/cases_generator/analyzer.py | 10 +-
Tools/cases_generator/generators_common.py | 29 ++-
Tools/cases_generator/optimizer_generator.py | 25 +--
Tools/cases_generator/parsing.py | 5 +
Tools/cases_generator/stack.py | 172 +++++++++++----
Tools/cases_generator/tier1_generator.py | 50 +++--
Tools/cases_generator/tier2_generator.py | 24 ++-
13 files changed, 463 insertions(+), 159 deletions(-)
diff --git a/Include/internal/pycore_opcode_metadata.h b/Include/internal/pycore_opcode_metadata.h
index eaba280f1bf1cd..464d3500890e53 100644
--- a/Include/internal/pycore_opcode_metadata.h
+++ b/Include/internal/pycore_opcode_metadata.h
@@ -903,7 +903,7 @@ int _PyOpcode_num_pushed(int opcode, int oparg) {
case UNARY_NOT:
return 1;
case UNPACK_EX:
- return 1 + (oparg >> 8) + (oparg & 0xFF);
+ return 1 + (oparg & 0xFF) + (oparg >> 8);
case UNPACK_SEQUENCE:
return oparg;
case UNPACK_SEQUENCE_LIST:
diff --git a/Lib/test/test_generated_cases.py b/Lib/test/test_generated_cases.py
index a4cbdecdc9db11..beafa544aaacb7 100644
--- a/Lib/test/test_generated_cases.py
+++ b/Lib/test/test_generated_cases.py
@@ -31,7 +31,7 @@ def skip_if_different_mount_drives():
with test_tools.imports_under_tool("cases_generator"):
from analyzer import StackItem
import parser
- from stack import Stack
+ from stack import Local, Stack
import tier1_generator
import optimizer_generator
@@ -60,9 +60,9 @@ def test_effect_sizes(self):
stack.pop(y)
stack.pop(x)
for out in outputs:
- stack.push(out)
- self.assertEqual(stack.base_offset.to_c(), "-1 - oparg*2 - oparg")
- self.assertEqual(stack.top_offset.to_c(), "1 - oparg*2 - oparg + oparg*4")
+ stack.push(Local.local(out))
+ self.assertEqual(stack.base_offset.to_c(), "-1 - oparg - oparg*2")
+ self.assertEqual(stack.top_offset.to_c(), "1 - oparg - oparg*2 + oparg*4")
class TestGeneratedCases(unittest.TestCase):
@@ -602,7 +602,11 @@ def test_array_error_if(self):
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(OP);
- if (oparg == 0) { stack_pointer += -1 - oparg; goto somewhere; }
+ if (oparg == 0) {
+ stack_pointer += -1 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto somewhere;
+ }
stack_pointer += -1 - oparg;
assert(WITHIN_STACK_BOUNDS());
DISPATCH();
@@ -908,7 +912,6 @@ def test_used_unused_used(self):
next_instr += 1;
INSTRUCTION_STATS(TEST);
_PyStackRef w;
- _PyStackRef x;
_PyStackRef y;
// FIRST
w = stack_pointer[-1];
@@ -916,11 +919,10 @@ def test_used_unused_used(self):
use(w);
}
// SECOND
- x = w;
{
}
// THIRD
- y = x;
+ y = w;
{
use(y);
}
@@ -1024,6 +1026,7 @@ def test_pop_on_error_peeks(self):
}
op(THIRD, (j, k --)) {
+ j,k; // Mark j and k as used
ERROR_IF(cond, error);
}
@@ -1054,6 +1057,7 @@ def test_pop_on_error_peeks(self):
k = b;
j = a;
{
+ j,k; // Mark j and k as used
if (cond) goto pop_2_error;
}
stack_pointer += -2;
@@ -1063,6 +1067,51 @@ def test_pop_on_error_peeks(self):
"""
self.run_cases_test(input, output)
+ def test_push_then_error(self):
+
+ input = """
+ op(FIRST, ( -- a)) {
+ a = 1;
+ }
+
+ op(SECOND, (a -- a, b)) {
+ b = 1;
+ ERROR_IF(cond, error);
+ }
+
+ macro(TEST) = FIRST + SECOND;
+ """
+
+ output = """
+ TARGET(TEST) {
+ frame->instr_ptr = next_instr;
+ next_instr += 1;
+ INSTRUCTION_STATS(TEST);
+ _PyStackRef a;
+ _PyStackRef b;
+ // FIRST
+ {
+ a = 1;
+ }
+ // SECOND
+ {
+ b = 1;
+ if (cond) {
+ stack_pointer[0] = a;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
+ }
+ stack_pointer[0] = a;
+ stack_pointer[1] = b;
+ stack_pointer += 2;
+ assert(WITHIN_STACK_BOUNDS());
+ DISPATCH();
+ }
+ """
+ self.run_cases_test(input, output)
+
class TestGeneratedAbstractCases(unittest.TestCase):
def setUp(self) -> None:
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index 4afce2cc3bea9d..abfd8039b293a1 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -1357,8 +1357,8 @@ dummy_func(
(void)counter;
}
- op(_UNPACK_SEQUENCE, (seq -- unused[oparg])) {
- _PyStackRef *top = stack_pointer + oparg - 1;
+ op(_UNPACK_SEQUENCE, (seq -- output[oparg])) {
+ _PyStackRef *top = output + oparg;
int res = _PyEval_UnpackIterableStackRef(tstate, seq, oparg, -1, top);
DECREF_INPUTS();
ERROR_IF(res == 0, error);
@@ -1401,9 +1401,8 @@ dummy_func(
DECREF_INPUTS();
}
- inst(UNPACK_EX, (seq -- unused[oparg & 0xFF], unused, unused[oparg >> 8])) {
- int totalargs = 1 + (oparg & 0xFF) + (oparg >> 8);
- _PyStackRef *top = stack_pointer + totalargs - 1;
+ inst(UNPACK_EX, (seq -- left[oparg & 0xFF], unused, right[oparg >> 8])) {
+ _PyStackRef *top = right + (oparg >> 8);
int res = _PyEval_UnpackIterableStackRef(tstate, seq, oparg & 0xFF, oparg >> 8, top);
DECREF_INPUTS();
ERROR_IF(res == 0, error);
diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h
index 62654035e80f50..f0acc3b6ea2ef4 100644
--- a/Python/executor_cases.c.h
+++ b/Python/executor_cases.c.h
@@ -1440,9 +1440,11 @@
case _UNPACK_SEQUENCE: {
_PyStackRef seq;
+ _PyStackRef *output;
oparg = CURRENT_OPARG();
seq = stack_pointer[-1];
- _PyStackRef *top = stack_pointer + oparg - 1;
+ output = &stack_pointer[-1];
+ _PyStackRef *top = output + oparg;
int res = _PyEval_UnpackIterableStackRef(tstate, seq, oparg, -1, top);
PyStackRef_CLOSE(seq);
if (res == 0) JUMP_TO_ERROR();
@@ -1532,14 +1534,15 @@
case _UNPACK_EX: {
_PyStackRef seq;
+ _PyStackRef *right;
oparg = CURRENT_OPARG();
seq = stack_pointer[-1];
- int totalargs = 1 + (oparg & 0xFF) + (oparg >> 8);
- _PyStackRef *top = stack_pointer + totalargs - 1;
+ right = &stack_pointer[(oparg & 0xFF)];
+ _PyStackRef *top = right + (oparg >> 8);
int res = _PyEval_UnpackIterableStackRef(tstate, seq, oparg & 0xFF, oparg >> 8, top);
PyStackRef_CLOSE(seq);
if (res == 0) JUMP_TO_ERROR();
- stack_pointer += (oparg >> 8) + (oparg & 0xFF);
+ stack_pointer += (oparg & 0xFF) + (oparg >> 8);
assert(WITHIN_STACK_BOUNDS());
break;
}
@@ -3595,6 +3598,7 @@
args = &stack_pointer[-oparg];
self_or_null = stack_pointer[-1 - oparg];
callable = stack_pointer[-2 - oparg];
+ args = &stack_pointer[-oparg];
if (PyStackRef_TYPE(callable) == &PyMethod_Type && PyStackRef_IsNull(self_or_null)) {
PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable);
PyObject *self = ((PyMethodObject *)callable_o)->im_self;
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h
index 3c643f637ab095..ff8c4eab58f324 100644
--- a/Python/generated_cases.c.h
+++ b/Python/generated_cases.c.h
@@ -610,11 +610,19 @@
for (int _i = oparg; --_i >= 0;) {
PyStackRef_CLOSE(values[_i]);
}
- if (true) { stack_pointer += -oparg; goto error; }
+ if (true) {
+ stack_pointer += -oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
}
PyObject *list_o = _PyList_FromArraySteal(values_o, oparg);
STACKREFS_TO_PYOBJECTS_CLEANUP(values_o);
- if (list_o == NULL) { stack_pointer += -oparg; goto error; }
+ if (list_o == NULL) {
+ stack_pointer += -oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
list = PyStackRef_FromPyObjectSteal(list_o);
stack_pointer[-oparg] = list;
stack_pointer += 1 - oparg;
@@ -634,7 +642,11 @@
for (int _i = oparg*2; --_i >= 0;) {
PyStackRef_CLOSE(values[_i]);
}
- if (true) { stack_pointer += -oparg*2; goto error; }
+ if (true) {
+ stack_pointer += -oparg*2;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
}
PyObject *map_o = _PyDict_FromItems(
values_o, 2,
@@ -644,7 +656,11 @@
for (int _i = oparg*2; --_i >= 0;) {
PyStackRef_CLOSE(values[_i]);
}
- if (map_o == NULL) { stack_pointer += -oparg*2; goto error; }
+ if (map_o == NULL) {
+ stack_pointer += -oparg*2;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
map = PyStackRef_FromPyObjectSteal(map_o);
stack_pointer[-oparg*2] = map;
stack_pointer += 1 - oparg*2;
@@ -664,7 +680,11 @@
for (int _i = oparg; --_i >= 0;) {
PyStackRef_CLOSE(values[_i]);
}
- if (true) { stack_pointer += -oparg; goto error; }
+ if (true) {
+ stack_pointer += -oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
}
int err = 0;
for (int i = 0; i < oparg; i++) {
@@ -676,7 +696,11 @@
}
if (err != 0) {
Py_DECREF(set_o);
- if (true) { stack_pointer += -oparg; goto error; }
+ if (true) {
+ stack_pointer += -oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
}
set = PyStackRef_FromPyObjectSteal(set_o);
stack_pointer[-oparg] = set;
@@ -703,7 +727,11 @@
PyStackRef_CLOSE(start);
PyStackRef_CLOSE(stop);
PyStackRef_XCLOSE(step);
- if (slice_o == NULL) { stack_pointer += -2 - ((oparg == 3) ? 1 : 0); goto error; }
+ if (slice_o == NULL) {
+ stack_pointer += -2 - ((oparg == 3) ? 1 : 0);
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
slice = PyStackRef_FromPyObjectSteal(slice_o);
stack_pointer[-2 - ((oparg == 3) ? 1 : 0)] = slice;
stack_pointer += -1 - ((oparg == 3) ? 1 : 0);
@@ -723,14 +751,22 @@
for (int _i = oparg; --_i >= 0;) {
PyStackRef_CLOSE(pieces[_i]);
}
- if (true) { stack_pointer += -oparg; goto error; }
+ if (true) {
+ stack_pointer += -oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
}
PyObject *str_o = _PyUnicode_JoinArray(&_Py_STR(empty), pieces_o, oparg);
STACKREFS_TO_PYOBJECTS_CLEANUP(pieces_o);
for (int _i = oparg; --_i >= 0;) {
PyStackRef_CLOSE(pieces[_i]);
}
- if (str_o == NULL) { stack_pointer += -oparg; goto error; }
+ if (str_o == NULL) {
+ stack_pointer += -oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
str = PyStackRef_FromPyObjectSteal(str_o);
stack_pointer[-oparg] = str;
stack_pointer += 1 - oparg;
@@ -746,7 +782,11 @@
_PyStackRef tup;
values = &stack_pointer[-oparg];
PyObject *tup_o = _PyTuple_FromStackRefSteal(values, oparg);
- if (tup_o == NULL) { stack_pointer += -oparg; goto error; }
+ if (tup_o == NULL) {
+ stack_pointer += -oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
tup = PyStackRef_FromPyObjectSteal(tup_o);
stack_pointer[-oparg] = tup;
stack_pointer += 1 - oparg;
@@ -780,7 +820,6 @@
self_or_null = stack_pointer[-1 - oparg];
callable = stack_pointer[-2 - oparg];
{
- args = &stack_pointer[-oparg];
uint16_t counter = read_u16(&this_instr[1].cache);
(void)counter;
#if ENABLE_SPECIALIZATION
@@ -795,7 +834,9 @@
}
/* Skip 2 cache entries */
// _MAYBE_EXPAND_METHOD
+ args = &stack_pointer[-oparg];
{
+ args = &stack_pointer[-oparg];
if (PyStackRef_TYPE(callable) == &PyMethod_Type && PyStackRef_IsNull(self_or_null)) {
PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable);
PyObject *self = ((PyMethodObject *)callable_o)->im_self;
@@ -813,6 +854,7 @@
}
}
// _DO_CALL
+ args = &stack_pointer[-oparg];
self_or_null = maybe_self;
callable = func;
{
@@ -852,7 +894,11 @@
for (int _i = oparg; --_i >= 0;) {
PyStackRef_CLOSE(args[_i]);
}
- if (true) { stack_pointer += -2 - oparg; goto error; }
+ if (true) {
+ stack_pointer += -2 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
}
PyObject *res_o = PyObject_Vectorcall(
callable_o, args_o,
@@ -881,7 +927,11 @@
for (int i = 0; i < total_args; i++) {
PyStackRef_CLOSE(args[i]);
}
- if (res_o == NULL) { stack_pointer += -2 - oparg; goto error; }
+ if (res_o == NULL) {
+ stack_pointer += -2 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
res = PyStackRef_FromPyObjectSteal(res_o);
}
// _CHECK_PERIODIC
@@ -1190,7 +1240,11 @@
for (int _i = oparg; --_i >= 0;) {
PyStackRef_CLOSE(args[_i]);
}
- if (true) { stack_pointer += -2 - oparg; goto error; }
+ if (true) {
+ stack_pointer += -2 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
}
PyObject *res_o = tp->tp_vectorcall((PyObject *)tp, args_o, total_args, NULL);
STACKREFS_TO_PYOBJECTS_CLEANUP(args_o);
@@ -1199,7 +1253,11 @@
PyStackRef_CLOSE(args[i]);
}
PyStackRef_CLOSE(callable);
- if (res_o == NULL) { stack_pointer += -2 - oparg; goto error; }
+ if (res_o == NULL) {
+ stack_pointer += -2 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
res = PyStackRef_FromPyObjectSteal(res_o);
}
// _CHECK_PERIODIC
@@ -1247,7 +1305,11 @@
for (int _i = oparg; --_i >= 0;) {
PyStackRef_CLOSE(args[_i]);
}
- if (true) { stack_pointer += -2 - oparg; goto error; }
+ if (true) {
+ stack_pointer += -2 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
}
PyObject *res_o = ((PyCFunctionFast)(void(*)(void))cfunc)(
PyCFunction_GET_SELF(callable_o),
@@ -1260,7 +1322,11 @@
PyStackRef_CLOSE(args[i]);
}
PyStackRef_CLOSE(callable);
- if (res_o == NULL) { stack_pointer += -2 - oparg; goto error; }
+ if (res_o == NULL) {
+ stack_pointer += -2 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
res = PyStackRef_FromPyObjectSteal(res_o);
}
// _CHECK_PERIODIC
@@ -1310,7 +1376,11 @@
for (int _i = oparg; --_i >= 0;) {
PyStackRef_CLOSE(args[_i]);
}
- if (true) { stack_pointer += -2 - oparg; goto error; }
+ if (true) {
+ stack_pointer += -2 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
}
PyObject *res_o = cfunc(PyCFunction_GET_SELF(callable_o), args_o, total_args, NULL);
STACKREFS_TO_PYOBJECTS_CLEANUP(args_o);
@@ -1320,7 +1390,11 @@
PyStackRef_CLOSE(args[i]);
}
PyStackRef_CLOSE(callable);
- if (res_o == NULL) { stack_pointer += -2 - oparg; goto error; }
+ if (res_o == NULL) {
+ stack_pointer += -2 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
res = PyStackRef_FromPyObjectSteal(res_o);
}
// _CHECK_PERIODIC
@@ -1370,7 +1444,11 @@
assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
PyStackRef_CLOSE(arg);
PyStackRef_CLOSE(callable);
- if (res_o == NULL) { stack_pointer += -2 - oparg; goto error; }
+ if (res_o == NULL) {
+ stack_pointer += -2 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
res = PyStackRef_FromPyObjectSteal(res_o);
}
// _CHECK_PERIODIC
@@ -1467,7 +1545,11 @@
PyStackRef_CLOSE(callargs_st);
PyStackRef_XCLOSE(kwargs_st);
assert(PyStackRef_AsPyObjectBorrow(PEEK(2 + (oparg & 1))) == NULL);
- if (PyStackRef_IsNull(result)) { stack_pointer += -3 - (oparg & 1); goto error; }
+ if (PyStackRef_IsNull(result)) {
+ stack_pointer += -3 - (oparg & 1);
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
stack_pointer[-3 - (oparg & 1)] = result;
stack_pointer += -2 - (oparg & 1);
assert(WITHIN_STACK_BOUNDS());
@@ -1625,7 +1707,11 @@
PyStackRef_CLOSE(args[_i]);
}
PyStackRef_CLOSE(kwnames);
- if (true) { stack_pointer += -3 - oparg; goto error; }
+ if (true) {
+ stack_pointer += -3 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
}
PyObject *res_o = PyObject_Vectorcall(
callable_o, args_o,
@@ -1655,7 +1741,11 @@
for (int i = 0; i < total_args; i++) {
PyStackRef_CLOSE(args[i]);
}
- if (res_o == NULL) { stack_pointer += -3 - oparg; goto error; }
+ if (res_o == NULL) {
+ stack_pointer += -3 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
res = PyStackRef_FromPyObjectSteal(res_o);
stack_pointer[-3 - oparg] = res;
stack_pointer += -2 - oparg;
@@ -1785,7 +1875,11 @@
for (int _i = oparg; --_i >= 0;) {
PyStackRef_CLOSE(args[_i]);
}
- if (true) { stack_pointer += -2 - oparg; goto error; }
+ if (true) {
+ stack_pointer += -2 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
}
PyObject *res_o = cfunc(self, (args_o + 1), nargs);
STACKREFS_TO_PYOBJECTS_CLEANUP(args_o);
@@ -1795,7 +1889,11 @@
PyStackRef_CLOSE(args[i]);
}
PyStackRef_CLOSE(callable);
- if (res_o == NULL) { stack_pointer += -2 - oparg; goto error; }
+ if (res_o == NULL) {
+ stack_pointer += -2 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
res = PyStackRef_FromPyObjectSteal(res_o);
}
// _CHECK_PERIODIC
@@ -1848,7 +1946,11 @@
for (int _i = oparg; --_i >= 0;) {
PyStackRef_CLOSE(args[_i]);
}
- if (true) { stack_pointer += -2 - oparg; goto error; }
+ if (true) {
+ stack_pointer += -2 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
}
PyObject *res_o = cfunc(self, (args_o + 1), nargs, NULL);
STACKREFS_TO_PYOBJECTS_CLEANUP(args_o);
@@ -1858,7 +1960,11 @@
PyStackRef_CLOSE(args[i]);
}
PyStackRef_CLOSE(callable);
- if (res_o == NULL) { stack_pointer += -2 - oparg; goto error; }
+ if (res_o == NULL) {
+ stack_pointer += -2 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
res = PyStackRef_FromPyObjectSteal(res_o);
}
// _CHECK_PERIODIC
@@ -1912,7 +2018,11 @@
assert((res_o != NULL) ^ (_PyErr_Occurred(tstate) != NULL));
PyStackRef_CLOSE(self_stackref);
PyStackRef_CLOSE(callable);
- if (res_o == NULL) { stack_pointer += -2 - oparg; goto error; }
+ if (res_o == NULL) {
+ stack_pointer += -2 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
res = PyStackRef_FromPyObjectSteal(res_o);
}
// _CHECK_PERIODIC
@@ -1969,7 +2079,11 @@
PyStackRef_CLOSE(self_stackref);
PyStackRef_CLOSE(arg_stackref);
PyStackRef_CLOSE(callable);
- if (res_o == NULL) { stack_pointer += -2 - oparg; goto error; }
+ if (res_o == NULL) {
+ stack_pointer += -2 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
res = PyStackRef_FromPyObjectSteal(res_o);
}
// _CHECK_PERIODIC
@@ -2022,7 +2136,11 @@
for (int _i = oparg; --_i >= 0;) {
PyStackRef_CLOSE(args[_i]);
}
- if (true) { stack_pointer += -2 - oparg; goto error; }
+ if (true) {
+ stack_pointer += -2 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
}
PyObject *res_o = PyObject_Vectorcall(
callable_o, args_o,
@@ -2034,7 +2152,11 @@
for (int i = 0; i < total_args; i++) {
PyStackRef_CLOSE(args[i]);
}
- if (res_o == NULL) { stack_pointer += -2 - oparg; goto error; }
+ if (res_o == NULL) {
+ stack_pointer += -2 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
res = PyStackRef_FromPyObjectSteal(res_o);
}
// _CHECK_PERIODIC
@@ -3526,6 +3648,7 @@
self_or_null = stack_pointer[-1 - oparg];
callable = stack_pointer[-2 - oparg];
{
+ args = &stack_pointer[-oparg];
if (PyStackRef_TYPE(callable) == &PyMethod_Type && PyStackRef_IsNull(self_or_null)) {
PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable);
PyObject *self = ((PyMethodObject *)callable_o)->im_self;
@@ -3543,6 +3666,7 @@
}
}
// _MONITOR_CALL
+ args = &stack_pointer[-oparg];
{
int is_meth = !PyStackRef_IsNull(maybe_self);
PyObject *function = PyStackRef_AsPyObjectBorrow(func);
@@ -3563,6 +3687,7 @@
if (err) goto error;
}
// _DO_CALL
+ args = &stack_pointer[-oparg];
self_or_null = maybe_self;
callable = func;
{
@@ -3602,7 +3727,11 @@
for (int _i = oparg; --_i >= 0;) {
PyStackRef_CLOSE(args[_i]);
}
- if (true) { stack_pointer += -2 - oparg; goto error; }
+ if (true) {
+ stack_pointer += -2 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
}
PyObject *res_o = PyObject_Vectorcall(
callable_o, args_o,
@@ -3631,7 +3760,11 @@
for (int i = 0; i < total_args; i++) {
PyStackRef_CLOSE(args[i]);
}
- if (res_o == NULL) { stack_pointer += -2 - oparg; goto error; }
+ if (res_o == NULL) {
+ stack_pointer += -2 - oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
res = PyStackRef_FromPyObjectSteal(res_o);
}
// _CHECK_PERIODIC
@@ -5777,7 +5910,11 @@
"bad RAISE_VARARGS oparg");
break;
}
- if (true) { stack_pointer += -oparg; goto error; }
+ if (true) {
+ stack_pointer += -oparg;
+ assert(WITHIN_STACK_BOUNDS());
+ goto error;
+ }
}
TARGET(RERAISE) {
@@ -6834,13 +6971,14 @@
next_instr += 1;
INSTRUCTION_STATS(UNPACK_EX);
_PyStackRef seq;
+ _PyStackRef *right;
seq = stack_pointer[-1];
- int totalargs = 1 + (oparg & 0xFF) + (oparg >> 8);
- _PyStackRef *top = stack_pointer + totalargs - 1;
+ right = &stack_pointer[(oparg & 0xFF)];
+ _PyStackRef *top = right + (oparg >> 8);
int res = _PyEval_UnpackIterableStackRef(tstate, seq, oparg & 0xFF, oparg >> 8, top);
PyStackRef_CLOSE(seq);
if (res == 0) goto pop_1_error;
- stack_pointer += (oparg >> 8) + (oparg & 0xFF);
+ stack_pointer += (oparg & 0xFF) + (oparg >> 8);
assert(WITHIN_STACK_BOUNDS());
DISPATCH();
}
@@ -6853,6 +6991,7 @@
_Py_CODEUNIT *this_instr = next_instr - 2;
(void)this_instr;
_PyStackRef seq;
+ _PyStackRef *output;
// _SPECIALIZE_UNPACK_SEQUENCE
seq = stack_pointer[-1];
{
@@ -6872,7 +7011,8 @@
}
// _UNPACK_SEQUENCE
{
- _PyStackRef *top = stack_pointer + oparg - 1;
+ output = &stack_pointer[-1];
+ _PyStackRef *top = output + oparg;
int res = _PyEval_UnpackIterableStackRef(tstate, seq, oparg, -1, top);
PyStackRef_CLOSE(seq);
if (res == 0) goto pop_1_error;
diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h
index 4fa40ff861ba70..b704c9e77319e4 100644
--- a/Python/optimizer_cases.c.h
+++ b/Python/optimizer_cases.c.h
@@ -753,7 +753,7 @@
for (int i = 0; i < totalargs; i++) {
values[i] = sym_new_unknown(ctx);
}
- stack_pointer += (oparg >> 8) + (oparg & 0xFF);
+ stack_pointer += (oparg & 0xFF) + (oparg >> 8);
assert(WITHIN_STACK_BOUNDS());
break;
}
@@ -1607,6 +1607,7 @@
args = &stack_pointer[-oparg];
self_or_null = stack_pointer[-1 - oparg];
callable = stack_pointer[-2 - oparg];
+ args = &stack_pointer[-oparg];
(void)callable;
(void)self_or_null;
(void)args;
diff --git a/Tools/cases_generator/analyzer.py b/Tools/cases_generator/analyzer.py
index 675dc0b9acaf45..f6625a3f7322d5 100644
--- a/Tools/cases_generator/analyzer.py
+++ b/Tools/cases_generator/analyzer.py
@@ -216,6 +216,7 @@ def is_super(self) -> bool:
@dataclass
class Instruction:
+ where: lexer.Token
name: str
parts: list[Part]
_properties: Properties | None
@@ -690,9 +691,10 @@ def add_op(op: parser.InstDef, uops: dict[str, Uop]) -> None:
def add_instruction(
- name: str, parts: list[Part], instructions: dict[str, Instruction]
+ where: lexer.Token, name: str, parts: list[Part],
+ instructions: dict[str, Instruction]
) -> None:
- instructions[name] = Instruction(name, parts, None)
+ instructions[name] = Instruction(where, name, parts, None)
def desugar_inst(
@@ -720,7 +722,7 @@ def desugar_inst(
parts.append(uop)
else:
parts[uop_index] = uop
- add_instruction(name, parts, instructions)
+ add_instruction(inst.first_token, name, parts, instructions)
def add_macro(
@@ -741,7 +743,7 @@ def add_macro(
case _:
assert False
assert parts
- add_instruction(macro.name, parts, instructions)
+ add_instruction(macro.first_token, macro.name, parts, instructions)
def add_family(
diff --git a/Tools/cases_generator/generators_common.py b/Tools/cases_generator/generators_common.py
index 587dc0d03eded5..ab8c99f1e25f97 100644
--- a/Tools/cases_generator/generators_common.py
+++ b/Tools/cases_generator/generators_common.py
@@ -8,7 +8,7 @@
StackItem,
)
from cwriter import CWriter
-from typing import Callable, Mapping, TextIO, Iterator, Tuple
+from typing import Callable, Mapping, TextIO, Iterator
from lexer import Token
from stack import Stack
@@ -25,7 +25,7 @@ def root_relative_path(filename: str) -> str:
return filename
-def type_and_null(var: StackItem) -> Tuple[str, str]:
+def type_and_null(var: StackItem) -> tuple[str, str]:
if var.type:
return var.type, "NULL"
elif var.is_array():
@@ -95,16 +95,23 @@ def replace_error(
c_offset = stack.peek_offset()
try:
offset = -int(c_offset)
- close = ";\n"
except ValueError:
- offset = None
- out.emit(f"{{ stack_pointer += {c_offset}; ")
- close = "; }\n"
- out.emit("goto ")
- if offset:
- out.emit(f"pop_{offset}_")
- out.emit(label)
- out.emit(close)
+ offset = -1
+ if offset > 0:
+ out.emit(f"goto pop_{offset}_")
+ out.emit(label)
+ out.emit(";\n")
+ elif offset == 0:
+ out.emit("goto ")
+ out.emit(label)
+ out.emit(";\n")
+ else:
+ out.emit("{\n")
+ stack.flush_locally(out)
+ out.emit("goto ")
+ out.emit(label)
+ out.emit(";\n")
+ out.emit("}\n")
def replace_error_no_pop(
diff --git a/Tools/cases_generator/optimizer_generator.py b/Tools/cases_generator/optimizer_generator.py
index 6a66693b93305d..f6c2fea40f0dbb 100644
--- a/Tools/cases_generator/optimizer_generator.py
+++ b/Tools/cases_generator/optimizer_generator.py
@@ -23,7 +23,7 @@
from cwriter import CWriter
from typing import TextIO, Iterator
from lexer import Token
-from stack import Stack, StackError
+from stack import Local, Stack, StackError
DEFAULT_OUTPUT = ROOT / "Python/optimizer_cases.c.h"
DEFAULT_ABSTRACT_INPUT = (ROOT / "Python/optimizer_bytecodes.c").absolute().as_posix()
@@ -98,19 +98,18 @@ def write_uop(
debug: bool,
skip_inputs: bool,
) -> None:
+ locals: dict[str, Local] = {}
try:
prototype = override if override else uop
is_override = override is not None
out.start_line()
for var in reversed(prototype.stack.inputs):
- res = stack.pop(var, extract_bits=True)
+ code, local = stack.pop(var, extract_bits=True)
if not skip_inputs:
- out.emit(res)
- if not prototype.properties.stores_sp:
- for i, var in enumerate(prototype.stack.outputs):
- res = stack.push(var)
- if not var.peek or is_override:
- out.emit(res)
+ out.emit(code)
+ if local.defined:
+ locals[local.name] = local
+ out.emit(stack.define_output_arrays(prototype.stack.outputs))
if debug:
args = []
for var in prototype.stack.inputs:
@@ -135,10 +134,12 @@ def write_uop(
else:
emit_default(out, uop)
- if prototype.properties.stores_sp:
- for i, var in enumerate(prototype.stack.outputs):
- if not var.peek or is_override:
- out.emit(stack.push(var))
+ for var in prototype.stack.outputs:
+ if var.name in locals:
+ local = locals[var.name]
+ else:
+ local = Local.local(var)
+ out.emit(stack.push(local))
out.start_line()
stack.flush(out, cast_type="_Py_UopsSymbol *", extract_bits=True)
except StackError as ex:
diff --git a/Tools/cases_generator/parsing.py b/Tools/cases_generator/parsing.py
index 8957838f7a90a1..5acad57f395ea6 100644
--- a/Tools/cases_generator/parsing.py
+++ b/Tools/cases_generator/parsing.py
@@ -60,6 +60,11 @@ def tokens(self) -> list[lx.Token]:
end = context.end
return tokens[begin:end]
+ @property
+ def first_token(self) -> lx.Token:
+ context = self.context
+ assert context is not None
+ return context.owner.tokens[context.begin]
@dataclass
class Block(Node):
diff --git a/Tools/cases_generator/stack.py b/Tools/cases_generator/stack.py
index 61dcfd3e30a510..d2d598a120892d 100644
--- a/Tools/cases_generator/stack.py
+++ b/Tools/cases_generator/stack.py
@@ -38,6 +38,43 @@ def var_size(var: StackItem) -> str:
else:
return "1"
+@dataclass
+class Local:
+
+ item: StackItem
+ cached: bool
+ in_memory: bool
+ defined: bool
+
+ @staticmethod
+ def unused(defn: StackItem) -> "Local":
+ return Local(defn, False, defn.is_array(), False)
+
+ @staticmethod
+ def local(defn: StackItem) -> "Local":
+ array = defn.is_array()
+ return Local(defn, not array, array, True)
+
+ @staticmethod
+ def redefinition(var: StackItem, prev: "Local") -> "Local":
+ assert var.is_array() == prev.is_array()
+ return Local(var, prev.cached, prev.in_memory, True)
+
+ @property
+ def size(self) -> str:
+ return self.item.size
+
+ @property
+ def name(self) -> str:
+ return self.item.name
+
+ @property
+ def condition(self) -> str | None:
+ return self.item.condition
+
+ def is_array(self) -> bool:
+ return self.item.is_array()
+
@dataclass
class StackOffset:
"The stack offset of the virtual base of the stack from the physical stack pointer"
@@ -66,7 +103,11 @@ def __neg__(self) -> "StackOffset":
def simplify(self) -> None:
"Remove matching values from both the popped and pushed list"
- if not self.popped or not self.pushed:
+ if not self.popped:
+ self.pushed.sort()
+ return
+ if not self.pushed:
+ self.popped.sort()
return
# Sort the list so the lexically largest element is last.
popped = sorted(self.popped)
@@ -87,6 +128,8 @@ def simplify(self) -> None:
popped.append(pop)
self.popped.extend(popped)
self.pushed.extend(pushed)
+ self.pushed.sort()
+ self.popped.sort()
def to_c(self) -> str:
self.simplify()
@@ -125,10 +168,10 @@ class Stack:
def __init__(self) -> None:
self.top_offset = StackOffset.empty()
self.base_offset = StackOffset.empty()
- self.variables: list[StackItem] = []
+ self.variables: list[Local] = []
self.defined: set[str] = set()
- def pop(self, var: StackItem, extract_bits: bool = False) -> str:
+ def pop(self, var: StackItem, extract_bits: bool = False) -> tuple[str, Local]:
self.top_offset.pop(var)
indirect = "&" if var.is_array() else ""
if self.variables:
@@ -141,21 +184,32 @@ def pop(self, var: StackItem, extract_bits: bool = False) -> str:
if var.name in UNUSED:
if popped.name not in UNUSED and popped.name in self.defined:
raise StackError(f"Value is declared unused, but is already cached by prior operation")
- return ""
- if popped.name in UNUSED or popped.name not in self.defined:
- self.defined.add(var.name)
+ return "", popped
+ if not var.used:
+ return "", popped
+ self.defined.add(var.name)
+ # Always define array variables as it is free, and their offset might have changed
+ if var.is_array():
+ return (
+ f"{var.name} = &stack_pointer[{self.top_offset.to_c()}];\n",
+ Local.redefinition(var, popped)
+ )
+ if not popped.defined:
return (
- f"{var.name} = {indirect}stack_pointer[{self.top_offset.to_c()}];\n"
+ f"{var.name} = stack_pointer[{self.top_offset.to_c()}];\n",
+ Local.redefinition(var, popped)
)
else:
- self.defined.add(var.name)
if popped.name == var.name:
- return ""
+ return "", popped
else:
- return f"{var.name} = {popped.name};\n"
+ return (
+ f"{var.name} = {popped.name};\n",
+ Local.redefinition(var, popped)
+ )
self.base_offset.pop(var)
if var.name in UNUSED or not var.used:
- return ""
+ return "", Local.unused(var)
self.defined.add(var.name)
cast = f"({var.type})" if (not indirect and var.type) else ""
bits = ".bits" if cast and not extract_bits else ""
@@ -164,61 +218,80 @@ def pop(self, var: StackItem, extract_bits: bool = False) -> str:
)
if var.condition:
if var.condition == "1":
- return f"{assign}\n"
+ assign = f"{assign}\n"
elif var.condition == "0":
- return ""
+ return "", Local.unused(var)
else:
- return f"if ({var.condition}) {{ {assign} }}\n"
- return f"{assign}\n"
+ assign = f"if ({var.condition}) {{ {assign} }}\n"
+ else:
+ assign = f"{assign}\n"
+ in_memory = var.is_array() or var.peek
+ return assign, Local(var, not var.is_array(), in_memory, True)
- def push(self, var: StackItem) -> str:
+ def push(self, var: Local) -> str:
self.variables.append(var)
- if var.is_array() and var.name not in self.defined and var.name not in UNUSED:
+ if var.is_array() and not var.defined and var.item.used:
+ assert var.in_memory
+ assert not var.cached
c_offset = self.top_offset.to_c()
- self.top_offset.push(var)
+ self.top_offset.push(var.item)
self.defined.add(var.name)
+ var.defined = True
return f"{var.name} = &stack_pointer[{c_offset}];\n"
else:
- self.top_offset.push(var)
- if var.used:
+ self.top_offset.push(var.item)
+ if var.item.used:
self.defined.add(var.name)
return ""
- def flush(self, out: CWriter, cast_type: str = "uintptr_t", extract_bits: bool = False) -> None:
+ def define_output_arrays(self, outputs: list[StackItem]) -> str:
+ res = []
+ top_offset = self.top_offset.copy()
+ for var in outputs:
+ if var.is_array() and var.used and not var.peek:
+ c_offset = top_offset.to_c()
+ top_offset.push(var)
+ res.append(f"{var.name} = &stack_pointer[{c_offset}];\n")
+ else:
+ top_offset.push(var)
+ return "\n".join(res)
+
+ @staticmethod
+ def _do_flush(out: CWriter, variables: list[Local], base_offset: StackOffset, top_offset: StackOffset,
+ cast_type: str = "uintptr_t", extract_bits: bool = False) -> None:
out.start_line()
- for var in self.variables:
- if not var.peek:
- cast = f"({cast_type})" if var.type else ""
+ for var in variables:
+ if var.cached and not var.in_memory and not var.item.peek and not var.name in UNUSED:
+ cast = f"({cast_type})" if var.item.type else ""
bits = ".bits" if cast and not extract_bits else ""
- if var.name not in UNUSED and not var.is_array():
- if var.condition:
- if var.condition == "0":
- continue
- elif var.condition != "1":
- out.emit(f"if ({var.condition}) ")
- out.emit(
- f"stack_pointer[{self.base_offset.to_c()}]{bits} = {cast}{var.name};\n"
- )
- self.base_offset.push(var)
- if self.base_offset.to_c() != self.top_offset.to_c():
- print("base", self.base_offset.to_c(), "top", self.top_offset.to_c())
+ if var.condition == "0":
+ continue
+ if var.condition and var.condition != "1":
+ out.emit(f"if ({var.condition}) ")
+ out.emit(
+ f"stack_pointer[{base_offset.to_c()}]{bits} = {cast}{var.name};\n"
+ )
+ base_offset.push(var.item)
+ if base_offset.to_c() != top_offset.to_c():
+ print("base", base_offset, "top", top_offset)
assert False
- number = self.base_offset.to_c()
+ number = base_offset.to_c()
if number != "0":
out.emit(f"stack_pointer += {number};\n")
out.emit("assert(WITHIN_STACK_BOUNDS());\n")
+ out.start_line()
+
+ def flush_locally(self, out: CWriter, cast_type: str = "uintptr_t", extract_bits: bool = False) -> None:
+ self._do_flush(out, self.variables[:], self.base_offset.copy(), self.top_offset.copy(), cast_type, extract_bits)
+
+ def flush(self, out: CWriter, cast_type: str = "uintptr_t", extract_bits: bool = False) -> None:
+ self._do_flush(out, self.variables, self.base_offset, self.top_offset, cast_type, extract_bits)
self.variables = []
self.base_offset.clear()
self.top_offset.clear()
- out.start_line()
def peek_offset(self) -> str:
- peek = self.base_offset.copy()
- for var in self.variables:
- if not var.peek:
- break
- peek.push(var)
- return peek.to_c()
+ return self.top_offset.to_c()
def as_comment(self) -> str:
return f"/* Variables: {[v.name for v in self.variables]}. Base offset: {self.base_offset.to_c()}. Top offset: {self.top_offset.to_c()} */"
@@ -236,8 +309,15 @@ def stacks(inst : Instruction | PseudoInstruction) -> Iterator[StackEffect]:
yield inst.stack
for s in stacks(inst):
+ locals: dict[str, Local] = {}
for var in reversed(s.inputs):
- stack.pop(var)
+ _, local = stack.pop(var)
+ if var.name != "unused":
+ locals[local.name] = local
for var in s.outputs:
- stack.push(var)
+ if var.name in locals:
+ local = locals[var.name]
+ else:
+ local = Local.unused(var)
+ stack.push(local)
return stack
diff --git a/Tools/cases_generator/tier1_generator.py b/Tools/cases_generator/tier1_generator.py
index 118f4b3a6eaa1c..1cdafbd35caea3 100644
--- a/Tools/cases_generator/tier1_generator.py
+++ b/Tools/cases_generator/tier1_generator.py
@@ -25,7 +25,7 @@
)
from cwriter import CWriter
from typing import TextIO
-from stack import Stack, StackError
+from stack import Local, Stack, StackError, get_stack_effect
DEFAULT_OUTPUT = ROOT / "Python/generated_cases.c.h"
@@ -43,18 +43,12 @@ def declare_variable(var: StackItem, out: CWriter) -> None:
def declare_variables(inst: Instruction, out: CWriter) -> None:
- stack = Stack()
- for part in inst.parts:
- if not isinstance(part, Uop):
- continue
- try:
- for var in reversed(part.stack.inputs):
- stack.pop(var)
- for var in part.stack.outputs:
- stack.push(var)
- except StackError as ex:
- raise analysis_error(ex.args[0], part.body[0]) from None
+ try:
+ stack = get_stack_effect(inst)
+ except StackError as ex:
+ raise analysis_error(ex.args[0], inst.where)
required = set(stack.defined)
+ required.discard("unused")
for part in inst.parts:
if not isinstance(part, Uop):
continue
@@ -80,16 +74,26 @@ def write_uop(
stack.flush(out)
return offset
try:
+ locals: dict[str, Local] = {}
out.start_line()
if braces:
out.emit(f"// {uop.name}\n")
+ peeks: list[Local] = []
for var in reversed(uop.stack.inputs):
- out.emit(stack.pop(var))
+ code, local = stack.pop(var)
+ out.emit(code)
+ if var.peek:
+ peeks.append(local)
+ if local.defined:
+ locals[local.name] = local
+ # Push back the peeks, so that they remain on the logical
+ # stack, but their values are cached.
+ while peeks:
+ stack.push(peeks.pop())
if braces:
out.emit("{\n")
- if not uop.properties.stores_sp:
- for i, var in enumerate(uop.stack.outputs):
- out.emit(stack.push(var))
+ out.emit(stack.define_output_arrays(uop.stack.outputs))
+
for cache in uop.caches:
if cache.name != "unused":
if cache.size == 4:
@@ -105,16 +109,22 @@ def write_uop(
out.emit(f"(void){cache.name};\n")
offset += cache.size
emit_tokens(out, uop, stack, inst)
- if uop.properties.stores_sp:
- for i, var in enumerate(uop.stack.outputs):
- out.emit(stack.push(var))
+ for i, var in enumerate(uop.stack.outputs):
+ if not var.peek:
+ if var.name in locals:
+ local = locals[var.name]
+ elif var.name == "unused":
+ local = Local.unused(var)
+ else:
+ local = Local.local(var)
+ out.emit(stack.push(local))
if braces:
out.start_line()
out.emit("}\n")
# out.emit(stack.as_comment() + "\n")
return offset
except StackError as ex:
- raise analysis_error(ex.args[0], uop.body[0]) from None
+ raise analysis_error(ex.args[0], uop.body[0])
def uses_this(inst: Instruction) -> bool:
diff --git a/Tools/cases_generator/tier2_generator.py b/Tools/cases_generator/tier2_generator.py
index 88ad0fd797f0cc..18bab2c13e7eb7 100644
--- a/Tools/cases_generator/tier2_generator.py
+++ b/Tools/cases_generator/tier2_generator.py
@@ -25,7 +25,7 @@
from cwriter import CWriter
from typing import TextIO, Iterator
from lexer import Token
-from stack import Stack, StackError
+from stack import Local, Stack, StackError, get_stack_effect
DEFAULT_OUTPUT = ROOT / "Python/executor_cases.c.h"
@@ -53,8 +53,9 @@ def declare_variables(uop: Uop, out: CWriter) -> None:
for var in reversed(uop.stack.inputs):
stack.pop(var)
for var in uop.stack.outputs:
- stack.push(var)
+ stack.push(Local.unused(var))
required = set(stack.defined)
+ required.discard("unused")
for var in reversed(uop.stack.inputs):
declare_variable(var, uop, required, out)
for var in uop.stack.outputs:
@@ -156,6 +157,7 @@ def tier2_replace_oparg(
def write_uop(uop: Uop, out: CWriter, stack: Stack) -> None:
+ locals: dict[str, Local] = {}
try:
out.start_line()
if uop.properties.oparg:
@@ -165,10 +167,11 @@ def write_uop(uop: Uop, out: CWriter, stack: Stack) -> None:
out.emit(f"oparg = {uop.properties.const_oparg};\n")
out.emit(f"assert(oparg == CURRENT_OPARG());\n")
for var in reversed(uop.stack.inputs):
- out.emit(stack.pop(var))
- if not uop.properties.stores_sp:
- for i, var in enumerate(uop.stack.outputs):
- out.emit(stack.push(var))
+ code, local = stack.pop(var)
+ out.emit(code)
+ if local.defined:
+ locals[local.name] = local
+ out.emit(stack.define_output_arrays(uop.stack.outputs))
for cache in uop.caches:
if cache.name != "unused":
if cache.size == 4:
@@ -178,9 +181,12 @@ def write_uop(uop: Uop, out: CWriter, stack: Stack) -> None:
cast = f"uint{cache.size*16}_t"
out.emit(f"{type}{cache.name} = ({cast})CURRENT_OPERAND();\n")
emit_tokens(out, uop, stack, None, TIER2_REPLACEMENT_FUNCTIONS)
- if uop.properties.stores_sp:
- for i, var in enumerate(uop.stack.outputs):
- out.emit(stack.push(var))
+ for i, var in enumerate(uop.stack.outputs):
+ if var.name in locals:
+ local = locals[var.name]
+ else:
+ local = Local.local(var)
+ out.emit(stack.push(local))
except StackError as ex:
raise analysis_error(ex.args[0], uop.body[0]) from None
From 58ffc4cf4aa4cfb47f8768a3c3eaf1dd7a7c4584 Mon Sep 17 00:00:00 2001
From: Rafael Fontenelle
Date: Thu, 1 Aug 2024 06:25:16 -0300
Subject: [PATCH 099/139] gh-122384: Mark strings from Download page for
translation (#122385)
Co-authored-by: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com>
---
Doc/tools/templates/download.html | 74 ++++++++++++++++++-------------
1 file changed, 42 insertions(+), 32 deletions(-)
diff --git a/Doc/tools/templates/download.html b/Doc/tools/templates/download.html
index c3114e584fa942..9f99eea6f3c773 100644
--- a/Doc/tools/templates/download.html
+++ b/Doc/tools/templates/download.html
@@ -1,5 +1,5 @@
{% extends "layout.html" %}
-{% set title = 'Download' %}
+{% set title = _('Download') %}
{% if daily is defined %}
{% set dlbase = pathto('archives', 1) %}
{% else %}
@@ -11,58 +11,68 @@
{% endif %}
{% block body %}
-Download Python {{ release }} Documentation
+{% trans %}Download Python {{ release }} Documentation{% endtrans %}
-{% if last_updated %}Last updated on: {{ last_updated }}.
{% endif %}
+{% if last_updated %}{% trans %}Last updated on: {{ last_updated }}.{% endtrans %}
{% endif %}
-To download an archive containing all the documents for this version of
-Python in one of various formats, follow one of links in this table.
+{% trans %}To download an archive containing all the documents for this version of
+Python in one of various formats, follow one of links in this table.{% endtrans %}
- Format | Packed as .zip | Packed as .tar.bz2 |
- PDF (US-Letter paper size) |
- Download (ca. 17 MiB) |
- Download (ca. 17 MiB) |
+
+ {% trans %}Format{% endtrans %} |
+ {% trans %}Packed as .zip{% endtrans %} |
+ {% trans %}Packed as .tar.bz2{% endtrans %} |
- PDF (A4 paper size) |
- Download (ca. 17 MiB) |
- Download (ca. 17 MiB) |
+
+ {% trans %}PDF (US-Letter paper size){% endtrans %} |
+ {% trans download_size="17" %}Download (ca. {{ download_size }} MiB){% endtrans %} |
+ {% trans download_size="17" %}Download (ca. {{ download_size }} MiB){% endtrans %} |
- HTML |
- Download (ca. 13 MiB) |
- Download (ca. 8 MiB) |
+
+ {% trans %}PDF (A4 paper size){% endtrans %} |
+ {% trans download_size="17" %}Download (ca. {{ download_size }} MiB){% endtrans %} |
+ {% trans download_size="17" %}Download (ca. {{ download_size }} MiB){% endtrans %} |
- Plain text |
- Download (ca. 4 MiB) |
- Download (ca. 3 MiB) |
+
+ {% trans %}HTML{% endtrans %} |
+ {% trans download_size="13" %}Download (ca. {{ download_size }} MiB){% endtrans %} |
+ {% trans download_size="8" %}Download (ca. {{ download_size }} MiB){% endtrans %} |
- Texinfo |
- Download (ca. 9 MiB) |
- Download (ca. 7 MiB) |
+
+ {% trans %}Plain text{% endtrans %} |
+ {% trans download_size="4" %}Download (ca. {{ download_size }} MiB){% endtrans %} |
+ {% trans download_size="3" %}Download (ca. {{ download_size }} MiB){% endtrans %} |
+
+
+ {% trans %}Texinfo{% endtrans %} |
+ {% trans download_size="9" %}Download (ca. {{ download_size }} MiB){% endtrans %} |
+ {% trans download_size="7" %}Download (ca. {{ download_size }} MiB){% endtrans %} |
- EPUB |
- Download (ca. 6 MiB) |
+
+ {% trans %}EPUB{% endtrans %} |
+ {% trans download_size="6" %}Download (ca. {{ download_size }} MiB){% endtrans %} |
|
-These archives contain all the content in the documentation.
+{% trans %}These archives contain all the content in the documentation.{% endtrans %}
-Unpacking
+{% trans %}Unpacking{% endtrans %}
-Unix users should download the .tar.bz2 archives; these are bzipped tar
+
{% trans %}Unix users should download the .tar.bz2 archives; these are bzipped tar
archives and can be handled in the usual way using tar and the bzip2
program. The Info-ZIP unzip program can be
used to handle the ZIP archives if desired. The .tar.bz2 archives provide the
-best compression and fastest download times.
+best compression and fastest download times.{% endtrans %}
-Windows users can use the ZIP archives since those are customary on that
-platform. These are created on Unix using the Info-ZIP zip program.
+{% trans %}Windows users can use the ZIP archives since those are customary on that
+platform. These are created on Unix using the Info-ZIP zip program.{% endtrans %}
-Problems
+{% trans %}Problems{% endtrans %}
-If you have comments or suggestions for the Python documentation, please send
-email to docs@python.org.
+{% trans %}If you have comments or suggestions for the Python documentation, please send
+email to docs@python.org.{% endtrans %}
{% endblock %}
From 88030861e216ac791725c8784752201d6fe31329 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?B=C3=A9n=C3=A9dikt=20Tran?=
<10796600+picnixz@users.noreply.github.com>
Date: Thu, 1 Aug 2024 12:26:09 +0200
Subject: [PATCH 100/139] gh-122555: Remove removed functions from
`Doc/data/refcounts.dat` (#122556)
---
Doc/data/refcounts.dat | 10 ----------
Lib/importlib/_bootstrap.py | 1 -
2 files changed, 11 deletions(-)
diff --git a/Doc/data/refcounts.dat b/Doc/data/refcounts.dat
index a7d06e076a1b55..ccef104eeefde5 100644
--- a/Doc/data/refcounts.dat
+++ b/Doc/data/refcounts.dat
@@ -364,8 +364,6 @@ PyComplex_RealAsDouble:PyObject*:op:0:
PyContext_CheckExact:int:::
PyContext_CheckExact:PyObject*:o:0:
-PyContext_ClearFreeList:int:::
-
PyContext_Copy:PyObject*::+1:
PyContext_Copy:PyObject*:ctx:0:
@@ -1030,8 +1028,6 @@ PyImport_AddModule:const char*:name::
PyImport_AddModuleObject:PyObject*::0:reference borrowed from sys.modules
PyImport_AddModuleObject:PyObject*:name:0:
-PyImport_Cleanup:void:::
-
PyImport_ExecCodeModule:PyObject*::+1:
PyImport_ExecCodeModule:const char*:name::
PyImport_ExecCodeModule:PyObject*:co:0:
@@ -2405,12 +2401,6 @@ PyUnicode_DATA:PyObject*:o:0:
PyUnicode_GET_LENGTH:Py_ssize_t:::
PyUnicode_GET_LENGTH:PyObject*:o:0:
-PyUnicode_GET_SIZE:Py_ssize_t:::
-PyUnicode_GET_SIZE:PyObject*:o:0:
-
-PyUnicode_GET_DATA_SIZE:Py_ssize_t:::
-PyUnicode_GET_DATA_SIZE:PyObject*:o:0:
-
PyUnicode_KIND:int:::
PyUnicode_KIND:PyObject*:o:0:
diff --git a/Lib/importlib/_bootstrap.py b/Lib/importlib/_bootstrap.py
index de5651f0a7fc36..b70d09b32abce6 100644
--- a/Lib/importlib/_bootstrap.py
+++ b/Lib/importlib/_bootstrap.py
@@ -1241,7 +1241,6 @@ def _find_spec(name, path, target=None):
"""Find a module's spec."""
meta_path = sys.meta_path
if meta_path is None:
- # PyImport_Cleanup() is running or has been called.
raise ImportError("sys.meta_path is None, Python is likely "
"shutting down")
From fda6bd842a2b93a501526f1b830eb900d935ac73 Mon Sep 17 00:00:00 2001
From: Victor Stinner
Date: Thu, 1 Aug 2024 14:12:33 +0200
Subject: [PATCH 101/139] Replace PyObject_Del with PyObject_Free (#122453)
PyObject_Del() is just an alias for PyObject_Free(), kept for backward
compatibility. Use PyObject_Free() directly instead.
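For illustration only (not part of the patch): a minimal sketch of what the mechanical substitution looks like in an extension type, using a hypothetical trivial_object type. Because PyObject_Del() simply forwards to PyObject_Free(), either spelling releases memory obtained from PyObject_New(); the patch standardizes on the latter.

    #include <Python.h>

    typedef struct {
        PyObject_HEAD
    } trivial_object;

    static void
    trivial_dealloc(PyObject *self)
    {
        /* Before the patch this would have been PyObject_Del(self);
         * both calls free memory allocated by PyObject_New(). */
        PyObject_Free(self);
    }

    static PyTypeObject trivial_type = {
        PyVarObject_HEAD_INIT(NULL, 0)
        .tp_name = "example.trivial",
        .tp_basicsize = sizeof(trivial_object),
        .tp_dealloc = trivial_dealloc,
        .tp_free = PyObject_Free,   /* was PyObject_Del */
    };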
---
Doc/c-api/typeobj.rst | 4 ++--
Modules/_testcapi/gc.c | 2 +-
Modules/_testcapimodule.c | 6 +++---
Objects/bytearrayobject.c | 2 +-
Objects/bytesobject.c | 2 +-
Objects/codeobject.c | 4 ++--
Objects/complexobject.c | 2 +-
Objects/fileobject.c | 2 +-
Objects/odictobject.c | 2 +-
Objects/rangeobject.c | 2 +-
Objects/typeobject.c | 4 ++--
Objects/unicodeobject.c | 2 +-
Python/optimizer.c | 2 +-
13 files changed, 18 insertions(+), 18 deletions(-)
diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst
index 0091e084308245..b7b1418df513c6 100644
--- a/Doc/c-api/typeobj.rst
+++ b/Doc/c-api/typeobj.rst
@@ -650,7 +650,7 @@ and :c:data:`PyType_Type` effectively act as defaults.)
(doesn't have the :c:macro:`Py_TPFLAGS_BASETYPE` flag bit set), it is
permissible to call the object deallocator directly instead of via
:c:member:`~PyTypeObject.tp_free`. The object deallocator should be the one used to allocate the
- instance; this is normally :c:func:`PyObject_Del` if the instance was allocated
+ instance; this is normally :c:func:`PyObject_Free` if the instance was allocated
using :c:macro:`PyObject_New` or :c:macro:`PyObject_NewVar`, or
:c:func:`PyObject_GC_Del` if the instance was allocated using
:c:macro:`PyObject_GC_New` or :c:macro:`PyObject_GC_NewVar`.
@@ -1954,7 +1954,7 @@ and :c:data:`PyType_Type` effectively act as defaults.)
match :c:func:`PyType_GenericAlloc` and the value of the
:c:macro:`Py_TPFLAGS_HAVE_GC` flag bit.
- For static subtypes, :c:data:`PyBaseObject_Type` uses :c:func:`PyObject_Del`.
+ For static subtypes, :c:data:`PyBaseObject_Type` uses :c:func:`PyObject_Free`.
.. c:member:: inquiry PyTypeObject.tp_is_gc
diff --git a/Modules/_testcapi/gc.c b/Modules/_testcapi/gc.c
index b472a4185a98af..7e33e0d4861e84 100644
--- a/Modules/_testcapi/gc.c
+++ b/Modules/_testcapi/gc.c
@@ -72,7 +72,7 @@ without_gc(PyObject *Py_UNUSED(self), PyObject *obj)
if (PyType_IS_GC(tp)) {
// Don't try this at home, kids:
tp->tp_flags -= Py_TPFLAGS_HAVE_GC;
- tp->tp_free = PyObject_Del;
+ tp->tp_free = PyObject_Free;
tp->tp_traverse = NULL;
tp->tp_clear = NULL;
}
diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c
index 5ebcfef6143e02..4a371a5ce33ebe 100644
--- a/Modules/_testcapimodule.c
+++ b/Modules/_testcapimodule.c
@@ -289,7 +289,7 @@ static PyTypeObject _HashInheritanceTester_Type = {
"hashinheritancetester", /* Name of this type */
sizeof(PyObject), /* Basic object size */
0, /* Item size for varobject */
- (destructor)PyObject_Del, /* tp_dealloc */
+ (destructor)PyObject_Free, /* tp_dealloc */
0, /* tp_vectorcall_offset */
0, /* tp_getattr */
0, /* tp_setattr */
@@ -3587,7 +3587,7 @@ static PyTypeObject matmulType = {
0,
0,
PyType_GenericNew, /* tp_new */
- PyObject_Del, /* tp_free */
+ PyObject_Free, /* tp_free */
};
typedef struct {
@@ -3699,7 +3699,7 @@ static PyTypeObject awaitType = {
0,
0,
awaitObject_new, /* tp_new */
- PyObject_Del, /* tp_free */
+ PyObject_Free, /* tp_free */
};
diff --git a/Objects/bytearrayobject.c b/Objects/bytearrayobject.c
index 80679f93cd4c13..a80e4670665a22 100644
--- a/Objects/bytearrayobject.c
+++ b/Objects/bytearrayobject.c
@@ -2426,7 +2426,7 @@ PyTypeObject PyByteArray_Type = {
(initproc)bytearray___init__, /* tp_init */
PyType_GenericAlloc, /* tp_alloc */
PyType_GenericNew, /* tp_new */
- PyObject_Del, /* tp_free */
+ PyObject_Free, /* tp_free */
};
/*********************** Bytearray Iterator ****************************/
diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c
index 459df6ceacf3a8..e88b199d89f758 100644
--- a/Objects/bytesobject.c
+++ b/Objects/bytesobject.c
@@ -3066,7 +3066,7 @@ PyTypeObject PyBytes_Type = {
0, /* tp_init */
bytes_alloc, /* tp_alloc */
bytes_new, /* tp_new */
- PyObject_Del, /* tp_free */
+ PyObject_Free, /* tp_free */
};
void
diff --git a/Objects/codeobject.c b/Objects/codeobject.c
index d45ba5ed4a9c06..6423a4214bfa9c 100644
--- a/Objects/codeobject.c
+++ b/Objects/codeobject.c
@@ -1352,7 +1352,7 @@ PyTypeObject _PyLineIterator = {
0, /* tp_init */
0, /* tp_alloc */
0, /* tp_new */
- PyObject_Del, /* tp_free */
+ PyObject_Free, /* tp_free */
};
static lineiterator *
@@ -1443,7 +1443,7 @@ PyTypeObject _PyPositionsIterator = {
0, /* tp_init */
0, /* tp_alloc */
0, /* tp_new */
- PyObject_Del, /* tp_free */
+ PyObject_Free, /* tp_free */
};
static PyObject*
diff --git a/Objects/complexobject.c b/Objects/complexobject.c
index 7c8a6bd9dfcd3f..4a8dac6c53f529 100644
--- a/Objects/complexobject.c
+++ b/Objects/complexobject.c
@@ -1247,5 +1247,5 @@ PyTypeObject PyComplex_Type = {
0, /* tp_init */
PyType_GenericAlloc, /* tp_alloc */
actual_complex_new, /* tp_new */
- PyObject_Del, /* tp_free */
+ PyObject_Free, /* tp_free */
};
diff --git a/Objects/fileobject.c b/Objects/fileobject.c
index bae49d367b65ee..c377d1bb28b56f 100644
--- a/Objects/fileobject.c
+++ b/Objects/fileobject.c
@@ -462,7 +462,7 @@ PyTypeObject PyStdPrinter_Type = {
0, /* tp_init */
PyType_GenericAlloc, /* tp_alloc */
0, /* tp_new */
- PyObject_Del, /* tp_free */
+ PyObject_Free, /* tp_free */
};
diff --git a/Objects/odictobject.c b/Objects/odictobject.c
index 30277aa0c23883..a9b801e70c9810 100644
--- a/Objects/odictobject.c
+++ b/Objects/odictobject.c
@@ -276,7 +276,7 @@ tp_dictoffset (__dict__) - -
tp_init __init__ object_init dict_init
tp_alloc - PyType_GenericAlloc (repeated)
tp_new __new__ object_new dict_new
-tp_free - PyObject_Del PyObject_GC_Del
+tp_free - PyObject_Free PyObject_GC_Del
================= ================ =================== ================
Relevant Methods
diff --git a/Objects/rangeobject.c b/Objects/rangeobject.c
index 9727b4f47b53a1..1318ce0319d438 100644
--- a/Objects/rangeobject.c
+++ b/Objects/rangeobject.c
@@ -899,7 +899,7 @@ PyTypeObject PyRangeIter_Type = {
sizeof(_PyRangeIterObject), /* tp_basicsize */
0, /* tp_itemsize */
/* methods */
- (destructor)PyObject_Del, /* tp_dealloc */
+ (destructor)PyObject_Free, /* tp_dealloc */
0, /* tp_vectorcall_offset */
0, /* tp_getattr */
0, /* tp_setattr */
diff --git a/Objects/typeobject.c b/Objects/typeobject.c
index 5b0a466f913495..a2d82e65b6ad9f 100644
--- a/Objects/typeobject.c
+++ b/Objects/typeobject.c
@@ -7456,7 +7456,7 @@ PyTypeObject PyBaseObject_Type = {
object_init, /* tp_init */
PyType_GenericAlloc, /* tp_alloc */
object_new, /* tp_new */
- PyObject_Del, /* tp_free */
+ PyObject_Free, /* tp_free */
};
@@ -8180,7 +8180,7 @@ type_ready_inherit(PyTypeObject *type)
/* Sanity check for tp_free. */
if (_PyType_IS_GC(type) && (type->tp_flags & Py_TPFLAGS_BASETYPE) &&
- (type->tp_free == NULL || type->tp_free == PyObject_Del))
+ (type->tp_free == NULL || type->tp_free == PyObject_Free))
{
/* This base class needs to call tp_free, but doesn't have
* one, or its tp_free is for non-gc'ed objects.
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c
index ffb879a68745b1..12578812a762f6 100644
--- a/Objects/unicodeobject.c
+++ b/Objects/unicodeobject.c
@@ -15265,7 +15265,7 @@ PyTypeObject PyUnicode_Type = {
0, /* tp_init */
0, /* tp_alloc */
unicode_new, /* tp_new */
- PyObject_Del, /* tp_free */
+ PyObject_Free, /* tp_free */
.tp_vectorcall = unicode_vectorcall,
};
diff --git a/Python/optimizer.c b/Python/optimizer.c
index ce8a36575cde1d..9d0381357f2123 100644
--- a/Python/optimizer.c
+++ b/Python/optimizer.c
@@ -1374,7 +1374,7 @@ PyTypeObject _PyCounterOptimizer_Type = {
.tp_itemsize = 0,
.tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION,
.tp_methods = counter_optimizer_methods,
- .tp_dealloc = (destructor)PyObject_Del,
+ .tp_dealloc = (destructor)PyObject_Free,
};
PyObject *
From df13a1821a90fcfb75eca59aad6af1f0893b1e77 Mon Sep 17 00:00:00 2001
From: Mark Shannon
Date: Fri, 2 Aug 2024 00:19:05 +0100
Subject: [PATCH 102/139] GH-118095: Add tier two support for
BINARY_SUBSCR_GETITEM (GH-120793)
---
Include/internal/pycore_opcode_metadata.h | 3 +-
Include/internal/pycore_optimizer.h | 10 +
Include/internal/pycore_uop_ids.h | 293 +++++++++++-----------
Include/internal/pycore_uop_metadata.h | 8 +
Python/bytecodes.c | 42 ++--
Python/executor_cases.c.h | 52 +++-
Python/generated_cases.c.h | 80 ++++--
Python/optimizer.c | 12 +-
Python/optimizer_analysis.c | 11 +-
Python/optimizer_cases.c.h | 13 +-
10 files changed, 317 insertions(+), 207 deletions(-)
diff --git a/Include/internal/pycore_opcode_metadata.h b/Include/internal/pycore_opcode_metadata.h
index 464d3500890e53..d8e5034268b343 100644
--- a/Include/internal/pycore_opcode_metadata.h
+++ b/Include/internal/pycore_opcode_metadata.h
@@ -505,7 +505,7 @@ int _PyOpcode_num_pushed(int opcode, int oparg) {
case BINARY_SUBSCR_DICT:
return 1;
case BINARY_SUBSCR_GETITEM:
- return 1;
+ return 0;
case BINARY_SUBSCR_LIST_INT:
return 1;
case BINARY_SUBSCR_STR_INT:
@@ -1231,6 +1231,7 @@ _PyOpcode_macro_expansion[256] = {
[BINARY_SLICE] = { .nuops = 1, .uops = { { _BINARY_SLICE, 0, 0 } } },
[BINARY_SUBSCR] = { .nuops = 1, .uops = { { _BINARY_SUBSCR, 0, 0 } } },
[BINARY_SUBSCR_DICT] = { .nuops = 1, .uops = { { _BINARY_SUBSCR_DICT, 0, 0 } } },
+ [BINARY_SUBSCR_GETITEM] = { .nuops = 4, .uops = { { _CHECK_PEP_523, 0, 0 }, { _BINARY_SUBSCR_CHECK_FUNC, 0, 0 }, { _BINARY_SUBSCR_INIT_CALL, 0, 0 }, { _PUSH_FRAME, 0, 0 } } },
[BINARY_SUBSCR_LIST_INT] = { .nuops = 1, .uops = { { _BINARY_SUBSCR_LIST_INT, 0, 0 } } },
[BINARY_SUBSCR_STR_INT] = { .nuops = 1, .uops = { { _BINARY_SUBSCR_STR_INT, 0, 0 } } },
[BINARY_SUBSCR_TUPLE_INT] = { .nuops = 1, .uops = { { _BINARY_SUBSCR_TUPLE_INT, 0, 0 } } },
diff --git a/Include/internal/pycore_optimizer.h b/Include/internal/pycore_optimizer.h
index bcbb8b73706359..b6da27c067727f 100644
--- a/Include/internal/pycore_optimizer.h
+++ b/Include/internal/pycore_optimizer.h
@@ -259,6 +259,16 @@ PyAPI_FUNC(PyObject *) _Py_uop_symbols_test(PyObject *self, PyObject *ignored);
PyAPI_FUNC(int) _PyOptimizer_Optimize(struct _PyInterpreterFrame *frame, _Py_CODEUNIT *start, _PyStackRef *stack_pointer, _PyExecutorObject **exec_ptr);
+static inline int is_terminator(const _PyUOpInstruction *uop)
+{
+ int opcode = uop->opcode;
+ return (
+ opcode == _EXIT_TRACE ||
+ opcode == _JUMP_TO_TOP ||
+ opcode == _DYNAMIC_EXIT
+ );
+}
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/internal/pycore_uop_ids.h b/Include/internal/pycore_uop_ids.h
index d6c910255eb87b..27d7f96863fa8c 100644
--- a/Include/internal/pycore_uop_ids.h
+++ b/Include/internal/pycore_uop_ids.h
@@ -22,8 +22,9 @@ extern "C" {
#define _BINARY_OP_SUBTRACT_INT 310
#define _BINARY_SLICE BINARY_SLICE
#define _BINARY_SUBSCR 311
+#define _BINARY_SUBSCR_CHECK_FUNC 312
#define _BINARY_SUBSCR_DICT BINARY_SUBSCR_DICT
-#define _BINARY_SUBSCR_GETITEM BINARY_SUBSCR_GETITEM
+#define _BINARY_SUBSCR_INIT_CALL 313
#define _BINARY_SUBSCR_LIST_INT BINARY_SUBSCR_LIST_INT
#define _BINARY_SUBSCR_STR_INT BINARY_SUBSCR_STR_INT
#define _BINARY_SUBSCR_TUPLE_INT BINARY_SUBSCR_TUPLE_INT
@@ -34,10 +35,10 @@ extern "C" {
#define _BUILD_STRING BUILD_STRING
#define _BUILD_TUPLE BUILD_TUPLE
#define _CALL_ALLOC_AND_ENTER_INIT CALL_ALLOC_AND_ENTER_INIT
-#define _CALL_BUILTIN_CLASS 312
-#define _CALL_BUILTIN_FAST 313
-#define _CALL_BUILTIN_FAST_WITH_KEYWORDS 314
-#define _CALL_BUILTIN_O 315
+#define _CALL_BUILTIN_CLASS 314
+#define _CALL_BUILTIN_FAST 315
+#define _CALL_BUILTIN_FAST_WITH_KEYWORDS 316
+#define _CALL_BUILTIN_O 317
#define _CALL_FUNCTION_EX CALL_FUNCTION_EX
#define _CALL_INTRINSIC_1 CALL_INTRINSIC_1
#define _CALL_INTRINSIC_2 CALL_INTRINSIC_2
@@ -45,38 +46,38 @@ extern "C" {
#define _CALL_KW CALL_KW
#define _CALL_LEN CALL_LEN
#define _CALL_LIST_APPEND CALL_LIST_APPEND
-#define _CALL_METHOD_DESCRIPTOR_FAST 316
-#define _CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS 317
-#define _CALL_METHOD_DESCRIPTOR_NOARGS 318
-#define _CALL_METHOD_DESCRIPTOR_O 319
-#define _CALL_NON_PY_GENERAL 320
-#define _CALL_STR_1 321
-#define _CALL_TUPLE_1 322
+#define _CALL_METHOD_DESCRIPTOR_FAST 318
+#define _CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS 319
+#define _CALL_METHOD_DESCRIPTOR_NOARGS 320
+#define _CALL_METHOD_DESCRIPTOR_O 321
+#define _CALL_NON_PY_GENERAL 322
+#define _CALL_STR_1 323
+#define _CALL_TUPLE_1 324
#define _CALL_TYPE_1 CALL_TYPE_1
-#define _CHECK_ATTR_CLASS 323
-#define _CHECK_ATTR_METHOD_LAZY_DICT 324
-#define _CHECK_ATTR_MODULE 325
-#define _CHECK_ATTR_WITH_HINT 326
-#define _CHECK_CALL_BOUND_METHOD_EXACT_ARGS 327
+#define _CHECK_ATTR_CLASS 325
+#define _CHECK_ATTR_METHOD_LAZY_DICT 326
+#define _CHECK_ATTR_MODULE 327
+#define _CHECK_ATTR_WITH_HINT 328
+#define _CHECK_CALL_BOUND_METHOD_EXACT_ARGS 329
#define _CHECK_EG_MATCH CHECK_EG_MATCH
#define _CHECK_EXC_MATCH CHECK_EXC_MATCH
-#define _CHECK_FUNCTION 328
-#define _CHECK_FUNCTION_EXACT_ARGS 329
-#define _CHECK_FUNCTION_VERSION 330
-#define _CHECK_IS_NOT_PY_CALLABLE 331
-#define _CHECK_MANAGED_OBJECT_HAS_VALUES 332
-#define _CHECK_METHOD_VERSION 333
-#define _CHECK_PEP_523 334
-#define _CHECK_PERIODIC 335
-#define _CHECK_STACK_SPACE 336
-#define _CHECK_STACK_SPACE_OPERAND 337
-#define _CHECK_VALIDITY 338
-#define _CHECK_VALIDITY_AND_SET_IP 339
-#define _COMPARE_OP 340
-#define _COMPARE_OP_FLOAT 341
-#define _COMPARE_OP_INT 342
-#define _COMPARE_OP_STR 343
-#define _CONTAINS_OP 344
+#define _CHECK_FUNCTION 330
+#define _CHECK_FUNCTION_EXACT_ARGS 331
+#define _CHECK_FUNCTION_VERSION 332
+#define _CHECK_IS_NOT_PY_CALLABLE 333
+#define _CHECK_MANAGED_OBJECT_HAS_VALUES 334
+#define _CHECK_METHOD_VERSION 335
+#define _CHECK_PEP_523 336
+#define _CHECK_PERIODIC 337
+#define _CHECK_STACK_SPACE 338
+#define _CHECK_STACK_SPACE_OPERAND 339
+#define _CHECK_VALIDITY 340
+#define _CHECK_VALIDITY_AND_SET_IP 341
+#define _COMPARE_OP 342
+#define _COMPARE_OP_FLOAT 343
+#define _COMPARE_OP_INT 344
+#define _COMPARE_OP_STR 345
+#define _CONTAINS_OP 346
#define _CONTAINS_OP_DICT CONTAINS_OP_DICT
#define _CONTAINS_OP_SET CONTAINS_OP_SET
#define _CONVERT_VALUE CONVERT_VALUE
@@ -88,56 +89,56 @@ extern "C" {
#define _DELETE_GLOBAL DELETE_GLOBAL
#define _DELETE_NAME DELETE_NAME
#define _DELETE_SUBSCR DELETE_SUBSCR
-#define _DEOPT 345
+#define _DEOPT 347
#define _DICT_MERGE DICT_MERGE
#define _DICT_UPDATE DICT_UPDATE
-#define _DO_CALL 346
-#define _DYNAMIC_EXIT 347
+#define _DO_CALL 348
+#define _DYNAMIC_EXIT 349
#define _END_SEND END_SEND
-#define _ERROR_POP_N 348
+#define _ERROR_POP_N 350
#define _EXIT_INIT_CHECK EXIT_INIT_CHECK
-#define _EXPAND_METHOD 349
-#define _FATAL_ERROR 350
+#define _EXPAND_METHOD 351
+#define _FATAL_ERROR 352
#define _FORMAT_SIMPLE FORMAT_SIMPLE
#define _FORMAT_WITH_SPEC FORMAT_WITH_SPEC
-#define _FOR_ITER 351
-#define _FOR_ITER_GEN_FRAME 352
-#define _FOR_ITER_TIER_TWO 353
+#define _FOR_ITER 353
+#define _FOR_ITER_GEN_FRAME 354
+#define _FOR_ITER_TIER_TWO 355
#define _GET_AITER GET_AITER
#define _GET_ANEXT GET_ANEXT
#define _GET_AWAITABLE GET_AWAITABLE
#define _GET_ITER GET_ITER
#define _GET_LEN GET_LEN
#define _GET_YIELD_FROM_ITER GET_YIELD_FROM_ITER
-#define _GUARD_BOTH_FLOAT 354
-#define _GUARD_BOTH_INT 355
-#define _GUARD_BOTH_UNICODE 356
-#define _GUARD_BUILTINS_VERSION 357
-#define _GUARD_DORV_NO_DICT 358
-#define _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT 359
-#define _GUARD_GLOBALS_VERSION 360
-#define _GUARD_IS_FALSE_POP 361
-#define _GUARD_IS_NONE_POP 362
-#define _GUARD_IS_NOT_NONE_POP 363
-#define _GUARD_IS_TRUE_POP 364
-#define _GUARD_KEYS_VERSION 365
-#define _GUARD_NOS_FLOAT 366
-#define _GUARD_NOS_INT 367
-#define _GUARD_NOT_EXHAUSTED_LIST 368
-#define _GUARD_NOT_EXHAUSTED_RANGE 369
-#define _GUARD_NOT_EXHAUSTED_TUPLE 370
-#define _GUARD_TOS_FLOAT 371
-#define _GUARD_TOS_INT 372
-#define _GUARD_TYPE_VERSION 373
+#define _GUARD_BOTH_FLOAT 356
+#define _GUARD_BOTH_INT 357
+#define _GUARD_BOTH_UNICODE 358
+#define _GUARD_BUILTINS_VERSION 359
+#define _GUARD_DORV_NO_DICT 360
+#define _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT 361
+#define _GUARD_GLOBALS_VERSION 362
+#define _GUARD_IS_FALSE_POP 363
+#define _GUARD_IS_NONE_POP 364
+#define _GUARD_IS_NOT_NONE_POP 365
+#define _GUARD_IS_TRUE_POP 366
+#define _GUARD_KEYS_VERSION 367
+#define _GUARD_NOS_FLOAT 368
+#define _GUARD_NOS_INT 369
+#define _GUARD_NOT_EXHAUSTED_LIST 370
+#define _GUARD_NOT_EXHAUSTED_RANGE 371
+#define _GUARD_NOT_EXHAUSTED_TUPLE 372
+#define _GUARD_TOS_FLOAT 373
+#define _GUARD_TOS_INT 374
+#define _GUARD_TYPE_VERSION 375
#define _IMPORT_FROM IMPORT_FROM
#define _IMPORT_NAME IMPORT_NAME
-#define _INIT_CALL_BOUND_METHOD_EXACT_ARGS 374
-#define _INIT_CALL_PY_EXACT_ARGS 375
-#define _INIT_CALL_PY_EXACT_ARGS_0 376
-#define _INIT_CALL_PY_EXACT_ARGS_1 377
-#define _INIT_CALL_PY_EXACT_ARGS_2 378
-#define _INIT_CALL_PY_EXACT_ARGS_3 379
-#define _INIT_CALL_PY_EXACT_ARGS_4 380
+#define _INIT_CALL_BOUND_METHOD_EXACT_ARGS 376
+#define _INIT_CALL_PY_EXACT_ARGS 377
+#define _INIT_CALL_PY_EXACT_ARGS_0 378
+#define _INIT_CALL_PY_EXACT_ARGS_1 379
+#define _INIT_CALL_PY_EXACT_ARGS_2 380
+#define _INIT_CALL_PY_EXACT_ARGS_3 381
+#define _INIT_CALL_PY_EXACT_ARGS_4 382
#define _INSTRUMENTED_CALL_FUNCTION_EX INSTRUMENTED_CALL_FUNCTION_EX
#define _INSTRUMENTED_CALL_KW INSTRUMENTED_CALL_KW
#define _INSTRUMENTED_FOR_ITER INSTRUMENTED_FOR_ITER
@@ -151,65 +152,65 @@ extern "C" {
#define _INSTRUMENTED_POP_JUMP_IF_NOT_NONE INSTRUMENTED_POP_JUMP_IF_NOT_NONE
#define _INSTRUMENTED_POP_JUMP_IF_TRUE INSTRUMENTED_POP_JUMP_IF_TRUE
#define _INSTRUMENTED_RESUME INSTRUMENTED_RESUME
-#define _INTERNAL_INCREMENT_OPT_COUNTER 381
-#define _IS_NONE 382
+#define _INTERNAL_INCREMENT_OPT_COUNTER 383
+#define _IS_NONE 384
#define _IS_OP IS_OP
-#define _ITER_CHECK_LIST 383
-#define _ITER_CHECK_RANGE 384
-#define _ITER_CHECK_TUPLE 385
-#define _ITER_JUMP_LIST 386
-#define _ITER_JUMP_RANGE 387
-#define _ITER_JUMP_TUPLE 388
-#define _ITER_NEXT_LIST 389
-#define _ITER_NEXT_RANGE 390
-#define _ITER_NEXT_TUPLE 391
-#define _JUMP_TO_TOP 392
+#define _ITER_CHECK_LIST 385
+#define _ITER_CHECK_RANGE 386
+#define _ITER_CHECK_TUPLE 387
+#define _ITER_JUMP_LIST 388
+#define _ITER_JUMP_RANGE 389
+#define _ITER_JUMP_TUPLE 390
+#define _ITER_NEXT_LIST 391
+#define _ITER_NEXT_RANGE 392
+#define _ITER_NEXT_TUPLE 393
+#define _JUMP_TO_TOP 394
#define _LIST_APPEND LIST_APPEND
#define _LIST_EXTEND LIST_EXTEND
-#define _LOAD_ATTR 393
-#define _LOAD_ATTR_CLASS 394
-#define _LOAD_ATTR_CLASS_0 395
-#define _LOAD_ATTR_CLASS_1 396
+#define _LOAD_ATTR 395
+#define _LOAD_ATTR_CLASS 396
+#define _LOAD_ATTR_CLASS_0 397
+#define _LOAD_ATTR_CLASS_1 398
#define _LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN
-#define _LOAD_ATTR_INSTANCE_VALUE 397
-#define _LOAD_ATTR_INSTANCE_VALUE_0 398
-#define _LOAD_ATTR_INSTANCE_VALUE_1 399
-#define _LOAD_ATTR_METHOD_LAZY_DICT 400
-#define _LOAD_ATTR_METHOD_NO_DICT 401
-#define _LOAD_ATTR_METHOD_WITH_VALUES 402
-#define _LOAD_ATTR_MODULE 403
-#define _LOAD_ATTR_NONDESCRIPTOR_NO_DICT 404
-#define _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 405
-#define _LOAD_ATTR_PROPERTY_FRAME 406
-#define _LOAD_ATTR_SLOT 407
-#define _LOAD_ATTR_SLOT_0 408
-#define _LOAD_ATTR_SLOT_1 409
-#define _LOAD_ATTR_WITH_HINT 410
+#define _LOAD_ATTR_INSTANCE_VALUE 399
+#define _LOAD_ATTR_INSTANCE_VALUE_0 400
+#define _LOAD_ATTR_INSTANCE_VALUE_1 401
+#define _LOAD_ATTR_METHOD_LAZY_DICT 402
+#define _LOAD_ATTR_METHOD_NO_DICT 403
+#define _LOAD_ATTR_METHOD_WITH_VALUES 404
+#define _LOAD_ATTR_MODULE 405
+#define _LOAD_ATTR_NONDESCRIPTOR_NO_DICT 406
+#define _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 407
+#define _LOAD_ATTR_PROPERTY_FRAME 408
+#define _LOAD_ATTR_SLOT 409
+#define _LOAD_ATTR_SLOT_0 410
+#define _LOAD_ATTR_SLOT_1 411
+#define _LOAD_ATTR_WITH_HINT 412
#define _LOAD_BUILD_CLASS LOAD_BUILD_CLASS
#define _LOAD_COMMON_CONSTANT LOAD_COMMON_CONSTANT
#define _LOAD_CONST LOAD_CONST
-#define _LOAD_CONST_INLINE 411
-#define _LOAD_CONST_INLINE_BORROW 412
-#define _LOAD_CONST_INLINE_BORROW_WITH_NULL 413
-#define _LOAD_CONST_INLINE_WITH_NULL 414
+#define _LOAD_CONST_INLINE 413
+#define _LOAD_CONST_INLINE_BORROW 414
+#define _LOAD_CONST_INLINE_BORROW_WITH_NULL 415
+#define _LOAD_CONST_INLINE_WITH_NULL 416
#define _LOAD_DEREF LOAD_DEREF
-#define _LOAD_FAST 415
-#define _LOAD_FAST_0 416
-#define _LOAD_FAST_1 417
-#define _LOAD_FAST_2 418
-#define _LOAD_FAST_3 419
-#define _LOAD_FAST_4 420
-#define _LOAD_FAST_5 421
-#define _LOAD_FAST_6 422
-#define _LOAD_FAST_7 423
+#define _LOAD_FAST 417
+#define _LOAD_FAST_0 418
+#define _LOAD_FAST_1 419
+#define _LOAD_FAST_2 420
+#define _LOAD_FAST_3 421
+#define _LOAD_FAST_4 422
+#define _LOAD_FAST_5 423
+#define _LOAD_FAST_6 424
+#define _LOAD_FAST_7 425
#define _LOAD_FAST_AND_CLEAR LOAD_FAST_AND_CLEAR
#define _LOAD_FAST_CHECK LOAD_FAST_CHECK
#define _LOAD_FAST_LOAD_FAST LOAD_FAST_LOAD_FAST
#define _LOAD_FROM_DICT_OR_DEREF LOAD_FROM_DICT_OR_DEREF
#define _LOAD_FROM_DICT_OR_GLOBALS LOAD_FROM_DICT_OR_GLOBALS
-#define _LOAD_GLOBAL 424
-#define _LOAD_GLOBAL_BUILTINS 425
-#define _LOAD_GLOBAL_MODULE 426
+#define _LOAD_GLOBAL 426
+#define _LOAD_GLOBAL_BUILTINS 427
+#define _LOAD_GLOBAL_MODULE 428
#define _LOAD_LOCALS LOAD_LOCALS
#define _LOAD_NAME LOAD_NAME
#define _LOAD_SPECIAL LOAD_SPECIAL
@@ -222,55 +223,55 @@ extern "C" {
#define _MATCH_KEYS MATCH_KEYS
#define _MATCH_MAPPING MATCH_MAPPING
#define _MATCH_SEQUENCE MATCH_SEQUENCE
-#define _MAYBE_EXPAND_METHOD 427
-#define _MONITOR_CALL 428
+#define _MAYBE_EXPAND_METHOD 429
+#define _MONITOR_CALL 430
#define _NOP NOP
#define _POP_EXCEPT POP_EXCEPT
-#define _POP_JUMP_IF_FALSE 429
-#define _POP_JUMP_IF_TRUE 430
+#define _POP_JUMP_IF_FALSE 431
+#define _POP_JUMP_IF_TRUE 432
#define _POP_TOP POP_TOP
-#define _POP_TOP_LOAD_CONST_INLINE_BORROW 431
+#define _POP_TOP_LOAD_CONST_INLINE_BORROW 433
#define _PUSH_EXC_INFO PUSH_EXC_INFO
-#define _PUSH_FRAME 432
+#define _PUSH_FRAME 434
#define _PUSH_NULL PUSH_NULL
-#define _PY_FRAME_GENERAL 433
-#define _REPLACE_WITH_TRUE 434
+#define _PY_FRAME_GENERAL 435
+#define _REPLACE_WITH_TRUE 436
#define _RESUME_CHECK RESUME_CHECK
#define _RETURN_GENERATOR RETURN_GENERATOR
#define _RETURN_VALUE RETURN_VALUE
-#define _SAVE_RETURN_OFFSET 435
-#define _SEND 436
-#define _SEND_GEN_FRAME 437
+#define _SAVE_RETURN_OFFSET 437
+#define _SEND 438
+#define _SEND_GEN_FRAME 439
#define _SETUP_ANNOTATIONS SETUP_ANNOTATIONS
#define _SET_ADD SET_ADD
#define _SET_FUNCTION_ATTRIBUTE SET_FUNCTION_ATTRIBUTE
#define _SET_UPDATE SET_UPDATE
-#define _START_EXECUTOR 438
-#define _STORE_ATTR 439
-#define _STORE_ATTR_INSTANCE_VALUE 440
-#define _STORE_ATTR_SLOT 441
-#define _STORE_ATTR_WITH_HINT 442
+#define _START_EXECUTOR 440
+#define _STORE_ATTR 441
+#define _STORE_ATTR_INSTANCE_VALUE 442
+#define _STORE_ATTR_SLOT 443
+#define _STORE_ATTR_WITH_HINT 444
#define _STORE_DEREF STORE_DEREF
-#define _STORE_FAST 443
-#define _STORE_FAST_0 444
-#define _STORE_FAST_1 445
-#define _STORE_FAST_2 446
-#define _STORE_FAST_3 447
-#define _STORE_FAST_4 448
-#define _STORE_FAST_5 449
-#define _STORE_FAST_6 450
-#define _STORE_FAST_7 451
+#define _STORE_FAST 445
+#define _STORE_FAST_0 446
+#define _STORE_FAST_1 447
+#define _STORE_FAST_2 448
+#define _STORE_FAST_3 449
+#define _STORE_FAST_4 450
+#define _STORE_FAST_5 451
+#define _STORE_FAST_6 452
+#define _STORE_FAST_7 453
#define _STORE_FAST_LOAD_FAST STORE_FAST_LOAD_FAST
#define _STORE_FAST_STORE_FAST STORE_FAST_STORE_FAST
#define _STORE_GLOBAL STORE_GLOBAL
#define _STORE_NAME STORE_NAME
#define _STORE_SLICE STORE_SLICE
-#define _STORE_SUBSCR 452
+#define _STORE_SUBSCR 454
#define _STORE_SUBSCR_DICT STORE_SUBSCR_DICT
#define _STORE_SUBSCR_LIST_INT STORE_SUBSCR_LIST_INT
#define _SWAP SWAP
-#define _TIER2_RESUME_CHECK 453
-#define _TO_BOOL 454
+#define _TIER2_RESUME_CHECK 455
+#define _TO_BOOL 456
#define _TO_BOOL_BOOL TO_BOOL_BOOL
#define _TO_BOOL_INT TO_BOOL_INT
#define _TO_BOOL_LIST TO_BOOL_LIST
@@ -280,13 +281,13 @@ extern "C" {
#define _UNARY_NEGATIVE UNARY_NEGATIVE
#define _UNARY_NOT UNARY_NOT
#define _UNPACK_EX UNPACK_EX
-#define _UNPACK_SEQUENCE 455
+#define _UNPACK_SEQUENCE 457
#define _UNPACK_SEQUENCE_LIST UNPACK_SEQUENCE_LIST
#define _UNPACK_SEQUENCE_TUPLE UNPACK_SEQUENCE_TUPLE
#define _UNPACK_SEQUENCE_TWO_TUPLE UNPACK_SEQUENCE_TWO_TUPLE
#define _WITH_EXCEPT_START WITH_EXCEPT_START
#define _YIELD_VALUE YIELD_VALUE
-#define MAX_UOP_ID 455
+#define MAX_UOP_ID 457
#ifdef __cplusplus
}
diff --git a/Include/internal/pycore_uop_metadata.h b/Include/internal/pycore_uop_metadata.h
index d23a4e2ea14345..f5c666454dcbef 100644
--- a/Include/internal/pycore_uop_metadata.h
+++ b/Include/internal/pycore_uop_metadata.h
@@ -80,6 +80,8 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = {
[_BINARY_SUBSCR_STR_INT] = HAS_DEOPT_FLAG,
[_BINARY_SUBSCR_TUPLE_INT] = HAS_DEOPT_FLAG,
[_BINARY_SUBSCR_DICT] = HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
+ [_BINARY_SUBSCR_CHECK_FUNC] = HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG,
+ [_BINARY_SUBSCR_INIT_CALL] = 0,
[_LIST_APPEND] = HAS_ARG_FLAG | HAS_ERROR_FLAG,
[_SET_ADD] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_STORE_SUBSCR] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
@@ -288,7 +290,9 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = {
[_BINARY_OP_SUBTRACT_INT] = "_BINARY_OP_SUBTRACT_INT",
[_BINARY_SLICE] = "_BINARY_SLICE",
[_BINARY_SUBSCR] = "_BINARY_SUBSCR",
+ [_BINARY_SUBSCR_CHECK_FUNC] = "_BINARY_SUBSCR_CHECK_FUNC",
[_BINARY_SUBSCR_DICT] = "_BINARY_SUBSCR_DICT",
+ [_BINARY_SUBSCR_INIT_CALL] = "_BINARY_SUBSCR_INIT_CALL",
[_BINARY_SUBSCR_LIST_INT] = "_BINARY_SUBSCR_LIST_INT",
[_BINARY_SUBSCR_STR_INT] = "_BINARY_SUBSCR_STR_INT",
[_BINARY_SUBSCR_TUPLE_INT] = "_BINARY_SUBSCR_TUPLE_INT",
@@ -652,6 +656,10 @@ int _PyUop_num_popped(int opcode, int oparg)
return 2;
case _BINARY_SUBSCR_DICT:
return 2;
+ case _BINARY_SUBSCR_CHECK_FUNC:
+ return 2;
+ case _BINARY_SUBSCR_INIT_CALL:
+ return 2;
case _LIST_APPEND:
return 2 + (oparg-1);
case _SET_ADD:
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index abfd8039b293a1..414725549d1c20 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -765,32 +765,40 @@ dummy_func(
res = PyStackRef_FromPyObjectSteal(res_o);
}
- inst(BINARY_SUBSCR_GETITEM, (unused/1, container_st, sub_st -- unused)) {
- PyObject *container = PyStackRef_AsPyObjectBorrow(container_st);
-
- DEOPT_IF(tstate->interp->eval_frame);
- PyTypeObject *tp = Py_TYPE(container);
+ op(_BINARY_SUBSCR_CHECK_FUNC, (container, unused -- container, unused)) {
+ PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(container));
DEOPT_IF(!PyType_HasFeature(tp, Py_TPFLAGS_HEAPTYPE));
PyHeapTypeObject *ht = (PyHeapTypeObject *)tp;
- PyObject *cached = ht->_spec_cache.getitem;
- DEOPT_IF(cached == NULL);
- assert(PyFunction_Check(cached));
- PyFunctionObject *getitem = (PyFunctionObject *)cached;
+ PyObject *getitem = ht->_spec_cache.getitem;
+ DEOPT_IF(getitem == NULL);
+ assert(PyFunction_Check(getitem));
uint32_t cached_version = ht->_spec_cache.getitem_version;
- DEOPT_IF(getitem->func_version != cached_version);
- PyCodeObject *code = (PyCodeObject *)getitem->func_code;
+ DEOPT_IF(((PyFunctionObject *)getitem)->func_version != cached_version);
+ PyCodeObject *code = (PyCodeObject *)PyFunction_GET_CODE(getitem);
assert(code->co_argcount == 2);
DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize));
STAT_INC(BINARY_SUBSCR, hit);
Py_INCREF(getitem);
- _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, getitem, 2);
- STACK_SHRINK(2);
- new_frame->localsplus[0] = container_st;
- new_frame->localsplus[1] = sub_st;
- frame->return_offset = (uint16_t)(next_instr - this_instr);
- DISPATCH_INLINED(new_frame);
}
+ op(_BINARY_SUBSCR_INIT_CALL, (container, sub -- new_frame: _PyInterpreterFrame* )) {
+ PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(container));
+ PyHeapTypeObject *ht = (PyHeapTypeObject *)tp;
+ PyObject *getitem = ht->_spec_cache.getitem;
+ new_frame = _PyFrame_PushUnchecked(tstate, (PyFunctionObject *)getitem, 2);
+ SYNC_SP();
+ new_frame->localsplus[0] = container;
+ new_frame->localsplus[1] = sub;
+ frame->return_offset = (uint16_t)(1 + INLINE_CACHE_ENTRIES_BINARY_SUBSCR);
+ }
+
+ macro(BINARY_SUBSCR_GETITEM) =
+ unused/1 + // Skip over the counter
+ _CHECK_PEP_523 +
+ _BINARY_SUBSCR_CHECK_FUNC +
+ _BINARY_SUBSCR_INIT_CALL +
+ _PUSH_FRAME;
+
inst(LIST_APPEND, (list, unused[oparg-1], v -- list, unused[oparg-1])) {
ERROR_IF(_PyList_AppendTakeRef((PyListObject *)PyStackRef_AsPyObjectBorrow(list),
PyStackRef_AsPyObjectSteal(v)) < 0, error);
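For orientation, here is a minimal Python-level sketch (class and variable names are illustrative, not taken from this patch) of the kind of code BINARY_SUBSCR_GETITEM specializes: a heap type whose pure-Python __getitem__ handles obj[key].

class Grid:
    """A heap type with a pure-Python __getitem__ (two arguments, as the cache requires)."""

    def __init__(self):
        self._cells = {}

    def __getitem__(self, key):
        # After specialization, g[key] validates the cached __getitem__ version
        # (_BINARY_SUBSCR_CHECK_FUNC), builds a frame for it
        # (_BINARY_SUBSCR_INIT_CALL) and pushes it (_PUSH_FRAME).
        return self._cells.get(key, 0)

g = Grid()
total = 0
for i in range(10_000):   # plenty of executions for the adaptive interpreter to specialize
    total += g[i]
print(total)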
diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h
index f0acc3b6ea2ef4..61e1c5cf5c289d 100644
--- a/Python/executor_cases.c.h
+++ b/Python/executor_cases.c.h
@@ -966,7 +966,57 @@
break;
}
- /* _BINARY_SUBSCR_GETITEM is not a viable micro-op for tier 2 because it uses the 'this_instr' variable */
+ case _BINARY_SUBSCR_CHECK_FUNC: {
+ _PyStackRef container;
+ container = stack_pointer[-2];
+ PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(container));
+ if (!PyType_HasFeature(tp, Py_TPFLAGS_HEAPTYPE)) {
+ UOP_STAT_INC(uopcode, miss);
+ JUMP_TO_JUMP_TARGET();
+ }
+ PyHeapTypeObject *ht = (PyHeapTypeObject *)tp;
+ PyObject *getitem = ht->_spec_cache.getitem;
+ if (getitem == NULL) {
+ UOP_STAT_INC(uopcode, miss);
+ JUMP_TO_JUMP_TARGET();
+ }
+ assert(PyFunction_Check(getitem));
+ uint32_t cached_version = ht->_spec_cache.getitem_version;
+ if (((PyFunctionObject *)getitem)->func_version != cached_version) {
+ UOP_STAT_INC(uopcode, miss);
+ JUMP_TO_JUMP_TARGET();
+ }
+ PyCodeObject *code = (PyCodeObject *)PyFunction_GET_CODE(getitem);
+ assert(code->co_argcount == 2);
+ if (!_PyThreadState_HasStackSpace(tstate, code->co_framesize)) {
+ UOP_STAT_INC(uopcode, miss);
+ JUMP_TO_JUMP_TARGET();
+ }
+ STAT_INC(BINARY_SUBSCR, hit);
+ Py_INCREF(getitem);
+ break;
+ }
+
+ case _BINARY_SUBSCR_INIT_CALL: {
+ _PyStackRef sub;
+ _PyStackRef container;
+ _PyInterpreterFrame *new_frame;
+ sub = stack_pointer[-1];
+ container = stack_pointer[-2];
+ PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(container));
+ PyHeapTypeObject *ht = (PyHeapTypeObject *)tp;
+ PyObject *getitem = ht->_spec_cache.getitem;
+ new_frame = _PyFrame_PushUnchecked(tstate, (PyFunctionObject *)getitem, 2);
+ stack_pointer += -2;
+ assert(WITHIN_STACK_BOUNDS());
+ new_frame->localsplus[0] = container;
+ new_frame->localsplus[1] = sub;
+ frame->return_offset = (uint16_t)(1 + INLINE_CACHE_ENTRIES_BINARY_SUBSCR);
+ stack_pointer[0].bits = (uintptr_t)new_frame;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
case _LIST_APPEND: {
_PyStackRef v;
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h
index ff8c4eab58f324..4efaf899f23d1a 100644
--- a/Python/generated_cases.c.h
+++ b/Python/generated_cases.c.h
@@ -469,37 +469,63 @@
}
TARGET(BINARY_SUBSCR_GETITEM) {
- _Py_CODEUNIT *this_instr = frame->instr_ptr = next_instr;
+ frame->instr_ptr = next_instr;
next_instr += 2;
INSTRUCTION_STATS(BINARY_SUBSCR_GETITEM);
static_assert(INLINE_CACHE_ENTRIES_BINARY_SUBSCR == 1, "incorrect cache size");
- _PyStackRef container_st;
- _PyStackRef sub_st;
+ _PyStackRef container;
+ _PyStackRef sub;
+ _PyInterpreterFrame *new_frame;
/* Skip 1 cache entry */
- sub_st = stack_pointer[-1];
- container_st = stack_pointer[-2];
- PyObject *container = PyStackRef_AsPyObjectBorrow(container_st);
- DEOPT_IF(tstate->interp->eval_frame, BINARY_SUBSCR);
- PyTypeObject *tp = Py_TYPE(container);
- DEOPT_IF(!PyType_HasFeature(tp, Py_TPFLAGS_HEAPTYPE), BINARY_SUBSCR);
- PyHeapTypeObject *ht = (PyHeapTypeObject *)tp;
- PyObject *cached = ht->_spec_cache.getitem;
- DEOPT_IF(cached == NULL, BINARY_SUBSCR);
- assert(PyFunction_Check(cached));
- PyFunctionObject *getitem = (PyFunctionObject *)cached;
- uint32_t cached_version = ht->_spec_cache.getitem_version;
- DEOPT_IF(getitem->func_version != cached_version, BINARY_SUBSCR);
- PyCodeObject *code = (PyCodeObject *)getitem->func_code;
- assert(code->co_argcount == 2);
- DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), BINARY_SUBSCR);
- STAT_INC(BINARY_SUBSCR, hit);
- Py_INCREF(getitem);
- _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, getitem, 2);
- STACK_SHRINK(2);
- new_frame->localsplus[0] = container_st;
- new_frame->localsplus[1] = sub_st;
- frame->return_offset = (uint16_t)(next_instr - this_instr);
- DISPATCH_INLINED(new_frame);
+ // _CHECK_PEP_523
+ {
+ DEOPT_IF(tstate->interp->eval_frame, BINARY_SUBSCR);
+ }
+ // _BINARY_SUBSCR_CHECK_FUNC
+ container = stack_pointer[-2];
+ {
+ PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(container));
+ DEOPT_IF(!PyType_HasFeature(tp, Py_TPFLAGS_HEAPTYPE), BINARY_SUBSCR);
+ PyHeapTypeObject *ht = (PyHeapTypeObject *)tp;
+ PyObject *getitem = ht->_spec_cache.getitem;
+ DEOPT_IF(getitem == NULL, BINARY_SUBSCR);
+ assert(PyFunction_Check(getitem));
+ uint32_t cached_version = ht->_spec_cache.getitem_version;
+ DEOPT_IF(((PyFunctionObject *)getitem)->func_version != cached_version, BINARY_SUBSCR);
+ PyCodeObject *code = (PyCodeObject *)PyFunction_GET_CODE(getitem);
+ assert(code->co_argcount == 2);
+ DEOPT_IF(!_PyThreadState_HasStackSpace(tstate, code->co_framesize), BINARY_SUBSCR);
+ STAT_INC(BINARY_SUBSCR, hit);
+ Py_INCREF(getitem);
+ }
+ // _BINARY_SUBSCR_INIT_CALL
+ sub = stack_pointer[-1];
+ {
+ PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(container));
+ PyHeapTypeObject *ht = (PyHeapTypeObject *)tp;
+ PyObject *getitem = ht->_spec_cache.getitem;
+ new_frame = _PyFrame_PushUnchecked(tstate, (PyFunctionObject *)getitem, 2);
+ stack_pointer += -2;
+ assert(WITHIN_STACK_BOUNDS());
+ new_frame->localsplus[0] = container;
+ new_frame->localsplus[1] = sub;
+ frame->return_offset = (uint16_t)(1 + INLINE_CACHE_ENTRIES_BINARY_SUBSCR);
+ }
+ // _PUSH_FRAME
+ {
+ // Write it out explicitly because it's subtly different.
+ // Eventually this should be the only occurrence of this code.
+ assert(tstate->interp->eval_frame == NULL);
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ new_frame->previous = frame;
+ CALL_STAT_INC(inlined_py_calls);
+ frame = tstate->current_frame = new_frame;
+ tstate->py_recursion_remaining--;
+ LOAD_SP();
+ LOAD_IP(0);
+ LLTRACE_RESUME_FRAME();
+ }
+ DISPATCH();
}
TARGET(BINARY_SUBSCR_LIST_INT) {
diff --git a/Python/optimizer.c b/Python/optimizer.c
index 9d0381357f2123..e9cbfc5497189c 100644
--- a/Python/optimizer.c
+++ b/Python/optimizer.c
@@ -795,6 +795,7 @@ translate_bytecode_to_trace(
assert(i + 1 == nuops);
if (opcode == FOR_ITER_GEN ||
opcode == LOAD_ATTR_PROPERTY ||
+ opcode == BINARY_SUBSCR_GETITEM ||
opcode == SEND_GEN)
{
DPRINTF(2, "Bailing due to dynamic target\n");
@@ -921,7 +922,9 @@ translate_bytecode_to_trace(
2 * INSTR_IP(initial_instr, code));
return 0;
}
- if (trace[trace_length-1].opcode != _JUMP_TO_TOP) {
+ if (!is_terminator(&trace[trace_length-1])) {
+ /* Allow space for _EXIT_TRACE */
+ max_length += 2;
ADD_TO_TRACE(_EXIT_TRACE, 0, 0, target);
}
DPRINTF(1,
@@ -1102,7 +1105,7 @@ sanity_check(_PyExecutorObject *executor)
CHECK(inst->format == UOP_FORMAT_JUMP);
CHECK(inst->error_target < executor->code_size);
}
- if (opcode == _JUMP_TO_TOP || opcode == _EXIT_TRACE) {
+ if (is_terminator(inst)) {
ended = true;
i++;
break;
@@ -1207,8 +1210,7 @@ int effective_trace_length(_PyUOpInstruction *buffer, int length)
if (opcode == _NOP) {
nop_count++;
}
- if (opcode == _EXIT_TRACE ||
- opcode == _JUMP_TO_TOP) {
+ if (is_terminator(&buffer[i])) {
return i+1-nop_count;
}
}
@@ -1257,7 +1259,7 @@ uop_optimize(
else if (oparg < _PyUop_Replication[opcode]) {
buffer[pc].opcode = opcode + oparg + 1;
}
- else if (opcode == _JUMP_TO_TOP || opcode == _EXIT_TRACE) {
+ else if (is_terminator(&buffer[pc])) {
break;
}
assert(_PyOpcode_uop_name[buffer[pc].opcode]);
diff --git a/Python/optimizer_analysis.c b/Python/optimizer_analysis.c
index 8c866417478128..f7adb44c9e09ef 100644
--- a/Python/optimizer_analysis.c
+++ b/Python/optimizer_analysis.c
@@ -52,14 +52,6 @@
#define DPRINTF(level, ...)
#endif
-
-
-static inline bool
-op_is_end(uint32_t opcode)
-{
- return opcode == _EXIT_TRACE || opcode == _JUMP_TO_TOP;
-}
-
static int
get_mutations(PyObject* dict) {
assert(PyDict_CheckExact(dict));
@@ -288,7 +280,7 @@ remove_globals(_PyInterpreterFrame *frame, _PyUOpInstruction *buffer,
prechecked_function_version = (uint32_t)buffer[pc].operand;
break;
default:
- if (op_is_end(opcode)) {
+ if (is_terminator(inst)) {
return 1;
}
break;
@@ -552,6 +544,7 @@ remove_unneeded_uops(_PyUOpInstruction *buffer, int buffer_size)
}
case _JUMP_TO_TOP:
case _EXIT_TRACE:
+ case _DYNAMIC_EXIT:
return pc + 1;
default:
{
diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h
index b704c9e77319e4..50aa9728cf2939 100644
--- a/Python/optimizer_cases.c.h
+++ b/Python/optimizer_cases.c.h
@@ -539,7 +539,18 @@
break;
}
- /* _BINARY_SUBSCR_GETITEM is not a viable micro-op for tier 2 */
+ case _BINARY_SUBSCR_CHECK_FUNC: {
+ break;
+ }
+
+ case _BINARY_SUBSCR_INIT_CALL: {
+ _PyInterpreterFrame *new_frame;
+ new_frame = sym_new_not_null(ctx);
+ stack_pointer[-2] = (_Py_UopsSymbol *)new_frame;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
case _LIST_APPEND: {
stack_pointer += -1;
From 8234419c32b9890689e26da936882bc1e9ee161f Mon Sep 17 00:00:00 2001
From: Jelle Zijlstra
Date: Thu, 1 Aug 2024 16:28:25 -0700
Subject: [PATCH 103/139] gh-122562: Remove ste_free and ste_child_free from
symtable (#122563)
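The removed ste_free / ste_child_free bits appear to have been write-only bookkeeping; per-name scoping is still computed by analyze_name() and surfaces unchanged through the symtable module. A quick sketch using the standard symtable API (not code from this patch):

import symtable

src = """
def outer():
    x = 1
    def inner():
        return x      # 'x' is a free variable in inner()
    return inner
"""
top = symtable.symtable(src, "<example>", "exec")
outer_table = top.get_children()[0]
inner_table = outer_table.get_children()[0]
print(inner_table.lookup("x").is_free())   # True: free variables are still classified as before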
---
Include/internal/pycore_symtable.h | 3 ---
Python/symtable.c | 12 ------------
2 files changed, 15 deletions(-)
diff --git a/Include/internal/pycore_symtable.h b/Include/internal/pycore_symtable.h
index d9ed16a3d2321f..b449e8b9dcd91f 100644
--- a/Include/internal/pycore_symtable.h
+++ b/Include/internal/pycore_symtable.h
@@ -106,9 +106,6 @@ typedef struct _symtable_entry {
const char *ste_scope_info;
int ste_nested; /* true if block is nested */
- unsigned ste_free : 1; /* true if block has free variables */
- unsigned ste_child_free : 1; /* true if a child block has free vars,
- including free refs to globals */
unsigned ste_generator : 1; /* true if namespace is a generator */
unsigned ste_coroutine : 1; /* true if namespace is a coroutine */
unsigned ste_annotations_used : 1; /* true if there are any annotations in this scope */
diff --git a/Python/symtable.c b/Python/symtable.c
index 88af37198bfba5..89a0d8a2ccec1a 100644
--- a/Python/symtable.c
+++ b/Python/symtable.c
@@ -115,7 +115,6 @@ ste_new(struct symtable *st, identifier name, _Py_block_ty block,
ste->ste_scope_info = NULL;
ste->ste_nested = 0;
- ste->ste_free = 0;
ste->ste_varargs = 0;
ste->ste_varkeywords = 0;
ste->ste_annotations_used = 0;
@@ -125,7 +124,6 @@ ste_new(struct symtable *st, identifier name, _Py_block_ty block,
(st->st_cur->ste_nested ||
_PyST_IsFunctionLike(st->st_cur)))
ste->ste_nested = 1;
- ste->ste_child_free = 0;
ste->ste_generator = 0;
ste->ste_coroutine = 0;
ste->ste_comprehension = NoComprehension;
@@ -299,8 +297,6 @@ static void _dump_symtable(PySTEntryObject* ste, PyObject* prefix)
comptype,
prefix,
ste->ste_nested ? " nested" : "",
- ste->ste_free ? " free" : "",
- ste->ste_child_free ? " child_free" : "",
ste->ste_generator ? " generator" : "",
ste->ste_coroutine ? " coroutine" : "",
ste->ste_varargs ? " varargs" : "",
@@ -692,7 +688,6 @@ analyze_name(PySTEntryObject *ste, PyObject *scopes, PyObject *name, long flags,
return error_at_directive(ste, name);
}
SET_SCOPE(scopes, name, FREE);
- ste->ste_free = 1;
return PySet_Add(free, name) >= 0;
}
if (flags & DEF_BOUND) {
@@ -741,7 +736,6 @@ analyze_name(PySTEntryObject *ste, PyObject *scopes, PyObject *name, long flags,
}
if (contains) {
SET_SCOPE(scopes, name, FREE);
- ste->ste_free = 1;
return PySet_Add(free, name) >= 0;
}
}
@@ -758,8 +752,6 @@ analyze_name(PySTEntryObject *ste, PyObject *scopes, PyObject *name, long flags,
return 1;
}
}
- if (ste->ste_nested)
- ste->ste_free = 1;
SET_SCOPE(scopes, name, GLOBAL_IMPLICIT);
return 1;
}
@@ -842,7 +834,6 @@ inline_comprehension(PySTEntryObject *ste, PySTEntryObject *comp,
}
}
}
- comp->ste_free = PySet_Size(comp_free) > 0;
if (remove_dunder_class && PyDict_DelItemString(comp->ste_symbols, "__class__") < 0) {
return 0;
}
@@ -1202,9 +1193,6 @@ analyze_block(PySTEntryObject *ste, PyObject *bound, PyObject *free,
if (!temp)
goto error;
Py_DECREF(temp);
- /* Check if any children have free variables */
- if (entry->ste_free || entry->ste_child_free)
- ste->ste_child_free = 1;
}
/* Splice children of inlined comprehensions into our children list */
From dbdbef3668293abdceac2b8a7b3e4615e6bde143 Mon Sep 17 00:00:00 2001
From: Jonathon Vandezande
Date: Thu, 1 Aug 2024 21:31:37 -0400
Subject: [PATCH 104/139] Fixes typo in idlelib/idle_test/example_stub.pyi
(#122520)
---------
Co-authored-by: Terry Jan Reedy
---
Lib/idlelib/idle_test/example_stub.pyi | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Lib/idlelib/idle_test/example_stub.pyi b/Lib/idlelib/idle_test/example_stub.pyi
index 17b58010a9d8de..abcdbc17529974 100644
--- a/Lib/idlelib/idle_test/example_stub.pyi
+++ b/Lib/idlelib/idle_test/example_stub.pyi
@@ -1,4 +1,4 @@
-" Example to test recognition of .pyi file as Python source code.
+# An example file to test recognition of a .pyi file as Python source code.
class Example:
def method(self, argument1: str, argument2: list[int]) -> None: ...
From 5a7f7c48644baf82988f30bcb43e03dcfceb75dd Mon Sep 17 00:00:00 2001
From: John Riggles
Date: Thu, 1 Aug 2024 23:02:43 -0400
Subject: [PATCH 105/139] gh-120083: Add IDLE Hovertip foreground color needed
for recent macOS (#120605)
On recent versions of macOS (sometime between Catalina and Sonoma 14.5), the default Hovertip foreground color changed from black to white, leaving nearly unreadable white text on the pale-yellow tip background. This might be a side effect of matching the white foreground used for dark-mode text. The unreadable result is shown in #120083 (comment).
The foreground and background colors were made parameters so that different colors can be passed for future additional hovertips in IDLE.
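A minimal usage sketch of the new parameters (widget names are made up for illustration; the default background matches the previously hard-coded value, and the foreground now defaults to explicit black):

import tkinter as tk
from idlelib.tooltip import Hovertip

root = tk.Tk()
button = tk.Button(root, text="Hover me")
button.pack(padx=20, pady=20)

# Passing the colors explicitly keeps the tip readable whatever the platform default is.
Hovertip(button, "Still black on pale yellow, even on recent macOS.",
         hover_delay=500, foreground="#000000", background="#ffffe0")

root.mainloop()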
---------
Co-authored-by: Terry Jan Reedy
---
Lib/idlelib/News3.txt | 4 ++++
Lib/idlelib/tooltip.py | 8 ++++++--
.../IDLE/2024-06-16-21-42-45.gh-issue-120083.nczuyv.rst | 1 +
3 files changed, 11 insertions(+), 2 deletions(-)
create mode 100644 Misc/NEWS.d/next/IDLE/2024-06-16-21-42-45.gh-issue-120083.nczuyv.rst
diff --git a/Lib/idlelib/News3.txt b/Lib/idlelib/News3.txt
index a7a92e97b6c244..37ff93f9866e3c 100644
--- a/Lib/idlelib/News3.txt
+++ b/Lib/idlelib/News3.txt
@@ -4,6 +4,10 @@ Released on 2024-10-xx
=========================
+gh-120083: Add explicit black IDLE Hovertip foreground color needed for
+recent macOS. Fixes Sonoma showing unreadable white on pale yellow.
+Patch by John Riggles.
+
gh-122482: Change About IDLE to direct users to discuss.python.org
instead of the now unused idle-dev email and mailing list.
diff --git a/Lib/idlelib/tooltip.py b/Lib/idlelib/tooltip.py
index 3983690dd41177..df5b1fe1dcfb08 100644
--- a/Lib/idlelib/tooltip.py
+++ b/Lib/idlelib/tooltip.py
@@ -144,7 +144,8 @@ def hidetip(self):
class Hovertip(OnHoverTooltipBase):
"A tooltip that pops up when a mouse hovers over an anchor widget."
- def __init__(self, anchor_widget, text, hover_delay=1000):
+ def __init__(self, anchor_widget, text, hover_delay=1000,
+ foreground="#000000", background="#ffffe0"):
"""Create a text tooltip with a mouse hover delay.
anchor_widget: the widget next to which the tooltip will be shown
@@ -156,10 +157,13 @@ def __init__(self, anchor_widget, text, hover_delay=1000):
"""
super().__init__(anchor_widget, hover_delay=hover_delay)
self.text = text
+ self.foreground = foreground
+ self.background = background
def showcontents(self):
label = Label(self.tipwindow, text=self.text, justify=LEFT,
- background="#ffffe0", relief=SOLID, borderwidth=1)
+ relief=SOLID, borderwidth=1,
+ foreground=self.foreground, background=self.background)
label.pack()
diff --git a/Misc/NEWS.d/next/IDLE/2024-06-16-21-42-45.gh-issue-120083.nczuyv.rst b/Misc/NEWS.d/next/IDLE/2024-06-16-21-42-45.gh-issue-120083.nczuyv.rst
new file mode 100644
index 00000000000000..643c2bb38c6e1f
--- /dev/null
+++ b/Misc/NEWS.d/next/IDLE/2024-06-16-21-42-45.gh-issue-120083.nczuyv.rst
@@ -0,0 +1 @@
+Add explicit black IDLE Hovertip foreground color needed for recent macOS. Fixes Sonoma showing unreadable white on pale yellow. Patch by John Riggles.
From d57f8a9f76e75384ec997686c2a826b1dc3c69c4 Mon Sep 17 00:00:00 2001
From: Damien <81557462+Damien-Chen@users.noreply.github.com>
Date: Fri, 2 Aug 2024 14:09:27 +0800
Subject: [PATCH 106/139] gh-122544: Change OS image in readthedocs.yml to
ubuntu-24.04 (#122568)
---
.readthedocs.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.readthedocs.yml b/.readthedocs.yml
index d0d0c3b93ed207..a57de00544e4e3 100644
--- a/.readthedocs.yml
+++ b/.readthedocs.yml
@@ -8,7 +8,7 @@ sphinx:
configuration: Doc/conf.py
build:
- os: ubuntu-22.04
+ os: ubuntu-24.04
tools:
python: "3"
From 03b88522f5e847773845b0fac90fd06d04937a65 Mon Sep 17 00:00:00 2001
From: Serhiy Storchaka
Date: Fri, 2 Aug 2024 13:12:19 +0300
Subject: [PATCH 107/139] gh-122188: Remove _imp.pyc_magic_number (GH-122503)
_imp.pyc_magic_number_token should be enough.
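With only the token exposed, the traditional two-byte magic number and the b'\r\n' suffix can still be recovered in pure Python; a sketch (assuming an interpreter that provides _imp.pyc_magic_number_token):

import _imp
import sys

magic_bytes = _imp.pyc_magic_number_token.to_bytes(4, 'little')  # the 4 bytes at the start of a .pyc
assert magic_bytes[2:] == b'\r\n'                                # traditional CR/LF suffix
magic_number = int.from_bytes(magic_bytes[:2], 'little')
start = 2900 + sys.version_info.minor * 50                       # per the test: 3.n uses 2900 + 50n .. +49
assert magic_number in range(start, start + 50)
print(magic_number)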
---
Lib/importlib/_bootstrap_external.py | 2 +-
Lib/test/test_import/__init__.py | 10 ++++++----
Python/import.c | 6 +-----
3 files changed, 8 insertions(+), 10 deletions(-)
diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py
index 4d154dc4c25edc..5bbcb376a4a6b3 100644
--- a/Lib/importlib/_bootstrap_external.py
+++ b/Lib/importlib/_bootstrap_external.py
@@ -221,7 +221,7 @@ def _write_atomic(path, data, mode=0o666):
_code_type = type(_write_atomic.__code__)
-MAGIC_NUMBER = (_imp.pyc_magic_number).to_bytes(2, 'little') + b'\r\n'
+MAGIC_NUMBER = _imp.pyc_magic_number_token.to_bytes(4, 'little')
_PYCACHE = '__pycache__'
_OPT = 'opt-'
diff --git a/Lib/test/test_import/__init__.py b/Lib/test/test_import/__init__.py
index 56c6ffe93fce37..fd778ec216cc98 100644
--- a/Lib/test/test_import/__init__.py
+++ b/Lib/test/test_import/__init__.py
@@ -3116,10 +3116,12 @@ def test_pyimport_addmodule_create(self):
@cpython_only
class TestMagicNumber(unittest.TestCase):
def test_magic_number_endianness(self):
- magic_number = (_imp.pyc_magic_number).to_bytes(2, 'little') + b'\r\n'
- raw_magic_number = int.from_bytes(magic_number, 'little')
-
- self.assertEqual(raw_magic_number, _imp.pyc_magic_number_token)
+ magic_number_bytes = _imp.pyc_magic_number_token.to_bytes(4, 'little')
+ self.assertEqual(magic_number_bytes[2:], b'\r\n')
+ # Starting with Python 3.11, Python 3.n starts with magic number 2900+50n.
+ magic_number = int.from_bytes(magic_number_bytes[:2], 'little')
+ start = 2900 + sys.version_info.minor * 50
+ self.assertIn(magic_number, range(start, start + 50))
if __name__ == '__main__':
diff --git a/Python/import.c b/Python/import.c
index 540874a0f0414f..f4c0d544fbdefa 100644
--- a/Python/import.c
+++ b/Python/import.c
@@ -6,7 +6,7 @@
#include "pycore_import.h" // _PyImport_BootstrapImp()
#include "pycore_initconfig.h" // _PyStatus_OK()
#include "pycore_interp.h" // struct _import_runtime_state
-#include "pycore_magic_number.h" // PYC_MAGIC_NUMBER
+#include "pycore_magic_number.h" // PYC_MAGIC_NUMBER_TOKEN
#include "pycore_namespace.h" // _PyNamespace_Type
#include "pycore_object.h" // _Py_SetImmortal()
#include "pycore_pyerrors.h" // _PyErr_SetString()
@@ -4810,10 +4810,6 @@ imp_module_exec(PyObject *module)
return -1;
}
- if (PyModule_AddIntConstant(module, "pyc_magic_number", PYC_MAGIC_NUMBER) < 0) {
- return -1;
- }
-
if (PyModule_AddIntConstant(
module, "pyc_magic_number_token", PYC_MAGIC_NUMBER_TOKEN) < 0)
{
From addbb73927f55855dfcc62fd47b0018de8a814ed Mon Sep 17 00:00:00 2001
From: Victor Stinner
Date: Fri, 2 Aug 2024 12:13:33 +0200
Subject: [PATCH 108/139] Update PyObject_Del() documentation (#122597)
Replace PyMem_Del() with PyMem_Free().
---
Doc/c-api/allocation.rst | 7 +------
Doc/c-api/memory.rst | 6 +++---
Modules/_sre/sre.c | 2 +-
Modules/_testcapi/heaptype.c | 16 ++++++++--------
4 files changed, 13 insertions(+), 18 deletions(-)
diff --git a/Doc/c-api/allocation.rst b/Doc/c-api/allocation.rst
index b3609c233156b6..0d53b18ea87d5e 100644
--- a/Doc/c-api/allocation.rst
+++ b/Doc/c-api/allocation.rst
@@ -54,12 +54,7 @@ Allocating Objects on the Heap
.. c:function:: void PyObject_Del(void *op)
- Releases memory allocated to an object using :c:macro:`PyObject_New` or
- :c:macro:`PyObject_NewVar`. This is normally called from the
- :c:member:`~PyTypeObject.tp_dealloc` handler specified in the object's type. The fields of
- the object should not be accessed after this call as the memory is no
- longer a valid Python object.
-
+ Same as :c:func:`PyObject_Free`.
.. c:var:: PyObject _Py_NoneStruct
diff --git a/Doc/c-api/memory.rst b/Doc/c-api/memory.rst
index 9da09a21607f61..4ecc998b37e598 100644
--- a/Doc/c-api/memory.rst
+++ b/Doc/c-api/memory.rst
@@ -734,7 +734,7 @@ The same code using the type-oriented function set::
return PyErr_NoMemory();
/* ...Do some I/O operation involving buf... */
res = PyBytes_FromString(buf);
- PyMem_Del(buf); /* allocated with PyMem_New */
+ PyMem_Free(buf); /* allocated with PyMem_New */
return res;
Note that in the two examples above, the buffer is always manipulated via
@@ -750,11 +750,11 @@ allocators operating on different heaps. ::
...
PyMem_Del(buf3); /* Wrong -- should be PyMem_Free() */
free(buf2); /* Right -- allocated via malloc() */
- free(buf1); /* Fatal -- should be PyMem_Del() */
+ free(buf1); /* Fatal -- should be PyMem_Free() */
In addition to the functions aimed at handling raw memory blocks from the Python
heap, objects in Python are allocated and released with :c:macro:`PyObject_New`,
-:c:macro:`PyObject_NewVar` and :c:func:`PyObject_Del`.
+:c:macro:`PyObject_NewVar` and :c:func:`PyObject_Free`.
These will be explained in the next chapter on defining and implementing new
object types in C.
diff --git a/Modules/_sre/sre.c b/Modules/_sre/sre.c
index 0a888af31b0497..01420d1a10b1cf 100644
--- a/Modules/_sre/sre.c
+++ b/Modules/_sre/sre.c
@@ -530,7 +530,7 @@ state_fini(SRE_STATE* state)
PyBuffer_Release(&state->buffer);
Py_XDECREF(state->string);
data_stack_dealloc(state);
- /* See above PyMem_Del for why we explicitly cast here. */
+ /* See above PyMem_Free() for why we explicitly cast here. */
PyMem_Free((void*) state->mark);
state->mark = NULL;
}
diff --git a/Modules/_testcapi/heaptype.c b/Modules/_testcapi/heaptype.c
index 4526583a8059d9..b45b890b88d81f 100644
--- a/Modules/_testcapi/heaptype.c
+++ b/Modules/_testcapi/heaptype.c
@@ -269,16 +269,16 @@ test_type_from_ephemeral_spec(PyObject *self, PyObject *Py_UNUSED(ignored))
// (Explicitly overwrite memory before freeing,
// so bugs show themselves even without the debug allocator's help.)
memset(spec, 0xdd, sizeof(PyType_Spec));
- PyMem_Del(spec);
+ PyMem_Free(spec);
spec = NULL;
memset(name, 0xdd, sizeof(NAME));
- PyMem_Del(name);
+ PyMem_Free(name);
name = NULL;
memset(doc, 0xdd, sizeof(DOC));
- PyMem_Del(doc);
+ PyMem_Free(doc);
doc = NULL;
memset(slots, 0xdd, 3 * sizeof(PyType_Slot));
- PyMem_Del(slots);
+ PyMem_Free(slots);
slots = NULL;
/* check that everything works */
@@ -304,10 +304,10 @@ test_type_from_ephemeral_spec(PyObject *self, PyObject *Py_UNUSED(ignored))
result = Py_NewRef(Py_None);
finally:
- PyMem_Del(spec);
- PyMem_Del(name);
- PyMem_Del(doc);
- PyMem_Del(slots);
+ PyMem_Free(spec);
+ PyMem_Free(name);
+ PyMem_Free(doc);
+ PyMem_Free(slots);
Py_XDECREF(class);
Py_XDECREF(instance);
Py_XDECREF(obj);
From fb864c76cd5e450e789a7b4095832e118cc49a39 Mon Sep 17 00:00:00 2001
From: Bénédikt Tran
<10796600+picnixz@users.noreply.github.com>
Date: Fri, 2 Aug 2024 13:16:32 +0200
Subject: [PATCH 109/139] gh-121723: Relax constraints on queue objects for
`logging.handlers.QueueHandler`. (GH-122154)
---
Doc/library/logging.config.rst | 9 +-
Lib/logging/config.py | 55 ++++-----
Lib/test/test_logging.py | 107 ++++++++++++++----
...-07-23-10-59-38.gh-issue-121723.iJEf7e.rst | 3 +
4 files changed, 124 insertions(+), 50 deletions(-)
create mode 100644 Misc/NEWS.d/next/Library/2024-07-23-10-59-38.gh-issue-121723.iJEf7e.rst
diff --git a/Doc/library/logging.config.rst b/Doc/library/logging.config.rst
index dfbf0b1cf2f9ff..0ddbc1a5f88048 100644
--- a/Doc/library/logging.config.rst
+++ b/Doc/library/logging.config.rst
@@ -753,9 +753,12 @@ The ``queue`` and ``listener`` keys are optional.
If the ``queue`` key is present, the corresponding value can be one of the following:
-* An actual instance of :class:`queue.Queue` or a subclass thereof. This is of course
- only possible if you are constructing or modifying the configuration dictionary in
- code.
+* An object implementing the :class:`queue.Queue` public API. For instance,
+ this may be an actual instance of :class:`queue.Queue` or a subclass thereof,
+ or a proxy obtained by :meth:`multiprocessing.managers.SyncManager.Queue`.
+
+ This is of course only possible if you are constructing or modifying
+ the configuration dictionary in code.
* A string that resolves to a callable which, when called with no arguments, returns
the :class:`queue.Queue` instance to use. That callable could be a
diff --git a/Lib/logging/config.py b/Lib/logging/config.py
index 95e129ae988c24..3781cb1aeb9ae2 100644
--- a/Lib/logging/config.py
+++ b/Lib/logging/config.py
@@ -497,6 +497,33 @@ def as_tuple(self, value):
value = tuple(value)
return value
+def _is_queue_like_object(obj):
+ """Check that *obj* implements the Queue API."""
+ if isinstance(obj, queue.Queue):
+ return True
+ # defer importing multiprocessing as much as possible
+ from multiprocessing.queues import Queue as MPQueue
+ if isinstance(obj, MPQueue):
+ return True
+ # Depending on the multiprocessing start context, we cannot create
+ # a multiprocessing.managers.BaseManager instance 'mm' to get the
+ # runtime type of mm.Queue() or mm.JoinableQueue() (see gh-119819).
+ #
+ # Since we only need an object implementing the Queue API, we only
+ # do a protocol check, but we do not use typing.runtime_checkable()
+ # and typing.Protocol to reduce import time (see gh-121723).
+ #
+ # Ideally, we would have wanted to simply use strict type checking
+ # instead of a protocol-based type checking since the latter does
+ # not check the method signatures.
+ queue_interface = [
+ 'empty', 'full', 'get', 'get_nowait',
+ 'put', 'put_nowait', 'join', 'qsize',
+ 'task_done',
+ ]
+ return all(callable(getattr(obj, method, None))
+ for method in queue_interface)
+
class DictConfigurator(BaseConfigurator):
"""
Configure logging using a dictionary-like object to describe the
@@ -791,32 +818,8 @@ def configure_handler(self, config):
if '()' not in qspec:
raise TypeError('Invalid queue specifier %r' % qspec)
config['queue'] = self.configure_custom(dict(qspec))
- else:
- from multiprocessing.queues import Queue as MPQueue
-
- if not isinstance(qspec, (queue.Queue, MPQueue)):
- # Safely check if 'qspec' is an instance of Manager.Queue
- # / Manager.JoinableQueue
-
- from multiprocessing import Manager as MM
- from multiprocessing.managers import BaseProxy
-
- # if it's not an instance of BaseProxy, it also can't be
- # an instance of Manager.Queue / Manager.JoinableQueue
- if isinstance(qspec, BaseProxy):
- # Sometimes manager or queue creation might fail
- # (e.g. see issue gh-120868). In that case, any
- # exception during the creation of these queues will
- # propagate up to the caller and be wrapped in a
- # `ValueError`, whose cause will indicate the details of
- # the failure.
- mm = MM()
- proxy_queue = mm.Queue()
- proxy_joinable_queue = mm.JoinableQueue()
- if not isinstance(qspec, (type(proxy_queue), type(proxy_joinable_queue))):
- raise TypeError('Invalid queue specifier %r' % qspec)
- else:
- raise TypeError('Invalid queue specifier %r' % qspec)
+ elif not _is_queue_like_object(qspec):
+ raise TypeError('Invalid queue specifier %r' % qspec)
if 'listener' in config:
lspec = config['listener']
diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py
index 6d688d4b81bbf4..49523756e115c6 100644
--- a/Lib/test/test_logging.py
+++ b/Lib/test/test_logging.py
@@ -2368,6 +2368,26 @@ class CustomListener(logging.handlers.QueueListener):
class CustomQueue(queue.Queue):
pass
+class CustomQueueProtocol:
+ def __init__(self, maxsize=0):
+ self.queue = queue.Queue(maxsize)
+
+ def __getattr__(self, attribute):
+ queue = object.__getattribute__(self, 'queue')
+ return getattr(queue, attribute)
+
+class CustomQueueFakeProtocol(CustomQueueProtocol):
+ # An object implementing the Queue API (incorrect signatures).
+ # The object will be considered a valid queue class since we
+ # do not check the signatures (only callability of methods)
+ # but will NOT be usable in production since a TypeError will
+ # be raised due to a missing argument.
+ def empty(self, x):
+ pass
+
+class CustomQueueWrongProtocol(CustomQueueProtocol):
+ empty = None
+
def queueMaker():
return queue.Queue()
@@ -3901,18 +3921,16 @@ def do_queuehandler_configuration(self, qspec, lspec):
@threading_helper.requires_working_threading()
@support.requires_subprocess()
def test_config_queue_handler(self):
- q = CustomQueue()
- dq = {
- '()': __name__ + '.CustomQueue',
- 'maxsize': 10
- }
+ qs = [CustomQueue(), CustomQueueProtocol()]
+ dqs = [{'()': f'{__name__}.{cls}', 'maxsize': 10}
+ for cls in ['CustomQueue', 'CustomQueueProtocol']]
dl = {
'()': __name__ + '.listenerMaker',
'arg1': None,
'arg2': None,
'respect_handler_level': True
}
- qvalues = (None, __name__ + '.queueMaker', __name__ + '.CustomQueue', dq, q)
+ qvalues = (None, __name__ + '.queueMaker', __name__ + '.CustomQueue', *dqs, *qs)
lvalues = (None, __name__ + '.CustomListener', dl, CustomListener)
for qspec, lspec in itertools.product(qvalues, lvalues):
self.do_queuehandler_configuration(qspec, lspec)
@@ -3932,15 +3950,21 @@ def test_config_queue_handler(self):
@support.requires_subprocess()
@patch("multiprocessing.Manager")
def test_config_queue_handler_does_not_create_multiprocessing_manager(self, manager):
- # gh-120868
+ # gh-120868, gh-121723
from multiprocessing import Queue as MQ
q1 = {"()": "queue.Queue", "maxsize": -1}
q2 = MQ()
q3 = queue.Queue()
-
- for qspec in (q1, q2, q3):
+ # CustomQueueFakeProtocol passes the checks but will not be usable
+ # since the signatures are incompatible. Checking the Queue API
+ # without testing the type of the actual queue is a trade-off
+ # between usability and the work we need to do in order to safely
+ # check that the queue object correctly implements the API.
+ q4 = CustomQueueFakeProtocol()
+
+ for qspec in (q1, q2, q3, q4):
self.apply_config(
{
"version": 1,
@@ -3956,21 +3980,62 @@ def test_config_queue_handler_does_not_create_multiprocessing_manager(self, mana
@patch("multiprocessing.Manager")
def test_config_queue_handler_invalid_config_does_not_create_multiprocessing_manager(self, manager):
- # gh-120868
+ # gh-120868, gh-121723
- with self.assertRaises(ValueError):
- self.apply_config(
- {
- "version": 1,
- "handlers": {
- "queue_listener": {
- "class": "logging.handlers.QueueHandler",
- "queue": object(),
+ for qspec in [object(), CustomQueueWrongProtocol()]:
+ with self.assertRaises(ValueError):
+ self.apply_config(
+ {
+ "version": 1,
+ "handlers": {
+ "queue_listener": {
+ "class": "logging.handlers.QueueHandler",
+ "queue": qspec,
+ },
},
- },
+ }
+ )
+ manager.assert_not_called()
+
+ @skip_if_tsan_fork
+ @support.requires_subprocess()
+ @unittest.skipUnless(support.Py_DEBUG, "requires a debug build for testing"
+ "assertions in multiprocessing")
+ def test_config_queue_handler_multiprocessing_context(self):
+ # regression test for gh-121723
+ if support.MS_WINDOWS:
+ start_methods = ['spawn']
+ else:
+ start_methods = ['spawn', 'fork', 'forkserver']
+ for start_method in start_methods:
+ with self.subTest(start_method=start_method):
+ ctx = multiprocessing.get_context(start_method)
+ with ctx.Manager() as manager:
+ q = manager.Queue()
+ records = []
+ # use 1 process and 1 task per child to put 1 record
+ with ctx.Pool(1, initializer=self._mpinit_issue121723,
+ initargs=(q, "text"), maxtasksperchild=1):
+ records.append(q.get(timeout=60))
+ self.assertTrue(q.empty())
+ self.assertEqual(len(records), 1)
+
+ @staticmethod
+ def _mpinit_issue121723(qspec, message_to_log):
+ # static method for pickling support
+ logging.config.dictConfig({
+ 'version': 1,
+ 'disable_existing_loggers': True,
+ 'handlers': {
+ 'log_to_parent': {
+ 'class': 'logging.handlers.QueueHandler',
+ 'queue': qspec
}
- )
- manager.assert_not_called()
+ },
+ 'root': {'handlers': ['log_to_parent'], 'level': 'DEBUG'}
+ })
+ # log a message (this creates a record put in the queue)
+ logging.getLogger().info(message_to_log)
@skip_if_tsan_fork
@support.requires_subprocess()
diff --git a/Misc/NEWS.d/next/Library/2024-07-23-10-59-38.gh-issue-121723.iJEf7e.rst b/Misc/NEWS.d/next/Library/2024-07-23-10-59-38.gh-issue-121723.iJEf7e.rst
new file mode 100644
index 00000000000000..cabb4024fb10f1
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-07-23-10-59-38.gh-issue-121723.iJEf7e.rst
@@ -0,0 +1,3 @@
+Make :func:`logging.config.dictConfig` accept any object implementing the
+Queue public API. See the :ref:`queue configuration `
+section for details. Patch by Bénédikt Tran.
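For illustration, a minimal sketch of a duck-typed queue that passes the new protocol check and can be passed straight to dictConfig (the class and handler names here are invented for the example):

import logging
import logging.config
import queue


class DelegatingQueue:
    """Implements the queue.Queue public API by delegating to a real Queue."""

    def __init__(self, maxsize=0):
        self._queue = queue.Queue(maxsize)

    def __getattr__(self, name):
        # empty/full/get/get_nowait/put/put_nowait/join/qsize/task_done all
        # resolve to callables here, which is what _is_queue_like_object() checks.
        return getattr(self._queue, name)


logging.config.dictConfig({
    "version": 1,
    "handlers": {
        "queued": {
            "class": "logging.handlers.QueueHandler",
            "queue": DelegatingQueue(),   # neither a queue.Queue nor a multiprocessing proxy
        },
    },
    "root": {"handlers": ["queued"], "level": "INFO"},
})
logging.getLogger("example").info("routed through the duck-typed queue")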
From b5e6fb39a246bf7ee470d58632cdf588bb9d0298 Mon Sep 17 00:00:00 2001
From: Sam Gross
Date: Fri, 2 Aug 2024 09:32:08 -0400
Subject: [PATCH 110/139] gh-120974: Make asyncio `swap_current_task` safe in
free-threaded build (#122317)
* gh-120974: Make asyncio `swap_current_task` safe in free-threaded build
---
Include/internal/pycore_dict.h | 7 ++++-
Modules/_asynciomodule.c | 37 ++++++++++++++---------
Objects/dictobject.c | 54 ++++++++++++++++++++++++----------
3 files changed, 67 insertions(+), 31 deletions(-)
diff --git a/Include/internal/pycore_dict.h b/Include/internal/pycore_dict.h
index fc304aca7fea10..a84246ee34efff 100644
--- a/Include/internal/pycore_dict.h
+++ b/Include/internal/pycore_dict.h
@@ -108,8 +108,13 @@ PyAPI_FUNC(PyObject *)_PyDict_LoadGlobal(PyDictObject *, PyDictObject *, PyObjec
/* Consumes references to key and value */
PyAPI_FUNC(int) _PyDict_SetItem_Take2(PyDictObject *op, PyObject *key, PyObject *value);
extern int _PyDict_SetItem_LockHeld(PyDictObject *dict, PyObject *name, PyObject *value);
-extern int _PyDict_GetItemRef_Unicode_LockHeld(PyDictObject *op, PyObject *key, PyObject **result);
+// Export for '_asyncio' shared extension
+PyAPI_FUNC(int) _PyDict_SetItem_KnownHash_LockHeld(PyDictObject *mp, PyObject *key,
+ PyObject *value, Py_hash_t hash);
+// Export for '_asyncio' shared extension
+PyAPI_FUNC(int) _PyDict_GetItemRef_KnownHash_LockHeld(PyDictObject *op, PyObject *key, Py_hash_t hash, PyObject **result);
extern int _PyDict_GetItemRef_KnownHash(PyDictObject *op, PyObject *key, Py_hash_t hash, PyObject **result);
+extern int _PyDict_GetItemRef_Unicode_LockHeld(PyDictObject *op, PyObject *key, PyObject **result);
extern int _PyObjectDict_SetItem(PyTypeObject *tp, PyObject *obj, PyObject **dictptr, PyObject *name, PyObject *value);
extern int _PyDict_Pop_KnownHash(
diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c
index 873c17cd78709d..c6eb43f044fdbd 100644
--- a/Modules/_asynciomodule.c
+++ b/Modules/_asynciomodule.c
@@ -2026,6 +2026,24 @@ leave_task(asyncio_state *state, PyObject *loop, PyObject *task)
return res;
}
+static PyObject *
+swap_current_task_lock_held(PyDictObject *current_tasks, PyObject *loop,
+ Py_hash_t hash, PyObject *task)
+{
+ PyObject *prev_task;
+ if (_PyDict_GetItemRef_KnownHash_LockHeld(current_tasks, loop, hash, &prev_task) < 0) {
+ return NULL;
+ }
+ if (_PyDict_SetItem_KnownHash_LockHeld(current_tasks, loop, task, hash) < 0) {
+ Py_XDECREF(prev_task);
+ return NULL;
+ }
+ if (prev_task == NULL) {
+ Py_RETURN_NONE;
+ }
+ return prev_task;
+}
+
static PyObject *
swap_current_task(asyncio_state *state, PyObject *loop, PyObject *task)
{
@@ -2041,24 +2059,15 @@ swap_current_task(asyncio_state *state, PyObject *loop, PyObject *task)
return prev_task;
}
- Py_hash_t hash;
- hash = PyObject_Hash(loop);
+ Py_hash_t hash = PyObject_Hash(loop);
if (hash == -1) {
return NULL;
}
- prev_task = _PyDict_GetItem_KnownHash(state->current_tasks, loop, hash);
- if (prev_task == NULL) {
- if (PyErr_Occurred()) {
- return NULL;
- }
- prev_task = Py_None;
- }
- Py_INCREF(prev_task);
- if (_PyDict_SetItem_KnownHash(state->current_tasks, loop, task, hash) == -1) {
- Py_DECREF(prev_task);
- return NULL;
- }
+ PyDictObject *current_tasks = (PyDictObject *)state->current_tasks;
+ Py_BEGIN_CRITICAL_SECTION(current_tasks);
+ prev_task = swap_current_task_lock_held(current_tasks, loop, hash, task);
+ Py_END_CRITICAL_SECTION();
return prev_task;
}
diff --git a/Objects/dictobject.c b/Objects/dictobject.c
index 6a16a04102a6c0..3e9f982ae070a3 100644
--- a/Objects/dictobject.c
+++ b/Objects/dictobject.c
@@ -2216,6 +2216,29 @@ _PyDict_GetItem_KnownHash(PyObject *op, PyObject *key, Py_hash_t hash)
return value; // borrowed reference
}
+/* Gets an item and provides a new reference if the value is present.
+ * Returns 1 if the key is present, 0 if the key is missing, and -1 if an
+ * exception occurred.
+*/
+int
+_PyDict_GetItemRef_KnownHash_LockHeld(PyDictObject *op, PyObject *key,
+ Py_hash_t hash, PyObject **result)
+{
+ PyObject *value;
+ Py_ssize_t ix = _Py_dict_lookup(op, key, hash, &value);
+ assert(ix >= 0 || value == NULL);
+ if (ix == DKIX_ERROR) {
+ *result = NULL;
+ return -1;
+ }
+ if (value == NULL) {
+ *result = NULL;
+ return 0; // missing key
+ }
+ *result = Py_NewRef(value);
+ return 1; // key is present
+}
+
/* Gets an item and provides a new reference if the value is present.
* Returns 1 if the key is present, 0 if the key is missing, and -1 if an
* exception occurred.
@@ -2460,11 +2483,21 @@ setitem_lock_held(PyDictObject *mp, PyObject *key, PyObject *value)
int
-_PyDict_SetItem_KnownHash(PyObject *op, PyObject *key, PyObject *value,
- Py_hash_t hash)
+_PyDict_SetItem_KnownHash_LockHeld(PyDictObject *mp, PyObject *key, PyObject *value,
+ Py_hash_t hash)
{
- PyDictObject *mp;
+ PyInterpreterState *interp = _PyInterpreterState_GET();
+ if (mp->ma_keys == Py_EMPTY_KEYS) {
+ return insert_to_emptydict(interp, mp, Py_NewRef(key), hash, Py_NewRef(value));
+ }
+ /* insertdict() handles any resizing that might be necessary */
+ return insertdict(interp, mp, Py_NewRef(key), hash, Py_NewRef(value));
+}
+int
+_PyDict_SetItem_KnownHash(PyObject *op, PyObject *key, PyObject *value,
+ Py_hash_t hash)
+{
if (!PyDict_Check(op)) {
PyErr_BadInternalCall();
return -1;
@@ -2472,21 +2505,10 @@ _PyDict_SetItem_KnownHash(PyObject *op, PyObject *key, PyObject *value,
assert(key);
assert(value);
assert(hash != -1);
- mp = (PyDictObject *)op;
int res;
- PyInterpreterState *interp = _PyInterpreterState_GET();
-
- Py_BEGIN_CRITICAL_SECTION(mp);
-
- if (mp->ma_keys == Py_EMPTY_KEYS) {
- res = insert_to_emptydict(interp, mp, Py_NewRef(key), hash, Py_NewRef(value));
- }
- else {
- /* insertdict() handles any resizing that might be necessary */
- res = insertdict(interp, mp, Py_NewRef(key), hash, Py_NewRef(value));
- }
-
+ Py_BEGIN_CRITICAL_SECTION(op);
+ res = _PyDict_SetItem_KnownHash_LockHeld((PyDictObject *)op, key, value, hash);
Py_END_CRITICAL_SECTION();
return res;
}
From 9fc1c992d6fcea0b7558c581846eef6bdd811f6c Mon Sep 17 00:00:00 2001
From: neonene <53406459+neonene@users.noreply.github.com>
Date: Fri, 2 Aug 2024 22:36:20 +0900
Subject: [PATCH 111/139] gh-122334: Fix crash when importing ssl after
re-initialization (#122481)
* Fix crash when importing ssl after re-initialization
---
Lib/test/test_embed.py | 19 +++++++++++++++++++
...-07-30-21-29-30.gh-issue-122334.LeoE1x.rst | 1 +
Python/getargs.c | 13 +++++++++++++
3 files changed, 33 insertions(+)
create mode 100644 Misc/NEWS.d/next/Library/2024-07-30-21-29-30.gh-issue-122334.LeoE1x.rst
diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py
index 9602f1a92c37c8..ab112d6be85b46 100644
--- a/Lib/test/test_embed.py
+++ b/Lib/test/test_embed.py
@@ -461,6 +461,25 @@ def add(cls, slot, own):
self.assertEqual(result, {})
self.assertEqual(out, '')
+ def test_getargs_reset_static_parser(self):
+ # Test _PyArg_Parser initializations via _PyArg_UnpackKeywords()
+ # https://github.com/python/cpython/issues/122334
+ code = textwrap.dedent("""
+ import _ssl
+ _ssl.txt2obj(txt='1.3')
+ print('1')
+
+ import _queue
+ _queue.SimpleQueue().put_nowait(item=None)
+ print('2')
+
+ import _zoneinfo
+ _zoneinfo.ZoneInfo.clear_cache(only_keys=['Foo/Bar'])
+ print('3')
+ """)
+ out, err = self.run_embedded_interpreter("test_repeated_init_exec", code)
+ self.assertEqual(out, '1\n2\n3\n' * INIT_LOOPS)
+
@unittest.skipIf(_testinternalcapi is None, "requires _testinternalcapi")
class InitConfigTests(EmbeddingTestsMixin, unittest.TestCase):
diff --git a/Misc/NEWS.d/next/Library/2024-07-30-21-29-30.gh-issue-122334.LeoE1x.rst b/Misc/NEWS.d/next/Library/2024-07-30-21-29-30.gh-issue-122334.LeoE1x.rst
new file mode 100644
index 00000000000000..cef801c950faa6
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-07-30-21-29-30.gh-issue-122334.LeoE1x.rst
@@ -0,0 +1 @@
+Fix crash when importing :mod:`ssl` after the main interpreter restarts.
diff --git a/Python/getargs.c b/Python/getargs.c
index b96ce3a22dae7c..ec2eeb15c832c3 100644
--- a/Python/getargs.c
+++ b/Python/getargs.c
@@ -2030,6 +2030,19 @@ parser_clear(struct _PyArg_Parser *parser)
if (parser->is_kwtuple_owned) {
Py_CLEAR(parser->kwtuple);
}
+
+ if (parser->format) {
+ parser->fname = NULL;
+ }
+ else {
+ assert(parser->fname != NULL);
+ }
+ parser->custom_msg = NULL;
+ parser->pos = 0;
+ parser->min = 0;
+ parser->max = 0;
+ parser->is_kwtuple_owned = 0;
+ parser->once.v = 0;
}
static PyObject*
From 498376d7a7d6f704f22a2c963130cc15c17e7a6f Mon Sep 17 00:00:00 2001
From: Irit Katriel <1055913+iritkatriel@users.noreply.github.com>
Date: Fri, 2 Aug 2024 15:40:42 +0100
Subject: [PATCH 112/139] gh-122445: populate only modified fields in
__static_attributes__ (#122446)
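A small sketch (not part of the patch) of the behaviour after this change; only names assigned through self are collected:

class C:
    def f(self):
        self.a = self.b = 42   # assigned through self -> recorded
        self.f()               # read-only attribute access -> not recorded
        print(self.arr)        # read-only attribute access -> not recorded

print(sorted(C.__static_attributes__))   # ['a', 'b']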
---
Doc/reference/datamodel.rst | 2 +-
Doc/whatsnew/3.13.rst | 2 +-
Lib/test/test_compile.py | 5 ++++-
...4-07-30-11-41-35.gh-issue-122445.Rq0bjS.rst | 1 +
Python/compile.c | 18 +++++++++++-------
5 files changed, 18 insertions(+), 10 deletions(-)
create mode 100644 Misc/NEWS.d/next/Core and Builtins/2024-07-30-11-41-35.gh-issue-122445.Rq0bjS.rst
diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst
index 2576f9a07284eb..aa61fbdd3131f6 100644
--- a/Doc/reference/datamodel.rst
+++ b/Doc/reference/datamodel.rst
@@ -999,7 +999,7 @@ Special attributes:
a :ref:`generic class `.
:attr:`~class.__static_attributes__`
- A tuple containing names of attributes of this class which are accessed
+ A tuple containing names of attributes of this class which are assigned
through ``self.X`` from any function in its body.
:attr:`__firstlineno__`
diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst
index 5761712a3c714b..5c57b5d7ebe2ff 100644
--- a/Doc/whatsnew/3.13.rst
+++ b/Doc/whatsnew/3.13.rst
@@ -247,7 +247,7 @@ Improved Error Messages
TypeError: split() got an unexpected keyword argument 'max_split'. Did you mean 'maxsplit'?
* Classes have a new :attr:`~class.__static_attributes__` attribute, populated by the compiler,
- with a tuple of names of attributes of this class which are accessed
+ with a tuple of names of attributes of this class which are assigned
through ``self.X`` from any function in its body. (Contributed by Irit Katriel
in :gh:`115775`.)
diff --git a/Lib/test/test_compile.py b/Lib/test/test_compile.py
index 9def47e101b496..4ebc605a3980f0 100644
--- a/Lib/test/test_compile.py
+++ b/Lib/test/test_compile.py
@@ -2089,12 +2089,15 @@ def f():
self.assertEqual(end_col, 20)
-class TestExpectedAttributes(unittest.TestCase):
+class TestStaticAttributes(unittest.TestCase):
def test_basic(self):
class C:
def f(self):
self.a = self.b = 42
+ # read fields are not included
+ self.f()
+ self.arr[3]
self.assertIsInstance(C.__static_attributes__, tuple)
self.assertEqual(sorted(C.__static_attributes__), ['a', 'b'])
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-07-30-11-41-35.gh-issue-122445.Rq0bjS.rst b/Misc/NEWS.d/next/Core and Builtins/2024-07-30-11-41-35.gh-issue-122445.Rq0bjS.rst
new file mode 100644
index 00000000000000..f5aa07c6513ea9
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-07-30-11-41-35.gh-issue-122445.Rq0bjS.rst
@@ -0,0 +1 @@
+Add only fields which are modified via self.* to :attr:`~class.__static_attributes__`.
diff --git a/Python/compile.c b/Python/compile.c
index 02b5345cedd0a3..87b2c2705474a4 100644
--- a/Python/compile.c
+++ b/Python/compile.c
@@ -563,8 +563,16 @@ compiler_unit_free(struct compiler_unit *u)
}
static int
-compiler_add_static_attribute_to_class(struct compiler *c, PyObject *attr)
+compiler_maybe_add_static_attribute_to_class(struct compiler *c, expr_ty e)
{
+ assert(e->kind == Attribute_kind);
+ expr_ty attr_value = e->v.Attribute.value;
+ if (attr_value->kind != Name_kind ||
+ e->v.Attribute.ctx != Store ||
+ !_PyUnicode_EqualToASCIIString(attr_value->v.Name.id, "self"))
+ {
+ return SUCCESS;
+ }
Py_ssize_t stack_size = PyList_GET_SIZE(c->c_stack);
for (Py_ssize_t i = stack_size - 1; i >= 0; i--) {
PyObject *capsule = PyList_GET_ITEM(c->c_stack, i);
@@ -573,7 +581,7 @@ compiler_add_static_attribute_to_class(struct compiler *c, PyObject *attr)
assert(u);
if (u->u_scope_type == COMPILER_SCOPE_CLASS) {
assert(u->u_static_attributes);
- RETURN_IF_ERROR(PySet_Add(u->u_static_attributes, attr));
+ RETURN_IF_ERROR(PySet_Add(u->u_static_attributes, e->v.Attribute.attr));
break;
}
}
@@ -6065,11 +6073,7 @@ compiler_visit_expr(struct compiler *c, expr_ty e)
ADDOP(c, loc, NOP);
return SUCCESS;
}
- if (e->v.Attribute.value->kind == Name_kind &&
- _PyUnicode_EqualToASCIIString(e->v.Attribute.value->v.Name.id, "self"))
- {
- RETURN_IF_ERROR(compiler_add_static_attribute_to_class(c, e->v.Attribute.attr));
- }
+ RETURN_IF_ERROR(compiler_maybe_add_static_attribute_to_class(c, e));
VISIT(c, expr, e->v.Attribute.value);
loc = LOC(e);
loc = update_start_location_to_match_attr(c, loc, e);
From 7aca84e557d0a6d242f322c493d53947a56bde91 Mon Sep 17 00:00:00 2001
From: Mark Shannon
Date: Fri, 2 Aug 2024 16:31:17 +0100
Subject: [PATCH 113/139] GH-117224: Move the body of a few large-ish micro-ops
into helper functions (GH-122601)
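The moved bodies keep their Python-visible behaviour; for instance, the "coroutine is being awaited already" error now raised from _PyEval_GetAwaitable() still surfaces the same way. An illustrative sketch (not from the patch):

import asyncio

async def work():
    await asyncio.sleep(1)

async def main():
    coro = work()
    task = asyncio.ensure_future(coro)   # a Task is now driving coro
    await asyncio.sleep(0)               # let the Task advance coro to its first await
    try:
        await coro                       # GET_AWAITABLE -> _PyEval_GetAwaitable()
    except RuntimeError as exc:
        print(exc)                       # coroutine is being awaited already
    finally:
        task.cancel()

asyncio.run(main())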
---
Include/internal/pycore_ceval.h | 4 +
Python/bytecodes.c | 125 ++---------------------------
Python/ceval.c | 135 ++++++++++++++++++++++++++++++++
Python/executor_cases.c.h | 117 ++-------------------------
Python/generated_cases.c.h | 117 ++-------------------------
5 files changed, 163 insertions(+), 335 deletions(-)
diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h
index 4fdee9fdf2a1ff..e4af731be0e87f 100644
--- a/Include/internal/pycore_ceval.h
+++ b/Include/internal/pycore_ceval.h
@@ -270,6 +270,10 @@ PyAPI_FUNC(PyObject **) _PyObjectArray_FromStackRefArray(_PyStackRef *input, Py_
PyAPI_FUNC(void) _PyObjectArray_Free(PyObject **array, PyObject **scratch);
+PyAPI_FUNC(PyObject *) _PyEval_GetANext(PyObject *aiter);
+PyAPI_FUNC(PyObject *) _PyEval_LoadGlobal(PyObject *globals, PyObject *builtins, PyObject *name);
+PyAPI_FUNC(PyObject *) _PyEval_GetAwaitable(PyObject *iterable, int oparg);
+PyAPI_FUNC(PyObject *) _PyEval_LoadName(PyThreadState *tstate, _PyInterpreterFrame *frame, PyObject *name);
/* Bits that can be set in PyThreadState.eval_breaker */
#define _PY_GIL_DROP_REQUEST_BIT (1U << 0)
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index 414725549d1c20..48b74f93b92ce8 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -1010,77 +1010,16 @@ dummy_func(
}
inst(GET_ANEXT, (aiter -- aiter, awaitable)) {
- unaryfunc getter = NULL;
- PyObject *next_iter = NULL;
- PyObject *awaitable_o;
- PyObject *aiter_o = PyStackRef_AsPyObjectBorrow(aiter);
- PyTypeObject *type = Py_TYPE(aiter_o);
-
- if (PyAsyncGen_CheckExact(aiter_o)) {
- awaitable_o = type->tp_as_async->am_anext(aiter_o);
- if (awaitable_o == NULL) {
- ERROR_NO_POP();
- }
- } else {
- if (type->tp_as_async != NULL){
- getter = type->tp_as_async->am_anext;
- }
-
- if (getter != NULL) {
- next_iter = (*getter)(aiter_o);
- if (next_iter == NULL) {
- ERROR_NO_POP();
- }
- }
- else {
- _PyErr_Format(tstate, PyExc_TypeError,
- "'async for' requires an iterator with "
- "__anext__ method, got %.100s",
- type->tp_name);
- ERROR_NO_POP();
- }
-
- awaitable_o = _PyCoro_GetAwaitableIter(next_iter);
- if (awaitable_o == NULL) {
- _PyErr_FormatFromCause(
- PyExc_TypeError,
- "'async for' received an invalid object "
- "from __anext__: %.100s",
- Py_TYPE(next_iter)->tp_name);
-
- Py_DECREF(next_iter);
- ERROR_NO_POP();
- } else {
- Py_DECREF(next_iter);
- }
+ PyObject *awaitable_o = _PyEval_GetANext(PyStackRef_AsPyObjectBorrow(aiter));
+ if (awaitable_o == NULL) {
+ ERROR_NO_POP();
}
awaitable = PyStackRef_FromPyObjectSteal(awaitable_o);
}
inst(GET_AWAITABLE, (iterable -- iter)) {
- PyObject *iter_o = _PyCoro_GetAwaitableIter(PyStackRef_AsPyObjectBorrow(iterable));
-
- if (iter_o == NULL) {
- _PyEval_FormatAwaitableError(tstate,
- Py_TYPE(PyStackRef_AsPyObjectBorrow(iterable)), oparg);
- }
-
+ PyObject *iter_o = _PyEval_GetAwaitable(PyStackRef_AsPyObjectBorrow(iterable), oparg);
DECREF_INPUTS();
-
- if (iter_o != NULL && PyCoro_CheckExact(iter_o)) {
- PyObject *yf = _PyGen_yf((PyGenObject*)iter_o);
- if (yf != NULL) {
- /* `iter` is a coroutine object that is being
- awaited, `yf` is a pointer to the current awaitable
- being awaited on. */
- Py_DECREF(yf);
- Py_CLEAR(iter_o);
- _PyErr_SetString(tstate, PyExc_RuntimeError,
- "coroutine is being awaited already");
- /* The code below jumps to `error` if `iter` is NULL. */
- }
- }
-
ERROR_IF(iter_o == NULL, error);
iter = PyStackRef_FromPyObjectSteal(iter_o);
}
@@ -1527,27 +1466,9 @@ dummy_func(
}
inst(LOAD_NAME, (-- v)) {
- PyObject *v_o;
- PyObject *mod_or_class_dict = LOCALS();
- if (mod_or_class_dict == NULL) {
- _PyErr_SetString(tstate, PyExc_SystemError,
- "no locals found");
- ERROR_IF(true, error);
- }
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
- ERROR_IF(PyMapping_GetOptionalItem(mod_or_class_dict, name, &v_o) < 0, error);
- if (v_o == NULL) {
- ERROR_IF(PyDict_GetItemRef(GLOBALS(), name, &v_o) < 0, error);
- if (v_o == NULL) {
- ERROR_IF(PyMapping_GetOptionalItem(BUILTINS(), name, &v_o) < 0, error);
- if (v_o == NULL) {
- _PyEval_FormatExcCheckArg(
- tstate, PyExc_NameError,
- NAME_ERROR_MSG, name);
- ERROR_IF(true, error);
- }
- }
- }
+ PyObject *v_o = _PyEval_LoadName(tstate, frame, name);
+ ERROR_IF(v_o == NULL, error);
v = PyStackRef_FromPyObjectSteal(v_o);
}
@@ -1571,38 +1492,8 @@ dummy_func(
op(_LOAD_GLOBAL, ( -- res, null if (oparg & 1))) {
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
- PyObject *res_o;
- if (PyDict_CheckExact(GLOBALS())
- && PyDict_CheckExact(BUILTINS()))
- {
- res_o = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(),
- (PyDictObject *)BUILTINS(),
- name);
- if (res_o == NULL) {
- if (!_PyErr_Occurred(tstate)) {
- /* _PyDict_LoadGlobal() returns NULL without raising
- * an exception if the key doesn't exist */
- _PyEval_FormatExcCheckArg(tstate, PyExc_NameError,
- NAME_ERROR_MSG, name);
- }
- ERROR_IF(true, error);
- }
- }
- else {
- /* Slow-path if globals or builtins is not a dict */
- /* namespace 1: globals */
- ERROR_IF(PyMapping_GetOptionalItem(GLOBALS(), name, &res_o) < 0, error);
- if (res_o == NULL) {
- /* namespace 2: builtins */
- ERROR_IF(PyMapping_GetOptionalItem(BUILTINS(), name, &res_o) < 0, error);
- if (res_o == NULL) {
- _PyEval_FormatExcCheckArg(
- tstate, PyExc_NameError,
- NAME_ERROR_MSG, name);
- ERROR_IF(true, error);
- }
- }
- }
+ PyObject *res_o = _PyEval_LoadGlobal(GLOBALS(), BUILTINS(), name);
+ ERROR_IF(res_o == NULL, error);
null = PyStackRef_NULL;
res = PyStackRef_FromPyObjectSteal(res_o);
}
diff --git a/Python/ceval.c b/Python/ceval.c
index 425a2a01bea8ed..f1663ee539aeac 100644
--- a/Python/ceval.c
+++ b/Python/ceval.c
@@ -720,6 +720,7 @@ _PyObjectArray_Free(PyObject **array, PyObject **scratch)
}
}
+
/* _PyEval_EvalFrameDefault() is a *big* function,
* so consume 3 units of C stack */
#define PY_EVAL_C_STACK_UNITS 2
@@ -3031,3 +3032,137 @@ void Py_LeaveRecursiveCall(void)
{
_Py_LeaveRecursiveCall();
}
+
+PyObject *
+_PyEval_GetANext(PyObject *aiter)
+{
+ unaryfunc getter = NULL;
+ PyObject *next_iter = NULL;
+ PyTypeObject *type = Py_TYPE(aiter);
+ if (PyAsyncGen_CheckExact(aiter)) {
+ return type->tp_as_async->am_anext(aiter);
+ }
+ if (type->tp_as_async != NULL){
+ getter = type->tp_as_async->am_anext;
+ }
+
+ if (getter != NULL) {
+ next_iter = (*getter)(aiter);
+ if (next_iter == NULL) {
+ return NULL;
+ }
+ }
+ else {
+ PyErr_Format(PyExc_TypeError,
+ "'async for' requires an iterator with "
+ "__anext__ method, got %.100s",
+ type->tp_name);
+ return NULL;
+ }
+
+ PyObject *awaitable = _PyCoro_GetAwaitableIter(next_iter);
+ if (awaitable == NULL) {
+ _PyErr_FormatFromCause(
+ PyExc_TypeError,
+ "'async for' received an invalid object "
+ "from __anext__: %.100s",
+ Py_TYPE(next_iter)->tp_name);
+ }
+ Py_DECREF(next_iter);
+ return awaitable;
+}
+
+PyObject *
+_PyEval_LoadGlobal(PyObject *globals, PyObject *builtins, PyObject *name)
+{
+ PyObject *res;
+ if (PyDict_CheckExact(globals) && PyDict_CheckExact(builtins)) {
+ res = _PyDict_LoadGlobal((PyDictObject *)globals,
+ (PyDictObject *)builtins,
+ name);
+ if (res == NULL && !PyErr_Occurred()) {
+ /* _PyDict_LoadGlobal() returns NULL without raising
+ * an exception if the key doesn't exist */
+ _PyEval_FormatExcCheckArg(PyThreadState_GET(), PyExc_NameError,
+ NAME_ERROR_MSG, name);
+ }
+ }
+ else {
+ /* Slow-path if globals or builtins is not a dict */
+ /* namespace 1: globals */
+ if (PyMapping_GetOptionalItem(globals, name, &res) < 0) {
+ return NULL;
+ }
+ if (res == NULL) {
+ /* namespace 2: builtins */
+ if (PyMapping_GetOptionalItem(builtins, name, &res) < 0) {
+ return NULL;
+ }
+ if (res == NULL) {
+ _PyEval_FormatExcCheckArg(
+ PyThreadState_GET(), PyExc_NameError,
+ NAME_ERROR_MSG, name);
+ }
+ }
+ }
+ return res;
+}
+
+PyObject *
+_PyEval_GetAwaitable(PyObject *iterable, int oparg)
+{
+ PyObject *iter = _PyCoro_GetAwaitableIter(iterable);
+
+ if (iter == NULL) {
+ _PyEval_FormatAwaitableError(PyThreadState_GET(),
+ Py_TYPE(iterable), oparg);
+ }
+ else if (PyCoro_CheckExact(iter)) {
+ PyObject *yf = _PyGen_yf((PyGenObject*)iter);
+ if (yf != NULL) {
+ /* `iter` is a coroutine object that is being
+ awaited, `yf` is a pointer to the current awaitable
+ being awaited on. */
+ Py_DECREF(yf);
+ Py_CLEAR(iter);
+ _PyErr_SetString(PyThreadState_GET(), PyExc_RuntimeError,
+ "coroutine is being awaited already");
+ }
+ }
+ return iter;
+}
+
+PyObject *
+_PyEval_LoadName(PyThreadState *tstate, _PyInterpreterFrame *frame, PyObject *name)
+{
+
+ PyObject *value;
+ if (frame->f_locals == NULL) {
+ _PyErr_SetString(tstate, PyExc_SystemError,
+ "no locals found");
+ return NULL;
+ }
+ if (PyMapping_GetOptionalItem(frame->f_locals, name, &value) < 0) {
+ return NULL;
+ }
+ if (value != NULL) {
+ return value;
+ }
+ if (PyDict_GetItemRef(frame->f_globals, name, &value) < 0) {
+ return NULL;
+ }
+ if (value != NULL) {
+ return value;
+ }
+ if (PyMapping_GetOptionalItem(frame->f_builtins, name, &value) < 0) {
+ return NULL;
+ }
+ if (value == NULL) {
+ _PyEval_FormatExcCheckArg(
+ tstate, PyExc_NameError,
+ NAME_ERROR_MSG, name);
+ }
+ return value;
+}
+
+
diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h
index 61e1c5cf5c289d..7f89196192504b 100644
--- a/Python/executor_cases.c.h
+++ b/Python/executor_cases.c.h
@@ -1243,45 +1243,9 @@
_PyStackRef aiter;
_PyStackRef awaitable;
aiter = stack_pointer[-1];
- unaryfunc getter = NULL;
- PyObject *next_iter = NULL;
- PyObject *awaitable_o;
- PyObject *aiter_o = PyStackRef_AsPyObjectBorrow(aiter);
- PyTypeObject *type = Py_TYPE(aiter_o);
- if (PyAsyncGen_CheckExact(aiter_o)) {
- awaitable_o = type->tp_as_async->am_anext(aiter_o);
- if (awaitable_o == NULL) {
- JUMP_TO_ERROR();
- }
- } else {
- if (type->tp_as_async != NULL){
- getter = type->tp_as_async->am_anext;
- }
- if (getter != NULL) {
- next_iter = (*getter)(aiter_o);
- if (next_iter == NULL) {
- JUMP_TO_ERROR();
- }
- }
- else {
- _PyErr_Format(tstate, PyExc_TypeError,
- "'async for' requires an iterator with "
- "__anext__ method, got %.100s",
- type->tp_name);
- JUMP_TO_ERROR();
- }
- awaitable_o = _PyCoro_GetAwaitableIter(next_iter);
- if (awaitable_o == NULL) {
- _PyErr_FormatFromCause(
- PyExc_TypeError,
- "'async for' received an invalid object "
- "from __anext__: %.100s",
- Py_TYPE(next_iter)->tp_name);
- Py_DECREF(next_iter);
- JUMP_TO_ERROR();
- } else {
- Py_DECREF(next_iter);
- }
+ PyObject *awaitable_o = _PyEval_GetANext(PyStackRef_AsPyObjectBorrow(aiter));
+ if (awaitable_o == NULL) {
+ JUMP_TO_ERROR();
}
awaitable = PyStackRef_FromPyObjectSteal(awaitable_o);
stack_pointer[0] = awaitable;
@@ -1295,25 +1259,8 @@
_PyStackRef iter;
oparg = CURRENT_OPARG();
iterable = stack_pointer[-1];
- PyObject *iter_o = _PyCoro_GetAwaitableIter(PyStackRef_AsPyObjectBorrow(iterable));
- if (iter_o == NULL) {
- _PyEval_FormatAwaitableError(tstate,
- Py_TYPE(PyStackRef_AsPyObjectBorrow(iterable)), oparg);
- }
+ PyObject *iter_o = _PyEval_GetAwaitable(PyStackRef_AsPyObjectBorrow(iterable), oparg);
PyStackRef_CLOSE(iterable);
- if (iter_o != NULL && PyCoro_CheckExact(iter_o)) {
- PyObject *yf = _PyGen_yf((PyGenObject*)iter_o);
- if (yf != NULL) {
- /* `iter` is a coroutine object that is being
- awaited, `yf` is a pointer to the current awaitable
- being awaited on. */
- Py_DECREF(yf);
- Py_CLEAR(iter_o);
- _PyErr_SetString(tstate, PyExc_RuntimeError,
- "coroutine is being awaited already");
- /* The code below jumps to `error` if `iter` is NULL. */
- }
- }
if (iter_o == NULL) JUMP_TO_ERROR();
iter = PyStackRef_FromPyObjectSteal(iter_o);
stack_pointer[-1] = iter;
@@ -1676,27 +1623,9 @@
case _LOAD_NAME: {
_PyStackRef v;
oparg = CURRENT_OPARG();
- PyObject *v_o;
- PyObject *mod_or_class_dict = LOCALS();
- if (mod_or_class_dict == NULL) {
- _PyErr_SetString(tstate, PyExc_SystemError,
- "no locals found");
- if (true) JUMP_TO_ERROR();
- }
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
- if (PyMapping_GetOptionalItem(mod_or_class_dict, name, &v_o) < 0) JUMP_TO_ERROR();
- if (v_o == NULL) {
- if (PyDict_GetItemRef(GLOBALS(), name, &v_o) < 0) JUMP_TO_ERROR();
- if (v_o == NULL) {
- if (PyMapping_GetOptionalItem(BUILTINS(), name, &v_o) < 0) JUMP_TO_ERROR();
- if (v_o == NULL) {
- _PyEval_FormatExcCheckArg(
- tstate, PyExc_NameError,
- NAME_ERROR_MSG, name);
- if (true) JUMP_TO_ERROR();
- }
- }
- }
+ PyObject *v_o = _PyEval_LoadName(tstate, frame, name);
+ if (v_o == NULL) JUMP_TO_ERROR();
v = PyStackRef_FromPyObjectSteal(v_o);
stack_pointer[0] = v;
stack_pointer += 1;
@@ -1709,38 +1638,8 @@
_PyStackRef null = PyStackRef_NULL;
oparg = CURRENT_OPARG();
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
- PyObject *res_o;
- if (PyDict_CheckExact(GLOBALS())
- && PyDict_CheckExact(BUILTINS()))
- {
- res_o = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(),
- (PyDictObject *)BUILTINS(),
- name);
- if (res_o == NULL) {
- if (!_PyErr_Occurred(tstate)) {
- /* _PyDict_LoadGlobal() returns NULL without raising
- * an exception if the key doesn't exist */
- _PyEval_FormatExcCheckArg(tstate, PyExc_NameError,
- NAME_ERROR_MSG, name);
- }
- if (true) JUMP_TO_ERROR();
- }
- }
- else {
- /* Slow-path if globals or builtins is not a dict */
- /* namespace 1: globals */
- if (PyMapping_GetOptionalItem(GLOBALS(), name, &res_o) < 0) JUMP_TO_ERROR();
- if (res_o == NULL) {
- /* namespace 2: builtins */
- if (PyMapping_GetOptionalItem(BUILTINS(), name, &res_o) < 0) JUMP_TO_ERROR();
- if (res_o == NULL) {
- _PyEval_FormatExcCheckArg(
- tstate, PyExc_NameError,
- NAME_ERROR_MSG, name);
- if (true) JUMP_TO_ERROR();
- }
- }
- }
+ PyObject *res_o = _PyEval_LoadGlobal(GLOBALS(), BUILTINS(), name);
+ if (res_o == NULL) JUMP_TO_ERROR();
null = PyStackRef_NULL;
res = PyStackRef_FromPyObjectSteal(res_o);
stack_pointer[0] = res;
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h
index 4efaf899f23d1a..bed194e34d5376 100644
--- a/Python/generated_cases.c.h
+++ b/Python/generated_cases.c.h
@@ -3468,45 +3468,9 @@
_PyStackRef aiter;
_PyStackRef awaitable;
aiter = stack_pointer[-1];
- unaryfunc getter = NULL;
- PyObject *next_iter = NULL;
- PyObject *awaitable_o;
- PyObject *aiter_o = PyStackRef_AsPyObjectBorrow(aiter);
- PyTypeObject *type = Py_TYPE(aiter_o);
- if (PyAsyncGen_CheckExact(aiter_o)) {
- awaitable_o = type->tp_as_async->am_anext(aiter_o);
- if (awaitable_o == NULL) {
- goto error;
- }
- } else {
- if (type->tp_as_async != NULL){
- getter = type->tp_as_async->am_anext;
- }
- if (getter != NULL) {
- next_iter = (*getter)(aiter_o);
- if (next_iter == NULL) {
- goto error;
- }
- }
- else {
- _PyErr_Format(tstate, PyExc_TypeError,
- "'async for' requires an iterator with "
- "__anext__ method, got %.100s",
- type->tp_name);
- goto error;
- }
- awaitable_o = _PyCoro_GetAwaitableIter(next_iter);
- if (awaitable_o == NULL) {
- _PyErr_FormatFromCause(
- PyExc_TypeError,
- "'async for' received an invalid object "
- "from __anext__: %.100s",
- Py_TYPE(next_iter)->tp_name);
- Py_DECREF(next_iter);
- goto error;
- } else {
- Py_DECREF(next_iter);
- }
+ PyObject *awaitable_o = _PyEval_GetANext(PyStackRef_AsPyObjectBorrow(aiter));
+ if (awaitable_o == NULL) {
+ goto error;
}
awaitable = PyStackRef_FromPyObjectSteal(awaitable_o);
stack_pointer[0] = awaitable;
@@ -3522,25 +3486,8 @@
_PyStackRef iterable;
_PyStackRef iter;
iterable = stack_pointer[-1];
- PyObject *iter_o = _PyCoro_GetAwaitableIter(PyStackRef_AsPyObjectBorrow(iterable));
- if (iter_o == NULL) {
- _PyEval_FormatAwaitableError(tstate,
- Py_TYPE(PyStackRef_AsPyObjectBorrow(iterable)), oparg);
- }
+ PyObject *iter_o = _PyEval_GetAwaitable(PyStackRef_AsPyObjectBorrow(iterable), oparg);
PyStackRef_CLOSE(iterable);
- if (iter_o != NULL && PyCoro_CheckExact(iter_o)) {
- PyObject *yf = _PyGen_yf((PyGenObject*)iter_o);
- if (yf != NULL) {
- /* `iter` is a coroutine object that is being
- awaited, `yf` is a pointer to the current awaitable
- being awaited on. */
- Py_DECREF(yf);
- Py_CLEAR(iter_o);
- _PyErr_SetString(tstate, PyExc_RuntimeError,
- "coroutine is being awaited already");
- /* The code below jumps to `error` if `iter` is NULL. */
- }
- }
if (iter_o == NULL) goto pop_1_error;
iter = PyStackRef_FromPyObjectSteal(iter_o);
stack_pointer[-1] = iter;
@@ -5231,38 +5178,8 @@
// _LOAD_GLOBAL
{
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
- PyObject *res_o;
- if (PyDict_CheckExact(GLOBALS())
- && PyDict_CheckExact(BUILTINS()))
- {
- res_o = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(),
- (PyDictObject *)BUILTINS(),
- name);
- if (res_o == NULL) {
- if (!_PyErr_Occurred(tstate)) {
- /* _PyDict_LoadGlobal() returns NULL without raising
- * an exception if the key doesn't exist */
- _PyEval_FormatExcCheckArg(tstate, PyExc_NameError,
- NAME_ERROR_MSG, name);
- }
- if (true) goto error;
- }
- }
- else {
- /* Slow-path if globals or builtins is not a dict */
- /* namespace 1: globals */
- if (PyMapping_GetOptionalItem(GLOBALS(), name, &res_o) < 0) goto error;
- if (res_o == NULL) {
- /* namespace 2: builtins */
- if (PyMapping_GetOptionalItem(BUILTINS(), name, &res_o) < 0) goto error;
- if (res_o == NULL) {
- _PyEval_FormatExcCheckArg(
- tstate, PyExc_NameError,
- NAME_ERROR_MSG, name);
- if (true) goto error;
- }
- }
- }
+ PyObject *res_o = _PyEval_LoadGlobal(GLOBALS(), BUILTINS(), name);
+ if (res_o == NULL) goto error;
null = PyStackRef_NULL;
res = PyStackRef_FromPyObjectSteal(res_o);
}
@@ -5375,27 +5292,9 @@
next_instr += 1;
INSTRUCTION_STATS(LOAD_NAME);
_PyStackRef v;
- PyObject *v_o;
- PyObject *mod_or_class_dict = LOCALS();
- if (mod_or_class_dict == NULL) {
- _PyErr_SetString(tstate, PyExc_SystemError,
- "no locals found");
- if (true) goto error;
- }
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
- if (PyMapping_GetOptionalItem(mod_or_class_dict, name, &v_o) < 0) goto error;
- if (v_o == NULL) {
- if (PyDict_GetItemRef(GLOBALS(), name, &v_o) < 0) goto error;
- if (v_o == NULL) {
- if (PyMapping_GetOptionalItem(BUILTINS(), name, &v_o) < 0) goto error;
- if (v_o == NULL) {
- _PyEval_FormatExcCheckArg(
- tstate, PyExc_NameError,
- NAME_ERROR_MSG, name);
- if (true) goto error;
- }
- }
- }
+ PyObject *v_o = _PyEval_LoadName(tstate, frame, name);
+ if (v_o == NULL) goto error;
v = PyStackRef_FromPyObjectSteal(v_o);
stack_pointer[0] = v;
stack_pointer += 1;
From 4b63cd170e5dd840bffc80922f09f2d69932ff5c Mon Sep 17 00:00:00 2001
From: Sam Gross
Date: Fri, 2 Aug 2024 12:11:44 -0400
Subject: [PATCH 114/139] gh-122527: Fix a crash on deallocation of
`PyStructSequence` (GH-122577)
The `PyStructSequence` destructor would crash if it was deallocated after
its type's dictionary was cleared by the GC, because it couldn't compute
the "real size" of the instance. This could occur with relatively
straightforward code in the free-threaded build or with a reference
cycle involving the type in the default build, due to differing orders
in which `tp_clear()` was called.
Account for the non-sequence fields in `tp_basicsize` and use that,
along with `Py_SIZE()`, to compute the "real" size of a
`PyStructSequence` in the dealloc function. This avoids the accesses to
the type's dictionary during dealloc, which were unsafe.
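For illustration, a minimal Python sketch of the cycle described above (the attribute name is arbitrary; before this fix, the crash surfaced when the GC tore the cycle down, for example at interpreter shutdown):

    import time

    t = time.gmtime()    # a PyStructSequence instance (time.struct_time)
    type(t).cycle = t    # the type's dict now references the instance,
                         # so the instance and its type form a reference cycle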
---
Lib/test/test_structseq.py | 13 +++++++++
Lib/test/test_sys.py | 3 +-
...-08-01-19-13-58.gh-issue-122527.eztso6.rst | 4 +++
Objects/structseq.c | 28 +++++++++++++++----
4 files changed, 41 insertions(+), 7 deletions(-)
create mode 100644 Misc/NEWS.d/next/Core_and_Builtins/2024-08-01-19-13-58.gh-issue-122527.eztso6.rst
diff --git a/Lib/test/test_structseq.py b/Lib/test/test_structseq.py
index 6aec63e2603412..d0bc0bd7b61520 100644
--- a/Lib/test/test_structseq.py
+++ b/Lib/test/test_structseq.py
@@ -2,8 +2,10 @@
import os
import pickle
import re
+import textwrap
import time
import unittest
+from test.support import script_helper
class StructSeqTest(unittest.TestCase):
@@ -342,6 +344,17 @@ def test_copy_replace_with_unnamed_fields(self):
with self.assertRaisesRegex(TypeError, error_message):
copy.replace(r, st_mode=1, error=2)
+ def test_reference_cycle(self):
+ # gh-122527: Check that a structseq that's part of a reference cycle
+ # with its own type doesn't crash. Previously, if the type's dictionary
+ # was cleared first, the structseq instance would crash in the
+ # destructor.
+ script_helper.assert_python_ok("-c", textwrap.dedent(r"""
+ import time
+ t = time.gmtime()
+                type(t).refcycle = t
+ """))
+
if __name__ == "__main__":
unittest.main()
diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py
index 7e4bc980b390f7..709355e293f2fc 100644
--- a/Lib/test/test_sys.py
+++ b/Lib/test/test_sys.py
@@ -1823,7 +1823,8 @@ def test_pythontypes(self):
# symtable entry
# XXX
# sys.flags
- check(sys.flags, vsize('') + self.P * len(sys.flags))
+ # FIXME: The +1 will not be necessary once gh-122575 is fixed
+ check(sys.flags, vsize('') + self.P * (1 + len(sys.flags)))
def test_asyncgen_hooks(self):
old = sys.get_asyncgen_hooks()
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-08-01-19-13-58.gh-issue-122527.eztso6.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-08-01-19-13-58.gh-issue-122527.eztso6.rst
new file mode 100644
index 00000000000000..f697ed99d0c523
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-08-01-19-13-58.gh-issue-122527.eztso6.rst
@@ -0,0 +1,4 @@
+Fix a crash that occurred when a ``PyStructSequence`` was deallocated after
+its type's dictionary was cleared by the GC. The type's
+:c:member:`~PyTypeObject.tp_basicsize` now accounts for non-sequence fields
+that aren't included in the :c:macro:`Py_SIZE` of the sequence.
diff --git a/Objects/structseq.c b/Objects/structseq.c
index d8289f2638db0f..94f09b3ee0a337 100644
--- a/Objects/structseq.c
+++ b/Objects/structseq.c
@@ -41,12 +41,20 @@ get_type_attr_as_size(PyTypeObject *tp, PyObject *name)
get_type_attr_as_size(tp, &_Py_ID(n_sequence_fields))
#define REAL_SIZE_TP(tp) \
get_type_attr_as_size(tp, &_Py_ID(n_fields))
-#define REAL_SIZE(op) REAL_SIZE_TP(Py_TYPE(op))
+#define REAL_SIZE(op) get_real_size((PyObject *)op)
#define UNNAMED_FIELDS_TP(tp) \
get_type_attr_as_size(tp, &_Py_ID(n_unnamed_fields))
#define UNNAMED_FIELDS(op) UNNAMED_FIELDS_TP(Py_TYPE(op))
+static Py_ssize_t
+get_real_size(PyObject *op)
+{
+ // Compute the real size from the visible size (i.e., Py_SIZE()) and the
+ // number of non-sequence fields accounted for in tp_basicsize.
+ Py_ssize_t hidden = Py_TYPE(op)->tp_basicsize - offsetof(PyStructSequence, ob_item);
+ return Py_SIZE(op) + hidden / sizeof(PyObject *);
+}
PyObject *
PyStructSequence_New(PyTypeObject *type)
@@ -120,6 +128,9 @@ structseq_dealloc(PyStructSequence *obj)
PyObject_GC_UnTrack(obj);
PyTypeObject *tp = Py_TYPE(obj);
+ // gh-122527: We can't use REAL_SIZE_TP() or any macros that access the
+ // type's dictionary here, because the dictionary may have already been
+ // cleared by the garbage collector.
size = REAL_SIZE(obj);
for (i = 0; i < size; ++i) {
Py_XDECREF(obj->ob_item[i]);
@@ -565,10 +576,14 @@ initialize_members(PyStructSequence_Desc *desc,
static void
initialize_static_fields(PyTypeObject *type, PyStructSequence_Desc *desc,
- PyMemberDef *tp_members, unsigned long tp_flags)
+ PyMemberDef *tp_members, Py_ssize_t n_members,
+ unsigned long tp_flags)
{
type->tp_name = desc->name;
- type->tp_basicsize = sizeof(PyStructSequence) - sizeof(PyObject *);
+ // Account for hidden members in tp_basicsize because they are not
+ // included in the variable size.
+ Py_ssize_t n_hidden = n_members - desc->n_in_sequence;
+ type->tp_basicsize = sizeof(PyStructSequence) + (n_hidden - 1) * sizeof(PyObject *);
type->tp_itemsize = sizeof(PyObject *);
type->tp_dealloc = (destructor)structseq_dealloc;
type->tp_repr = (reprfunc)structseq_repr;
@@ -621,7 +636,7 @@ _PyStructSequence_InitBuiltinWithFlags(PyInterpreterState *interp,
if (members == NULL) {
goto error;
}
- initialize_static_fields(type, desc, members, tp_flags);
+ initialize_static_fields(type, desc, members, n_members, tp_flags);
_Py_SetImmortal((PyObject *)type);
}
@@ -684,7 +699,7 @@ PyStructSequence_InitType2(PyTypeObject *type, PyStructSequence_Desc *desc)
if (members == NULL) {
return -1;
}
- initialize_static_fields(type, desc, members, 0);
+ initialize_static_fields(type, desc, members, n_members, 0);
if (initialize_static_type(type, desc, n_members, n_unnamed_members) < 0) {
PyMem_Free(members);
return -1;
@@ -760,7 +775,8 @@ _PyStructSequence_NewType(PyStructSequence_Desc *desc, unsigned long tp_flags)
/* The name in this PyType_Spec is statically allocated so it is */
/* expected that it'll outlive the PyType_Spec */
spec.name = desc->name;
- spec.basicsize = sizeof(PyStructSequence) - sizeof(PyObject *);
+ Py_ssize_t hidden = n_members - desc->n_in_sequence;
+ spec.basicsize = (int)(sizeof(PyStructSequence) + (hidden - 1) * sizeof(PyObject *));
spec.itemsize = sizeof(PyObject *);
spec.flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | tp_flags;
spec.slots = slots;
From fe0a28d850943cf2ba132c9b0a933bb0c98ff0ae Mon Sep 17 00:00:00 2001
From: Irit Katriel <1055913+iritkatriel@users.noreply.github.com>
Date: Fri, 2 Aug 2024 23:56:51 +0100
Subject: [PATCH 115/139] gh-122560: add test that comprehension loop var
appears only in one scope of the symtable (#122582)
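As a rough sketch of what the new test verifies, the symbol tables of a comprehension can be walked from Python and the scopes containing the loop variable collected (names here are illustrative, not the test itself):

    import symtable

    def walk(st):
        yield st
        for child in st.get_children():
            yield from walk(child)

    top = symtable.symtable("[x for x in [1]]", "<string>", "exec")
    scopes = [t.get_name() for t in walk(top) if "x" in t.get_identifiers()]
    print(scopes)   # the loop variable should appear in exactly one scope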
---
Lib/test/test_symtable.py | 21 +++++++++++++++++++++
1 file changed, 21 insertions(+)
diff --git a/Lib/test/test_symtable.py b/Lib/test/test_symtable.py
index bd367c1591c744..24d89b09d946ad 100644
--- a/Lib/test/test_symtable.py
+++ b/Lib/test/test_symtable.py
@@ -528,6 +528,27 @@ def test_symtable_entry_repr(self):
self.assertEqual(repr(self.top._table), expected)
+class ComprehensionTests(unittest.TestCase):
+ def get_identifiers_recursive(self, st, res):
+ res.extend(st.get_identifiers())
+ for ch in st.get_children():
+ self.get_identifiers_recursive(ch, res)
+
+ def test_loopvar_in_only_one_scope(self):
+ # ensure that the loop variable appears only once in the symtable
+ comps = [
+ "[x for x in [1]]",
+ "{x for x in [1]}",
+ "{x:x*x for x in [1]}",
+ ]
+ for comp in comps:
+ with self.subTest(comp=comp):
+ st = symtable.symtable(comp, "?", "exec")
+ ids = []
+ self.get_identifiers_recursive(st, ids)
+ self.assertEqual(len([x for x in ids if x == 'x']), 1)
+
+
class CommandLineTest(unittest.TestCase):
maxDiff = None
From efcd65cd84d5ebcc6cacb67971f235a726a205e7 Mon Sep 17 00:00:00 2001
From: Serhiy Storchaka
Date: Sat, 3 Aug 2024 12:45:45 +0300
Subject: [PATCH 116/139] gh-122313: Clean up deep recursion guarding code in
the compiler (GH-122640)
Add ENTER_RECURSIVE and LEAVE_RECURSIVE macros in ast.c, ast_opt.c and
symtable.c. Remove VISIT_QUIT macro in symtable.c.
The current recursion depth counter only needs to be updated during
normal execution -- all functions should just return an error code
if an error occurs.
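The limit these macros enforce is observable from Python: compiling a sufficiently deep AST raises RecursionError instead of overflowing the C stack. A sketch with an arbitrarily chosen depth (the exact limit depends on the build):

    import ast

    # Build a deeply nested UnaryOp chain directly, bypassing the parser.
    node = ast.Constant(1)
    for _ in range(100_000):
        node = ast.UnaryOp(op=ast.USub(), operand=node)
    tree = ast.Expression(node)
    ast.fix_missing_locations(tree)
    try:
        compile(tree, "<synthetic>", "eval")
    except RecursionError as exc:
        print(exc)   # maximum recursion depth exceeded during compilation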
---
Python/ast.c | 46 +++++----
Python/ast_opt.c | 39 ++++----
Python/symtable.c | 241 +++++++++++++++++++++++-----------------------
3 files changed, 164 insertions(+), 162 deletions(-)
diff --git a/Python/ast.c b/Python/ast.c
index 1d1a48ec885686..bf1ff5f3ec18ba 100644
--- a/Python/ast.c
+++ b/Python/ast.c
@@ -14,6 +14,20 @@ struct validator {
int recursion_limit; /* recursion limit */
};
+#define ENTER_RECURSIVE(ST) \
+ do { \
+ if (++(ST)->recursion_depth > (ST)->recursion_limit) { \
+ PyErr_SetString(PyExc_RecursionError, \
+ "maximum recursion depth exceeded during compilation"); \
+ return 0; \
+ } \
+ } while(0)
+
+#define LEAVE_RECURSIVE(ST) \
+ do { \
+ --(ST)->recursion_depth; \
+ } while(0)
+
static int validate_stmts(struct validator *, asdl_stmt_seq *);
static int validate_exprs(struct validator *, asdl_expr_seq *, expr_context_ty, int);
static int validate_patterns(struct validator *, asdl_pattern_seq *, int);
@@ -166,11 +180,7 @@ validate_constant(struct validator *state, PyObject *value)
return 1;
if (PyTuple_CheckExact(value) || PyFrozenSet_CheckExact(value)) {
- if (++state->recursion_depth > state->recursion_limit) {
- PyErr_SetString(PyExc_RecursionError,
- "maximum recursion depth exceeded during compilation");
- return 0;
- }
+ ENTER_RECURSIVE(state);
PyObject *it = PyObject_GetIter(value);
if (it == NULL)
@@ -195,7 +205,7 @@ validate_constant(struct validator *state, PyObject *value)
}
Py_DECREF(it);
- --state->recursion_depth;
+ LEAVE_RECURSIVE(state);
return 1;
}
@@ -213,11 +223,7 @@ validate_expr(struct validator *state, expr_ty exp, expr_context_ty ctx)
assert(!PyErr_Occurred());
VALIDATE_POSITIONS(exp);
int ret = -1;
- if (++state->recursion_depth > state->recursion_limit) {
- PyErr_SetString(PyExc_RecursionError,
- "maximum recursion depth exceeded during compilation");
- return 0;
- }
+ ENTER_RECURSIVE(state);
int check_ctx = 1;
expr_context_ty actual_ctx;
@@ -398,7 +404,7 @@ validate_expr(struct validator *state, expr_ty exp, expr_context_ty ctx)
PyErr_SetString(PyExc_SystemError, "unexpected expression");
ret = 0;
}
- state->recursion_depth--;
+ LEAVE_RECURSIVE(state);
return ret;
}
@@ -544,11 +550,7 @@ validate_pattern(struct validator *state, pattern_ty p, int star_ok)
assert(!PyErr_Occurred());
VALIDATE_POSITIONS(p);
int ret = -1;
- if (++state->recursion_depth > state->recursion_limit) {
- PyErr_SetString(PyExc_RecursionError,
- "maximum recursion depth exceeded during compilation");
- return 0;
- }
+ ENTER_RECURSIVE(state);
switch (p->kind) {
case MatchValue_kind:
ret = validate_pattern_match_value(state, p->v.MatchValue.value);
@@ -690,7 +692,7 @@ validate_pattern(struct validator *state, pattern_ty p, int star_ok)
PyErr_SetString(PyExc_SystemError, "unexpected pattern");
ret = 0;
}
- state->recursion_depth--;
+ LEAVE_RECURSIVE(state);
return ret;
}
@@ -725,11 +727,7 @@ validate_stmt(struct validator *state, stmt_ty stmt)
assert(!PyErr_Occurred());
VALIDATE_POSITIONS(stmt);
int ret = -1;
- if (++state->recursion_depth > state->recursion_limit) {
- PyErr_SetString(PyExc_RecursionError,
- "maximum recursion depth exceeded during compilation");
- return 0;
- }
+ ENTER_RECURSIVE(state);
switch (stmt->kind) {
case FunctionDef_kind:
ret = validate_body(state, stmt->v.FunctionDef.body, "FunctionDef") &&
@@ -946,7 +944,7 @@ validate_stmt(struct validator *state, stmt_ty stmt)
PyErr_SetString(PyExc_SystemError, "unexpected statement");
ret = 0;
}
- state->recursion_depth--;
+ LEAVE_RECURSIVE(state);
return ret;
}
diff --git a/Python/ast_opt.c b/Python/ast_opt.c
index 2e2c78b9d4d7d2..d7a26e64150e55 100644
--- a/Python/ast_opt.c
+++ b/Python/ast_opt.c
@@ -15,6 +15,19 @@ typedef struct {
int recursion_limit; /* recursion limit */
} _PyASTOptimizeState;
+#define ENTER_RECURSIVE(ST) \
+ do { \
+ if (++(ST)->recursion_depth > (ST)->recursion_limit) { \
+ PyErr_SetString(PyExc_RecursionError, \
+ "maximum recursion depth exceeded during compilation"); \
+ return 0; \
+ } \
+ } while(0)
+
+#define LEAVE_RECURSIVE(ST) \
+ do { \
+ --(ST)->recursion_depth; \
+ } while(0)
static int
make_const(expr_ty node, PyObject *val, PyArena *arena)
@@ -708,11 +721,7 @@ astfold_mod(mod_ty node_, PyArena *ctx_, _PyASTOptimizeState *state)
static int
astfold_expr(expr_ty node_, PyArena *ctx_, _PyASTOptimizeState *state)
{
- if (++state->recursion_depth > state->recursion_limit) {
- PyErr_SetString(PyExc_RecursionError,
- "maximum recursion depth exceeded during compilation");
- return 0;
- }
+ ENTER_RECURSIVE(state);
switch (node_->kind) {
case BoolOp_kind:
CALL_SEQ(astfold_expr, expr, node_->v.BoolOp.values);
@@ -811,7 +820,7 @@ astfold_expr(expr_ty node_, PyArena *ctx_, _PyASTOptimizeState *state)
case Name_kind:
if (node_->v.Name.ctx == Load &&
_PyUnicode_EqualToASCIIString(node_->v.Name.id, "__debug__")) {
- state->recursion_depth--;
+ LEAVE_RECURSIVE(state);
return make_const(node_, PyBool_FromLong(!state->optimize), ctx_);
}
break;
@@ -824,7 +833,7 @@ astfold_expr(expr_ty node_, PyArena *ctx_, _PyASTOptimizeState *state)
// No default case, so the compiler will emit a warning if new expression
// kinds are added without being handled here
}
- state->recursion_depth--;
+    LEAVE_RECURSIVE(state);
return 1;
}
@@ -871,11 +880,7 @@ astfold_arg(arg_ty node_, PyArena *ctx_, _PyASTOptimizeState *state)
static int
astfold_stmt(stmt_ty node_, PyArena *ctx_, _PyASTOptimizeState *state)
{
- if (++state->recursion_depth > state->recursion_limit) {
- PyErr_SetString(PyExc_RecursionError,
- "maximum recursion depth exceeded during compilation");
- return 0;
- }
+ ENTER_RECURSIVE(state);
switch (node_->kind) {
case FunctionDef_kind:
CALL_SEQ(astfold_type_param, type_param, node_->v.FunctionDef.type_params);
@@ -999,7 +1004,7 @@ astfold_stmt(stmt_ty node_, PyArena *ctx_, _PyASTOptimizeState *state)
// No default case, so the compiler will emit a warning if new statement
// kinds are added without being handled here
}
- state->recursion_depth--;
+ LEAVE_RECURSIVE(state);
return 1;
}
@@ -1031,11 +1036,7 @@ astfold_pattern(pattern_ty node_, PyArena *ctx_, _PyASTOptimizeState *state)
// Currently, this is really only used to form complex/negative numeric
// constants in MatchValue and MatchMapping nodes
// We still recurse into all subexpressions and subpatterns anyway
- if (++state->recursion_depth > state->recursion_limit) {
- PyErr_SetString(PyExc_RecursionError,
- "maximum recursion depth exceeded during compilation");
- return 0;
- }
+ ENTER_RECURSIVE(state);
switch (node_->kind) {
case MatchValue_kind:
CALL(astfold_expr, expr_ty, node_->v.MatchValue.value);
@@ -1067,7 +1068,7 @@ astfold_pattern(pattern_ty node_, PyArena *ctx_, _PyASTOptimizeState *state)
// No default case, so the compiler will emit a warning if new pattern
// kinds are added without being handled here
}
- state->recursion_depth--;
+ LEAVE_RECURSIVE(state);
return 1;
}
diff --git a/Python/symtable.c b/Python/symtable.c
index 89a0d8a2ccec1a..ef81a0799de3aa 100644
--- a/Python/symtable.c
+++ b/Python/symtable.c
@@ -1617,17 +1617,17 @@ symtable_enter_type_param_block(struct symtable *st, identifier name,
VISIT_SEQ_TAIL permits the start of an ASDL sequence to be skipped, which is
useful if the first node in the sequence requires special treatment.
- VISIT_QUIT macro returns the specified value exiting from the function but
- first adjusts current recursion counter depth.
-*/
+ ENTER_RECURSIVE macro increments the current recursion depth counter.
+ It should be used at the beginning of the recursive function.
-#define VISIT_QUIT(ST, X) \
- return --(ST)->recursion_depth,(X)
+ LEAVE_RECURSIVE macro decrements the current recursion depth counter.
+ It should be used at the end of the recursive function.
+*/
#define VISIT(ST, TYPE, V) \
do { \
if (!symtable_visit_ ## TYPE((ST), (V))) { \
- VISIT_QUIT((ST), 0); \
+ return 0; \
} \
} while(0)
@@ -1638,7 +1638,7 @@ symtable_enter_type_param_block(struct symtable *st, identifier name,
for (i = 0; i < asdl_seq_LEN(seq); i++) { \
TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, i); \
if (!symtable_visit_ ## TYPE((ST), elt)) \
- VISIT_QUIT((ST), 0); \
+ return 0; \
} \
} while(0)
@@ -1649,7 +1649,7 @@ symtable_enter_type_param_block(struct symtable *st, identifier name,
for (i = (START); i < asdl_seq_LEN(seq); i++) { \
TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, i); \
if (!symtable_visit_ ## TYPE((ST), elt)) \
- VISIT_QUIT((ST), 0); \
+ return 0; \
} \
} while(0)
@@ -1661,10 +1661,25 @@ symtable_enter_type_param_block(struct symtable *st, identifier name,
TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, i); \
if (!elt) continue; /* can be NULL */ \
if (!symtable_visit_ ## TYPE((ST), elt)) \
- VISIT_QUIT((ST), 0); \
+ return 0; \
} \
} while(0)
+#define ENTER_RECURSIVE(ST) \
+ do { \
+ if (++(ST)->recursion_depth > (ST)->recursion_limit) { \
+ PyErr_SetString(PyExc_RecursionError, \
+ "maximum recursion depth exceeded during compilation"); \
+ return 0; \
+ } \
+ } while(0)
+
+#define LEAVE_RECURSIVE(ST) \
+ do { \
+ --(ST)->recursion_depth; \
+ } while(0)
+
+
static int
symtable_record_directive(struct symtable *st, identifier name, _Py_SourceLocation loc)
{
@@ -1737,15 +1752,11 @@ maybe_set_ste_coroutine_for_module(struct symtable *st, stmt_ty s)
static int
symtable_visit_stmt(struct symtable *st, stmt_ty s)
{
- if (++st->recursion_depth > st->recursion_limit) {
- PyErr_SetString(PyExc_RecursionError,
- "maximum recursion depth exceeded during compilation");
- VISIT_QUIT(st, 0);
- }
+ ENTER_RECURSIVE(st);
switch (s->kind) {
case FunctionDef_kind: {
if (!symtable_add_def(st, s->v.FunctionDef.name, DEF_LOCAL, LOCATION(s)))
- VISIT_QUIT(st, 0);
+ return 0;
if (s->v.FunctionDef.args->defaults)
VISIT_SEQ(st, expr, s->v.FunctionDef.args->defaults);
if (s->v.FunctionDef.args->kw_defaults)
@@ -1761,40 +1772,40 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s)
s->v.FunctionDef.args->kw_defaults),
s->kind,
LOCATION(s))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
VISIT_SEQ(st, type_param, s->v.FunctionDef.type_params);
}
PySTEntryObject *new_ste = ste_new(st, s->v.FunctionDef.name, FunctionBlock, (void *)s,
LOCATION(s));
if (!new_ste) {
- VISIT_QUIT(st, 0);
+ return 0;
}
if (!symtable_visit_annotations(st, s, s->v.FunctionDef.args,
s->v.FunctionDef.returns, new_ste)) {
Py_DECREF(new_ste);
- VISIT_QUIT(st, 0);
+ return 0;
}
if (!symtable_enter_existing_block(st, new_ste)) {
Py_DECREF(new_ste);
- VISIT_QUIT(st, 0);
+ return 0;
}
Py_DECREF(new_ste);
VISIT(st, arguments, s->v.FunctionDef.args);
VISIT_SEQ(st, stmt, s->v.FunctionDef.body);
if (!symtable_exit_block(st))
- VISIT_QUIT(st, 0);
+ return 0;
if (asdl_seq_LEN(s->v.FunctionDef.type_params) > 0) {
if (!symtable_exit_block(st))
- VISIT_QUIT(st, 0);
+ return 0;
}
break;
}
case ClassDef_kind: {
PyObject *tmp;
if (!symtable_add_def(st, s->v.ClassDef.name, DEF_LOCAL, LOCATION(s)))
- VISIT_QUIT(st, 0);
+ return 0;
if (s->v.ClassDef.decorator_list)
VISIT_SEQ(st, expr, s->v.ClassDef.decorator_list);
tmp = st->st_private;
@@ -1803,42 +1814,42 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s)
(void *)s->v.ClassDef.type_params,
false, false, s->kind,
LOCATION(s))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
st->st_private = s->v.ClassDef.name;
st->st_cur->ste_mangled_names = PySet_New(NULL);
if (!st->st_cur->ste_mangled_names) {
- VISIT_QUIT(st, 0);
+ return 0;
}
VISIT_SEQ(st, type_param, s->v.ClassDef.type_params);
}
VISIT_SEQ(st, expr, s->v.ClassDef.bases);
if (!check_keywords(st, s->v.ClassDef.keywords)) {
- VISIT_QUIT(st, 0);
+ return 0;
}
VISIT_SEQ(st, keyword, s->v.ClassDef.keywords);
if (!symtable_enter_block(st, s->v.ClassDef.name, ClassBlock,
(void *)s, LOCATION(s))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
st->st_private = s->v.ClassDef.name;
if (asdl_seq_LEN(s->v.ClassDef.type_params) > 0) {
if (!symtable_add_def(st, &_Py_ID(__type_params__),
DEF_LOCAL, LOCATION(s))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
_Py_DECLARE_STR(type_params, ".type_params");
if (!symtable_add_def(st, &_Py_STR(type_params),
USE, LOCATION(s))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
}
VISIT_SEQ(st, stmt, s->v.ClassDef.body);
if (!symtable_exit_block(st))
- VISIT_QUIT(st, 0);
+ return 0;
if (asdl_seq_LEN(s->v.ClassDef.type_params) > 0) {
if (!symtable_exit_block(st))
- VISIT_QUIT(st, 0);
+ return 0;
}
st->st_private = tmp;
break;
@@ -1855,24 +1866,24 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s)
(void *)s->v.TypeAlias.type_params,
false, false, s->kind,
LOCATION(s))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
VISIT_SEQ(st, type_param, s->v.TypeAlias.type_params);
}
if (!symtable_enter_block(st, name, TypeAliasBlock,
(void *)s, LOCATION(s))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
st->st_cur->ste_can_see_class_scope = is_in_class;
if (is_in_class && !symtable_add_def(st, &_Py_ID(__classdict__), USE, LOCATION(s->v.TypeAlias.value))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
VISIT(st, expr, s->v.TypeAlias.value);
if (!symtable_exit_block(st))
- VISIT_QUIT(st, 0);
+ return 0;
if (is_generic) {
if (!symtable_exit_block(st))
- VISIT_QUIT(st, 0);
+ return 0;
}
break;
}
@@ -1895,7 +1906,7 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s)
expr_ty e_name = s->v.AnnAssign.target;
long cur = symtable_lookup(st, e_name->v.Name.id);
if (cur < 0) {
- VISIT_QUIT(st, 0);
+ return 0;
}
if ((cur & (DEF_GLOBAL | DEF_NONLOCAL))
&& (st->st_cur->ste_symbols != st->st_global)
@@ -1904,17 +1915,17 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s)
cur & DEF_GLOBAL ? GLOBAL_ANNOT : NONLOCAL_ANNOT,
e_name->v.Name.id);
SET_ERROR_LOCATION(st->st_filename, LOCATION(s));
- VISIT_QUIT(st, 0);
+ return 0;
}
if (s->v.AnnAssign.simple &&
!symtable_add_def(st, e_name->v.Name.id,
DEF_ANNOT | DEF_LOCAL, LOCATION(e_name))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
else {
if (s->v.AnnAssign.value
&& !symtable_add_def(st, e_name->v.Name.id, DEF_LOCAL, LOCATION(e_name))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
}
}
@@ -1923,7 +1934,7 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s)
}
if (!symtable_visit_annotation(st, s->v.AnnAssign.annotation,
(void *)((uintptr_t)st->st_cur->ste_id + 1))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
if (s->v.AnnAssign.value) {
@@ -1990,7 +2001,7 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s)
case ImportFrom_kind:
VISIT_SEQ(st, alias, s->v.ImportFrom.names);
if (!check_import_from(st, s)) {
- VISIT_QUIT(st, 0);
+ return 0;
}
break;
case Global_kind: {
@@ -2000,7 +2011,7 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s)
identifier name = (identifier)asdl_seq_GET(seq, i);
long cur = symtable_lookup(st, name);
if (cur < 0)
- VISIT_QUIT(st, 0);
+ return 0;
if (cur & (DEF_PARAM | DEF_LOCAL | USE | DEF_ANNOT)) {
const char* msg;
if (cur & DEF_PARAM) {
@@ -2015,13 +2026,13 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s)
PyErr_Format(PyExc_SyntaxError,
msg, name);
SET_ERROR_LOCATION(st->st_filename, LOCATION(s));
- VISIT_QUIT(st, 0);
+ return 0;
}
if (!symtable_add_def(st, name, DEF_GLOBAL, LOCATION(s))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
if (!symtable_record_directive(st, name, LOCATION(s))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
}
break;
@@ -2033,7 +2044,7 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s)
identifier name = (identifier)asdl_seq_GET(seq, i);
long cur = symtable_lookup(st, name);
if (cur < 0)
- VISIT_QUIT(st, 0);
+ return 0;
if (cur & (DEF_PARAM | DEF_LOCAL | USE | DEF_ANNOT)) {
const char* msg;
if (cur & DEF_PARAM) {
@@ -2047,12 +2058,12 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s)
}
PyErr_Format(PyExc_SyntaxError, msg, name);
SET_ERROR_LOCATION(st->st_filename, LOCATION(s));
- VISIT_QUIT(st, 0);
+ return 0;
}
if (!symtable_add_def(st, name, DEF_NONLOCAL, LOCATION(s)))
- VISIT_QUIT(st, 0);
+ return 0;
if (!symtable_record_directive(st, name, LOCATION(s))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
}
break;
@@ -2071,7 +2082,7 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s)
break;
case AsyncFunctionDef_kind: {
if (!symtable_add_def(st, s->v.AsyncFunctionDef.name, DEF_LOCAL, LOCATION(s)))
- VISIT_QUIT(st, 0);
+ return 0;
if (s->v.AsyncFunctionDef.args->defaults)
VISIT_SEQ(st, expr, s->v.AsyncFunctionDef.args->defaults);
if (s->v.AsyncFunctionDef.args->kw_defaults)
@@ -2088,24 +2099,24 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s)
s->v.AsyncFunctionDef.args->kw_defaults),
s->kind,
LOCATION(s))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
VISIT_SEQ(st, type_param, s->v.AsyncFunctionDef.type_params);
}
PySTEntryObject *new_ste = ste_new(st, s->v.FunctionDef.name, FunctionBlock, (void *)s,
LOCATION(s));
if (!new_ste) {
- VISIT_QUIT(st, 0);
+ return 0;
}
if (!symtable_visit_annotations(st, s, s->v.AsyncFunctionDef.args,
s->v.AsyncFunctionDef.returns, new_ste)) {
Py_DECREF(new_ste);
- VISIT_QUIT(st, 0);
+ return 0;
}
if (!symtable_enter_existing_block(st, new_ste)) {
Py_DECREF(new_ste);
- VISIT_QUIT(st, 0);
+ return 0;
}
Py_DECREF(new_ste);
@@ -2113,17 +2124,17 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s)
VISIT(st, arguments, s->v.AsyncFunctionDef.args);
VISIT_SEQ(st, stmt, s->v.AsyncFunctionDef.body);
if (!symtable_exit_block(st))
- VISIT_QUIT(st, 0);
+ return 0;
if (asdl_seq_LEN(s->v.AsyncFunctionDef.type_params) > 0) {
if (!symtable_exit_block(st))
- VISIT_QUIT(st, 0);
+ return 0;
}
break;
}
case AsyncWith_kind:
maybe_set_ste_coroutine_for_module(st, s);
if (!symtable_raise_if_not_coroutine(st, ASYNC_WITH_OUTSIDE_ASYNC_FUNC, LOCATION(s))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
VISIT_SEQ(st, withitem, s->v.AsyncWith.items);
VISIT_SEQ(st, stmt, s->v.AsyncWith.body);
@@ -2131,7 +2142,7 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s)
case AsyncFor_kind:
maybe_set_ste_coroutine_for_module(st, s);
if (!symtable_raise_if_not_coroutine(st, ASYNC_FOR_OUTSIDE_ASYNC_FUNC, LOCATION(s))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
VISIT(st, expr, s->v.AsyncFor.target);
VISIT(st, expr, s->v.AsyncFor.iter);
@@ -2140,7 +2151,8 @@ symtable_visit_stmt(struct symtable *st, stmt_ty s)
VISIT_SEQ(st, stmt, s->v.AsyncFor.orelse);
break;
}
- VISIT_QUIT(st, 1);
+ LEAVE_RECURSIVE(st);
+ return 1;
}
static int
@@ -2168,7 +2180,7 @@ symtable_extend_namedexpr_scope(struct symtable *st, expr_ty e)
(target_in_scope & DEF_LOCAL)) {
PyErr_Format(PyExc_SyntaxError, NAMED_EXPR_COMP_CONFLICT, target_name);
SET_ERROR_LOCATION(st->st_filename, LOCATION(e));
- VISIT_QUIT(st, 0);
+ return 0;
}
continue;
}
@@ -2178,14 +2190,14 @@ symtable_extend_namedexpr_scope(struct symtable *st, expr_ty e)
long target_in_scope = symtable_lookup_entry(st, ste, target_name);
if (target_in_scope & DEF_GLOBAL) {
if (!symtable_add_def(st, target_name, DEF_GLOBAL, LOCATION(e)))
- VISIT_QUIT(st, 0);
+ return 0;
} else {
if (!symtable_add_def(st, target_name, DEF_NONLOCAL, LOCATION(e))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
}
if (!symtable_record_directive(st, target_name, LOCATION(e))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
return symtable_add_def_helper(st, target_name, DEF_LOCAL, ste, LOCATION(e));
@@ -2193,10 +2205,10 @@ symtable_extend_namedexpr_scope(struct symtable *st, expr_ty e)
/* If we find a ModuleBlock entry, add as GLOBAL */
if (ste->ste_type == ModuleBlock) {
if (!symtable_add_def(st, target_name, DEF_GLOBAL, LOCATION(e))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
if (!symtable_record_directive(st, target_name, LOCATION(e))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
return symtable_add_def_helper(st, target_name, DEF_GLOBAL, ste, LOCATION(e));
@@ -2223,7 +2235,7 @@ symtable_extend_namedexpr_scope(struct symtable *st, expr_ty e)
Py_UNREACHABLE();
}
SET_ERROR_LOCATION(st->st_filename, LOCATION(e));
- VISIT_QUIT(st, 0);
+ return 0;
}
}
@@ -2256,18 +2268,14 @@ symtable_handle_namedexpr(struct symtable *st, expr_ty e)
static int
symtable_visit_expr(struct symtable *st, expr_ty e)
{
- if (++st->recursion_depth > st->recursion_limit) {
- PyErr_SetString(PyExc_RecursionError,
- "maximum recursion depth exceeded during compilation");
- VISIT_QUIT(st, 0);
- }
+ ENTER_RECURSIVE(st);
switch (e->kind) {
case NamedExpr_kind:
if (!symtable_raise_if_annotation_block(st, "named expression", e)) {
- VISIT_QUIT(st, 0);
+ return 0;
}
if(!symtable_handle_namedexpr(st, e))
- VISIT_QUIT(st, 0);
+ return 0;
break;
case BoolOp_kind:
VISIT_SEQ(st, expr, e->v.BoolOp.values);
@@ -2286,12 +2294,12 @@ symtable_visit_expr(struct symtable *st, expr_ty e)
VISIT_SEQ_WITH_NULL(st, expr, e->v.Lambda.args->kw_defaults);
if (!symtable_enter_block(st, &_Py_ID(lambda),
FunctionBlock, (void *)e, LOCATION(e))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
VISIT(st, arguments, e->v.Lambda.args);
VISIT(st, expr, e->v.Lambda.body);
if (!symtable_exit_block(st))
- VISIT_QUIT(st, 0);
+ return 0;
break;
}
case IfExp_kind:
@@ -2308,23 +2316,23 @@ symtable_visit_expr(struct symtable *st, expr_ty e)
break;
case GeneratorExp_kind:
if (!symtable_visit_genexp(st, e))
- VISIT_QUIT(st, 0);
+ return 0;
break;
case ListComp_kind:
if (!symtable_visit_listcomp(st, e))
- VISIT_QUIT(st, 0);
+ return 0;
break;
case SetComp_kind:
if (!symtable_visit_setcomp(st, e))
- VISIT_QUIT(st, 0);
+ return 0;
break;
case DictComp_kind:
if (!symtable_visit_dictcomp(st, e))
- VISIT_QUIT(st, 0);
+ return 0;
break;
case Yield_kind:
if (!symtable_raise_if_annotation_block(st, "yield expression", e)) {
- VISIT_QUIT(st, 0);
+ return 0;
}
if (e->v.Yield.value)
VISIT(st, expr, e->v.Yield.value);
@@ -2335,7 +2343,7 @@ symtable_visit_expr(struct symtable *st, expr_ty e)
break;
case YieldFrom_kind:
if (!symtable_raise_if_annotation_block(st, "yield expression", e)) {
- VISIT_QUIT(st, 0);
+ return 0;
}
VISIT(st, expr, e->v.YieldFrom.value);
st->st_cur->ste_generator = 1;
@@ -2345,20 +2353,20 @@ symtable_visit_expr(struct symtable *st, expr_ty e)
break;
case Await_kind:
if (!symtable_raise_if_annotation_block(st, "await expression", e)) {
- VISIT_QUIT(st, 0);
+ return 0;
}
if (!allows_top_level_await(st)) {
if (!_PyST_IsFunctionLike(st->st_cur)) {
PyErr_SetString(PyExc_SyntaxError,
"'await' outside function");
SET_ERROR_LOCATION(st->st_filename, LOCATION(e));
- VISIT_QUIT(st, 0);
+ return 0;
}
if (!IS_ASYNC_DEF(st) && st->st_cur->ste_comprehension == NoComprehension) {
PyErr_SetString(PyExc_SyntaxError,
"'await' outside async function");
SET_ERROR_LOCATION(st->st_filename, LOCATION(e));
- VISIT_QUIT(st, 0);
+ return 0;
}
}
VISIT(st, expr, e->v.Await.value);
@@ -2372,7 +2380,7 @@ symtable_visit_expr(struct symtable *st, expr_ty e)
VISIT(st, expr, e->v.Call.func);
VISIT_SEQ(st, expr, e->v.Call.args);
if (!check_keywords(st, e->v.Call.keywords)) {
- VISIT_QUIT(st, 0);
+ return 0;
}
VISIT_SEQ_WITH_NULL(st, keyword, e->v.Call.keywords);
break;
@@ -2390,7 +2398,7 @@ symtable_visit_expr(struct symtable *st, expr_ty e)
/* The following exprs can be assignment targets. */
case Attribute_kind:
if (!check_name(st, e->v.Attribute.attr, LOCATION(e), e->v.Attribute.ctx)) {
- VISIT_QUIT(st, 0);
+ return 0;
}
VISIT(st, expr, e->v.Attribute.value);
break;
@@ -2413,14 +2421,14 @@ symtable_visit_expr(struct symtable *st, expr_ty e)
if (!symtable_add_def_ctx(st, e->v.Name.id,
e->v.Name.ctx == Load ? USE : DEF_LOCAL,
LOCATION(e), e->v.Name.ctx)) {
- VISIT_QUIT(st, 0);
+ return 0;
}
/* Special-case super: it counts as a use of __class__ */
if (e->v.Name.ctx == Load &&
_PyST_IsFunctionLike(st->st_cur) &&
_PyUnicode_EqualToASCIIString(e->v.Name.id, "super")) {
if (!symtable_add_def(st, &_Py_ID(__class__), USE, LOCATION(e)))
- VISIT_QUIT(st, 0);
+ return 0;
}
break;
/* child nodes of List and Tuple will have expr_context set */
@@ -2431,7 +2439,8 @@ symtable_visit_expr(struct symtable *st, expr_ty e)
VISIT_SEQ(st, expr, e->v.Tuple.elts);
break;
}
- VISIT_QUIT(st, 1);
+ LEAVE_RECURSIVE(st);
+ return 1;
}
static int
@@ -2447,7 +2456,7 @@ symtable_visit_type_param_bound_or_default(
st->st_cur->ste_can_see_class_scope = is_in_class;
if (is_in_class && !symtable_add_def(st, &_Py_ID(__classdict__), USE, LOCATION(e))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
assert(ste_scope_info != NULL);
@@ -2464,15 +2473,11 @@ symtable_visit_type_param_bound_or_default(
static int
symtable_visit_type_param(struct symtable *st, type_param_ty tp)
{
- if (++st->recursion_depth > st->recursion_limit) {
- PyErr_SetString(PyExc_RecursionError,
- "maximum recursion depth exceeded during compilation");
- VISIT_QUIT(st, 0);
- }
+ ENTER_RECURSIVE(st);
switch(tp->kind) {
case TypeVar_kind:
if (!symtable_add_def(st, tp->v.TypeVar.name, DEF_TYPE_PARAM | DEF_LOCAL, LOCATION(tp)))
- VISIT_QUIT(st, 0);
+ return 0;
const char *ste_scope_info = NULL;
const expr_ty bound = tp->v.TypeVar.bound;
@@ -2488,46 +2493,43 @@ symtable_visit_type_param(struct symtable *st, type_param_ty tp)
// compile.c where the scope is retrieved.
if (!symtable_visit_type_param_bound_or_default(st, tp->v.TypeVar.bound, tp->v.TypeVar.name,
(void *)tp, ste_scope_info)) {
- VISIT_QUIT(st, 0);
+ return 0;
}
if (!symtable_visit_type_param_bound_or_default(st, tp->v.TypeVar.default_value, tp->v.TypeVar.name,
(void *)((uintptr_t)tp + 1), "a TypeVar default")) {
- VISIT_QUIT(st, 0);
+ return 0;
}
break;
case TypeVarTuple_kind:
if (!symtable_add_def(st, tp->v.TypeVarTuple.name, DEF_TYPE_PARAM | DEF_LOCAL, LOCATION(tp))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
if (!symtable_visit_type_param_bound_or_default(st, tp->v.TypeVarTuple.default_value, tp->v.TypeVarTuple.name,
(void *)tp, "a TypeVarTuple default")) {
- VISIT_QUIT(st, 0);
+ return 0;
}
break;
case ParamSpec_kind:
if (!symtable_add_def(st, tp->v.ParamSpec.name, DEF_TYPE_PARAM | DEF_LOCAL, LOCATION(tp))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
if (!symtable_visit_type_param_bound_or_default(st, tp->v.ParamSpec.default_value, tp->v.ParamSpec.name,
(void *)tp, "a ParamSpec default")) {
- VISIT_QUIT(st, 0);
+ return 0;
}
break;
}
- VISIT_QUIT(st, 1);
+ LEAVE_RECURSIVE(st);
+ return 1;
}
static int
symtable_visit_pattern(struct symtable *st, pattern_ty p)
{
- if (++st->recursion_depth > st->recursion_limit) {
- PyErr_SetString(PyExc_RecursionError,
- "maximum recursion depth exceeded during compilation");
- VISIT_QUIT(st, 0);
- }
+ ENTER_RECURSIVE(st);
switch (p->kind) {
case MatchValue_kind:
VISIT(st, expr, p->v.MatchValue.value);
@@ -2541,7 +2543,7 @@ symtable_visit_pattern(struct symtable *st, pattern_ty p)
case MatchStar_kind:
if (p->v.MatchStar.name) {
if (!symtable_add_def(st, p->v.MatchStar.name, DEF_LOCAL, LOCATION(p))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
}
break;
@@ -2550,7 +2552,7 @@ symtable_visit_pattern(struct symtable *st, pattern_ty p)
VISIT_SEQ(st, pattern, p->v.MatchMapping.patterns);
if (p->v.MatchMapping.rest) {
if (!symtable_add_def(st, p->v.MatchMapping.rest, DEF_LOCAL, LOCATION(p))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
}
break;
@@ -2558,7 +2560,7 @@ symtable_visit_pattern(struct symtable *st, pattern_ty p)
VISIT(st, expr, p->v.MatchClass.cls);
VISIT_SEQ(st, pattern, p->v.MatchClass.patterns);
if (!check_kwd_patterns(st, p)) {
- VISIT_QUIT(st, 0);
+ return 0;
}
VISIT_SEQ(st, pattern, p->v.MatchClass.kwd_patterns);
break;
@@ -2568,7 +2570,7 @@ symtable_visit_pattern(struct symtable *st, pattern_ty p)
}
if (p->v.MatchAs.name) {
if (!symtable_add_def(st, p->v.MatchAs.name, DEF_LOCAL, LOCATION(p))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
}
break;
@@ -2576,7 +2578,8 @@ symtable_visit_pattern(struct symtable *st, pattern_ty p)
VISIT_SEQ(st, pattern, p->v.MatchOr.patterns);
break;
}
- VISIT_QUIT(st, 1);
+ LEAVE_RECURSIVE(st);
+ return 1;
}
static int
@@ -2618,7 +2621,7 @@ symtable_visit_annotation(struct symtable *st, expr_ty annotation, void *key)
_Py_block_ty current_type = parent_ste->ste_type;
if (!symtable_enter_block(st, &_Py_ID(__annotate__), AnnotationBlock,
key, LOCATION(annotation))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
parent_ste->ste_annotation_block =
(struct _symtable_entry *)Py_NewRef(st->st_cur);
@@ -2632,12 +2635,12 @@ symtable_visit_annotation(struct symtable *st, expr_ty annotation, void *key)
}
else {
if (!symtable_enter_existing_block(st, parent_ste->ste_annotation_block)) {
- VISIT_QUIT(st, 0);
+ return 0;
}
}
VISIT(st, expr, annotation);
if (!symtable_exit_block(st)) {
- VISIT_QUIT(st, 0);
+ return 0;
}
return 1;
}
@@ -2669,7 +2672,7 @@ symtable_visit_annotations(struct symtable *st, stmt_ty o, arguments_ty a, expr_
_Py_block_ty current_type = st->st_cur->ste_type;
if (!symtable_enter_block(st, &_Py_ID(__annotate__), AnnotationBlock,
(void *)a, LOCATION(o))) {
- VISIT_QUIT(st, 0);
+ return 0;
}
if (is_in_class || current_type == ClassBlock) {
st->st_cur->ste_can_see_class_scope = 1;
@@ -2696,7 +2699,7 @@ symtable_visit_annotations(struct symtable *st, stmt_ty o, arguments_ty a, expr_
VISIT(st, expr, returns);
}
if (!symtable_exit_block(st)) {
- VISIT_QUIT(st, 0);
+ return 0;
}
return 1;
}
@@ -2963,7 +2966,7 @@ symtable_raise_if_comprehension_block(struct symtable *st, expr_ty e) {
(type == DictComprehension) ? "'yield' inside dict comprehension" :
"'yield' inside generator expression");
SET_ERROR_LOCATION(st->st_filename, LOCATION(e));
- VISIT_QUIT(st, 0);
+ return 0;
}
static int
From 7a5c4103b094aaf1b65af6de65795d172cfe8fe0 Mon Sep 17 00:00:00 2001
From: Matth-M <93771840+Matth-M@users.noreply.github.com>
Date: Sat, 3 Aug 2024 12:18:59 +0100
Subject: [PATCH 117/139] Doc: Improve wording of ``os.path.commonpath()``
(#122627)
Co-authored-by: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
---
Doc/library/os.path.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Doc/library/os.path.rst b/Doc/library/os.path.rst
index ac24bf05c289b6..ecbbc1d7605f9f 100644
--- a/Doc/library/os.path.rst
+++ b/Doc/library/os.path.rst
@@ -81,7 +81,7 @@ the :mod:`glob` module.)
Return the longest common sub-path of each pathname in the iterable
*paths*. Raise :exc:`ValueError` if *paths* contain both absolute
- and relative pathnames, the *paths* are on the different drives or
+ and relative pathnames, if *paths* are on different drives, or
if *paths* is empty. Unlike :func:`commonprefix`, this returns a
valid path.
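For example (a sketch of the behaviour described above; the exact error message may differ between platforms):

    >>> import os.path
    >>> os.path.commonpath(["/usr/lib/python3", "/usr/local/lib"])
    '/usr'
    >>> os.path.commonpath(["/usr/lib", "local/lib"])
    Traceback (most recent call last):
      ...
    ValueError: Can't mix absolute and relative paths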
From d91ac525ef166edc0083acf5a96f81b87324fe7f Mon Sep 17 00:00:00 2001
From: Sergey B Kirpichev
Date: Sat, 3 Aug 2024 14:20:10 +0300
Subject: [PATCH 118/139] gh-122613: Document PyLong_GetInfo() (part of Limited
API) (GH-122280)
---
Doc/c-api/long.rst | 11 +++++++++++
1 file changed, 11 insertions(+)
diff --git a/Doc/c-api/long.rst b/Doc/c-api/long.rst
index 42162914c0aec8..9f2c48d98b8344 100644
--- a/Doc/c-api/long.rst
+++ b/Doc/c-api/long.rst
@@ -514,6 +514,17 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
.. versionadded:: 3.14
+.. c:function:: PyObject* PyLong_GetInfo(void)
+
+   On success, return a read-only :term:`named tuple` that holds
+   information about Python's internal representation of integers.
+   See :data:`sys.int_info` for a description of the individual fields.
+
+ On failure, return ``NULL`` with an exception set.
+
+ .. versionadded:: 3.1
+
+
.. c:function:: int PyUnstable_Long_IsCompact(const PyLongObject* op)
Return 1 if *op* is compact, 0 otherwise.
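At the Python level the same data is exposed as sys.int_info, which the new documentation points to; for example (field values depend on the build):

    import sys

    info = sys.int_info
    print(info.bits_per_digit, info.sizeof_digit)   # e.g. 30 4 on 64-bit builds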
From cc6839a1810290e483e5d5f0786d9b46c4294d47 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sat, 3 Aug 2024 12:52:21 +0100
Subject: [PATCH 119/139] GH-109408: Stop running patchcheck in CI (#109895)
---
.azure-pipelines/posix-steps.yml | 6 --
.azure-pipelines/pr.yml | 17 -----
Tools/patchcheck/patchcheck.py | 120 ++++---------------------------
3 files changed, 12 insertions(+), 131 deletions(-)
diff --git a/.azure-pipelines/posix-steps.yml b/.azure-pipelines/posix-steps.yml
index e23c7b1dcb55c1..99fb7f3b1105b6 100644
--- a/.azure-pipelines/posix-steps.yml
+++ b/.azure-pipelines/posix-steps.yml
@@ -18,9 +18,3 @@ steps:
- script: make pythoninfo
displayName: 'Display build info'
-
-- script: |
- git fetch origin
- ./python Tools/patchcheck/patchcheck.py --ci true
- displayName: 'Run patchcheck.py'
- condition: and(succeeded(), eq(variables['Build.Reason'], 'PullRequest'))
diff --git a/.azure-pipelines/pr.yml b/.azure-pipelines/pr.yml
index 335a4b407cb83c..433396778ab891 100644
--- a/.azure-pipelines/pr.yml
+++ b/.azure-pipelines/pr.yml
@@ -9,20 +9,3 @@ jobs:
steps:
- template: ./prebuild-checks.yml
-
-
-- job: Ubuntu_Patchcheck
- displayName: Ubuntu patchcheck
- dependsOn: Prebuild
- condition: and(succeeded(), eq(dependencies.Prebuild.outputs['tests.run'], 'true'))
-
- pool:
- vmImage: ubuntu-22.04
-
- variables:
- testRunTitle: '$(system.pullRequest.TargetBranch)-linux'
- testRunPlatform: linux
- openssl_version: 1.1.1u
-
- steps:
- - template: ./posix-steps.yml
diff --git a/Tools/patchcheck/patchcheck.py b/Tools/patchcheck/patchcheck.py
index fc338f389ca6d9..0dcf6ef844a048 100755
--- a/Tools/patchcheck/patchcheck.py
+++ b/Tools/patchcheck/patchcheck.py
@@ -5,9 +5,6 @@
import subprocess
import sysconfig
-import reindent
-import untabify
-
def get_python_source_dir():
src_dir = sysconfig.get_config_var('abs_srcdir')
@@ -16,13 +13,6 @@ def get_python_source_dir():
return os.path.abspath(src_dir)
-# Excluded directories which are copies of external libraries:
-# don't check their coding style
-EXCLUDE_DIRS = [
- os.path.join('Modules', '_decimal', 'libmpdec'),
- os.path.join('Modules', 'expat'),
- os.path.join('Modules', 'zlib'),
- ]
SRCDIR = get_python_source_dir()
@@ -153,62 +143,7 @@ def changed_files(base_branch=None):
else:
sys.exit('need a git checkout to get modified files')
- filenames2 = []
- for filename in filenames:
- # Normalize the path to be able to match using .startswith()
- filename = os.path.normpath(filename)
- if any(filename.startswith(path) for path in EXCLUDE_DIRS):
- # Exclude the file
- continue
- filenames2.append(filename)
-
- return filenames2
-
-
-def report_modified_files(file_paths):
- count = len(file_paths)
- if count == 0:
- return n_files_str(count)
- else:
- lines = [f"{n_files_str(count)}:"]
- for path in file_paths:
- lines.append(f" {path}")
- return "\n".join(lines)
-
-
-#: Python files that have tabs by design:
-_PYTHON_FILES_WITH_TABS = frozenset({
- 'Tools/c-analyzer/cpython/_parser.py',
-})
-
-
-@status("Fixing Python file whitespace", info=report_modified_files)
-def normalize_whitespace(file_paths):
- """Make sure that the whitespace for .py files have been normalized."""
- reindent.makebackup = False # No need to create backups.
- fixed = [
- path for path in file_paths
- if (
- path.endswith('.py')
- and path not in _PYTHON_FILES_WITH_TABS
- and reindent.check(os.path.join(SRCDIR, path))
- )
- ]
- return fixed
-
-
-@status("Fixing C file whitespace", info=report_modified_files)
-def normalize_c_whitespace(file_paths):
- """Report if any C files """
- fixed = []
- for path in file_paths:
- abspath = os.path.join(SRCDIR, path)
- with open(abspath, 'r') as f:
- if '\t' not in f.read():
- continue
- untabify.process(abspath, 8, verbose=False)
- fixed.append(path)
- return fixed
+ return list(map(os.path.normpath, filenames))
@status("Docs modified", modal=True)
@@ -248,40 +183,14 @@ def regenerated_pyconfig_h_in(file_paths):
return "not needed"
-def ci(pull_request):
- if pull_request == 'false':
- print('Not a pull request; skipping')
- return
- base_branch = get_base_branch()
- file_paths = changed_files(base_branch)
- python_files = [fn for fn in file_paths if fn.endswith('.py')]
- c_files = [fn for fn in file_paths if fn.endswith(('.c', '.h'))]
- fixed = []
- fixed.extend(normalize_whitespace(python_files))
- fixed.extend(normalize_c_whitespace(c_files))
- if not fixed:
- print('No whitespace issues found')
- else:
- count = len(fixed)
- print(f'Please fix the {n_files_str(count)} with whitespace issues')
- print('(on Unix you can run `make patchcheck` to make the fixes)')
- sys.exit(1)
-
-
def main():
base_branch = get_base_branch()
file_paths = changed_files(base_branch)
- python_files = [fn for fn in file_paths if fn.endswith('.py')]
- c_files = [fn for fn in file_paths if fn.endswith(('.c', '.h'))]
- doc_files = [fn for fn in file_paths if fn.startswith('Doc') and
- fn.endswith(('.rst', '.inc'))]
+ has_doc_files = any(fn for fn in file_paths if fn.startswith('Doc') and
+ fn.endswith(('.rst', '.inc')))
misc_files = {p for p in file_paths if p.startswith('Misc')}
- # PEP 8 whitespace rules enforcement.
- normalize_whitespace(python_files)
- # C rules enforcement.
- normalize_c_whitespace(c_files)
# Docs updated.
- docs_modified(doc_files)
+ docs_modified(has_doc_files)
# Misc/ACKS changed.
credit_given(misc_files)
# Misc/NEWS changed.
@@ -292,19 +201,14 @@ def main():
regenerated_pyconfig_h_in(file_paths)
# Test suite run and passed.
- if python_files or c_files:
- end = " and check for refleaks?" if c_files else "?"
- print()
- print("Did you run the test suite" + end)
+ has_c_files = any(fn for fn in file_paths if fn.endswith(('.c', '.h')))
+ has_python_files = any(fn for fn in file_paths if fn.endswith('.py'))
+ print()
+ if has_c_files:
+ print("Did you run the test suite and check for refleaks?")
+ elif has_python_files:
+ print("Did you run the test suite?")
if __name__ == '__main__':
- import argparse
- parser = argparse.ArgumentParser(description=__doc__)
- parser.add_argument('--ci',
- help='Perform pass/fail checks')
- args = parser.parse_args()
- if args.ci:
- ci(args.ci)
- else:
- main()
+ main()
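
For illustration, the path checks that the simplified main() now performs can be reproduced in isolation (the file names below are hypothetical):

    file_paths = ['Doc/library/ssl.rst', 'Lib/pickle.py', 'Misc/ACKS']
    # Same generator-based checks as the new main() in patchcheck.py.
    has_doc_files = any(fn for fn in file_paths
                        if fn.startswith('Doc') and fn.endswith(('.rst', '.inc')))
    has_python_files = any(fn for fn in file_paths if fn.endswith('.py'))
    print(has_doc_files, has_python_files)  # True True
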
From 50b36037518a8e7f7eee39b597d56b5b2756eb86 Mon Sep 17 00:00:00 2001
From: neonene <53406459+neonene@users.noreply.github.com>
Date: Sat, 3 Aug 2024 22:15:26 +0900
Subject: [PATCH 120/139] gh-122334: Fix test_embed failure when missing _ssl
module (GH-122630)
Co-authored-by: Wulian233 <1055917385@qq.com>
---
Lib/test/test_embed.py | 8 ++++++--
1 file changed, 6 insertions(+), 2 deletions(-)
diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py
index ab112d6be85b46..916a9a79887dfc 100644
--- a/Lib/test/test_embed.py
+++ b/Lib/test/test_embed.py
@@ -465,8 +465,12 @@ def test_getargs_reset_static_parser(self):
# Test _PyArg_Parser initializations via _PyArg_UnpackKeywords()
# https://github.com/python/cpython/issues/122334
code = textwrap.dedent("""
- import _ssl
- _ssl.txt2obj(txt='1.3')
+ try:
+ import _ssl
+ except ModuleNotFoundError:
+ _ssl = None
+ if _ssl is not None:
+ _ssl.txt2obj(txt='1.3')
print('1')
import _queue
From 06eb9701a182b4720dfa8766cb41cc5a3728a8b9 Mon Sep 17 00:00:00 2001
From: scottwoodall
Date: Sat, 3 Aug 2024 09:24:29 -0400
Subject: [PATCH 121/139] Doc: Grammar fix in ``library/ssl.rst``, 'Verifying
certificates' (#122646)
---
Doc/library/ssl.rst | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst
index 7d4c1f0f2de347..ad441c528d0d66 100644
--- a/Doc/library/ssl.rst
+++ b/Doc/library/ssl.rst
@@ -2710,7 +2710,7 @@ Verifying certificates
When calling the :class:`SSLContext` constructor directly,
:const:`CERT_NONE` is the default. Since it does not authenticate the other
-peer, it can be insecure, especially in client mode where most of time you
+peer, it can be insecure, especially in client mode where most of the time you
would like to ensure the authenticity of the server you're talking to.
Therefore, when in client mode, it is highly recommended to use
:const:`CERT_REQUIRED`. However, it is in itself not sufficient; you also
From 1573d90ce17f27fd30a251de897a35bf598d2655 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sat, 3 Aug 2024 16:11:48 +0100
Subject: [PATCH 122/139] gh-109408: Remove ``.azure-pipelines/pr.yml``
(#122643)
This no longer does anything useful, beyond wasting Azure resources.
---
.azure-pipelines/posix-deps-apt.sh | 27 ---------------------------
.azure-pipelines/posix-steps.yml | 20 --------------------
.azure-pipelines/pr.yml | 11 -----------
3 files changed, 58 deletions(-)
delete mode 100755 .azure-pipelines/posix-deps-apt.sh
delete mode 100644 .azure-pipelines/posix-steps.yml
delete mode 100644 .azure-pipelines/pr.yml
diff --git a/.azure-pipelines/posix-deps-apt.sh b/.azure-pipelines/posix-deps-apt.sh
deleted file mode 100755
index e0f4ca5d8d8e88..00000000000000
--- a/.azure-pipelines/posix-deps-apt.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/sh
-apt-get update
-
-apt-get -yq install \
- build-essential \
- zlib1g-dev \
- libbz2-dev \
- liblzma-dev \
- libncurses5-dev \
- libreadline6-dev \
- libsqlite3-dev \
- libssl-dev \
- libgdbm-dev \
- tk-dev \
- lzma \
- lzma-dev \
- liblzma-dev \
- libffi-dev \
- uuid-dev \
- xvfb
-
-if [ ! -z "$1" ]
-then
- echo ##vso[task.prependpath]$PWD/multissl/openssl/$1
- echo ##vso[task.setvariable variable=OPENSSL_DIR]$PWD/multissl/openssl/$1
- python3 Tools/ssl/multissltests.py --steps=library --base-directory $PWD/multissl --openssl $1 --system Linux
-fi
diff --git a/.azure-pipelines/posix-steps.yml b/.azure-pipelines/posix-steps.yml
deleted file mode 100644
index 99fb7f3b1105b6..00000000000000
--- a/.azure-pipelines/posix-steps.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-steps:
-- checkout: self
- clean: true
- fetchDepth: 5
-
-# Work around a known issue affecting Ubuntu VMs on Pipelines
-- script: sudo setfacl -Rb /home/vsts
- displayName: 'Workaround ACL issue'
-
-- script: sudo ./.azure-pipelines/posix-deps-apt.sh $(openssl_version)
- displayName: 'Install dependencies'
-
-- script: ./configure --with-pydebug
- displayName: 'Configure CPython (debug)'
-
-- script: make -j4
- displayName: 'Build CPython'
-
-- script: make pythoninfo
- displayName: 'Display build info'
diff --git a/.azure-pipelines/pr.yml b/.azure-pipelines/pr.yml
deleted file mode 100644
index 433396778ab891..00000000000000
--- a/.azure-pipelines/pr.yml
+++ /dev/null
@@ -1,11 +0,0 @@
-pr: ['main', '3.12', '3.11', '3.10', '3.9', '3.8', '3.7']
-
-jobs:
-- job: Prebuild
- displayName: Pre-build checks
-
- pool:
- vmImage: ubuntu-22.04
-
- steps:
- - template: ./prebuild-checks.yml
From 95f5c89b545beaafad73f05a695742da3e90bc41 Mon Sep 17 00:00:00 2001
From: Adam Turner <9087854+AA-Turner@users.noreply.github.com>
Date: Sat, 3 Aug 2024 17:41:26 +0100
Subject: [PATCH 123/139] GH-121970: Fix ``gettext`` for audit events (#122651)
---
Doc/tools/extensions/audit_events.py | 2 ++
1 file changed, 2 insertions(+)
diff --git a/Doc/tools/extensions/audit_events.py b/Doc/tools/extensions/audit_events.py
index d0f08522d21ea2..23d82c0f4414bf 100644
--- a/Doc/tools/extensions/audit_events.py
+++ b/Doc/tools/extensions/audit_events.py
@@ -149,6 +149,7 @@ def run(self) -> list[nodes.paragraph]:
node = nodes.paragraph("", classes=["audit-hook"], ids=ids)
self.set_source_info(node)
if self.content:
+ node.rawsource = '\n'.join(self.content) # for gettext
self.state.nested_parse(self.content, self.content_offset, node)
else:
num_args = min(2, len(args))
@@ -156,6 +157,7 @@ def run(self) -> list[nodes.paragraph]:
name=f"``{name}``",
args=", ".join(f"``{a}``" for a in args),
)
+ node.rawsource = text # for gettext
parsed, messages = self.state.inline_text(text, self.lineno)
node += parsed
node += messages
From 151934a324789c58cca9c7bbd6753d735454df5a Mon Sep 17 00:00:00 2001
From: sobolevn
Date: Sun, 4 Aug 2024 00:55:47 +0300
Subject: [PATCH 124/139] gh-122623: Improve `c-api/bytearray.rst` with error
handling info (#122624)
---
Doc/c-api/bytearray.rst | 9 +++++++--
1 file changed, 7 insertions(+), 2 deletions(-)
diff --git a/Doc/c-api/bytearray.rst b/Doc/c-api/bytearray.rst
index 456f7d89bca03c..9045689a6be567 100644
--- a/Doc/c-api/bytearray.rst
+++ b/Doc/c-api/bytearray.rst
@@ -42,17 +42,22 @@ Direct API functions
Return a new bytearray object from any object, *o*, that implements the
:ref:`buffer protocol <bufferobjects>`.
+ On failure, return ``NULL`` with an exception set.
+
.. c:function:: PyObject* PyByteArray_FromStringAndSize(const char *string, Py_ssize_t len)
- Create a new bytearray object from *string* and its length, *len*. On
- failure, ``NULL`` is returned.
+ Create a new bytearray object from *string* and its length, *len*.
+
+ On failure, return ``NULL`` with an exception set.
.. c:function:: PyObject* PyByteArray_Concat(PyObject *a, PyObject *b)
Concat bytearrays *a* and *b* and return a new bytearray with the result.
+ On failure, return ``NULL`` with an exception set.
+
.. c:function:: Py_ssize_t PyByteArray_Size(PyObject *bytearray)
From 3462a80d2cf37a63fe43f46f64a8c9823f84531d Mon Sep 17 00:00:00 2001
From: Sergey B Kirpichev
Date: Sun, 4 Aug 2024 11:53:17 +0300
Subject: [PATCH 125/139] gh-121889: cmath.acosh(0+nanj) returns nan+pi/2j
(#121892)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
As per C11 DR#471 (adjusted resolution accepted for C17), cacosh (0 +
iNaN) should return NaN ± i pi/2, not NaN + iNaN. This patch
fixes cmath's code to do the same.
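
A minimal check of the new special-value behaviour, assuming an interpreter built with this change (the sign of the imaginary part is not significant, per the ignore-imag-sign flag in the updated test cases):

    import cmath
    import math

    z = cmath.acosh(complex(0.0, math.nan))
    print(math.isnan(z.real))                      # True
    print(math.isclose(abs(z.imag), math.pi / 2))  # True: pi/2 instead of nan
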
---
Lib/test/mathdata/cmath_testcases.txt | 4 ++--
.../Library/2024-07-17-09-23-03.gh-issue-121889.6se9jS.rst | 1 +
Modules/cmathmodule.c | 4 ++--
3 files changed, 5 insertions(+), 4 deletions(-)
create mode 100644 Misc/NEWS.d/next/Library/2024-07-17-09-23-03.gh-issue-121889.6se9jS.rst
diff --git a/Lib/test/mathdata/cmath_testcases.txt b/Lib/test/mathdata/cmath_testcases.txt
index 0165e17634f41c..0c0d2d703f07f8 100644
--- a/Lib/test/mathdata/cmath_testcases.txt
+++ b/Lib/test/mathdata/cmath_testcases.txt
@@ -371,9 +371,9 @@ acosh1002 acosh 0.0 inf -> inf 1.5707963267948966
acosh1003 acosh 2.3 inf -> inf 1.5707963267948966
acosh1004 acosh -0.0 inf -> inf 1.5707963267948966
acosh1005 acosh -2.3 inf -> inf 1.5707963267948966
-acosh1006 acosh 0.0 nan -> nan nan
+acosh1006 acosh 0.0 nan -> nan 1.5707963267948966 ignore-imag-sign
acosh1007 acosh 2.3 nan -> nan nan
-acosh1008 acosh -0.0 nan -> nan nan
+acosh1008 acosh -0.0 nan -> nan 1.5707963267948966 ignore-imag-sign
acosh1009 acosh -2.3 nan -> nan nan
acosh1010 acosh -inf 0.0 -> inf 3.1415926535897931
acosh1011 acosh -inf 2.3 -> inf 3.1415926535897931
diff --git a/Misc/NEWS.d/next/Library/2024-07-17-09-23-03.gh-issue-121889.6se9jS.rst b/Misc/NEWS.d/next/Library/2024-07-17-09-23-03.gh-issue-121889.6se9jS.rst
new file mode 100644
index 00000000000000..a7babe0580b3e4
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-07-17-09-23-03.gh-issue-121889.6se9jS.rst
@@ -0,0 +1 @@
+Adjusts ``cmath.acosh(complex('0+nanj'))`` for recent C standards.
diff --git a/Modules/cmathmodule.c b/Modules/cmathmodule.c
index bf86a211bcb188..16ac663bdb9949 100644
--- a/Modules/cmathmodule.c
+++ b/Modules/cmathmodule.c
@@ -1259,8 +1259,8 @@ cmath_exec(PyObject *mod)
INIT_SPECIAL_VALUES(acosh_special_values, {
C(INF,-P34) C(INF,-P) C(INF,-P) C(INF,P) C(INF,P) C(INF,P34) C(INF,N)
C(INF,-P12) C(U,U) C(U,U) C(U,U) C(U,U) C(INF,P12) C(N,N)
- C(INF,-P12) C(U,U) C(0.,-P12) C(0.,P12) C(U,U) C(INF,P12) C(N,N)
- C(INF,-P12) C(U,U) C(0.,-P12) C(0.,P12) C(U,U) C(INF,P12) C(N,N)
+ C(INF,-P12) C(U,U) C(0.,-P12) C(0.,P12) C(U,U) C(INF,P12) C(N,P12)
+ C(INF,-P12) C(U,U) C(0.,-P12) C(0.,P12) C(U,U) C(INF,P12) C(N,P12)
C(INF,-P12) C(U,U) C(U,U) C(U,U) C(U,U) C(INF,P12) C(N,N)
C(INF,-P14) C(INF,-0.) C(INF,-0.) C(INF,0.) C(INF,0.) C(INF,P14) C(INF,N)
C(INF,N) C(N,N) C(N,N) C(N,N) C(N,N) C(INF,N) C(N,N)
From e6fad7a0e3d824f4a3c9cd71a48208880606d705 Mon Sep 17 00:00:00 2001
From: Sergey B Kirpichev
Date: Sun, 4 Aug 2024 12:05:30 +0300
Subject: [PATCH 126/139] =?UTF-8?q?gh-122637:=20fix=20tanh(=C2=B10+infj)?=
=?UTF-8?q?=20and=20tanh(=C2=B10+nanj)=20to=20return=20=C2=B10+nanj=20(#12?=
=?UTF-8?q?2638)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
As per C11 DR#471, ctanh (0 + i NaN) and ctanh (0 + i Inf) should return
0 + i NaN (with "invalid" exception in the second case). This has
corresponding implications for ctan(z), as its errors and special cases
are handled as if the operation is implemented by -i*ctanh(i*z).
This patch fixes cmath's code to do the same.
Glibc patch: https://sourceware.org/git/?p=glibc.git;a=commitdiff;h=d15e83c5f5231d971472b5ffc9219d54056ca0f1
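
A minimal check of the new behaviour for the NaN case (taken from the updated tanh1003 test case; requires an interpreter built with this change):

    import cmath
    import math

    z = cmath.tanh(complex(0.0, math.nan))
    print(z.real)              # 0.0, previously nan
    print(math.isnan(z.imag))  # True
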
---
Lib/test/mathdata/cmath_testcases.txt | 24 +++++++++----------
...-08-03-06-51-08.gh-issue-122637.gpas8J.rst | 1 +
Modules/cmathmodule.c | 4 ++--
3 files changed, 15 insertions(+), 14 deletions(-)
create mode 100644 Misc/NEWS.d/next/Library/2024-08-03-06-51-08.gh-issue-122637.gpas8J.rst
diff --git a/Lib/test/mathdata/cmath_testcases.txt b/Lib/test/mathdata/cmath_testcases.txt
index 0c0d2d703f07f8..7b98b5a2998413 100644
--- a/Lib/test/mathdata/cmath_testcases.txt
+++ b/Lib/test/mathdata/cmath_testcases.txt
@@ -1992,9 +1992,9 @@ tanh0065 tanh 1.797e+308 0.0 -> 1.0 0.0
--special values
tanh1000 tanh 0.0 0.0 -> 0.0 0.0
-tanh1001 tanh 0.0 inf -> nan nan invalid
+tanh1001 tanh 0.0 inf -> 0.0 nan invalid
tanh1002 tanh 2.3 inf -> nan nan invalid
-tanh1003 tanh 0.0 nan -> nan nan
+tanh1003 tanh 0.0 nan -> 0.0 nan
tanh1004 tanh 2.3 nan -> nan nan
tanh1005 tanh inf 0.0 -> 1.0 0.0
tanh1006 tanh inf 0.7 -> 1.0 0.0
@@ -2009,7 +2009,7 @@ tanh1014 tanh nan 2.3 -> nan nan
tanh1015 tanh nan inf -> nan nan
tanh1016 tanh nan nan -> nan nan
tanh1017 tanh 0.0 -0.0 -> 0.0 -0.0
-tanh1018 tanh 0.0 -inf -> nan nan invalid
+tanh1018 tanh 0.0 -inf -> 0.0 nan invalid
tanh1019 tanh 2.3 -inf -> nan nan invalid
tanh1020 tanh inf -0.0 -> 1.0 -0.0
tanh1021 tanh inf -0.7 -> 1.0 -0.0
@@ -2022,9 +2022,9 @@ tanh1027 tanh nan -0.0 -> nan -0.0
tanh1028 tanh nan -2.3 -> nan nan
tanh1029 tanh nan -inf -> nan nan
tanh1030 tanh -0.0 -0.0 -> -0.0 -0.0
-tanh1031 tanh -0.0 -inf -> nan nan invalid
+tanh1031 tanh -0.0 -inf -> -0.0 nan invalid
tanh1032 tanh -2.3 -inf -> nan nan invalid
-tanh1033 tanh -0.0 nan -> nan nan
+tanh1033 tanh -0.0 nan -> -0.0 nan
tanh1034 tanh -2.3 nan -> nan nan
tanh1035 tanh -inf -0.0 -> -1.0 -0.0
tanh1036 tanh -inf -0.7 -> -1.0 -0.0
@@ -2035,7 +2035,7 @@ tanh1040 tanh -inf -3.5 -> -1.0 -0.0
tanh1041 tanh -inf -inf -> -1.0 0.0 ignore-imag-sign
tanh1042 tanh -inf nan -> -1.0 0.0 ignore-imag-sign
tanh1043 tanh -0.0 0.0 -> -0.0 0.0
-tanh1044 tanh -0.0 inf -> nan nan invalid
+tanh1044 tanh -0.0 inf -> -0.0 nan invalid
tanh1045 tanh -2.3 inf -> nan nan invalid
tanh1046 tanh -inf 0.0 -> -1.0 0.0
tanh1047 tanh -inf 0.7 -> -1.0 0.0
@@ -2307,9 +2307,9 @@ tan0066 tan -8.79645943005142 0.0 -> 0.7265425280053614098 0.0
-- special values
tan1000 tan -0.0 0.0 -> -0.0 0.0
-tan1001 tan -inf 0.0 -> nan nan invalid
+tan1001 tan -inf 0.0 -> nan 0.0 invalid
tan1002 tan -inf 2.2999999999999998 -> nan nan invalid
-tan1003 tan nan 0.0 -> nan nan
+tan1003 tan nan 0.0 -> nan 0.0
tan1004 tan nan 2.2999999999999998 -> nan nan
tan1005 tan -0.0 inf -> -0.0 1.0
tan1006 tan -0.69999999999999996 inf -> -0.0 1.0
@@ -2324,7 +2324,7 @@ tan1014 tan -2.2999999999999998 nan -> nan nan
tan1015 tan -inf nan -> nan nan
tan1016 tan nan nan -> nan nan
tan1017 tan 0.0 0.0 -> 0.0 0.0
-tan1018 tan inf 0.0 -> nan nan invalid
+tan1018 tan inf 0.0 -> nan 0.0 invalid
tan1019 tan inf 2.2999999999999998 -> nan nan invalid
tan1020 tan 0.0 inf -> 0.0 1.0
tan1021 tan 0.69999999999999996 inf -> 0.0 1.0
@@ -2337,9 +2337,9 @@ tan1027 tan 0.0 nan -> 0.0 nan
tan1028 tan 2.2999999999999998 nan -> nan nan
tan1029 tan inf nan -> nan nan
tan1030 tan 0.0 -0.0 -> 0.0 -0.0
-tan1031 tan inf -0.0 -> nan nan invalid
+tan1031 tan inf -0.0 -> nan -0.0 invalid
tan1032 tan inf -2.2999999999999998 -> nan nan invalid
-tan1033 tan nan -0.0 -> nan nan
+tan1033 tan nan -0.0 -> nan -0.0
tan1034 tan nan -2.2999999999999998 -> nan nan
tan1035 tan 0.0 -inf -> 0.0 -1.0
tan1036 tan 0.69999999999999996 -inf -> 0.0 -1.0
@@ -2350,7 +2350,7 @@ tan1040 tan 3.5 -inf -> 0.0 -1.0
tan1041 tan inf -inf -> -0.0 -1.0 ignore-real-sign
tan1042 tan nan -inf -> -0.0 -1.0 ignore-real-sign
tan1043 tan -0.0 -0.0 -> -0.0 -0.0
-tan1044 tan -inf -0.0 -> nan nan invalid
+tan1044 tan -inf -0.0 -> nan -0.0 invalid
tan1045 tan -inf -2.2999999999999998 -> nan nan invalid
tan1046 tan -0.0 -inf -> -0.0 -1.0
tan1047 tan -0.69999999999999996 -inf -> -0.0 -1.0
diff --git a/Misc/NEWS.d/next/Library/2024-08-03-06-51-08.gh-issue-122637.gpas8J.rst b/Misc/NEWS.d/next/Library/2024-08-03-06-51-08.gh-issue-122637.gpas8J.rst
new file mode 100644
index 00000000000000..2ded33d75b35bd
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-08-03-06-51-08.gh-issue-122637.gpas8J.rst
@@ -0,0 +1 @@
+Adjust ``cmath.tanh(nanj)`` and ``cmath.tanh(infj)`` for recent C standards.
diff --git a/Modules/cmathmodule.c b/Modules/cmathmodule.c
index 16ac663bdb9949..3c7f0bb6453ef0 100644
--- a/Modules/cmathmodule.c
+++ b/Modules/cmathmodule.c
@@ -1339,8 +1339,8 @@ cmath_exec(PyObject *mod)
INIT_SPECIAL_VALUES(tanh_special_values, {
C(-1.,0.) C(U,U) C(-1.,-0.) C(-1.,0.) C(U,U) C(-1.,0.) C(-1.,0.)
C(N,N) C(U,U) C(U,U) C(U,U) C(U,U) C(N,N) C(N,N)
- C(N,N) C(U,U) C(-0.,-0.) C(-0.,0.) C(U,U) C(N,N) C(N,N)
- C(N,N) C(U,U) C(0.,-0.) C(0.,0.) C(U,U) C(N,N) C(N,N)
+ C(-0.0,N) C(U,U) C(-0.,-0.) C(-0.,0.) C(U,U) C(-0.0,N) C(-0.,N)
+ C(0.0,N) C(U,U) C(0.,-0.) C(0.,0.) C(U,U) C(0.0,N) C(0.,N)
C(N,N) C(U,U) C(U,U) C(U,U) C(U,U) C(N,N) C(N,N)
C(1.,0.) C(U,U) C(1.,-0.) C(1.,0.) C(U,U) C(1.,0.) C(1.,0.)
C(N,N) C(N,N) C(N,-0.) C(N,0.) C(N,N) C(N,N) C(N,N)
From f5c39b3e9cc88d1eaa9229d610b0221305a83ad9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Libor=20Mart=C3=ADnek?=
Date: Sun, 4 Aug 2024 14:02:29 +0000
Subject: [PATCH 127/139] gh-122661: Remove GNU make-specific directive from
Doc/Makefile (#122662)
---
Doc/Makefile | 36 ++++++++++++++++++++----------------
1 file changed, 20 insertions(+), 16 deletions(-)
diff --git a/Doc/Makefile b/Doc/Makefile
index c70768754834dd..b2ee3fe7d28ed0 100644
--- a/Doc/Makefile
+++ b/Doc/Makefile
@@ -6,6 +6,7 @@
# You can set these variables from the command line.
PYTHON = python3
VENVDIR = ./venv
+UV = uv
SPHINXBUILD = PATH=$(VENVDIR)/bin:$$PATH sphinx-build
BLURB = PATH=$(VENVDIR)/bin:$$PATH blurb
JOBS = auto
@@ -150,14 +151,10 @@ gettext: build
htmlview: html
$(PYTHON) -c "import os, webbrowser; webbrowser.open('file://' + os.path.realpath('build/html/index.html'))"
-.PHONY: ensure-sphinx-autobuild
-ensure-sphinx-autobuild: venv
- $(call ensure_package,sphinx-autobuild)
-
.PHONY: htmllive
htmllive: SPHINXBUILD = $(VENVDIR)/bin/sphinx-autobuild
htmllive: SPHINXOPTS = --re-ignore="/venv/" --open-browser --delay 0
-htmllive: ensure-sphinx-autobuild html
+htmllive: _ensure-sphinx-autobuild html
.PHONY: clean
clean: clean-venv
@@ -174,15 +171,15 @@ venv:
echo "To recreate it, remove it first with \`make clean-venv'."; \
else \
echo "Creating venv in $(VENVDIR)"; \
- if uv --version > /dev/null; then \
- uv venv $(VENVDIR); \
- VIRTUAL_ENV=$(VENVDIR) uv pip install -r $(REQUIREMENTS); \
+ if $(UV) --version >/dev/null 2>&1; then \
+ $(UV) venv $(VENVDIR); \
+ VIRTUAL_ENV=$(VENVDIR) $(UV) pip install -r $(REQUIREMENTS); \
else \
$(PYTHON) -m venv $(VENVDIR); \
$(VENVDIR)/bin/python3 -m pip install --upgrade pip; \
$(VENVDIR)/bin/python3 -m pip install -r $(REQUIREMENTS); \
- echo "The venv has been created in the $(VENVDIR) directory"; \
fi; \
+ echo "The venv has been created in the $(VENVDIR) directory"; \
fi
.PHONY: dist
@@ -240,17 +237,24 @@ dist:
rm -r dist/python-$(DISTVERSION)-docs-texinfo
rm dist/python-$(DISTVERSION)-docs-texinfo.tar
-define ensure_package
- if uv --version > /dev/null; then \
- $(VENVDIR)/bin/python3 -m $(1) --version > /dev/null || VIRTUAL_ENV=$(VENVDIR) uv pip install $(1); \
+.PHONY: _ensure-package
+_ensure-package: venv
+ if $(UV) --version >/dev/null 2>&1; then \
+ VIRTUAL_ENV=$(VENVDIR) $(UV) pip install $(PACKAGE); \
else \
- $(VENVDIR)/bin/python3 -m $(1) --version > /dev/null || $(VENVDIR)/bin/python3 -m pip install $(1); \
+ $(VENVDIR)/bin/python3 -m pip install $(PACKAGE); \
fi
-endef
+
+.PHONY: _ensure-pre-commit
+_ensure-pre-commit:
+ make _ensure-package PACKAGE=pre-commit
+
+.PHONY: _ensure-sphinx-autobuild
+_ensure-sphinx-autobuild:
+ make _ensure-package PACKAGE=sphinx-autobuild
.PHONY: check
-check: venv
- $(call ensure_package,pre_commit)
+check: _ensure-pre-commit
$(VENVDIR)/bin/python3 -m pre_commit run --all-files
.PHONY: serve
From 3bde3d8e03eb3d0632d0dced0ab710ab9e3b2894 Mon Sep 17 00:00:00 2001
From: Damien <81557462+Damien-Chen@users.noreply.github.com>
Date: Mon, 5 Aug 2024 00:57:20 +0800
Subject: [PATCH 128/139] Add `3.13` and remove `3.7` in Azure Pipelines
(#122670)
---
.azure-pipelines/ci.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.azure-pipelines/ci.yml b/.azure-pipelines/ci.yml
index b5b2765e43844f..d3e842d9f31d01 100644
--- a/.azure-pipelines/ci.yml
+++ b/.azure-pipelines/ci.yml
@@ -1,4 +1,4 @@
-trigger: ['main', '3.12', '3.11', '3.10', '3.9', '3.8', '3.7']
+trigger: ['main', '3.13', '3.12', '3.11', '3.10', '3.9', '3.8']
jobs:
- job: Prebuild
From d0b92dd5ca46a10558857adeb7bb48ecf39fa783 Mon Sep 17 00:00:00 2001
From: Jonathan Protzenko
Date: Sun, 4 Aug 2024 16:22:51 -0700
Subject: [PATCH 129/139] gh-122573: Require Python 3.10 or newer for Windows
builds (GH-122574)
Match statements in the tooling (Tools/cases_generator/*.py, and `Tools/jit/*.py` in 3.13+) require a more recent Python.
Co-authored-by: Erlend E. Aasland
Co-authored-by: Gregory P. Smith
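
A small sketch of why 3.10 is the floor: structural pattern matching (PEP 634), used by the cases generator, is a syntax error on older interpreters. The function below is illustrative only, not code from the tooling:

    def classify(path):
        # match/case parses only on Python 3.10+.
        match path.rpartition('.')[-1]:
            case 'py':
                return 'python'
            case 'c' | 'h':
                return 'c'
            case _:
                return 'other'

    print(classify('Tools/jit/_targets.py'))  # python
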
---
.../Windows/2024-08-01-10-55-15.gh-issue-122573.4-UCFY.rst | 1 +
PCbuild/find_python.bat | 6 +++---
2 files changed, 4 insertions(+), 3 deletions(-)
create mode 100644 Misc/NEWS.d/next/Windows/2024-08-01-10-55-15.gh-issue-122573.4-UCFY.rst
diff --git a/Misc/NEWS.d/next/Windows/2024-08-01-10-55-15.gh-issue-122573.4-UCFY.rst b/Misc/NEWS.d/next/Windows/2024-08-01-10-55-15.gh-issue-122573.4-UCFY.rst
new file mode 100644
index 00000000000000..5cc69e206debf5
--- /dev/null
+++ b/Misc/NEWS.d/next/Windows/2024-08-01-10-55-15.gh-issue-122573.4-UCFY.rst
@@ -0,0 +1 @@
+The Windows build of CPython now requires 3.10 or newer.
diff --git a/PCbuild/find_python.bat b/PCbuild/find_python.bat
index af85f6d362466e..6db579fa8de08a 100644
--- a/PCbuild/find_python.bat
+++ b/PCbuild/find_python.bat
@@ -39,15 +39,15 @@
@if "%_Py_EXTERNALS_DIR%"=="" (set _Py_EXTERNALS_DIR=%_Py_D%\..\externals)
@rem If we have Python in externals, use that one
-@if exist "%_Py_EXTERNALS_DIR%\pythonx86\tools\python.exe" ("%_Py_EXTERNALS_DIR%\pythonx86\tools\python.exe" -Ec "import sys; assert sys.version_info[:2] >= (3, 8)" >nul 2>nul) && (set PYTHON="%_Py_EXTERNALS_DIR%\pythonx86\tools\python.exe") && (set _Py_Python_Source=found in externals directory) && goto :found || rmdir /Q /S "%_Py_EXTERNALS_DIR%\pythonx86"
+@if exist "%_Py_EXTERNALS_DIR%\pythonx86\tools\python.exe" ("%_Py_EXTERNALS_DIR%\pythonx86\tools\python.exe" -Ec "import sys; assert sys.version_info[:2] >= (3, 10)" >nul 2>nul) && (set PYTHON="%_Py_EXTERNALS_DIR%\pythonx86\tools\python.exe") && (set _Py_Python_Source=found in externals directory) && goto :found || rmdir /Q /S "%_Py_EXTERNALS_DIR%\pythonx86"
@rem If HOST_PYTHON is recent enough, use that
-@if NOT "%HOST_PYTHON%"=="" @%HOST_PYTHON% -Ec "import sys; assert sys.version_info[:2] >= (3, 9)" >nul 2>nul && (set PYTHON="%HOST_PYTHON%") && (set _Py_Python_Source=found as HOST_PYTHON) && goto :found
+@if NOT "%HOST_PYTHON%"=="" @%HOST_PYTHON% -Ec "import sys; assert sys.version_info[:2] >= (3, 10)" >nul 2>nul && (set PYTHON="%HOST_PYTHON%") && (set _Py_Python_Source=found as HOST_PYTHON) && goto :found
@rem If py.exe finds a recent enough version, use that one
@rem It is fine to add new versions to this list when they have released,
@rem but we do not use prerelease builds here.
-@for %%p in (3.12 3.11 3.10 3.9) do @py -%%p -EV >nul 2>&1 && (set PYTHON=py -%%p) && (set _Py_Python_Source=found %%p with py.exe) && goto :found
+@for %%p in (3.12 3.11 3.10) do @py -%%p -EV >nul 2>&1 && (set PYTHON=py -%%p) && (set _Py_Python_Source=found %%p with py.exe) && goto :found
@if NOT exist "%_Py_EXTERNALS_DIR%" mkdir "%_Py_EXTERNALS_DIR%"
@set _Py_NUGET=%NUGET%
From 5207adf228547273b0e8d0253c23c69b95d7fe11 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 5 Aug 2024 09:03:14 +0000
Subject: [PATCH 130/139] build(deps-dev): bump mypy from 1.10.1 to 1.11.1 in
/Tools (#122550)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Alex Waygood
---
Tools/clinic/libclinic/converter.py | 4 +++-
Tools/requirements-dev.txt | 2 +-
2 files changed, 4 insertions(+), 2 deletions(-)
diff --git a/Tools/clinic/libclinic/converter.py b/Tools/clinic/libclinic/converter.py
index 86853bb4fba253..2abf06dc4e89a2 100644
--- a/Tools/clinic/libclinic/converter.py
+++ b/Tools/clinic/libclinic/converter.py
@@ -545,7 +545,9 @@ def closure(f: CConverterClassT) -> CConverterClassT:
if not kwargs:
added_f = f
else:
- added_f = functools.partial(f, **kwargs)
+ # type ignore due to a mypy regression :(
+ # https://github.com/python/mypy/issues/17646
+ added_f = functools.partial(f, **kwargs) # type: ignore[misc]
if format_unit:
legacy_converters[format_unit] = added_f
return f
diff --git a/Tools/requirements-dev.txt b/Tools/requirements-dev.txt
index de8496a17b85ef..cbf4072b500061 100644
--- a/Tools/requirements-dev.txt
+++ b/Tools/requirements-dev.txt
@@ -1,6 +1,6 @@
# Requirements file for external linters and checks we run on
# Tools/clinic, Tools/cases_generator/, and Tools/peg_generator/ in CI
-mypy==1.10.1
+mypy==1.11.1
# needed for peg_generator:
types-psutil==6.0.0.20240621
From 1422500d020bd199b26357fc387f8b79b82226cd Mon Sep 17 00:00:00 2001
From: Irit Katriel <1055913+iritkatriel@users.noreply.github.com>
Date: Mon, 5 Aug 2024 10:17:55 +0100
Subject: [PATCH 131/139] gh-121367: [doc] BUILD_TUPLE arg can be 0 (#122663)
---
Doc/library/dis.rst | 12 ++++++++----
1 file changed, 8 insertions(+), 4 deletions(-)
diff --git a/Doc/library/dis.rst b/Doc/library/dis.rst
index 26b13c87181000..440ca233584e57 100644
--- a/Doc/library/dis.rst
+++ b/Doc/library/dis.rst
@@ -1081,11 +1081,15 @@ iterations of the loop.
.. opcode:: BUILD_TUPLE (count)
Creates a tuple consuming *count* items from the stack, and pushes the
- resulting tuple onto the stack.::
+ resulting tuple onto the stack::
- assert count > 0
- STACK, values = STACK[:-count], STACK[-count:]
- STACK.append(tuple(values))
+ if count == 0:
+ value = ()
+ else:
+       value = tuple(STACK[-count:])
+       STACK = STACK[:-count]
+
+ STACK.append(value)
.. opcode:: BUILD_LIST (count)
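
As a quick way to see the opcode in context (exact bytecode varies between CPython versions, but a two-element tuple built from locals compiles to BUILD_TUPLE 2):

    import dis

    def pair(a, b):
        return (a, b)

    dis.dis(pair)  # the output includes a BUILD_TUPLE 2 instruction
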
From 1bb955a2fe0237721c141fdfe520fd3ba46db11e Mon Sep 17 00:00:00 2001
From: Serhiy Storchaka
Date: Mon, 5 Aug 2024 16:21:32 +0300
Subject: [PATCH 132/139] gh-122459: Optimize pickling by name objects without
__module__ (GH-122460)
---
Lib/pickle.py | 97 ++++---
Lib/test/pickletester.py | 2 +-
...-07-30-15-57-07.gh-issue-122459.AYIoeN.rst | 2 +
Modules/_pickle.c | 249 +++++++++---------
4 files changed, 173 insertions(+), 177 deletions(-)
create mode 100644 Misc/NEWS.d/next/Library/2024-07-30-15-57-07.gh-issue-122459.AYIoeN.rst
diff --git a/Lib/pickle.py b/Lib/pickle.py
index 299c9e0e5e5641..b8e114a79f2202 100644
--- a/Lib/pickle.py
+++ b/Lib/pickle.py
@@ -313,38 +313,45 @@ def load_frame(self, frame_size):
# Tools used for pickling.
-def _getattribute(obj, name):
- top = obj
- for subpath in name.split('.'):
- if subpath == '':
- raise AttributeError("Can't get local attribute {!r} on {!r}"
- .format(name, top))
- try:
- parent = obj
- obj = getattr(obj, subpath)
- except AttributeError:
- raise AttributeError("Can't get attribute {!r} on {!r}"
- .format(name, top)) from None
- return obj, parent
+def _getattribute(obj, dotted_path):
+ for subpath in dotted_path:
+ obj = getattr(obj, subpath)
+ return obj
def whichmodule(obj, name):
"""Find the module an object belong to."""
+ dotted_path = name.split('.')
module_name = getattr(obj, '__module__', None)
- if module_name is not None:
- return module_name
- # Protect the iteration by using a list copy of sys.modules against dynamic
- # modules that trigger imports of other modules upon calls to getattr.
- for module_name, module in sys.modules.copy().items():
- if (module_name == '__main__'
- or module_name == '__mp_main__' # bpo-42406
- or module is None):
- continue
- try:
- if _getattribute(module, name)[0] is obj:
- return module_name
- except AttributeError:
- pass
- return '__main__'
+ if module_name is None and '' not in dotted_path:
+ # Protect the iteration by using a list copy of sys.modules against dynamic
+ # modules that trigger imports of other modules upon calls to getattr.
+ for module_name, module in sys.modules.copy().items():
+ if (module_name == '__main__'
+ or module_name == '__mp_main__' # bpo-42406
+ or module is None):
+ continue
+ try:
+ if _getattribute(module, dotted_path) is obj:
+ return module_name
+ except AttributeError:
+ pass
+ module_name = '__main__'
+ elif module_name is None:
+ module_name = '__main__'
+
+ try:
+ __import__(module_name, level=0)
+ module = sys.modules[module_name]
+ if _getattribute(module, dotted_path) is obj:
+ return module_name
+ except (ImportError, KeyError, AttributeError):
+ raise PicklingError(
+ "Can't pickle %r: it's not found as %s.%s" %
+ (obj, module_name, name)) from None
+
+ raise PicklingError(
+ "Can't pickle %r: it's not the same object as %s.%s" %
+ (obj, module_name, name))
def encode_long(x):
r"""Encode a long to a two's complement little-endian binary string.
@@ -1074,24 +1081,10 @@ def save_global(self, obj, name=None):
if name is None:
name = getattr(obj, '__qualname__', None)
- if name is None:
- name = obj.__name__
+ if name is None:
+ name = obj.__name__
module_name = whichmodule(obj, name)
- try:
- __import__(module_name, level=0)
- module = sys.modules[module_name]
- obj2, parent = _getattribute(module, name)
- except (ImportError, KeyError, AttributeError):
- raise PicklingError(
- "Can't pickle %r: it's not found as %s.%s" %
- (obj, module_name, name)) from None
- else:
- if obj2 is not obj:
- raise PicklingError(
- "Can't pickle %r: it's not the same object as %s.%s" %
- (obj, module_name, name))
-
if self.proto >= 2:
code = _extension_registry.get((module_name, name))
if code:
@@ -1103,10 +1096,7 @@ def save_global(self, obj, name=None):
else:
write(EXT4 + pack("<i", code))
return
-        lastname = name.rpartition('.')[2]
-        if parent is module:
-            name = lastname
-        # Non-ASCII identifiers are supported only with protocols >= 3.
+
if self.proto >= 4:
self.save(module_name)
self.save(name)
@@ -1616,7 +1606,16 @@ def find_class(self, module, name):
module = _compat_pickle.IMPORT_MAPPING[module]
__import__(module, level=0)
if self.proto >= 4:
- return _getattribute(sys.modules[module], name)[0]
+ module = sys.modules[module]
+ dotted_path = name.split('.')
+ if '' in dotted_path:
+ raise AttributeError(
+ f"Can't get local attribute {name!r} on {module!r}")
+ try:
+ return _getattribute(module, dotted_path)
+ except AttributeError:
+ raise AttributeError(
+ f"Can't get attribute {name!r} on {module!r}") from None
else:
return getattr(sys.modules[module], name)
diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py
index 3c936b3bc4029e..db42f13b0b98ab 100644
--- a/Lib/test/pickletester.py
+++ b/Lib/test/pickletester.py
@@ -2068,7 +2068,7 @@ def f():
self.dumps(f, proto)
self.assertIn(str(cm.exception), {
f"Can't pickle {f!r}: it's not found as {__name__}.{f.__qualname__}",
- f"Can't get local object {f.__qualname__!r}"})
+ f"Can't get local attribute {f.__qualname__!r} on {sys.modules[__name__]}"})
# Same without a __module__ attribute (exercises a different path
# in _pickle.c).
del f.__module__
diff --git a/Misc/NEWS.d/next/Library/2024-07-30-15-57-07.gh-issue-122459.AYIoeN.rst b/Misc/NEWS.d/next/Library/2024-07-30-15-57-07.gh-issue-122459.AYIoeN.rst
new file mode 100644
index 00000000000000..595504048302da
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2024-07-30-15-57-07.gh-issue-122459.AYIoeN.rst
@@ -0,0 +1,2 @@
+Optimize :mod:`pickling <pickle>` by name objects without the ``__module__``
+attribute.
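
A simplified sketch of the lookup that whichmodule() now performs for objects lacking ``__module__`` (an illustration of the pure-Python logic above, not the actual implementation):

    import sys

    def which_module(obj, dotted_path):
        module_name = getattr(obj, '__module__', None)
        if module_name is not None:
            return module_name
        # Scan a snapshot of sys.modules, skipping main-like and dummy entries.
        for name, module in sys.modules.copy().items():
            if name in ('__main__', '__mp_main__') or module is None:
                continue
            try:
                target = module
                for part in dotted_path:
                    target = getattr(target, part)
            except AttributeError:
                continue
            if target is obj:
                return name
        return '__main__'

    import collections
    print(which_module(collections.OrderedDict, ['OrderedDict']))  # collections
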
diff --git a/Modules/_pickle.c b/Modules/_pickle.c
index 50c73dca0db281..5d9ee8cb6c679d 100644
--- a/Modules/_pickle.c
+++ b/Modules/_pickle.c
@@ -1803,13 +1803,15 @@ memo_put(PickleState *st, PicklerObject *self, PyObject *obj)
}
static PyObject *
-get_dotted_path(PyObject *obj, PyObject *name)
+get_dotted_path(PyObject *name)
+{
+ return PyUnicode_Split(name, _Py_LATIN1_CHR('.'), -1);
+}
+
+static int
+check_dotted_path(PyObject *obj, PyObject *name, PyObject *dotted_path)
{
- PyObject *dotted_path;
Py_ssize_t i, n;
- dotted_path = PyUnicode_Split(name, _Py_LATIN1_CHR('.'), -1);
- if (dotted_path == NULL)
- return NULL;
n = PyList_GET_SIZE(dotted_path);
assert(n >= 1);
for (i = 0; i < n; i++) {
@@ -1821,61 +1823,33 @@ get_dotted_path(PyObject *obj, PyObject *name)
else
PyErr_Format(PyExc_AttributeError,
"Can't get local attribute %R on %R", name, obj);
- Py_DECREF(dotted_path);
- return NULL;
+ return -1;
}
}
- return dotted_path;
+ return 0;
}
static PyObject *
-get_deep_attribute(PyObject *obj, PyObject *names, PyObject **pparent)
+getattribute(PyObject *obj, PyObject *names)
{
Py_ssize_t i, n;
- PyObject *parent = NULL;
assert(PyList_CheckExact(names));
Py_INCREF(obj);
n = PyList_GET_SIZE(names);
for (i = 0; i < n; i++) {
PyObject *name = PyList_GET_ITEM(names, i);
- Py_XSETREF(parent, obj);
+ PyObject *parent = obj;
(void)PyObject_GetOptionalAttr(parent, name, &obj);
+ Py_DECREF(parent);
if (obj == NULL) {
- Py_DECREF(parent);
return NULL;
}
}
- if (pparent != NULL)
- *pparent = parent;
- else
- Py_XDECREF(parent);
return obj;
}
-static PyObject *
-getattribute(PyObject *obj, PyObject *name, int allow_qualname)
-{
- PyObject *dotted_path, *attr;
-
- if (allow_qualname) {
- dotted_path = get_dotted_path(obj, name);
- if (dotted_path == NULL)
- return NULL;
- attr = get_deep_attribute(obj, dotted_path, NULL);
- Py_DECREF(dotted_path);
- }
- else {
- (void)PyObject_GetOptionalAttr(obj, name, &attr);
- }
- if (attr == NULL && !PyErr_Occurred()) {
- PyErr_Format(PyExc_AttributeError,
- "Can't get attribute %R on %R", name, obj);
- }
- return attr;
-}
-
static int
_checkmodule(PyObject *module_name, PyObject *module,
PyObject *global, PyObject *dotted_path)
@@ -1888,7 +1862,7 @@ _checkmodule(PyObject *module_name, PyObject *module,
return -1;
}
- PyObject *candidate = get_deep_attribute(module, dotted_path, NULL);
+ PyObject *candidate = getattribute(module, dotted_path);
if (candidate == NULL) {
return -1;
}
@@ -1901,7 +1875,7 @@ _checkmodule(PyObject *module_name, PyObject *module,
}
static PyObject *
-whichmodule(PyObject *global, PyObject *dotted_path)
+whichmodule(PickleState *st, PyObject *global, PyObject *global_name, PyObject *dotted_path)
{
PyObject *module_name;
PyObject *module = NULL;
@@ -1911,63 +1885,106 @@ whichmodule(PyObject *global, PyObject *dotted_path)
if (PyObject_GetOptionalAttr(global, &_Py_ID(__module__), &module_name) < 0) {
return NULL;
}
- if (module_name) {
+ if (module_name == NULL || module_name == Py_None) {
/* In some rare cases (e.g., bound methods of extension types),
__module__ can be None. If it is so, then search sys.modules for
the module of global. */
- if (module_name != Py_None)
- return module_name;
Py_CLEAR(module_name);
- }
- assert(module_name == NULL);
-
- /* Fallback on walking sys.modules */
- PyThreadState *tstate = _PyThreadState_GET();
- modules = _PySys_GetAttr(tstate, &_Py_ID(modules));
- if (modules == NULL) {
- PyErr_SetString(PyExc_RuntimeError, "unable to get sys.modules");
- return NULL;
- }
- if (PyDict_CheckExact(modules)) {
- i = 0;
- while (PyDict_Next(modules, &i, &module_name, &module)) {
- if (_checkmodule(module_name, module, global, dotted_path) == 0) {
- return Py_NewRef(module_name);
- }
- if (PyErr_Occurred()) {
- return NULL;
- }
+ if (check_dotted_path(NULL, global_name, dotted_path) < 0) {
+ return NULL;
}
- }
- else {
- PyObject *iterator = PyObject_GetIter(modules);
- if (iterator == NULL) {
+ PyThreadState *tstate = _PyThreadState_GET();
+ modules = _PySys_GetAttr(tstate, &_Py_ID(modules));
+ if (modules == NULL) {
+ PyErr_SetString(PyExc_RuntimeError, "unable to get sys.modules");
return NULL;
}
- while ((module_name = PyIter_Next(iterator))) {
- module = PyObject_GetItem(modules, module_name);
- if (module == NULL) {
+ if (PyDict_CheckExact(modules)) {
+ i = 0;
+ while (PyDict_Next(modules, &i, &module_name, &module)) {
+ Py_INCREF(module_name);
+ Py_INCREF(module);
+ if (_checkmodule(module_name, module, global, dotted_path) == 0) {
+ Py_DECREF(module);
+ return module_name;
+ }
+ Py_DECREF(module);
Py_DECREF(module_name);
- Py_DECREF(iterator);
+ if (PyErr_Occurred()) {
+ return NULL;
+ }
+ }
+ }
+ else {
+ PyObject *iterator = PyObject_GetIter(modules);
+ if (iterator == NULL) {
return NULL;
}
- if (_checkmodule(module_name, module, global, dotted_path) == 0) {
+ while ((module_name = PyIter_Next(iterator))) {
+ module = PyObject_GetItem(modules, module_name);
+ if (module == NULL) {
+ Py_DECREF(module_name);
+ Py_DECREF(iterator);
+ return NULL;
+ }
+ if (_checkmodule(module_name, module, global, dotted_path) == 0) {
+ Py_DECREF(module);
+ Py_DECREF(iterator);
+ return module_name;
+ }
Py_DECREF(module);
- Py_DECREF(iterator);
- return module_name;
- }
- Py_DECREF(module);
- Py_DECREF(module_name);
- if (PyErr_Occurred()) {
- Py_DECREF(iterator);
- return NULL;
+ Py_DECREF(module_name);
+ if (PyErr_Occurred()) {
+ Py_DECREF(iterator);
+ return NULL;
+ }
}
+ Py_DECREF(iterator);
+ }
+ if (PyErr_Occurred()) {
+ return NULL;
}
- Py_DECREF(iterator);
+
+ /* If no module is found, use __main__. */
+ module_name = Py_NewRef(&_Py_ID(__main__));
}
- /* If no module is found, use __main__. */
- return &_Py_ID(__main__);
+ /* XXX: Change to use the import C API directly with level=0 to disallow
+ relative imports.
+
+ XXX: PyImport_ImportModuleLevel could be used. However, this bypasses
+ builtins.__import__. Therefore, _pickle, unlike pickle.py, will ignore
+ custom import functions (IMHO, this would be a nice security
+ feature). The import C API would need to be extended to support the
+ extra parameters of __import__ to fix that. */
+ module = PyImport_Import(module_name);
+ if (module == NULL) {
+ PyErr_Format(st->PicklingError,
+ "Can't pickle %R: import of module %R failed",
+ global, module_name);
+ return NULL;
+ }
+ if (check_dotted_path(module, global_name, dotted_path) < 0) {
+ Py_DECREF(module);
+ return NULL;
+ }
+ PyObject *actual = getattribute(module, dotted_path);
+ Py_DECREF(module);
+ if (actual == NULL) {
+ PyErr_Format(st->PicklingError,
+ "Can't pickle %R: attribute lookup %S on %S failed",
+ global, global_name, module_name);
+ return NULL;
+ }
+ if (actual != global) {
+ Py_DECREF(actual);
+ PyErr_Format(st->PicklingError,
+ "Can't pickle %R: it's not the same object as %S.%S",
+ global, module_name, global_name);
+ return NULL;
+ }
+ Py_DECREF(actual);
+ return module_name;
}
/* fast_save_enter() and fast_save_leave() are guards against recursive
@@ -3590,10 +3607,7 @@ save_global(PickleState *st, PicklerObject *self, PyObject *obj,
{
PyObject *global_name = NULL;
PyObject *module_name = NULL;
- PyObject *module = NULL;
- PyObject *parent = NULL;
PyObject *dotted_path = NULL;
- PyObject *cls;
int status = 0;
const char global_op = GLOBAL;
@@ -3611,44 +3625,13 @@ save_global(PickleState *st, PicklerObject *self, PyObject *obj,
}
}
- dotted_path = get_dotted_path(module, global_name);
+ dotted_path = get_dotted_path(global_name);
if (dotted_path == NULL)
goto error;
- module_name = whichmodule(obj, dotted_path);
+ module_name = whichmodule(st, obj, global_name, dotted_path);
if (module_name == NULL)
goto error;
- /* XXX: Change to use the import C API directly with level=0 to disallow
- relative imports.
-
- XXX: PyImport_ImportModuleLevel could be used. However, this bypasses
- builtins.__import__. Therefore, _pickle, unlike pickle.py, will ignore
- custom import functions (IMHO, this would be a nice security
- feature). The import C API would need to be extended to support the
- extra parameters of __import__ to fix that. */
- module = PyImport_Import(module_name);
- if (module == NULL) {
- PyErr_Format(st->PicklingError,
- "Can't pickle %R: import of module %R failed",
- obj, module_name);
- goto error;
- }
- cls = get_deep_attribute(module, dotted_path, &parent);
- if (cls == NULL) {
- PyErr_Format(st->PicklingError,
- "Can't pickle %R: attribute lookup %S on %S failed",
- obj, global_name, module_name);
- goto error;
- }
- if (cls != obj) {
- Py_DECREF(cls);
- PyErr_Format(st->PicklingError,
- "Can't pickle %R: it's not the same object as %S.%S",
- obj, module_name, global_name);
- goto error;
- }
- Py_DECREF(cls);
-
if (self->proto >= 2) {
/* See whether this is in the extension registry, and if
* so generate an EXT opcode.
@@ -3720,12 +3703,6 @@ save_global(PickleState *st, PicklerObject *self, PyObject *obj,
}
else {
gen_global:
- if (parent == module) {
- Py_SETREF(global_name,
- Py_NewRef(PyList_GET_ITEM(dotted_path,
- PyList_GET_SIZE(dotted_path) - 1)));
- Py_CLEAR(dotted_path);
- }
if (self->proto >= 4) {
const char stack_global_op = STACK_GLOBAL;
@@ -3845,8 +3822,6 @@ save_global(PickleState *st, PicklerObject *self, PyObject *obj,
}
Py_XDECREF(module_name);
Py_XDECREF(global_name);
- Py_XDECREF(module);
- Py_XDECREF(parent);
Py_XDECREF(dotted_path);
return status;
@@ -7063,7 +7038,27 @@ _pickle_Unpickler_find_class_impl(UnpicklerObject *self, PyTypeObject *cls,
if (module == NULL) {
return NULL;
}
- global = getattribute(module, global_name, self->proto >= 4);
+ if (self->proto >= 4) {
+ PyObject *dotted_path = get_dotted_path(global_name);
+ if (dotted_path == NULL) {
+ Py_DECREF(module);
+ return NULL;
+ }
+ if (check_dotted_path(module, global_name, dotted_path) < 0) {
+ Py_DECREF(dotted_path);
+ Py_DECREF(module);
+ return NULL;
+ }
+ global = getattribute(module, dotted_path);
+ Py_DECREF(dotted_path);
+ if (global == NULL && !PyErr_Occurred()) {
+ PyErr_Format(PyExc_AttributeError,
+ "Can't get attribute %R on %R", global_name, module);
+ }
+ }
+ else {
+ global = PyObject_GetAttr(module, global_name);
+ }
Py_DECREF(module);
return global;
}
From 5bd72912a1a85be96092de302608a4298741c6cd Mon Sep 17 00:00:00 2001
From: Mark Shannon
Date: Mon, 5 Aug 2024 16:27:48 +0100
Subject: [PATCH 133/139] GH-122616: Simplify LOAD_ATTR_WITH_HINT and
STORE_ATTR_WITH_HINT (GH-122620)
---
Python/bytecodes.c | 39 ++++++++----------------
Python/executor_cases.c.h | 61 +++++++++++++-------------------------
Python/generated_cases.c.h | 39 ++++++++----------------
3 files changed, 44 insertions(+), 95 deletions(-)
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index 48b74f93b92ce8..996f997d0ca8de 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -2053,16 +2053,10 @@ dummy_func(
PyDictObject *dict = _PyObject_GetManagedDict(owner_o);
DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries);
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
- if (DK_IS_UNICODE(dict->ma_keys)) {
- PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint;
- DEOPT_IF(ep->me_key != name);
- attr_o = ep->me_value;
- }
- else {
- PyDictKeyEntry *ep = DK_ENTRIES(dict->ma_keys) + hint;
- DEOPT_IF(ep->me_key != name);
- attr_o = ep->me_value;
- }
+ DEOPT_IF(!DK_IS_UNICODE(dict->ma_keys));
+ PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint;
+ DEOPT_IF(ep->me_key != name);
+ attr_o = ep->me_value;
DEOPT_IF(attr_o == NULL);
STAT_INC(LOAD_ATTR, hit);
Py_INCREF(attr_o);
@@ -2214,23 +2208,14 @@ dummy_func(
DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries);
PyObject *old_value;
uint64_t new_version;
- if (DK_IS_UNICODE(dict->ma_keys)) {
- PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint;
- DEOPT_IF(ep->me_key != name);
- old_value = ep->me_value;
- DEOPT_IF(old_value == NULL);
- new_version = _PyDict_NotifyEvent(tstate->interp, PyDict_EVENT_MODIFIED, dict, name, PyStackRef_AsPyObjectBorrow(value));
- ep->me_value = PyStackRef_AsPyObjectSteal(value);
- }
- else {
- PyDictKeyEntry *ep = DK_ENTRIES(dict->ma_keys) + hint;
- DEOPT_IF(ep->me_key != name);
- old_value = ep->me_value;
- DEOPT_IF(old_value == NULL);
- new_version = _PyDict_NotifyEvent(tstate->interp, PyDict_EVENT_MODIFIED, dict, name, PyStackRef_AsPyObjectBorrow(value));
- ep->me_value = PyStackRef_AsPyObjectSteal(value);
- }
- Py_DECREF(old_value);
+ DEOPT_IF(!DK_IS_UNICODE(dict->ma_keys));
+ PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint;
+ DEOPT_IF(ep->me_key != name);
+ old_value = ep->me_value;
+ PyDict_WatchEvent event = old_value == NULL ? PyDict_EVENT_ADDED : PyDict_EVENT_MODIFIED;
+ new_version = _PyDict_NotifyEvent(tstate->interp, event, dict, name, PyStackRef_AsPyObjectBorrow(value));
+ ep->me_value = PyStackRef_AsPyObjectSteal(value);
+ Py_XDECREF(old_value);
STAT_INC(STORE_ATTR, hit);
/* Ensure dict is GC tracked if it needs to be */
if (!_PyObject_GC_IS_TRACKED(dict) && _PyObject_GC_MAY_BE_TRACKED(PyStackRef_AsPyObjectBorrow(value))) {
diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h
index 7f89196192504b..cbee77d5cf67fc 100644
--- a/Python/executor_cases.c.h
+++ b/Python/executor_cases.c.h
@@ -2367,22 +2367,16 @@
JUMP_TO_JUMP_TARGET();
}
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
- if (DK_IS_UNICODE(dict->ma_keys)) {
- PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint;
- if (ep->me_key != name) {
- UOP_STAT_INC(uopcode, miss);
- JUMP_TO_JUMP_TARGET();
- }
- attr_o = ep->me_value;
+ if (!DK_IS_UNICODE(dict->ma_keys)) {
+ UOP_STAT_INC(uopcode, miss);
+ JUMP_TO_JUMP_TARGET();
}
- else {
- PyDictKeyEntry *ep = DK_ENTRIES(dict->ma_keys) + hint;
- if (ep->me_key != name) {
- UOP_STAT_INC(uopcode, miss);
- JUMP_TO_JUMP_TARGET();
- }
- attr_o = ep->me_value;
+ PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint;
+ if (ep->me_key != name) {
+ UOP_STAT_INC(uopcode, miss);
+ JUMP_TO_JUMP_TARGET();
}
+ attr_o = ep->me_value;
if (attr_o == NULL) {
UOP_STAT_INC(uopcode, miss);
JUMP_TO_JUMP_TARGET();
@@ -2601,35 +2595,20 @@
}
PyObject *old_value;
uint64_t new_version;
- if (DK_IS_UNICODE(dict->ma_keys)) {
- PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint;
- if (ep->me_key != name) {
- UOP_STAT_INC(uopcode, miss);
- JUMP_TO_JUMP_TARGET();
- }
- old_value = ep->me_value;
- if (old_value == NULL) {
- UOP_STAT_INC(uopcode, miss);
- JUMP_TO_JUMP_TARGET();
- }
- new_version = _PyDict_NotifyEvent(tstate->interp, PyDict_EVENT_MODIFIED, dict, name, PyStackRef_AsPyObjectBorrow(value));
- ep->me_value = PyStackRef_AsPyObjectSteal(value);
+ if (!DK_IS_UNICODE(dict->ma_keys)) {
+ UOP_STAT_INC(uopcode, miss);
+ JUMP_TO_JUMP_TARGET();
}
- else {
- PyDictKeyEntry *ep = DK_ENTRIES(dict->ma_keys) + hint;
- if (ep->me_key != name) {
- UOP_STAT_INC(uopcode, miss);
- JUMP_TO_JUMP_TARGET();
- }
- old_value = ep->me_value;
- if (old_value == NULL) {
- UOP_STAT_INC(uopcode, miss);
- JUMP_TO_JUMP_TARGET();
- }
- new_version = _PyDict_NotifyEvent(tstate->interp, PyDict_EVENT_MODIFIED, dict, name, PyStackRef_AsPyObjectBorrow(value));
- ep->me_value = PyStackRef_AsPyObjectSteal(value);
+ PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint;
+ if (ep->me_key != name) {
+ UOP_STAT_INC(uopcode, miss);
+ JUMP_TO_JUMP_TARGET();
}
- Py_DECREF(old_value);
+ old_value = ep->me_value;
+ PyDict_WatchEvent event = old_value == NULL ? PyDict_EVENT_ADDED : PyDict_EVENT_MODIFIED;
+ new_version = _PyDict_NotifyEvent(tstate->interp, event, dict, name, PyStackRef_AsPyObjectBorrow(value));
+ ep->me_value = PyStackRef_AsPyObjectSteal(value);
+ Py_XDECREF(old_value);
STAT_INC(STORE_ATTR, hit);
/* Ensure dict is GC tracked if it needs to be */
if (!_PyObject_GC_IS_TRACKED(dict) && _PyObject_GC_MAY_BE_TRACKED(PyStackRef_AsPyObjectBorrow(value))) {
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h
index bed194e34d5376..879c40ab0cb6ba 100644
--- a/Python/generated_cases.c.h
+++ b/Python/generated_cases.c.h
@@ -4907,16 +4907,10 @@
PyDictObject *dict = _PyObject_GetManagedDict(owner_o);
DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries, LOAD_ATTR);
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
- if (DK_IS_UNICODE(dict->ma_keys)) {
- PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint;
- DEOPT_IF(ep->me_key != name, LOAD_ATTR);
- attr_o = ep->me_value;
- }
- else {
- PyDictKeyEntry *ep = DK_ENTRIES(dict->ma_keys) + hint;
- DEOPT_IF(ep->me_key != name, LOAD_ATTR);
- attr_o = ep->me_value;
- }
+ DEOPT_IF(!DK_IS_UNICODE(dict->ma_keys), LOAD_ATTR);
+ PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint;
+ DEOPT_IF(ep->me_key != name, LOAD_ATTR);
+ attr_o = ep->me_value;
DEOPT_IF(attr_o == NULL, LOAD_ATTR);
STAT_INC(LOAD_ATTR, hit);
Py_INCREF(attr_o);
@@ -6411,23 +6405,14 @@
DEOPT_IF(hint >= (size_t)dict->ma_keys->dk_nentries, STORE_ATTR);
PyObject *old_value;
uint64_t new_version;
- if (DK_IS_UNICODE(dict->ma_keys)) {
- PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint;
- DEOPT_IF(ep->me_key != name, STORE_ATTR);
- old_value = ep->me_value;
- DEOPT_IF(old_value == NULL, STORE_ATTR);
- new_version = _PyDict_NotifyEvent(tstate->interp, PyDict_EVENT_MODIFIED, dict, name, PyStackRef_AsPyObjectBorrow(value));
- ep->me_value = PyStackRef_AsPyObjectSteal(value);
- }
- else {
- PyDictKeyEntry *ep = DK_ENTRIES(dict->ma_keys) + hint;
- DEOPT_IF(ep->me_key != name, STORE_ATTR);
- old_value = ep->me_value;
- DEOPT_IF(old_value == NULL, STORE_ATTR);
- new_version = _PyDict_NotifyEvent(tstate->interp, PyDict_EVENT_MODIFIED, dict, name, PyStackRef_AsPyObjectBorrow(value));
- ep->me_value = PyStackRef_AsPyObjectSteal(value);
- }
- Py_DECREF(old_value);
+ DEOPT_IF(!DK_IS_UNICODE(dict->ma_keys), STORE_ATTR);
+ PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint;
+ DEOPT_IF(ep->me_key != name, STORE_ATTR);
+ old_value = ep->me_value;
+ PyDict_WatchEvent event = old_value == NULL ? PyDict_EVENT_ADDED : PyDict_EVENT_MODIFIED;
+ new_version = _PyDict_NotifyEvent(tstate->interp, event, dict, name, PyStackRef_AsPyObjectBorrow(value));
+ ep->me_value = PyStackRef_AsPyObjectSteal(value);
+ Py_XDECREF(old_value);
STAT_INC(STORE_ATTR, hit);
/* Ensure dict is GC tracked if it needs to be */
if (!_PyObject_GC_IS_TRACKED(dict) && _PyObject_GC_MAY_BE_TRACKED(PyStackRef_AsPyObjectBorrow(value))) {
From 44659d392751f0161a0f958fec39ad013da45427 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Miro=20Hron=C4=8Dok?=
Date: Tue, 6 Aug 2024 01:10:40 +0200
Subject: [PATCH 134/139] GH-118943: Handle races when moving jit_stencils.h
(GH-120690)
Co-authored-by: Kirill Podoprigora
---
.../Build/2024-06-18-15-28-25.gh-issue-118943.aie7nn.rst | 3 +++
Tools/jit/_targets.py | 7 ++++++-
2 files changed, 9 insertions(+), 1 deletion(-)
create mode 100644 Misc/NEWS.d/next/Build/2024-06-18-15-28-25.gh-issue-118943.aie7nn.rst
diff --git a/Misc/NEWS.d/next/Build/2024-06-18-15-28-25.gh-issue-118943.aie7nn.rst b/Misc/NEWS.d/next/Build/2024-06-18-15-28-25.gh-issue-118943.aie7nn.rst
new file mode 100644
index 00000000000000..997c990a96e476
--- /dev/null
+++ b/Misc/NEWS.d/next/Build/2024-06-18-15-28-25.gh-issue-118943.aie7nn.rst
@@ -0,0 +1,3 @@
+Fix a possible race condition affecting parallel builds configured with
+``--enable-experimental-jit``, in which :exc:`FileNotFoundError` could be caused by
+another process already moving ``jit_stencils.h.new`` to ``jit_stencils.h``.
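
A minimal sketch of the race-tolerant rename described above, using hypothetical paths (the real change is in Tools/jit/_targets.py):

    from pathlib import Path

    def publish(new: Path, final: Path) -> None:
        try:
            new.replace(final)
        except FileNotFoundError:
            # Another parallel build probably moved its own copy into place.
            if not final.is_file():
                raise
        finally:
            new.unlink(missing_ok=True)
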
diff --git a/Tools/jit/_targets.py b/Tools/jit/_targets.py
index 5604c429bcf8ad..73d10a128756eb 100644
--- a/Tools/jit/_targets.py
+++ b/Tools/jit/_targets.py
@@ -221,7 +221,12 @@ def build(
file.write("\n")
for line in _writer.dump(stencil_groups):
file.write(f"{line}\n")
- jit_stencils_new.replace(jit_stencils)
+ try:
+ jit_stencils_new.replace(jit_stencils)
+ except FileNotFoundError:
+ # another process probably already moved the file
+ if not jit_stencils.is_file():
+ raise
finally:
jit_stencils_new.unlink(missing_ok=True)
From 35ae4aab1aae93c1c11c45ac431787ff79ce7907 Mon Sep 17 00:00:00 2001
From: Sergey B Kirpichev
Date: Tue, 6 Aug 2024 06:56:50 +0300
Subject: [PATCH 135/139] gh-122686: Pin attrs package (for Hypothesis
workflow) (#122687)
Co-authored-by: Victor Stinner
---
Tools/requirements-hypothesis.txt | 3 +++
1 file changed, 3 insertions(+)
diff --git a/Tools/requirements-hypothesis.txt b/Tools/requirements-hypothesis.txt
index ab3f39ac6ee087..03f955ba8bf310 100644
--- a/Tools/requirements-hypothesis.txt
+++ b/Tools/requirements-hypothesis.txt
@@ -1,4 +1,7 @@
# Requirements file for hypothesis that
# we use to run our property-based tests in CI.
+# see https://github.com/python/cpython/issues/122686
+attrs<=23.2.0
+
hypothesis==6.104.2
From b0c48b8fd88f26b31ec2f743358091073277dcde Mon Sep 17 00:00:00 2001
From: Malcolm Smith
Date: Tue, 6 Aug 2024 05:28:58 +0100
Subject: [PATCH 136/139] gh-116622: Android logging fixes (#122698)
Modifies the handling of stdout/stderr redirection on Android to accommodate
the rate and buffer size limits imposed by Android's logging infrastructure.
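
The rate limiting referred to above is a token bucket; a minimal standalone sketch follows (the constants mirror those added in _android_support.py, but the class name and structure here are illustrative only):

    from time import sleep, time

    MAX_BYTES_PER_SECOND = 1024 * 1024  # assumed drain rate of `adb logcat`
    BUCKET_SIZE = 128 * 1024            # assumed safe share of the logcat buffer

    class RateLimiter:
        def __init__(self):
            self._level = 0
            self._prev = time()

        def pay(self, cost):
            # Refill the bucket based on elapsed time, capped at its size.
            now = time()
            self._level = min(self._level + (now - self._prev) * MAX_BYTES_PER_SECOND,
                              BUCKET_SIZE)
            self._prev = now
            # Spend tokens; sleep off any deficit before the caller writes.
            self._level -= cost
            if self._level < 0:
                sleep(-self._level / MAX_BYTES_PER_SECOND)
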
---
Lib/_android_support.py | 128 +++++++++++++----
Lib/test/test_android.py | 132 +++++++++++++++---
...-08-05-19-04-06.gh-issue-116622.3LWUzE.rst | 1 +
3 files changed, 213 insertions(+), 48 deletions(-)
create mode 100644 Misc/NEWS.d/next/Core and Builtins/2024-08-05-19-04-06.gh-issue-116622.3LWUzE.rst
diff --git a/Lib/_android_support.py b/Lib/_android_support.py
index 590e85ea8c2db1..d5d13ec6a48e14 100644
--- a/Lib/_android_support.py
+++ b/Lib/_android_support.py
@@ -1,19 +1,20 @@
import io
import sys
-
+from threading import RLock
+from time import sleep, time
# The maximum length of a log message in bytes, including the level marker and
-# tag, is defined as LOGGER_ENTRY_MAX_PAYLOAD in
-# platform/system/logging/liblog/include/log/log.h. As of API level 30, messages
-# longer than this will be be truncated by logcat. This limit has already been
-# reduced at least once in the history of Android (from 4076 to 4068 between API
-# level 23 and 26), so leave some headroom.
+# tag, is defined as LOGGER_ENTRY_MAX_PAYLOAD at
+# https://cs.android.com/android/platform/superproject/+/android-14.0.0_r1:system/logging/liblog/include/log/log.h;l=71.
+# Messages longer than this will be be truncated by logcat. This limit has already
+# been reduced at least once in the history of Android (from 4076 to 4068 between
+# API level 23 and 26), so leave some headroom.
MAX_BYTES_PER_WRITE = 4000
# UTF-8 uses a maximum of 4 bytes per character, so limiting text writes to this
-# size ensures that TextIOWrapper can always avoid exceeding MAX_BYTES_PER_WRITE.
+# size ensures that we can always avoid exceeding MAX_BYTES_PER_WRITE.
# However, if the actual number of bytes per character is smaller than that,
-# then TextIOWrapper may still join multiple consecutive text writes into binary
+# then we may still join multiple consecutive text writes into binary
# writes containing a larger number of characters.
MAX_CHARS_PER_WRITE = MAX_BYTES_PER_WRITE // 4
@@ -26,18 +27,22 @@ def init_streams(android_log_write, stdout_prio, stderr_prio):
if sys.executable:
return # Not embedded in an app.
+ global logcat
+ logcat = Logcat(android_log_write)
+
sys.stdout = TextLogStream(
- android_log_write, stdout_prio, "python.stdout", errors=sys.stdout.errors)
+ stdout_prio, "python.stdout", errors=sys.stdout.errors)
sys.stderr = TextLogStream(
- android_log_write, stderr_prio, "python.stderr", errors=sys.stderr.errors)
+ stderr_prio, "python.stderr", errors=sys.stderr.errors)
class TextLogStream(io.TextIOWrapper):
- def __init__(self, android_log_write, prio, tag, **kwargs):
+ def __init__(self, prio, tag, **kwargs):
kwargs.setdefault("encoding", "UTF-8")
- kwargs.setdefault("line_buffering", True)
- super().__init__(BinaryLogStream(android_log_write, prio, tag), **kwargs)
- self._CHUNK_SIZE = MAX_BYTES_PER_WRITE
+ super().__init__(BinaryLogStream(prio, tag), **kwargs)
+ self._lock = RLock()
+ self._pending_bytes = []
+ self._pending_bytes_count = 0
def __repr__(self):
return f"<TextLogStream {self.tag!r}>"
@@ -52,19 +57,48 @@ def write(self, s):
s = str.__str__(s)
# We want to emit one log message per line wherever possible, so split
- # the string before sending it to the superclass. Note that
- # "".splitlines() == [], so nothing will be logged for an empty string.
- for line in s.splitlines(keepends=True):
- while line:
- super().write(line[:MAX_CHARS_PER_WRITE])
- line = line[MAX_CHARS_PER_WRITE:]
+ # the string into lines first. Note that "".splitlines() == [], so
+ # nothing will be logged for an empty string.
+ with self._lock:
+ for line in s.splitlines(keepends=True):
+ while line:
+ chunk = line[:MAX_CHARS_PER_WRITE]
+ line = line[MAX_CHARS_PER_WRITE:]
+ self._write_chunk(chunk)
return len(s)
+ # The size and behavior of TextIOWrapper's buffer is not part of its public
+ # API, so we handle buffering ourselves to avoid truncation.
+ def _write_chunk(self, s):
+ b = s.encode(self.encoding, self.errors)
+ if self._pending_bytes_count + len(b) > MAX_BYTES_PER_WRITE:
+ self.flush()
+
+ self._pending_bytes.append(b)
+ self._pending_bytes_count += len(b)
+ if (
+ self.write_through
+ or b.endswith(b"\n")
+ or self._pending_bytes_count > MAX_BYTES_PER_WRITE
+ ):
+ self.flush()
+
+ def flush(self):
+ with self._lock:
+ self.buffer.write(b"".join(self._pending_bytes))
+ self._pending_bytes.clear()
+ self._pending_bytes_count = 0
+
+ # Since this is a line-based logging system, line buffering cannot be turned
+ # off, i.e. a newline always causes a flush.
+ @property
+ def line_buffering(self):
+ return True
+
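A minimal standalone sketch of the buffering rule that _write_chunk and flush implement above (illustrative only; the module-level list, counter, and print stand-in are assumptions, not the real stream API):

    MAX_BYTES_PER_WRITE = 4000
    pending, pending_count = [], 0

    def write_chunk(text, write_through=False):
        global pending_count
        b = text.encode("UTF-8")
        if pending_count + len(b) > MAX_BYTES_PER_WRITE:
            flush()  # appending would overflow, so emit the buffered bytes first
        pending.append(b)
        pending_count += len(b)
        # Line-based log: flush on a newline, in write-through mode, or on overflow.
        if write_through or b.endswith(b"\n") or pending_count > MAX_BYTES_PER_WRITE:
            flush()

    def flush():
        global pending_count
        if pending:
            print(b"".join(pending))  # stand-in for the underlying binary write
        pending.clear()
        pending_count = 0

    write_chunk("partial ")  # buffered; nothing emitted yet
    write_chunk("line\n")    # newline: both chunks are emitted as one message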
class BinaryLogStream(io.RawIOBase):
- def __init__(self, android_log_write, prio, tag):
- self.android_log_write = android_log_write
+ def __init__(self, prio, tag):
self.prio = prio
self.tag = tag
@@ -85,10 +119,48 @@ def write(self, b):
# Writing an empty string to the stream should have no effect.
if b:
- # Encode null bytes using "modified UTF-8" to avoid truncating the
- # message. This should not affect the return value, as the caller
- # may be expecting it to match the length of the input.
- self.android_log_write(self.prio, self.tag,
- b.replace(b"\x00", b"\xc0\x80"))
-
+ logcat.write(self.prio, self.tag, b)
return len(b)
+
+
+# When a large volume of data is written to logcat at once, e.g. when a test
+# module fails in --verbose3 mode, there's a risk of overflowing logcat's own
+# buffer and losing messages. We avoid this by imposing a rate limit using the
+# token bucket algorithm, based on a conservative estimate of how fast `adb
+# logcat` can consume data.
+MAX_BYTES_PER_SECOND = 1024 * 1024
+
+# The logcat buffer size of a device can be determined by running `logcat -g`.
+# We set the token bucket size to half of the buffer size of our current minimum
+# API level, because other things on the system will be producing messages as
+# well.
+BUCKET_SIZE = 128 * 1024
+
+# https://cs.android.com/android/platform/superproject/+/android-14.0.0_r1:system/logging/liblog/include/log/log_read.h;l=39
+PER_MESSAGE_OVERHEAD = 28
+
+
+class Logcat:
+ def __init__(self, android_log_write):
+ self.android_log_write = android_log_write
+ self._lock = RLock()
+ self._bucket_level = 0
+ self._prev_write_time = time()
+
+ def write(self, prio, tag, message):
+ # Encode null bytes using "modified UTF-8" to avoid them truncating the
+ # message.
+ message = message.replace(b"\x00", b"\xc0\x80")
+
+ with self._lock:
+ now = time()
+ self._bucket_level += (
+ (now - self._prev_write_time) * MAX_BYTES_PER_SECOND)
+ self._bucket_level = min(self._bucket_level, BUCKET_SIZE)
+ self._prev_write_time = now
+
+ self._bucket_level -= PER_MESSAGE_OVERHEAD + len(tag) + len(message)
+ if self._bucket_level < 0:
+ sleep(-self._bucket_level / MAX_BYTES_PER_SECOND)
+
+ self.android_log_write(prio, tag, message)
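The Logcat class is a conventional token bucket: tokens (bytes) accumulate at MAX_BYTES_PER_SECOND up to BUCKET_SIZE, each message spends tokens, and an overdrawn bucket is paid for by sleeping. A minimal standalone sketch of the same idea (assumed names; unlike the real code, it starts with a full bucket and prints instead of calling android_log_write):

    from time import sleep, time

    MAX_BYTES_PER_SECOND = 1024 * 1024  # refill rate
    BUCKET_SIZE = 128 * 1024            # maximum burst size

    level = BUCKET_SIZE  # simplification: start full rather than empty
    prev = time()

    def send(message: bytes):
        global level, prev
        now = time()
        # Refill the bucket for the elapsed time, capped at the bucket size.
        level = min(level + (now - prev) * MAX_BYTES_PER_SECOND, BUCKET_SIZE)
        prev = now
        level -= len(message)
        if level < 0:
            sleep(-level / MAX_BYTES_PER_SECOND)  # wait until the debt is repaid
        print(len(message), "bytes sent")  # the real code calls android_log_write here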
diff --git a/Lib/test/test_android.py b/Lib/test/test_android.py
index 115882a4c281f6..82035061bb6fdd 100644
--- a/Lib/test/test_android.py
+++ b/Lib/test/test_android.py
@@ -1,14 +1,17 @@
+import io
import platform
import queue
import re
import subprocess
import sys
import unittest
+from _android_support import TextLogStream
from array import array
-from contextlib import contextmanager
+from contextlib import ExitStack, contextmanager
from threading import Thread
from test.support import LOOPBACK_TIMEOUT
-from time import time
+from time import sleep, time
+from unittest.mock import patch
if sys.platform != "android":
@@ -81,18 +84,39 @@ def unbuffered(self, stream):
finally:
stream.reconfigure(write_through=False)
+ # In --verbose3 mode, sys.stdout and sys.stderr are captured, so we can't
+ # test them directly. Detect this mode and use some temporary streams with
+ # the same properties.
+ def stream_context(self, stream_name, level):
+ # https://developer.android.com/ndk/reference/group/logging
+ prio = {"I": 4, "W": 5}[level]
+
+ stack = ExitStack()
+ stack.enter_context(self.subTest(stream_name))
+ stream = getattr(sys, stream_name)
+ if isinstance(stream, io.StringIO):
+ stack.enter_context(
+ patch(
+ f"sys.{stream_name}",
+ TextLogStream(
+ prio, f"python.{stream_name}", errors="backslashreplace"
+ ),
+ )
+ )
+ return stack
+
def test_str(self):
for stream_name, level in [("stdout", "I"), ("stderr", "W")]:
- with self.subTest(stream=stream_name):
+ with self.stream_context(stream_name, level):
stream = getattr(sys, stream_name)
tag = f"python.{stream_name}"
self.assertEqual(f"", repr(stream))
- self.assertTrue(stream.writable())
- self.assertFalse(stream.readable())
+ self.assertIs(stream.writable(), True)
+ self.assertIs(stream.readable(), False)
self.assertEqual("UTF-8", stream.encoding)
- self.assertTrue(stream.line_buffering)
- self.assertFalse(stream.write_through)
+ self.assertIs(stream.line_buffering, True)
+ self.assertIs(stream.write_through, False)
# stderr is backslashreplace by default; stdout is configured
# that way by libregrtest.main.
@@ -147,6 +171,13 @@ def write(s, lines=None, *, write_len=None):
write("f\n\ng", ["exxf", ""])
write("\n", ["g"])
+ # Since this is a line-based logging system, line buffering
+ # cannot be turned off, i.e. a newline always causes a flush.
+ stream.reconfigure(line_buffering=False)
+ self.assertIs(stream.line_buffering, True)
+
+ # However, buffering can be turned off completely if you want a
+ # flush after every write.
with self.unbuffered(stream):
write("\nx", ["", "x"])
write("\na\n", ["", "a"])
@@ -209,30 +240,30 @@ def __str__(self):
# (MAX_BYTES_PER_WRITE).
#
# ASCII (1 byte per character)
- write(("foobar" * 700) + "\n",
- [("foobar" * 666) + "foob", # 4000 bytes
- "ar" + ("foobar" * 33)]) # 200 bytes
+ write(("foobar" * 700) + "\n", # 4200 bytes in
+ [("foobar" * 666) + "foob", # 4000 bytes out
+ "ar" + ("foobar" * 33)]) # 200 bytes out
# "Full-width" digits 0-9 (3 bytes per character)
s = "\uff10\uff11\uff12\uff13\uff14\uff15\uff16\uff17\uff18\uff19"
- write((s * 150) + "\n",
- [s * 100, # 3000 bytes
- s * 50]) # 1500 bytes
+ write((s * 150) + "\n", # 4500 bytes in
+ [s * 100, # 3000 bytes out
+ s * 50]) # 1500 bytes out
s = "0123456789"
- write(s * 200, [])
- write(s * 150, [])
- write(s * 51, [s * 350]) # 3500 bytes
- write("\n", [s * 51]) # 510 bytes
+ write(s * 200, []) # 2000 bytes in
+ write(s * 150, []) # 1500 bytes in
+ write(s * 51, [s * 350]) # 510 bytes in, 3500 bytes out
+ write("\n", [s * 51]) # 0 bytes in, 510 bytes out
def test_bytes(self):
for stream_name, level in [("stdout", "I"), ("stderr", "W")]:
- with self.subTest(stream=stream_name):
+ with self.stream_context(stream_name, level):
stream = getattr(sys, stream_name).buffer
tag = f"python.{stream_name}"
self.assertEqual(f"", repr(stream))
- self.assertTrue(stream.writable())
- self.assertFalse(stream.readable())
+ self.assertIs(stream.writable(), True)
+ self.assertIs(stream.readable(), False)
def write(b, lines=None, *, write_len=None):
if write_len is None:
@@ -330,3 +361,64 @@ def write(b, lines=None, *, write_len=None):
fr"{type(obj).__name__}"
):
stream.write(obj)
+
+ def test_rate_limit(self):
+ # https://cs.android.com/android/platform/superproject/+/android-14.0.0_r1:system/logging/liblog/include/log/log_read.h;l=39
+ PER_MESSAGE_OVERHEAD = 28
+
+ # https://developer.android.com/ndk/reference/group/logging
+ ANDROID_LOG_DEBUG = 3
+
+ # To avoid flooding the test script output, use a different tag rather
+ # than stdout or stderr.
+ tag = "python.rate_limit"
+ stream = TextLogStream(ANDROID_LOG_DEBUG, tag)
+
+ # Make a test message which consumes 1 KB of the logcat buffer.
+ message = "Line {:03d} "
+ message += "." * (
+ 1024 - PER_MESSAGE_OVERHEAD - len(tag) - len(message.format(0))
+ ) + "\n"
+
+ # See _android_support.py. The default values of these parameters work
+ # well across a wide range of devices, but we'll use smaller values to
+ # ensure a quick and reliable test that doesn't flood the log too much.
+ MAX_KB_PER_SECOND = 100
+ BUCKET_KB = 10
+ with (
+ patch("_android_support.MAX_BYTES_PER_SECOND", MAX_KB_PER_SECOND * 1024),
+ patch("_android_support.BUCKET_SIZE", BUCKET_KB * 1024),
+ ):
+ # Make sure the token bucket is full.
+ sleep(BUCKET_KB / MAX_KB_PER_SECOND)
+ line_num = 0
+
+ # Write BUCKET_KB messages, and return the rate at which they were
+ # accepted in KB per second.
+ def write_bucketful():
+ nonlocal line_num
+ start = time()
+ max_line_num = line_num + BUCKET_KB
+ while line_num < max_line_num:
+ stream.write(message.format(line_num))
+ line_num += 1
+ return BUCKET_KB / (time() - start)
+
+ # The first bucketful should be written with minimal delay. The
+ # factor of 2 here is not arbitrary: it verifies that the system can
+ # write fast enough to empty the bucket within two bucketfuls, which
+ # the next part of the test depends on.
+ self.assertGreater(write_bucketful(), MAX_KB_PER_SECOND * 2)
+
+ # Write another bucketful to empty the token bucket completely.
+ write_bucketful()
+
+ # The next bucketful should be written at the rate limit.
+ self.assertAlmostEqual(
+ write_bucketful(), MAX_KB_PER_SECOND,
+ delta=MAX_KB_PER_SECOND * 0.1
+ )
+
+ # Once the token bucket refills, we should go back to full speed.
+ sleep(BUCKET_KB / MAX_KB_PER_SECOND)
+ self.assertGreater(write_bucketful(), MAX_KB_PER_SECOND * 2)
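For reference, the expected timings with the test's reduced parameters (BUCKET_KB = 10, MAX_KB_PER_SECOND = 100): writing a bucketful against an empty bucket should take roughly 10 / 100 = 0.1 s, so the rate-limited write_bucketful() call should return about 100 KB/s, with the 10% delta leaving room for scheduler and timer jitter; a bucketful written against a full bucket incurs no sleep and should comfortably exceed the asserted 200 KB/s.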
diff --git a/Misc/NEWS.d/next/Core and Builtins/2024-08-05-19-04-06.gh-issue-116622.3LWUzE.rst b/Misc/NEWS.d/next/Core and Builtins/2024-08-05-19-04-06.gh-issue-116622.3LWUzE.rst
new file mode 100644
index 00000000000000..9320928477af2c
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2024-08-05-19-04-06.gh-issue-116622.3LWUzE.rst
@@ -0,0 +1 @@
+Fix Android stdout and stderr messages being truncated or lost.
From 94a4bd79a7ab7b0ff5f216782d6fdaff6ed348fc Mon Sep 17 00:00:00 2001
From: Kirill Podoprigora
Date: Tue, 6 Aug 2024 08:57:36 +0300
Subject: [PATCH 137/139] gh-122704: Fix reference leak in Modules/_pickle.c
(GH-122705)
---
Modules/_pickle.c | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/Modules/_pickle.c b/Modules/_pickle.c
index 5d9ee8cb6c679d..dc0ef0a184d205 100644
--- a/Modules/_pickle.c
+++ b/Modules/_pickle.c
@@ -1962,9 +1962,11 @@ whichmodule(PickleState *st, PyObject *global, PyObject *global_name, PyObject *
PyErr_Format(st->PicklingError,
"Can't pickle %R: import of module %R failed",
global, module_name);
+ Py_DECREF(module_name);
return NULL;
}
if (check_dotted_path(module, global_name, dotted_path) < 0) {
+ Py_DECREF(module_name);
Py_DECREF(module);
return NULL;
}
@@ -1974,6 +1976,7 @@ whichmodule(PickleState *st, PyObject *global, PyObject *global_name, PyObject *
PyErr_Format(st->PicklingError,
"Can't pickle %R: attribute lookup %S on %S failed",
global, global_name, module_name);
+ Py_DECREF(module_name);
return NULL;
}
if (actual != global) {
@@ -1981,6 +1984,7 @@ whichmodule(PickleState *st, PyObject *global, PyObject *global_name, PyObject *
PyErr_Format(st->PicklingError,
"Can't pickle %R: it's not the same object as %S.%S",
global, module_name, global_name);
+ Py_DECREF(module_name);
return NULL;
}
Py_DECREF(actual);
From e74680b7186e6823ea37cf7ab326d3d6bfa6f59a Mon Sep 17 00:00:00 2001
From: Serhiy Storchaka
Date: Tue, 6 Aug 2024 08:59:44 +0300
Subject: [PATCH 138/139] gh-122595: Add more error checks in the compiler
(GH-122596)
---
Python/compile.c | 78 +++++++++++++++++++++---------
Python/symtable.c | 119 ++++++++++++++++++++++++++++++++++------------
2 files changed, 145 insertions(+), 52 deletions(-)
diff --git a/Python/compile.c b/Python/compile.c
index 87b2c2705474a4..9695a99d201144 100644
--- a/Python/compile.c
+++ b/Python/compile.c
@@ -505,21 +505,35 @@ dictbytype(PyObject *src, int scope_type, int flag, Py_ssize_t offset)
deterministic, then the generated bytecode is not deterministic.
*/
sorted_keys = PyDict_Keys(src);
- if (sorted_keys == NULL)
+ if (sorted_keys == NULL) {
+ Py_DECREF(dest);
return NULL;
+ }
if (PyList_Sort(sorted_keys) != 0) {
Py_DECREF(sorted_keys);
+ Py_DECREF(dest);
return NULL;
}
num_keys = PyList_GET_SIZE(sorted_keys);
for (key_i = 0; key_i < num_keys; key_i++) {
- /* XXX this should probably be a macro in symtable.h */
- long vi;
k = PyList_GET_ITEM(sorted_keys, key_i);
v = PyDict_GetItemWithError(src, k);
- assert(v && PyLong_Check(v));
- vi = PyLong_AS_LONG(v);
+ if (!v) {
+ if (!PyErr_Occurred()) {
+ PyErr_SetObject(PyExc_KeyError, k);
+ }
+ Py_DECREF(sorted_keys);
+ Py_DECREF(dest);
+ return NULL;
+ }
+ long vi = PyLong_AsLong(v);
+ if (vi == -1 && PyErr_Occurred()) {
+ Py_DECREF(sorted_keys);
+ Py_DECREF(dest);
+ return NULL;
+ }
+ /* XXX this should probably be a macro in symtable.h */
scope = (vi >> SCOPE_OFFSET) & SCOPE_MASK;
if (scope == scope_type || vi & flag) {
@@ -631,6 +645,7 @@ compiler_set_qualname(struct compiler *c)
scope = _PyST_GetScope(parent->u_ste, mangled);
Py_DECREF(mangled);
+ RETURN_IF_ERROR(scope);
assert(scope != GLOBAL_IMPLICIT);
if (scope == GLOBAL_EXPLICIT)
force_global = 1;
@@ -1648,7 +1663,7 @@ dict_lookup_arg(PyObject *dict, PyObject *name)
if (v == NULL) {
return ERROR;
}
- return PyLong_AS_LONG(v);
+ return PyLong_AsLong(v);
}
static int
@@ -1671,7 +1686,7 @@ compiler_lookup_arg(struct compiler *c, PyCodeObject *co, PyObject *name)
else {
arg = dict_lookup_arg(c->u->u_metadata.u_freevars, name);
}
- if (arg == -1) {
+ if (arg == -1 && !PyErr_Occurred()) {
PyObject *freevars = _PyCode_GetFreevars(co);
if (freevars == NULL) {
PyErr_Clear();
@@ -4085,6 +4100,8 @@ compiler_nameop(struct compiler *c, location loc,
case GLOBAL_EXPLICIT:
optype = OP_GLOBAL;
break;
+ case -1:
+ goto error;
default:
/* scope can be 0 */
break;
@@ -4638,6 +4655,7 @@ is_import_originated(struct compiler *c, expr_ty e)
}
long flags = _PyST_GetSymbol(SYMTABLE(c)->st_top, e->v.Name.id);
+ RETURN_IF_ERROR(flags);
return flags & DEF_IMPORT;
}
@@ -4657,10 +4675,12 @@ can_optimize_super_call(struct compiler *c, expr_ty attr)
PyObject *super_name = e->v.Call.func->v.Name.id;
// detect statically-visible shadowing of 'super' name
int scope = _PyST_GetScope(SYMTABLE_ENTRY(c), super_name);
+ RETURN_IF_ERROR(scope);
if (scope != GLOBAL_IMPLICIT) {
return 0;
}
scope = _PyST_GetScope(SYMTABLE(c)->st_top, super_name);
+ RETURN_IF_ERROR(scope);
if (scope != 0) {
return 0;
}
@@ -4767,7 +4787,9 @@ maybe_optimize_method_call(struct compiler *c, expr_ty e)
}
/* Check that the base object is not something that is imported */
- if (is_import_originated(c, meth->v.Attribute.value)) {
+ int ret = is_import_originated(c, meth->v.Attribute.value);
+ RETURN_IF_ERROR(ret);
+ if (ret) {
return 0;
}
@@ -4795,7 +4817,9 @@ maybe_optimize_method_call(struct compiler *c, expr_ty e)
/* Alright, we can optimize the code. */
location loc = LOC(meth);
- if (can_optimize_super_call(c, meth)) {
+ ret = can_optimize_super_call(c, meth);
+ RETURN_IF_ERROR(ret);
+ if (ret) {
RETURN_IF_ERROR(load_args_for_super(c, meth->v.Attribute.value));
int opcode = asdl_seq_LEN(meth->v.Attribute.value->v.Call.args) ?
LOAD_SUPER_METHOD : LOAD_ZERO_SUPER_METHOD;
@@ -5367,8 +5391,10 @@ push_inlined_comprehension_state(struct compiler *c, location loc,
PyObject *k, *v;
Py_ssize_t pos = 0;
while (PyDict_Next(entry->ste_symbols, &pos, &k, &v)) {
- assert(PyLong_Check(v));
- long symbol = PyLong_AS_LONG(v);
+ long symbol = PyLong_AsLong(v);
+ if (symbol == -1 && PyErr_Occurred()) {
+ return ERROR;
+ }
long scope = (symbol >> SCOPE_OFFSET) & SCOPE_MASK;
PyObject *outv = PyDict_GetItemWithError(SYMTABLE_ENTRY(c)->ste_symbols, k);
if (outv == NULL) {
@@ -5377,8 +5403,11 @@ push_inlined_comprehension_state(struct compiler *c, location loc,
}
outv = _PyLong_GetZero();
}
- assert(PyLong_CheckExact(outv));
- long outsc = (PyLong_AS_LONG(outv) >> SCOPE_OFFSET) & SCOPE_MASK;
+ long outsymbol = PyLong_AsLong(outv);
+ if (outsymbol == -1 && PyErr_Occurred()) {
+ return ERROR;
+ }
+ long outsc = (outsymbol >> SCOPE_OFFSET) & SCOPE_MASK;
// If a name has different scope inside than outside the comprehension,
// we need to temporarily handle it with the right scope while
// compiling the comprehension. If it's free in the comprehension
@@ -6064,14 +6093,18 @@ compiler_visit_expr(struct compiler *c, expr_ty e)
return compiler_formatted_value(c, e);
/* The following exprs can be assignment targets. */
case Attribute_kind:
- if (e->v.Attribute.ctx == Load && can_optimize_super_call(c, e)) {
- RETURN_IF_ERROR(load_args_for_super(c, e->v.Attribute.value));
- int opcode = asdl_seq_LEN(e->v.Attribute.value->v.Call.args) ?
- LOAD_SUPER_ATTR : LOAD_ZERO_SUPER_ATTR;
- ADDOP_NAME(c, loc, opcode, e->v.Attribute.attr, names);
- loc = update_start_location_to_match_attr(c, loc, e);
- ADDOP(c, loc, NOP);
- return SUCCESS;
+ if (e->v.Attribute.ctx == Load) {
+ int ret = can_optimize_super_call(c, e);
+ RETURN_IF_ERROR(ret);
+ if (ret) {
+ RETURN_IF_ERROR(load_args_for_super(c, e->v.Attribute.value));
+ int opcode = asdl_seq_LEN(e->v.Attribute.value->v.Call.args) ?
+ LOAD_SUPER_ATTR : LOAD_ZERO_SUPER_ATTR;
+ ADDOP_NAME(c, loc, opcode, e->v.Attribute.attr, names);
+ loc = update_start_location_to_match_attr(c, loc, e);
+ ADDOP(c, loc, NOP);
+ return SUCCESS;
+ }
}
RETURN_IF_ERROR(compiler_maybe_add_static_attribute_to_class(c, e));
VISIT(c, expr, e->v.Attribute.value);
@@ -7300,7 +7333,8 @@ consts_dict_keys_inorder(PyObject *dict)
if (consts == NULL)
return NULL;
while (PyDict_Next(dict, &pos, &k, &v)) {
- i = PyLong_AS_LONG(v);
+ assert(PyLong_CheckExact(v));
+ i = PyLong_AsLong(v);
/* The keys of the dictionary can be tuples wrapping a constant.
* (see dict_add_o and _PyCode_ConstantKey). In that case
* the object we want is always second. */
diff --git a/Python/symtable.c b/Python/symtable.c
index ef81a0799de3aa..4acf762f8fca39 100644
--- a/Python/symtable.c
+++ b/Python/symtable.c
@@ -526,17 +526,31 @@ _PySymtable_LookupOptional(struct symtable *st, void *key,
long
_PyST_GetSymbol(PySTEntryObject *ste, PyObject *name)
{
- PyObject *v = PyDict_GetItemWithError(ste->ste_symbols, name);
- if (!v)
+ PyObject *v;
+ if (PyDict_GetItemRef(ste->ste_symbols, name, &v) < 0) {
+ return -1;
+ }
+ if (!v) {
return 0;
- assert(PyLong_Check(v));
- return PyLong_AS_LONG(v);
+ }
+ long symbol = PyLong_AsLong(v);
+ Py_DECREF(v);
+ if (symbol < 0) {
+ if (!PyErr_Occurred()) {
+ PyErr_SetString(PyExc_SystemError, "invalid symbol");
+ }
+ return -1;
+ }
+ return symbol;
}
int
_PyST_GetScope(PySTEntryObject *ste, PyObject *name)
{
long symbol = _PyST_GetSymbol(ste, name);
+ if (symbol < 0) {
+ return -1;
+ }
return (symbol >> SCOPE_OFFSET) & SCOPE_MASK;
}
@@ -715,11 +729,14 @@ analyze_name(PySTEntryObject *ste, PyObject *scopes, PyObject *name, long flags,
// global statement), we want to also treat it as a global in this scope.
if (class_entry != NULL) {
long class_flags = _PyST_GetSymbol(class_entry, name);
+ if (class_flags < 0) {
+ return 0;
+ }
if (class_flags & DEF_GLOBAL) {
SET_SCOPE(scopes, name, GLOBAL_EXPLICIT);
return 1;
}
- else if (class_flags & DEF_BOUND && !(class_flags & DEF_NONLOCAL)) {
+ else if ((class_flags & DEF_BOUND) && !(class_flags & DEF_NONLOCAL)) {
SET_SCOPE(scopes, name, GLOBAL_IMPLICIT);
return 1;
}
@@ -763,6 +780,9 @@ is_free_in_any_child(PySTEntryObject *entry, PyObject *key)
PySTEntryObject *child_ste = (PySTEntryObject *)PyList_GET_ITEM(
entry->ste_children, i);
long scope = _PyST_GetScope(child_ste, key);
+ if (scope < 0) {
+ return -1;
+ }
if (scope == FREE) {
return 1;
}
@@ -781,7 +801,10 @@ inline_comprehension(PySTEntryObject *ste, PySTEntryObject *comp,
while (PyDict_Next(comp->ste_symbols, &pos, &k, &v)) {
// skip comprehension parameter
- long comp_flags = PyLong_AS_LONG(v);
+ long comp_flags = PyLong_AsLong(v);
+ if (comp_flags == -1 && PyErr_Occurred()) {
+ return 0;
+ }
if (comp_flags & DEF_PARAM) {
assert(_PyUnicode_EqualToASCIIString(k, ".0"));
continue;
@@ -822,11 +845,19 @@ inline_comprehension(PySTEntryObject *ste, PySTEntryObject *comp,
SET_SCOPE(scopes, k, scope);
}
else {
- if (PyLong_AsLong(existing) & DEF_BOUND) {
+ long flags = PyLong_AsLong(existing);
+ if (flags == -1 && PyErr_Occurred()) {
+ return 0;
+ }
+ if ((flags & DEF_BOUND) && ste->ste_type != ClassBlock) {
// free vars in comprehension that are locals in outer scope can
// now simply be locals, unless they are free in comp children,
// or if the outer scope is a class block
- if (!is_free_in_any_child(comp, k) && ste->ste_type != ClassBlock) {
+ int ok = is_free_in_any_child(comp, k);
+ if (ok < 0) {
+ return 0;
+ }
+ if (!ok) {
if (PySet_Discard(comp_free, k) < 0) {
return 0;
}
@@ -861,9 +892,10 @@ analyze_cells(PyObject *scopes, PyObject *free, PyObject *inlined_cells)
if (!v_cell)
return 0;
while (PyDict_Next(scopes, &pos, &name, &v)) {
- long scope;
- assert(PyLong_Check(v));
- scope = PyLong_AS_LONG(v);
+ long scope = PyLong_AsLong(v);
+ if (scope == -1 && PyErr_Occurred()) {
+ goto error;
+ }
if (scope != LOCAL)
continue;
int contains = PySet_Contains(free, name);
@@ -926,9 +958,10 @@ update_symbols(PyObject *symbols, PyObject *scopes,
/* Update scope information for all symbols in this scope */
while (PyDict_Next(symbols, &pos, &name, &v)) {
- long scope, flags;
- assert(PyLong_Check(v));
- flags = PyLong_AS_LONG(v);
+ long flags = PyLong_AsLong(v);
+ if (flags == -1 && PyErr_Occurred()) {
+ return 0;
+ }
int contains = PySet_Contains(inlined_cells, name);
if (contains < 0) {
return 0;
@@ -936,9 +969,18 @@ update_symbols(PyObject *symbols, PyObject *scopes,
if (contains) {
flags |= DEF_COMP_CELL;
}
- v_scope = PyDict_GetItemWithError(scopes, name);
- assert(v_scope && PyLong_Check(v_scope));
- scope = PyLong_AS_LONG(v_scope);
+ if (PyDict_GetItemRef(scopes, name, &v_scope) < 0) {
+ return 0;
+ }
+ if (!v_scope) {
+ PyErr_SetObject(PyExc_KeyError, name);
+ return 0;
+ }
+ long scope = PyLong_AsLong(v_scope);
+ Py_DECREF(v_scope);
+ if (scope == -1 && PyErr_Occurred()) {
+ return 0;
+ }
flags |= (scope << SCOPE_OFFSET);
v_new = PyLong_FromLong(flags);
if (!v_new)
@@ -971,7 +1013,11 @@ update_symbols(PyObject *symbols, PyObject *scopes,
or global in the class scope.
*/
if (classflag) {
- long flags = PyLong_AS_LONG(v) | DEF_FREE_CLASS;
+ long flags = PyLong_AsLong(v);
+ if (flags == -1 && PyErr_Occurred()) {
+ goto error;
+ }
+ flags |= DEF_FREE_CLASS;
v_new = PyLong_FromLong(flags);
if (!v_new) {
goto error;
@@ -1110,7 +1156,10 @@ analyze_block(PySTEntryObject *ste, PyObject *bound, PyObject *free,
}
while (PyDict_Next(ste->ste_symbols, &pos, &name, &v)) {
- long flags = PyLong_AS_LONG(v);
+ long flags = PyLong_AsLong(v);
+ if (flags == -1 && PyErr_Occurred()) {
+ goto error;
+ }
if (!analyze_name(ste, scopes, name, flags,
bound, local, free, global, type_params, class_entry))
goto error;
@@ -1395,9 +1444,12 @@ symtable_lookup_entry(struct symtable *st, PySTEntryObject *ste, PyObject *name)
{
PyObject *mangled = _Py_MaybeMangle(st->st_private, ste, name);
if (!mangled)
- return 0;
+ return -1;
long ret = _PyST_GetSymbol(ste, mangled);
Py_DECREF(mangled);
+ if (ret < 0) {
+ return -1;
+ }
return ret;
}
@@ -1420,7 +1472,10 @@ symtable_add_def_helper(struct symtable *st, PyObject *name, int flag, struct _s
return 0;
dict = ste->ste_symbols;
if ((o = PyDict_GetItemWithError(dict, mangled))) {
- val = PyLong_AS_LONG(o);
+ val = PyLong_AsLong(o);
+ if (val == -1 && PyErr_Occurred()) {
+ goto error;
+ }
if ((flag & DEF_PARAM) && (val & DEF_PARAM)) {
/* Is it better to use 'mangled' or 'name' here? */
PyErr_Format(PyExc_SyntaxError, DUPLICATE_ARGUMENT, name);
@@ -1466,16 +1521,20 @@ symtable_add_def_helper(struct symtable *st, PyObject *name, int flag, struct _s
if (flag & DEF_PARAM) {
if (PyList_Append(ste->ste_varnames, mangled) < 0)
goto error;
- } else if (flag & DEF_GLOBAL) {
+ } else if (flag & DEF_GLOBAL) {
/* XXX need to update DEF_GLOBAL for other flags too;
perhaps only DEF_FREE_GLOBAL */
- val = flag;
+ val = 0;
if ((o = PyDict_GetItemWithError(st->st_global, mangled))) {
- val |= PyLong_AS_LONG(o);
+ val = PyLong_AsLong(o);
+ if (val == -1 && PyErr_Occurred()) {
+ goto error;
+ }
}
else if (PyErr_Occurred()) {
goto error;
}
+ val |= flag;
o = PyLong_FromLong(val);
if (o == NULL)
goto error;
@@ -2176,6 +2235,9 @@ symtable_extend_namedexpr_scope(struct symtable *st, expr_ty e)
*/
if (ste->ste_comprehension) {
long target_in_scope = symtable_lookup_entry(st, ste, target_name);
+ if (target_in_scope < 0) {
+ return 0;
+ }
if ((target_in_scope & DEF_COMP_ITER) &&
(target_in_scope & DEF_LOCAL)) {
PyErr_Format(PyExc_SyntaxError, NAMED_EXPR_COMP_CONFLICT, target_name);
@@ -2188,6 +2250,9 @@ symtable_extend_namedexpr_scope(struct symtable *st, expr_ty e)
/* If we find a FunctionBlock entry, add as GLOBAL/LOCAL or NONLOCAL/LOCAL */
if (ste->ste_type == FunctionBlock) {
long target_in_scope = symtable_lookup_entry(st, ste, target_name);
+ if (target_in_scope < 0) {
+ return 0;
+ }
if (target_in_scope & DEF_GLOBAL) {
if (!symtable_add_def(st, target_name, DEF_GLOBAL, LOCATION(e)))
return 0;
@@ -2601,9 +2666,6 @@ symtable_visit_params(struct symtable *st, asdl_arg_seq *args)
{
Py_ssize_t i;
- if (!args)
- return -1;
-
for (i = 0; i < asdl_seq_LEN(args); i++) {
arg_ty arg = (arg_ty)asdl_seq_GET(args, i);
if (!symtable_add_def(st, arg->arg, DEF_PARAM, LOCATION(arg)))
@@ -2650,9 +2712,6 @@ symtable_visit_argannotations(struct symtable *st, asdl_arg_seq *args)
{
Py_ssize_t i;
- if (!args)
- return -1;
-
for (i = 0; i < asdl_seq_LEN(args); i++) {
arg_ty arg = (arg_ty)asdl_seq_GET(args, i);
if (arg->annotation) {
From b72c748d7fb4ecc0bc4626c7bc05fbc6c83f0ba8 Mon Sep 17 00:00:00 2001
From: Jelle Zijlstra
Date: Mon, 5 Aug 2024 23:16:29 -0700
Subject: [PATCH 139/139] Fix syntax in generate_re_casefix.py (#122699)
This was broken in gh-97963.
---
Lib/re/_casefix.py | 2 +-
Tools/build/generate_re_casefix.py | 4 ++--
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/Lib/re/_casefix.py b/Lib/re/_casefix.py
index 06507d08bee02b..fed2d84fc01473 100644
--- a/Lib/re/_casefix.py
+++ b/Lib/re/_casefix.py
@@ -1,4 +1,4 @@
-# Auto-generated by Tools/scripts/generate_re_casefix.py.
+# Auto-generated by Tools/build/generate_re_casefix.py.
# Maps the code of lowercased character to codes of different lowercased
# characters which have the same uppercase.
diff --git a/Tools/build/generate_re_casefix.py b/Tools/build/generate_re_casefix.py
index b57ac07426c27c..6cebfbd025c58c 100755
--- a/Tools/build/generate_re_casefix.py
+++ b/Tools/build/generate_re_casefix.py
@@ -23,9 +23,9 @@ def update_file(file, content):
# Maps the code of lowercased character to codes of different lowercased
# characters which have the same uppercase.
-_EXTRA_CASES = {
+_EXTRA_CASES = {{
%s
-}
+}}
"""
def uname(i):